#include "toplev.h"
#include "output.h"
#include "ggc.h"
+#include "target.h"
/* Simplification and canonicalization of RTL. */
avoid_constant_pool_reference (x)
rtx x;
{
- rtx c, addr;
+ rtx c, tmp, addr;
enum machine_mode cmode;
- if (GET_CODE (x) != MEM)
- return x;
+ switch (GET_CODE (x))
+ {
+ case MEM:
+ break;
+
+ case FLOAT_EXTEND:
+ /* Handle float extensions of constant pool references. */
+ tmp = XEXP (x, 0);
+ c = avoid_constant_pool_reference (tmp);
+ if (c != tmp && GET_CODE (c) == CONST_DOUBLE)
+ {
+ REAL_VALUE_TYPE d;
+
+ REAL_VALUE_FROM_CONST_DOUBLE (d, c);
+ return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
+ }
+ return x;
+
+ default:
+ return x;
+ }
+
addr = XEXP (x, 0);
+ /* Call the target hook to undo the effects of -fpic etc. */
+ addr = (*targetm.delegitimize_address) (addr);
+
if (GET_CODE (addr) == LO_SUM)
addr = XEXP (addr, 1);
GET_MODE (SUBREG_REG (x)),
SUBREG_BYTE (x));
if (exp)
- x = exp;
+ x = exp;
}
return x;
unsigned int width = GET_MODE_BITSIZE (mode);
rtx trueop = avoid_constant_pool_reference (op);
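+ /* VEC_DUPLICATE replicates a scalar, or tiles a narrower vector,
+ across every element of the result vector. */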
+ if (code == VEC_DUPLICATE)
+ {
+ if (!VECTOR_MODE_P (mode))
+ abort ();
+ if (GET_MODE (trueop) != VOIDmode
+ && !VECTOR_MODE_P (GET_MODE (trueop))
+ && GET_MODE_INNER (mode) != GET_MODE (trueop))
+ abort ();
+ if (GET_MODE (trueop) != VOIDmode
+ && VECTOR_MODE_P (GET_MODE (trueop))
+ && GET_MODE_INNER (mode) != GET_MODE_INNER (GET_MODE (trueop)))
+ abort ();
+ if (GET_CODE (trueop) == CONST_INT || GET_CODE (trueop) == CONST_DOUBLE
+ || GET_CODE (trueop) == CONST_VECTOR)
+ {
+ int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
+ unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
+ rtvec v = rtvec_alloc (n_elts);
+ unsigned int i;
+
+ if (GET_CODE (trueop) != CONST_VECTOR)
+ for (i = 0; i < n_elts; i++)
+ RTVEC_ELT (v, i) = trueop;
+ else
+ {
+ enum machine_mode inmode = GET_MODE (trueop);
+ int in_elt_size = GET_MODE_SIZE (GET_MODE_INNER (inmode));
+ unsigned in_n_elts = (GET_MODE_SIZE (inmode) / in_elt_size);
+
+ if (in_n_elts >= n_elts || n_elts % in_n_elts)
+ abort ();
+ for (i = 0; i < n_elts; i++)
+ RTVEC_ELT (v, i) = CONST_VECTOR_ELT (trueop, i % in_n_elts);
+ }
+ return gen_rtx_CONST_VECTOR (mode, v);
+ }
+ }
+
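+ /* For any other unary operation on a constant vector, fold it
+ element by element in the vector's inner mode. */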
+ if (VECTOR_MODE_P (mode) && GET_CODE (trueop) == CONST_VECTOR)
+ {
+ int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
+ unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
+ enum machine_mode opmode = GET_MODE (trueop);
+ int op_elt_size = GET_MODE_SIZE (GET_MODE_INNER (opmode));
+ unsigned op_n_elts = (GET_MODE_SIZE (opmode) / op_elt_size);
+ rtvec v = rtvec_alloc (n_elts);
+ unsigned int i;
+
+ if (op_n_elts != n_elts)
+ abort ();
+
+ for (i = 0; i < n_elts; i++)
+ {
+ rtx x = simplify_unary_operation (code, GET_MODE_INNER (mode),
+ CONST_VECTOR_ELT (trueop, i),
+ GET_MODE_INNER (opmode));
+ if (!x)
+ return 0;
+ RTVEC_ELT (v, i) = x;
+ }
+ return gen_rtx_CONST_VECTOR (mode, v);
+ }
+
/* The order of these tests is critical so that, for example, we don't
check the wrong mode (input vs. output) for a conversion operation,
such as FIX. At some point, this should be simplified. */
val = exact_log2 (arg0 & (- arg0)) + 1;
break;
+ case CLZ:
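+ /* Count leading zeros. If ARG0 is zero and the target defines
+ a CLZ value at zero, the macro has already stored it in VAL;
+ otherwise VAL is the mode width minus one minus the position
+ of the most significant set bit. */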
+ arg0 &= GET_MODE_MASK (mode);
+ if (arg0 == 0 && CLZ_DEFINED_VALUE_AT_ZERO (mode, val))
+ ;
+ else
+ val = GET_MODE_BITSIZE (mode) - floor_log2 (arg0) - 1;
+ break;
+
+ case CTZ:
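+ /* Count trailing zeros. ARG0 & -ARG0 isolates the least
+ significant set bit, so its log2 is the trailing-zero count. */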
+ arg0 &= GET_MODE_MASK (mode);
+ if (arg0 == 0)
+ {
+ /* Even if the value at zero is undefined, we have to come
+ up with some replacement; the mode width seems good enough. */
+ if (! CTZ_DEFINED_VALUE_AT_ZERO (mode, val))
+ val = GET_MODE_BITSIZE (mode);
+ }
+ else
+ val = exact_log2 (arg0 & -arg0);
+ break;
+
+ case POPCOUNT:
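+ /* Count the set bits; each ARG0 &= ARG0 - 1 clears the least
+ significant set bit, so the loop runs once per one bit. */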
+ arg0 &= GET_MODE_MASK (mode);
+ val = 0;
+ while (arg0)
+ val++, arg0 &= arg0 - 1;
+ break;
+
+ case PARITY:
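+ /* Parity is the population count modulo 2. */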
+ arg0 &= GET_MODE_MASK (mode);
+ val = 0;
+ while (arg0)
+ val++, arg0 &= arg0 - 1;
+ val &= 1;
+ break;
+
case TRUNCATE:
val = arg0;
break;
case FFS:
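+ /* Find-first-set: one plus the index of the least significant
+ set bit, or zero when the operand is zero. */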
hv = 0;
if (l1 == 0)
- lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
+ {
+ if (h1 == 0)
+ lv = 0;
+ else
+ lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & -h1) + 1;
+ }
+ else
+ lv = exact_log2 (l1 & -l1) + 1;
+ break;
+
+ case CLZ:
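+ /* Leading zeros of a double-word value: when the high word is
+ zero, the count spans the whole high word plus the leading
+ zeros of the low word. */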
+ hv = 0;
+ if (h1 == 0)
+ lv = GET_MODE_BITSIZE (mode) - floor_log2 (l1) - 1;
else
- lv = exact_log2 (l1 & (-l1)) + 1;
+ lv = GET_MODE_BITSIZE (mode) - floor_log2 (h1) - 1
+ - HOST_BITS_PER_WIDE_INT;
+ break;
+
+ case CTZ:
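+ /* Trailing zeros of a double-word value: when the low word is
+ zero, add the low word's width to the count from the high
+ word. */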
+ hv = 0;
+ if (l1 == 0)
+ {
+ if (h1 == 0)
+ lv = GET_MODE_BITSIZE (mode);
+ else
+ lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & -h1);
+ }
+ else
+ lv = exact_log2 (l1 & -l1);
+ break;
+
+ case POPCOUNT:
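+ /* Population count of a double-word value: sum the set bits
+ of both halves. */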
+ hv = 0;
+ lv = 0;
+ while (l1)
+ lv++, l1 &= l1 - 1;
+ while (h1)
+ lv++, h1 &= h1 - 1;
+ break;
+
+ case PARITY:
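+ /* Parity of a double-word value: the combined population
+ count modulo 2. */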
+ hv = 0;
+ lv = 0;
+ while (l1)
+ lv++, l1 &= l1 - 1;
+ while (h1)
+ lv++, h1 &= h1 - 1;
+ lv &= 1;
break;
case TRUNCATE:
tem = trueop0, trueop0 = trueop1, trueop1 = tem;
}
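+ /* Fold a binary operation applied to two constant vectors by
+ simplifying it element by element in the inner mode. */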
+ if (VECTOR_MODE_P (mode)
+ && GET_CODE (trueop0) == CONST_VECTOR
+ && GET_CODE (trueop1) == CONST_VECTOR)
+ {
+ int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
+ unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
+ enum machine_mode op0mode = GET_MODE (trueop0);
+ int op0_elt_size = GET_MODE_SIZE (GET_MODE_INNER (op0mode));
+ unsigned op0_n_elts = (GET_MODE_SIZE (op0mode) / op0_elt_size);
+ enum machine_mode op1mode = GET_MODE (trueop1);
+ int op1_elt_size = GET_MODE_SIZE (GET_MODE_INNER (op1mode));
+ unsigned op1_n_elts = (GET_MODE_SIZE (op1mode) / op1_elt_size);
+ rtvec v = rtvec_alloc (n_elts);
+ unsigned int i;
+
+ if (op0_n_elts != n_elts || op1_n_elts != n_elts)
+ abort ();
+
+ for (i = 0; i < n_elts; i++)
+ {
+ rtx x = simplify_binary_operation (code, GET_MODE_INNER (mode),
+ CONST_VECTOR_ELT (trueop0, i),
+ CONST_VECTOR_ELT (trueop1, i));
+ if (!x)
+ return 0;
+ RTVEC_ELT (v, i) = x;
+ }
+
+ return gen_rtx_CONST_VECTOR (mode, v);
+ }
+
if (GET_MODE_CLASS (mode) == MODE_FLOAT
&& GET_CODE (trueop0) == CONST_DOUBLE
&& GET_CODE (trueop1) == CONST_DOUBLE
/* (x - (x & y)) -> (x & ~y) */
if (GET_CODE (op1) == AND)
{
- if (rtx_equal_p (op0, XEXP (op1, 0)))
- return simplify_gen_binary (AND, mode, op0,
- gen_rtx_NOT (mode, XEXP (op1, 1)));
- if (rtx_equal_p (op0, XEXP (op1, 1)))
- return simplify_gen_binary (AND, mode, op0,
- gen_rtx_NOT (mode, XEXP (op1, 0)));
- }
+ if (rtx_equal_p (op0, XEXP (op1, 0)))
+ {
+ tem = simplify_gen_unary (NOT, mode, XEXP (op1, 1),
+ GET_MODE (XEXP (op1, 1)));
+ return simplify_gen_binary (AND, mode, op0, tem);
+ }
+ if (rtx_equal_p (op0, XEXP (op1, 1)))
+ {
+ tem = simplify_gen_unary (NOT, mode, XEXP (op1, 0),
+ GET_MODE (XEXP (op1, 0)));
+ return simplify_gen_binary (AND, mode, op0, tem);
+ }
+ }
break;
case MULT:
/* ??? There are simplifications that can be done. */
return 0;
+ case VEC_SELECT:
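+ /* Operand 1 is a PARALLEL of CONST_INT indices selecting
+ elements of operand 0. A non-vector result mode selects a
+ single element; a vector mode selects one element per index. */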
+ if (!VECTOR_MODE_P (mode))
+ {
+ if (!VECTOR_MODE_P (GET_MODE (trueop0))
+ || (mode
+ != GET_MODE_INNER (GET_MODE (trueop0)))
+ || GET_CODE (trueop1) != PARALLEL
+ || XVECLEN (trueop1, 0) != 1
+ || GET_CODE (XVECEXP (trueop1, 0, 0)) != CONST_INT)
+ abort ();
+
+ if (GET_CODE (trueop0) == CONST_VECTOR)
+ return CONST_VECTOR_ELT (trueop0, INTVAL (XVECEXP (trueop1, 0, 0)));
+ }
+ else
+ {
+ if (!VECTOR_MODE_P (GET_MODE (trueop0))
+ || (GET_MODE_INNER (mode)
+ != GET_MODE_INNER (GET_MODE (trueop0)))
+ || GET_CODE (trueop1) != PARALLEL)
+ abort ();
+
+ if (GET_CODE (trueop0) == CONST_VECTOR)
+ {
+ int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
+ unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
+ rtvec v = rtvec_alloc (n_elts);
+ unsigned int i;
+
+ if (XVECLEN (trueop1, 0) != (int) n_elts)
+ abort ();
+ for (i = 0; i < n_elts; i++)
+ {
+ rtx x = XVECEXP (trueop1, 0, i);
+
+ if (GET_CODE (x) != CONST_INT)
+ abort ();
+ RTVEC_ELT (v, i) = CONST_VECTOR_ELT (trueop0, INTVAL (x));
+ }
+
+ return gen_rtx_CONST_VECTOR (mode, v);
+ }
+ }
+ return 0;
+ case VEC_CONCAT:
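+ /* Concatenate two operands, each a scalar or a vector, into a
+ wider vector whose size is the sum of the operand sizes. */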
+ {
+ enum machine_mode op0_mode = (GET_MODE (trueop0) != VOIDmode
+ ? GET_MODE (trueop0)
+ : GET_MODE_INNER (mode));
+ enum machine_mode op1_mode = (GET_MODE (trueop1) != VOIDmode
+ ? GET_MODE (trueop1)
+ : GET_MODE_INNER (mode));
+
+ if (!VECTOR_MODE_P (mode)
+ || (GET_MODE_SIZE (op0_mode) + GET_MODE_SIZE (op1_mode)
+ != GET_MODE_SIZE (mode)))
+ abort ();
+
+ if ((VECTOR_MODE_P (op0_mode)
+ && (GET_MODE_INNER (mode)
+ != GET_MODE_INNER (op0_mode)))
+ || (!VECTOR_MODE_P (op0_mode)
+ && GET_MODE_INNER (mode) != op0_mode))
+ abort ();
+
+ if ((VECTOR_MODE_P (op1_mode)
+ && (GET_MODE_INNER (mode)
+ != GET_MODE_INNER (op1_mode)))
+ || (!VECTOR_MODE_P (op1_mode)
+ && GET_MODE_INNER (mode) != op1_mode))
+ abort ();
+
+ if ((GET_CODE (trueop0) == CONST_VECTOR
+ || GET_CODE (trueop0) == CONST_INT
+ || GET_CODE (trueop0) == CONST_DOUBLE)
+ && (GET_CODE (trueop1) == CONST_VECTOR
+ || GET_CODE (trueop1) == CONST_INT
+ || GET_CODE (trueop1) == CONST_DOUBLE))
+ {
+ int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
+ unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
+ rtvec v = rtvec_alloc (n_elts);
+ unsigned int i;
+ unsigned in_n_elts = 1;
+
+ if (VECTOR_MODE_P (op0_mode))
+ in_n_elts = (GET_MODE_SIZE (op0_mode) / elt_size);
+ for (i = 0; i < n_elts; i++)
+ {
+ if (i < in_n_elts)
+ {
+ if (!VECTOR_MODE_P (op0_mode))
+ RTVEC_ELT (v, i) = trueop0;
+ else
+ RTVEC_ELT (v, i) = CONST_VECTOR_ELT (trueop0, i);
+ }
+ else
+ {
+ if (!VECTOR_MODE_P (op1_mode))
+ RTVEC_ELT (v, i) = trueop1;
+ else
+ RTVEC_ELT (v, i) = CONST_VECTOR_ELT (trueop1,
+ i - in_n_elts);
+ }
+ }
+
+ return gen_rtx_CONST_VECTOR (mode, v);
+ }
+ }
+ return 0;
+
default:
abort ();
}
> (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
break;
+ case SS_PLUS:
+ case US_PLUS:
+ case SS_MINUS:
+ case US_MINUS:
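+ /* Signed and unsigned saturating addition and subtraction. */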
+ /* ??? There are simplifications that can be done. */
+ return 0;
+
default:
abort ();
}
/* We can't simplify MODE_CC values since we don't know what the
actual comparison is. */
- if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC
-#ifdef HAVE_cc0
- || op0 == cc0_rtx
-#endif
- )
+ if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC || CC0_P (op0))
return 0;
/* Make sure the constant is second. */
}
}
break;
+ case VEC_MERGE:
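+ /* Operand 2 is a bitmask: element I of the result comes from
+ operand 0 when bit I is set and from operand 1 when it is
+ clear. For example, with four elements a mask of 0x5 takes
+ elements 0 and 2 from operand 0 and elements 1 and 3 from
+ operand 1. */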
+ if (GET_MODE (op0) != mode
+ || GET_MODE (op1) != mode
+ || !VECTOR_MODE_P (mode))
+ abort ();
+ op2 = avoid_constant_pool_reference (op2);
+ if (GET_CODE (op2) == CONST_INT)
+ {
+ int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
+ unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
+ int mask = (1 << n_elts) - 1;
+
+ if (!(INTVAL (op2) & mask))
+ return op1;
+ if ((INTVAL (op2) & mask) == mask)
+ return op0;
+
+ op0 = avoid_constant_pool_reference (op0);
+ op1 = avoid_constant_pool_reference (op1);
+ if (GET_CODE (op0) == CONST_VECTOR
+ && GET_CODE (op1) == CONST_VECTOR)
+ {
+ rtvec v = rtvec_alloc (n_elts);
+ unsigned int i;
+
+ for (i = 0; i < n_elts; i++)
+ RTVEC_ELT (v, i) = (INTVAL (op2) & (1 << i)
+ ? CONST_VECTOR_ELT (op0, i)
+ : CONST_VECTOR_ELT (op1, i));
+ return gen_rtx_CONST_VECTOR (mode, v);
+ }
+ }
+ break;
default:
abort ();
}
if (GET_CODE (elt) != CONST_INT)
return NULL_RTX;
+ /* Avoid overflow: give up if nonzero bits would be shifted
+ out of HIGH. */
+ if (high >> (HOST_BITS_PER_WIDE_INT - shift))
+ return NULL_RTX;
high = high << shift | sum >> (HOST_BITS_PER_WIDE_INT - shift);
sum = (sum << shift) + INTVAL (elt);
}
if (GET_MODE_BITSIZE (outermode) <= HOST_BITS_PER_WIDE_INT)
return GEN_INT (trunc_int_for_mode (sum, outermode));
else if (GET_MODE_BITSIZE (outermode) == 2 * HOST_BITS_PER_WIDE_INT)
- return immed_double_const (high, sum, outermode);
+ return immed_double_const (sum, high, outermode);
else
return NULL_RTX;
}
int subbyte = byte % elt_size;
op = simplify_subreg (new_mode, op, innermode, byte - subbyte);
- if (! op)
- return NULL_RTX;
+ if (! op)
+ return NULL_RTX;
return simplify_subreg (outermode, op, new_mode, subbyte);
}
else if (GET_MODE_CLASS (outermode) == MODE_INT)
|| ! rtx_equal_function_value_matters)
&& REGNO (op) < FIRST_PSEUDO_REGISTER
#ifdef CANNOT_CHANGE_MODE_CLASS
- && ! (REG_CANNOT_CHANGE_MODE_P (REGNO (op), outermode, innermode)
+ && ! (REG_CANNOT_CHANGE_MODE_P (REGNO (op), innermode, outermode)
&& GET_MODE_CLASS (innermode) != MODE_COMPLEX_INT
&& GET_MODE_CLASS (innermode) != MODE_COMPLEX_FLOAT)
#endif
if (HARD_REGNO_MODE_OK (final_regno, outermode)
|| ! HARD_REGNO_MODE_OK (REGNO (op), innermode))
{
- rtx x = gen_rtx_REG (outermode, final_regno);
+ rtx x = gen_rtx_REG_offset (op, outermode, final_regno, byte);
/* Propagate original regno. We don't have any way to specify
the offset inside original regno, so do so only for lowpart.
SUBREG_BYTE (x));
if (code == CONSTANT_P_RTX)
{
- if (CONSTANT_P (XEXP (x,0)))
+ if (CONSTANT_P (XEXP (x, 0)))
return const1_rtx;
}
return NULL;