/* RTL simplification functions for GNU compiler.
Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
+ 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
Free Software Foundation, Inc.
This file is part of GCC.
width = GET_MODE_BITSIZE (mode);
if (width == 0)
return false;
-
+
if (width <= HOST_BITS_PER_WIDE_INT
- && GET_CODE (x) == CONST_INT)
+ && CONST_INT_P (x))
val = INTVAL (x);
else if (width <= 2 * HOST_BITS_PER_WIDE_INT
&& GET_CODE (x) == CONST_DOUBLE
/* Split the address into a base and integer offset. */
if (GET_CODE (addr) == CONST
&& GET_CODE (XEXP (addr, 0)) == PLUS
- && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
+ && CONST_INT_P (XEXP (XEXP (addr, 0), 1)))
{
offset = INTVAL (XEXP (XEXP (addr, 0), 1));
addr = XEXP (XEXP (addr, 0), 0);
return x;
}
\f
+/* Simplify a MEM based on its attributes. This is the default
+ delegitimize_address target hook, and it's recommended that every
+ overrider call it. */
+
+rtx
+delegitimize_mem_from_attrs (rtx x)
+{
+ if (MEM_P (x)
+ && MEM_EXPR (x)
+ && (!MEM_OFFSET (x)
+ || GET_CODE (MEM_OFFSET (x)) == CONST_INT))
+ {
+ tree decl = MEM_EXPR (x);
+ enum machine_mode mode = GET_MODE (x);
+ HOST_WIDE_INT offset = 0;
+
+ switch (TREE_CODE (decl))
+ {
+ default:
+ decl = NULL;
+ break;
+
+ case VAR_DECL:
+ break;
+
+ case ARRAY_REF:
+ case ARRAY_RANGE_REF:
+ case COMPONENT_REF:
+ case BIT_FIELD_REF:
+ case REALPART_EXPR:
+ case IMAGPART_EXPR:
+ case VIEW_CONVERT_EXPR:
+ {
+ HOST_WIDE_INT bitsize, bitpos;
+ tree toffset;
+ int unsignedp = 0, volatilep = 0;
+
+ decl = get_inner_reference (decl, &bitsize, &bitpos, &toffset,
+ &mode, &unsignedp, &volatilep, false);
+ if (bitsize != GET_MODE_BITSIZE (mode)
+ || (bitpos % BITS_PER_UNIT)
+ || (toffset && !host_integerp (toffset, 0)))
+ decl = NULL;
+ else
+ {
+ offset += bitpos / BITS_PER_UNIT;
+ if (toffset)
+ offset += TREE_INT_CST_LOW (toffset);
+ }
+ break;
+ }
+ }
+
+ if (decl
+ && mode == GET_MODE (x)
+ && TREE_CODE (decl) == VAR_DECL
+ && (TREE_STATIC (decl)
+ || DECL_THREAD_LOCAL_P (decl))
+ && DECL_RTL_SET_P (decl)
+ && MEM_P (DECL_RTL (decl)))
+ {
+ rtx newx;
+
+ if (MEM_OFFSET (x))
+ offset += INTVAL (MEM_OFFSET (x));
+
+ newx = DECL_RTL (decl);
+
+ if (MEM_P (newx))
+ {
+ rtx n = XEXP (newx, 0), o = XEXP (x, 0);
+
+ /* Avoid creating a new MEM needlessly if we already had
+ the same address.  We do so if there's no OFFSET and the
+ old address X is identical to NEWX, or if X is of the
+ form (plus NEWX OFFSET), or the NEWX is of the form
+ (plus Y (const_int Z)) and X is that with the offset
+ added: (plus Y (const_int Z+OFFSET)). */
+ if (!((offset == 0
+ || (GET_CODE (o) == PLUS
+ && GET_CODE (XEXP (o, 1)) == CONST_INT
+ && (offset == INTVAL (XEXP (o, 1))
+ || (GET_CODE (n) == PLUS
+ && GET_CODE (XEXP (n, 1)) == CONST_INT
+ && (INTVAL (XEXP (n, 1)) + offset
+ == INTVAL (XEXP (o, 1)))
+ && (n = XEXP (n, 0))))
+ && (o = XEXP (o, 0))))
+ && rtx_equal_p (o, n)))
+ x = adjust_address_nv (newx, mode, offset);
+ }
+ else if (GET_MODE (x) == GET_MODE (newx)
+ && offset == 0)
+ x = newx;
+ }
+ }
+
+ return x;
+}
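+
+/* A delegitimize_address overrider would typically chain to the default
+ above before undoing its own wrapping, along these lines (an
+ illustrative sketch only; the UNSPEC handling is hypothetical):
+
+ static rtx
+ example_delegitimize_address (rtx x)
+ {
+ x = delegitimize_mem_from_attrs (x);
+ ... undo target-specific UNSPEC wrapping of SYMBOL_REFs here ...
+ return x;
+ } */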
+\f
/* Make a unary operation by first seeing if it folds and otherwise making
the specified operation. */
return gen_rtx_fmt_ee (code, mode, op0, op1);
}
\f
-/* Replace all occurrences of OLD_RTX in X with NEW_RTX and try to simplify the
- resulting RTX. Return a new RTX which is as simplified as possible. */
+/* If FN is NULL, replace all occurrences of OLD_RTX in X with copy_rtx (DATA)
+ and simplify the result.  If FN is non-NULL, call FN on each X; if it
+ returns non-NULL, replace X with its return value and simplify the
+ result. */
rtx
-simplify_replace_rtx (rtx x, const_rtx old_rtx, rtx new_rtx)
+simplify_replace_fn_rtx (rtx x, const_rtx old_rtx,
+ rtx (*fn) (rtx, const_rtx, void *), void *data)
{
enum rtx_code code = GET_CODE (x);
enum machine_mode mode = GET_MODE (x);
enum machine_mode op_mode;
- rtx op0, op1, op2;
-
- /* If X is OLD_RTX, return NEW_RTX. Otherwise, if this is an expression, try
- to build a new expression substituting recursively. If we can't do
- anything, return our input. */
+ const char *fmt;
+ rtx op0, op1, op2, newx, op;
+ rtvec vec, newvec;
+ int i, j;
- if (x == old_rtx)
- return new_rtx;
+ if (__builtin_expect (fn != NULL, 0))
+ {
+ newx = fn (x, old_rtx, data);
+ if (newx)
+ return newx;
+ }
+ else if (rtx_equal_p (x, old_rtx))
+ return copy_rtx ((rtx) data);
switch (GET_RTX_CLASS (code))
{
case RTX_UNARY:
op0 = XEXP (x, 0);
op_mode = GET_MODE (op0);
- op0 = simplify_replace_rtx (op0, old_rtx, new_rtx);
+ op0 = simplify_replace_fn_rtx (op0, old_rtx, fn, data);
if (op0 == XEXP (x, 0))
return x;
return simplify_gen_unary (code, mode, op0, op_mode);
case RTX_BIN_ARITH:
case RTX_COMM_ARITH:
- op0 = simplify_replace_rtx (XEXP (x, 0), old_rtx, new_rtx);
- op1 = simplify_replace_rtx (XEXP (x, 1), old_rtx, new_rtx);
+ op0 = simplify_replace_fn_rtx (XEXP (x, 0), old_rtx, fn, data);
+ op1 = simplify_replace_fn_rtx (XEXP (x, 1), old_rtx, fn, data);
if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
return x;
return simplify_gen_binary (code, mode, op0, op1);
op0 = XEXP (x, 0);
op1 = XEXP (x, 1);
op_mode = GET_MODE (op0) != VOIDmode ? GET_MODE (op0) : GET_MODE (op1);
- op0 = simplify_replace_rtx (op0, old_rtx, new_rtx);
- op1 = simplify_replace_rtx (op1, old_rtx, new_rtx);
+ op0 = simplify_replace_fn_rtx (op0, old_rtx, fn, data);
+ op1 = simplify_replace_fn_rtx (op1, old_rtx, fn, data);
if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
return x;
return simplify_gen_relational (code, mode, op_mode, op0, op1);
case RTX_BITFIELD_OPS:
op0 = XEXP (x, 0);
op_mode = GET_MODE (op0);
- op0 = simplify_replace_rtx (op0, old_rtx, new_rtx);
- op1 = simplify_replace_rtx (XEXP (x, 1), old_rtx, new_rtx);
- op2 = simplify_replace_rtx (XEXP (x, 2), old_rtx, new_rtx);
+ op0 = simplify_replace_fn_rtx (op0, old_rtx, fn, data);
+ op1 = simplify_replace_fn_rtx (XEXP (x, 1), old_rtx, fn, data);
+ op2 = simplify_replace_fn_rtx (XEXP (x, 2), old_rtx, fn, data);
if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1) && op2 == XEXP (x, 2))
return x;
if (op_mode == VOIDmode)
op_mode = GET_MODE (op0);
return simplify_gen_ternary (code, mode, op_mode, op0, op1, op2);
case RTX_EXTRA:
- /* The only case we try to handle is a SUBREG. */
if (code == SUBREG)
{
- op0 = simplify_replace_rtx (SUBREG_REG (x), old_rtx, new_rtx);
+ op0 = simplify_replace_fn_rtx (SUBREG_REG (x), old_rtx, fn, data);
if (op0 == SUBREG_REG (x))
return x;
op0 = simplify_gen_subreg (GET_MODE (x), op0,
case RTX_OBJ:
if (code == MEM)
{
- op0 = simplify_replace_rtx (XEXP (x, 0), old_rtx, new_rtx);
+ op0 = simplify_replace_fn_rtx (XEXP (x, 0), old_rtx, fn, data);
if (op0 == XEXP (x, 0))
return x;
return replace_equiv_address_nv (x, op0);
}
else if (code == LO_SUM)
{
- op0 = simplify_replace_rtx (XEXP (x, 0), old_rtx, new_rtx);
- op1 = simplify_replace_rtx (XEXP (x, 1), old_rtx, new_rtx);
+ op0 = simplify_replace_fn_rtx (XEXP (x, 0), old_rtx, fn, data);
+ op1 = simplify_replace_fn_rtx (XEXP (x, 1), old_rtx, fn, data);
/* (lo_sum (high x) x) -> x */
if (GET_CODE (op0) == HIGH && rtx_equal_p (XEXP (op0, 0), op1))
return op1;
if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
return x;
return gen_rtx_LO_SUM (mode, op0, op1);
}
- else if (code == REG)
- {
- if (rtx_equal_p (x, old_rtx))
- return new_rtx;
- }
break;
default:
break;
}
- return x;
+
+ newx = x;
+ fmt = GET_RTX_FORMAT (code);
+ for (i = 0; fmt[i]; i++)
+ switch (fmt[i])
+ {
+ case 'E':
+ vec = XVEC (x, i);
+ newvec = XVEC (newx, i);
+ for (j = 0; j < GET_NUM_ELEM (vec); j++)
+ {
+ op = simplify_replace_fn_rtx (RTVEC_ELT (vec, j),
+ old_rtx, fn, data);
+ if (op != RTVEC_ELT (vec, j))
+ {
+ if (newvec == vec)
+ {
+ newvec = shallow_copy_rtvec (vec);
+ if (x == newx)
+ newx = shallow_copy_rtx (x);
+ XVEC (newx, i) = newvec;
+ }
+ RTVEC_ELT (newvec, j) = op;
+ }
+ }
+ break;
+
+ case 'e':
+ if (XEXP (x, i))
+ {
+ op = simplify_replace_fn_rtx (XEXP (x, i), old_rtx, fn, data);
+ if (op != XEXP (x, i))
+ {
+ if (x == newx)
+ newx = shallow_copy_rtx (x);
+ XEXP (newx, i) = op;
+ }
+ }
+ break;
+ }
+ return newx;
+}
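+
+/* For illustration: the FN form can express substitutions that a single
+ OLD_RTX cannot.  A hypothetical callback that maps each register to a
+ known constant value (VALUES indexed by register number) could be:
+
+ static rtx
+ subst_known_reg (rtx x, const_rtx old_rtx ATTRIBUTE_UNUSED, void *data)
+ {
+ rtx *values = (rtx *) data;
+ if (REG_P (x) && values[REGNO (x)])
+ return copy_rtx (values[REGNO (x)]);
+ return NULL_RTX;
+ }
+
+ invoked as simplify_replace_fn_rtx (x, NULL_RTX, subst_known_reg, values).
+ Returning NULL_RTX tells the walker to recurse into X's subexpressions. */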
+
+/* Replace all occurrences of OLD_RTX in X with NEW_RTX and try to simplify the
+ resulting RTX. Return a new RTX which is as simplified as possible. */
+
+rtx
+simplify_replace_rtx (rtx x, const_rtx old_rtx, rtx new_rtx)
+{
+ return simplify_replace_fn_rtx (x, old_rtx, 0, new_rtx);
}
\f
/* Try to simplify a unary operation CODE whose output mode is to be
{
rtx trueop, tem;
- if (GET_CODE (op) == CONST)
- op = XEXP (op, 0);
-
trueop = avoid_constant_pool_reference (op);
tem = simplify_const_unary_operation (code, mode, trueop, op_mode);
/* (not (xor X C)) for C constant is (xor X D) with D = ~C. */
if (GET_CODE (op) == XOR
- && GET_CODE (XEXP (op, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (op, 1))
&& (temp = simplify_unary_operation (NOT, mode,
XEXP (op, 1), mode)) != 0)
return simplify_gen_binary (XOR, mode, XEXP (op, 0), temp);
/* (not (plus X C)) for signbit C is (xor X D) with D = ~C. */
if (GET_CODE (op) == PLUS
- && GET_CODE (XEXP (op, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (op, 1))
&& mode_signbit_p (mode, XEXP (op, 1))
&& (temp = simplify_unary_operation (NOT, mode,
XEXP (op, 1), mode)) != 0)
/* (not (ashiftrt foo C)) where C is the number of bits in FOO
minus 1 is (ge foo (const_int 0)) if STORE_FLAG_VALUE is -1,
so we can perform the above simplification. */
-
+
if (STORE_FLAG_VALUE == -1
&& GET_CODE (op) == ASHIFTRT
- && GET_CODE (XEXP (op, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (op, 1))
&& INTVAL (XEXP (op, 1)) == GET_MODE_BITSIZE (mode) - 1)
return simplify_gen_relational (GE, mode, VOIDmode,
XEXP (op, 0), const0_rtx);
if (GET_CODE (op) == PLUS
&& XEXP (op, 1) == const1_rtx)
return simplify_gen_unary (NOT, mode, XEXP (op, 0), mode);
-
+
/* Similarly, (neg (not X)) is (plus X 1). */
if (GET_CODE (op) == NOT)
return plus_constant (XEXP (op, 0), 1);
-
+
/* (neg (minus X Y)) can become (minus Y X). This transformation
isn't safe for modes with signed zeros, since if X and Y are
both +0, (minus Y X) is the same as (minus X Y). If the
&& !HONOR_SIGNED_ZEROS (mode)
&& !HONOR_SIGN_DEPENDENT_ROUNDING (mode))
return simplify_gen_binary (MINUS, mode, XEXP (op, 1), XEXP (op, 0));
-
+
if (GET_CODE (op) == PLUS
&& !HONOR_SIGNED_ZEROS (mode)
&& !HONOR_SIGN_DEPENDENT_ROUNDING (mode))
{
/* (neg (plus A C)) is simplified to (minus -C A). */
- if (GET_CODE (XEXP (op, 1)) == CONST_INT
+ if (CONST_INT_P (XEXP (op, 1))
|| GET_CODE (XEXP (op, 1)) == CONST_DOUBLE)
{
temp = simplify_unary_operation (NEG, mode, XEXP (op, 1), mode);
/* (neg (ashiftrt X C)) can be replaced by (lshiftrt X C) when
C is equal to the width of MODE minus 1. */
if (GET_CODE (op) == ASHIFTRT
- && GET_CODE (XEXP (op, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (op, 1))
&& INTVAL (XEXP (op, 1)) == GET_MODE_BITSIZE (mode) - 1)
return simplify_gen_binary (LSHIFTRT, mode,
XEXP (op, 0), XEXP (op, 1));
/* (neg (lshiftrt X C)) can be replaced by (ashiftrt X C) when
C is equal to the width of MODE minus 1. */
if (GET_CODE (op) == LSHIFTRT
- && GET_CODE (XEXP (op, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (op, 1))
&& INTVAL (XEXP (op, 1)) == GET_MODE_BITSIZE (mode) - 1)
return simplify_gen_binary (ASHIFTRT, mode,
XEXP (op, 0), XEXP (op, 1));
-
+
/* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
if (GET_CODE (op) == XOR
&& XEXP (op, 1) == const1_rtx
replace the TRUNCATE with a SUBREG. Note that this is also
valid if TRULY_NOOP_TRUNCATION is false for the corresponding
modes we just have to apply a different definition for
- truncation. But don't do this for an (LSHIFTRT (MULT ...))
+ truncation. But don't do this for an (LSHIFTRT (MULT ...))
since this will cause problems with the umulXi3_highpart
patterns. */
if ((TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
return rtl_hooks.gen_lowpart_no_emit (mode, op);
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
- if (! POINTERS_EXTEND_UNSIGNED
+ /* As we do not know which address space the pointer is referring to,
+ we can do this only if the target does not support different pointer
+ or address modes depending on the address space. */
+ if (target_default_pointer_address_modes_p ()
+ && ! POINTERS_EXTEND_UNSIGNED
&& mode == Pmode && GET_MODE (op) == ptr_mode
&& (CONSTANT_P (op)
|| (GET_CODE (op) == SUBREG
return rtl_hooks.gen_lowpart_no_emit (mode, op);
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
- if (POINTERS_EXTEND_UNSIGNED > 0
+ /* As we do not know which address space the pointer is referring to,
+ we can do this only if the target does not support different pointer
+ or address modes depending on the address space. */
+ if (target_default_pointer_address_modes_p ()
+ && POINTERS_EXTEND_UNSIGNED > 0
&& mode == Pmode && GET_MODE (op) == ptr_mode
&& (CONSTANT_P (op)
|| (GET_CODE (op) == SUBREG
default:
break;
}
-
+
return 0;
}
gcc_assert (GET_MODE_INNER (mode) == GET_MODE_INNER
(GET_MODE (op)));
}
- if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE
+ if (CONST_INT_P (op) || GET_CODE (op) == CONST_DOUBLE
|| GET_CODE (op) == CONST_VECTOR)
{
int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
such as FIX. At some point, this should be simplified. */
if (code == FLOAT && GET_MODE (op) == VOIDmode
- && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
+ && (GET_CODE (op) == CONST_DOUBLE || CONST_INT_P (op)))
{
HOST_WIDE_INT hv, lv;
REAL_VALUE_TYPE d;
- if (GET_CODE (op) == CONST_INT)
+ if (CONST_INT_P (op))
lv = INTVAL (op), hv = HWI_SIGN_EXTEND (lv);
else
lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
}
else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
&& (GET_CODE (op) == CONST_DOUBLE
- || GET_CODE (op) == CONST_INT))
+ || CONST_INT_P (op)))
{
HOST_WIDE_INT hv, lv;
REAL_VALUE_TYPE d;
- if (GET_CODE (op) == CONST_INT)
+ if (CONST_INT_P (op))
lv = INTVAL (op), hv = HWI_SIGN_EXTEND (lv);
else
lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
}
- if (GET_CODE (op) == CONST_INT
+ if (CONST_INT_P (op)
&& width <= HOST_BITS_PER_WIDE_INT && width > 0)
{
HOST_WIDE_INT arg0 = INTVAL (op);
case US_TRUNCATE:
case SS_NEG:
case US_NEG:
+ case SS_ABS:
return 0;
default:
else if (GET_MODE (op) == VOIDmode
&& width <= HOST_BITS_PER_WIDE_INT * 2
&& (GET_CODE (op) == CONST_DOUBLE
- || GET_CODE (op) == CONST_INT))
+ || CONST_INT_P (op)))
{
unsigned HOST_WIDE_INT l1, lv;
HOST_WIDE_INT h1, hv;
if ((GET_CODE (op0) == CONST
|| GET_CODE (op0) == SYMBOL_REF
|| GET_CODE (op0) == LABEL_REF)
- && GET_CODE (op1) == CONST_INT)
+ && CONST_INT_P (op1))
return plus_constant (op0, INTVAL (op1));
else if ((GET_CODE (op1) == CONST
|| GET_CODE (op1) == SYMBOL_REF
|| GET_CODE (op1) == LABEL_REF)
- && GET_CODE (op0) == CONST_INT)
+ && CONST_INT_P (op0))
return plus_constant (op1, INTVAL (op0));
/* See if this is something like X * C - X or vice versa or
lhs = XEXP (lhs, 0);
}
else if (GET_CODE (lhs) == MULT
- && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
+ && CONST_INT_P (XEXP (lhs, 1)))
{
coeff0l = INTVAL (XEXP (lhs, 1));
coeff0h = INTVAL (XEXP (lhs, 1)) < 0 ? -1 : 0;
lhs = XEXP (lhs, 0);
}
else if (GET_CODE (lhs) == ASHIFT
- && GET_CODE (XEXP (lhs, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (lhs, 1))
&& INTVAL (XEXP (lhs, 1)) >= 0
&& INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
{
rhs = XEXP (rhs, 0);
}
else if (GET_CODE (rhs) == MULT
- && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
+ && CONST_INT_P (XEXP (rhs, 1)))
{
coeff1l = INTVAL (XEXP (rhs, 1));
coeff1h = INTVAL (XEXP (rhs, 1)) < 0 ? -1 : 0;
rhs = XEXP (rhs, 0);
}
else if (GET_CODE (rhs) == ASHIFT
- && GET_CODE (XEXP (rhs, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (rhs, 1))
&& INTVAL (XEXP (rhs, 1)) >= 0
&& INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
{
}
/* (plus (xor X C1) C2) is (xor X (C1^C2)) if C2 is signbit. */
- if ((GET_CODE (op1) == CONST_INT
+ if ((CONST_INT_P (op1)
|| GET_CODE (op1) == CONST_DOUBLE)
&& GET_CODE (op0) == XOR
- && (GET_CODE (XEXP (op0, 1)) == CONST_INT
+ && (CONST_INT_P (XEXP (op0, 1))
|| GET_CODE (XEXP (op0, 1)) == CONST_DOUBLE)
&& mode_signbit_p (mode, op1))
return simplify_gen_binary (XOR, mode, XEXP (op0, 0),
break;
case COMPARE:
-#ifdef HAVE_cc0
- /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
- using cc0, in which case we want to leave it as a COMPARE
- so we can distinguish it from a register-register-copy.
-
- In IEEE floating point, x-0 is not the same as x. */
- if (!(HONOR_SIGNED_ZEROS (mode)
- && HONOR_SIGN_DEPENDENT_ROUNDING (mode))
- && trueop1 == CONST0_RTX (mode))
- return op0;
-#endif
-
/* Convert (compare (gt (flags) 0) (lt (flags) 0)) to (flags). */
if (((GET_CODE (op0) == GT && GET_CODE (op1) == LT)
|| (GET_CODE (op0) == GTU && GET_CODE (op1) == LTU))
lhs = XEXP (lhs, 0);
}
else if (GET_CODE (lhs) == MULT
- && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
+ && CONST_INT_P (XEXP (lhs, 1)))
{
coeff0l = INTVAL (XEXP (lhs, 1));
coeff0h = INTVAL (XEXP (lhs, 1)) < 0 ? -1 : 0;
lhs = XEXP (lhs, 0);
}
else if (GET_CODE (lhs) == ASHIFT
- && GET_CODE (XEXP (lhs, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (lhs, 1))
&& INTVAL (XEXP (lhs, 1)) >= 0
&& INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
{
rhs = XEXP (rhs, 0);
}
else if (GET_CODE (rhs) == MULT
- && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
+ && CONST_INT_P (XEXP (rhs, 1)))
{
negcoeff1l = -INTVAL (XEXP (rhs, 1));
negcoeff1h = INTVAL (XEXP (rhs, 1)) <= 0 ? 0 : -1;
rhs = XEXP (rhs, 0);
}
else if (GET_CODE (rhs) == ASHIFT
- && GET_CODE (XEXP (rhs, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (rhs, 1))
&& INTVAL (XEXP (rhs, 1)) >= 0
&& INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
{
/* (-x - c) may be simplified as (-c - x). */
if (GET_CODE (op0) == NEG
- && (GET_CODE (op1) == CONST_INT
+ && (CONST_INT_P (op1)
|| GET_CODE (op1) == CONST_DOUBLE))
{
tem = simplify_unary_operation (NEG, mode, op1, mode);
}
/* Don't let a relocatable value get a negative coeff. */
- if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
+ if (CONST_INT_P (op1) && GET_MODE (op0) != VOIDmode)
return simplify_gen_binary (PLUS, mode,
op0,
neg_const_int (mode, op1));
/* Convert multiply by constant power of two into shift unless
we are still generating RTL. This test is a kludge. */
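+ /* E.g. (mult:SI x (const_int 8)) becomes (ashift:SI x (const_int 3)). */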
- if (GET_CODE (trueop1) == CONST_INT
+ if (CONST_INT_P (trueop1)
&& (val = exact_log2 (INTVAL (trueop1))) >= 0
/* If the mode is larger than the host word size, and the
uppermost bit is set, then this isn't a power of two due
/* x*2 is x+x and x*(-1) is -x */
if (GET_CODE (trueop1) == CONST_DOUBLE
&& SCALAR_FLOAT_MODE_P (GET_MODE (trueop1))
+ && !DECIMAL_FLOAT_MODE_P (GET_MODE (trueop1))
&& GET_MODE (op0) == mode)
{
REAL_VALUE_TYPE d;
case IOR:
if (trueop1 == const0_rtx)
return op0;
- if (GET_CODE (trueop1) == CONST_INT
+ if (CONST_INT_P (trueop1)
&& ((INTVAL (trueop1) & GET_MODE_MASK (mode))
== GET_MODE_MASK (mode)))
return op1;
return constm1_rtx;
/* (ior A C) is C if all bits of A that might be nonzero are on in C. */
- if (GET_CODE (op1) == CONST_INT
+ if (CONST_INT_P (op1)
&& GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
&& (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
return op1;
-
+
/* Canonicalize (X & C1) | C2. */
if (GET_CODE (op0) == AND
- && GET_CODE (trueop1) == CONST_INT
- && GET_CODE (XEXP (op0, 1)) == CONST_INT)
+ && CONST_INT_P (trueop1)
+ && CONST_INT_P (XEXP (op0, 1)))
{
HOST_WIDE_INT mask = GET_MODE_MASK (mode);
HOST_WIDE_INT c1 = INTVAL (XEXP (op0, 1));
if (GET_CODE (opleft) == ASHIFT && GET_CODE (opright) == LSHIFTRT
&& rtx_equal_p (XEXP (opleft, 0), XEXP (opright, 0))
- && GET_CODE (XEXP (opleft, 1)) == CONST_INT
- && GET_CODE (XEXP (opright, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (opleft, 1))
+ && CONST_INT_P (XEXP (opright, 1))
&& (INTVAL (XEXP (opleft, 1)) + INTVAL (XEXP (opright, 1))
== GET_MODE_BITSIZE (mode)))
return gen_rtx_ROTATE (mode, XEXP (opright, 0), XEXP (opleft, 1));
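+ /* For example, in 32-bit SImode, (ior (ashift x (const_int 24))
+ (lshiftrt x (const_int 8))) becomes (rotate x (const_int 24)). */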
< GET_MODE_SIZE (GET_MODE (SUBREG_REG (opleft))))
&& rtx_equal_p (XEXP (SUBREG_REG (opleft), 0),
SUBREG_REG (XEXP (opright, 0)))
- && GET_CODE (XEXP (SUBREG_REG (opleft), 1)) == CONST_INT
- && GET_CODE (XEXP (opright, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (SUBREG_REG (opleft), 1))
+ && CONST_INT_P (XEXP (opright, 1))
&& (INTVAL (XEXP (SUBREG_REG (opleft), 1)) + INTVAL (XEXP (opright, 1))
== GET_MODE_BITSIZE (mode)))
return gen_rtx_ROTATE (mode, XEXP (opright, 0),
/* If we have (ior (and (X C1) C2)), simplify this by making
C1 as small as possible if C1 actually changes. */
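+ /* E.g. (ior (and x (const_int 0xff)) (const_int 0xf0)) can become
+ (ior (and x (const_int 0x0f)) (const_int 0xf0)). */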
- if (GET_CODE (op1) == CONST_INT
+ if (CONST_INT_P (op1)
&& (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
|| INTVAL (op1) > 0)
&& GET_CODE (op0) == AND
- && GET_CODE (XEXP (op0, 1)) == CONST_INT
- && GET_CODE (op1) == CONST_INT
+ && CONST_INT_P (XEXP (op0, 1))
+ && CONST_INT_P (op1)
&& (INTVAL (XEXP (op0, 1)) & INTVAL (op1)) != 0)
return simplify_gen_binary (IOR, mode,
simplify_gen_binary
the PLUS does not affect any of the bits in OP1: then we can do
the IOR as a PLUS and we can associate. This is valid if OP1
can be safely shifted left C bits. */
- if (GET_CODE (trueop1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
+ if (CONST_INT_P (trueop1) && GET_CODE (op0) == ASHIFTRT
&& GET_CODE (XEXP (op0, 0)) == PLUS
- && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
- && GET_CODE (XEXP (op0, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
+ && CONST_INT_P (XEXP (op0, 1))
&& INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
{
int count = INTVAL (XEXP (op0, 1));
case XOR:
if (trueop1 == const0_rtx)
return op0;
- if (GET_CODE (trueop1) == CONST_INT
+ if (CONST_INT_P (trueop1)
&& ((INTVAL (trueop1) & GET_MODE_MASK (mode))
== GET_MODE_MASK (mode)))
return simplify_gen_unary (NOT, mode, op0, mode);
return CONST0_RTX (mode);
/* Canonicalize XOR of the most significant bit to PLUS. */
- if ((GET_CODE (op1) == CONST_INT
+ if ((CONST_INT_P (op1)
|| GET_CODE (op1) == CONST_DOUBLE)
&& mode_signbit_p (mode, op1))
return simplify_gen_binary (PLUS, mode, op0, op1);
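+ /* E.g. in SImode, (xor x (const_int -2147483648)) becomes
+ (plus x (const_int -2147483648)): flipping the sign bit is the
+ same as adding it, since the carry out is discarded. */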
/* (xor (plus X C1) C2) is (xor X (C1^C2)) if C1 is signbit. */
- if ((GET_CODE (op1) == CONST_INT
+ if ((CONST_INT_P (op1)
|| GET_CODE (op1) == CONST_DOUBLE)
&& GET_CODE (op0) == PLUS
- && (GET_CODE (XEXP (op0, 1)) == CONST_INT
+ && (CONST_INT_P (XEXP (op0, 1))
|| GET_CODE (XEXP (op0, 1)) == CONST_DOUBLE)
&& mode_signbit_p (mode, XEXP (op0, 1)))
return simplify_gen_binary (XOR, mode, XEXP (op0, 0),
if (STORE_FLAG_VALUE == 1
&& trueop1 == const1_rtx
&& GET_CODE (op0) == LSHIFTRT
- && GET_CODE (XEXP (op0, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (op0, 1))
&& INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx);
case AND:
if (trueop1 == CONST0_RTX (mode) && ! side_effects_p (op0))
return trueop1;
- if (GET_CODE (trueop1) == CONST_INT
- && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
+ if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
{
HOST_WIDE_INT nzop0 = nonzero_bits (trueop0, mode);
- HOST_WIDE_INT val1 = INTVAL (trueop1);
- /* If we are turning off bits already known off in OP0, we need
- not do an AND. */
- if ((nzop0 & ~val1) == 0)
- return op0;
+ HOST_WIDE_INT nzop1;
+ if (CONST_INT_P (trueop1))
+ {
+ HOST_WIDE_INT val1 = INTVAL (trueop1);
+ /* If we are turning off bits already known off in OP0, we need
+ not do an AND. */
+ if ((nzop0 & ~val1) == 0)
+ return op0;
+ }
+ nzop1 = nonzero_bits (trueop1, mode);
/* If we are clearing all the nonzero bits, the result is zero. */
- if ((val1 & nzop0) == 0 && !side_effects_p (op0))
+ if ((nzop1 & nzop0) == 0
+ && !side_effects_p (op0) && !side_effects_p (op1))
return CONST0_RTX (mode);
}
if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0)
there are no nonzero bits of C outside of X's mode. */
if ((GET_CODE (op0) == SIGN_EXTEND
|| GET_CODE (op0) == ZERO_EXTEND)
- && GET_CODE (trueop1) == CONST_INT
+ && CONST_INT_P (trueop1)
&& GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
&& (~GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))
& INTVAL (trueop1)) == 0)
return simplify_gen_unary (ZERO_EXTEND, mode, tem, imode);
}
+ /* Transform (and (truncate X) C) into (truncate (and X C)). This way
+ we might be able to further simplify the AND with X and potentially
+ remove the truncation altogether. */
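+ /* For instance, (and:QI (truncate:QI X:SI) (const_int 12)) becomes
+ (truncate:QI (and:SI X (const_int 12))), and the inner AND may then
+ simplify against X. */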
+ if (GET_CODE (op0) == TRUNCATE && CONST_INT_P (trueop1))
+ {
+ rtx x = XEXP (op0, 0);
+ enum machine_mode xmode = GET_MODE (x);
+ tem = simplify_gen_binary (AND, xmode, x,
+ gen_int_mode (INTVAL (trueop1), xmode));
+ return simplify_gen_unary (TRUNCATE, mode, tem, xmode);
+ }
+
/* Canonicalize (A | C1) & C2 as (A & C2) | (C1 & C2). */
if (GET_CODE (op0) == IOR
- && GET_CODE (trueop1) == CONST_INT
- && GET_CODE (XEXP (op0, 1)) == CONST_INT)
+ && CONST_INT_P (trueop1)
+ && CONST_INT_P (XEXP (op0, 1)))
{
HOST_WIDE_INT tmp = INTVAL (trueop1) & INTVAL (XEXP (op0, 1));
return simplify_gen_binary (IOR, mode,
and for - instead of + and/or ^ instead of |.
Also, if (N & M) == 0, then
(A +- N) & M -> A & M. */
- if (GET_CODE (trueop1) == CONST_INT
+ if (CONST_INT_P (trueop1)
&& GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
&& ~INTVAL (trueop1)
&& (INTVAL (trueop1) & (INTVAL (trueop1) + 1)) == 0
pmop[0] = XEXP (op0, 0);
pmop[1] = XEXP (op0, 1);
- if (GET_CODE (pmop[1]) == CONST_INT
+ if (CONST_INT_P (pmop[1])
&& (INTVAL (pmop[1]) & INTVAL (trueop1)) == 0)
return simplify_gen_binary (AND, mode, pmop[0], op1);
switch (GET_CODE (tem))
{
case AND:
- if (GET_CODE (XEXP (tem, 1)) == CONST_INT
+ if (CONST_INT_P (XEXP (tem, 1))
&& (INTVAL (XEXP (tem, 1)) & INTVAL (trueop1))
== INTVAL (trueop1))
pmop[which] = XEXP (tem, 0);
break;
case IOR:
case XOR:
- if (GET_CODE (XEXP (tem, 1)) == CONST_INT
+ if (CONST_INT_P (XEXP (tem, 1))
&& (INTVAL (XEXP (tem, 1)) & INTVAL (trueop1)) == 0)
pmop[which] = XEXP (tem, 0);
break;
if (trueop1 == CONST1_RTX (mode))
return rtl_hooks.gen_lowpart_no_emit (mode, op0);
/* Convert divide by power of two into shift. */
- if (GET_CODE (trueop1) == CONST_INT
+ if (CONST_INT_P (trueop1)
&& (val = exact_log2 (INTVAL (trueop1))) > 0)
return simplify_gen_binary (LSHIFTRT, mode, op0, GEN_INT (val));
break;
return CONST0_RTX (mode);
}
/* Implement modulus by power of two as AND. */
- if (GET_CODE (trueop1) == CONST_INT
+ if (CONST_INT_P (trueop1)
&& exact_log2 (INTVAL (trueop1)) > 0)
return simplify_gen_binary (AND, mode, op0,
GEN_INT (INTVAL (op1) - 1));
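+ /* E.g. (umod:SI x (const_int 8)) becomes (and:SI x (const_int 7)). */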
if (trueop0 == CONST0_RTX (mode) && ! side_effects_p (op1))
return op0;
/* Rotating ~0 always results in ~0. */
- if (GET_CODE (trueop0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
+ if (CONST_INT_P (trueop0) && width <= HOST_BITS_PER_WIDE_INT
&& (unsigned HOST_WIDE_INT) INTVAL (trueop0) == GET_MODE_MASK (mode)
&& ! side_effects_p (op1))
return op0;
canonicalize_shift:
- if (SHIFT_COUNT_TRUNCATED && GET_CODE (op1) == CONST_INT)
+ if (SHIFT_COUNT_TRUNCATED && CONST_INT_P (op1))
{
val = INTVAL (op1) & (GET_MODE_BITSIZE (mode) - 1);
if (val != INTVAL (op1))
return op0;
/* Optimize (lshiftrt (clz X) C) as (eq X 0). */
if (GET_CODE (op0) == CLZ
- && GET_CODE (trueop1) == CONST_INT
+ && CONST_INT_P (trueop1)
&& STORE_FLAG_VALUE == 1
&& INTVAL (trueop1) < (HOST_WIDE_INT)width)
{
case SMIN:
if (width <= HOST_BITS_PER_WIDE_INT
- && GET_CODE (trueop1) == CONST_INT
+ && CONST_INT_P (trueop1)
&& INTVAL (trueop1) == (HOST_WIDE_INT) 1 << (width - 1)
&& ! side_effects_p (op0))
return op1;
case SMAX:
if (width <= HOST_BITS_PER_WIDE_INT
- && GET_CODE (trueop1) == CONST_INT
+ && CONST_INT_P (trueop1)
&& ((unsigned HOST_WIDE_INT) INTVAL (trueop1)
== (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
&& ! side_effects_p (op0))
gcc_assert (mode == GET_MODE_INNER (GET_MODE (trueop0)));
gcc_assert (GET_CODE (trueop1) == PARALLEL);
gcc_assert (XVECLEN (trueop1, 0) == 1);
- gcc_assert (GET_CODE (XVECEXP (trueop1, 0, 0)) == CONST_INT);
+ gcc_assert (CONST_INT_P (XVECEXP (trueop1, 0, 0)));
if (GET_CODE (trueop0) == CONST_VECTOR)
return CONST_VECTOR_ELT (trueop0, INTVAL (XVECEXP
tmp_op, gen_rtx_PARALLEL (VOIDmode, vec));
return tmp;
}
+ if (GET_CODE (trueop0) == VEC_DUPLICATE
+ && GET_MODE (XEXP (trueop0, 0)) == mode)
+ return XEXP (trueop0, 0);
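+ /* E.g. selecting any element of (vec_duplicate:V4SI x) yields x
+ when x has the requested mode. */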
}
else
{
{
rtx x = XVECEXP (trueop1, 0, i);
- gcc_assert (GET_CODE (x) == CONST_INT);
+ gcc_assert (CONST_INT_P (x));
RTVEC_ELT (v, i) = CONST_VECTOR_ELT (trueop0,
INTVAL (x));
}
}
if (XVECLEN (trueop1, 0) == 1
- && GET_CODE (XVECEXP (trueop1, 0, 0)) == CONST_INT
+ && CONST_INT_P (XVECEXP (trueop1, 0, 0))
&& GET_CODE (trueop0) == VEC_CONCAT)
{
rtx vec = trueop0;
gcc_assert (GET_MODE_INNER (mode) == op1_mode);
if ((GET_CODE (trueop0) == CONST_VECTOR
- || GET_CODE (trueop0) == CONST_INT
+ || CONST_INT_P (trueop0)
|| GET_CODE (trueop0) == CONST_DOUBLE)
&& (GET_CODE (trueop1) == CONST_VECTOR
- || GET_CODE (trueop1) == CONST_INT
+ || CONST_INT_P (trueop1)
|| GET_CODE (trueop1) == CONST_DOUBLE))
{
int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
/* We can fold some multi-word operations. */
if (GET_MODE_CLASS (mode) == MODE_INT
&& width == HOST_BITS_PER_WIDE_INT * 2
- && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
- && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
+ && (GET_CODE (op0) == CONST_DOUBLE || CONST_INT_P (op0))
+ && (GET_CODE (op1) == CONST_DOUBLE || CONST_INT_P (op1)))
{
unsigned HOST_WIDE_INT l1, l2, lv, lt;
HOST_WIDE_INT h1, h2, hv, ht;
return immed_double_const (lv, hv, mode);
}
- if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
+ if (CONST_INT_P (op0) && CONST_INT_P (op1)
&& width <= HOST_BITS_PER_WIDE_INT && width != 0)
{
/* Get the integer argument values in two forms:
arg0s = arg0;
arg1s = arg1;
}
-
+
/* Compute the value of the arithmetic. */
-
+
switch (code)
{
case PLUS:
val = arg0s + arg1s;
break;
-
+
case MINUS:
val = arg0s - arg1s;
break;
-
+
case MULT:
val = arg0s * arg1s;
break;
-
+
case DIV:
if (arg1s == 0
|| (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
return 0;
val = arg0s / arg1s;
break;
-
+
case MOD:
if (arg1s == 0
|| (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
return 0;
val = arg0s % arg1s;
break;
-
+
case UDIV:
if (arg1 == 0
|| (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
return 0;
val = (unsigned HOST_WIDE_INT) arg0 / arg1;
break;
-
+
case UMOD:
if (arg1 == 0
|| (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
return 0;
val = (unsigned HOST_WIDE_INT) arg0 % arg1;
break;
-
+
case AND:
val = arg0 & arg1;
break;
-
+
case IOR:
val = arg0 | arg1;
break;
-
+
case XOR:
val = arg0 ^ arg1;
break;
-
+
case LSHIFTRT:
case ASHIFT:
case ASHIFTRT:
arg1 = (unsigned HOST_WIDE_INT) arg1 % width;
else if (arg1 < 0 || arg1 >= GET_MODE_BITSIZE (mode))
return 0;
-
+
val = (code == ASHIFT
? ((unsigned HOST_WIDE_INT) arg0) << arg1
: ((unsigned HOST_WIDE_INT) arg0) >> arg1);
-
+
/* Sign-extend the result for arithmetic right shifts. */
if (code == ASHIFTRT && arg0s < 0 && arg1 > 0)
val |= ((HOST_WIDE_INT) -1) << (width - arg1);
break;
-
+
case ROTATERT:
if (arg1 < 0)
return 0;
-
+
arg1 %= width;
val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
| (((unsigned HOST_WIDE_INT) arg0) >> arg1));
break;
-
+
case ROTATE:
if (arg1 < 0)
return 0;
-
+
arg1 %= width;
val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
| (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
break;
-
+
case COMPARE:
/* Do nothing here. */
return 0;
-
+
case SMIN:
val = arg0s <= arg1s ? arg0s : arg1s;
break;
-
+
case UMIN:
val = ((unsigned HOST_WIDE_INT) arg0
<= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
break;
-
+
case SMAX:
val = arg0s > arg1s ? arg0s : arg1s;
break;
-
+
case UMAX:
val = ((unsigned HOST_WIDE_INT) arg0
> (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
break;
-
+
case SS_PLUS:
case US_PLUS:
case SS_MINUS:
case US_ASHIFT:
/* ??? There are simplifications that can be done. */
return 0;
-
+
default:
gcc_unreachable ();
}
else if (swap_commutative_operands_p (lhs, rhs))
tem = lhs, lhs = rhs, rhs = tem;
- if ((GET_CODE (lhs) == CONST || GET_CODE (lhs) == CONST_INT)
- && (GET_CODE (rhs) == CONST || GET_CODE (rhs) == CONST_INT))
+ if ((GET_CODE (lhs) == CONST || CONST_INT_P (lhs))
+ && (GET_CODE (rhs) == CONST || CONST_INT_P (rhs)))
{
rtx tem_lhs, tem_rhs;
}
else
tem = simplify_binary_operation (ncode, mode, lhs, rhs);
-
+
/* Reject "simplifications" that just wrap the two
arguments in a CONST. Failure to do so can result
in infinite recursion with simplify_binary_operation
lneg &= rneg;
if (GET_CODE (tem) == NEG)
tem = XEXP (tem, 0), lneg = !lneg;
- if (GET_CODE (tem) == CONST_INT && lneg)
+ if (CONST_INT_P (tem) && lneg)
tem = neg_const_int (mode, tem), lneg = 0;
ops[i].op = tem;
/* Create (minus -C X) instead of (neg (const (plus X C))). */
if (n_ops == 2
- && GET_CODE (ops[1].op) == CONST_INT
+ && CONST_INT_P (ops[1].op)
&& CONSTANT_P (ops[0].op)
&& ops[0].neg)
return gen_rtx_fmt_ee (MINUS, mode, ops[1].op, ops[0].op);
-
+
/* We suppressed creation of trivial CONST expressions in the
combination loop to avoid recursion. Create one manually now.
The combination loop should have ensured that there is exactly
in the array and that any other constant will be next-to-last. */
if (n_ops > 1
- && GET_CODE (ops[n_ops - 1].op) == CONST_INT
+ && CONST_INT_P (ops[n_ops - 1].op)
&& CONSTANT_P (ops[n_ops - 2].op))
{
rtx value = ops[n_ops - 1].op;
}
#else
return NULL_RTX;
-#endif
+#endif
}
if (VECTOR_MODE_P (mode))
{
/* If op0 is a compare, extract the comparison arguments from it. */
if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
- return simplify_relational_operation (code, mode, VOIDmode,
- XEXP (op0, 0), XEXP (op0, 1));
+ return simplify_gen_relational (code, mode, VOIDmode,
+ XEXP (op0, 0), XEXP (op0, 1));
if (GET_MODE_CLASS (cmp_mode) == MODE_CC
|| CC0_P (op0))
}
}
+ /* (LTU/GEU (PLUS a C) C), where C is constant, can be simplified to
+ (GEU/LTU a -C). Likewise for (LTU/GEU (PLUS a C) a). */
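+ /* E.g. (ltu:SI (plus:SI a (const_int 16)) (const_int 16)) becomes
+ (geu:SI a (const_int -16)). */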
+ if ((code == LTU || code == GEU)
+ && GET_CODE (op0) == PLUS
+ && CONST_INT_P (XEXP (op0, 1))
+ && (rtx_equal_p (op1, XEXP (op0, 0))
+ || rtx_equal_p (op1, XEXP (op0, 1))))
+ {
+ rtx new_cmp
+ = simplify_gen_unary (NEG, cmp_mode, XEXP (op0, 1), cmp_mode);
+ return simplify_gen_relational ((code == LTU ? GEU : LTU), mode,
+ cmp_mode, XEXP (op0, 0), new_cmp);
+ }
+
/* Canonicalize (LTU/GEU (PLUS a b) b) as (LTU/GEU (PLUS a b) a). */
if ((code == LTU || code == GEU)
&& GET_CODE (op0) == PLUS
&& rtx_equal_p (op1, XEXP (op0, 1))
/* Don't recurse "infinitely" for (LTU/GEU (PLUS b b) b). */
&& !rtx_equal_p (op1, XEXP (op0, 0)))
- return simplify_gen_relational (code, mode, cmp_mode, op0, XEXP (op0, 0));
+ return simplify_gen_relational (code, mode, cmp_mode, op0,
+ copy_rtx (XEXP (op0, 0)));
if (op1 == const0_rtx)
{
/* (eq/ne (xor x C1) C2) simplifies to (eq/ne x (C1^C2)). */
if ((code == EQ || code == NE)
&& op0code == XOR
- && (GET_CODE (op1) == CONST_INT
+ && (CONST_INT_P (op1)
|| GET_CODE (op1) == CONST_DOUBLE)
- && (GET_CODE (XEXP (op0, 1)) == CONST_INT
+ && (CONST_INT_P (XEXP (op0, 1))
|| GET_CODE (XEXP (op0, 1)) == CONST_DOUBLE))
return simplify_gen_relational (code, mode, cmp_mode, XEXP (op0, 0),
simplify_gen_binary (XOR, cmp_mode,
return NULL_RTX;
}
-enum
+enum
{
CMP_EQ = 1,
CMP_LT = 2,
/* Convert the known results for EQ, LT, GT, LTU, GTU contained in
KNOWN_RESULT to a CONST_INT, based on the requested comparison CODE.
- For KNOWN_RESULT to make sense it should be either CMP_EQ, or the
+ For KNOWN_RESULT to make sense it should be either CMP_EQ, or the
logical OR of one of (CMP_LT, CMP_GT) and one of (CMP_LTU, CMP_GTU).
For floating-point comparisons, assume that the operands were ordered. */
if (INTEGRAL_MODE_P (mode) && trueop1 != const0_rtx
&& (code == EQ || code == NE)
- && ! ((REG_P (op0) || GET_CODE (trueop0) == CONST_INT)
- && (REG_P (op1) || GET_CODE (trueop1) == CONST_INT))
+ && ! ((REG_P (op0) || CONST_INT_P (trueop0))
+ && (REG_P (op1) || CONST_INT_P (trueop1)))
&& 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
/* We cannot do this if tem is a nonzero address. */
&& ! nonzero_address_p (tem))
/* Otherwise, see if the operands are both integers. */
if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
&& (GET_CODE (trueop0) == CONST_DOUBLE
- || GET_CODE (trueop0) == CONST_INT)
+ || CONST_INT_P (trueop0))
&& (GET_CODE (trueop1) == CONST_DOUBLE
- || GET_CODE (trueop1) == CONST_INT))
+ || CONST_INT_P (trueop1)))
{
int width = GET_MODE_BITSIZE (mode);
HOST_WIDE_INT l0s, h0s, l1s, h1s;
/* Optimize comparisons with upper and lower bounds. */
if (SCALAR_INT_MODE_P (mode)
&& GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
- && GET_CODE (trueop1) == CONST_INT)
+ && CONST_INT_P (trueop1))
{
int sign;
unsigned HOST_WIDE_INT nonzero = nonzero_bits (trueop0, mode);
if (GET_CODE (op0) == IOR)
{
rtx inner_const = avoid_constant_pool_reference (XEXP (op0, 1));
- if (GET_CODE (inner_const) == CONST_INT && inner_const != const0_rtx)
+ if (CONST_INT_P (inner_const) && inner_const != const0_rtx)
{
int sign_bitnum = GET_MODE_BITSIZE (mode) - 1;
int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
{
case SIGN_EXTRACT:
case ZERO_EXTRACT:
- if (GET_CODE (op0) == CONST_INT
- && GET_CODE (op1) == CONST_INT
- && GET_CODE (op2) == CONST_INT
+ if (CONST_INT_P (op0)
+ && CONST_INT_P (op1)
+ && CONST_INT_P (op2)
&& ((unsigned) INTVAL (op1) + (unsigned) INTVAL (op2) <= width)
&& width <= (unsigned) HOST_BITS_PER_WIDE_INT)
{
break;
case IF_THEN_ELSE:
- if (GET_CODE (op0) == CONST_INT)
+ if (CONST_INT_P (op0))
return op0 != const0_rtx ? op1 : op2;
/* Convert c ? a : a into "a". */
rtx temp;
/* Look for happy constants in op1 and op2. */
- if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
+ if (CONST_INT_P (op1) && CONST_INT_P (op2))
{
HOST_WIDE_INT t = INTVAL (op1);
HOST_WIDE_INT f = INTVAL (op2);
/* See if any simplifications were possible. */
if (temp)
{
- if (GET_CODE (temp) == CONST_INT)
+ if (CONST_INT_P (temp))
return temp == const0_rtx ? op2 : op1;
else if (temp)
return gen_rtx_IF_THEN_ELSE (mode, temp, op1, op2);
gcc_assert (GET_MODE (op1) == mode);
gcc_assert (VECTOR_MODE_P (mode));
op2 = avoid_constant_pool_reference (op2);
- if (GET_CODE (op2) == CONST_INT)
+ if (CONST_INT_P (op2))
{
int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
and then repacking them again for OUTERMODE. */
static rtx
-simplify_immed_subreg (enum machine_mode outermode, rtx op,
+simplify_immed_subreg (enum machine_mode outermode, rtx op,
enum machine_mode innermode, unsigned int byte)
{
/* We support up to 512-bit values (for V8DFmode). */
enum machine_mode outer_submode;
/* Some ports misuse CCmode. */
- if (GET_MODE_CLASS (outermode) == MODE_CC && GET_CODE (op) == CONST_INT)
+ if (GET_MODE_CLASS (outermode) == MODE_CC && CONST_INT_P (op))
return op;
/* We have no way to represent a complex constant at the rtl level. */
gcc_assert (BITS_PER_UNIT % value_bit == 0);
/* I don't know how to handle endianness of sub-units. */
gcc_assert (elem_bitsize % BITS_PER_UNIT == 0);
-
+
for (elem = 0; elem < num_elem; elem++)
{
unsigned char * vp;
rtx el = elems[elem];
-
+
/* Vectors are kept in target memory order. (This is probably
a mistake.) */
{
unsigned byte = (elem * elem_bitsize) / BITS_PER_UNIT;
- unsigned ibyte = (((num_elem - 1 - elem) * elem_bitsize)
+ unsigned ibyte = (((num_elem - 1 - elem) * elem_bitsize)
/ BITS_PER_UNIT);
unsigned word_byte = WORDS_BIG_ENDIAN ? ibyte : byte;
unsigned subword_byte = BYTES_BIG_ENDIAN ? ibyte : byte;
+ (word_byte / UNITS_PER_WORD) * UNITS_PER_WORD);
vp = value + (bytele * BITS_PER_UNIT) / value_bit;
}
-
+
switch (GET_CODE (el))
{
case CONST_INT:
for (i = 0;
- i < HOST_BITS_PER_WIDE_INT && i < elem_bitsize;
+ i < HOST_BITS_PER_WIDE_INT && i < elem_bitsize;
i += value_bit)
*vp++ = INTVAL (el) >> i;
/* CONST_INTs are always logically sign-extended. */
for (; i < elem_bitsize; i += value_bit)
*vp++ = INTVAL (el) < 0 ? -1 : 0;
break;
-
+
case CONST_DOUBLE:
if (GET_MODE (el) == VOIDmode)
{
ibase = i;
*vp++ = tmp[ibase / 32] >> i % 32;
}
-
+
/* It shouldn't matter what's done here, so fill it with
zero. */
for (; i < elem_bitsize; i += value_bit)
*vp++ = 0;
}
break;
-
+
default:
gcc_unreachable ();
}
will already have offset 0. */
if (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode))
{
- unsigned ibyte = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode)
+ unsigned ibyte = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode)
- byte);
unsigned word_byte = WORDS_BIG_ENDIAN ? ibyte : byte;
unsigned subword_byte = BYTES_BIG_ENDIAN ? ibyte : byte;
value_start = byte * (BITS_PER_UNIT / value_bit);
/* Re-pack the value. */
-
+
if (VECTOR_MODE_P (outermode))
{
num_elem = GET_MODE_NUNITS (outermode);
for (elem = 0; elem < num_elem; elem++)
{
unsigned char *vp;
-
+
/* Vectors are stored in target memory order. (This is probably
a mistake.) */
{
unsigned byte = (elem * elem_bitsize) / BITS_PER_UNIT;
- unsigned ibyte = (((num_elem - 1 - elem) * elem_bitsize)
+ unsigned ibyte = (((num_elem - 1 - elem) * elem_bitsize)
/ BITS_PER_UNIT);
unsigned word_byte = WORDS_BIG_ENDIAN ? ibyte : byte;
unsigned subword_byte = BYTES_BIG_ENDIAN ? ibyte : byte;
for (; i < elem_bitsize; i += value_bit)
hi |= ((HOST_WIDE_INT)(*vp++ & value_mask)
<< (i - HOST_BITS_PER_WIDE_INT));
-
+
/* immed_double_const doesn't call trunc_int_for_mode. I don't
know why. */
if (elem_bitsize <= HOST_BITS_PER_WIDE_INT)
return NULL_RTX;
}
break;
-
+
case MODE_FLOAT:
case MODE_DECIMAL_FLOAT:
{
REAL_VALUE_TYPE r;
long tmp[max_bitsize / 32];
-
+
/* real_from_target wants its input in words affected by
FLOAT_WORDS_BIG_ENDIAN. However, we ignore this,
and use WORDS_BIG_ENDIAN instead; see the documentation
elems[elem] = CONST_FIXED_FROM_FIXED_VALUE (f, outer_submode);
}
break;
-
+
default:
gcc_unreachable ();
}
if (outermode == innermode && !byte)
return op;
- if (GET_CODE (op) == CONST_INT
+ if (CONST_INT_P (op)
|| GET_CODE (op) == CONST_DOUBLE
|| GET_CODE (op) == CONST_FIXED
|| GET_CODE (op) == CONST_VECTOR)
than the sign extension's sign_bit_copies and introduces zeros
into the high bits of the result. */
&& (2 * GET_MODE_BITSIZE (outermode)) <= GET_MODE_BITSIZE (innermode)
- && GET_CODE (XEXP (op, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (op, 1))
&& GET_CODE (XEXP (op, 0)) == SIGN_EXTEND
&& GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
&& INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode)
|| GET_CODE (op) == ASHIFTRT)
&& SCALAR_INT_MODE_P (outermode)
&& GET_MODE_BITSIZE (outermode) < GET_MODE_BITSIZE (innermode)
- && GET_CODE (XEXP (op, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (op, 1))
&& GET_CODE (XEXP (op, 0)) == ZERO_EXTEND
&& GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
&& INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode)
if (GET_CODE (op) == ASHIFT
&& SCALAR_INT_MODE_P (outermode)
&& GET_MODE_BITSIZE (outermode) < GET_MODE_BITSIZE (innermode)
- && GET_CODE (XEXP (op, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (op, 1))
&& (GET_CODE (XEXP (op, 0)) == ZERO_EXTEND
|| GET_CODE (XEXP (op, 0)) == SIGN_EXTEND)
&& GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
&& SCALAR_INT_MODE_P (outermode)
&& GET_MODE_BITSIZE (outermode) >= BITS_PER_WORD
&& GET_MODE_BITSIZE (innermode) >= (2 * GET_MODE_BITSIZE (outermode))
- && GET_CODE (XEXP (op, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (op, 1))
&& (INTVAL (XEXP (op, 1)) & (GET_MODE_BITSIZE (outermode) - 1)) == 0
- && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (innermode)
+ && INTVAL (XEXP (op, 1)) >= 0
+ && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (innermode)
&& byte == subreg_lowpart_offset (outermode, innermode))
{
int shifted_bytes = INTVAL (XEXP (op, 1)) / BITS_PER_UNIT;
return simplify_gen_subreg (outermode, XEXP (op, 0), innermode,
(WORDS_BIG_ENDIAN
- ? byte - shifted_bytes : byte + shifted_bytes));
+ ? byte - shifted_bytes
+ : byte + shifted_bytes));
}
return NULL_RTX;