};
/* Forward declarations */
-static void set_of_1 (rtx, rtx, void *);
+static void set_of_1 (rtx, const_rtx, void *);
static bool covers_regno_p (const_rtx, unsigned int);
static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (const_rtx);
-static void parms_set (rtx, rtx, void *);
+static void parms_set (rtx, const_rtx, void *);
static void subreg_get_info (unsigned int, enum machine_mode,
unsigned int, enum machine_mode,
struct subreg_info *);
-static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
- rtx, enum machine_mode,
+static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, enum machine_mode,
+ const_rtx, enum machine_mode,
unsigned HOST_WIDE_INT);
-static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode,
- rtx, enum machine_mode,
+static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, enum machine_mode,
+ const_rtx, enum machine_mode,
unsigned HOST_WIDE_INT);
-static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
+static unsigned int cached_num_sign_bit_copies (const_rtx, enum machine_mode, const_rtx,
enum machine_mode,
unsigned int);
-static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
+static unsigned int num_sign_bit_copies1 (const_rtx, enum machine_mode, const_rtx,
enum machine_mode, unsigned int);
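/* Editorial aside, not part of the patch: these prototypes have to be
   constified as a group.  Once one helper takes a pointer to const,
   every function that forwards its argument must follow, or the
   implicit const-stripping conversion is rejected.  A minimal
   standalone analogue with stand-in types (nothing below is GCC
   code):  */

struct node { int value; };
typedef const struct node *const_node;

static int leaf (const_node n) { return n->value; }

/* Must also take const_node, mirroring how cached_nonzero_bits and
   nonzero_bits1 change in lockstep.  */
static int wrapper (const_node n) { return leaf (n); }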
/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
case CONST:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case SYMBOL_REF:
case LABEL_REF:
false, we are slightly more conservative.
The frame pointer and the arg pointer are considered constant. */
-int
-rtx_varies_p (rtx x, int for_alias)
+bool
+rtx_varies_p (const_rtx x, bool for_alias)
{
RTX_CODE code;
int i;
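/* Editorial sketch (hypothetical caller, not from the patch): the new
   bool signature reads naturally at call sites.  Per the comment
   above, the frame pointer is considered constant.  */

static bool
address_is_stable_p (const_rtx addr)
{
  /* True for frame_pointer_rtx, false for an rtx whose value may
     change between evaluations.  */
  return !rtx_varies_p (addr, true);
}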
case CONST:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case SYMBOL_REF:
case LABEL_REF:
FOR_ALIAS is true if we are called from alias analysis; if it is
false, we are slightly more conservative. */
-int
-rtx_addr_varies_p (rtx x, int for_alias)
+bool
+rtx_addr_varies_p (const_rtx x, bool for_alias)
{
enum rtx_code code;
int i;
case REG:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case SYMBOL_REF:
case CODE_LABEL:
case CONST_INT:
case CONST_VECTOR:
case CONST_DOUBLE:
+ case CONST_FIXED:
/* These are kept unique for a given value. */
return 0;
FROM_INSN and TO_INSN (exclusive of those two). */
int
-reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
+reg_set_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
{
- rtx insn;
+ const_rtx insn;
if (from_insn == to_insn)
return 0;
/* Internals of reg_set_between_p. */
int
-reg_set_p (rtx reg, rtx insn)
+reg_set_p (const_rtx reg, const_rtx insn)
{
/* We can be passed an insn or part of one. If we are passed an insn,
check if a side-effect of the insn clobbers REG. */
X contains a MEM; this routine does use memory aliasing. */
int
-modified_between_p (rtx x, rtx start, rtx end)
+modified_between_p (const_rtx x, const_rtx start, const_rtx end)
{
- enum rtx_code code = GET_CODE (x);
+ const enum rtx_code code = GET_CODE (x);
const char *fmt;
int i, j;
rtx insn;
{
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case CONST:
case SYMBOL_REF:
does use memory aliasing. */
int
-modified_in_p (rtx x, rtx insn)
+modified_in_p (const_rtx x, const_rtx insn)
{
- enum rtx_code code = GET_CODE (x);
+ const enum rtx_code code = GET_CODE (x);
const char *fmt;
int i, j;
{
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case CONST:
case SYMBOL_REF:
/* Helper function for set_of. */
struct set_of_data
{
- rtx found;
- rtx pat;
+ const_rtx found;
+ const_rtx pat;
};
static void
-set_of_1 (rtx x, rtx pat, void *data1)
+set_of_1 (rtx x, const_rtx pat, void *data1)
{
- struct set_of_data *data = (struct set_of_data *) (data1);
- if (rtx_equal_p (x, data->pat)
- || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
- data->found = pat;
+ struct set_of_data *const data = (struct set_of_data *) (data1);
+ if (rtx_equal_p (x, data->pat)
+ || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
+ data->found = pat;
}
/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
(either directly or via STRICT_LOW_PART and similar modifiers). */
-rtx
-set_of (rtx pat, rtx insn)
+const_rtx
+set_of (const_rtx pat, const_rtx insn)
{
struct set_of_data data;
data.found = NULL_RTX;
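/* Editorial sketch (hypothetical helper, not from the patch): with the
   constified signature, callers receive the SET or CLOBBER as a
   const_rtx and may inspect but not rewrite it.  */

static bool
reg_clobbered_by_insn_p (const_rtx reg, const_rtx insn)
{
  const_rtx body = set_of (reg, insn);
  return body != NULL_RTX && GET_CODE (body) == CLOBBER;
}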
value to itself. */
int
-noop_move_p (rtx insn)
+noop_move_p (const_rtx insn)
{
rtx pat = PATTERN (insn);
the SUBREG will be passed. */
void
-note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
+note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data)
{
int i;
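/* Editorial sketch (hypothetical callback, not from the patch): the
   callback contract now takes the store destination as a plain rtx but
   the enclosing SET/CLOBBER as a const_rtx, matching set_of_1 above
   and parms_set below.  */

struct count_stores_data { unsigned int n; };

static void
count_reg_stores (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
  if (REG_P (x))
    ((struct count_stores_data *) data)->n++;
}

/* Usage: note_stores (PATTERN (insn), count_reg_stores, &counter);  */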
case CONST_INT:
case CONST:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case CC0:
case PC:
case CONST_INT:
case CONST:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case CC0:
case PC:
case CONST_INT:
case CONST:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case CC0:
case PC:
/* Handle these cases quickly. */
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case SYMBOL_REF:
case LABEL_REF:
case CC0:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case CONST:
case LABEL_REF:
case CONST:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case SYMBOL_REF:
case REG:
{
rtx pat = PATTERN (insn);
- if (find_reg_note (insn, REG_LABEL, NULL_RTX))
+ /* If we have a JUMP_LABEL set, we're not a computed jump. */
+ if (JUMP_LABEL (insn) != NULL)
return 0;
- else if (GET_CODE (pat) == PARALLEL)
+
+ if (GET_CODE (pat) == PARALLEL)
{
int len = XVECLEN (pat, 0);
int has_use_labelref = 0;
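/* Editorial illustration (RTL sketch, hedged): a computed jump sets pc
   from a dynamically computed value, e.g.

     (set (pc) (reg:SI 3))

   and carries no JUMP_LABEL, whereas a direct jump to a label has
   JUMP_LABEL set and is rejected by the test above.  */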
return -8;
if (code == CONST_DOUBLE)
return -7;
+ if (code == CONST_FIXED)
+ return -7;
op = avoid_constant_pool_reference (op);
code = GET_CODE (op);
return -6;
if (code == CONST_DOUBLE)
return -5;
+ if (code == CONST_FIXED)
+ return -5;
return -4;
case RTX_EXTRA:
unsigned int
subreg_nregs (const_rtx x)
{
+ return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
+}
+
+/* Return the number of registers that the subreg expression X refers
+   to, taking the inner register number from REGNO rather than reading
+   it from X.  This is a copy of subreg_nregs above, changed so that
+   the regno can be passed in. */
+
+unsigned int
+subreg_nregs_with_regno (unsigned int regno, const_rtx x)
+{
struct subreg_info info;
rtx subreg = SUBREG_REG (x);
- int regno = REGNO (subreg);
subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
&info);
return info.nregs;
}
+
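/* Editorial sketch (placeholder variables, not from the patch): the
   explicit regno parameter lets a caller ask how many hard registers a
   SUBREG would occupy if its inner pseudo were given a particular hard
   register, before that assignment is committed:

     unsigned int nregs = subreg_nregs_with_regno (hard_regno, x);  */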
struct parms_set_data
{
int nregs;
/* Helper function for noticing stores to parameter registers. */
static void
-parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
+parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
struct parms_set_data *d = data;
if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
call instruction. */
bool
-keep_with_call_p (rtx insn)
+keep_with_call_p (const_rtx insn)
{
rtx set;
if we can break or not. */
if (SET_DEST (set) == stack_pointer_rtx)
{
- rtx i2 = next_nonnote_insn (insn);
+ /* This CONST_CAST is okay because next_nonnote_insn does not
+ modify its argument and we assign the result to a const_rtx
+ variable. */
+ const_rtx i2 = next_nonnote_insn (CONST_CAST_RTX (insn));
if (i2 && keep_with_call_p (i2))
return true;
}
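/* Editorial aside, a standalone analogue of the cast idiom used above
   (not GCC's CONST_CAST_RTX definition): const is stripped only to
   pass through a not-yet-constified interface, and the object is never
   written.  */

struct obj;
typedef const struct obj *const_obj;

static struct obj *pass_through (struct obj *o) { return o; }

static const_obj
follow (const_obj o)
{
  /* Safe: pass_through does not modify *o, and the result lands back
     in a pointer-to-const.  */
  return pass_through ((struct obj *) o);
}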
\f
unsigned HOST_WIDE_INT
-nonzero_bits (rtx x, enum machine_mode mode)
+nonzero_bits (const_rtx x, enum machine_mode mode)
{
return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
}
unsigned int
-num_sign_bit_copies (rtx x, enum machine_mode mode)
+num_sign_bit_copies (const_rtx x, enum machine_mode mode)
{
return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
}
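/* Editorial worked example (host arithmetic only, not GCC's
   implementation), showing what the two queries report for a known
   16-bit constant.  */

static unsigned int
sign_bit_copies16 (unsigned short v)
{
  unsigned int n = 1;
  int i;
  /* Count how many leading bits replicate bit 15.  */
  for (i = 14; i >= 0 && (((v >> i) & 1) == ((v >> 15) & 1)); i--)
    n++;
  return n;
}

/* For v = 0xff80: the nonzero-bits mask of the constant is 0xff80
   itself, and sign_bit_copies16 (0xff80) == 9, since bits 15..7 all
   equal the sign bit.  */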
identical subexpressions on the first or the second level. */
static unsigned HOST_WIDE_INT
-cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
+cached_nonzero_bits (const_rtx x, enum machine_mode mode, const_rtx known_x,
enum machine_mode known_mode,
unsigned HOST_WIDE_INT known_ret)
{
an arithmetic operation, we can do better. */
static unsigned HOST_WIDE_INT
-nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
+nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
enum machine_mode known_mode,
unsigned HOST_WIDE_INT known_ret)
{
first or the second level. */
static unsigned int
-cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
+cached_num_sign_bit_copies (const_rtx x, enum machine_mode mode, const_rtx known_x,
enum machine_mode known_mode,
unsigned int known_ret)
{
be between 1 and the number of bits in MODE. */
static unsigned int
-num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
+num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
enum machine_mode known_mode,
unsigned int known_ret)
{
assume it already contains a truncated value of MODE. */
bool
-truncated_to_mode (enum machine_mode mode, rtx x)
+truncated_to_mode (enum machine_mode mode, const_rtx x)
{
/* This register has already been used in MODE without explicit
truncation. */