/* Perform simple optimizations to clean up the result of reload.
Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
+ 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
This file is part of GCC.
rtx insn;
rtx testreg = gen_rtx_REG (VOIDmode, -1);
- cselib_init ();
+ cselib_init (true);
init_alias_analysis ();
for (insn = first; insn; insn = NEXT_INSN (insn))
that combine made wrt the contents of sign bits. We'll do this by
generating an extend instruction instead of a reg->reg copy. Thus
the destination must be a register that we can widen. */
- if (GET_CODE (src) == MEM
+ if (MEM_P (src)
&& GET_MODE_BITSIZE (GET_MODE (src)) < BITS_PER_WORD
&& (extend_op = LOAD_EXTEND_OP (GET_MODE (src))) != NIL
- && GET_CODE (SET_DEST (set)) != REG)
+ && !REG_P (SET_DEST (set)))
return 0;
#endif
return 0;
/* If memory loads are cheaper than register copies, don't change them. */
- if (GET_CODE (src) == MEM)
+ if (MEM_P (src))
old_cost = MEMORY_MOVE_COST (GET_MODE (src), dclass, 1);
- else if (GET_CODE (src) == REG)
+ else if (REG_P (src))
old_cost = REGISTER_MOVE_COST (GET_MODE (src),
REGNO_REG_CLASS (REGNO (src)), dclass);
else
#endif
this_cost = rtx_cost (this_rtx, SET);
}
- else if (GET_CODE (this_rtx) == REG)
+ else if (REG_P (this_rtx))
{
#ifdef LOAD_EXTEND_OP
if (extend_op != NIL)
tends to lead to smaller instructions on some machines. */
if (this_cost < old_cost
|| (this_cost == old_cost
- && GET_CODE (this_rtx) == REG
- && GET_CODE (SET_SRC (set)) != REG))
+ && REG_P (this_rtx)
+ && !REG_P (SET_SRC (set))))
{
#ifdef LOAD_EXTEND_OP
if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) < BITS_PER_WORD
{
cselib_val *v;
struct elt_loc_list *l;
+ rtx op;
+ enum machine_mode mode;
CLEAR_HARD_REG_SET (equiv_regs[i]);
/* cselib blows up on CODE_LABELs. Trying to fix that doesn't seem
right, so avoid the problem here. Likewise if we have a constant
and the insn pattern doesn't tell us the mode we need. */
- if (GET_CODE (recog_data.operand[i]) == CODE_LABEL
+ if (LABEL_P (recog_data.operand[i])
|| (CONSTANT_P (recog_data.operand[i])
&& recog_data.operand_mode[i] == VOIDmode))
continue;
- v = cselib_lookup (recog_data.operand[i], recog_data.operand_mode[i], 0);
+ op = recog_data.operand[i];
+ mode = GET_MODE (op);
+#ifdef LOAD_EXTEND_OP
+ if (MEM_P (op)
+ && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
+ && LOAD_EXTEND_OP (mode) != NIL)
+ {
+ rtx set = single_set (insn);
+
+ /* We might have multiple sets, some of which do implicit
+ extension. Punt on this for now. */
+ if (! set)
+ continue;
+ /* If the destination is also a MEM or a STRICT_LOW_PART, no
+ extension applies.
+ Also, if there is an explicit extension, we don't have to
+ worry about an implicit one. */
+ else if (MEM_P (SET_DEST (set))
+ || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART
+ || GET_CODE (SET_SRC (set)) == ZERO_EXTEND
+ || GET_CODE (SET_SRC (set)) == SIGN_EXTEND)
+ ; /* Continue ordinary processing. */
+#ifdef CANNOT_CHANGE_MODE_CLASS
+ /* If the register cannot change mode to word_mode, it follows that
+ it cannot have been used in word_mode. */
+ else if (REG_P (SET_DEST (set))
+ && CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
+ word_mode,
+ REGNO_REG_CLASS (REGNO (SET_DEST (set)))))
+ ; /* Continue ordinary processing. */
+#endif
+ /* If this is a straight load, make the extension explicit. */
+ else if (REG_P (SET_DEST (set))
+ && recog_data.n_operands == 2
+ && SET_SRC (set) == op
+ && SET_DEST (set) == recog_data.operand[1-i])
+ {
+ validate_change (insn, recog_data.operand_loc[i],
+ gen_rtx_fmt_e (LOAD_EXTEND_OP (mode),
+ word_mode, op),
+ 1);
+ validate_change (insn, recog_data.operand_loc[1-i],
+ gen_rtx_REG (word_mode, REGNO (SET_DEST (set))),
+ 1);
+ if (! apply_change_group ())
+ return 0;
+ return reload_cse_simplify_operands (insn, testreg);
+ }
+ else
+ /* ??? There might be arithmetic operations with memory that are
+ safe to optimize, but is it worth the trouble? */
+ continue;
+ }
+#endif /* LOAD_EXTEND_OP */
+ v = cselib_lookup (op, recog_data.operand_mode[i], 0);
if (! v)
continue;
for (l = v->locs; l; l = l->next)
- if (GET_CODE (l->loc) == REG)
+ if (REG_P (l->loc))
SET_HARD_REG_BIT (equiv_regs[i], REGNO (l->loc));
}
FOR_EACH_BB_REVERSE (bb)
{
- insn = bb->head;
- if (GET_CODE (insn) == CODE_LABEL)
+ insn = BB_HEAD (bb);
+ if (LABEL_P (insn))
{
HARD_REG_SET live;
/* We cannot do our optimization across labels. Invalidating all the use
information we have would be costly, so we just note where the label
is and then later disable any optimization that would cross it. */
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
last_label_ruid = reload_combine_ruid;
- else if (GET_CODE (insn) == BARRIER)
+ else if (BARRIER_P (insn))
for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
if (! fixed_regs[r])
reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
... (MEM (PLUS (REGZ) (REGY)))... .
First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
- and that we know all uses of REGX before it dies. */
+ and that we know all uses of REGX before it dies.
+ Also, explicitly check that REGX != REGY; our life information
+ does not yet show whether REGY changes in this insn. */
set = single_set (insn);
if (set != NULL_RTX
- && GET_CODE (SET_DEST (set)) == REG
- && (HARD_REGNO_NREGS (REGNO (SET_DEST (set)),
- GET_MODE (SET_DEST (set)))
+ && REG_P (SET_DEST (set))
+ && (hard_regno_nregs[REGNO (SET_DEST (set))]
+ [GET_MODE (SET_DEST (set))]
== 1)
&& GET_CODE (SET_SRC (set)) == PLUS
- && GET_CODE (XEXP (SET_SRC (set), 1)) == REG
+ && REG_P (XEXP (SET_SRC (set), 1))
&& rtx_equal_p (XEXP (SET_SRC (set), 0), SET_DEST (set))
+ && !rtx_equal_p (XEXP (SET_SRC (set), 1), SET_DEST (set))
&& last_label_ruid < reg_state[REGNO (SET_DEST (set))].use_ruid)
{
rtx reg = SET_DEST (set);
else
{
/* Otherwise, look for a free index register. Since we have
- checked above that neiter REG nor BASE are index registers,
+ checked above that neither REG nor BASE are index registers,
if we find anything at all, it will be different from these
two registers. */
for (i = first_index_reg; i <= last_index_reg; i++)
i)
&& reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
&& reg_state[i].store_ruid <= reg_state[regno].use_ruid
- && HARD_REGNO_NREGS (i, GET_MODE (reg)) == 1)
+ && hard_regno_nregs[i][GET_MODE (reg)] == 1)
{
rtx index_reg = gen_rtx_REG (GET_MODE (reg), i);
note_stores (PATTERN (insn), reload_combine_note_store, NULL);
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
rtx link;
link = XEXP (link, 1))
{
rtx usage_rtx = XEXP (XEXP (link, 0), 0);
- if (GET_CODE (usage_rtx) == REG)
+ if (REG_P (usage_rtx))
{
unsigned int i;
unsigned int start_reg = REGNO (usage_rtx);
unsigned int num_regs =
- HARD_REGNO_NREGS (start_reg, GET_MODE (usage_rtx));
+ hard_regno_nregs[start_reg][GET_MODE (usage_rtx)];
unsigned int end_reg = start_reg + num_regs - 1;
for (i = start_reg; i <= end_reg; i++)
if (GET_CODE (XEXP (link, 0)) == CLOBBER)
}
}
- else if (GET_CODE (insn) == JUMP_INSN
+ else if (JUMP_P (insn)
&& GET_CODE (PATTERN (insn)) != RETURN)
{
/* Non-spill registers might be used at the call destination in
for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
{
if (REG_NOTE_KIND (note) == REG_INC
- && GET_CODE (XEXP (note, 0)) == REG)
+ && REG_P (XEXP (note, 0)))
{
int regno = REGNO (XEXP (note, 0));
GET_MODE (dst));
dst = SUBREG_REG (dst);
}
- if (GET_CODE (dst) != REG)
+ if (!REG_P (dst))
return;
regno += REGNO (dst);
|| GET_CODE (SET_DEST (set)) == SIGN_EXTRACT
|| GET_CODE (SET_DEST (set)) == STRICT_LOW_PART)
{
- for (i = HARD_REGNO_NREGS (regno, mode) - 1 + regno; i >= regno; i--)
+ for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
{
reg_state[i].use_index = -1;
reg_state[i].store_ruid = reload_combine_ruid;
}
else
{
- for (i = HARD_REGNO_NREGS (regno, mode) - 1 + regno; i >= regno; i--)
+ for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
{
reg_state[i].store_ruid = reload_combine_ruid;
reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
switch (code)
{
case SET:
- if (GET_CODE (SET_DEST (x)) == REG)
+ if (REG_P (SET_DEST (x)))
{
reload_combine_note_use (&SET_SRC (x), insn);
return;
case USE:
/* If this is the USE of a return value, we can't change it. */
- if (GET_CODE (XEXP (x, 0)) == REG && REG_FUNCTION_VALUE_P (XEXP (x, 0)))
+ if (REG_P (XEXP (x, 0)) && REG_FUNCTION_VALUE_P (XEXP (x, 0)))
{
/* Mark the return register as used in an unknown fashion. */
rtx reg = XEXP (x, 0);
int regno = REGNO (reg);
- int nregs = HARD_REGNO_NREGS (regno, GET_MODE (reg));
+ int nregs = hard_regno_nregs[regno][GET_MODE (reg)];
while (--nregs >= 0)
reg_state[regno + nregs].use_index = -1;
break;
case CLOBBER:
- if (GET_CODE (SET_DEST (x)) == REG)
+ if (REG_P (SET_DEST (x)))
{
/* No spurious CLOBBERs of pseudo registers may remain. */
if (REGNO (SET_DEST (x)) >= FIRST_PSEUDO_REGISTER)
case PLUS:
/* We are interested in (plus (reg) (const_int)) . */
- if (GET_CODE (XEXP (x, 0)) != REG
+ if (!REG_P (XEXP (x, 0))
|| GET_CODE (XEXP (x, 1)) != CONST_INT)
break;
offset = XEXP (x, 1);
if (regno >= FIRST_PSEUDO_REGISTER)
abort ();
- nregs = HARD_REGNO_NREGS (regno, GET_MODE (x));
+ nregs = hard_regno_nregs[regno][GET_MODE (x)];
/* We can't substitute into multi-hard-reg uses. */
if (nregs > 1)
{
rtx pat, note;
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
{
move2add_last_label_luid = move2add_luid;
/* We're going to increment move2add_luid twice after a
/* For simplicity, we only perform this optimization on
straightforward SETs. */
if (GET_CODE (pat) == SET
- && GET_CODE (SET_DEST (pat)) == REG)
+ && REG_P (SET_DEST (pat)))
{
rtx reg = SET_DEST (pat);
int regno = REGNO (reg);
else if (rtx_cost (new_src, PLUS) < rtx_cost (src, SET)
&& have_add2_insn (reg, new_src))
{
- rtx newpat = gen_add2_insn (reg, new_src);
- if (INSN_P (newpat) && NEXT_INSN (newpat) == NULL_RTX)
- newpat = PATTERN (newpat);
- /* If it was the first insn of a sequence or
- some other emitted insn, validate_change will
- reject it. */
- validate_change (insn, &PATTERN (insn),
- newpat, 0);
+ rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src);
+ validate_change (insn, &SET_SRC (pat), tem, 0);
}
else
{
(set (REGX) (PLUS (REGX) (CONST_INT A)))
...
(set (REGX) (plus (REGX) (CONST_INT B-A))) */
- else if (GET_CODE (src) == REG
+ else if (REG_P (src)
&& reg_set_luid[regno] == reg_set_luid[REGNO (src)]
&& reg_base_reg[regno] == reg_base_reg[REGNO (src)]
&& MODES_OK_FOR_MOVE2ADD (GET_MODE (reg),
< COSTS_N_INSNS (1) + rtx_cost (src3, SET))
&& have_add2_insn (reg, new_src))
{
- rtx newpat = gen_add2_insn (reg, new_src);
- if (INSN_P (newpat)
- && NEXT_INSN (newpat) == NULL_RTX)
- newpat = PATTERN (newpat);
+ rtx newpat = gen_rtx_SET (VOIDmode,
+ reg,
+ gen_rtx_PLUS (GET_MODE (reg),
+ reg,
+ new_src));
success
= validate_change (next, &PATTERN (next),
newpat, 0);
for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
{
if (REG_NOTE_KIND (note) == REG_INC
- && GET_CODE (XEXP (note, 0)) == REG)
+ && REG_P (XEXP (note, 0)))
{
/* Reset the information about this register. */
int regno = REGNO (XEXP (note, 0));
if (cnd != NULL_RTX
&& GET_CODE (cnd) == NE
- && GET_CODE (XEXP (cnd, 0)) == REG
+ && REG_P (XEXP (cnd, 0))
/* The following two checks, which are also in
move2add_note_store, are intended to reduce the
number of calls to gen_rtx_SET to avoid memory
allocation if possible. */
&& SCALAR_INT_MODE_P (GET_MODE (XEXP (cnd, 0)))
- && HARD_REGNO_NREGS (REGNO (XEXP (cnd, 0)), GET_MODE (XEXP (cnd, 0))) == 1
+ && hard_regno_nregs[REGNO (XEXP (cnd, 0))][GET_MODE (XEXP (cnd, 0))] == 1
&& GET_CODE (XEXP (cnd, 1)) == CONST_INT)
{
rtx implicit_set =
/* If this is a CALL_INSN, all call used registers are stored with
unknown values. */
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
{
/* Some targets do argument pushes without adding REG_INC notes. */
- if (GET_CODE (dst) == MEM)
+ if (MEM_P (dst))
{
dst = XEXP (dst, 0);
if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
reg_set_luid[REGNO (XEXP (dst, 0))] = 0;
return;
}
- if (GET_CODE (dst) != REG)
+ if (!REG_P (dst))
return;
regno += REGNO (dst);
if (SCALAR_INT_MODE_P (mode)
- && HARD_REGNO_NREGS (regno, mode) == 1 && GET_CODE (set) == SET
+ && hard_regno_nregs[regno][mode] == 1 && GET_CODE (set) == SET
&& GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
&& GET_CODE (SET_DEST (set)) != SIGN_EXTRACT
&& GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
switch (GET_CODE (src))
{
case PLUS:
- if (GET_CODE (XEXP (src, 0)) == REG)
+ if (REG_P (XEXP (src, 0)))
{
base_reg = XEXP (src, 0);
if (GET_CODE (XEXP (src, 1)) == CONST_INT)
offset = INTVAL (XEXP (src, 1));
- else if (GET_CODE (XEXP (src, 1)) == REG
+ else if (REG_P (XEXP (src, 1))
&& (reg_set_luid[REGNO (XEXP (src, 1))]
> move2add_last_label_luid)
&& (MODES_OK_FOR_MOVE2ADD
}
else
{
- unsigned int endregno = regno + HARD_REGNO_NREGS (regno, mode);
+ unsigned int endregno = regno + hard_regno_nregs[regno][mode];
for (i = regno; i < endregno; i++)
/* Reset the information about this register. */