#include "insn-attr.h"
#include "insn-flags.h"
#include "insn-codes.h"
+#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
-#include "hard-reg-set.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
-#include "resource.h"
+#include "reload.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
through this one. (The only exception is in combine.c.) */
int
-recog_memoized (insn)
+recog_memoized_1 (insn)
rtx insn;
{
if (INSN_CODE (insn) < 0)
newpat
= gen_rtx_PARALLEL (VOIDmode,
- gen_rtvec (XVECLEN (pat, 0) - 1));
+ rtvec_alloc (XVECLEN (pat, 0) - 1));
for (j = 0; j < XVECLEN (newpat, 0); j++)
XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
}
register int i, j;
register const char *fmt;
register rtx x = *loc;
- enum rtx_code code = GET_CODE (x);
+ enum rtx_code code;
+ if (!x)
+ return;
+ code = GET_CODE (x);
/* X matches FROM if it is the same rtx or they are both referring to the
same register in the same mode. Avoid calling rtx_equal_p unless the
operands look similar. */
/* In these cases, the operation to be performed depends on the mode
of the operand. If we are replacing the operand with a VOIDmode
constant, we lose the information. So try to simplify the operation
- in that case. If it fails, substitute in something that we know
- won't be recognized. */
+ in that case. */
if (GET_MODE (to) == VOIDmode
- && (XEXP (x, 0) == from
- || (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (from) == REG
- && GET_MODE (XEXP (x, 0)) == GET_MODE (from)
- && REGNO (XEXP (x, 0)) == REGNO (from))))
+ && (rtx_equal_p (XEXP (x, 0), from)
+ || (GET_CODE (XEXP (x, 0)) == SUBREG
+ && rtx_equal_p (SUBREG_REG (XEXP (x, 0)), from))))
{
- rtx new = simplify_unary_operation (code, GET_MODE (x), to,
- GET_MODE (from));
- if (new == 0)
+ rtx new = NULL_RTX;
+
+ /* If there is a subreg involved, crop to the portion of the
+ constant that we are interested in. */
+ if (GET_CODE (XEXP (x, 0)) == SUBREG)
+ to = operand_subword (to, SUBREG_WORD (XEXP (x, 0)),
+ 0, GET_MODE (from));
+
+ /* If the above didn't fail, perform the extension from the
+ mode of the operand (and not the mode of FROM). */
+ if (to)
+ new = simplify_unary_operation (code, GET_MODE (x), to,
+ GET_MODE (XEXP (x, 0)));
+
+ /* If any of the above failed, substitute in something that
+ we know won't be recognized. */
+ if (!new)
new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
validate_change (object, loc, new, 1);
break;
case SUBREG:
+      /* In case we are replacing by a constant, attempt to simplify it
+	 to a non-SUBREG expression.  We can't do this later, since the
+	 information about the inner mode may be lost.  */
+ if (CONSTANT_P (to) && rtx_equal_p (SUBREG_REG (x), from))
+ {
+ if (GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
+ && GET_MODE_SIZE (GET_MODE (from)) > UNITS_PER_WORD
+ && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
+ {
+ rtx temp = operand_subword (to, SUBREG_WORD (x),
+ 0, GET_MODE (from));
+ if (temp)
+ {
+ validate_change (object, loc, temp, 1);
+ return;
+ }
+ }
+ if (subreg_lowpart_p (x))
+ {
+ rtx new = gen_lowpart_if_possible (GET_MODE (x), to);
+ if (new)
+ {
+ validate_change (object, loc, new, 1);
+ return;
+ }
+ }
+
+ /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
+ since we are saying that the high bits don't matter. */
+ if (GET_MODE (to) == VOIDmode
+ && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (from)))
+ {
+ validate_change (object, loc, to, 1);
+ return;
+ }
+ }
+
+ /* Changing mode twice with SUBREG => just change it once,
+ or not at all if changing back to starting mode. */
+ if (GET_CODE (to) == SUBREG
+ && rtx_equal_p (SUBREG_REG (x), from))
+ {
+ if (GET_MODE (x) == GET_MODE (SUBREG_REG (to))
+ && SUBREG_WORD (x) == 0 && SUBREG_WORD (to) == 0)
+ {
+ validate_change (object, loc, SUBREG_REG (to), 1);
+ return;
+ }
+
+ validate_change (object, loc,
+ gen_rtx_SUBREG (GET_MODE (x), SUBREG_REG (to),
+ SUBREG_WORD (x) + SUBREG_WORD (to)), 1);
+ return;
+ }
+
/* If we have a SUBREG of a register that we are replacing and we are
replacing it with a MEM, make a new MEM and try replacing the
SUBREG with it. Don't do this if the MEM has a mode-dependent address
or if we would be widening it. */
- if (SUBREG_REG (x) == from
- && GET_CODE (from) == REG
+ if (GET_CODE (from) == REG
&& GET_CODE (to) == MEM
+ && rtx_equal_p (SUBREG_REG (x), from)
&& ! mode_dependent_address_p (XEXP (to, 0))
&& ! MEM_VOLATILE_P (to)
&& GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
likely to be an insertion operation; if it was, nothing bad will
happen, we might just fail in some cases). */
- if (XEXP (x, 0) == from && GET_CODE (from) == REG && GET_CODE (to) == MEM
+ if (GET_CODE (from) == REG && GET_CODE (to) == MEM
+ && rtx_equal_p (XEXP (x, 0), from)
&& GET_CODE (XEXP (x, 1)) == CONST_INT
&& GET_CODE (XEXP (x, 2)) == CONST_INT
&& ! mode_dependent_address_p (XEXP (to, 0))
}
}
+/* Try replacing every occurrence of FROM in subexpression LOC of INSN
+   with TO.  After all changes have been made, validate by seeing
+   if INSN is still valid.  Return the result of apply_change_group
+   (nonzero if the whole group of changes was accepted).  */
+
+int
+validate_replace_rtx_subexp (from, to, insn, loc)
+     rtx from, to, insn, *loc;
+{
+  /* Queue all replacements within *LOC, then validate them as a group.  */
+  validate_replace_rtx_1 (loc, from, to, insn);
+  return apply_change_group ();
+}
+
/* Try replacing every occurrence of FROM in INSN with TO. After all
changes have been made, validate by seeing if INSN is still valid. */
for (next = next_nonnote_insn (insn);
next != 0 && GET_CODE (next) != CODE_LABEL;
next = next_nonnote_insn (next))
- if (GET_RTX_CLASS (GET_CODE (next)) == 'i' && dead_or_set_p (next, dest))
+ if (INSN_P (next) && dead_or_set_p (next, dest))
{
for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
if (XEXP (link, 0) == insn)
return 0;
if (CONSTANT_P (op))
- return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
+ return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
+ || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
&& (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
if (code == MEM)
{
register rtx y = XEXP (op, 0);
+
if (! volatile_ok && MEM_VOLATILE_P (op))
return 0;
+
if (GET_CODE (y) == ADDRESSOF)
return 1;
+
/* Use the mem's mode, since it will be reloaded thus. */
mode = GET_MODE (op);
GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
return general_operand (op, mode);
-#ifdef CLASS_CANNOT_CHANGE_SIZE
+#ifdef CLASS_CANNOT_CHANGE_MODE
if (GET_CODE (SUBREG_REG (op)) == REG
&& REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
- && TEST_HARD_REG_BIT (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
- REGNO (SUBREG_REG (op)))
- && (GET_MODE_SIZE (mode)
- != GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
+ && (TEST_HARD_REG_BIT
+ (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
+ REGNO (SUBREG_REG (op))))
+ && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (SUBREG_REG (op)))
&& GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
&& GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
return 0;
&& GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
return 0;
- return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
+ return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
+ || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
&& (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
asm_noperands (body)
rtx body;
{
- if (GET_CODE (body) == ASM_OPERANDS)
- /* No output operands: return number of input operands. */
- return ASM_OPERANDS_INPUT_LENGTH (body);
- if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
- /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
- return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
- else if (GET_CODE (body) == PARALLEL
- && GET_CODE (XVECEXP (body, 0, 0)) == SET
- && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
+ switch (GET_CODE (body))
{
- /* Multiple output operands, or 1 output plus some clobbers:
- body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
- int i;
- int n_sets;
-
- /* Count backwards through CLOBBERs to determine number of SETs. */
- for (i = XVECLEN (body, 0); i > 0; i--)
+ case ASM_OPERANDS:
+ /* No output operands: return number of input operands. */
+ return ASM_OPERANDS_INPUT_LENGTH (body);
+ case SET:
+ if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
+ /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
+ return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
+ else
+ return -1;
+ case PARALLEL:
+ if (GET_CODE (XVECEXP (body, 0, 0)) == SET
+ && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
{
- if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
- break;
- if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
- return -1;
- }
+ /* Multiple output operands, or 1 output plus some clobbers:
+ body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
+ int i;
+ int n_sets;
- /* N_SETS is now number of output operands. */
- n_sets = i;
+ /* Count backwards through CLOBBERs to determine number of SETs. */
+ for (i = XVECLEN (body, 0); i > 0; i--)
+ {
+ if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
+ break;
+ if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
+ return -1;
+ }
- /* Verify that all the SETs we have
- came from a single original asm_operands insn
- (so that invalid combinations are blocked). */
- for (i = 0; i < n_sets; i++)
- {
- rtx elt = XVECEXP (body, 0, i);
- if (GET_CODE (elt) != SET)
- return -1;
- if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
- return -1;
- /* If these ASM_OPERANDS rtx's came from different original insns
- then they aren't allowed together. */
- if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
- != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
- return -1;
+ /* N_SETS is now number of output operands. */
+ n_sets = i;
+
+ /* Verify that all the SETs we have
+ came from a single original asm_operands insn
+ (so that invalid combinations are blocked). */
+ for (i = 0; i < n_sets; i++)
+ {
+ rtx elt = XVECEXP (body, 0, i);
+ if (GET_CODE (elt) != SET)
+ return -1;
+ if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
+ return -1;
+ /* If these ASM_OPERANDS rtx's came from different original insns
+ then they aren't allowed together. */
+ if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
+ != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
+ return -1;
+ }
+ return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
+ + n_sets);
}
- return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
- + n_sets);
- }
- else if (GET_CODE (body) == PARALLEL
- && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
- {
- /* 0 outputs, but some clobbers:
- body is [(asm_operands ...) (clobber (reg ...))...]. */
- int i;
+ else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
+ {
+ /* 0 outputs, but some clobbers:
+ body is [(asm_operands ...) (clobber (reg ...))...]. */
+ int i;
- /* Make sure all the other parallel things really are clobbers. */
- for (i = XVECLEN (body, 0) - 1; i > 0; i--)
- if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
- return -1;
+ /* Make sure all the other parallel things really are clobbers. */
+ for (i = XVECLEN (body, 0) - 1; i > 0; i--)
+ if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
+ return -1;
- return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
+ return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
+ }
+ else
+ return -1;
+ default:
+ return -1;
}
- else
- return -1;
}
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
while (*constraint)
{
- switch (*constraint++)
+ char c = *constraint++;
+ switch (c)
{
case '=':
case '+':
return 1;
break;
+ default:
+ /* For all other letters, we first check for a register class,
+ otherwise it is an EXTRA_CONSTRAINT. */
+ if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
+ {
+ case 'r':
+ if (GET_MODE (op) == BLKmode)
+ break;
+ if (register_operand (op, VOIDmode))
+ return 1;
+ }
#ifdef EXTRA_CONSTRAINT
- case 'Q':
- if (EXTRA_CONSTRAINT (op, 'Q'))
- return 1;
- break;
- case 'R':
- if (EXTRA_CONSTRAINT (op, 'R'))
- return 1;
- break;
- case 'S':
- if (EXTRA_CONSTRAINT (op, 'S'))
+ if (EXTRA_CONSTRAINT (op, c))
return 1;
- break;
- case 'T':
- if (EXTRA_CONSTRAINT (op, 'T'))
- return 1;
- break;
- case 'U':
- if (EXTRA_CONSTRAINT (op, 'U'))
- return 1;
- break;
#endif
-
- case 'r':
- default:
- if (GET_MODE (op) == BLKmode)
- break;
- if (register_operand (op, VOIDmode))
- return 1;
break;
}
}
rtx *y2;
int (*addressp) PARAMS ((enum machine_mode, rtx)) =
(strictp ? strict_memory_address_p : memory_address_p);
+ unsigned int mode_sz = GET_MODE_SIZE (mode);
if (CONSTANT_ADDRESS_P (y))
return 1;
if (mode_dependent_address_p (y))
return 0;
+ /* ??? How much offset does an offsettable BLKmode reference need?
+ Clearly that depends on the situation in which it's being used.
+ However, the current situation in which we test 0xffffffff is
+ less than ideal. Caveat user. */
+ if (mode_sz == 0)
+ mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
+
/* If the expression contains a constant term,
see if it remains valid when max possible offset is added. */
int good;
y1 = *y2;
- *y2 = plus_constant (*y2, GET_MODE_SIZE (mode) - 1);
+ *y2 = plus_constant (*y2, mode_sz - 1);
/* Use QImode because an odd displacement may be automatically invalid
for any wider mode. But it should be valid for a single byte. */
good = (*addressp) (QImode, y);
return good;
}
- if (ycode == PRE_DEC || ycode == PRE_INC
- || ycode == POST_DEC || ycode == POST_INC)
+ if (GET_RTX_CLASS (ycode) == 'a')
return 0;
/* The offset added here is chosen as the maximum offset that
of the specified mode. We assume that if Y and Y+c are
valid addresses then so is Y+d for all 0<d<c. */
- z = plus_constant_for_output (y, GET_MODE_SIZE (mode) - 1);
+ z = plus_constant_for_output (y, mode_sz - 1);
/* Use QImode because an odd displacement may be automatically invalid
for any wider mode. But it should be valid for a single byte. */
return 0;
}
-/* Given an operand OP that is a valid memory reference
- which satisfies offsettable_memref_p,
- return a new memory reference whose address has been adjusted by OFFSET.
- OFFSET should be positive and less than the size of the object referenced.
-*/
+/* Given an operand OP that is a valid memory reference which
+ satisfies offsettable_memref_p, return a new memory reference whose
+ address has been adjusted by OFFSET. OFFSET should be positive and
+ less than the size of the object referenced. */
rtx
adj_offsettable_operand (op, offset)
{
new = gen_rtx_MEM (GET_MODE (op),
plus_constant_for_output (y, offset));
- RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
+ MEM_COPY_ATTRIBUTES (new, op);
return new;
}
}
new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
- RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
+ MEM_COPY_ATTRIBUTES (new, op);
return new;
}
abort ();
}
\f
+/* Like extract_insn, but save the insn extracted and don't extract it
+   again when called again for the same insn, expecting that recog_data
+   still contains the valid information.  This is used primarily by the
+   gen_attr infrastructure, which often extracts the same insn over and
+   over again.  */
+void
+extract_insn_cached (insn)
+     rtx insn;
+{
+  /* Cache hit: INSN was the insn most recently extracted and it has
+     been recognized, so recog_data is still valid for it.  */
+  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
+    return;
+  extract_insn (insn);
+  recog_data.insn = insn;
+}
+/* Do a cached extract_insn, constrain_operands and complain about failures.
+   Used by insn_attrtab.  */
+void
+extract_constrain_insn_cached (insn)
+     rtx insn;
+{
+  extract_insn_cached (insn);
+  /* which_alternative == -1 means constrain_operands has not yet run
+     for this extraction; run it now and die if no alternative
+     matches.  */
+  if (which_alternative == -1
+      && !constrain_operands (reload_completed))
+    fatal_insn_not_found (insn);
+}
+/* Do a cached constrain_operands.  Return 1 immediately if a matching
+   alternative is already known (which_alternative was set by an
+   earlier constrain_operands); otherwise run constrain_operands now
+   and return its result.  */
+int
+constrain_operands_cached (strict)
+     int strict;
+{
+  if (which_alternative == -1)
+    return constrain_operands (strict);
+  else
+    return 1;
+}
+\f
/* Analyze INSN and fill in recog_data. */
void
int noperands;
rtx body = PATTERN (insn);
+ recog_data.insn = NULL;
recog_data.n_operands = 0;
recog_data.n_alternatives = 0;
recog_data.n_dups = 0;
+ which_alternative = -1;
switch (GET_CODE (body))
{
return;
case SET:
+ if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
+ goto asm_insn;
+ else
+ goto normal_insn;
case PARALLEL:
+ if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
+ && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
+ || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
+ goto asm_insn;
+ else
+ goto normal_insn;
case ASM_OPERANDS:
+ asm_insn:
recog_data.n_operands = noperands = asm_noperands (body);
if (noperands >= 0)
{
}
break;
}
-
- /* FALLTHROUGH */
+ fatal_insn_not_found (insn);
default:
+ normal_insn:
/* Ordinary insn: recognize it, get the operands via insn_extract
and get the constraints. */
{
recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
+	  /* A VOIDmode match_operand gets its mode from its real operand.  */
+ if (recog_data.operand_mode[i] == VOIDmode)
+ recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
}
}
for (i = 0; i < noperands; i++)
case 's': case 'i': case 'n':
case 'I': case 'J': case 'K': case 'L':
case 'M': case 'N': case 'O': case 'P':
-#ifdef EXTRA_CONSTRAINT
- case 'Q': case 'R': case 'S': case 'T': case 'U':
-#endif
/* These don't say anything we care about. */
break;
break;
case 'p':
+ op_alt[j].is_address = 1;
op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
break;
struct funny_match funny_match[MAX_RECOG_OPERANDS];
int funny_match_index;
+ which_alternative = 0;
if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
return 1;
matching_operands[c] = -1;
}
- which_alternative = 0;
-
- while (which_alternative < recog_data.n_alternatives)
+ do
{
register int opno;
int lose = 0;
win = 1;
break;
- case 'r':
- if (strict < 0
- || (strict == 0
- && GET_CODE (op) == REG
- && REGNO (op) >= FIRST_PSEUDO_REGISTER)
- || (strict == 0 && GET_CODE (op) == SCRATCH)
- || (GET_CODE (op) == REG
- && ((GENERAL_REGS == ALL_REGS
- && REGNO (op) < FIRST_PSEUDO_REGISTER)
- || reg_fits_class_p (op, GENERAL_REGS,
- offset, mode))))
- win = 1;
- break;
-
case 'X':
/* This is used for a MATCH_SCRATCH in the cases when
we don't actually need anything. So anything goes
win = 1;
break;
-#ifdef EXTRA_CONSTRAINT
- case 'Q':
- case 'R':
- case 'S':
- case 'T':
- case 'U':
- if (EXTRA_CONSTRAINT (op, c))
- win = 1;
- break;
-#endif
-
case 'V':
if (GET_CODE (op) == MEM
&& ((strict > 0 && ! offsettable_memref_p (op))
break;
default:
- if (strict < 0
- || (strict == 0
- && GET_CODE (op) == REG
- && REGNO (op) >= FIRST_PSEUDO_REGISTER)
- || (strict == 0 && GET_CODE (op) == SCRATCH)
- || (GET_CODE (op) == REG
- && reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
- offset, mode)))
- win = 1;
+ {
+ enum reg_class class;
+
+ class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
+ if (class != NO_REGS)
+ {
+ if (strict < 0
+ || (strict == 0
+ && GET_CODE (op) == REG
+ && REGNO (op) >= FIRST_PSEUDO_REGISTER)
+ || (strict == 0 && GET_CODE (op) == SCRATCH)
+ || (GET_CODE (op) == REG
+ && reg_fits_class_p (op, class, offset, mode)))
+ win = 1;
+ }
+#ifdef EXTRA_CONSTRAINT
+ else if (EXTRA_CONSTRAINT (op, c))
+ win = 1;
+#endif
+ break;
+ }
}
constraints[opno] = p;
which_alternative++;
}
+ while (which_alternative < recog_data.n_alternatives);
+ which_alternative = -1;
/* If we are about to reject this, but we are not to test strictly,
try a very loose test. Only return failure if it fails also. */
if (strict == 0)
/* Can't use `next_real_insn' because that might go across
CODE_LABELS and short-out basic blocks. */
next = NEXT_INSN (insn);
- if (GET_CODE (insn) != INSN)
+ if (! INSN_P (insn))
;
/* Don't split no-op move insns. These should silently
changed = 1;
/* try_split returns the NOTE that INSN became. */
- first = NEXT_INSN (first);
PUT_CODE (insn, NOTE);
NOTE_SOURCE_FILE (insn) = 0;
NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
+ /* ??? Coddle to md files that generate subregs in post-
+ reload splitters instead of computing the proper
+ hard register. */
+ if (reload_completed && first != last)
+ {
+ first = NEXT_INSN (first);
+ while (1)
+ {
+ if (INSN_P (first))
+ cleanup_subreg_operands (first);
+ if (first == last)
+ break;
+ first = NEXT_INSN (first);
+ }
+ }
+
if (insn == bb->end)
{
bb->end = last;
}
\f
#ifdef HAVE_peephole2
-/* This is the last insn we'll allow recog_next_insn to consider. */
-static rtx recog_last_allowed_insn;
+struct peep2_insn_data
+{
+ rtx insn;
+ regset live_before;
+};
+
+static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
+static int peep2_current;
+
+/* A non-insn marker indicating the last insn of the block.
+ The live_before regset for this element is correct, indicating
+ global_live_at_end for the block. */
+#define PEEP2_EOB pc_rtx
+
+/* Return the Nth non-note insn after `current', or return NULL_RTX if it
+ does not exist. Used by the recognizer to find the next insn to match
+ in a multi-insn pattern. */
-/* Return the Nth non-note insn after INSN, or return NULL_RTX if it does
- not exist. Used by the recognizer to find the next insn to match in a
- multi-insn pattern. */
rtx
-recog_next_insn (insn, n)
- rtx insn;
+peep2_next_insn (n)
int n;
{
- if (insn != NULL_RTX)
+ if (n >= MAX_INSNS_PER_PEEP2 + 1)
+ abort ();
+
+ n += peep2_current;
+ if (n >= MAX_INSNS_PER_PEEP2 + 1)
+ n -= MAX_INSNS_PER_PEEP2 + 1;
+
+ if (peep2_insn_data[n].insn == PEEP2_EOB)
+ return NULL_RTX;
+ return peep2_insn_data[n].insn;
+}
+
+/* Return true if REGNO is dead before the Nth non-note insn
+   after `current'.  */
+
+int
+peep2_regno_dead_p (ofs, regno)
+     int ofs;
+     int regno;
+{
+  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
+    abort ();
+
+  /* peep2_insn_data is a circular buffer; wrap the index around.  */
+  ofs += peep2_current;
+  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
+    ofs -= MAX_INSNS_PER_PEEP2 + 1;
+
+  /* The slot must hold a recorded insn for its live_before set to
+     be meaningful.  */
+  if (peep2_insn_data[ofs].insn == NULL_RTX)
+    abort ();
+
+  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
+}
+
+/* Like peep2_regno_dead_p, but for a REG rtx: true only if every hard
+   register making up REG is dead.  */
+
+int
+peep2_reg_dead_p (ofs, reg)
+     int ofs;
+     rtx reg;
+{
+  int regno, n;
+
+  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
+    abort ();
+
+  /* peep2_insn_data is a circular buffer; wrap the index around.  */
+  ofs += peep2_current;
+  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
+    ofs -= MAX_INSNS_PER_PEEP2 + 1;
+
+  if (peep2_insn_data[ofs].insn == NULL_RTX)
+    abort ();
+
+  /* A multi-word register is dead only if all of its constituent
+     hard registers are dead.  */
+  regno = REGNO (reg);
+  n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
+  while (--n >= 0)
+    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
+      return 0;
+  return 1;
+}
+
+/* Try to find a hard register of mode MODE, matching the register class in
+ CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
+ remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
+ in which case the only condition is that the register must be available
+ before CURRENT_INSN.
+ Registers that already have bits set in REG_SET will not be considered.
+
+ If an appropriate register is available, it will be returned and the
+ corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
+ returned. */
+
+rtx
+peep2_find_free_register (from, to, class_str, mode, reg_set)
+ int from, to;
+ const char *class_str;
+ enum machine_mode mode;
+ HARD_REG_SET *reg_set;
+{
+ static int search_ofs;
+ enum reg_class class;
+ HARD_REG_SET live;
+ int i;
+
+ if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
+ abort ();
+
+ from += peep2_current;
+ if (from >= MAX_INSNS_PER_PEEP2 + 1)
+ from -= MAX_INSNS_PER_PEEP2 + 1;
+ to += peep2_current;
+ if (to >= MAX_INSNS_PER_PEEP2 + 1)
+ to -= MAX_INSNS_PER_PEEP2 + 1;
+
+ if (peep2_insn_data[from].insn == NULL_RTX)
+ abort ();
+ REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
+
+ while (from != to)
{
- while (n > 0)
+ HARD_REG_SET this_live;
+
+ if (++from >= MAX_INSNS_PER_PEEP2 + 1)
+ from = 0;
+ if (peep2_insn_data[from].insn == NULL_RTX)
+ abort ();
+ REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
+ IOR_HARD_REG_SET (live, this_live);
+ }
+
+ class = (class_str[0] == 'r' ? GENERAL_REGS
+ : REG_CLASS_FROM_LETTER (class_str[0]));
+
+ for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
+ {
+ int raw_regno, regno, success, j;
+
+ /* Distribute the free registers as much as possible. */
+ raw_regno = search_ofs + i;
+ if (raw_regno >= FIRST_PSEUDO_REGISTER)
+ raw_regno -= FIRST_PSEUDO_REGISTER;
+#ifdef REG_ALLOC_ORDER
+ regno = reg_alloc_order[raw_regno];
+#else
+ regno = raw_regno;
+#endif
+
+ /* Don't allocate fixed registers. */
+ if (fixed_regs[regno])
+ continue;
+ /* Make sure the register is of the right class. */
+ if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
+ continue;
+ /* And can support the mode we need. */
+ if (! HARD_REGNO_MODE_OK (regno, mode))
+ continue;
+ /* And that we don't create an extra save/restore. */
+ if (! call_used_regs[regno] && ! regs_ever_live[regno])
+ continue;
+ /* And we don't clobber traceback for noreturn functions. */
+ if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
+ && (! reload_completed || frame_pointer_needed))
+ continue;
+
+ success = 1;
+ for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
+ {
+ if (TEST_HARD_REG_BIT (*reg_set, regno + j)
+ || TEST_HARD_REG_BIT (live, regno + j))
+ {
+ success = 0;
+ break;
+ }
+ }
+ if (success)
{
- if (insn == recog_last_allowed_insn)
- return NULL_RTX;
+ for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
+ SET_HARD_REG_BIT (*reg_set, regno + j);
- insn = NEXT_INSN (insn);
- if (insn == NULL_RTX)
- break;
+ /* Start the next search with the next register. */
+ if (++raw_regno >= FIRST_PSEUDO_REGISTER)
+ raw_regno = 0;
+ search_ofs = raw_regno;
- if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
- n -= 1;
+ return gen_rtx_REG (mode, regno);
}
}
- return insn;
+ search_ofs = 0;
+ return NULL_RTX;
}
/* Perform the peephole2 optimization pass. */
+
void
peephole2_optimize (dump_file)
FILE *dump_file ATTRIBUTE_UNUSED;
{
+ regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
rtx insn, prev;
- int i, changed;
+ regset live;
+ int i, b;
+#ifdef HAVE_conditional_execution
sbitmap blocks;
+ int changed;
+#endif
- /* ??? TODO: Arrange with resource.c to start at bb->global_live_at_end
- and backtrack insn by insn as we proceed through the block. In this
- way we'll not need to keep searching forward from the beginning of
- basic blocks to find register life info. */
-
- init_resource_info (NULL);
+ /* Initialize the regsets we're going to use. */
+ for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
+ peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
+ live = INITIALIZE_REG_SET (rs_heads[i]);
+#ifdef HAVE_conditional_execution
blocks = sbitmap_alloc (n_basic_blocks);
sbitmap_zero (blocks);
changed = 0;
+#else
+ count_or_remove_death_notes (NULL, 1);
+#endif
- for (i = n_basic_blocks - 1; i >= 0; --i)
+ for (b = n_basic_blocks - 1; b >= 0; --b)
{
- basic_block bb = BASIC_BLOCK (i);
+ basic_block bb = BASIC_BLOCK (b);
+ struct propagate_block_info *pbi;
+
+ /* Indicate that all slots except the last holds invalid data. */
+ for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
+ peep2_insn_data[i].insn = NULL_RTX;
- /* Since we don't update life info until the very end, we can't
- allow matching instructions that we've replaced before. Walk
- backward through the basic block so that we don't have to
- care about subsequent life info; recog_last_allowed_insn to
- restrict how far forward we will allow the match to proceed. */
+ /* Indicate that the last slot contains live_after data. */
+ peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
+ peep2_current = MAX_INSNS_PER_PEEP2;
+
+ /* Start up propagation. */
+ COPY_REG_SET (live, bb->global_live_at_end);
+ COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
+
+#ifdef HAVE_conditional_execution
+ pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
+#else
+ pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
+#endif
- recog_last_allowed_insn = NEXT_INSN (bb->end);
for (insn = bb->end; ; insn = prev)
{
prev = PREV_INSN (insn);
- if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
+ if (INSN_P (insn))
{
- rtx try, last_insn;
-
- try = peephole2_insns (PATTERN (insn), insn, &last_insn);
+ rtx try;
+ int match_len;
+
+ /* Record this insn. */
+ if (--peep2_current < 0)
+ peep2_current = MAX_INSNS_PER_PEEP2;
+ peep2_insn_data[peep2_current].insn = insn;
+ propagate_one_insn (pbi, insn);
+ COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
+
+ /* Match the peephole. */
+ try = peephole2_insns (PATTERN (insn), insn, &match_len);
if (try != NULL)
{
- flow_delete_insn_chain (insn, last_insn);
+ i = match_len + peep2_current;
+ if (i >= MAX_INSNS_PER_PEEP2 + 1)
+ i -= MAX_INSNS_PER_PEEP2 + 1;
+
+ /* Replace the old sequence with the new. */
+ flow_delete_insn_chain (insn, peep2_insn_data[i].insn);
try = emit_insn_after (try, prev);
- if (last_insn == bb->end)
+ /* Adjust the basic block boundaries. */
+ if (peep2_insn_data[i].insn == bb->end)
bb->end = try;
if (insn == bb->head)
bb->head = NEXT_INSN (prev);
- recog_last_allowed_insn = NEXT_INSN (prev);
- SET_BIT (blocks, i);
+#ifdef HAVE_conditional_execution
+ /* With conditional execution, we cannot back up the
+ live information so easily, since the conditional
+ death data structures are not so self-contained.
+ So record that we've made a modification to this
+ block and update life information at the end. */
+ SET_BIT (blocks, b);
changed = 1;
+
+ for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
+ peep2_insn_data[i].insn = NULL_RTX;
+ peep2_insn_data[peep2_current].insn = PEEP2_EOB;
+#else
+ /* Back up lifetime information past the end of the
+ newly created sequence. */
+ if (++i >= MAX_INSNS_PER_PEEP2 + 1)
+ i = 0;
+ COPY_REG_SET (live, peep2_insn_data[i].live_before);
+
+ /* Update life information for the new sequence. */
+ do
+ {
+ if (INSN_P (try))
+ {
+ if (--i < 0)
+ i = MAX_INSNS_PER_PEEP2;
+ peep2_insn_data[i].insn = try;
+ propagate_one_insn (pbi, try);
+ COPY_REG_SET (peep2_insn_data[i].live_before, live);
+ }
+ try = PREV_INSN (try);
+ }
+ while (try != prev);
+
+ /* ??? Should verify that LIVE now matches what we
+ had before the new sequence. */
+
+ peep2_current = i;
+#endif
}
}
if (insn == bb->head)
break;
}
+
+ free_propagate_block_info (pbi);
}
- free_resource_info ();
+ for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
+ FREE_REG_SET (peep2_insn_data[i].live_before);
+ FREE_REG_SET (live);
- compute_bb_for_insn (get_max_uid ());
+#ifdef HAVE_conditional_execution
count_or_remove_death_notes (blocks, 1);
update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
-}
+ sbitmap_free (blocks);
#endif
+}
+#endif /* HAVE_peephole2 */