#include "toplev.h"
#include "basic-block.h"
#include "output.h"
+#include "reload.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
through this one. (The only exception is in combine.c.) */
int
-recog_memoized (insn)
+recog_memoized_1 (insn)
rtx insn;
{
if (INSN_CODE (insn) < 0)
newpat
= gen_rtx_PARALLEL (VOIDmode,
- gen_rtvec (XVECLEN (pat, 0) - 1));
+ rtvec_alloc (XVECLEN (pat, 0) - 1));
for (j = 0; j < XVECLEN (newpat, 0); j++)
XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
}
register int i, j;
register const char *fmt;
register rtx x = *loc;
- enum rtx_code code = GET_CODE (x);
+ enum rtx_code code;
+ if (!x)
+ return;
+ code = GET_CODE (x);
/* X matches FROM if it is the same rtx or they are both referring to the
same register in the same mode. Avoid calling rtx_equal_p unless the
operands look similar. */
/* In these cases, the operation to be performed depends on the mode
of the operand. If we are replacing the operand with a VOIDmode
constant, we lose the information. So try to simplify the operation
- in that case. If it fails, substitute in something that we know
- won't be recognized. */
+ in that case. */
if (GET_MODE (to) == VOIDmode
- && (XEXP (x, 0) == from
- || (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (from) == REG
- && GET_MODE (XEXP (x, 0)) == GET_MODE (from)
- && REGNO (XEXP (x, 0)) == REGNO (from))))
+ && (rtx_equal_p (XEXP (x, 0), from)
+ || (GET_CODE (XEXP (x, 0)) == SUBREG
+ && rtx_equal_p (SUBREG_REG (XEXP (x, 0)), from))))
{
- rtx new = simplify_unary_operation (code, GET_MODE (x), to,
- GET_MODE (from));
- if (new == 0)
+ rtx new = NULL_RTX;
+
+ /* If there is a subreg involved, crop to the portion of the
+ constant that we are interested in. */
+ if (GET_CODE (XEXP (x, 0)) == SUBREG)
+ to = operand_subword (to, SUBREG_WORD (XEXP (x, 0)),
+ 0, GET_MODE (from));
+
+ /* If the above didn't fail, perform the extension from the
+ mode of the operand (and not the mode of FROM). */
+ if (to)
+ new = simplify_unary_operation (code, GET_MODE (x), to,
+ GET_MODE (XEXP (x, 0)));
+
+ /* If any of the above failed, substitute in something that
+ we know won't be recognized. */
+ if (!new)
new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
validate_change (object, loc, new, 1);
break;
case SUBREG:
+ /* If we are replacing the operand with a constant, attempt to simplify it
+ to a non-SUBREG expression.  We can't do this later, since the
+ information about the inner mode may be lost. */
+ if (CONSTANT_P (to) && rtx_equal_p (SUBREG_REG (x), from))
+ {
+ if (GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
+ && GET_MODE_SIZE (GET_MODE (from)) > UNITS_PER_WORD
+ && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
+ {
+ rtx temp = operand_subword (to, SUBREG_WORD (x),
+ 0, GET_MODE (from));
+ if (temp)
+ {
+ validate_change (object, loc, temp, 1);
+ return;
+ }
+ }
+ if (subreg_lowpart_p (x))
+ {
+ rtx new = gen_lowpart_if_possible (GET_MODE (x), to);
+ if (new)
+ {
+ validate_change (object, loc, new, 1);
+ return;
+ }
+ }
+
+ /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
+ since we are saying that the high bits don't matter. */
+ if (GET_MODE (to) == VOIDmode
+ && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (from)))
+ {
+ validate_change (object, loc, to, 1);
+ return;
+ }
+ }
+
+ /* Changing mode twice with SUBREG => just change it once,
+ or not at all if changing back to starting mode. */
+ if (GET_CODE (to) == SUBREG
+ && rtx_equal_p (SUBREG_REG (x), from))
+ {
+ if (GET_MODE (x) == GET_MODE (SUBREG_REG (to))
+ && SUBREG_WORD (x) == 0 && SUBREG_WORD (to) == 0)
+ {
+ validate_change (object, loc, SUBREG_REG (to), 1);
+ return;
+ }
+
+ validate_change (object, loc,
+ gen_rtx_SUBREG (GET_MODE (x), SUBREG_REG (to),
+ SUBREG_WORD (x) + SUBREG_WORD (to)), 1);
+ return;
+ }
+
/* If we have a SUBREG of a register that we are replacing and we are
replacing it with a MEM, make a new MEM and try replacing the
SUBREG with it. Don't do this if the MEM has a mode-dependent address
or if we would be widening it. */
- if (SUBREG_REG (x) == from
- && GET_CODE (from) == REG
+ if (GET_CODE (from) == REG
&& GET_CODE (to) == MEM
+ && rtx_equal_p (SUBREG_REG (x), from)
&& ! mode_dependent_address_p (XEXP (to, 0))
&& ! MEM_VOLATILE_P (to)
&& GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
likely to be an insertion operation; if it was, nothing bad will
happen, we might just fail in some cases). */
- if (XEXP (x, 0) == from && GET_CODE (from) == REG && GET_CODE (to) == MEM
+ if (GET_CODE (from) == REG && GET_CODE (to) == MEM
+ && rtx_equal_p (XEXP (x, 0), from)
&& GET_CODE (XEXP (x, 1)) == CONST_INT
&& GET_CODE (XEXP (x, 2)) == CONST_INT
&& ! mode_dependent_address_p (XEXP (to, 0))
for (next = next_nonnote_insn (insn);
next != 0 && GET_CODE (next) != CODE_LABEL;
next = next_nonnote_insn (next))
- if (GET_RTX_CLASS (GET_CODE (next)) == 'i' && dead_or_set_p (next, dest))
+ if (INSN_P (next) && dead_or_set_p (next, dest))
{
for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
if (XEXP (link, 0) == insn)
asm_noperands (body)
rtx body;
{
- if (GET_CODE (body) == ASM_OPERANDS)
- /* No output operands: return number of input operands. */
- return ASM_OPERANDS_INPUT_LENGTH (body);
- if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
- /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
- return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
- else if (GET_CODE (body) == PARALLEL
- && GET_CODE (XVECEXP (body, 0, 0)) == SET
- && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
+ switch (GET_CODE (body))
{
- /* Multiple output operands, or 1 output plus some clobbers:
- body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
- int i;
- int n_sets;
-
- /* Count backwards through CLOBBERs to determine number of SETs. */
- for (i = XVECLEN (body, 0); i > 0; i--)
+ case ASM_OPERANDS:
+ /* No output operands: return number of input operands. */
+ return ASM_OPERANDS_INPUT_LENGTH (body);
+ case SET:
+ if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
+ /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
+ return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
+ else
+ return -1;
+ case PARALLEL:
+ if (GET_CODE (XVECEXP (body, 0, 0)) == SET
+ && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
{
- if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
- break;
- if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
- return -1;
- }
+ /* Multiple output operands, or 1 output plus some clobbers:
+ body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
+ int i;
+ int n_sets;
- /* N_SETS is now number of output operands. */
- n_sets = i;
+ /* Count backwards through CLOBBERs to determine number of SETs. */
+ for (i = XVECLEN (body, 0); i > 0; i--)
+ {
+ if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
+ break;
+ if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
+ return -1;
+ }
- /* Verify that all the SETs we have
- came from a single original asm_operands insn
- (so that invalid combinations are blocked). */
- for (i = 0; i < n_sets; i++)
- {
- rtx elt = XVECEXP (body, 0, i);
- if (GET_CODE (elt) != SET)
- return -1;
- if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
- return -1;
- /* If these ASM_OPERANDS rtx's came from different original insns
- then they aren't allowed together. */
- if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
- != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
- return -1;
+ /* N_SETS is now number of output operands. */
+ n_sets = i;
+
+ /* Verify that all the SETs we have
+ came from a single original asm_operands insn
+ (so that invalid combinations are blocked). */
+ for (i = 0; i < n_sets; i++)
+ {
+ rtx elt = XVECEXP (body, 0, i);
+ if (GET_CODE (elt) != SET)
+ return -1;
+ if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
+ return -1;
+ /* If these ASM_OPERANDS rtx's came from different original insns
+ then they aren't allowed together. */
+ if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
+ != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
+ return -1;
+ }
+ return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
+ + n_sets);
}
- return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
- + n_sets);
- }
- else if (GET_CODE (body) == PARALLEL
- && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
- {
- /* 0 outputs, but some clobbers:
- body is [(asm_operands ...) (clobber (reg ...))...]. */
- int i;
+ else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
+ {
+ /* 0 outputs, but some clobbers:
+ body is [(asm_operands ...) (clobber (reg ...))...]. */
+ int i;
- /* Make sure all the other parallel things really are clobbers. */
- for (i = XVECLEN (body, 0) - 1; i > 0; i--)
- if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
- return -1;
+ /* Make sure all the other parallel things really are clobbers. */
+ for (i = XVECLEN (body, 0) - 1; i > 0; i--)
+ if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
+ return -1;
- return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
+ return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
+ }
+ else
+ return -1;
+ default:
+ return -1;
}
- else
- return -1;
}
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
while (*constraint)
{
- switch (*constraint++)
+ char c = *constraint++;
+ switch (c)
{
case '=':
case '+':
return 1;
break;
+ default:
+ /* For all other letters, we first check for a register class,
+ otherwise it is an EXTRA_CONSTRAINT. */
+ if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
+ {
+ case 'r':
+ if (GET_MODE (op) == BLKmode)
+ break;
+ if (register_operand (op, VOIDmode))
+ return 1;
+ }
#ifdef EXTRA_CONSTRAINT
- case 'Q':
- if (EXTRA_CONSTRAINT (op, 'Q'))
- return 1;
- break;
- case 'R':
- if (EXTRA_CONSTRAINT (op, 'R'))
- return 1;
- break;
- case 'S':
- if (EXTRA_CONSTRAINT (op, 'S'))
+ if (EXTRA_CONSTRAINT (op, c))
return 1;
- break;
- case 'T':
- if (EXTRA_CONSTRAINT (op, 'T'))
- return 1;
- break;
- case 'U':
- if (EXTRA_CONSTRAINT (op, 'U'))
- return 1;
- break;
#endif
-
- case 'r':
- default:
- if (GET_MODE (op) == BLKmode)
- break;
- if (register_operand (op, VOIDmode))
- return 1;
break;
}
}
abort ();
}
\f
+/* Like extract_insn, but save the extracted insn so we don't extract it
+   again when called repeatedly for the same insn, expecting that
+   recog_data still contains valid information.  This is used primarily
+   by the gen_attr infrastructure, which often re-extracts the same insn
+   over and over.  */
+void
+extract_insn_cached (insn)
+ rtx insn;
+{
+ if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
+ return;
+ extract_insn (insn);
+ recog_data.insn = insn;
+}
+/* Do cached extract_insn, constrain_operands and complain about failures.
+   If the operands have already been constrained (which_alternative != -1),
+   skip the re-check.  Used by insn_attrtab.  */
+void
+extract_constrain_insn_cached (insn)
+ rtx insn;
+{
+ extract_insn_cached (insn);
+ if (which_alternative == -1
+ && !constrain_operands (reload_completed))
+ fatal_insn_not_found (insn);
+}
+/* Cached variant of constrain_operands.  If the current insn's operands
+   were already constrained (which_alternative != -1), report success
+   without re-checking; otherwise run constrain_operands with STRICT.  */
+int
+constrain_operands_cached (strict)
+ int strict;
+{
+ if (which_alternative == -1)
+ return constrain_operands (strict);
+ else
+ return 1;
+}
+\f
/* Analyze INSN and fill in recog_data. */
void
int noperands;
rtx body = PATTERN (insn);
+ recog_data.insn = NULL;
recog_data.n_operands = 0;
recog_data.n_alternatives = 0;
recog_data.n_dups = 0;
+ which_alternative = -1;
switch (GET_CODE (body))
{
return;
case SET:
+ if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
+ goto asm_insn;
+ else
+ goto normal_insn;
case PARALLEL:
+ if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
+ && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
+ || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
+ goto asm_insn;
+ else
+ goto normal_insn;
case ASM_OPERANDS:
+ asm_insn:
recog_data.n_operands = noperands = asm_noperands (body);
if (noperands >= 0)
{
}
break;
}
-
- /* FALLTHROUGH */
+ fatal_insn_not_found (insn);
default:
+ normal_insn:
/* Ordinary insn: recognize it, get the operands via insn_extract
and get the constraints. */
{
recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
+ /* VOIDmode match_operands get their mode from the real operand. */
+ if (recog_data.operand_mode[i] == VOIDmode)
+ recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
}
}
for (i = 0; i < noperands; i++)
case 's': case 'i': case 'n':
case 'I': case 'J': case 'K': case 'L':
case 'M': case 'N': case 'O': case 'P':
-#ifdef EXTRA_CONSTRAINT
- case 'Q': case 'R': case 'S': case 'T': case 'U':
-#endif
/* These don't say anything we care about. */
break;
break;
case 'p':
+ op_alt[j].is_address = 1;
op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
break;
struct funny_match funny_match[MAX_RECOG_OPERANDS];
int funny_match_index;
+ which_alternative = 0;
if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
return 1;
matching_operands[c] = -1;
}
- which_alternative = 0;
-
- while (which_alternative < recog_data.n_alternatives)
+ do
{
register int opno;
int lose = 0;
win = 1;
break;
- case 'r':
- if (strict < 0
- || (strict == 0
- && GET_CODE (op) == REG
- && REGNO (op) >= FIRST_PSEUDO_REGISTER)
- || (strict == 0 && GET_CODE (op) == SCRATCH)
- || (GET_CODE (op) == REG
- && ((GENERAL_REGS == ALL_REGS
- && REGNO (op) < FIRST_PSEUDO_REGISTER)
- || reg_fits_class_p (op, GENERAL_REGS,
- offset, mode))))
- win = 1;
- break;
-
case 'X':
/* This is used for a MATCH_SCRATCH in the cases when
we don't actually need anything. So anything goes
win = 1;
break;
-#ifdef EXTRA_CONSTRAINT
- case 'Q':
- case 'R':
- case 'S':
- case 'T':
- case 'U':
- if (EXTRA_CONSTRAINT (op, c))
- win = 1;
- break;
-#endif
-
case 'V':
if (GET_CODE (op) == MEM
&& ((strict > 0 && ! offsettable_memref_p (op))
break;
default:
- if (strict < 0
- || (strict == 0
- && GET_CODE (op) == REG
- && REGNO (op) >= FIRST_PSEUDO_REGISTER)
- || (strict == 0 && GET_CODE (op) == SCRATCH)
- || (GET_CODE (op) == REG
- && reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
- offset, mode)))
- win = 1;
+ {
+ enum reg_class class;
+
+ class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
+ if (class != NO_REGS)
+ {
+ if (strict < 0
+ || (strict == 0
+ && GET_CODE (op) == REG
+ && REGNO (op) >= FIRST_PSEUDO_REGISTER)
+ || (strict == 0 && GET_CODE (op) == SCRATCH)
+ || (GET_CODE (op) == REG
+ && reg_fits_class_p (op, class, offset, mode)))
+ win = 1;
+ }
+#ifdef EXTRA_CONSTRAINT
+ else if (EXTRA_CONSTRAINT (op, c))
+ win = 1;
+#endif
+ break;
+ }
}
constraints[opno] = p;
which_alternative++;
}
+ while (which_alternative < recog_data.n_alternatives);
+ which_alternative = -1;
/* If we are about to reject this, but we are not to test strictly,
try a very loose test. Only return failure if it fails also. */
if (strict == 0)
changed = 1;
/* try_split returns the NOTE that INSN became. */
- first = NEXT_INSN (first);
PUT_CODE (insn, NOTE);
NOTE_SOURCE_FILE (insn) = 0;
NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
+ /* ??? Coddle to md files that generate subregs in post-
+ reload splitters instead of computing the proper
+ hard register. */
+ if (reload_completed && first != last)
+ {
+ first = NEXT_INSN (first);
+ while (1)
+ {
+ if (INSN_P (first))
+ cleanup_subreg_operands (first);
+ if (first == last)
+ break;
+ first = NEXT_INSN (first);
+ }
+ }
+
if (insn == bb->end)
{
bb->end = last;
COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
#ifdef HAVE_conditional_execution
- pbi = init_propagate_block_info (bb, live, NULL, 0);
+ pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
#else
- pbi = init_propagate_block_info (bb, live, NULL, PROP_DEATH_NOTES);
+ pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
#endif
for (insn = bb->end; ; insn = prev)