#include "toplev.h"
#include "basic-block.h"
#include "output.h"
+#include "reload.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
through this one. (The only exception is in combine.c.) */
int
-recog_memoized (insn)
+recog_memoized_1 (insn)
rtx insn;
{
if (INSN_CODE (insn) < 0)
newpat
= gen_rtx_PARALLEL (VOIDmode,
- gen_rtvec (XVECLEN (pat, 0) - 1));
+ rtvec_alloc (XVECLEN (pat, 0) - 1));
for (j = 0; j < XVECLEN (newpat, 0); j++)
XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
}
/* In these cases, the operation to be performed depends on the mode
of the operand. If we are replacing the operand with a VOIDmode
constant, we lose the information. So try to simplify the operation
- in that case. If it fails, substitute in something that we know
- won't be recognized. */
+ in that case. */
if (GET_MODE (to) == VOIDmode
- && rtx_equal_p (XEXP (x, 0), from))
+ && (rtx_equal_p (XEXP (x, 0), from)
+ || (GET_CODE (XEXP (x, 0)) == SUBREG
+ && rtx_equal_p (SUBREG_REG (XEXP (x, 0)), from))))
{
- rtx new = simplify_unary_operation (code, GET_MODE (x), to,
- GET_MODE (from));
- if (new == 0)
+ rtx new = NULL_RTX;
+
+ /* If there is a subreg involved, crop to the portion of the
+ constant that we are interested in. */
+ if (GET_CODE (XEXP (x, 0)) == SUBREG)
+ to = operand_subword (to, SUBREG_WORD (XEXP (x, 0)),
+ 0, GET_MODE (from));
+
+ /* If the above didn't fail, perform the extension from the
+ mode of the operand (and not the mode of FROM). */
+ if (to)
+ new = simplify_unary_operation (code, GET_MODE (x), to,
+ GET_MODE (XEXP (x, 0)));
+
+ /* If any of the above failed, substitute in something that
+ we know won't be recognized. */
+ if (!new)
new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
validate_change (object, loc, new, 1);
asm_noperands (body)
rtx body;
{
- if (GET_CODE (body) == ASM_OPERANDS)
- /* No output operands: return number of input operands. */
- return ASM_OPERANDS_INPUT_LENGTH (body);
- if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
- /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
- return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
- else if (GET_CODE (body) == PARALLEL
- && GET_CODE (XVECEXP (body, 0, 0)) == SET
- && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
+ switch (GET_CODE (body))
{
- /* Multiple output operands, or 1 output plus some clobbers:
- body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
- int i;
- int n_sets;
-
- /* Count backwards through CLOBBERs to determine number of SETs. */
- for (i = XVECLEN (body, 0); i > 0; i--)
+ case ASM_OPERANDS:
+ /* No output operands: return number of input operands. */
+ return ASM_OPERANDS_INPUT_LENGTH (body);
+ case SET:
+ if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
+ /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
+ return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
+ else
+ return -1;
+ case PARALLEL:
+ if (GET_CODE (XVECEXP (body, 0, 0)) == SET
+ && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
{
- if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
- break;
- if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
- return -1;
- }
+ /* Multiple output operands, or 1 output plus some clobbers:
+ body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
+ int i;
+ int n_sets;
- /* N_SETS is now number of output operands. */
- n_sets = i;
+ /* Count backwards through CLOBBERs to determine number of SETs. */
+ for (i = XVECLEN (body, 0); i > 0; i--)
+ {
+ if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
+ break;
+ if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
+ return -1;
+ }
- /* Verify that all the SETs we have
- came from a single original asm_operands insn
- (so that invalid combinations are blocked). */
- for (i = 0; i < n_sets; i++)
- {
- rtx elt = XVECEXP (body, 0, i);
- if (GET_CODE (elt) != SET)
- return -1;
- if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
- return -1;
- /* If these ASM_OPERANDS rtx's came from different original insns
- then they aren't allowed together. */
- if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
- != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
- return -1;
+ /* N_SETS is now number of output operands. */
+ n_sets = i;
+
+ /* Verify that all the SETs we have
+ came from a single original asm_operands insn
+ (so that invalid combinations are blocked). */
+ for (i = 0; i < n_sets; i++)
+ {
+ rtx elt = XVECEXP (body, 0, i);
+ if (GET_CODE (elt) != SET)
+ return -1;
+ if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
+ return -1;
+ /* If these ASM_OPERANDS rtx's came from different original insns
+ then they aren't allowed together. */
+ if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
+ != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
+ return -1;
+ }
+ return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
+ + n_sets);
}
- return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
- + n_sets);
- }
- else if (GET_CODE (body) == PARALLEL
- && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
- {
- /* 0 outputs, but some clobbers:
- body is [(asm_operands ...) (clobber (reg ...))...]. */
- int i;
+ else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
+ {
+ /* 0 outputs, but some clobbers:
+ body is [(asm_operands ...) (clobber (reg ...))...]. */
+ int i;
- /* Make sure all the other parallel things really are clobbers. */
- for (i = XVECLEN (body, 0) - 1; i > 0; i--)
- if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
- return -1;
+ /* Make sure all the other parallel things really are clobbers. */
+ for (i = XVECLEN (body, 0) - 1; i > 0; i--)
+ if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
+ return -1;
- return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
+ return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
+ }
+ else
+ return -1;
+ default:
+ return -1;
}
- else
- return -1;
}
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
abort ();
}
\f
+/* Like extract_insn, but saves the extracted insn and avoids extracting
+   it again when called repeatedly for the same insn, expecting that
+   recog_data still contains valid information.  This is used primarily
+   by the gen_attr infrastructure, which often re-extracts the same
+   insn over and over.  */
+void
+extract_insn_cached (insn)
+ rtx insn;
+{
+ /* Cache hit: INSN was extracted last time and is still recognized.  */
+ if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
+ return;
+ extract_insn (insn);
+ /* Record INSN so the next call for the same insn can skip the work.  */
+ recog_data.insn = insn;
+}
+/* Do a cached extract_insn, then constrain_operands, and complain (via
+   fatal_insn_not_found) when the constraints cannot be satisfied.
+   Used by insn_attrtab.  */
+void
+extract_constrain_insn_cached (insn)
+ rtx insn;
+{
+ extract_insn_cached (insn);
+ /* which_alternative != -1 means an alternative was already chosen —
+    presumably by an earlier constrain_operands — so skip re-checking.  */
+ if (which_alternative == -1
+ && !constrain_operands (reload_completed))
+ fatal_insn_not_found (insn);
+}
+/* Cached variant of constrain_operands: if a valid alternative has
+   already been chosen (which_alternative != -1), report success
+   without re-checking the constraints; otherwise delegate to
+   constrain_operands with the given STRICT level.  */
+int
+constrain_operands_cached (strict)
+ int strict;
+{
+ if (which_alternative == -1)
+ return constrain_operands (strict);
+ else
+ /* An alternative is already selected; constraints held before.  */
+ return 1;
+}
+\f
/* Analyze INSN and fill in recog_data. */
void
int noperands;
rtx body = PATTERN (insn);
+ recog_data.insn = NULL;
recog_data.n_operands = 0;
recog_data.n_alternatives = 0;
recog_data.n_dups = 0;
+ which_alternative = -1;
switch (GET_CODE (body))
{
return;
case SET:
+ if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
+ goto asm_insn;
+ else
+ goto normal_insn;
case PARALLEL:
+ if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
+ && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
+ || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
+ goto asm_insn;
+ else
+ goto normal_insn;
case ASM_OPERANDS:
+ asm_insn:
recog_data.n_operands = noperands = asm_noperands (body);
if (noperands >= 0)
{
}
break;
}
-
- /* FALLTHROUGH */
+ fatal_insn_not_found (insn);
default:
+ normal_insn:
/* Ordinary insn: recognize it, get the operands via insn_extract
and get the constraints. */
break;
case 'p':
+ op_alt[j].is_address = 1;
op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
break;
struct funny_match funny_match[MAX_RECOG_OPERANDS];
int funny_match_index;
+ which_alternative = 0;
if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
return 1;
matching_operands[c] = -1;
}
- which_alternative = 0;
-
- while (which_alternative < recog_data.n_alternatives)
+ do
{
register int opno;
int lose = 0;
which_alternative++;
}
+ while (which_alternative < recog_data.n_alternatives);
+ which_alternative = -1;
/* If we are about to reject this, but we are not to test strictly,
try a very loose test. Only return failure if it fails also. */
if (strict == 0)
changed = 1;
/* try_split returns the NOTE that INSN became. */
- first = NEXT_INSN (first);
PUT_CODE (insn, NOTE);
NOTE_SOURCE_FILE (insn) = 0;
NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
+ /* ??? Coddle to md files that generate subregs in post-
+ reload splitters instead of computing the proper
+ hard register. */
+ if (reload_completed && first != last)
+ {
+ first = NEXT_INSN (first);
+ while (1)
+ {
+ if (INSN_P (first))
+ cleanup_subreg_operands (first);
+ if (first == last)
+ break;
+ first = NEXT_INSN (first);
+ }
+ }
+
if (insn == bb->end)
{
bb->end = last;
COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
#ifdef HAVE_conditional_execution
- pbi = init_propagate_block_info (bb, live, NULL, 0);
+ pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
#else
- pbi = init_propagate_block_info (bb, live, NULL, PROP_DEATH_NOTES);
+ pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
#endif
for (insn = bb->end; ; insn = prev)