if (in_group == 0 && num_changes != 0)
abort ();
- *loc = new;
+ *loc = new;
/* Save the information describing this change. */
if (num_changes >= changes_allocated)
&& GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
&& asm_noperands (PATTERN (object)) < 0)
{
- rtx newpat;
-
- if (XVECLEN (pat, 0) == 2)
- newpat = XVECEXP (pat, 0, 0);
- else
- {
- int j;
-
- newpat
- = gen_rtx_PARALLEL (VOIDmode,
- rtvec_alloc (XVECLEN (pat, 0) - 1));
- for (j = 0; j < XVECLEN (newpat, 0); j++)
- XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
- }
-
- /* Add a new change to this group to replace the pattern
- with this new pattern. Then consider this change
- as having succeeded. The change we added will
- cause the entire call to fail if things remain invalid.
-
- Note that this can lose if a later change than the one
- we are processing specified &XVECEXP (PATTERN (object), 0, X)
- but this shouldn't occur. */
-
- validate_change (object, &PATTERN (object), newpat, 1);
- continue;
- }
+ rtx newpat;
+
+ if (XVECLEN (pat, 0) == 2)
+ newpat = XVECEXP (pat, 0, 0);
+ else
+ {
+ int j;
+
+ newpat
+ = gen_rtx_PARALLEL (VOIDmode,
+ rtvec_alloc (XVECLEN (pat, 0) - 1));
+ for (j = 0; j < XVECLEN (newpat, 0); j++)
+ XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
+ }
+
+ /* Add a new change to this group to replace the pattern
+ with this new pattern. Then consider this change
+ as having succeeded. The change we added will
+ cause the entire call to fail if things remain invalid.
+
+ Note that this can lose if a later change than the one
+ we are processing specified &XVECEXP (PATTERN (object), 0, X)
+ but this shouldn't occur. */
+
+ validate_change (object, &PATTERN (object), newpat, 1);
+ continue;
+ }
else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
/* If this insn is a CLOBBER or USE, it is always valid, but is
never recognized. */
}
}
-/* Return the number of changes so far in the current group. */
+/* Return the number of changes so far in the current group. */
int
num_validated_changes ()
rtx *loc;
rtx from, to, object;
{
- register int i, j;
- register const char *fmt;
- register rtx x = *loc;
+ int i, j;
+ const char *fmt;
+ rtx x = *loc;
enum rtx_code code;
enum machine_mode op0_mode = VOIDmode;
int prev_changes = num_changes;
return;
}
- /* Call ourseves recursivly to perform the replacements. */
+ /* Call ourselves recursively to perform the replacements. */
for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
{
validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
}
- /* In case we didn't substituted, there is nothing to do. */
+ /* If we didn't substitute, there is nothing more to do. */
if (num_changes == prev_changes)
return;
separated from this function. */
if (GET_CODE (XEXP (x, 1)) == CONST_INT)
validate_change (object, loc,
- plus_constant (XEXP (x, 0), INTVAL (XEXP (x, 1))), 1);
+ simplify_gen_binary
+ (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
break;
case MINUS:
if (GET_CODE (XEXP (x, 1)) == CONST_INT
new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
SUBREG_BYTE (x));
- /* Subregs of VOIDmode operands are incorect. */
+ /* Subregs of VOIDmode operands are incorrect. */
if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
if (new)
next_insn_tests_no_inequality (insn)
rtx insn;
{
- register rtx next = next_cc0_user (insn);
+ rtx next = next_cc0_user (insn);
/* If there is no next insn, we have to take the conservative choice. */
if (next == 0)
next_insns_test_no_inequality (insn)
rtx insn;
{
- register rtx next = NEXT_INSN (insn);
+ rtx next = NEXT_INSN (insn);
for (; next != 0; next = NEXT_INSN (next))
{
int
general_operand (op, mode)
- register rtx op;
+ rtx op;
enum machine_mode mode;
{
- register enum rtx_code code = GET_CODE (op);
+ enum rtx_code code = GET_CODE (op);
if (mode == VOIDmode)
mode = GET_MODE (op);
if (code == MEM)
{
- register rtx y = XEXP (op, 0);
+ rtx y = XEXP (op, 0);
if (! volatile_ok && MEM_VOLATILE_P (op))
return 0;
int
address_operand (op, mode)
- register rtx op;
+ rtx op;
enum machine_mode mode;
{
return memory_address_p (mode, op);
int
register_operand (op, mode)
- register rtx op;
+ rtx op;
enum machine_mode mode;
{
if (GET_MODE (op) != mode && mode != VOIDmode)
int
scratch_operand (op, mode)
- register rtx op;
+ rtx op;
enum machine_mode mode;
{
if (GET_MODE (op) != mode && mode != VOIDmode)
int
immediate_operand (op, mode)
- register rtx op;
+ rtx op;
enum machine_mode mode;
{
/* Don't accept CONST_INT or anything similar
int
const_int_operand (op, mode)
- register rtx op;
+ rtx op;
enum machine_mode mode;
{
if (GET_CODE (op) != CONST_INT)
int
const_double_operand (op, mode)
- register rtx op;
+ rtx op;
enum machine_mode mode;
{
/* Don't accept CONST_INT or anything similar
int
nonimmediate_operand (op, mode)
- register rtx op;
+ rtx op;
enum machine_mode mode;
{
return (general_operand (op, mode) && ! CONSTANT_P (op));
int
nonmemory_operand (op, mode)
- register rtx op;
+ rtx op;
enum machine_mode mode;
{
if (CONSTANT_P (op))
return 0;
return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
- || mode == VOIDmode)
+ || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
&& (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
int
memory_address_p (mode, addr)
enum machine_mode mode ATTRIBUTE_UNUSED;
- register rtx addr;
+ rtx addr;
{
if (GET_CODE (addr) == ADDRESSOF)
return 1;
int
memory_operand (op, mode)
- register rtx op;
+ rtx op;
enum machine_mode mode;
{
rtx inner;
int
indirect_operand (op, mode)
- register rtx op;
+ rtx op;
enum machine_mode mode;
{
/* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
if (! reload_completed
&& GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
{
- register int offset = SUBREG_BYTE (op);
+ int offset = SUBREG_BYTE (op);
rtx inner = SUBREG_REG (op);
if (mode != VOIDmode && GET_MODE (op) != mode)
int
comparison_operator (op, mode)
- register rtx op;
+ rtx op;
enum machine_mode mode;
{
return ((mode == VOIDmode || GET_MODE (op) == mode)
const char **constraints;
enum machine_mode *modes;
{
- register int i;
+ int i;
int noperands;
const char *template = 0;
template = ASM_OPERANDS_TEMPLATE (asmop);
}
else if (GET_CODE (body) == PARALLEL
- && GET_CODE (XVECEXP (body, 0, 0)) == SET)
+ && GET_CODE (XVECEXP (body, 0, 0)) == SET
+ && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
{
rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
proper matching constraint, but we can't actually fail
the check if they didn't. Indicate that results are
inconclusive. */
+ while (ISDIGIT (*constraint))
+ constraint++;
result = -1;
break;
find_constant_term_loc (p)
rtx *p;
{
- register rtx *tem;
- register enum rtx_code code = GET_CODE (*p);
+ rtx *tem;
+ enum rtx_code code = GET_CODE (*p);
/* If *P IS such a constant term, P is its location. */
offsettable_address_p (strictp, mode, y)
int strictp;
enum machine_mode mode;
- register rtx y;
+ rtx y;
{
- register enum rtx_code ycode = GET_CODE (y);
- register rtx z;
+ enum rtx_code ycode = GET_CODE (y);
+ rtx z;
rtx y1 = y;
rtx *y2;
int (*addressp) PARAMS ((enum machine_mode, rtx)) =
case '0': case '1': case '2': case '3': case '4':
case '5': case '6': case '7': case '8': case '9':
- op_alt[j].matches = c - '0';
- recog_op_alt[op_alt[j].matches][j].matched = i;
+ {
+ char *end;
+ op_alt[j].matches = strtoul (p - 1, &end, 10);
+ recog_op_alt[op_alt[j].matches][j].matched = i;
+ p = end;
+ }
break;
case 'm':
case 'p':
op_alt[j].is_address = 1;
- op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
+ op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
+ [(int) MODE_BASE_REG_CLASS (VOIDmode)];
break;
case 'g': case 'r':
const char *constraints[MAX_RECOG_OPERANDS];
int matching_operands[MAX_RECOG_OPERANDS];
int earlyclobber[MAX_RECOG_OPERANDS];
- register int c;
+ int c;
struct funny_match funny_match[MAX_RECOG_OPERANDS];
int funny_match_index;
do
{
- register int opno;
+ int opno;
int lose = 0;
funny_match_index = 0;
for (opno = 0; opno < recog_data.n_operands; opno++)
{
- register rtx op = recog_data.operand[opno];
+ rtx op = recog_data.operand[opno];
enum machine_mode mode = GET_MODE (op);
- register const char *p = constraints[opno];
+ const char *p = constraints[opno];
int offset = 0;
int win = 0;
int val;
case '0': case '1': case '2': case '3': case '4':
case '5': case '6': case '7': case '8': case '9':
+ {
+ /* This operand must be the same as a previous one.
+ This kind of constraint is used for instructions such
+ as add when they take only two operands.
- /* This operand must be the same as a previous one.
- This kind of constraint is used for instructions such
- as add when they take only two operands.
+ Note that the lower-numbered operand is passed first.
- Note that the lower-numbered operand is passed first.
+ If we are not testing strictly, assume that this
+ constraint will be satisfied. */
- If we are not testing strictly, assume that this constraint
- will be satisfied. */
- if (strict < 0)
- val = 1;
- else
- {
- rtx op1 = recog_data.operand[c - '0'];
- rtx op2 = recog_data.operand[opno];
+ char *end;
+ int match;
- /* A unary operator may be accepted by the predicate,
- but it is irrelevant for matching constraints. */
- if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
- op1 = XEXP (op1, 0);
- if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
- op2 = XEXP (op2, 0);
+ match = strtoul (p - 1, &end, 10);
+ p = end;
- val = operands_match_p (op1, op2);
- }
+ if (strict < 0)
+ val = 1;
+ else
+ {
+ rtx op1 = recog_data.operand[match];
+ rtx op2 = recog_data.operand[opno];
- matching_operands[opno] = c - '0';
- matching_operands[c - '0'] = opno;
+ /* A unary operator may be accepted by the predicate,
+ but it is irrelevant for matching constraints. */
+ if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
+ op1 = XEXP (op1, 0);
+ if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
+ op2 = XEXP (op2, 0);
- if (val != 0)
- win = 1;
- /* If output is *x and input is *--x,
- arrange later to change the output to *--x as well,
- since the output op is the one that will be printed. */
- if (val == 2 && strict > 0)
- {
- funny_match[funny_match_index].this = opno;
- funny_match[funny_match_index++].other = c - '0';
- }
+ val = operands_match_p (op1, op2);
+ }
+
+ matching_operands[opno] = match;
+ matching_operands[match] = opno;
+
+ if (val != 0)
+ win = 1;
+
+ /* If output is *x and input is *--x, arrange later
+ to change the output to *--x as well, since the
+ output op is the one that will be printed. */
+ if (val == 2 && strict > 0)
+ {
+ funny_match[funny_match_index].this = opno;
+ funny_match[funny_match_index++].other = match;
+ }
+ }
break;
case 'p':
int
reg_fits_class_p (operand, class, offset, mode)
rtx operand;
- register enum reg_class class;
+ enum reg_class class;
int offset;
enum machine_mode mode;
{
- register int regno = REGNO (operand);
+ int regno = REGNO (operand);
if (regno < FIRST_PSEUDO_REGISTER
&& TEST_HARD_REG_BIT (reg_class_contents[(int) class],
regno + offset))
{
- register int sr;
+ int sr;
regno += offset;
for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
sr > 0; sr--)
}
\f
/* Split single instruction. Helper function for split_all_insns.
- Return last insn in the sequence if succesfull, or NULL if unsuccesfull. */
+ Return last insn in the sequence if successful, or NULL if unsuccessful. */
static rtx
split_insn (insn)
rtx insn;
if (changed)
{
- for (i = 0; i < n_basic_blocks; i++)
- find_sub_basic_blocks (BASIC_BLOCK (i));
+ find_many_sub_basic_blocks (blocks);
}
if (changed && upd_life)
try = peephole2_insns (PATTERN (insn), insn, &match_len);
if (try != NULL)
{
+ /* If we are splitting a CALL_INSN, look for the CALL_INSN
+ in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
+ cfg-related call notes. */
+ for (i = 0; i <= match_len; ++i)
+ {
+ int j, k;
+ rtx old_insn, new_insn, note;
+
+ j = i + peep2_current;
+ if (j >= MAX_INSNS_PER_PEEP2 + 1)
+ j -= MAX_INSNS_PER_PEEP2 + 1;
+ old_insn = peep2_insn_data[j].insn;
+ if (GET_CODE (old_insn) != CALL_INSN)
+ continue;
+
+ new_insn = NULL_RTX;
+ if (GET_CODE (try) == SEQUENCE)
+ for (k = XVECLEN (try, 0) - 1; k >= 0; k--)
+ {
+ rtx x = XVECEXP (try, 0, k);
+ if (GET_CODE (x) == CALL_INSN)
+ {
+ new_insn = x;
+ break;
+ }
+ }
+ else if (GET_CODE (try) == CALL_INSN)
+ new_insn = try;
+ if (! new_insn)
+ abort ();
+
+ CALL_INSN_FUNCTION_USAGE (new_insn)
+ = CALL_INSN_FUNCTION_USAGE (old_insn);
+
+ for (note = REG_NOTES (old_insn);
+ note;
+ note = XEXP (note, 1))
+ switch (REG_NOTE_KIND (note))
+ {
+ case REG_EH_REGION:
+ case REG_NORETURN:
+ case REG_SETJMP:
+ case REG_ALWAYS_RETURN:
+ REG_NOTES (new_insn)
+ = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
+ XEXP (note, 0),
+ REG_NOTES (new_insn));
+ default:
+ /* Discard all other reg notes. */
+ break;
+ }
+
+ /* Croak if there is another call in the sequence. */
+ while (++i <= match_len)
+ {
+ j = i + peep2_current;
+ if (j >= MAX_INSNS_PER_PEEP2 + 1)
+ j -= MAX_INSNS_PER_PEEP2 + 1;
+ old_insn = peep2_insn_data[j].insn;
+ if (GET_CODE (old_insn) == CALL_INSN)
+ abort ();
+ }
+ break;
+ }
+
i = match_len + peep2_current;
if (i >= MAX_INSNS_PER_PEEP2 + 1)
i -= MAX_INSNS_PER_PEEP2 + 1;
/* Replace the old sequence with the new. */
try = emit_insn_after (try, peep2_insn_data[i].insn);
- flow_delete_insn_chain (insn, peep2_insn_data[i].insn);
+ delete_insn_chain (insn, peep2_insn_data[i].insn);
#ifdef HAVE_conditional_execution
/* With conditional execution, we cannot back up the