c = constraints[c[0] - '0'];
if (! asm_operand_ok (operands[i], c))
- return 0;
+ return 0;
}
return 1;
else
changes_allocated *= 2;
- changes =
- (change_t*) xrealloc (changes,
- sizeof (change_t) * changes_allocated);
+ changes =
+ (change_t*) xrealloc (changes,
+ sizeof (change_t) * changes_allocated);
}
-
+
changes[num_changes].object = object;
changes[num_changes].loc = loc;
changes[num_changes].old = old;
? &num_clobbers : 0);
int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
-
+
/* If this is an asm and the operand aren't legal, then fail. Likewise if
this is not an asm and the insn wasn't recognized. */
if ((is_asm && ! check_asm_operands (PATTERN (insn)))
int j;
newpat
- = gen_rtx_PARALLEL (VOIDmode,
+ = gen_rtx_PARALLEL (VOIDmode,
rtvec_alloc (XVECLEN (pat, 0) - 1));
for (j = 0; j < XVECLEN (newpat, 0); j++)
XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
for (i = 0; i < num_changes; i++)
if (changes[i].object
&& INSN_P (changes[i].object)
- && basic_block_for_insn
- && ((unsigned int)INSN_UID (changes[i].object)
- < basic_block_for_insn->num_elements)
&& (bb = BLOCK_FOR_INSN (changes[i].object)))
- bb->flags |= BB_DIRTY;
+ bb->flags |= BB_DIRTY;
num_changes = 0;
return 1;
case MEM:
case SUBREG:
return find_single_use_1 (dest, &XEXP (x, 0));
-
+
default:
break;
}
/* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
may result in incorrect reference. We should simplify all valid
subregs of MEM anyway. But allow this after reload because we
- might be called from cleanup_subreg_operands.
+ might be called from cleanup_subreg_operands.
??? This is a kludge. */
if (!reload_completed && SUBREG_BYTE (op) != 0
&& GET_CODE (SUBREG_REG (op)) == MEM)
- return 0;
+ return 0;
op = SUBREG_REG (op);
code = GET_CODE (op);
{
if (GET_CODE (addr) == ADDRESSOF)
return 1;
-
+
GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
return 0;
{
if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
break; /* Past last SET */
-
+
if (operands)
operands[i] = SET_DEST (XVECEXP (body, 0, i));
if (operand_locs)
return template;
}
-/* Check if an asm_operand matches it's constraints.
+/* Check if an asm_operand matches its constraints.
Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
int
if (GET_CODE (op) == MEM
&& (1
|| GET_CODE (XEXP (op, 0)) == PRE_DEC
- || GET_CODE (XEXP (op, 0)) == POST_DEC))
+ || GET_CODE (XEXP (op, 0)) == POST_DEC))
return 1;
break;
if (GET_CODE (op) == MEM
&& (1
|| GET_CODE (XEXP (op, 0)) == PRE_INC
- || GET_CODE (XEXP (op, 0)) == POST_INC))
+ || GET_CODE (XEXP (op, 0)) == POST_INC))
return 1;
break;
of the specified mode. We assume that if Y and Y+c are
valid addresses then so is Y+d for all 0<d<c. adjust_address will
go inside a LO_SUM here, so we do so as well. */
- if (GET_CODE (y) == LO_SUM)
+ if (GET_CODE (y) == LO_SUM
+ && mode != BLKmode
+ && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
plus_constant (XEXP (y, 1), mode_sz - 1));
else
break;
case '&':
op_alt[j].earlyclobber = 1;
- break;
+ break;
case '0': case '1': case '2': case '3': case '4':
case '5': case '6': case '7': case '8': case '9':
}
}
}
-
+
/* Check the operands of an insn against the insn's operand constraints
and return 1 if they are valid.
The information about the insn's operands, constraints, operand modes
NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
/* ??? Coddle to md files that generate subregs in post-
- reload splitters instead of computing the proper
+ reload splitters instead of computing the proper
hard register. */
if (reload_completed && first != last)
{
{
sbitmap blocks;
int changed;
- int i;
+ basic_block bb;
- blocks = sbitmap_alloc (n_basic_blocks);
+ blocks = sbitmap_alloc (last_basic_block);
sbitmap_zero (blocks);
changed = 0;
- for (i = n_basic_blocks - 1; i >= 0; --i)
+ FOR_EACH_BB_REVERSE (bb)
{
- basic_block bb = BASIC_BLOCK (i);
rtx insn, next;
bool finish = false;
while (GET_CODE (last) == BARRIER)
last = PREV_INSN (last);
- SET_BIT (blocks, i);
+ SET_BIT (blocks, bb->index);
changed = 1;
insn = last;
}
sbitmap_free (blocks);
}
-/* Same as split_all_insns, but do not expect CFG to be available.
+/* Same as split_all_insns, but do not expect CFG to be available.
Used by machine depedent reorg passes. */
void
regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
rtx insn, prev;
regset live;
- int i, b;
+ int i;
+ basic_block bb;
#ifdef HAVE_conditional_execution
sbitmap blocks;
- int changed;
+ bool changed;
#endif
+ bool do_cleanup_cfg = false;
+ bool do_rebuild_jump_labels = false;
/* Initialize the regsets we're going to use. */
for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
live = INITIALIZE_REG_SET (rs_heads[i]);
#ifdef HAVE_conditional_execution
- blocks = sbitmap_alloc (n_basic_blocks);
+ blocks = sbitmap_alloc (last_basic_block);
sbitmap_zero (blocks);
- changed = 0;
+ changed = false;
#else
count_or_remove_death_notes (NULL, 1);
#endif
- for (b = n_basic_blocks - 1; b >= 0; --b)
+ FOR_EACH_BB_REVERSE (bb)
{
- basic_block bb = BASIC_BLOCK (b);
struct propagate_block_info *pbi;
/* Indicate that all slots except the last holds invalid data. */
prev = PREV_INSN (insn);
if (INSN_P (insn))
{
- rtx try;
+ rtx try, before_try, x;
int match_len;
rtx note;
+ bool was_call = false;
/* Record this insn. */
if (--peep2_current < 0)
cfg-related call notes. */
for (i = 0; i <= match_len; ++i)
{
- int j, k;
+ int j;
rtx old_insn, new_insn, note;
j = i + peep2_current;
old_insn = peep2_insn_data[j].insn;
if (GET_CODE (old_insn) != CALL_INSN)
continue;
+ was_call = true;
- new_insn = NULL_RTX;
- if (GET_CODE (try) == SEQUENCE)
- for (k = XVECLEN (try, 0) - 1; k >= 0; k--)
- {
- rtx x = XVECEXP (try, 0, k);
- if (GET_CODE (x) == CALL_INSN)
- {
- new_insn = x;
- break;
- }
- }
- else if (GET_CODE (try) == CALL_INSN)
- new_insn = try;
- if (! new_insn)
+ new_insn = try;
+ while (new_insn != NULL_RTX)
+ {
+ if (GET_CODE (new_insn) == CALL_INSN)
+ break;
+ new_insn = NEXT_INSN (new_insn);
+ }
+
+ if (new_insn == NULL_RTX)
abort ();
CALL_INSN_FUNCTION_USAGE (new_insn)
if (i >= MAX_INSNS_PER_PEEP2 + 1)
i -= MAX_INSNS_PER_PEEP2 + 1;
+ note = find_reg_note (peep2_insn_data[i].insn,
+ REG_EH_REGION, NULL_RTX);
+
/* Replace the old sequence with the new. */
- try = emit_insn_after (try, peep2_insn_data[i].insn);
+ try = emit_insn_after_scope (try, peep2_insn_data[i].insn,
+ INSN_SCOPE (peep2_insn_data[i].insn));
+ before_try = PREV_INSN (insn);
delete_insn_chain (insn, peep2_insn_data[i].insn);
/* Re-insert the EH_REGION notes. */
- if (try == bb->end
- && (note = find_reg_note (peep2_insn_data[i].insn,
- REG_EH_REGION, NULL_RTX)))
+ if (note || (was_call && nonlocal_goto_handler_labels))
{
- rtx x;
- for (x = NEXT_INSN (peep2_insn_data[i].insn);
- x != NEXT_INSN (try); x = NEXT_INSN (x))
+ edge eh_edge;
+
+ for (eh_edge = bb->succ; eh_edge
+ ; eh_edge = eh_edge->succ_next)
+ if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
+ break;
+
+ for (x = try ; x != before_try ; x = PREV_INSN (x))
if (GET_CODE (x) == CALL_INSN
|| (flag_non_call_exceptions
- && may_trap_p (PATTERN (x))))
- REG_NOTES (x)
- = gen_rtx_EXPR_LIST (REG_EH_REGION,
- XEXP (note, 0),
- REG_NOTES (x));
+ && may_trap_p (PATTERN (x))
+ && !find_reg_note (x, REG_EH_REGION, NULL)))
+ {
+ if (note)
+ REG_NOTES (x)
+ = gen_rtx_EXPR_LIST (REG_EH_REGION,
+ XEXP (note, 0),
+ REG_NOTES (x));
+
+ if (x != bb->end && eh_edge)
+ {
+ edge nfte, nehe;
+ int flags;
+
+ nfte = split_block (bb, x);
+ flags = (eh_edge->flags
+ & (EDGE_EH | EDGE_ABNORMAL));
+ if (GET_CODE (x) == CALL_INSN)
+ flags |= EDGE_ABNORMAL_CALL;
+ nehe = make_edge (nfte->src, eh_edge->dest,
+ flags);
+
+ nehe->probability = eh_edge->probability;
+ nfte->probability
+ = REG_BR_PROB_BASE - nehe->probability;
+
+ do_cleanup_cfg |= purge_dead_edges (nfte->dest);
+#ifdef HAVE_conditional_execution
+ SET_BIT (blocks, nfte->dest->index);
+ changed = true;
+#endif
+ bb = nfte->src;
+ eh_edge = nehe;
+ }
+ }
+
+ /* Converting possibly trapping insn to non-trapping is
+ possible. Zap dummy outgoing edges. */
+ do_cleanup_cfg |= purge_dead_edges (bb);
}
- /* Converting possibly trapping insn to non-trapping is
- possible. Zap dummy outgoing edges. */
- if (try == bb->end)
- purge_dead_edges (bb);
#ifdef HAVE_conditional_execution
/* With conditional execution, we cannot back up the
death data structures are not so self-contained.
So record that we've made a modification to this
block and update life information at the end. */
- SET_BIT (blocks, b);
- changed = 1;
+ SET_BIT (blocks, bb->index);
+ changed = true;
for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
peep2_insn_data[i].insn = NULL_RTX;
COPY_REG_SET (live, peep2_insn_data[i].live_before);
/* Update life information for the new sequence. */
+ x = try;
do
{
- if (INSN_P (try))
+ if (INSN_P (x))
{
if (--i < 0)
i = MAX_INSNS_PER_PEEP2;
- peep2_insn_data[i].insn = try;
- propagate_one_insn (pbi, try);
+ peep2_insn_data[i].insn = x;
+ propagate_one_insn (pbi, x);
COPY_REG_SET (peep2_insn_data[i].live_before, live);
}
- try = PREV_INSN (try);
+ x = PREV_INSN (x);
}
- while (try != prev);
+ while (x != prev);
/* ??? Should verify that LIVE now matches what we
had before the new sequence. */
peep2_current = i;
#endif
+
+ /* If we generated a jump instruction, it won't have
+ JUMP_LABEL set. Recompute after we're done. */
+ for (x = try; x != before_try; x = PREV_INSN (x))
+ if (GET_CODE (x) == JUMP_INSN)
+ {
+ do_rebuild_jump_labels = true;
+ break;
+ }
}
}
FREE_REG_SET (peep2_insn_data[i].live_before);
FREE_REG_SET (live);
+ if (do_rebuild_jump_labels)
+ rebuild_jump_labels (get_insns ());
+
+ /* If we eliminated EH edges, we may be able to merge blocks. Further,
+ we've changed global life since exception handlers are no longer
+ reachable. */
+ if (do_cleanup_cfg)
+ {
+ cleanup_cfg (0);
+ update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
+ }
#ifdef HAVE_conditional_execution
- count_or_remove_death_notes (blocks, 1);
- update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
+ else
+ {
+ count_or_remove_death_notes (blocks, 1);
+ update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
+ }
sbitmap_free (blocks);
#endif
}
#endif /* HAVE_peephole2 */
+
+/* Common predicates for use with define_bypass. */
+
+/* True if the dependency between OUT_INSN and IN_INSN is on the store
+ data not the address operand(s) of the store. IN_INSN must be
+ single_set. OUT_INSN must be either a single_set or a PARALLEL with
+ SETs inside. */
+
+int
+store_data_bypass_p (out_insn, in_insn)
+  rtx out_insn, in_insn;
+{
+  rtx out_set, in_set;
+
+  /* The consumer must be a single_set; callers guarantee this.  */
+  in_set = single_set (in_insn);
+  if (! in_set)
+    abort ();
+
+  /* The bypass only applies when the consumer is a store.  */
+  if (GET_CODE (SET_DEST (in_set)) != MEM)
+    return false;
+
+  out_set = single_set (out_insn);
+  if (out_set)
+    {
+      /* Fail if the producer's result feeds the store's address
+         (SET_DEST of a store is the MEM, i.e. the address side).  */
+      if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
+        return false;
+    }
+  else
+    {
+      rtx out_pat;
+      int i;
+
+      /* OUT_INSN was not single_set, so it must be a PARALLEL of
+         SETs and CLOBBERs; check every SET destination against the
+         store address.  */
+      out_pat = PATTERN (out_insn);
+      if (GET_CODE (out_pat) != PARALLEL)
+        abort ();
+
+      for (i = 0; i < XVECLEN (out_pat, 0); i++)
+        {
+          rtx exp = XVECEXP (out_pat, 0, i);
+
+          /* CLOBBERs set no usable value; skip them.  */
+          if (GET_CODE (exp) == CLOBBER)
+            continue;
+
+          if (GET_CODE (exp) != SET)
+            abort ();
+
+          if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
+            return false;
+        }
+    }
+
+  /* No produced value reaches the store address, so the dependency
+     is on the stored data only.  */
+  return true;
+}
+
+/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
+ condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
+ or multiple set; IN_INSN should be single_set for truth, but for convenience
+ of insn categorization may be any JUMP or CALL insn. */
+
+int
+if_test_bypass_p (out_insn, in_insn)
+  rtx out_insn, in_insn;
+{
+  rtx out_set, in_set;
+
+  in_set = single_set (in_insn);
+  if (! in_set)
+    {
+      /* Jumps and calls need not be single_set; treat them as not
+         bypassable instead of aborting.  */
+      if (GET_CODE (in_insn) == JUMP_INSN || GET_CODE (in_insn) == CALL_INSN)
+        return false;
+      abort ();
+    }
+
+  /* The bypass only applies to conditional-move style consumers.  */
+  if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
+    return false;
+  in_set = SET_SRC (in_set);
+
+  out_set = single_set (out_insn);
+  if (out_set)
+    {
+      /* Fail if the produced value feeds either data arm; only a
+         dependency through the condition (operand 0) is allowed.  */
+      if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
+          || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
+        return false;
+    }
+  else
+    {
+      rtx out_pat;
+      int i;
+
+      /* OUT_INSN was not single_set, so it must be a PARALLEL of
+         SETs and CLOBBERs; check every SET destination.  */
+      out_pat = PATTERN (out_insn);
+      if (GET_CODE (out_pat) != PARALLEL)
+        abort ();
+
+      for (i = 0; i < XVECLEN (out_pat, 0); i++)
+        {
+          rtx exp = XVECEXP (out_pat, 0, i);
+
+          /* CLOBBERs set no usable value; skip them.  */
+          if (GET_CODE (exp) == CLOBBER)
+            continue;
+
+          if (GET_CODE (exp) != SET)
+            abort ();
+
+          /* Test this SET's own destination -- OUT_SET is null in
+             this branch (single_set returned 0 above), so the old
+             code dereferenced a null rtx here.  */
+          if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
+              || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
+            return false;
+        }
+    }
+
+  return true;
+}