/* Instruction scheduling pass. Selective scheduler and pipeliner.
- Copyright (C) 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
+ Copyright (C) 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
This file is part of GCC.
flist_add (flist_t *lp, insn_t insn, state_t state, deps_t dc, void *tc,
insn_t last_scheduled_insn, VEC(rtx,gc) *executing_insns,
int *ready_ticks, int ready_ticks_size, insn_t sched_next,
- int cycle, int cycle_issued_insns,
+ int cycle, int cycle_issued_insns, int issue_more,
bool starts_cycle_p, bool after_stall_p)
{
fence_t f;
FENCE_TC (f) = tc;
FENCE_LAST_SCHEDULED_INSN (f) = last_scheduled_insn;
+ FENCE_ISSUE_MORE (f) = issue_more;
FENCE_EXECUTING_INSNS (f) = executing_insns;
FENCE_READY_TICKS (f) = ready_ticks;
FENCE_READY_TICKS_SIZE (f) = ready_ticks_size;
ready_ticks_size,
NULL_RTX /* sched_next */,
1 /* cycle */, 0 /* cycle_issued_insns */,
+ issue_rate, /* issue_more */
1 /* starts_cycle_p */, 0 /* after_stall_p */);
}
}
3) all other fields are set to corresponding constant values.
INSN, STATE, DC, TC, LAST_SCHEDULED_INSN, EXECUTING_INSNS,
- READY_TICKS, READY_TICKS_SIZE, SCHED_NEXT, CYCLE and AFTER_STALL_P
- are the corresponding fields of the second fence. */
+ READY_TICKS, READY_TICKS_SIZE, SCHED_NEXT, CYCLE, ISSUE_MORE
+ and AFTER_STALL_P are the corresponding fields of the second fence. */
static void
merge_fences (fence_t f, insn_t insn,
state_t state, deps_t dc, void *tc,
rtx last_scheduled_insn, VEC(rtx, gc) *executing_insns,
int *ready_ticks, int ready_ticks_size,
- rtx sched_next, int cycle, bool after_stall_p)
+ rtx sched_next, int cycle, int issue_more, bool after_stall_p)
{
insn_t last_scheduled_insn_old = FENCE_LAST_SCHEDULED_INSN (f);
FENCE_CYCLE (f) = cycle;
FENCE_LAST_SCHEDULED_INSN (f) = NULL;
+ FENCE_ISSUE_MORE (f) = issue_rate;
VEC_free (rtx, gc, executing_insns);
free (ready_ticks);
if (FENCE_EXECUTING_INSNS (f))
delete_target_context (tc);
FENCE_LAST_SCHEDULED_INSN (f) = NULL;
+ FENCE_ISSUE_MORE (f) = issue_rate;
}
else
if (candidate->src == BLOCK_FOR_INSN (last_scheduled_insn))
FENCE_TC (f) = tc;
FENCE_LAST_SCHEDULED_INSN (f) = last_scheduled_insn;
+ FENCE_ISSUE_MORE (f) = issue_more;
}
else
{
state_t state, deps_t dc, void *tc, rtx last_scheduled_insn,
VEC(rtx, gc) *executing_insns, int *ready_ticks,
int ready_ticks_size, rtx sched_next, int cycle,
- int cycle_issued_insns, bool starts_cycle_p, bool after_stall_p)
+ int cycle_issued_insns, int issue_rate,
+ bool starts_cycle_p, bool after_stall_p)
{
fence_t f = flist_lookup (FLIST_TAIL_HEAD (new_fences), insn);
flist_add (FLIST_TAIL_TAILP (new_fences), insn, state, dc, tc,
last_scheduled_insn, executing_insns, ready_ticks,
ready_ticks_size, sched_next, cycle, cycle_issued_insns,
- starts_cycle_p, after_stall_p);
+ issue_rate, starts_cycle_p, after_stall_p);
FLIST_TAIL_TAILP (new_fences)
= &FLIST_NEXT (*FLIST_TAIL_TAILP (new_fences));
{
merge_fences (f, insn, state, dc, tc, last_scheduled_insn,
executing_insns, ready_ticks, ready_ticks_size,
- sched_next, cycle, after_stall_p);
+ sched_next, cycle, issue_rate, after_stall_p);
}
}
merge_fences (f, old->insn, old->state, old->dc, old->tc,
old->last_scheduled_insn, old->executing_insns,
old->ready_ticks, old->ready_ticks_size,
- old->sched_next, old->cycle,
+ old->sched_next, old->cycle, old->issue_more,
old->after_stall_p);
}
else
NULL_RTX, NULL,
XCNEWVEC (int, ready_ticks_size), ready_ticks_size,
NULL_RTX, FENCE_CYCLE (fence) + 1,
- 0, 1, FENCE_AFTER_STALL_P (fence));
+ 0, issue_rate, 1, FENCE_AFTER_STALL_P (fence));
}
/* Add a new fence to NEW_FENCES list and initialize all of its data
FENCE_SCHED_NEXT (fence),
FENCE_CYCLE (fence),
FENCE_ISSUED_INSNS (fence),
+ FENCE_ISSUE_MORE (fence),
FENCE_STARTS_CYCLE_P (fence),
FENCE_AFTER_STALL_P (fence));
}
bool rescan_p;
/* Keep empty bb only if this block immediately precedes EXIT and
- has incoming non-fallthrough edge. Otherwise remove it. */
+ has incoming non-fallthrough edge, or it has no predecessors or
+ successors. Otherwise remove it. */
if (!sel_bb_empty_p (bb)
|| (single_succ_p (bb)
&& single_succ (bb) == EXIT_BLOCK_PTR
&& (!single_pred_p (bb)
- || !(single_pred_edge (bb)->flags & EDGE_FALLTHRU))))
+ || !(single_pred_edge (bb)->flags & EDGE_FALLTHRU)))
+ || EDGE_COUNT (bb->preds) == 0
+ || EDGE_COUNT (bb->succs) == 0)
return false;
/* Do not attempt to redirect complex edges. */
{
gcc_assert (pred_bb != NULL);
- move_bb_info (pred_bb, bb);
+ if (in_current_region_p (pred_bb))
+ move_bb_info (pred_bb, bb);
remove_empty_bb (bb, true);
}
sched_scan (&ssi, bbs, bb, new_insns, NULL);
}
-/* Restore other notes for the whole region. */
+/* Restore notes for the whole region. */
static void
-sel_restore_other_notes (void)
+sel_restore_notes (void)
{
int bb;
+ insn_t insn;
for (bb = 0; bb < current_nr_blocks; bb++)
{
restore_other_notes (NULL, first);
BB_NOTE_LIST (first) = NULL_RTX;
+ FOR_BB_INSNS (first, insn)
+ if (NONDEBUG_INSN_P (insn))
+ reemit_notes (insn);
+
first = first->next_bb;
}
while (first != last);
void
sel_finish_bbs (void)
{
- sel_restore_other_notes ();
+ sel_restore_notes ();
/* Remove current loop preheader from this loop. */
if (current_loop_nest)
latch. We can't use the header here, because this header could be
a just-removed preheader, and that would give us the wrong region number.
The latch can't be used because it could be in an inner loop, too. */
- if (LOOP_MARKED_FOR_PIPELINING_P (loop) && pipelining_p)
+ if (LOOP_MARKED_FOR_PIPELINING_P (loop))
{
int rgn = CONTAINING_RGN (loop->latch->index);
for (i = 0;
VEC_iterate (basic_block, preheader_blocks, i, bb);
i++)
+ {
+ VEC_safe_push (basic_block, heap, last_added_blocks, bb);
sel_add_bb (bb);
+ }
VEC_free (basic_block, heap, preheader_blocks);
}