static int is_exception_free (rtx, int, int);
static bool sets_likely_spilled (rtx);
-static void sets_likely_spilled_1 (rtx, rtx, void *);
+static void sets_likely_spilled_1 (rtx, const_rtx, void *);
static void add_branch_dependences (rtx, rtx);
-static void compute_block_backward_dependences (int);
+static void compute_block_dependences (int);
static void init_regions (void);
static void schedule_region (int);
if (current_function_has_exception_handlers ())
return 1;
- /* If we have non-jumping insns which refer to labels, then we consider
- the cfg not well structured. */
+ /* If we have insns which refer to labels as operands rather than
+ as jump targets, then we consider the cfg not well structured. */
FOR_EACH_BB (b)
FOR_BB_INSNS (b, insn)
{
- /* Check for labels referred by non-jump insns. */
- if (NONJUMP_INSN_P (insn) || CALL_P (insn))
- {
- rtx note = find_reg_note (insn, REG_LABEL, NULL_RTX);
- if (note
- && ! (JUMP_P (NEXT_INSN (insn))
- && find_reg_note (NEXT_INSN (insn), REG_LABEL,
- XEXP (note, 0))))
- return 1;
- }
+ /* Check for labels which are referred to, but not (at least not
+ directly) as jump targets. */
+ if (INSN_P (insn)
+ && find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX))
+ return 1;
+
/* If this function has a computed jump, then we consider the cfg
not well structured. */
- else if (JUMP_P (insn) && computed_jump_p (insn))
+ if (JUMP_P (insn) && computed_jump_p (insn))
return 1;
}
static void
set_spec_fed (rtx load_insn)
{
- dep_link_t link;
+ sd_iterator_def sd_it;
+ dep_t dep;
- FOR_EACH_DEP_LINK (link, INSN_FORW_DEPS (load_insn))
- if (DEP_LINK_KIND (link) == REG_DEP_TRUE)
- FED_BY_SPEC_LOAD (DEP_LINK_CON (link)) = 1;
+ FOR_EACH_DEP (load_insn, SD_LIST_FORW, sd_it, dep)
+ if (DEP_TYPE (dep) == REG_DEP_TRUE)
+ FED_BY_SPEC_LOAD (DEP_CON (dep)) = 1;
}
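
For readers new to the sd_* interface used throughout this patch: FOR_EACH_DEP expands to roughly the loop below. This is a sketch of the sched-int.h iterator, not a verbatim copy of the macro; the sd_iterator_start / sd_iterator_cond / sd_iterator_next names come from that header.

    /* Approximate expansion of
       FOR_EACH_DEP (load_insn, SD_LIST_FORW, sd_it, dep).  */
    sd_iterator_def sd_it = sd_iterator_start (load_insn, SD_LIST_FORW);
    dep_t dep;

    while (sd_iterator_cond (&sd_it, &dep))
      {
        /* DEP is the current forward dependence of LOAD_INSN;
           DEP_PRO (dep) is its producer, DEP_CON (dep) its consumer.  */
        sd_iterator_next (&sd_it);
      }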
/* On the path from the insn to load_insn_bb, find a conditional
static int
find_conditional_protection (rtx insn, int load_insn_bb)
{
- dep_link_t link;
+ sd_iterator_def sd_it;
+ dep_t dep;
/* Iterate through DEF-USE forward dependences. */
- FOR_EACH_DEP_LINK (link, INSN_FORW_DEPS (insn))
+ FOR_EACH_DEP (insn, SD_LIST_FORW, sd_it, dep)
{
- rtx next = DEP_LINK_CON (link);
+ rtx next = DEP_CON (dep);
if ((CONTAINING_RGN (BLOCK_NUM (next)) ==
CONTAINING_RGN (BB_TO_BLOCK (load_insn_bb)))
&& IS_REACHABLE (INSN_BB (next), load_insn_bb)
&& load_insn_bb != INSN_BB (next)
- && DEP_LINK_KIND (link) == REG_DEP_TRUE
+ && DEP_TYPE (dep) == REG_DEP_TRUE
&& (JUMP_P (next)
|| find_conditional_protection (next, load_insn_bb)))
return 1;
static int
is_conditionally_protected (rtx load_insn, int bb_src, int bb_trg)
{
- dep_link_t link;
+ sd_iterator_def sd_it;
+ dep_t dep;
- FOR_EACH_DEP_LINK (link, INSN_BACK_DEPS (load_insn))
+ FOR_EACH_DEP (load_insn, SD_LIST_BACK, sd_it, dep)
{
- rtx insn1 = DEP_LINK_PRO (link);
+ rtx insn1 = DEP_PRO (dep);
/* Must be a DEF-USE dependence upon non-branch. */
- if (DEP_LINK_KIND (link) != REG_DEP_TRUE
+ if (DEP_TYPE (dep) != REG_DEP_TRUE
|| JUMP_P (insn1))
continue;
static int
is_pfree (rtx load_insn, int bb_src, int bb_trg)
{
- dep_link_t back_link;
+ sd_iterator_def back_sd_it;
+ dep_t back_dep;
candidate *candp = candidate_table + bb_src;
if (candp->split_bbs.nr_members != 1)
/* Must have exactly one escape block. */
return 0;
- FOR_EACH_DEP_LINK (back_link, INSN_BACK_DEPS (load_insn))
+ FOR_EACH_DEP (load_insn, SD_LIST_BACK, back_sd_it, back_dep)
{
- rtx insn1 = DEP_LINK_PRO (back_link);
+ rtx insn1 = DEP_PRO (back_dep);
- if (DEP_LINK_KIND (back_link) == REG_DEP_TRUE)
+ if (DEP_TYPE (back_dep) == REG_DEP_TRUE)
+ /* Found a DEF-USE dependence (insn1, load_insn). */
{
- /* Found a DEF-USE dependence (insn1, load_insn). */
- dep_link_t fore_link;
+ sd_iterator_def fore_sd_it;
+ dep_t fore_dep;
- FOR_EACH_DEP_LINK (fore_link, INSN_FORW_DEPS (insn1))
+ FOR_EACH_DEP (insn1, SD_LIST_FORW, fore_sd_it, fore_dep)
{
- rtx insn2 = DEP_LINK_CON (fore_link);
+ rtx insn2 = DEP_CON (fore_dep);
- if (DEP_LINK_KIND (fore_link) == REG_DEP_TRUE)
+ if (DEP_TYPE (fore_dep) == REG_DEP_TRUE)
{
/* Found a DEF-USE dependence (insn1, insn2). */
if (haifa_classify_insn (insn2) != PFREE_CANDIDATE)
if (FED_BY_SPEC_LOAD (load_insn))
return 1;
- if (deps_list_empty_p (INSN_BACK_DEPS (load_insn)))
+ if (sd_lists_empty_p (load_insn, SD_LIST_BACK))
/* Dependence may 'hide' out of the region. */
return 1;
if (not_ex_free
/* We are here because is_exception_free () == false.
But we may be able to handle it with control speculation. */
- && current_sched_info->flags & DO_SPECULATION)
+ && (current_sched_info->flags & DO_SPECULATION)
+ && (spec_info->mask & BEGIN_CONTROL))
/* Here we got new control-speculative instruction. */
ts = set_dep_weak (ts, BEGIN_CONTROL, MAX_DEP_WEAK);
else
}
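
The new (spec_info->mask & BEGIN_CONTROL) test is the substantive fix here: DO_SPECULATION only says the scheduler may speculate at all, while spec_info->mask says which kinds of speculation the target enables, so the old code could request control speculation on a target that supports only data speculation. A small hedged example of reading the recorded weakness back, assuming get_dep_weak is the sched-deps.c accessor paired with set_dep_weak:

    /* After the guard above fires, TS records control speculation
       of maximal weakness.  */
    dw_t w = get_dep_weak (ts, BEGIN_CONTROL);
    gcc_assert (w == MAX_DEP_WEAK);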
static void
-sets_likely_spilled_1 (rtx x, rtx pat, void *data)
+sets_likely_spilled_1 (rtx x, const_rtx pat, void *data)
{
bool *ret = (bool *) data;
if (!NOTE_P (insn))
{
if (last != 0
- && (find_link_by_pro_in_deps_list (INSN_BACK_DEPS (last), insn)
- == NULL))
+ && sd_find_dep_between (insn, last, false) == NULL)
{
if (! sched_insns_conditions_mutex_p (last, insn))
add_dependence (last, insn, REG_DEP_ANTI);
pred_deps->pending_write_mems = 0;
}
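
sd_find_dep_between replaces the open-coded walk of INSN_BACK_DEPS. A usage sketch, assuming the sched-deps.c prototype dep_t sd_find_dep_between (rtx pro, rtx con, bool resolved_p), where RESOLVED_P extends the search to already-resolved lists:

    /* Add the scheduling-barrier anti-dependence only when no
       dependence from INSN to LAST exists yet; false restricts the
       search to the unresolved lists.  */
    if (sd_find_dep_between (insn, last, false) == NULL
        && ! sched_insns_conditions_mutex_p (last, insn))
      add_dependence (last, insn, REG_DEP_ANTI);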
-/* Compute backward dependences inside bb. In a multiple blocks region:
+/* Compute dependences inside bb. In a multiple blocks region:
(1) a bb is analyzed after its predecessors, and (2) the lists in
effect at the end of bb (after analyzing for bb) are inherited by
bb's successors.
similar, and the result is interblock dependences in the region. */
static void
-compute_block_backward_dependences (int bb)
+compute_block_dependences (int bb)
{
rtx head, tail;
struct deps tmp_deps;
/* Do the analysis for this block. */
gcc_assert (EBB_FIRST_BB (bb) == EBB_LAST_BB (bb));
get_ebb_head_tail (EBB_FIRST_BB (bb), EBB_LAST_BB (bb), &head, &tail);
+
sched_analyze (&tmp_deps, head, tail);
add_branch_dependences (head, tail);
/* Free up the INSN_LISTs. */
free_deps (&tmp_deps);
+
+ if (targetm.sched.dependencies_evaluation_hook)
+ targetm.sched.dependencies_evaluation_hook (head, tail);
+}
+
+/* Free dependencies of instructions inside BB. */
+static void
+free_block_dependencies (int bb)
+{
+ rtx head;
+ rtx tail;
+
+ get_ebb_head_tail (EBB_FIRST_BB (bb), EBB_LAST_BB (bb), &head, &tail);
+
+ sched_free_deps (head, tail, true);
}
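
Together with compute_block_dependences above, this gives each region a symmetric dependence lifecycle. Schematically, simplified from schedule_region as modified later in this patch:

    /* Per-region flow after this patch (scheduling details omitted).  */
    for (bb = 0; bb < current_nr_blocks; bb++)
      compute_block_dependences (bb);   /* sched_analyze + target hook  */

    /* ... compute priorities, schedule each block ...  */

    for (bb = 0; bb < current_nr_blocks; bb++)
      free_block_dependencies (bb);     /* return deps to the alloc pools  */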
/* Remove all INSN_LISTs and EXPR_LISTs from the pending lists and add
}
}
\f
-
/* Print dependences for debugging starting from FROM_BB.
Callable from debugger. */
void
for (insn = head; insn != next_tail; insn = NEXT_INSN (insn))
{
- dep_link_t link;
-
if (! INSN_P (insn))
{
int n;
INSN_UID (insn),
INSN_CODE (insn),
BLOCK_NUM (insn),
- INSN_DEP_COUNT (insn),
+ sd_lists_size (insn, SD_LIST_BACK),
INSN_PRIORITY (insn),
insn_cost (insn));
print_reservation (sched_dump, insn);
fprintf (sched_dump, "\t: ");
- FOR_EACH_DEP_LINK (link, INSN_FORW_DEPS (insn))
- fprintf (sched_dump, "%d ", INSN_UID (DEP_LINK_CON (link)));
+ {
+ sd_iterator_def sd_it;
+ dep_t dep;
+
+ FOR_EACH_DEP (insn, SD_LIST_FORW, sd_it, dep)
+ fprintf (sched_dump, "%d ", INSN_UID (DEP_CON (dep)));
+ }
fprintf (sched_dump, "\n");
}
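
Note that the SD_LIST_* arguments form a bitmask, so list types can be combined or split when counting. A hedged example, assuming SD_LIST_BACK is defined as SD_LIST_HARD_BACK | SD_LIST_SPEC_BACK in sched-int.h:

    /* Count only the speculative backward dependences of INSN.  */
    int n_spec = sd_lists_size (insn, SD_LIST_SPEC_BACK);

    /* The back-deps count printed above covers hard plus speculative.  */
    gcc_assert (sd_lists_size (insn, SD_LIST_BACK)
                == sd_lists_size (insn, SD_LIST_HARD_BACK) + n_spec);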
for (bb = 0; bb < current_nr_blocks; bb++)
init_deps (bb_deps + bb);
- /* Compute backward dependencies. */
+ /* Compute dependencies. */
for (bb = 0; bb < current_nr_blocks; bb++)
- compute_block_backward_dependences (bb);
-
- /* Compute forward dependencies. */
- for (bb = current_nr_blocks - 1; bb >= 0; bb--)
- {
- rtx head, tail;
-
- gcc_assert (EBB_FIRST_BB (bb) == EBB_LAST_BB (bb));
- get_ebb_head_tail (EBB_FIRST_BB (bb), EBB_LAST_BB (bb), &head, &tail);
-
- compute_forward_dependences (head, tail);
-
- if (targetm.sched.dependencies_evaluation_hook)
- targetm.sched.dependencies_evaluation_hook (head, tail);
- }
+ compute_block_dependences (bb);
free_pending_lists ();
/* Sanity check: verify that all region insns were scheduled. */
gcc_assert (sched_rgn_n_insns == rgn_n_insns);
-
/* Done with this region. */
if (current_nr_blocks > 1)
sbitmap_vector_free (ancestor_edges);
free (rgn_edges);
}
+
+ /* Free dependencies. */
+ for (bb = 0; bb < current_nr_blocks; ++bb)
+ free_block_dependencies (bb);
+
+ gcc_assert (haifa_recovery_bb_ever_added_p
+ || deps_pools_are_empty_p ());
}
/* Initialize data structures for region scheduling. */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_df_finish |
+ TODO_df_finish | TODO_verify_rtl_sharing |
TODO_dump_func |
TODO_verify_flow |
TODO_ggc_collect, /* todo_flags_finish */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_df_finish |
+ TODO_df_finish | TODO_verify_rtl_sharing |
TODO_dump_func |
TODO_verify_flow |
TODO_ggc_collect, /* todo_flags_finish */