gcc_assert ((s != NULL && dc != NULL && tc != NULL)
|| (s == NULL && dc == NULL && tc == NULL));
- if (s != NULL)
- free (s);
+ free (s);
if (dc != NULL)
delete_deps_context (dc);
else
EXPR_TARGET_AVAILABLE (to) = -1;
}
+ else if (EXPR_TARGET_AVAILABLE (from) == 0
+ && EXPR_LHS (from)
+ && REG_P (EXPR_LHS (from))
+ && REGNO (EXPR_LHS (to)) != REGNO (EXPR_LHS (from)))
+ EXPR_TARGET_AVAILABLE (to) = -1;
else
EXPR_TARGET_AVAILABLE (to) &= EXPR_TARGET_AVAILABLE (from);
}
void
merge_expr_data (expr_t to, expr_t from, insn_t split_point)
{
- /* For now, we just set the spec of resulting expr to be minimum of the specs
- of merged exprs. */
- if (EXPR_SPEC (to) > EXPR_SPEC (from))
+ /* Choose the maximum of the specs of merged exprs. This is required
+ for correctness of bookkeeping. */
+ if (EXPR_SPEC (to) < EXPR_SPEC (from))
EXPR_SPEC (to) = EXPR_SPEC (from);
if (split_point)
if (EXPR_SEPARABLE_P (expr))
{
if (REG_P (EXPR_LHS (expr))
- && bitmap_bit_p (lv_set, REGNO (EXPR_LHS (expr))))
+ && register_unavailable_p (lv_set, EXPR_LHS (expr)))
{
/* If it's an insn like r1 = use (r1, ...), and it exists in
different forms in each of the av_sets being merged, we can't say
miss a unifying code motion along both branches using a renamed
register, but it won't affect a code correctness since upon
an actual code motion a bookkeeping code would be generated. */
- if (bitmap_bit_p (VINSN_REG_USES (EXPR_VINSN (expr)),
- REGNO (EXPR_LHS (expr))))
+ if (register_unavailable_p (VINSN_REG_USES (EXPR_VINSN (expr)),
+ EXPR_LHS (expr)))
EXPR_TARGET_AVAILABLE (expr) = -1;
else
EXPR_TARGET_AVAILABLE (expr) = false;
/* Do not allow clobbering the address register of speculative
insns. */
- if (bitmap_bit_p (VINSN_REG_USES (EXPR_VINSN (expr)),
- expr_dest_regno (expr)))
+ if (register_unavailable_p (VINSN_REG_USES (EXPR_VINSN (expr)),
+ expr_dest_reg (expr)))
{
EXPR_TARGET_AVAILABLE (expr) = false;
return 2;
}
\f
+/* Returns true if REG (at least partially) is present in REGS. */
+bool
+register_unavailable_p (regset regs, rtx reg)
+{
+ unsigned regno, end_regno;
+
+ regno = REGNO (reg);
+ if (bitmap_bit_p (regs, regno))
+ return true;
+
+ end_regno = END_REGNO (reg);
+
+ /* A multi-word hard register spans [REGNO, END_REGNO); check the
+ remaining regnos too, so that a partial overlap also counts as
+ unavailable.  For a pseudo the loop body never runs. */
+ while (++regno < end_regno)
+ if (bitmap_bit_p (regs, regno))
+ return true;
+
+ return false;
+}
+
/* Av set functions. */
/* Add a new element to av set SETP.
}
\f
+/* Hooks a scheduler frontend supplies to sched_scan () so it can set up
+ its per-basic-block and per-insn data while the region is walked.
+ Any hook may be NULL, in which case that phase is skipped. */
+struct sched_scan_info_def
+{
+ /* This hook notifies scheduler frontend to extend its internal per basic
+ block data structures. This hook should be called once before a series of
+ calls to bb_init (). */
+ void (*extend_bb) (void);
+
+ /* This hook makes scheduler frontend to initialize its internal data
+ structures for the passed basic block. */
+ void (*init_bb) (basic_block);
+
+ /* This hook notifies scheduler frontend to extend its internal per insn data
+ structures. This hook should be called once before a series of calls to
+ insn_init (). */
+ void (*extend_insn) (void);
+
+ /* This hook makes scheduler frontend to initialize its internal data
+ structures for the passed insn. */
+ void (*init_insn) (rtx);
+};
+
+/* A driver function to add a set of basic blocks (BBS) to the
+ scheduling region.  Each SSI hook is optional (may be NULL); the
+ extend_* hooks run once, before the corresponding init_* hook is
+ applied to every block (resp. every insn of every block) in BBS. */
+static void
+sched_scan (const struct sched_scan_info_def *ssi, bb_vec_t bbs)
+{
+ unsigned i;
+ basic_block bb;
+
+ /* Grow per-bb structures once, then initialize each block. */
+ if (ssi->extend_bb)
+ ssi->extend_bb ();
+
+ if (ssi->init_bb)
+ FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
+ ssi->init_bb (bb);
+
+ /* Likewise for per-insn structures and each insn of each block. */
+ if (ssi->extend_insn)
+ ssi->extend_insn ();
+
+ if (ssi->init_insn)
+ FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
+ {
+ rtx insn;
+
+ FOR_BB_INSNS (bb, insn)
+ ssi->init_insn (insn);
+ }
+}
/* Implement hooks for collecting fundamental insn properties like if insn is
an ASM or is within a SCHED_GROUP. */
/* Exception handling insns are always unique. */
|| (cfun->can_throw_non_call_exceptions && can_throw_internal (insn))
/* TRAP_IF though have an INSN code is control_flow_insn_p (). */
- || control_flow_insn_p (insn))
+ || control_flow_insn_p (insn)
+ || volatile_insn_p (PATTERN (insn))
+ || (targetm.cannot_copy_insn_p
+ && targetm.cannot_copy_insn_p (insn)))
force_unique_p = true;
else
force_unique_p = false;
init_global_and_expr_for_insn /* init_insn */
};
- sched_scan (&ssi, bbs, NULL, NULL, NULL);
+ sched_scan (&ssi, bbs);
}
/* Finalize region-scope data structures for basic blocks. */
finish_global_and_expr_insn /* init_insn */
};
- sched_scan (&ssi, bbs, NULL, NULL, NULL);
+ sched_scan (&ssi, bbs);
}
VEC_free (basic_block, heap, bbs);
pro_spec_checked_ds = INSN_SPEC_CHECKED_DS (has_dependence_data.pro);
pro_spec_checked_ds = ds_get_max_dep_weak (pro_spec_checked_ds);
- if (pro_spec_checked_ds != 0)
+ if (pro_spec_checked_ds != 0
+ && bitmap_bit_p (INSN_REG_SETS (has_dependence_data.pro), regno))
/* Merge BE_IN_SPEC bits into *DSP. */
*dsp = ds_full_merge (*dsp, pro_spec_checked_ds,
NULL_RTX, NULL_RTX);
return -1;
}
-/* Return seqno of the only predecessor of INSN. */
+/* Find the proper seqno for inserting at INSN by successors.
+ Return -1 if no successors with positive seqno exist. */
static int
-get_seqno_of_a_pred (insn_t insn)
+get_seqno_by_succs (rtx insn)
+{
+ basic_block bb = BLOCK_FOR_INSN (insn);
+ rtx tmp = insn, end = BB_END (bb);
+ int seqno;
+ insn_t succ = NULL;
+ succ_iterator si;
+
+ /* If a real insn follows INSN within this block, its seqno will do. */
+ while (tmp != end)
+ {
+ tmp = NEXT_INSN (tmp);
+ if (INSN_P (tmp))
+ return INSN_SEQNO (tmp);
+ }
+
+ seqno = INT_MAX;
+
+ /* Otherwise take the minimum positive seqno among the normal
+ successors of the block's end. */
+ FOR_EACH_SUCC_1 (succ, si, end, SUCCS_NORMAL)
+ if (INSN_SEQNO (succ) > 0)
+ seqno = MIN (seqno, INSN_SEQNO (succ));
+
+ if (seqno == INT_MAX)
+ return -1;
+
+ return seqno;
+}
+
+/* Compute seqno for INSN by its preds or succs. */
+static int
+get_seqno_for_a_jump (insn_t insn)
{
int seqno;
int n;
cfg_preds (BLOCK_FOR_INSN (insn), &preds, &n);
- gcc_assert (n == 1);
- seqno = INSN_SEQNO (preds[0]);
+ gcc_assert (n > 0);
+ /* For one predecessor, use simple method. */
+ if (n == 1)
+ seqno = INSN_SEQNO (preds[0]);
+ else
+ seqno = get_seqno_by_preds (insn);
free (preds);
}
}
+ /* We were unable to find a good seqno among preds. */
+ if (seqno < 0)
+ seqno = get_seqno_by_succs (insn);
+
+ gcc_assert (seqno >= 0);
+
return seqno;
}
int n, i, seqno;
while (tmp != head)
- if (INSN_P (tmp))
- return INSN_SEQNO (tmp);
- else
+ {
tmp = PREV_INSN (tmp);
+ if (INSN_P (tmp))
+ return INSN_SEQNO (tmp);
+ }
cfg_preds (bb, &preds, &n);
for (i = 0, seqno = -1; i < n; i++)
/* Data for each insn in current region. */
VEC (sel_insn_data_def, heap) *s_i_d = NULL;
-/* A vector for the insns we've emitted. */
-static insn_vec_t new_insns = NULL;
-
/* Extend data structures for insns from current region. */
static void
extend_insn_data (void)
init_expr (INSN_EXPR (insn), vinsn_create (insn, false), 0,
REG_BR_PROB_BASE, 0, 0, 0, 0, 0, 0, NULL, true, false, false,
false, true);
- INSN_SEQNO (insn) = get_seqno_of_a_pred (insn);
+ INSN_SEQNO (insn) = get_seqno_for_a_jump (insn);
init_first_time_insn_data (insn);
}
}
if (flags & INSN_INIT_TODO_LUID)
- sched_init_luids (NULL, NULL, NULL, insn);
+ {
+ sched_extend_luids ();
+ sched_init_insn_luid (insn);
+ }
if (flags & INSN_INIT_TODO_SSID)
{
free_lv_set (bb);
}
-/* Initialize an invalid AV_SET for BB.
- This set will be updated next time compute_av () process BB. */
+/* Mark AV_SET for BB as invalid, so this set will be updated the next time
+ compute_av() processes BB. This function is called when creating new basic
+ blocks, as well as for blocks (either new or existing) where new jumps are
+ created when the control flow is being updated. */
static void
invalidate_av_set (basic_block bb)
{
- gcc_assert (BB_AV_LEVEL (bb) <= 0
- && BB_AV_SET (bb) == NULL);
-
BB_AV_LEVEL (bb) = -1;
}
}
void
-sel_init_bbs (bb_vec_t bbs, basic_block bb)
+sel_init_bbs (bb_vec_t bbs)
{
const struct sched_scan_info_def ssi =
{
NULL /* init_insn */
};
- sched_scan (&ssi, bbs, bb, new_insns, NULL);
+ sched_scan (&ssi, bbs);
}
/* Restore notes for the whole region. */
sel_add_bb (basic_block bb)
{
/* Extend luids so that new notes will receive zero luids. */
- sched_init_luids (NULL, NULL, NULL, NULL);
+ sched_extend_luids ();
sched_init_bbs ();
- sel_init_bbs (last_added_blocks, NULL);
+ sel_init_bbs (last_added_blocks);
/* When bb is passed explicitly, the vector should contain
the only element that equals to bb; otherwise, the vector
end_sequence ();
- sched_init_luids (NULL, NULL, NULL, NULL);
+ sched_extend_luids ();
sched_extend_target ();
sched_deps_init (false);
rtx
create_copy_of_insn_rtx (rtx insn_rtx)
{
- rtx res;
+ rtx res, link;
if (DEBUG_INSN_P (insn_rtx))
return create_insn_rtx_from_pattern (copy_rtx (PATTERN (insn_rtx)),
res = create_insn_rtx_from_pattern (copy_rtx (PATTERN (insn_rtx)),
NULL_RTX);
+
+ /* Copy all REG_NOTES except REG_EQUAL/REG_EQUIV and REG_LABEL_OPERAND
+ since mark_jump_label will make them. REG_LABEL_TARGETs are created
+ there too, but are supposed to be sticky, so we copy them. */
+ for (link = REG_NOTES (insn_rtx); link; link = XEXP (link, 1))
+ if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND
+ && REG_NOTE_KIND (link) != REG_EQUAL
+ && REG_NOTE_KIND (link) != REG_EQUIV)
+ {
+ if (GET_CODE (link) == EXPR_LIST)
+ add_reg_note (res, REG_NOTE_KIND (link),
+ copy_insn_1 (XEXP (link, 0)));
+ else
+ add_reg_note (res, REG_NOTE_KIND (link), XEXP (link, 0));
+ }
+
return res;
}
NULL, /* begin_schedule_ready */
NULL, /* begin_move_insn */
NULL, /* advance_target_bb */
+
+ NULL,
+ NULL,
+
SEL_SCHED | NEW_BBS
};