/* Instruction scheduling pass. Selective scheduler and pipeliner.
- Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011
+ Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2012
Free Software Foundation, Inc.
This file is part of GCC.
else
EXPR_TARGET_AVAILABLE (to) = -1;
}
+ else if (EXPR_TARGET_AVAILABLE (from) == 0
+ && EXPR_LHS (from)
+ && REG_P (EXPR_LHS (from))
+ && REGNO (EXPR_LHS (to)) != REGNO (EXPR_LHS (from)))
+ EXPR_TARGET_AVAILABLE (to) = -1;
else
EXPR_TARGET_AVAILABLE (to) &= EXPR_TARGET_AVAILABLE (from);
}
if (EXPR_SEPARABLE_P (expr))
{
if (REG_P (EXPR_LHS (expr))
- && bitmap_bit_p (lv_set, REGNO (EXPR_LHS (expr))))
+ && register_unavailable_p (lv_set, EXPR_LHS (expr)))
{
/* If it's an insn like r1 = use (r1, ...), and it exists in
different forms in each of the av_sets being merged, we can't say
miss a unifying code motion along both branches using a renamed
register, but it won't affect a code correctness since upon
an actual code motion a bookkeeping code would be generated. */
- if (bitmap_bit_p (VINSN_REG_USES (EXPR_VINSN (expr)),
- REGNO (EXPR_LHS (expr))))
+ if (register_unavailable_p (VINSN_REG_USES (EXPR_VINSN (expr)),
+ EXPR_LHS (expr)))
EXPR_TARGET_AVAILABLE (expr) = -1;
else
EXPR_TARGET_AVAILABLE (expr) = false;
/* Do not allow clobbering the address register of speculative
insns. */
- if (bitmap_bit_p (VINSN_REG_USES (EXPR_VINSN (expr)),
- expr_dest_regno (expr)))
+ if (register_unavailable_p (VINSN_REG_USES (EXPR_VINSN (expr)),
+ expr_dest_reg (expr)))
{
EXPR_TARGET_AVAILABLE (expr) = false;
return 2;
}
\f
+/* Returns true if REG (at least partially) is present in REGS. */
/* NOTE(review): REG may be a hard register spanning several regnos;
   END_REGNO () yields one past its last hard regno, so the loop below
   tests every regno the register covers.  The first regno is checked
   separately before END_REGNO is computed, presumably as a fast path
   for the common single-register case -- confirm against callers.  */
+bool
+register_unavailable_p (regset regs, rtx reg)
+{
+ unsigned regno, end_regno;
+
+ regno = REGNO (reg);
+ if (bitmap_bit_p (regs, regno))
+ return true;
+
+ end_regno = END_REGNO (reg);
+
/* Scan the remaining hard regnos covered by a multi-word REG.  */
+ while (++regno < end_regno)
+ if (bitmap_bit_p (regs, regno))
+ return true;
+
+ return false;
+}
+
/* Av set functions. */
/* Add a new element to av set SETP.
pro_spec_checked_ds = INSN_SPEC_CHECKED_DS (has_dependence_data.pro);
pro_spec_checked_ds = ds_get_max_dep_weak (pro_spec_checked_ds);
- if (pro_spec_checked_ds != 0)
+ if (pro_spec_checked_ds != 0
+ && bitmap_bit_p (INSN_REG_SETS (has_dependence_data.pro), regno))
/* Merge BE_IN_SPEC bits into *DSP. */
*dsp = ds_full_merge (*dsp, pro_spec_checked_ds,
NULL_RTX, NULL_RTX);
static bool
maybe_tidy_empty_bb (basic_block bb)
{
- basic_block succ_bb, pred_bb;
+ basic_block succ_bb, pred_bb, note_bb;
VEC (basic_block, heap) *dom_bbs;
edge e;
edge_iterator ei;
FOR_EACH_EDGE (e, ei, bb->preds)
if (e->flags & EDGE_COMPLEX)
return false;
+ else if (e->flags & EDGE_FALLTHRU)
+ {
+ rtx note;
+ /* If prev bb ends with asm goto, see if any of the
+ ASM_OPERANDS_LABELs don't point to the fallthru
+ label. Do not attempt to redirect it in that case. */
+ if (JUMP_P (BB_END (e->src))
+ && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
+ {
+ int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
+
+ for (i = 0; i < n; ++i)
+ if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (bb))
+ return false;
+ }
+ }
free_data_sets (bb);
pred_bb = NULL;
dom_bbs = NULL;
+ /* Save a pred/succ from the current region to attach the notes to. */
+ note_bb = NULL;
+ FOR_EACH_EDGE (e, ei, bb->preds)
+ if (in_current_region_p (e->src))
+ {
+ note_bb = e->src;
+ break;
+ }
+ if (note_bb == NULL)
+ note_bb = succ_bb;
+
/* Redirect all non-fallthru edges to the next bb. */
while (rescan_p)
{
else
{
/* This is a block without fallthru predecessor. Just delete it. */
- gcc_assert (pred_bb != NULL);
-
- if (in_current_region_p (pred_bb))
- move_bb_info (pred_bb, bb);
+ gcc_assert (note_bb);
+ move_bb_info (note_bb, bb);
remove_empty_bb (bb, true);
}
return -1;
}
-/* Return seqno of the only predecessor of INSN. */
+/* Find the proper seqno for inserting at INSN by successors.
+ Return -1 if no successors with positive seqno exist. */
+static int
+get_seqno_by_succs (rtx insn)
+{
+ basic_block bb = BLOCK_FOR_INSN (insn);
+ rtx tmp = insn, end = BB_END (bb);
+ int seqno;
+ insn_t succ = NULL;
+ succ_iterator si;
+
/* First look for a real insn after INSN inside its own basic block;
   if one exists, its seqno is used directly.  */
+ while (tmp != end)
+ {
+ tmp = NEXT_INSN (tmp);
+ if (INSN_P (tmp))
+ return INSN_SEQNO (tmp);
+ }
+
+ seqno = INT_MAX;
+
/* Otherwise take the minimum positive seqno over the normal
   successors of the block's end insn.  */
+ FOR_EACH_SUCC_1 (succ, si, end, SUCCS_NORMAL)
+ if (INSN_SEQNO (succ) > 0)
+ seqno = MIN (seqno, INSN_SEQNO (succ));
+
/* INT_MAX still intact means no successor had a positive seqno.  */
+ if (seqno == INT_MAX)
+ return -1;
+
+ return seqno;
+}
+
+/* Compute seqno for INSN by its preds or succs. */
static int
-get_seqno_of_a_pred (insn_t insn)
+get_seqno_for_a_jump (insn_t insn)
{
int seqno;
int n;
cfg_preds (BLOCK_FOR_INSN (insn), &preds, &n);
- gcc_assert (n == 1);
- seqno = INSN_SEQNO (preds[0]);
+ gcc_assert (n > 0);
+ /* For one predecessor, use simple method. */
+ if (n == 1)
+ seqno = INSN_SEQNO (preds[0]);
+ else
+ seqno = get_seqno_by_preds (insn);
free (preds);
}
}
+ /* We were unable to find a good seqno among preds. */
+ if (seqno < 0)
+ seqno = get_seqno_by_succs (insn);
+
+ gcc_assert (seqno >= 0);
+
return seqno;
}
int n, i, seqno;
while (tmp != head)
- if (INSN_P (tmp))
- return INSN_SEQNO (tmp);
- else
+ {
tmp = PREV_INSN (tmp);
+ if (INSN_P (tmp))
+ return INSN_SEQNO (tmp);
+ }
cfg_preds (bb, &preds, &n);
for (i = 0, seqno = -1; i < n; i++)
init_expr (INSN_EXPR (insn), vinsn_create (insn, false), 0,
REG_BR_PROB_BASE, 0, 0, 0, 0, 0, 0, NULL, true, false, false,
false, true);
- INSN_SEQNO (insn) = get_seqno_of_a_pred (insn);
+ INSN_SEQNO (insn) = get_seqno_for_a_jump (insn);
init_first_time_insn_data (insn);
}
free_lv_set (bb);
}
-/* Initialize an invalid AV_SET for BB.
- This set will be updated next time compute_av () process BB. */
+/* Mark AV_SET for BB as invalid, so this set will be updated the next time
+ compute_av() processes BB. This function is called when creating new basic
+ blocks, as well as for blocks (either new or existing) where new jumps are
+ created when the control flow is being updated. */
static void
invalidate_av_set (basic_block bb)
{
/* NOTE(review): the old assert required AV_LEVEL <= 0 and a NULL
   AV_SET; it is removed here, presumably because the function is now
   also called on blocks that already have a computed av_set -- confirm
   with the new call sites in this patch.  */
- gcc_assert (BB_AV_LEVEL (bb) <= 0
- && BB_AV_SET (bb) == NULL);
-
BB_AV_LEVEL (bb) = -1;
}
/* Move the note list of EMPTY_BB onto MERGE_BB.  The notes are appended
   only when MERGE_BB belongs to the current region; either way
   EMPTY_BB's note list is cleared afterwards, so when MERGE_BB is
   outside the region the notes are intentionally dropped.  */
static void
move_bb_info (basic_block merge_bb, basic_block empty_bb)
{
- gcc_assert (in_current_region_p (merge_bb));
-
- concat_note_lists (BB_NOTE_LIST (empty_bb),
- &BB_NOTE_LIST (merge_bb));
+ if (in_current_region_p (merge_bb))
+ concat_note_lists (BB_NOTE_LIST (empty_bb),
+ &BB_NOTE_LIST (merge_bb));
BB_NOTE_LIST (empty_bb) = NULL_RTX;
}
/* Return a copy of INSN_RTX as a fresh insn rtx with the same pattern.
   Debug insns are copied via the pattern only (early return); for other
   insns most REG_NOTES are now carried over to the copy as well.  */
rtx
create_copy_of_insn_rtx (rtx insn_rtx)
{
- rtx res;
+ rtx res, link;
if (DEBUG_INSN_P (insn_rtx))
return create_insn_rtx_from_pattern (copy_rtx (PATTERN (insn_rtx)),
res = create_insn_rtx_from_pattern (copy_rtx (PATTERN (insn_rtx)),
NULL_RTX);
+
+ /* Copy all REG_NOTES except REG_EQUAL/REG_EQUIV and REG_LABEL_OPERAND
+ since mark_jump_label will make them. REG_LABEL_TARGETs are created
+ there too, but are supposed to be sticky, so we copy them. */
+ for (link = REG_NOTES (insn_rtx); link; link = XEXP (link, 1))
+ if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND
+ && REG_NOTE_KIND (link) != REG_EQUAL
+ && REG_NOTE_KIND (link) != REG_EQUIV)
+ {
/* EXPR_LIST notes hold an rtx payload that must itself be
   deep-copied; INSN_LIST-style notes share the operand.  */
+ if (GET_CODE (link) == EXPR_LIST)
+ add_reg_note (res, REG_NOTE_KIND (link),
+ copy_insn_1 (XEXP (link, 0)));
+ else
+ add_reg_note (res, REG_NOTE_KIND (link), XEXP (link, 0));
+ }
+
+ return res;
}