* sched-int.h (IS_SPECULATION_CHECK_P, IS_SPECULATION_SIMPLE_CHECK_P):
New macros.
* sched-ebb.c (begin_schedule_ready): Use them.
* haifa-sched.c (schedule_insn, move_insn, try_ready,
add_to_speculative_block, create_check_block_twin, speculate_insn,
fix_jump_move, move_block_after_check): Ditto.
* sched-rgn.c (new_ready): Ditto.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@117516 138bc75d-0d04-0410-961f-82ee72b054a4
2006-10-06 Maxim Kuvyrkov <mkuvyrkov@ispras.ru>
+ * sched-int.h (IS_SPECULATION_CHECK_P, IS_SPECULATION_SIMPLE_CHECK_P):
+ New macros.
+ * sched-ebb.c (begin_schedule_ready): Use them.
+ * haifa-sched.c (schedule_insn, move_insn, try_ready,
+ add_to_speculative_block, create_check_block_twin, speculate_insn,
+ fix_jump_move, move_block_after_check): Ditto.
+ * sched-rgn.c (new_ready): Ditto.
+
+2006-10-06 Maxim Kuvyrkov <mkuvyrkov@ispras.ru>
+
PR rtl-optimization/29128
* sched-int.h (IS_SPECULATION_BRANCHY_CHECK_P): New macro.
* sched-ebb.c (advance_target_bb): Use it to fix condition to
resolve_dep (next, insn);
- if (!RECOVERY_BLOCK (insn)
- || RECOVERY_BLOCK (insn) == EXIT_BLOCK_PTR)
+ if (!IS_SPECULATION_BRANCHY_CHECK_P (insn))
gcc_assert (!jump_p
|| ((current_sched_info->flags & SCHED_RGN)
- && RECOVERY_BLOCK (insn)
- && RECOVERY_BLOCK (insn) != EXIT_BLOCK_PTR)
+ && IS_SPECULATION_BRANCHY_CHECK_P (insn))
|| (current_sched_info->flags & SCHED_EBB));
gcc_assert (BLOCK_FOR_INSN (PREV_INSN (insn)) == bb);
or we simply don't care (*ts & HARD_DEP). */
gcc_assert (!ORIG_PAT (next)
- || !RECOVERY_BLOCK (next)
- || RECOVERY_BLOCK (next) == EXIT_BLOCK_PTR);
+ || !IS_SPECULATION_BRANCHY_CHECK_P (next));
change_queue_index (next, QUEUE_NOWHERE);
return -1;
}
- else if (!(*ts & BEGIN_SPEC) && ORIG_PAT (next) && !RECOVERY_BLOCK (next))
+ else if (!(*ts & BEGIN_SPEC) && ORIG_PAT (next) && !IS_SPECULATION_CHECK_P (next))
/* We should change pattern of every previously speculative
instruction - and we determine if NEXT was speculative by using
- ORIG_PAT field. Except one case - simple checks have ORIG_PAT
- pat too, hence we also check for the RECOVERY_BLOCK. */
+ ORIG_PAT field. Except one case - speculation checks have ORIG_PAT
+ pat too, so skip them. */
{
change_pattern (next, ORIG_PAT (next));
ORIG_PAT (next) = 0;
- if (RECOVERY_BLOCK (check))
+ if (IS_SPECULATION_SIMPLE_CHECK_P (check))
{
create_check_block_twin (check, true);
link = LOG_LINKS (insn);
&& (DEP_STATUS (link) & DEP_TYPES) == DEP_TRUE);
check = XEXP (link, 0);
- gcc_assert (!RECOVERY_BLOCK (check) && !ORIG_PAT (check)
+
+ gcc_assert (!IS_SPECULATION_CHECK_P (check) && !ORIG_PAT (check)
&& QUEUE_INDEX (check) == QUEUE_NOWHERE);
rec = BLOCK_FOR_INSN (check);
gcc_assert (ORIG_PAT (insn)
&& (!mutate_p
- || (RECOVERY_BLOCK (insn) == EXIT_BLOCK_PTR
+ || (IS_SPECULATION_SIMPLE_CHECK_P (insn)
&& !(TODO_SPEC (insn) & SPECULATIVE))));
/* Create recovery block. */
|| (request & spec_info->mask) != request)
return -1;
- gcc_assert (!RECOVERY_BLOCK (insn));
+ gcc_assert (!IS_SPECULATION_CHECK_P (insn));
if (request & BE_IN_SPEC)
{
jump_bb_next = jump_bb->next_bb;
gcc_assert (current_sched_info->flags & SCHED_EBB
- || (RECOVERY_BLOCK (jump)
- && RECOVERY_BLOCK (jump) != EXIT_BLOCK_PTR));
+ || IS_SPECULATION_BRANCHY_CHECK_P (jump));
if (!NOTE_INSN_BASIC_BLOCK_P (BB_END (jump_bb_next)))
/* if jump_bb_next is not empty. */
update_bb_for_insn (jump_bb);
- gcc_assert (RECOVERY_BLOCK (jump)
- || RECOVERY_BLOCK (BB_END (jump_bb_next)));
+ gcc_assert (IS_SPECULATION_CHECK_P (jump)
+ || IS_SPECULATION_CHECK_P (BB_END (jump_bb_next)));
unlink_block (jump_bb_next);
link_block (jump_bb_next, bb);
gcc_assert (!e || !(e->flags & EDGE_COMPLEX));
gcc_assert (BLOCK_FOR_INSN (insn) == last_bb
- && !RECOVERY_BLOCK (insn)
+ && !IS_SPECULATION_CHECK_P (insn)
&& BB_HEAD (last_bb) != insn
&& BB_END (last_bb) == insn);
#define RECOVERY_BLOCK(INSN) (h_i_d[INSN_UID (INSN)].recovery_block)
#define ORIG_PAT(INSN) (h_i_d[INSN_UID (INSN)].orig_pat)
+/* INSN is either a simple or a branchy speculation check. */
+#define IS_SPECULATION_CHECK_P(INSN) (RECOVERY_BLOCK (INSN) != NULL)
+
+/* INSN is a speculation check that will simply reexecute the speculatively
+ scheduled instruction if the speculation fail. */
+#define IS_SPECULATION_SIMPLE_CHECK_P(INSN) \
+ (RECOVERY_BLOCK (INSN) == EXIT_BLOCK_PTR)
+
/* INSN is a speculation check that will branch to RECOVERY_BLOCK if the
speculation fail. Insns in that block will reexecute the speculatively
scheduled code and then will return immediatelly after INSN thus preserving
&& ((recog_memoized (next) >= 0
&& min_insn_conflict_delay (curr_state, next, next)
> PARAM_VALUE (PARAM_MAX_SCHED_INSN_CONFLICT_DELAY))
- || RECOVERY_BLOCK (next)
+ || IS_SPECULATION_CHECK_P (next)
|| !check_live (next, INSN_BB (next))
|| (not_ex_free = !is_exception_free (next, INSN_BB (next),
target_bb)))))