/* NOTE(review): this span is corrupted patch residue, not valid C.
   Lines beginning with '-' are unapplied unified-diff deletion markers
   (the patch evidently removes the setup_clocks_p bookkeeping that was
   keyed on reload_completed), and the brace structure is truncated.
   Do not hand-edit this fragment; restore the function from the
   upstream gcc/config/ia64/ia64.c before making changes.  */
/* Presumably the IA-64 implementation of the TARGET_SCHED_DFA_NEW_CYCLE
   scheduler hook (the name and the *sort_p out-parameter suggest it) —
   the leading storage class / return type is outside this view; confirm
   against the upstream source.  */
ia64_dfa_new_cycle (FILE *dump, int verbose, rtx insn, int last_clock,
int clock, int *sort_p)
{
- int setup_clocks_p = FALSE;
-
gcc_assert (insn && INSN_P (insn));
/* For debug insns the visible fragment clears *sort_p and returns 1,
   but the braces around this branch are missing in the residue —
   verify the exact extent of the conditional upstream.  */
if (DEBUG_INSN_P (insn))
*sort_p = 0;
return 1;
}
- else if (reload_completed)
- setup_clocks_p = TRUE;
/* Body of this last_scheduled_insn check is lost in the residue.  */
if (last_scheduled_insn)
{
}
}
}
- else if (reload_completed)
- setup_clocks_p = TRUE;
-
return 0;
}
/* NOTE(review): fragment of an insn-group-barrier emission routine —
   the enclosing function's header lies before this view and its tail
   runs past it, and lines starting with '-'/'+' are unapplied
   unified-diff markers (the visible diff drops the prev_insn tracking
   variable in favor of the seen_good_insn flag alone).  Not compilable
   as-is; restore from the upstream gcc/config/ia64/ia64.c.  */
rtx insn;
/* Nonzero when the next insn must be preceded by a group barrier.  */
int need_barrier_p = 0;
/* Nonzero once an insn that matters for bundling has been seen since
   the last barrier (see important_for_bundling_p below).  */
int seen_good_insn = 0;
- rtx prev_insn = NULL_RTX;
init_insn_group_barriers ();
/* Duplicate call retained verbatim — in the upstream source these two
   init_insn_group_barriers () calls sit on different control paths;
   the intervening lines are missing from this residue.  */
init_insn_group_barriers ();
seen_good_insn = 0;
need_barrier_p = 0;
- prev_insn = NULL_RTX;
}
/* Any real (non-debug) insn resets the barrier-tracking state.  */
else if (NONDEBUG_INSN_P (insn))
{
init_insn_group_barriers ();
seen_good_insn = 0;
need_barrier_p = 0;
- prev_insn = NULL_RTX;
}
/* Condition is truncated here (the mflag_sched_stop_bits_after_every_cycle
   clause is cut mid-expression) — the rest of this branch is lost.  */
else if (need_barrier_p || group_barrier_needed (insn)
|| (mflag_sched_stop_bits_after_every_cycle
if (recog_memoized (insn) >= 0
&& important_for_bundling_p (insn))
seen_good_insn = 1;
- prev_insn = NULL_RTX;
}
else if (recog_memoized (insn) >= 0
&& important_for_bundling_p (insn))
- {
- prev_insn = insn;
- seen_good_insn = 1;
- }
+ seen_good_insn = 1;
/* Calls and (inline or top-level) asms force a barrier before the
   next insn.  */
need_barrier_p = (GET_CODE (insn) == CALL_INSN
|| GET_CODE (PATTERN (insn)) == ASM_INPUT
|| asm_noperands (PATTERN (insn)) >= 0);