
/* Common predicates for use with define_bypass.  */

+/* True if the dependency between OUT_INSN and IN_INSN is on the store
+ data not the address operand(s) of the store. IN_INSN must be
+ single_set. OUT_INSN must be either a single_set or a PARALLEL with
+ SETs inside. */
+
+int
+store_data_bypass_p (rtx out_insn, rtx in_insn)
+{
+ rtx out_set, in_set;
+
+ in_set = single_set (in_insn);
+ gcc_assert (in_set);
+
+ if (!MEM_P (SET_DEST (in_set)))
+ return false;
+
+ out_set = single_set (out_insn);
+ if (out_set)
+ {
+ if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
+ return false;
+ }
+ else
+ {
+ rtx out_pat;
+ int i;
+
+ out_pat = PATTERN (out_insn);
+ gcc_assert (GET_CODE (out_pat) == PARALLEL);
+
+ for (i = 0; i < XVECLEN (out_pat, 0); i++)
+ {
+ rtx exp = XVECEXP (out_pat, 0, i);
+
+ if (GET_CODE (exp) == CLOBBER)
+ continue;
+
+ gcc_assert (GET_CODE (exp) == SET);
+
+ if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
+ return false;
+ }
+ }
+
+ return true;
+}
+
+/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
+ condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
+ or multiple set; IN_INSN should be single_set for truth, but for convenience
+ of insn categorization may be any JUMP or CALL insn. */
+
+int
+if_test_bypass_p (rtx out_insn, rtx in_insn)
+{
+ rtx out_set, in_set;
+
+ in_set = single_set (in_insn);
+ if (! in_set)
+ {
+ gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
+ return false;
+ }
+
+ if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
+ return false;
+ in_set = SET_SRC (in_set);
+
+ out_set = single_set (out_insn);
+ if (out_set)
+ {
+ if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
+ || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
+ return false;
+ }
+ else
+ {
+ rtx out_pat;
+ int i;
+
+ out_pat = PATTERN (out_insn);
+ gcc_assert (GET_CODE (out_pat) == PARALLEL);
+
+ for (i = 0; i < XVECLEN (out_pat, 0); i++)
+ {
+ rtx exp = XVECEXP (out_pat, 0, i);
+
+ if (GET_CODE (exp) == CLOBBER)
+ continue;
+
+ gcc_assert (GET_CODE (exp) == SET);
+
+ if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
+ || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
+ return false;
+ }
+ }
+
+ return true;
+}
+\f
+static bool
+gate_handle_peephole2 (void)
+{
+ return (optimize > 0 && flag_peephole2);
+}
+
/* Execute function for the peephole2 pass.  Invokes the peephole2
   optimizer only when the target's machine description provides
   define_peephole2 patterns (HAVE_peephole2); otherwise this is a
   no-op.  */
static void
rest_of_handle_peephole2 (void)
{
#ifdef HAVE_peephole2
  peephole2_optimize (dump_file);
#endif
}
+
/* Pass descriptor for the post-reload peephole2 pass, gated by
   gate_handle_peephole2 and timed under TV_PEEPHOLE2.  */
struct tree_opt_pass pass_peephole2 =
{
  "peephole2", /* name */
  gate_handle_peephole2, /* gate */
  rest_of_handle_peephole2, /* execute */
  NULL, /* sub */
  NULL, /* next */
  0, /* static_pass_number */
  TV_PEEPHOLE2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_dump_func, /* todo_flags_finish */
  'z' /* letter */
};
+
/* Execute function shared by the "split1" and "split2" passes: split
   every insn in the current function.  NOTE(review): the argument 1
   presumably requests updating of liveness/flow information — confirm
   against split_all_insns' definition.  */
static void
rest_of_handle_split_all_insns (void)
{
  split_all_insns (1);
}
+
/* Pass descriptor for the unconditional "split1" insn-splitting pass.
   NOTE(review): tv_id is 0 (no dedicated timevar) here, while the
   other split passes below use TV_SHORTEN_BRANCH — verify this is
   intentional.  */
struct tree_opt_pass pass_split_all_insns =
{
  "split1", /* name */
  NULL, /* gate */
  rest_of_handle_split_all_insns, /* execute */
  NULL, /* sub */
  NULL, /* next */
  0, /* static_pass_number */
  0, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_dump_func, /* todo_flags_finish */
  0 /* letter */
};
+
+/* The placement of the splitting that we do for shorten_branches
+ depends on whether regstack is used by the target or not. */
+static bool
+gate_do_final_split (void)
+{
+#if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
+ return 1;
+#else
+ return 0;
+#endif
+}
+
/* Pass descriptor for the final "split3" pass run for
   shorten_branches, gated by gate_do_final_split.  */
struct tree_opt_pass pass_split_for_shorten_branches =
{
  "split3", /* name */
  gate_do_final_split, /* gate */
  split_all_insns_noflow, /* execute */
  NULL, /* sub */
  NULL, /* next */
  0, /* static_pass_number */
  TV_SHORTEN_BRANCH, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_dump_func, /* todo_flags_finish */
  0 /* letter */
};
+
+
+static bool
+gate_handle_split_before_regstack (void)
+{
+#if defined (HAVE_ATTR_length) && defined (STACK_REGS)
+ /* If flow2 creates new instructions which need splitting
+ and scheduling after reload is not done, they might not be
+ split until final which doesn't allow splitting
+ if HAVE_ATTR_length. */
+# ifdef INSN_SCHEDULING
+ return (optimize && !flag_schedule_insns_after_reload);
+# else
+ return (optimize);
+# endif
+#else
+ return 0;
+#endif
+}
+
/* Pass descriptor for the "split2" pass run before regstack
   conversion, gated by gate_handle_split_before_regstack.  */
struct tree_opt_pass pass_split_before_regstack =
{
  "split2", /* name */
  gate_handle_split_before_regstack, /* gate */
  rest_of_handle_split_all_insns, /* execute */
  NULL, /* sub */
  NULL, /* next */
  0, /* static_pass_number */
  TV_SHORTEN_BRANCH, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_dump_func, /* todo_flags_finish */
  0 /* letter */
};