+Thu Jul 26 22:30:22 CEST 2001 Jan Hubicka <jh@suse.cz>
+
+ * rtl.h (cleanup_barriers): Declare.
+ * jump.c (cleanup_barriers): New function.
+ * toplev.c (rest_of_compilation): Call cleanup_barriers
+ before loop optimizer and after bb_reorder.
+
+ * flow.c (back_edge_of_syntactic_loop_p): New.
+ (split_edge): Use it.
+
2001-07-26 Rainer Orth <ro@TechFak.Uni-Bielefeld.DE>
* glimits.h (_MACH_MACHLIMITS_H_): Delete.
static int flow_loop_level_compute PARAMS ((struct loop *, int));
static int flow_loops_level_compute PARAMS ((struct loops *));
static void delete_dead_jumptables PARAMS ((void));
+static bool back_edge_of_syntactic_loop_p PARAMS ((basic_block, basic_block));
\f
/* Find basic blocks of the current function.
F is the first insn of the function and NREGS the number of register
return new_bb;
}
+/* Helper function for split_edge.  Return true if the edge from BB2 to
+   BB1 is the back edge of a syntactic loop, i.e. scanning forward from
+   the end of BB1 to the head of BB2 never closes more
+   NOTE_INSN_LOOP_END notes than NOTE_INSN_LOOP_BEG notes it opened.  */
+static bool
+back_edge_of_syntactic_loop_p (bb1, bb2)
+     basic_block bb1, bb2;
+{
+  rtx insn;
+  /* Loop-note nesting depth seen so far; must start balanced at zero
+     (reading it uninitialized is undefined behavior).  */
+  int count = 0;
+
+  /* A back edge targets a block that does not come after the source.  */
+  if (bb1->index > bb2->index)
+    return false;
+
+  /* A self loop is trivially a syntactic back edge.  */
+  if (bb1->index == bb2->index)
+    return true;
+
+  /* Walk the insns between the two blocks, tracking loop-note nesting;
+     bail out early once we have left the region opened before BB1.  */
+  for (insn = bb1->end; insn != bb2->head && count >= 0;
+       insn = NEXT_INSN (insn))
+    if (GET_CODE (insn) == NOTE)
+      {
+	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
+	  count++;
+	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
+	  count--;
+      }
+
+  return count >= 0;
+}
+
/* Split a (typically critical) edge. Return the new block.
Abort on abnormal edges.
if (old_succ != EXIT_BLOCK_PTR
&& PREV_INSN (old_succ->head)
&& GET_CODE (PREV_INSN (old_succ->head)) == NOTE
- && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
+ && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG
+ && !back_edge_of_syntactic_loop_p (old_succ, old_pred))
bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
PREV_INSN (old_succ->head));
else if (old_succ != EXIT_BLOCK_PTR)
LABEL_NUSES (XEXP (insn, 0))++;
}
\f
+/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
+   non-fallthru insn.  This is not generally true, as multiple barriers
+   may have crept in, or the BARRIER may be separated from the last
+   real insn by one or more NOTEs.
+
+   This simple pass moves barriers and removes duplicates so that the
+   old code is happy.
+ */
+void
+cleanup_barriers ()
+{
+  rtx insn, next, prev;
+
+  /* NEXT is cached up front because deleting/moving INSN below would
+     otherwise invalidate NEXT_INSN (insn).  */
+  for (insn = get_insns (); insn; insn = next)
+    {
+      next = NEXT_INSN (insn);
+      if (GET_CODE (insn) == BARRIER)
+	{
+	  prev = prev_nonnote_insn (insn);
+	  /* A BARRIER preceded only by notes (or at the very start of
+	     the insn stream) yields a NULL PREV; skip it rather than
+	     dereferencing a null pointer.  */
+	  if (prev == NULL_RTX)
+	    continue;
+	  if (GET_CODE (prev) == BARRIER)
+	    /* Duplicate barrier -- remove this one.  */
+	    delete_barrier (insn);
+	  else if (prev != PREV_INSN (insn))
+	    /* Intervening notes -- move the barrier right after the
+	       last real insn.  */
+	    reorder_insns (insn, insn, prev);
+	}
+    }
+}
+\f
void
copy_loop_headers (f)
rtx f;
extern enum rtx_code unsigned_condition PARAMS ((enum rtx_code));
extern enum rtx_code signed_condition PARAMS ((enum rtx_code));
extern void mark_jump_label PARAMS ((rtx, rtx, int));
+extern void cleanup_barriers PARAMS ((void));
/* In jump.c */
extern rtx squeeze_notes PARAMS ((rtx, rtx));
if (flag_rerun_loop_opt)
{
+ cleanup_barriers ();
+
/* We only want to perform unrolling once. */
loop_optimize (insns, rtl_dump_file, 0);
analysis code depends on this information. */
reg_scan (insns, max_reg_num (), 1);
}
+ cleanup_barriers ();
loop_optimize (insns, rtl_dump_file,
(flag_unroll_loops ? LOOP_UNROLL : 0) | LOOP_BCT);
/* CFG no longer kept up to date. */
purge_line_number_notes (insns);
+ cleanup_barriers ();
/* If a scheduling pass for delayed branches is to be done,
call the scheduling code. */