+/* It is possible that EBB scheduling eliminated some blocks (made them
+ empty). Place blocks from FIRST to LAST before BEFORE. */
+
+static void
+add_missing_bbs (rtx before, basic_block first, basic_block last)
+{
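+ /* Walk backwards from LAST to FIRST, emitting a fresh NOTE_INSN_BASIC_BLOCK
+ just before BEFORE for each block and making that note the block's only
+ contents. */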
+ for (; last != first->prev_bb; last = last->prev_bb)
+ {
+ before = emit_note_before (NOTE_INSN_BASIC_BLOCK, before);
+ NOTE_BASIC_BLOCK (before) = last;
+ BB_HEAD (last) = before;
+ BB_END (last) = before;
+ update_bb_for_insn (last);
+ }
+}
+
+/* Fix up the CFG after EBB scheduling. Re-recognize the basic
+ block boundaries between HEAD and TAIL and update the basic block
+ structures between BB and LAST. */
+
+static basic_block
+fix_basic_block_boundaries (basic_block bb, basic_block last, rtx head,
+ rtx tail)
+{
+ rtx insn = head;
+ rtx last_inside = BB_HEAD (bb);
+ rtx aftertail = NEXT_INSN (tail);
+
+ head = BB_HEAD (bb);
+
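+ /* Scan every insn from HEAD to TAIL. LAST_INSIDE is the last insn seen
+ that lies inside the block currently being rebuilt; it is NULL right
+ after a block boundary has been emitted. */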
+ for (; insn != aftertail; insn = NEXT_INSN (insn))
+ {
+ gcc_assert (!LABEL_P (insn));
+ /* Create new basic blocks just before their first insn. */
+ if (inside_basic_block_p (insn))
+ {
+ if (!last_inside)
+ {
+ rtx note;
+
+ /* Re-emit the basic block note for the newly found BB header. */
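+ /* With checking enabled, the assertion at the top of the loop
+ guarantees INSN is not a label, so the LABEL_P branch below
+ serves only as a safeguard. */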
+ if (LABEL_P (insn))
+ {
+ note = emit_note_after (NOTE_INSN_BASIC_BLOCK, insn);
+ head = insn;
+ last_inside = note;
+ }
+ else
+ {
+ note = emit_note_before (NOTE_INSN_BASIC_BLOCK, insn);
+ head = note;
+ last_inside = insn;
+ }
+ }
+ else
+ last_inside = insn;
+ }
+ /* A control flow insn terminates a basic block. It is possible
+ that we've eliminated some basic blocks (made them empty).
+ Find the proper basic block using BLOCK_FOR_INSN and arrange things in
+ a sensible way by inserting empty basic blocks as needed. */
+ if (control_flow_insn_p (insn) || (insn == tail && last_inside))
+ {
+ basic_block curr_bb = BLOCK_FOR_INSN (insn);
+ rtx note;
+
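+ /* INSN is TAIL but not a control flow insn, so the remaining
+ insns belong to the last block. */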
+ if (!control_flow_insn_p (insn))
+ curr_bb = last;
+ if (bb == last->next_bb)
+ {
+ edge f;
+ rtx h;
+ edge_iterator ei;
+
+ /* An obscure special case: a partially dead instruction was
+ scheduled after the last control flow instruction. In this
+ case we can create a new basic block; it is always exactly
+ the last basic block in the sequence. Handle it by splitting
+ the fallthru edge and repositioning the block.
+ This is somewhat hackish, but at least it avoids cut&paste.
+
+ A safer solution would be to bring the code into sequence,
+ do the split and re-emit it back, should this ever trigger a
+ problem. */
+
+ FOR_EACH_EDGE (f, ei, bb->prev_bb->succs)
+ if (f->flags & EDGE_FALLTHRU)
+ break;
+
+ if (f)
+ {
+ last = curr_bb = split_edge (f);
+ h = BB_HEAD (curr_bb);
+ BB_HEAD (curr_bb) = head;
+ BB_END (curr_bb) = insn;
+ /* Edge splitting created a misplaced BASIC_BLOCK note; kill
+ it. */
+ delete_insn (h);
+ }
+ /* It may happen that completely dead code was moved past the
+ unconditional jump. Kill it. */
+ else
+ {
+ rtx next = next_nonnote_insn (insn);
+ delete_insn_chain (head, insn);
+ /* Notes we keep in the stream may separate the barrier from the
+ jump; re-emit the barrier right after the jump. */
+ if (BARRIER_P (next))
+ {
+ emit_barrier_after (prev_nonnote_insn (head));
+ delete_insn (next);
+ }
+ insn = NULL;
+ }
+ }
+ else
+ {
+ BB_HEAD (curr_bb) = head;
+ BB_END (curr_bb) = insn;
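+ /* Any blocks between BB and CURR_BB were emptied by scheduling;
+ re-emit their basic block notes just before CURR_BB's head. */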
+ add_missing_bbs (BB_HEAD (curr_bb), bb, curr_bb->prev_bb);
+ }
+ note = LABEL_P (head) ? NEXT_INSN (head) : head;
+ NOTE_BASIC_BLOCK (note) = curr_bb;
+ update_bb_for_insn (curr_bb);
+ bb = curr_bb->next_bb;
+ last_inside = NULL;
+ if (!insn)
+ break;
+ }
+ }
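+ /* Give any remaining emptied blocks (BB through LAST) their notes,
+ placed before the head of the block following LAST. */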
+ add_missing_bbs (BB_HEAD (last->next_bb), bb, last);
+ return bb->prev_bb;
+}
+
+/* Returns the earliest block in EBB currently being processed where a
+ "similar load" 'insn2' is found, and hence LOAD_INSN can move
+ speculatively into the found block. All the following must hold:
+
+ (1) both loads have 1 base register (PFREE_CANDIDATEs).
+ (2) load_insn and load2 have a def-use dependence upon
+ the same insn 'insn1'.
+
+ From all these we can conclude that the two loads access memory
+ addresses that differ at most by a constant, and hence if moving
+ load_insn would cause an exception, it would have been caused by
+ load2 anyhow.
+
+ The function uses a list (given by LAST_BLOCK) of already-processed
+ blocks in the EBB. The list is formed in `add_deps_for_risky_insns'. */
+
+static basic_block
+earliest_block_with_similiar_load (basic_block last_block, rtx load_insn)
+{
+ rtx back_link;
+ basic_block bb, earliest_block = NULL;
+
+ for (back_link = LOG_LINKS (load_insn);
+ back_link;
+ back_link = XEXP (back_link, 1))
+ {
+ rtx insn1 = XEXP (back_link, 0);
+
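+ /* The mode of a dependence link encodes its kind; VOIDmode
+ marks a true (def-use) dependence. */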
+ if (GET_MODE (back_link) == VOIDmode)
+ {
+ /* Found a DEF-USE dependence (insn1, load_insn). */
+ rtx fore_link;
+
+ for (fore_link = INSN_DEPEND (insn1);
+ fore_link;
+ fore_link = XEXP (fore_link, 1))
+ {
+ rtx insn2 = XEXP (fore_link, 0);
+ basic_block insn2_block = BLOCK_FOR_INSN (insn2);
+
+ if (GET_MODE (fore_link) == VOIDmode)
+ {
+ if (earliest_block != NULL
+ && earliest_block->index < insn2_block->index)
+ continue;
+
+ /* Found a DEF-USE dependence (insn1, insn2). */
+ if (haifa_classify_insn (insn2) != PFREE_CANDIDATE)
+ /* insn2 not guaranteed to be a 1 base reg load. */
+ continue;
+
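+ /* Only accept insn2 if its block is not on the list of
+ already-processed blocks chained through bb->aux. */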
+ for (bb = last_block; bb; bb = bb->aux)
+ if (insn2_block == bb)
+ break;
+
+ if (!bb)
+ /* insn2 is the similar load. */
+ earliest_block = insn2_block;
+ }
+ }
+ }
+ }
+
+ return earliest_block;
+}
+
+/* The following function adds dependencies between jumps and risky
+ insns in the given EBB. */
+
+static void
+add_deps_for_risky_insns (rtx head, rtx tail)
+{
+ rtx insn, prev;
+ int class;
+ rtx last_jump = NULL_RTX;
+ rtx next_tail = NEXT_INSN (tail);
+ basic_block last_block = NULL, bb;
+
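+ /* Scan the EBB, tracking the most recent jump; each risky insn that
+ follows gets an anti dependence on that jump so it cannot be moved
+ above it. */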
+ for (insn = head; insn != next_tail; insn = NEXT_INSN (insn))
+ if (JUMP_P (insn))
+ {
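+ /* Chain this jump's block onto the list walked by
+ earliest_block_with_similiar_load; the list is unwound at
+ the end of this function. */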
+ bb = BLOCK_FOR_INSN (insn);
+ bb->aux = last_block;
+ last_block = bb;
+ last_jump = insn;
+ }
+ else if (INSN_P (insn) && last_jump != NULL_RTX)
+ {
+ class = haifa_classify_insn (insn);
+ prev = last_jump;
+ switch (class)
+ {
+ case PFREE_CANDIDATE:
+ if (flag_schedule_speculative_load)
+ {
+ bb = earliest_block_with_similiar_load (last_block, insn);
+ if (bb)
+ {
+ bb = bb->aux;
+ if (!bb)
+ break;
+ prev = BB_END (bb);
+ }
+ }
+ /* Fall through. */
+ case TRAP_RISKY:
+ case IRISKY:
+ case PRISKY_CANDIDATE:
+ /* ??? We could implement better checking of PRISKY_CANDIDATEs,
+ analogous to sched-rgn.c. */
+ /* We cannot change the mode of the backward
+ dependency because REG_DEP_ANTI has the lowest
+ rank. */
+ if (! sched_insns_conditions_mutex_p (insn, prev)
+ && add_dependence (insn, prev, REG_DEP_ANTI))
+ add_forward_dependence (prev, insn, REG_DEP_ANTI);
+ break;
+
+ default:
+ break;
+ }
+ }
+ /* Maintain the invariant that bb->aux is clear after use. */
+ while (last_block)
+ {
+ bb = last_block->aux;
+ last_block->aux = NULL;
+ last_block = bb;
+ }
+}
+