+
+/* Helper function.
+ Tries to add speculative dependencies of type FS between instructions
+ in LINK list and TWIN. LINK is a forward-dependency (INSN_DEPEND)
+ list; TWIN becomes a new producer for every consumer found in it. */
+static void
+process_insn_depend_be_in_spec (rtx link, rtx twin, ds_t fs)
+{
+ for (; link; link = XEXP (link, 1))
+ {
+ ds_t ds;
+ rtx consumer;
+
+ consumer = XEXP (link, 0);
+
+ ds = DEP_STATUS (link);
+
+ if (/* If we want to create speculative dep. */
+ fs
+ /* And we can do that because this is a true dep. */
+ && (ds & DEP_TYPES) == DEP_TRUE)
+ {
+ gcc_assert (!(ds & BE_IN_SPEC));
+
+ if (/* If this dep can be overcome with 'begin speculation'. */
+ ds & BEGIN_SPEC)
+ /* Then we have a choice: keep the dep 'begin speculative'
+ or transform it into 'be in speculative'. */
+ {
+ if (/* In try_ready we assert that if insn once became ready
+ it can be removed from the ready (or queue) list only
+ due to backend decision. Hence we can't let the
+ probability of the speculative dep to decrease. */
+ dep_weak (ds) <= dep_weak (fs))
+ /* Transform it to be in speculative. */
+ ds = (ds & ~BEGIN_SPEC) | fs;
+ }
+ else
+ /* Mark the dep as 'be in speculative'. */
+ ds |= fs;
+ }
+
+ /* Re-create the dependence with TWIN as the new producer. */
+ add_back_forw_dep (consumer, twin, REG_NOTE_KIND (link), ds);
+ }
+}
+
+/* Generates recovery code for BEGIN speculative INSN. */
+static void
+begin_speculative_block (rtx insn)
+{
+ /* Update statistics counters (used in dumps). */
+ if (TODO_SPEC (insn) & BEGIN_DATA)
+ nr_begin_data++;
+ if (TODO_SPEC (insn) & BEGIN_CONTROL)
+ nr_begin_control++;
+
+ /* Emit a simple (non-branchy) check together with its recovery twin. */
+ create_check_block_twin (insn, false);
+
+ /* BEGIN speculation has now been handled for INSN. */
+ TODO_SPEC (insn) &= ~BEGIN_SPEC;
+}
+
+/* Generates recovery code for BE_IN speculative INSN. */
+static void
+add_to_speculative_block (rtx insn)
+{
+ ds_t ts;
+ rtx link, twins = NULL;
+
+ ts = TODO_SPEC (insn);
+ gcc_assert (!(ts & ~BE_IN_SPEC));
+
+ /* Update statistics counters (used in dumps). */
+ if (ts & BE_IN_DATA)
+ nr_be_in_data++;
+ if (ts & BE_IN_CONTROL)
+ nr_be_in_control++;
+
+ TODO_SPEC (insn) &= ~BE_IN_SPEC;
+ gcc_assert (!TODO_SPEC (insn));
+
+ DONE_SPEC (insn) |= ts;
+
+ /* First we convert all simple checks to branchy. */
+ for (link = LOG_LINKS (insn); link;)
+ {
+ rtx check;
+
+ check = XEXP (link, 0);
+
+ if (RECOVERY_BLOCK (check))
+ {
+ create_check_block_twin (check, true);
+ /* Conversion may rewrite the dependency list, so rescan it
+ from the top. */
+ link = LOG_LINKS (insn);
+ }
+ else
+ link = XEXP (link, 1);
+ }
+
+ clear_priorities (insn);
+
+ do
+ {
+ rtx link, check, twin;
+ basic_block rec;
+
+ link = LOG_LINKS (insn);
+ gcc_assert (!(DEP_STATUS (link) & BEGIN_SPEC)
+ && (DEP_STATUS (link) & BE_IN_SPEC)
+ && (DEP_STATUS (link) & DEP_TYPES) == DEP_TRUE);
+
+ check = XEXP (link, 0);
+ gcc_assert (!RECOVERY_BLOCK (check) && !ORIG_PAT (check)
+ && QUEUE_INDEX (check) == QUEUE_NOWHERE);
+
+ rec = BLOCK_FOR_INSN (check);
+
+ /* Emit a twin of INSN at the end of recovery block REC. */
+ twin = emit_insn_before (copy_rtx (PATTERN (insn)), BB_END (rec));
+ extend_global (twin);
+
+ RESOLVED_DEPS (twin) = copy_DEPS_LIST_list (RESOLVED_DEPS (insn));
+
+ if (sched_verbose && spec_info->dump)
+ /* INSN_BB (insn) isn't determined for twin insns yet.
+ So we can't use current_sched_info->print_insn. */
+ fprintf (spec_info->dump, ";;\t\tGenerated twin insn : %d/rec%d\n",
+ INSN_UID (twin), rec->index);
+
+ twins = alloc_INSN_LIST (twin, twins);
+
+ /* Add dependences between TWIN and all appropriate
+ instructions from REC. */
+ do
+ {
+ add_back_forw_dep (twin, check, REG_DEP_TRUE, DEP_TRUE);
+
+ do
+ {
+ /* Advance LINK to the next producer that lives in REC,
+ or run off the end of the list. */
+ link = XEXP (link, 1);
+ if (link)
+ {
+ check = XEXP (link, 0);
+ if (BLOCK_FOR_INSN (check) == rec)
+ break;
+ }
+ else
+ break;
+ }
+ while (1);
+ }
+ while (link);
+
+ process_insn_depend_be_in_spec (INSN_DEPEND (insn), twin, ts);
+
+ /* Remove all dependencies between INSN and insns in REC;
+ deleting restarts the list, so rescan after each removal. */
+ for (link = LOG_LINKS (insn); link;)
+ {
+ check = XEXP (link, 0);
+
+ if (BLOCK_FOR_INSN (check) == rec)
+ {
+ delete_back_forw_dep (insn, check);
+ link = LOG_LINKS (insn);
+ }
+ else
+ link = XEXP (link, 1);
+ }
+ }
+ while (LOG_LINKS (insn));
+
+ /* We can't add the dependence between insn and twin earlier because
+ that would make twin appear in the INSN_DEPEND (insn). */
+ while (twins)
+ {
+ rtx twin;
+
+ twin = XEXP (twins, 0);
+ calc_priorities (twin);
+ add_back_forw_dep (twin, insn, REG_DEP_OUTPUT, DEP_OUTPUT);
+
+ twin = XEXP (twins, 1);
+ free_INSN_LIST_node (twins);
+ twins = twin;
+ }
+}
+
+/* Extends and fills with zeros (only the new part) array pointed to by P.
+ NOTE(review): NEW_NMEMB * SIZE is not checked for overflow -- callers
+ must guarantee the product fits in size_t. */
+void *
+xrecalloc (void *p, size_t new_nmemb, size_t old_nmemb, size_t size)
+{
+ gcc_assert (new_nmemb >= old_nmemb);
+ p = XRESIZEVAR (void, p, new_nmemb * size);
+ memset (((char *) p) + old_nmemb * size, 0, (new_nmemb - old_nmemb) * size);
+ return p;
+}
+
+/* Return the probability of speculation success for the speculation
+ status DS. The result is the product of the individual weaknesses
+ of all speculation types present in DS, normalized back into the
+ [MIN_DEP_WEAK, MAX_DEP_WEAK] range. */
+static dw_t
+dep_weak (ds_t ds)
+{
+ ds_t res = 1, dt;
+ int n = 0;
+
+ /* Walk all speculation types from FIRST_SPEC_TYPE to LAST_SPEC_TYPE. */
+ dt = FIRST_SPEC_TYPE;
+ do
+ {
+ if (ds & dt)
+ {
+ res *= (ds_t) get_dep_weak (ds, dt);
+ n++;
+ }
+
+ if (dt == LAST_SPEC_TYPE)
+ break;
+ dt <<= SPEC_TYPE_SHIFT;
+ }
+ while (1);
+
+ gcc_assert (n);
+ /* Divide out MAX_DEP_WEAK (n - 1) times so the product stays scaled. */
+ while (--n)
+ res /= MAX_DEP_WEAK;
+
+ if (res < MIN_DEP_WEAK)
+ res = MIN_DEP_WEAK;
+
+ gcc_assert (res <= MAX_DEP_WEAK);
+
+ return (dw_t) res;
+}
+
+/* Helper function.
+ Find fallthru edge from PRED to its layout successor, or return NULL
+ if there is none. */
+static edge
+find_fallthru_edge (basic_block pred)
+{
+ edge e;
+ edge_iterator ei;
+ basic_block succ;
+
+ succ = pred->next_bb;
+ gcc_assert (succ->prev_bb == pred);
+
+ /* Scan whichever of the two edge lists is shorter. */
+ if (EDGE_COUNT (pred->succs) <= EDGE_COUNT (succ->preds))
+ {
+ FOR_EACH_EDGE (e, ei, pred->succs)
+ if (e->flags & EDGE_FALLTHRU)
+ {
+ gcc_assert (e->dest == succ);
+ return e;
+ }
+ }
+ else
+ {
+ FOR_EACH_EDGE (e, ei, succ->preds)
+ if (e->flags & EDGE_FALLTHRU)
+ {
+ gcc_assert (e->src == pred);
+ return e;
+ }
+ }
+
+ return NULL;
+}
+
+/* Initialize BEFORE_RECOVERY variable: the block after which recovery
+ blocks will be emitted. If the last block falls through to EXIT,
+ first redirect that fallthru through a jump so recovery blocks can
+ be placed before EXIT. */
+static void
+init_before_recovery (void)
+{
+ basic_block last;
+ edge e;
+
+ last = EXIT_BLOCK_PTR->prev_bb;
+ e = find_fallthru_edge (last);
+
+ if (e)
+ {
+ /* We create two basic blocks:
+ 1. Single instruction block is inserted right after E->SRC
+ and has jump to
+ 2. Empty block right before EXIT_BLOCK.
+ Between these two blocks recovery blocks will be emitted. */
+
+ basic_block single, empty;
+ rtx x, label;
+
+ single = create_empty_bb (last);
+ empty = create_empty_bb (single);
+
+ /* Copy profile data and partition from LAST to both new blocks. */
+ single->count = last->count;
+ empty->count = last->count;
+ single->frequency = last->frequency;
+ empty->frequency = last->frequency;
+ BB_COPY_PARTITION (single, last);
+ BB_COPY_PARTITION (empty, last);
+
+ redirect_edge_succ (e, single);
+ make_single_succ_edge (single, empty, 0);
+ make_single_succ_edge (empty, EXIT_BLOCK_PTR,
+ EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
+
+ /* SINGLE holds an explicit jump to EMPTY. */
+ label = block_label (empty);
+ x = emit_jump_insn_after (gen_jump (label), BB_END (single));
+ JUMP_LABEL (x) = label;
+ LABEL_NUSES (label)++;
+ extend_global (x);
+
+ emit_barrier_after (x);
+
+ add_block (empty, 0);
+ add_block (single, 0);
+
+ before_recovery = single;
+
+ if (sched_verbose >= 2 && spec_info->dump)
+ fprintf (spec_info->dump,
+ ";;\t\tFixed fallthru to EXIT : %d->>%d->%d->>EXIT\n",
+ last->index, single->index, empty->index);
+ }
+ else
+ before_recovery = last;
+}
+
+/* Returns new recovery block. It is emitted right after BEFORE_RECOVERY
+ (initializing that variable lazily on the first call), and becomes the
+ new BEFORE_RECOVERY so successive recovery blocks chain together. */
+static basic_block
+create_recovery_block (void)
+{
+ rtx label;
+ basic_block rec;
+
+ added_recovery_block_p = true;
+
+ if (!before_recovery)
+ init_before_recovery ();
+
+ label = gen_label_rtx ();
+ gcc_assert (BARRIER_P (NEXT_INSN (BB_END (before_recovery))));
+ label = emit_label_after (label, NEXT_INSN (BB_END (before_recovery)));
+
+ rec = create_basic_block (label, label, before_recovery);
+ emit_barrier_after (BB_END (rec));
+
+ /* Recovery code is cold -- keep it out of the hot partition. */
+ if (BB_PARTITION (before_recovery) != BB_UNPARTITIONED)
+ BB_SET_PARTITION (rec, BB_COLD_PARTITION);
+
+ if (sched_verbose && spec_info->dump)
+ fprintf (spec_info->dump, ";;\t\tGenerated recovery block rec%d\n",
+ rec->index);
+
+ before_recovery = rec;
+
+ return rec;
+}
+
+/* This function creates recovery code for INSN. If MUTATE_P is nonzero,
+ INSN is a simple check, that should be converted to branchy one. */
+static void
+create_check_block_twin (rtx insn, bool mutate_p)
+{
+ basic_block rec;
+ rtx label, check, twin, link;
+ ds_t fs;
+
+ gcc_assert (ORIG_PAT (insn)
+ && (!mutate_p
+ || (RECOVERY_BLOCK (insn) == EXIT_BLOCK_PTR
+ && !(TODO_SPEC (insn) & SPECULATIVE))));
+
+ /* Create recovery block. A branchy check needs a real block;
+ otherwise EXIT_BLOCK_PTR marks a simple (non-branchy) check. */
+ if (mutate_p || targetm.sched.needs_block_p (insn))
+ {
+ rec = create_recovery_block ();
+ label = BB_HEAD (rec);
+ }
+ else
+ {
+ rec = EXIT_BLOCK_PTR;
+ label = 0;
+ }
+
+ /* Emit CHECK. */
+ check = targetm.sched.gen_check (insn, label, mutate_p);
+
+ if (rec != EXIT_BLOCK_PTR)
+ {
+ /* To have mem_reg alive at the beginning of second_bb,
+ we emit check BEFORE insn, so insn after splitting
+ insn will be at the beginning of second_bb, which will
+ provide us with the correct life information. */
+ check = emit_jump_insn_before (check, insn);
+ JUMP_LABEL (check) = label;
+ LABEL_NUSES (label)++;
+ }
+ else
+ check = emit_insn_before (check, insn);
+
+ /* Extend data structures. */
+ extend_all (check);
+ RECOVERY_BLOCK (check) = rec;
+
+ if (sched_verbose && spec_info->dump)
+ fprintf (spec_info->dump, ";;\t\tGenerated check insn : %s\n",
+ (*current_sched_info->print_insn) (check, 0));
+
+ gcc_assert (ORIG_PAT (insn));
+
+ /* Initialize TWIN (twin is a duplicate of original instruction
+ in the recovery block). */
+ if (rec != EXIT_BLOCK_PTR)
+ {
+ rtx link;
+
+ /* Output dependencies of INSN become true dependencies of CHECK:
+ the check reads the value those producers write. */
+ for (link = RESOLVED_DEPS (insn); link; link = XEXP (link, 1))
+ if (DEP_STATUS (link) & DEP_OUTPUT)
+ {
+ RESOLVED_DEPS (check) =
+ alloc_DEPS_LIST (XEXP (link, 0), RESOLVED_DEPS (check), DEP_TRUE);
+ PUT_REG_NOTE_KIND (RESOLVED_DEPS (check), REG_DEP_TRUE);
+ }
+
+ twin = emit_insn_after (ORIG_PAT (insn), BB_END (rec));
+ extend_global (twin);
+
+ if (sched_verbose && spec_info->dump)
+ /* INSN_BB (insn) isn't determined for twin insns yet.
+ So we can't use current_sched_info->print_insn. */
+ fprintf (spec_info->dump, ";;\t\tGenerated twin insn : %d/rec%d\n",
+ INSN_UID (twin), rec->index);
+ }
+ else
+ {
+ /* Simple check: the check itself plays the role of the twin. */
+ ORIG_PAT (check) = ORIG_PAT (insn);
+ HAS_INTERNAL_DEP (check) = 1;
+ twin = check;
+ /* ??? We probably should change all OUTPUT dependencies to
+ (TRUE | OUTPUT). */
+ }
+
+ RESOLVED_DEPS (twin) = copy_DEPS_LIST_list (RESOLVED_DEPS (insn));
+
+ if (rec != EXIT_BLOCK_PTR)
+ /* In case of branchy check, fix CFG. */
+ {
+ basic_block first_bb, second_bb;
+ rtx jump;
+ edge e;
+ int edge_flags;
+
+ first_bb = BLOCK_FOR_INSN (check);
+ e = split_block (first_bb, check);
+ /* split_block emits note if *check == BB_END. Probably it
+ is better to rip that note off. */
+ gcc_assert (e->src == first_bb);
+ second_bb = e->dest;
+
+ /* This is fixing of incoming edge. */
+ /* ??? Which other flags should be specified? */
+ if (BB_PARTITION (first_bb) != BB_PARTITION (rec))
+ /* Partition type is the same, if it is "unpartitioned". */
+ edge_flags = EDGE_CROSSING;
+ else
+ edge_flags = 0;
+
+ e = make_edge (first_bb, rec, edge_flags);
+
+ add_block (second_bb, first_bb);
+
+ gcc_assert (NOTE_INSN_BASIC_BLOCK_P (BB_HEAD (second_bb)));
+ label = block_label (second_bb);
+ jump = emit_jump_insn_after (gen_jump (label), BB_END (rec));
+ JUMP_LABEL (jump) = label;
+ LABEL_NUSES (label)++;
+ extend_global (jump);
+
+ if (BB_PARTITION (second_bb) != BB_PARTITION (rec))
+ /* Partition type is the same, if it is "unpartitioned". */
+ {
+ /* Rewritten from cfgrtl.c. */
+ if (flag_reorder_blocks_and_partition
+ && targetm.have_named_sections
+ /*&& !any_condjump_p (jump)*/)
+ /* any_condjump_p (jump) == false.
+ We don't need the same note for the check because
+ any_condjump_p (check) == true. */
+ {
+ REG_NOTES (jump) = gen_rtx_EXPR_LIST (REG_CROSSING_JUMP,
+ NULL_RTX,
+ REG_NOTES (jump));
+ }
+ edge_flags = EDGE_CROSSING;
+ }
+ else
+ edge_flags = 0;
+
+ make_single_succ_edge (rec, second_bb, edge_flags);
+
+ add_block (rec, EXIT_BLOCK_PTR);
+ }
+
+ /* Move backward dependences from INSN to CHECK and
+ move forward dependences from INSN to TWIN. */
+ for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
+ {
+ ds_t ds;
+
+ /* If BEGIN_DATA: [insn ~~TRUE~~> producer]:
+ check --TRUE--> producer ??? or ANTI ???
+ twin --TRUE--> producer
+ twin --ANTI--> check
+
+ If BEGIN_CONTROL: [insn ~~ANTI~~> producer]:
+ check --ANTI--> producer
+ twin --ANTI--> producer
+ twin --ANTI--> check
+
+ If BE_IN_SPEC: [insn ~~TRUE~~> producer]:
+ check ~~TRUE~~> producer
+ twin ~~TRUE~~> producer
+ twin --ANTI--> check */
+
+ ds = DEP_STATUS (link);
+
+ if (ds & BEGIN_SPEC)
+ {
+ gcc_assert (!mutate_p);
+ ds &= ~BEGIN_SPEC;
+ }
+
+ if (rec != EXIT_BLOCK_PTR)
+ {
+ add_back_forw_dep (check, XEXP (link, 0), REG_NOTE_KIND (link), ds);
+ add_back_forw_dep (twin, XEXP (link, 0), REG_NOTE_KIND (link), ds);
+ }
+ else
+ add_back_forw_dep (check, XEXP (link, 0), REG_NOTE_KIND (link), ds);
+ }
+
+ /* Deleting restarts the dependency list, so rescan after each delete. */
+ for (link = LOG_LINKS (insn); link;)
+ if ((DEP_STATUS (link) & BEGIN_SPEC)
+ || mutate_p)
+ /* We can delete this dep only if we totally overcome it with
+ BEGIN_SPECULATION. */
+ {
+ delete_back_forw_dep (insn, XEXP (link, 0));
+ link = LOG_LINKS (insn);
+ }
+ else
+ link = XEXP (link, 1);
+
+ fs = 0;
+
+ /* Fields (DONE_SPEC (x) & BEGIN_SPEC) and CHECK_SPEC (x) are set only
+ here. */
+
+ gcc_assert (!DONE_SPEC (insn));
+
+ if (!mutate_p)
+ {
+ ds_t ts = TODO_SPEC (insn);
+
+ DONE_SPEC (insn) = ts & BEGIN_SPEC;
+ CHECK_SPEC (check) = ts & BEGIN_SPEC;
+
+ /* BEGIN speculation on INSN induces BE_IN speculation of the
+ same weakness on its consumers. */
+ if (ts & BEGIN_DATA)
+ fs = set_dep_weak (fs, BE_IN_DATA, get_dep_weak (ts, BEGIN_DATA));
+ if (ts & BEGIN_CONTROL)
+ fs = set_dep_weak (fs, BE_IN_CONTROL, get_dep_weak (ts, BEGIN_CONTROL));
+ }
+ else
+ CHECK_SPEC (check) = CHECK_SPEC (insn);
+
+ /* Future speculations: call the helper. */
+ process_insn_depend_be_in_spec (INSN_DEPEND (insn), twin, fs);
+
+ if (rec != EXIT_BLOCK_PTR)
+ {
+ /* Which types of dependencies should we use here is,
+ generally, machine-dependent question... But, for now,
+ it is not. */
+
+ if (!mutate_p)
+ {
+ add_back_forw_dep (check, insn, REG_DEP_TRUE, DEP_TRUE);
+ add_back_forw_dep (twin, insn, REG_DEP_OUTPUT, DEP_OUTPUT);
+ }
+ else
+ {
+ if (spec_info->dump)
+ fprintf (spec_info->dump, ";;\t\tRemoved simple check : %s\n",
+ (*current_sched_info->print_insn) (insn, 0));
+
+ /* Remove all forward dependencies of INSN before removing it. */
+ for (link = INSN_DEPEND (insn); link; link = INSN_DEPEND (insn))
+ delete_back_forw_dep (XEXP (link, 0), insn);
+
+ if (QUEUE_INDEX (insn) != QUEUE_NOWHERE)
+ try_ready (check);
+
+ sched_remove_insn (insn);
+ }
+
+ add_back_forw_dep (twin, check, REG_DEP_ANTI, DEP_ANTI);
+ }
+ else
+ add_back_forw_dep (check, insn, REG_DEP_TRUE, DEP_TRUE | DEP_OUTPUT);
+
+ if (!mutate_p)
+ /* Fix priorities. If MUTATE_P is nonzero, this is not necessary,
+ because it'll be done later in add_to_speculative_block. */
+ {
+ clear_priorities (twin);
+ calc_priorities (twin);
+ }
+}
+
+/* Removes dependency between instructions in the recovery block REC
+ and usual region instructions. It keeps inner dependences so it
+ won't be necessary to recompute them. */
+static void
+fix_recovery_deps (basic_block rec)
+{
+ rtx note, insn, link, jump, ready_list = 0;
+ bitmap_head in_ready;
+
+ /* IN_READY guards against putting the same consumer on READY_LIST
+ twice. */
+ bitmap_initialize (&in_ready, 0);
+
+ /* NOTE - a basic block note. */
+ note = NEXT_INSN (BB_HEAD (rec));
+ gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
+ insn = BB_END (rec);
+ gcc_assert (JUMP_P (insn));
+ insn = PREV_INSN (insn);
+
+ /* Walk the block backwards from the insn before the jump to the
+ basic block note. */
+ do
+ {
+ for (link = INSN_DEPEND (insn); link;)
+ {
+ rtx consumer;
+
+ consumer = XEXP (link, 0);
+
+ if (BLOCK_FOR_INSN (consumer) != rec)
+ {
+ delete_back_forw_dep (consumer, insn);
+
+ if (!bitmap_bit_p (&in_ready, INSN_LUID (consumer)))
+ {
+ ready_list = alloc_INSN_LIST (consumer, ready_list);
+ bitmap_set_bit (&in_ready, INSN_LUID (consumer));
+ }
+
+ /* Deleting restarts the list; rescan from the top. */
+ link = INSN_DEPEND (insn);
+ }
+ else
+ {
+ gcc_assert ((DEP_STATUS (link) & DEP_TYPES) == DEP_TRUE);
+
+ link = XEXP (link, 1);
+ }
+ }
+
+ insn = PREV_INSN (insn);
+ }
+ while (insn != note);
+
+ bitmap_clear (&in_ready);
+
+ /* Try to add instructions to the ready or queue list. */
+ for (link = ready_list; link; link = XEXP (link, 1))
+ try_ready (XEXP (link, 0));
+ free_INSN_LIST_list (&ready_list);
+
+ /* Fixing jump's dependences. */
+ insn = BB_HEAD (rec);
+ jump = BB_END (rec);
+
+ gcc_assert (LABEL_P (insn));
+ insn = NEXT_INSN (insn);
+
+ gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
+ add_jump_dependencies (insn, jump);
+}
+
+/* The function saves line notes at the beginning of block B into
+ line_note_head[B->index]. */
+static void
+associate_line_notes_with_blocks (basic_block b)
+{
+ rtx line;
+
+ /* Search backwards for the nearest preceding line note. */
+ for (line = BB_HEAD (b); line; line = PREV_INSN (line))
+ if (NOTE_P (line) && NOTE_LINE_NUMBER (line) > 0)
+ {
+ line_note_head[b->index] = line;
+ break;
+ }
+ /* Do a forward search as well, since we won't get to see the first
+ notes in a basic block. */
+ for (line = BB_HEAD (b); line; line = NEXT_INSN (line))
+ {
+ if (INSN_P (line))
+ break;
+ if (NOTE_P (line) && NOTE_LINE_NUMBER (line) > 0)
+ line_note_head[b->index] = line;
+ }
+}
+
+/* Changes pattern of the INSN to NEW_PAT. The change must be valid
+ (validate_change is asserted to succeed). Cached per-insn scheduler
+ data that depends on the pattern is invalidated. */
+static void
+change_pattern (rtx insn, rtx new_pat)
+{
+ int t;
+
+ t = validate_change (insn, &PATTERN (insn), new_pat, 0);
+ gcc_assert (t);
+ /* Invalidate INSN_COST, so it'll be recalculated. */
+ INSN_COST (insn) = -1;
+ /* Invalidate INSN_TICK, so it'll be recalculated. */
+ INSN_TICK (insn) = INVALID_TICK;
+ dfa_clear_single_insn_cache (insn);
+}
+
+
+/* -1 - can't speculate,
+ 0 - for speculation with REQUEST mode it is OK to use
+ current instruction pattern,
+ 1 - need to change pattern for *NEW_PAT to be speculative. */
+static int
+speculate_insn (rtx insn, ds_t request, rtx *new_pat)
+{
+ gcc_assert (current_sched_info->flags & DO_SPECULATION
+ && (request & SPECULATIVE));
+
+ if (!NONJUMP_INSN_P (insn)
+ || HAS_INTERNAL_DEP (insn)
+ || SCHED_GROUP_P (insn)
+ || side_effects_p (PATTERN (insn))
+ || (request & spec_info->mask) != request)
+ return -1;
+
+ gcc_assert (!RECOVERY_BLOCK (insn));
+
+ if (request & BE_IN_SPEC)
+ {
+ if (may_trap_p (PATTERN (insn)))
+ return -1;
+
+ if (!(request & BEGIN_SPEC))
+ return 0;
+ }
+
+ return targetm.sched.speculate_insn (insn, request & BEGIN_SPEC, new_pat);
+}
+
+/* Print some information about block BB, which starts with HEAD and
+ ends with TAIL, before scheduling it.
+ I is zero, if scheduler is about to start with the fresh ebb. */
+static void
+dump_new_block_header (int i, basic_block bb, rtx head, rtx tail)
+{
+ if (!i)
+ fprintf (sched_dump,
+ ";; ======================================================\n");
+ else
+ fprintf (sched_dump,
+ ";; =====================ADVANCING TO=====================\n");
+ fprintf (sched_dump,
+ ";; -- basic block %d from %d to %d -- %s reload\n",
+ bb->index, INSN_UID (head), INSN_UID (tail),
+ (reload_completed ? "after" : "before"));
+ fprintf (sched_dump,
+ ";; ======================================================\n");
+ fprintf (sched_dump, "\n");
+}
+
+/* Unlink basic block notes and labels and saves them, so they
+ can be easily restored. We unlink basic block notes in EBB to
+ provide back-compatibility with the previous code, as target backends
+ assume, that there'll be only instructions between
+ current_sched_info->{head and tail}. We restore these notes as soon
+ as we can.
+ FIRST (LAST) is the first (last) basic block in the ebb.
+ NB: In usual case (FIRST == LAST) nothing is really done. */
+void
+unlink_bb_notes (basic_block first, basic_block last)
+{
+ /* We DON'T unlink basic block notes of the first block in the ebb. */
+ if (first == last)
+ return;
+
+ bb_header = xmalloc (last_basic_block * sizeof (*bb_header));
+
+ /* Make a sentinel. */
+ if (last->next_bb != EXIT_BLOCK_PTR)
+ bb_header[last->next_bb->index] = 0;
+
+ /* Walk the blocks backwards from LAST down to (but excluding) the
+ original FIRST, splicing each block's label/note out of the
+ instruction stream and remembering it in bb_header. */
+ first = first->next_bb;
+ do
+ {
+ rtx prev, label, note, next;
+
+ label = BB_HEAD (last);
+ if (LABEL_P (label))
+ note = NEXT_INSN (label);
+ else
+ note = label;
+ gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
+
+ prev = PREV_INSN (label);
+ next = NEXT_INSN (note);
+ gcc_assert (prev && next);
+
+ /* Splice [label, note] out of the chain. */
+ NEXT_INSN (prev) = next;
+ PREV_INSN (next) = prev;
+
+ bb_header[last->index] = label;
+
+ if (last == first)
+ break;
+
+ last = last->prev_bb;
+ }
+ while (1);
+}
+
+/* Restore basic block notes saved by unlink_bb_notes.
+ FIRST is the first basic block in the ebb. */
+static void
+restore_bb_notes (basic_block first)
+{
+ if (!bb_header)
+ return;
+
+ /* We DON'T unlink basic block notes of the first block in the ebb. */
+ first = first->next_bb;
+ /* Remember: FIRST is actually a second basic block in the ebb. */
+
+ /* Stop at the sentinel (zero entry) or at EXIT. */
+ while (first != EXIT_BLOCK_PTR
+ && bb_header[first->index])
+ {
+ rtx prev, label, note, next;
+
+ label = bb_header[first->index];
+ prev = PREV_INSN (label);
+ next = NEXT_INSN (prev);
+
+ if (LABEL_P (label))
+ note = NEXT_INSN (label);
+ else
+ note = label;
+ gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
+
+ bb_header[first->index] = 0;
+
+ /* Splice [label, note] back into the chain between PREV and NEXT. */
+ NEXT_INSN (prev) = label;
+ NEXT_INSN (note) = next;
+ PREV_INSN (next) = note;
+
+ first = first->next_bb;
+ }
+
+ free (bb_header);
+ bb_header = 0;
+}
+
+/* Extend per basic block data structures of the scheduler.
+ If BB is NULL, initialize structures for the whole CFG.
+ Otherwise, initialize them for the just created BB. */
+static void
+extend_bb (basic_block bb)
+{
+ rtx insn;
+
+ if (write_symbols != NO_DEBUG)
+ {
+ /* Save-line-note-head:
+ Determine the line-number at the start of each basic block.
+ This must be computed and saved now, because after a basic block's
+ predecessor has been scheduled, it is impossible to accurately
+ determine the correct line number for the first insn of the block. */
+ line_note_head = xrecalloc (line_note_head, last_basic_block,
+ old_last_basic_block,
+ sizeof (*line_note_head));
+
+ if (bb)
+ associate_line_notes_with_blocks (bb);
+ else
+ FOR_EACH_BB (bb)
+ associate_line_notes_with_blocks (bb);
+ }
+
+ old_last_basic_block = last_basic_block;
+
+ /* Grow the global-live-at-{start,end} arrays if they are in use. */
+ if (current_sched_info->flags & USE_GLAT)
+ {
+ glat_start = xrealloc (glat_start,
+ last_basic_block * sizeof (*glat_start));
+ glat_end = xrealloc (glat_end, last_basic_block * sizeof (*glat_end));
+ }
+
+ /* The following is done to keep current_sched_info->next_tail non null. */
+
+ insn = BB_END (EXIT_BLOCK_PTR->prev_bb);
+ if (NEXT_INSN (insn) == 0
+ || (!NOTE_P (insn)
+ && !LABEL_P (insn)
+ /* Don't emit a NOTE if it would end up before a BARRIER. */
+ && !BARRIER_P (NEXT_INSN (insn))))
+ {
+ emit_note_after (NOTE_INSN_DELETED, insn);
+ /* Make insn to appear outside BB. */
+ BB_END (EXIT_BLOCK_PTR->prev_bb) = insn;
+ }
+}
+
+/* Add a basic block BB to extended basic block EBB.
+ If EBB is EXIT_BLOCK_PTR, then BB is recovery block.
+ If EBB is NULL, then BB should be a new region. */
+void
+add_block (basic_block bb, basic_block ebb)
+{
+ gcc_assert (current_sched_info->flags & DETACH_LIFE_INFO
+ && bb->il.rtl->global_live_at_start == 0
+ && bb->il.rtl->global_live_at_end == 0);
+
+ extend_bb (bb);
+
+ /* New block has no live info yet. */
+ glat_start[bb->index] = 0;
+ glat_end[bb->index] = 0;
+
+ if (current_sched_info->add_block)
+ /* This changes only data structures of the front-end. */
+ current_sched_info->add_block (bb, ebb);
+}
+
+/* Helper function.
+ Fix CFG after both in- and inter-block movement of
+ control_flow_insn_p JUMP: update the BB_END markers of the blocks
+ around the jump's new position. */
+static void
+fix_jump_move (rtx jump)
+{
+ basic_block bb, jump_bb, jump_bb_next;
+
+ bb = BLOCK_FOR_INSN (PREV_INSN (jump));
+ jump_bb = BLOCK_FOR_INSN (jump);
+ jump_bb_next = jump_bb->next_bb;
+
+ gcc_assert (current_sched_info->flags & SCHED_EBB
+ || (RECOVERY_BLOCK (jump)
+ && RECOVERY_BLOCK (jump) != EXIT_BLOCK_PTR));
+
+ if (!NOTE_INSN_BASIC_BLOCK_P (BB_END (jump_bb_next)))
+ /* if jump_bb_next is not empty. */
+ BB_END (jump_bb) = BB_END (jump_bb_next);
+
+ if (BB_END (bb) != PREV_INSN (jump))
+ /* Then there are instruction after jump that should be placed
+ to jump_bb_next. */
+ BB_END (jump_bb_next) = BB_END (bb);
+ else
+ /* Otherwise jump_bb_next is empty. */
+ BB_END (jump_bb_next) = NEXT_INSN (BB_HEAD (jump_bb_next));
+
+ /* To make assertion in move_insn happy. */
+ BB_END (bb) = PREV_INSN (jump);
+
+ update_bb_for_insn (jump_bb_next);
+}
+
+/* Fix CFG after interblock movement of control_flow_insn_p JUMP. */
+static void
+move_block_after_check (rtx jump)
+{
+ basic_block bb, jump_bb, jump_bb_next;
+ VEC(edge,gc) *t;
+
+ bb = BLOCK_FOR_INSN (PREV_INSN (jump));
+ jump_bb = BLOCK_FOR_INSN (jump);
+ jump_bb_next = jump_bb->next_bb;
+
+ update_bb_for_insn (jump_bb);
+
+ gcc_assert (RECOVERY_BLOCK (jump)
+ || RECOVERY_BLOCK (BB_END (jump_bb_next)));
+
+ /* Move jump_bb_next into layout position right after BB. */
+ unlink_block (jump_bb_next);
+ link_block (jump_bb_next, bb);
+
+ /* Rotate the successor edge vectors of the three blocks. */
+ t = bb->succs;
+ bb->succs = 0;
+ move_succs (&(jump_bb->succs), bb);
+ move_succs (&(jump_bb_next->succs), jump_bb);
+ move_succs (&t, jump_bb_next);
+
+ if (current_sched_info->fix_recovery_cfg)
+ current_sched_info->fix_recovery_cfg
+ (bb->index, jump_bb->index, jump_bb_next->index);
+}
+
+/* Helper function for move_block_after_check.
+ This functions attaches edge vector pointed to by SUCCSP to
+ block TO, updating each edge's source, and clears *SUCCSP. */
+static void
+move_succs (VEC(edge,gc) **succsp, basic_block to)
+{
+ edge e;
+ edge_iterator ei;
+
+ gcc_assert (to->succs == 0);
+
+ to->succs = *succsp;
+
+ /* Every moved edge now originates from TO. */
+ FOR_EACH_EDGE (e, ei, to->succs)
+ e->src = to;
+
+ *succsp = 0;
+}
+
+/* Initialize GLAT (global_live_at_{start, end}) structures.
+ GLAT structures are used to substitute global_live_{start, end}
+ regsets during scheduling. This is necessary to use such functions as
+ split_block (), as they assume consistency of register live information. */
+static void
+init_glat (void)
+{
+ basic_block bb;
+
+ FOR_ALL_BB (bb)
+ init_glat1 (bb);
+}
+
+/* Helper function for init_glat: record BB's live regsets in the
+ glat arrays and, when life info is detached, clear them on BB. */
+static void
+init_glat1 (basic_block bb)
+{
+ gcc_assert (bb->il.rtl->global_live_at_start != 0
+ && bb->il.rtl->global_live_at_end != 0);
+
+ glat_start[bb->index] = bb->il.rtl->global_live_at_start;
+ glat_end[bb->index] = bb->il.rtl->global_live_at_end;
+
+ if (current_sched_info->flags & DETACH_LIFE_INFO)
+ {
+ /* Ownership of the regsets moves to the glat arrays. */
+ bb->il.rtl->global_live_at_start = 0;
+ bb->il.rtl->global_live_at_end = 0;
+ }
+}
+
+/* Attach reg_live_info back to basic blocks.
+ Also save regsets, that should not have been changed during scheduling,
+ for checking purposes (see check_reg_live). */
+void
+attach_life_info (void)
+{
+ basic_block bb;
+
+ FOR_ALL_BB (bb)
+ attach_life_info1 (bb);
+}
+
+/* Helper function for attach_life_info: reattach (or allocate fresh)
+ live regsets for a single block BB. */
+static void
+attach_life_info1 (basic_block bb)
+{
+ gcc_assert (bb->il.rtl->global_live_at_start == 0
+ && bb->il.rtl->global_live_at_end == 0);
+
+ if (glat_start[bb->index])
+ {
+ gcc_assert (glat_end[bb->index]);
+
+ /* Hand ownership of the saved regsets back to BB. */
+ bb->il.rtl->global_live_at_start = glat_start[bb->index];
+ bb->il.rtl->global_live_at_end = glat_end[bb->index];
+
+ /* Make them NULL, so they won't be freed in free_glat. */
+ glat_start[bb->index] = 0;
+ glat_end[bb->index] = 0;
+
+#ifdef ENABLE_CHECKING
+ /* Keep copies of regsets that must be unchanged by scheduling,
+ so check_reg_live can compare against them. */
+ if (bb->index < NUM_FIXED_BLOCKS
+ || current_sched_info->region_head_or_leaf_p (bb, 0))
+ {
+ glat_start[bb->index] = ALLOC_REG_SET (&reg_obstack);
+ COPY_REG_SET (glat_start[bb->index],
+ bb->il.rtl->global_live_at_start);
+ }
+
+ if (bb->index < NUM_FIXED_BLOCKS
+ || current_sched_info->region_head_or_leaf_p (bb, 1))
+ {
+ glat_end[bb->index] = ALLOC_REG_SET (&reg_obstack);
+ COPY_REG_SET (glat_end[bb->index], bb->il.rtl->global_live_at_end);
+ }
+#endif
+ }
+ else
+ {
+ gcc_assert (!glat_end[bb->index]);
+
+ /* BB was created during scheduling; allocate empty regsets. */
+ bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
+ bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
+ }
+}
+
+/* Free GLAT information (the checking copies of the regsets, if any,
+ and the glat arrays themselves). */
+static void
+free_glat (void)
+{
+#ifdef ENABLE_CHECKING
+ if (current_sched_info->flags & DETACH_LIFE_INFO)
+ {
+ basic_block bb;
+
+ FOR_ALL_BB (bb)
+ {
+ if (glat_start[bb->index])
+ FREE_REG_SET (glat_start[bb->index]);
+ if (glat_end[bb->index])
+ FREE_REG_SET (glat_end[bb->index]);
+ }
+ }
+#endif
+
+ free (glat_start);
+ free (glat_end);
+}
+
+/* Remove INSN from the instruction stream.
+ INSN should not have any dependencies left. */
+static void
+sched_remove_insn (rtx insn)
+{
+ change_queue_index (insn, QUEUE_NOWHERE);
+ current_sched_info->add_remove_insn (insn, 1);
+ remove_insn (insn);
+}
+
+/* Clear priorities of all instructions, that are
+ forward dependent on INSN (i.e. INSN's producers, whose priorities
+ transitively depend on INSN's). */
+static void
+clear_priorities (rtx insn)
+{
+ rtx link;
+
+ for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
+ {
+ rtx pro;
+
+ pro = XEXP (link, 0);
+ if (INSN_PRIORITY_KNOWN (pro))
+ {
+ INSN_PRIORITY_KNOWN (pro) = 0;
+ /* Recurse: the producer's own producers are stale too. */
+ clear_priorities (pro);
+ }
+ }
+}
+
+/* Recompute priorities of instructions, whose priorities might have been
+ changed due to changes in INSN (mirror of clear_priorities). */
+static void
+calc_priorities (rtx insn)
+{
+ rtx link;
+
+ for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
+ {
+ rtx pro;
+
+ pro = XEXP (link, 0);
+ if (!INSN_PRIORITY_KNOWN (pro))
+ {
+ priority (pro);
+ /* Recurse up the producer chain. */
+ calc_priorities (pro);
+ }
+ }
+}
+
+
+/* Add dependences between JUMP and other instructions in the recovery
+ block. INSN is the first insn the recovery block. */
+static void
+add_jump_dependencies (rtx insn, rtx jump)
+{
+ do
+ {
+ insn = NEXT_INSN (insn);
+ if (insn == jump)
+ break;
+
+ if (!INSN_DEPEND (insn))
+ add_back_forw_dep (jump, insn, REG_DEP_ANTI, DEP_ANTI);
+ }
+ while (1);
+ gcc_assert (LOG_LINKS (jump));
+}
+
+/* Return the NOTE_INSN_BASIC_BLOCK of BB (skipping a leading label,
+ if present). */
+static rtx
+bb_note (basic_block bb)
+{
+ rtx note;
+
+ note = BB_HEAD (bb);
+ if (LABEL_P (note))
+ note = NEXT_INSN (note);
+
+ gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
+ return note;
+}
+
+#ifdef ENABLE_CHECKING
+extern void debug_spec_status (ds_t);
+
+/* Dump information about the dependence status S to stderr
+ (callable from the debugger). */
+void
+debug_spec_status (ds_t s)
+{
+ FILE *f = stderr;
+
+ /* Speculation types with their weaknesses. */
+ if (s & BEGIN_DATA)
+ fprintf (f, "BEGIN_DATA: %d; ", get_dep_weak (s, BEGIN_DATA));
+ if (s & BE_IN_DATA)
+ fprintf (f, "BE_IN_DATA: %d; ", get_dep_weak (s, BE_IN_DATA));
+ if (s & BEGIN_CONTROL)
+ fprintf (f, "BEGIN_CONTROL: %d; ", get_dep_weak (s, BEGIN_CONTROL));
+ if (s & BE_IN_CONTROL)
+ fprintf (f, "BE_IN_CONTROL: %d; ", get_dep_weak (s, BE_IN_CONTROL));
+
+ if (s & HARD_DEP)
+ fprintf (f, "HARD_DEP; ");
+
+ /* Plain dependence types. */
+ if (s & DEP_TRUE)
+ fprintf (f, "DEP_TRUE; ");
+ if (s & DEP_ANTI)
+ fprintf (f, "DEP_ANTI; ");
+ if (s & DEP_OUTPUT)
+ fprintf (f, "DEP_OUTPUT; ");
+
+ fprintf (f, "\n");
+}
+
+/* Helper function for check_cfg.
+ Return nonzero, if edge vector pointed to by EL has edge with TYPE in
+ its flags. */
+static int
+has_edge_p (VEC(edge,gc) *el, int type)
+{
+ edge e;
+ edge_iterator ei;
+
+ FOR_EACH_EDGE (e, ei, el)
+ if (e->flags & type)
+ return 1;
+ return 0;
+}
+
+/* Check few properties of CFG between HEAD and TAIL.
+ If HEAD (TAIL) is NULL check from the beginning (till the end) of the
+ instruction stream. */
+static void
+check_cfg (rtx head, rtx tail)
+{
+ rtx next_tail;
+ basic_block bb = 0;
+ int not_first = 0, not_last;
+
+ if (head == NULL)
+ head = get_insns ();
+ if (tail == NULL)
+ tail = get_last_insn ();
+ next_tail = NEXT_INSN (tail);
+
+ do
+ {
+ not_last = head != tail;
+
+ /* The doubly-linked insn chain must be consistent. */
+ if (not_first)
+ gcc_assert (NEXT_INSN (PREV_INSN (head)) == head);
+ if (not_last)
+ gcc_assert (PREV_INSN (NEXT_INSN (head)) == head);
+
+ /* Detect the start of a new basic block: a label, or a bb note
+ not preceded by a label. */
+ if (LABEL_P (head)
+ || (NOTE_INSN_BASIC_BLOCK_P (head)
+ && (!not_first
+ || (not_first && !LABEL_P (PREV_INSN (head))))))
+ {
+ gcc_assert (bb == 0);
+ bb = BLOCK_FOR_INSN (head);
+ if (bb != 0)
+ gcc_assert (BB_HEAD (bb) == head);
+ else
+ /* This is the case of jump table. See inside_basic_block_p (). */
+ gcc_assert (LABEL_P (head) && !inside_basic_block_p (head));
+ }
+
+ if (bb == 0)
+ {
+ /* Between basic blocks only non-block insns may appear. */
+ gcc_assert (!inside_basic_block_p (head));
+ head = NEXT_INSN (head);
+ }
+ else
+ {
+ gcc_assert (inside_basic_block_p (head)
+ || NOTE_P (head));
+ gcc_assert (BLOCK_FOR_INSN (head) == bb);
+
+ if (LABEL_P (head))
+ {
+ head = NEXT_INSN (head);
+ gcc_assert (NOTE_INSN_BASIC_BLOCK_P (head));
+ }
+ else
+ {
+ if (control_flow_insn_p (head))
+ {
+ /* A control-flow insn must end its block. */
+ gcc_assert (BB_END (bb) == head);
+
+ if (any_uncondjump_p (head))
+ gcc_assert (EDGE_COUNT (bb->succs) == 1
+ && BARRIER_P (NEXT_INSN (head)));
+ else if (any_condjump_p (head))
+ gcc_assert (EDGE_COUNT (bb->succs) > 1
+ && !BARRIER_P (NEXT_INSN (head)));
+ }
+ if (BB_END (bb) == head)
+ {
+ if (EDGE_COUNT (bb->succs) > 1)
+ gcc_assert (control_flow_insn_p (head)
+ || has_edge_p (bb->succs, EDGE_COMPLEX));
+ bb = 0;
+ }
+
+ head = NEXT_INSN (head);
+ }
+ }
+
+ not_first = 1;
+ }
+ while (head != next_tail);
+
+ /* The last block must have been closed. */
+ gcc_assert (bb == 0);
+}
+
+/* Perform a few consistency checks of flags in different data structures. */
+static void
+check_sched_flags (void)
+{
+ unsigned int f = current_sched_info->flags;
+
+ /* Software pipelining of stalled insns is incompatible with
+ speculation. */
+ if (flag_sched_stalled_insns)
+ gcc_assert (!(f & DO_SPECULATION));
+ if (f & DO_SPECULATION)
+ gcc_assert (!flag_sched_stalled_insns
+ && (f & DETACH_LIFE_INFO)
+ && spec_info
+ && spec_info->mask);
+ if (f & DETACH_LIFE_INFO)
+ gcc_assert (f & USE_GLAT);
+}
+
+/* Check global_live_at_{start, end} regsets against the copies saved
+ in the glat arrays by attach_life_info1.
+ If FATAL_P is TRUE, then abort execution at the first failure.
+ Otherwise, print diagnostics to STDERR (this mode is for calling
+ from debugger). */
+void
+check_reg_live (bool fatal_p)
+{
+ basic_block bb;
+
+ FOR_ALL_BB (bb)
+ {
+ int i;
+
+ i = bb->index;
+
+ if (glat_start[i])
+ {
+ bool b = bitmap_equal_p (bb->il.rtl->global_live_at_start,
+ glat_start[i]);
+
+ if (!b)
+ {
+ gcc_assert (!fatal_p);
+
+ fprintf (stderr, ";; check_reg_live_at_start (%d) failed.\n", i);
+ }
+ }
+
+ if (glat_end[i])
+ {
+ bool b = bitmap_equal_p (bb->il.rtl->global_live_at_end,
+ glat_end[i]);
+
+ if (!b)
+ {
+ gcc_assert (!fatal_p);
+
+ fprintf (stderr, ";; check_reg_live_at_end (%d) failed.\n", i);
+ }
+ }
+ }
+}
+#endif /* ENABLE_CHECKING */
+