+/* Basic block BB_COPY was created by code duplication. Add phi node
+ arguments for edges going out of BB_COPY. The blocks that were
+ duplicated have rbi->duplicated set to one. */
+
+void
+add_phi_args_after_copy_bb (basic_block bb_copy)
+{
+ basic_block bb, dest;
+ edge e, e_copy;
+ edge_iterator ei, ei2;
+ tree phi, phi_copy, phi_next, def;
+
+ bb = bb_copy->rbi->original;
+
+ FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
+ {
+ if (!phi_nodes (e_copy->dest))
+ continue;
+
+ /* Find the block in the original region that corresponds to the
+ destination of E_COPY. */
+ if (e_copy->dest->rbi->duplicated)
+ dest = e_copy->dest->rbi->original;
+ else
+ dest = e_copy->dest;
+
+ e = find_edge (bb, dest);
+ if (!e)
+ {
+ /* During loop unrolling the target of the latch edge is copied.
+ In this case we are not looking for edge to dest, but to
+ duplicated block whose original was dest. Use a separate
+ iterator EI2 here: reusing EI would clobber the state of the
+ still-active outer walk over bb_copy->succs. */
+ FOR_EACH_EDGE (e, ei2, bb->succs)
+ if (e->dest->rbi->duplicated
+ && e->dest->rbi->original == dest)
+ break;
+
+ gcc_assert (e != NULL);
+ }
+
+ /* Copy the phi argument on E to the corresponding phi node argument
+ on E_COPY. PHI_NEXT must be fetched before add_phi_arg: when E and
+ E_COPY share their destination, PHI and PHI_COPY denote the same
+ node and add_phi_arg may reallocate it. */
+ for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
+ phi;
+ phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
+ {
+ phi_next = PHI_CHAIN (phi);
+
+ gcc_assert (PHI_RESULT (phi) == PHI_RESULT (phi_copy));
+ def = PHI_ARG_DEF_FROM_EDGE (phi, e);
+ add_phi_arg (&phi_copy, def, e_copy);
+ }
+ }
+}
+
+/* Blocks in REGION_COPY array of length N_REGION were created by
+ duplication of basic blocks. Add phi node arguments for edges
+ going from these blocks. */
+
+void
+add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
+{
+ unsigned ix;
+
+ /* Mark every copy first: add_phi_args_after_copy_bb inspects the
+ duplicated flag of other blocks in the copied region. */
+ for (ix = 0; ix < n_region; ix++)
+ region_copy[ix]->rbi->duplicated = 1;
+
+ for (ix = 0; ix < n_region; ix++)
+ add_phi_args_after_copy_bb (region_copy[ix]);
+
+ /* Clear the marks again. */
+ for (ix = 0; ix < n_region; ix++)
+ region_copy[ix]->rbi->duplicated = 0;
+}
+
+/* Maps the old ssa name FROM_NAME to TO_NAME. */
+
+struct ssa_name_map_entry
+{
+ tree from_name;	/* Original ssa name; hash key (keyed by its version). */
+ tree to_name;	/* Replacement ssa name. */
+};
+
+/* Hash function for ssa_name_map_entry. */
+
+static hashval_t
+ssa_name_map_entry_hash (const void *entry)
+{
+ const struct ssa_name_map_entry *map_entry = entry;
+
+ /* SSA name versions are unique, so the version itself is the hash. */
+ return SSA_NAME_VERSION (map_entry->from_name);
+}
+
+/* Equality function for ssa_name_map_entry; compares a table entry with
+ the ssa name used as the lookup key. */
+
+static int
+ssa_name_map_entry_eq (const void *in_table, const void *ssa_name)
+{
+ const struct ssa_name_map_entry *map_entry = in_table;
+ return map_entry->from_name == ssa_name;
+}
+
+/* Allocate duplicates of ssa names in list DEFINITIONS and store the mapping
+ to MAP. */
+
+void
+allocate_ssa_names (bitmap definitions, htab_t *map)
+{
+ tree name;
+ struct ssa_name_map_entry *entry;
+ PTR *slot;
+ unsigned ver;
+ bitmap_iterator bi;
+
+ /* Create the hash table lazily on first use. */
+ if (!*map)
+ *map = htab_create (10, ssa_name_map_entry_hash,
+ ssa_name_map_entry_eq, free);
+
+ EXECUTE_IF_SET_IN_BITMAP (definitions, 0, ver, bi)
+ {
+ name = ssa_name (ver);
+ slot = htab_find_slot_with_hash (*map, name, SSA_NAME_VERSION (name),
+ INSERT);
+ if (!*slot)
+ {
+ /* First time we see this name -- create its map entry. */
+ entry = xmalloc (sizeof (struct ssa_name_map_entry));
+ entry->from_name = name;
+ *slot = entry;
+ }
+ else
+ entry = *slot;
+ /* (Re)target the mapping to a fresh duplicate of NAME. */
+ entry->to_name = duplicate_ssa_name (name, SSA_NAME_DEF_STMT (name));
+ }
+}
+
+/* Rewrite the definition DEF in statement STMT to new ssa name as specified
+ by the mapping MAP. Definitions without a map entry are left alone. */
+
+static void
+rewrite_to_new_ssa_names_def (def_operand_p def, tree stmt, htab_t map)
+{
+ struct ssa_name_map_entry *entry;
+ tree name = DEF_FROM_PTR (def);
+
+ gcc_assert (TREE_CODE (name) == SSA_NAME);
+
+ entry = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
+ if (entry)
+ {
+ /* Replace the definition and make STMT the defining statement of
+ the replacement name. */
+ SET_DEF (def, entry->to_name);
+ SSA_NAME_DEF_STMT (entry->to_name) = stmt;
+ }
+}
+
+/* Rewrite the USE to new ssa name as specified by the mapping MAP.
+ Operands that are not ssa names, or names without a map entry, are
+ left alone. */
+
+static void
+rewrite_to_new_ssa_names_use (use_operand_p use, htab_t map)
+{
+ struct ssa_name_map_entry *entry;
+ tree name = USE_FROM_PTR (use);
+
+ if (TREE_CODE (name) != SSA_NAME)
+ return;
+
+ entry = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
+ if (entry)
+ SET_USE (use, entry->to_name);
+}
+
+/* Rewrite the ssa names in basic block BB to new ones as specified by the
+ mapping MAP. */
+
+void
+rewrite_to_new_ssa_names_bb (basic_block bb, htab_t map)
+{
+ unsigned i;
+ edge e;
+ edge_iterator ei;
+ tree phi, stmt;
+ block_stmt_iterator bsi;
+ use_optype uses;
+ vuse_optype vuses;
+ def_optype defs;
+ v_may_def_optype v_may_defs;
+ v_must_def_optype v_must_defs;
+ stmt_ann_t ann;
+
+ /* Check whether BB has an abnormal predecessor. NOTE(review): the
+ "if (e)" test below relies on FOR_EACH_EDGE leaving E == NULL when
+ the walk finishes without hitting the break -- confirm against the
+ edge iterator implementation. */
+ FOR_EACH_EDGE (e, ei, bb->preds)
+ if (e->flags & EDGE_ABNORMAL)
+ break;
+
+ /* Rewrite the results of the phi nodes of BB; if BB has an abnormal
+ predecessor, the replacement names must carry the abnormal-phi flag. */
+ for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+ {
+ rewrite_to_new_ssa_names_def (PHI_RESULT_PTR (phi), phi, map);
+ if (e)
+ SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)) = 1;
+ }
+
+ /* Rewrite all real and virtual operands of the statements in BB. */
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ {
+ stmt = bsi_stmt (bsi);
+ get_stmt_operands (stmt);
+ ann = stmt_ann (stmt);
+
+ uses = USE_OPS (ann);
+ for (i = 0; i < NUM_USES (uses); i++)
+ rewrite_to_new_ssa_names_use (USE_OP_PTR (uses, i), map);
+
+ defs = DEF_OPS (ann);
+ for (i = 0; i < NUM_DEFS (defs); i++)
+ rewrite_to_new_ssa_names_def (DEF_OP_PTR (defs, i), stmt, map);
+
+ vuses = VUSE_OPS (ann);
+ for (i = 0; i < NUM_VUSES (vuses); i++)
+ rewrite_to_new_ssa_names_use (VUSE_OP_PTR (vuses, i), map);
+
+ /* V_MAY_DEFs have both a use and a def part. */
+ v_may_defs = V_MAY_DEF_OPS (ann);
+ for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
+ {
+ rewrite_to_new_ssa_names_use
+ (V_MAY_DEF_OP_PTR (v_may_defs, i), map);
+ rewrite_to_new_ssa_names_def
+ (V_MAY_DEF_RESULT_PTR (v_may_defs, i), stmt, map);
+ }
+
+ /* V_MUST_DEFs have a result (def) and a killed operand (use). */
+ v_must_defs = V_MUST_DEF_OPS (ann);
+ for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
+ {
+ rewrite_to_new_ssa_names_def
+ (V_MUST_DEF_RESULT_PTR (v_must_defs, i), stmt, map);
+ rewrite_to_new_ssa_names_use
+ (V_MUST_DEF_KILL_PTR (v_must_defs, i), map);
+ }
+ }
+
+ /* Rewrite the phi node arguments on edges leaving BB, keeping the
+ abnormal-phi flag up to date for arguments on abnormal edges. */
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
+ {
+ rewrite_to_new_ssa_names_use
+ (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), map);
+
+ if (e->flags & EDGE_ABNORMAL)
+ {
+ tree op = PHI_ARG_DEF_FROM_EDGE (phi, e);
+ SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op) = 1;
+ }
+ }
+}
+
+/* Rewrite the ssa names in N_REGION blocks REGION to the new ones as specified
+ by the mapping MAP. */
+
+void
+rewrite_to_new_ssa_names (basic_block *region, unsigned n_region, htab_t map)
+{
+ unsigned ix;
+
+ /* Process every block of the region in turn. */
+ for (ix = 0; ix < n_region; ix++)
+ rewrite_to_new_ssa_names_bb (region[ix], map);
+}
+
+/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
+ important exit edge EXIT. By important we mean that no SSA name defined
+ inside region is live over the other exit edges of the region. All entry
+ edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
+ to the duplicate of the region. SSA form, dominance and loop information
+ is updated. The new basic blocks are stored to REGION_COPY in the same
+ order as they had in REGION, provided that REGION_COPY is not NULL.
+ The function returns false if it is unable to copy the region,
+ true otherwise. */
+
+bool
+tree_duplicate_sese_region (edge entry, edge exit,
+ basic_block *region, unsigned n_region,
+ basic_block *region_copy)
+{
+ unsigned i, n_doms, ver;
+ bool free_region_copy = false, copying_header = false;
+ struct loop *loop = entry->dest->loop_father;
+ edge exit_copy;
+ bitmap definitions;
+ tree phi;
+ basic_block *doms;
+ htab_t ssa_name_map = NULL;
+ edge redirected;
+ bitmap_iterator bi;
+
+ if (!can_copy_bbs_p (region, n_region))
+ return false;
+
+ /* Some sanity checking. Note that we do not check for all possible
+ missuses of the functions. I.e. if you ask to copy something weird,
+ it will work, but the state of structures probably will not be
+ correct. */
+
+ for (i = 0; i < n_region; i++)
+ {
+ /* We do not handle subloops, i.e. all the blocks must belong to the
+ same loop. */
+ if (region[i]->loop_father != loop)
+ return false;
+
+ /* The loop header may only appear in the region as the entry block. */
+ if (region[i] != entry->dest
+ && region[i] == loop->header)
+ return false;
+ }
+
+ /* By default the copies stay in LOOP. NOTE(review): loop->copy is
+ presumably consulted by copy_bbs when assigning loop_father to the
+ new blocks -- confirm. */
+ loop->copy = loop;
+
+ /* In case the function is used for loop header copying (which is the primary
+ use), ensure that EXIT and its copy will be new latch and entry edges. */
+ if (loop->header == entry->dest)
+ {
+ copying_header = true;
+ loop->copy = loop->outer;
+
+ if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
+ return false;
+
+ for (i = 0; i < n_region; i++)
+ if (region[i] != exit->src
+ && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
+ return false;
+ }
+
+ /* If the caller did not provide storage for the copied blocks, allocate
+ it locally and remember to release it. */
+ if (!region_copy)
+ {
+ region_copy = xmalloc (sizeof (basic_block) * n_region);
+ free_region_copy = true;
+ }
+
+ /* The marked-for-rewrite list must start out empty; it is used below
+ (via marked_ssa_names) to collect the names defined in the region. */
+ gcc_assert (!any_marked_for_rewrite_p ());
+
+ /* Record blocks outside the region that are duplicated by something
+ inside. */
+ doms = xmalloc (sizeof (basic_block) * n_basic_blocks);
+ n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
+
+ copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop);
+ /* Names defined inside the region -- presumably marked during copy_bbs;
+ TODO(review): confirm. */
+ definitions = marked_ssa_names ();
+
+ if (copying_header)
+ {
+ /* The original header becomes the new latch side; EXIT's destination
+ is the new loop header. */
+ loop->header = exit->dest;
+ loop->latch = exit->src;
+ }
+
+ /* Redirect the entry and add the phi node arguments. */
+ redirected = redirect_edge_and_branch (entry, entry->dest->rbi->copy);
+ gcc_assert (redirected != NULL);
+ flush_pending_stmts (entry);
+
+ /* Concerning updating of dominators: We must recount dominators
+ for entry block and its copy. Anything that is outside of the region, but
+ was dominated by something inside needs recounting as well. */
+ set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
+ doms[n_doms++] = entry->dest->rbi->original;
+ iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
+ free (doms);
+
+ /* Add the other phi node arguments. */
+ add_phi_args_after_copy (region_copy, n_region);
+
+ /* Add phi nodes for definitions at exit. TODO -- once we have immediate
+ uses, it should be possible to emit phi nodes just for definitions that
+ are used outside region. */
+ EXECUTE_IF_SET_IN_BITMAP (definitions, 0, ver, bi)
+ {
+ tree name = ssa_name (ver);
+
+ /* The phi merges the value coming from the original region (EXIT)
+ and from the copy (EXIT_COPY), and becomes the defining statement
+ of the old name. */
+ phi = create_phi_node (name, exit->dest);
+ add_phi_arg (&phi, name, exit);
+ add_phi_arg (&phi, name, exit_copy);
+
+ SSA_NAME_DEF_STMT (name) = phi;
+ }
+
+ /* And create new definitions inside region and its copy. TODO -- once we
+ have immediate uses, it might be better to leave definitions in region
+ unchanged, create new ssa names for phi nodes on exit, and rewrite
+ the uses, to avoid changing the copied region. */
+ /* Two passes: fresh names for the original region, then a second set for
+ the copy. Afterwards the old names are defined only by the exit phi
+ nodes created above (see the SSA_NAME_DEF_STMT assignment). */
+ allocate_ssa_names (definitions, &ssa_name_map);
+ rewrite_to_new_ssa_names (region, n_region, ssa_name_map);
+ allocate_ssa_names (definitions, &ssa_name_map);
+ rewrite_to_new_ssa_names (region_copy, n_region, ssa_name_map);
+ htab_delete (ssa_name_map);
+
+ if (free_region_copy)
+ free (region_copy);
+
+ unmark_all_for_rewrite ();
+ BITMAP_XFREE (definitions);
+
+ return true;
+}