+/* Dominator-walk callback: examine the statement at BSI in basic
+   block BB and record, for every variable scheduled for renaming,
+   the blocks where it is defined (set_def_block) and the blocks
+   where it is live on entry (set_livein_block).  WALK_DATA carries
+   the pass-global data, in particular the KILLS bitmap of variables
+   already assigned by an earlier statement (NOTE(review): presumably
+   reset each time the walk enters a new block -- confirm against the
+   walker initialization, which is outside this hunk).  Statements
+   that mention a variable to be renamed are flagged with
+   REWRITE_THIS_STMT so the renaming phase revisits them.  */
+static void
+mark_def_sites (struct dom_walk_data *walk_data,
+ basic_block bb,
+ block_stmt_iterator bsi)
+{
+ struct mark_def_sites_global_data *gd = walk_data->global_data;
+ bitmap kills = gd->kills;
+ size_t uid;
+ tree stmt, def;
+ use_operand_p use_p;
+ def_operand_p def_p;
+ ssa_op_iter iter;
+
+ /* Mark all the blocks that have definitions for each variable in the
+ VARS_TO_RENAME bitmap. */
+ stmt = bsi_stmt (bsi);
+ get_stmt_operands (stmt);
+
+ /* Start from a clean slate; the loops below set this flag as soon as
+ any operand of STMT turns out to need renaming. */
+ REWRITE_THIS_STMT (stmt) = 0;
+
+ /* If a variable is used before being set, then the variable is live
+ across a block boundary, so mark it live-on-entry to BB. */
+
+ FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
+ SSA_OP_USE | SSA_OP_VUSE | SSA_OP_VMUSTDEFKILL)
+ {
+ if (prepare_use_operand_for_rename (use_p, &uid))
+ {
+ REWRITE_THIS_STMT (stmt) = 1;
+ /* No prior kill of UID in this block means the use reaches back
+ past the block entry. */
+ if (!bitmap_bit_p (kills, uid))
+ set_livein_block (USE_FROM_PTR (use_p), bb);
+ }
+ }
+
+ /* Note that virtual definitions are irrelevant for computing KILLS
+ because a V_MAY_DEF does not constitute a killing definition of the
+ variable. However, the operand of a virtual definitions is a use
+ of the variable, so it may cause the variable to be considered
+ live-on-entry. */
+ FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, stmt, iter)
+ {
+ if (prepare_use_operand_for_rename (use_p, &uid))
+ {
+ /* If we do not already have an SSA_NAME for our destination,
+ then set the destination to the source. */
+ if (TREE_CODE (DEF_FROM_PTR (def_p)) != SSA_NAME)
+ SET_DEF (def_p, USE_FROM_PTR (use_p));
+
+ set_livein_block (USE_FROM_PTR (use_p), bb);
+ set_def_block (DEF_FROM_PTR (def_p), bb, false, false);
+ REWRITE_THIS_STMT (stmt) = 1;
+ }
+ }
+
+ /* Now process the defs and must-defs made by this statement. Unlike
+ V_MAY_DEFs above, these are killing definitions, so they also go
+ into the KILLS bitmap. */
+ FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF | SSA_OP_VMUSTDEF)
+ {
+ if (prepare_def_operand_for_rename (def, &uid))
+ {
+ set_def_block (def, bb, false, false);
+ bitmap_set_bit (kills, uid);
+ REWRITE_THIS_STMT (stmt) = 1;
+ }
+ }
+}
+
+
+/* Given a set of blocks with variable definitions (DEF_BLOCKS),
+   return a bitmap with all the blocks in the iterated dominance
+   frontier of the blocks in DEF_BLOCKS. DFS contains dominance
+   frontier information as returned by compute_dominance_frontiers.
+
+   The resulting set of blocks are the potential sites where PHI nodes
+   are needed. The caller is responsible from freeing the memory
+   allocated for the return value. */
+
+static bitmap
+find_idf (bitmap def_blocks, bitmap *dfs)
+{
+ bitmap_iterator bi;
+ unsigned bb_index;
+ VEC(int) *work_stack;
+ bitmap phi_insertion_points;
+
+ work_stack = VEC_alloc (int, n_basic_blocks);
+ phi_insertion_points = BITMAP_ALLOC (NULL);
+
+ /* Seed the work list with all the blocks in DEF_BLOCKS. */
+ EXECUTE_IF_SET_IN_BITMAP (def_blocks, 0, bb_index, bi)
+ /* We use VEC_quick_push here for speed. This is safe because we
+ know that the number of definition blocks is no greater than
+ the number of basic blocks, which is the initial capacity of
+ WORK_STACK. */
+ VEC_quick_push (int, work_stack, bb_index);
+
+ /* Pop a block off the worklist, add every block that appears in
+ the original block's DF that we have not already processed to
+ the worklist. Iterate until the worklist is empty. Blocks
+ which are added to the worklist are potential sites for
+ PHI nodes. */
+ while (VEC_length (int, work_stack) > 0)
+ {
+ bb_index = VEC_pop (int, work_stack);
+
+ /* NOTE(review): BB_INDEX doubles as the popped block and as the
+ iteration variable of the loop below.  This relies on the
+ macro evaluating DFS[BB_INDEX] only once, while setting up the
+ iterator, before BB_INDEX is overwritten -- confirm against
+ the EXECUTE_IF_AND_COMPL_IN_BITMAP definition in bitmap.h.  */
+ EXECUTE_IF_AND_COMPL_IN_BITMAP (dfs[bb_index], phi_insertion_points,
+ 0, bb_index, bi)
+ {
+ /* Use a safe push because if there is a definition of VAR
+ in every basic block, then WORK_STACK may eventually have
+ more than N_BASIC_BLOCK entries. */
+ VEC_safe_push (int, work_stack, bb_index);
+ bitmap_set_bit (phi_insertion_points, bb_index);
+ }
+ }
+
+ VEC_free (int, work_stack);
+
+ return phi_insertion_points;
+}
+
+
+/* Return the set of blocks where variable VAR is defined and the blocks
+   where VAR is live on entry (livein). Return NULL, if no entry is
+   found in DEF_BLOCKS.
+
+   Implemented as a lookup in the file-global DEF_BLOCKS hash table
+   using a stack-allocated key record with only the VAR field set
+   (the table's hash/equality functions, defined elsewhere, key
+   solely off that field).  */
+
+static inline struct def_blocks_d *
+find_def_blocks_for (tree var)
+{
+ struct def_blocks_d dm;
+ dm.var = var;
+ return (struct def_blocks_d *) htab_find (def_blocks, &dm);
+}
+
+
+/* Insert PHI nodes for variable VAR using the iterated dominance
+ frontier given in PHI_INSERTION_POINTS. */
+
+static void
+insert_phi_nodes_for (tree var, bitmap phi_insertion_points)
+{
+ unsigned bb_index;
+ edge e;
+ tree phi;
+ basic_block bb;
+ bitmap_iterator bi;
+ struct def_blocks_d *def_map;
+
+ def_map = find_def_blocks_for (var);
+
+ /* Remove the blocks where we already have PHI nodes for VAR. */
+ bitmap_and_compl_into (phi_insertion_points, def_map->phi_blocks);
+
+ /* Now compute global livein for this variable. Note this modifies
+ def_map->livein_blocks. */
+ compute_global_livein (def_map->livein_blocks, def_map->def_blocks);
+
+ /* And insert the PHI nodes. */
+ EXECUTE_IF_AND_IN_BITMAP (phi_insertion_points, def_map->livein_blocks,
+ 0, bb_index, bi)
+ {
+ bb = BASIC_BLOCK (bb_index);
+ phi = create_phi_node (var, bb);
+
+ /* If we are rewriting SSA names, add also the PHI arguments. */
+ if (TREE_CODE (var) == SSA_NAME)