+/* Copy the basic block BB, scaling its profile accordingly. Edges will be
+ taken care of later. */
+
+static basic_block
+copy_bb (copy_body_data *id, basic_block bb, int frequency_scale, int count_scale)
+{
+ block_stmt_iterator bsi, copy_bsi;
+ basic_block copy_basic_block;
+
+ /* create_basic_block() will append every new block to
+ basic_block_info automatically. */
+ copy_basic_block = create_basic_block (NULL, (void *) 0,
+ (basic_block) bb->prev_bb->aux);
+ copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
+
+ /* We are going to rebuild frequencies from scratch. These values matter
+ only for driving canonicalize_loop_headers. */
+ copy_basic_block->frequency = ((gcov_type)bb->frequency
+ * frequency_scale / REG_BR_PROB_BASE);
+ if (copy_basic_block->frequency > BB_FREQ_MAX)
+ copy_basic_block->frequency = BB_FREQ_MAX;
+ copy_bsi = bsi_start (copy_basic_block);
+
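+ /* Copy every statement of BB into the new block, remapping operands
+ through copy_body_r as we go. */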
+ for (bsi = bsi_start (bb);
+ !bsi_end_p (bsi); bsi_next (&bsi))
+ {
+ tree stmt = bsi_stmt (bsi);
+ tree orig_stmt = stmt;
+
+ walk_tree (&stmt, copy_body_r, id, NULL);
+
+ /* A RETURN_EXPR might be removed; this is signalled by setting the
+ stmt pointer to NULL. */
+ if (stmt)
+ {
+ tree call, decl;
+
+ gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
+
+ /* With return slot optimization we can end up with
+ non-gimple (foo *)&this->m, fix that here. */
+ if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
+ && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == NOP_EXPR
+ && !is_gimple_val (TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0)))
+ gimplify_stmt (&stmt);
+
+ bsi_insert_after (&copy_bsi, stmt, BSI_NEW_STMT);
+
+ /* Process the new statement. gimplify_stmt may have turned the
+ statement into multiple statements; we need to process all of them. */
+ while (!bsi_end_p (copy_bsi))
+ {
+ stmt = bsi_stmt (copy_bsi);
+ call = get_call_expr_in (stmt);
+
+ /* Statements produced by inlining can be unfolded, especially
+ when we have constant propagated some operands. We can't fold
+ them right now for two reasons:
+ 1) folding requires SSA_NAME_DEF_STMTs to be correct;
+ 2) we can't change function calls to builtins.
+ So we just mark the statement for later folding. We mark
+ all new statements, instead of just the statements changed
+ by some nontrivial substitution, so that even statements made
+ foldable indirectly are updated. If this turns out to be
+ expensive, copy_body can be told to watch for nontrivial
+ changes. */
+ if (id->statements_to_fold)
+ pointer_set_insert (id->statements_to_fold, stmt);
+ /* We're duplicating a CALL_EXPR. Find any corresponding
+ callgraph edges and update or duplicate them. */
+ if (call && (decl = get_callee_fndecl (call)))
+ {
+ struct cgraph_node *node;
+ struct cgraph_edge *edge;
+
+ switch (id->transform_call_graph_edges)
+ {
+ case CB_CGE_DUPLICATE:
+ edge = cgraph_edge (id->src_node, orig_stmt);
+ if (edge)
+ cgraph_clone_edge (edge, id->dst_node, stmt,
+ REG_BR_PROB_BASE, 1, edge->frequency, true);
+ break;
+
+ case CB_CGE_MOVE_CLONES:
+ for (node = id->dst_node->next_clone;
+ node;
+ node = node->next_clone)
+ {
+ edge = cgraph_edge (node, orig_stmt);
+ gcc_assert (edge);
+ cgraph_set_call_stmt (edge, stmt);
+ }
+ /* FALLTHRU */
+
+ case CB_CGE_MOVE:
+ edge = cgraph_edge (id->dst_node, orig_stmt);
+ if (edge)
+ cgraph_set_call_stmt (edge, stmt);
+ break;
+
+ default:
+ gcc_unreachable ();
+ }
+ }
+ /* If you think we can abort here, you are wrong.
+ There is no region 0 in tree land. */
+ gcc_assert (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt)
+ != 0);
+
+ if (tree_could_throw_p (stmt)
+ /* When we are cloning for inlining, we are supposed to
+ construct a clone that calls precisely the same functions
+ as the original. However, IPA optimizers may have earlier
+ proved some calls to be non-throwing, which can render some
+ basic blocks dead and therefore unreachable.
+
+ We can't update SSA with unreachable blocks in the CFG, so
+ we prevent that scenario by preserving even the "dead" EH
+ edges until the point they are later removed by the
+ fixup_cfg pass. */
+ || (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
+ && lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) > 0))
+ {
+ int region = lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt);
+ /* Add an entry for the copied tree in the EH hashtable.
+ When cloning or versioning, use the hashtable in
+ cfun, and just copy the EH number. When inlining, use the
+ hashtable in the caller, and adjust the region number. */
+ if (region > 0)
+ add_stmt_to_eh_region (stmt, region + id->eh_region_offset);
+
+ /* If this tree doesn't have a region associated with it,
+ and there is a "current region,"
+ then associate this tree with the current region
+ and add edges associated with this region. */
+ if ((lookup_stmt_eh_region_fn (id->src_cfun,
+ orig_stmt) <= 0
+ && id->eh_region > 0)
+ && tree_could_throw_p (stmt))
+ add_stmt_to_eh_region (stmt, id->eh_region);
+ }
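+ /* In SSA form, make the copy the defining statement of the SSA
+ names it creates, and pick up any newly referenced variables. */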
+ if (gimple_in_ssa_p (cfun))
+ {
+ ssa_op_iter i;
+ tree def;
+
+ find_new_referenced_vars (bsi_stmt_ptr (copy_bsi));
+ FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
+ if (TREE_CODE (def) == SSA_NAME)
+ SSA_NAME_DEF_STMT (def) = stmt;
+ }
+ bsi_next (&copy_bsi);
+ }
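+ /* The walk above left copy_bsi past the end of the block; reposition
+ it at the last statement so the next copy is inserted after it. */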
+ copy_bsi = bsi_last (copy_basic_block);
+ }
+ }
+ return copy_basic_block;
+}
+
+/* Inserting a Single Entry Multiple Exit region in SSA form into code that
+ is already in SSA form is quite easy, since the dominator relationship for
+ the old basic blocks does not change.
+
+ There is, however, an exception: inlining might change the dominator
+ relation across EH edges leading from basic blocks within the inlined
+ function to landing pads in the function we inline into.
+
+ This function marks the PHI_RESULT of such PHI nodes for renaming; this is
+ safe because the EH edges are abnormal, so SSA_NAME_OCCURS_IN_ABNORMAL_PHI
+ must be set. This means that there will be no overlapping live ranges
+ for the underlying symbol.
+
+ This might change in the future if we allow redirecting of EH edges; we
+ might then want to change the way we build the CFG pre-inlining to include
+ all the possible edges. */
+static void
+update_ssa_across_eh_edges (basic_block bb)
+{
+ edge e;
+ edge_iterator ei;
+
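+ /* Successors without a copy of their own (no AUX pointer, or an AUX
+ marker still naming the entry block) lie outside the copied region;
+ as asserted below, only EH edges may leave the region. */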
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ if (!e->dest->aux
+ || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
+ {
+ tree phi;
+
+ gcc_assert (e->flags & EDGE_EH);
+ for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
+ {
+ gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI
+ (PHI_RESULT (phi)));
+ mark_sym_for_renaming
+ (SSA_NAME_VAR (PHI_RESULT (phi)));
+ }
+ }
+}
+
+/* Copy the edges of BB into its copy constructed earlier, scaling the
+ profile accordingly. Assume the AUX pointers point to the copies of
+ each BB. */
+static void
+copy_edges_for_bb (basic_block bb, int count_scale)
+{
+ basic_block new_bb = (basic_block) bb->aux;
+ edge_iterator ei;
+ edge old_edge;
+ block_stmt_iterator bsi;
+ int flags;
+
+ /* Use the indices from the original blocks to create edges for the
+ new ones. */
+ FOR_EACH_EDGE (old_edge, ei, bb->succs)
+ if (!(old_edge->flags & EDGE_EH))
+ {
+ edge new;
+
+ flags = old_edge->flags;
+
+ /* Return edges do get a FALLTHRU flag when they get inlined. */
+ if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
+ && old_edge->dest->aux != EXIT_BLOCK_PTR)
+ flags |= EDGE_FALLTHRU;
+ new = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
+ new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
+ new->probability = old_edge->probability;
+ }
+
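+ /* The ENTRY and EXIT blocks contain no statements, so there is
+ nothing further to do for them. */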
+ if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
+ return;
+
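+ /* Walk the statements of the copied block; whenever a statement can
+ throw internally, split the block after it and add the needed EH
+ edges. */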
+ for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
+ {
+ tree copy_stmt;
+
+ copy_stmt = bsi_stmt (bsi);
+ update_stmt (copy_stmt);
+ if (gimple_in_ssa_p (cfun))
+ mark_symbols_for_renaming (copy_stmt);
+ /* Do this before the possible split_block. */
+ bsi_next (&bsi);
+
+ /* If this tree could throw an exception, there are two
+ cases where we need to add abnormal edge(s): the
+ tree wasn't in a region and there is a "current
+ region" in the caller; or the original tree had
+ EH edges. In both cases split the block after the tree,
+ and add abnormal edge(s) as needed; we need both
+ those from the callee and the caller.
+ We check whether the copy can throw, because the const
+ propagation can change an INDIRECT_REF which throws
+ into a COMPONENT_REF which doesn't. If the copy
+ can throw, the original could also throw. */
+
+ if (tree_can_throw_internal (copy_stmt))
+ {
+ if (!bsi_end_p (bsi))
+ /* Note that bb's predecessor edges aren't necessarily
+ right at this point; split_block doesn't care. */
+ {
+ edge e = split_block (new_bb, copy_stmt);
+
+ new_bb = e->dest;
+ new_bb->aux = e->src->aux;
+ bsi = bsi_start (new_bb);
+ }
+
+ make_eh_edges (copy_stmt);
+
+ if (gimple_in_ssa_p (cfun))
+ update_ssa_across_eh_edges (bb_for_stmt (copy_stmt));
+ }
+ }
+}
+
+/* Copy the PHIs. All blocks and edges have already been copied; some
+ blocks were possibly split and new outgoing EH edges inserted.
+ BB points to the block of the original function and the AUX pointers
+ link the original and newly copied blocks. */
+
+static void
+copy_phis_for_bb (basic_block bb, copy_body_data *id)
+{
+ basic_block new_bb = (basic_block) bb->aux;
+ edge_iterator ei;
+ tree phi;
+
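+ /* Only PHI nodes for GIMPLE registers are copied here; PHIs for
+ virtual operands are left to be recreated when SSA form is
+ updated. */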
+ for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+ {
+ tree res = PHI_RESULT (phi);
+ tree new_res = res;
+ tree new_phi;
+ edge new_edge;
+
+ if (is_gimple_reg (res))
+ {
+ walk_tree (&new_res, copy_body_r, id, NULL);
+ SSA_NAME_DEF_STMT (new_res)
+ = new_phi = create_phi_node (new_res, new_bb);
+ FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
+ {
+ edge old_edge = find_edge (new_edge->src->aux, bb);
+ tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
+ tree new_arg = arg;
+
+ walk_tree (&new_arg, copy_body_r, id, NULL);
+ gcc_assert (new_arg);
+ add_phi_arg (new_phi, new_arg, new_edge);
+ }
+ }
+ }
+}
+
+/* Wrapper for remap_decl so it can be used as a callback. */
+static tree
+remap_decl_1 (tree decl, void *data)
+{
+ return remap_decl (decl, (copy_body_data *) data);
+}
+
+/* Build a struct function and associated data structures for the new clone
+ NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
+
+static void
+initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
+ int frequency)
+{
+ struct function *new_cfun
+ = (struct function *) ggc_alloc_cleared (sizeof (struct function));
+ struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
+ int count_scale, frequency_scale;
+
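+ /* Derive the profile scale factors from the entry block of the source
+ function, falling back when no profile data is present. */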
+ if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
+ count_scale = (REG_BR_PROB_BASE * count
+ / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
+ else
+ count_scale = 1;
+
+ if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
+ frequency_scale = (REG_BR_PROB_BASE * frequency
+ / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
+ else
+ frequency_scale = count_scale;
+
+ /* Register specific tree functions. */
+ tree_register_cfg_hooks ();
+ *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
+ new_cfun->funcdef_no = get_next_funcdef_no ();
+ VALUE_HISTOGRAMS (new_cfun) = NULL;
+ new_cfun->unexpanded_var_list = NULL;
+ new_cfun->cfg = NULL;
+ new_cfun->decl = new_fndecl /*= copy_node (callee_fndecl)*/;
+ new_cfun->ib_boundaries_block = NULL;
+ DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
+ push_cfun (new_cfun);
+ init_empty_tree_cfg ();
+
+ ENTRY_BLOCK_PTR->count
+ = (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale
+ / REG_BR_PROB_BASE);
+ ENTRY_BLOCK_PTR->frequency
+ = (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency * frequency_scale
+ / REG_BR_PROB_BASE);
+ EXIT_BLOCK_PTR->count
+ = (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale
+ / REG_BR_PROB_BASE);
+ EXIT_BLOCK_PTR->frequency
+ = (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency * frequency_scale
+ / REG_BR_PROB_BASE);
+ if (src_cfun->eh)
+ init_eh_for_function ();
+
+ if (src_cfun->gimple_df)
+ {
+ init_tree_ssa ();
+ cfun->gimple_df->in_ssa_p = true;
+ init_ssa_operands ();
+ }
+ pop_cfun ();
+}
+
+/* Make a copy of the body of FN so that it can be inserted inline in
+ another function. Walks FN via its CFG and returns the new fndecl. */
+
+static tree
+copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
+ basic_block entry_block_map, basic_block exit_block_map)
+{
+ tree callee_fndecl = id->src_fn;
+ /* Original cfun for the callee, doesn't change. */
+ struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
+ struct function *cfun_to_copy;
+ basic_block bb;
+ tree new_fndecl = NULL;
+ int count_scale, frequency_scale;
+ int last;
+
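+ /* Compute the same profile scale factors as initialize_cfun does,
+ relative to the source function's entry block. */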
+ if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
+ count_scale = (REG_BR_PROB_BASE * count
+ / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
+ else
+ count_scale = 1;
+
+ if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
+ frequency_scale = (REG_BR_PROB_BASE * frequency
+ / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
+ else
+ frequency_scale = count_scale;
+
+ /* Register specific tree functions. */
+ tree_register_cfg_hooks ();
+
+ /* Must have a CFG here at this point. */
+ gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
+ (DECL_STRUCT_FUNCTION (callee_fndecl)));
+
+ cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
+
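+ /* Link the callee's ENTRY and EXIT blocks to the caller's mapping
+ blocks through the AUX pointers, in both directions. */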
+ ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
+ EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
+ entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
+ exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
+
+ /* Duplicate any exception-handling regions. */
+ if (cfun->eh)
+ {
+ id->eh_region_offset
+ = duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
+ 0, id->eh_region);
+ }
+ /* Use AUX pointers to map the original blocks to their copies. */
+ FOR_EACH_BB_FN (bb, cfun_to_copy)
+ {
+ basic_block new = copy_bb (id, bb, frequency_scale, count_scale);
+ bb->aux = new;
+ new->aux = bb;
+ }
+
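+ /* Remember the current number of blocks; EH edge insertion below may
+ create new ones, whose AUX fields must be cleared afterwards. */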
+ last = n_basic_blocks;
+ /* Now that we've duplicated the blocks, duplicate their edges. */
+ FOR_ALL_BB_FN (bb, cfun_to_copy)
+ copy_edges_for_bb (bb, count_scale);
+ if (gimple_in_ssa_p (cfun))
+ FOR_ALL_BB_FN (bb, cfun_to_copy)
+ copy_phis_for_bb (bb, id);
+ FOR_ALL_BB_FN (bb, cfun_to_copy)
+ {
+ ((basic_block)bb->aux)->aux = NULL;
+ bb->aux = NULL;
+ }
+ /* Zero out AUX fields of blocks newly created during EH edge
+ insertion. */
+ for (; last < n_basic_blocks; last++)
+ BASIC_BLOCK (last)->aux = NULL;
+ entry_block_map->aux = NULL;
+ exit_block_map->aux = NULL;
+
+ return new_fndecl;
+}
+