/* Tree inlining.
- Copyright 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
+ Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007
+ Free Software Foundation, Inc.
Contributed by Alexandre Oliva <aoliva@redhat.com>
This file is part of GCC.
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
+#include "value-prof.h"
+#include "tree-pass.h"
/* I'm not real happy about this, but we need to handle gimple and
non-gimple trees. */
Inlining: a function body is duplicated, but the PARM_DECLs are
remapped into VAR_DECLs, and non-void RETURN_EXPRs become
- MODIFY_EXPRs that store to a dedicated returned-value variable.
+ GIMPLE_MODIFY_STMTs that store to a dedicated returned-value variable.
The duplicated eh_region info of the copy will later be appended
to the info for the caller; the eh_region info in copied throwing
statements and RESX_EXPRs is adjusted accordingly.
o Provide heuristics to clamp inlining of recursive template
calls? */
+
+/* Weights that estimate_num_insns uses for heuristics in inlining. */
+
+eni_weights eni_inlining_weights;
+
+/* Weights that estimate_num_insns uses to estimate the size of the
+ produced code. */
+
+eni_weights eni_size_weights;
+
+/* Weights that estimate_num_insns uses to estimate the time necessary
+ to execute the produced code. */
+
+eni_weights eni_time_weights;
+
/* Prototypes. */
static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
static void remap_save_expr (tree *, void *, int *);
static void add_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
+static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_no_change (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
- splay_tree_insert (id->decl_map, (splay_tree_key) key,
- (splay_tree_value) value);
+ *pointer_map_insert (id->decl_map, key) = value;
/* Always insert an identity map as well. If we see this same new
node again, we won't want to duplicate it a second time. */
if (key != value)
- splay_tree_insert (id->decl_map, (splay_tree_key) value,
- (splay_tree_value) value);
+ *pointer_map_insert (id->decl_map, value) = value;
+}
+
+/* Construct new SSA name for old NAME. ID is the inline context. */
+
+static tree
+remap_ssa_name (tree name, copy_body_data *id)
+{
+ tree new;
+ tree *n;
+
+ gcc_assert (TREE_CODE (name) == SSA_NAME);
+
+ n = (tree *) pointer_map_contains (id->decl_map, name);
+ if (n)
+ return *n;
+
+ /* Do not set DEF_STMT yet as statement is not copied yet. We do that
+ in copy_bb. */
+ new = remap_decl (SSA_NAME_VAR (name), id);
+ /* We might've substituted constant or another SSA_NAME for
+ the variable.
+
+ Replace the SSA name representing RESULT_DECL by variable during
+ inlining: this saves us from need to introduce PHI node in a case
+ return value is just partly initialized. */
+ if ((TREE_CODE (new) == VAR_DECL || TREE_CODE (new) == PARM_DECL)
+ && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
+ || !id->transform_return_to_modify))
+ {
+ new = make_ssa_name (new, NULL);
+ insert_decl_map (id, name, new);
+ if (IS_EMPTY_STMT (SSA_NAME_DEF_STMT (name)))
+ {
+ SSA_NAME_DEF_STMT (new) = build_empty_stmt ();
+ if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name)) == name)
+ set_default_def (SSA_NAME_VAR (new), new);
+ }
+ SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new)
+ = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
+ TREE_TYPE (new) = TREE_TYPE (SSA_NAME_VAR (new));
+ }
+ else
+ insert_decl_map (id, name, new);
+ return new;
}
/* Remap DECL during the copying of the BLOCK tree for the function. */
tree
remap_decl (tree decl, copy_body_data *id)
{
- splay_tree_node n;
+ tree *n;
tree fn;
/* We only remap local variables in the current function. */
/* See if we have remapped this declaration. */
- n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
+ n = (tree *) pointer_map_contains (id->decl_map, decl);
/* If we didn't already have an equivalent for this declaration,
create one now. */
walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
}
+ if (cfun && gimple_in_ssa_p (cfun)
+ && (TREE_CODE (t) == VAR_DECL
+ || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
+ {
+ tree def = gimple_default_def (id->src_cfun, decl);
+ get_var_ann (t);
+ if (TREE_CODE (decl) != PARM_DECL && def)
+ {
+ tree map = remap_ssa_name (def, id);
+ /* Watch out for RESULT_DECLs whose SSA names map directly
+ to them. */
+ if (TREE_CODE (map) == SSA_NAME)
+ set_default_def (t, map);
+ }
+ add_referenced_var (t);
+ }
return t;
}
- return unshare_expr ((tree) n->value);
+ return unshare_expr (*n);
}
static tree
remap_type_1 (tree type, copy_body_data *id)
{
- splay_tree_node node;
+ tree *node;
tree new, t;
if (type == NULL)
return type;
/* See if we have remapped this type. */
- node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
+ node = (tree *) pointer_map_contains (id->decl_map, type);
if (node)
- return (tree) node->value;
+ return *node;
/* The type only needs remapping if it's variably modified. */
if (! variably_modified_type_p (type, id->src_fn))
tree
remap_type (tree type, copy_body_data *id)
{
- splay_tree_node node;
+ tree *node;
if (type == NULL)
return type;
/* See if we have remapped this type. */
- node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
+ node = (tree *) pointer_map_contains (id->decl_map, type);
if (node)
- return (tree) node->value;
+ return *node;
/* The type only needs remapping if it's variably modified. */
if (! variably_modified_type_p (type, id->src_fn))
duplicated and/or tweaked. */
/* When requested, RETURN_EXPRs should be transformed to just the
- contained MODIFY_EXPR. The branch semantics of the return will
+ contained GIMPLE_MODIFY_STMT. The branch semantics of the return will
be handled elsewhere by manipulating the CFG rather than a statement. */
if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
{
If the "assignment" is just the result decl, the result
decl has already been set (e.g. a recent "foo (&result_decl,
...)"); just toss the entire RETURN_EXPR. */
- if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
+ if (assignment && TREE_CODE (assignment) == GIMPLE_MODIFY_STMT)
{
/* Replace the RETURN_EXPR with (a copy of) the
- MODIFY_EXPR hanging underneath. */
+ GIMPLE_MODIFY_STMT hanging underneath. */
*tp = copy_node (assignment);
}
else /* Else the RETURN_EXPR returns no value. */
return (tree) (void *)1;
}
}
+ else if (TREE_CODE (*tp) == SSA_NAME)
+ {
+ *tp = remap_ssa_name (*tp, id);
+ *walk_subtrees = 0;
+ return NULL;
+ }
/* Local variables and labels need to be replaced by equivalent
variables. We don't want to copy static variables; there's only
/* Here we handle trees that are not completely rewritten.
First we detect some inlining-induced bogosities for
discarding. */
- if (TREE_CODE (*tp) == MODIFY_EXPR
- && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
+ if (TREE_CODE (*tp) == GIMPLE_MODIFY_STMT
+ && GIMPLE_STMT_OPERAND (*tp, 0) == GIMPLE_STMT_OPERAND (*tp, 1)
&& (lang_hooks.tree_inlining.auto_var_in_fn_p
- (TREE_OPERAND (*tp, 0), fn)))
+ (GIMPLE_STMT_OPERAND (*tp, 0), fn)))
{
/* Some assignments VAR = VAR; don't generate any rtl code
and thus don't count as variable modification. Avoid
keeping bogosities like 0 = 0. */
- tree decl = TREE_OPERAND (*tp, 0), value;
- splay_tree_node n;
+ tree decl = GIMPLE_STMT_OPERAND (*tp, 0), value;
+ tree *n;
- n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
+ n = (tree *) pointer_map_contains (id->decl_map, decl);
if (n)
{
- value = (tree) n->value;
+ value = *n;
STRIP_TYPE_NOPS (value);
if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
{
/* Get rid of *& from inline substitutions that can happen when a
pointer argument is an ADDR_EXPR. */
tree decl = TREE_OPERAND (*tp, 0);
- splay_tree_node n;
+ tree *n;
- n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
+ n = (tree *) pointer_map_contains (id->decl_map, decl);
if (n)
{
tree new;
build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
but we absolutely rely on that. As fold_indirect_ref
does other useful transformations, try that first, though. */
- tree type = TREE_TYPE (TREE_TYPE ((tree)n->value));
- new = unshare_expr ((tree)n->value);
+ tree type = TREE_TYPE (TREE_TYPE (*n));
+ new = unshare_expr (*n);
old = *tp;
*tp = fold_indirect_ref_1 (type, new);
if (! *tp)
/* Here is the "usual case". Copy this tree node, and then
tweak some special cases. */
copy_tree_r (tp, walk_subtrees, NULL);
+
+ /* Global variables we have not seen yet need to go into referenced
+ vars. */
+ if (gimple_in_ssa_p (cfun) && TREE_CODE (*tp) == VAR_DECL)
+ add_referenced_var (*tp);
/* If EXPR has block defined, map it to newly constructed block.
When inlining we want EXPRs without block appear in the block
of function call. */
- if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (*tp))))
+ if (EXPR_P (*tp) || GIMPLE_STMT_P (*tp))
{
new_block = id->block;
if (TREE_BLOCK (*tp))
{
- splay_tree_node n;
- n = splay_tree_lookup (id->decl_map,
- (splay_tree_key) TREE_BLOCK (*tp));
+ tree *n;
+ n = (tree *) pointer_map_contains (id->decl_map,
+ TREE_BLOCK (*tp));
gcc_assert (n);
- new_block = (tree) n->value;
+ new_block = *n;
}
TREE_BLOCK (*tp) = new_block;
}
(NULL_TREE,
id->eh_region_offset + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));
- TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
+ if (!GIMPLE_TUPLE_P (*tp) && TREE_CODE (*tp) != OMP_CLAUSE)
+ TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
/* The copied TARGET_EXPR has never been expanded, even if the
original node was expanded already. */
copy_basic_block = create_basic_block (NULL, (void *) 0,
(basic_block) bb->prev_bb->aux);
copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
- copy_basic_block->frequency = (bb->frequency
+
+ /* We are going to rebuild frequencies from scratch. These values have only
+ minor importance for driving canonicalize_loop_headers. */
+ copy_basic_block->frequency = ((gcov_type)bb->frequency
* frequency_scale / REG_BR_PROB_BASE);
+ if (copy_basic_block->frequency > BB_FREQ_MAX)
+ copy_basic_block->frequency = BB_FREQ_MAX;
copy_bsi = bsi_start (copy_basic_block);
for (bsi = bsi_start (bb);
{
tree call, decl;
+ gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
+
/* With return slot optimization we can end up with
non-gimple (foo *)&this->m, fix that here. */
- if (TREE_CODE (stmt) == MODIFY_EXPR
- && TREE_CODE (TREE_OPERAND (stmt, 1)) == NOP_EXPR
- && !is_gimple_val (TREE_OPERAND (TREE_OPERAND (stmt, 1), 0)))
+ if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
+ && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == NOP_EXPR
+ && !is_gimple_val (TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0)))
gimplify_stmt (&stmt);
bsi_insert_after (©_bsi, stmt, BSI_NEW_STMT);
- call = get_call_expr_in (stmt);
- /* We're duplicating a CALL_EXPR. Find any corresponding
- callgraph edges and update or duplicate them. */
- if (call && (decl = get_callee_fndecl (call)))
+
+ /* Process new statement. gimplify_stmt possibly turned statement
+ into multiple statements, we need to process all of them. */
+ while (!bsi_end_p (copy_bsi))
{
- struct cgraph_node *node;
- struct cgraph_edge *edge;
-
- switch (id->transform_call_graph_edges)
+ stmt = bsi_stmt (copy_bsi);
+ call = get_call_expr_in (stmt);
+
+ /* Statements produced by inlining can be unfolded, especially
+ when we constant propagated some operands. We can't fold
+ them right now for two reasons:
+ 1) folding require SSA_NAME_DEF_STMTs to be correct
+ 2) we can't change function calls to builtins.
+ So we just mark the statement for later folding. We mark
+ all new statements, instead of just the statements that changed
+ through some nontrivial substitution, so that even statements made
+ foldable indirectly are updated. If this turns out to be
+ expensive, copy_body can be told to watch for nontrivial
+ changes. */
+ if (id->statements_to_fold)
+ pointer_set_insert (id->statements_to_fold, stmt);
+ /* We're duplicating a CALL_EXPR. Find any corresponding
+ callgraph edges and update or duplicate them. */
+ if (call && (decl = get_callee_fndecl (call)))
{
- case CB_CGE_DUPLICATE:
- edge = cgraph_edge (id->src_node, orig_stmt);
- if (edge)
- cgraph_clone_edge (edge, id->dst_node, stmt,
- REG_BR_PROB_BASE, 1, true);
- break;
-
- case CB_CGE_MOVE_CLONES:
- for (node = id->dst_node->next_clone;
- node;
- node = node->next_clone)
+ struct cgraph_node *node;
+ struct cgraph_edge *edge;
+
+ switch (id->transform_call_graph_edges)
{
- edge = cgraph_edge (node, orig_stmt);
- gcc_assert (edge);
- cgraph_set_call_stmt (edge, stmt);
+ case CB_CGE_DUPLICATE:
+ edge = cgraph_edge (id->src_node, orig_stmt);
+ if (edge)
+ cgraph_clone_edge (edge, id->dst_node, stmt,
+ REG_BR_PROB_BASE, 1, edge->frequency, true);
+ break;
+
+ case CB_CGE_MOVE_CLONES:
+ for (node = id->dst_node->next_clone;
+ node;
+ node = node->next_clone)
+ {
+ edge = cgraph_edge (node, orig_stmt);
+ gcc_assert (edge);
+ cgraph_set_call_stmt (edge, stmt);
+ }
+ /* FALLTHRU */
+
+ case CB_CGE_MOVE:
+ edge = cgraph_edge (id->dst_node, orig_stmt);
+ if (edge)
+ cgraph_set_call_stmt (edge, stmt);
+ break;
+
+ default:
+ gcc_unreachable ();
}
- /* FALLTHRU */
-
- case CB_CGE_MOVE:
- edge = cgraph_edge (id->dst_node, orig_stmt);
- if (edge)
- cgraph_set_call_stmt (edge, stmt);
- break;
-
- default:
- gcc_unreachable ();
}
- }
- /* If you think we can abort here, you are wrong.
- There is no region 0 in tree land. */
- gcc_assert (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt)
- != 0);
+ /* If you think we can abort here, you are wrong.
+ There is no region 0 in tree land. */
+ gcc_assert (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt)
+ != 0);
+
+ if (tree_could_throw_p (stmt)
+ /* When we are cloning for inlining, we are supposed to
+ construct a clone that calls precisely the same functions
+ as original. However IPA optimizers might've proved
+ earlier some function calls as non-trapping that might
+ render some basic blocks dead that might become
+ unreachable.
+
+ We can't update SSA with unreachable blocks in CFG and thus
+ we prevent the scenario by preserving even the "dead" eh
+ edges until the point they are later removed by
+ fixup_cfg pass. */
+ || (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
+ && lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) > 0))
+ {
+ int region = lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt);
+ /* Add an entry for the copied tree in the EH hashtable.
+ When cloning or versioning, use the hashtable in
+ cfun, and just copy the EH number. When inlining, use the
+ hashtable in the caller, and adjust the region number. */
+ if (region > 0)
+ add_stmt_to_eh_region (stmt, region + id->eh_region_offset);
+
+ /* If this tree doesn't have a region associated with it,
+ and there is a "current region,"
+ then associate this tree with the current region
+ and add edges associated with this region. */
+ if ((lookup_stmt_eh_region_fn (id->src_cfun,
+ orig_stmt) <= 0
+ && id->eh_region > 0)
+ && tree_could_throw_p (stmt))
+ add_stmt_to_eh_region (stmt, id->eh_region);
+ }
+ if (gimple_in_ssa_p (cfun))
+ {
+ ssa_op_iter i;
+ tree def;
- if (tree_could_throw_p (stmt))
- {
- int region = lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt);
- /* Add an entry for the copied tree in the EH hashtable.
- When cloning or versioning, use the hashtable in
- cfun, and just copy the EH number. When inlining, use the
- hashtable in the caller, and adjust the region number. */
- if (region > 0)
- add_stmt_to_eh_region (stmt, region + id->eh_region_offset);
-
- /* If this tree doesn't have a region associated with it,
- and there is a "current region,"
- then associate this tree with the current region
- and add edges associated with this region. */
- if ((lookup_stmt_eh_region_fn (id->src_cfun,
- orig_stmt) <= 0
- && id->eh_region > 0)
- && tree_could_throw_p (stmt))
- add_stmt_to_eh_region (stmt, id->eh_region);
+ find_new_referenced_vars (bsi_stmt_ptr (copy_bsi));
+ FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
+ if (TREE_CODE (def) == SSA_NAME)
+ SSA_NAME_DEF_STMT (def) = stmt;
+ }
+ bsi_next (©_bsi);
}
+ copy_bsi = bsi_last (copy_basic_block);
}
}
return copy_basic_block;
}
+/* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
+ form is quite easy, since dominator relationship for old basic blocks does
+ not change.
+
+ There is, however, an exception: inlining might change the dominator
+ relation across EH edges from basic blocks within inlined functions
+ leading to landing pads in the function we inline into.
+
+ The function marks PHI_RESULT of such PHI nodes for renaming; this is
+ safe because the EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI
+ must be set. This means that there will be no overlapping live ranges
+ for the underlying symbol.
+
+ This might change in the future if we allow redirecting of EH edges,
+ and then we might want to change the way we build the CFG pre-inlining
+ to include all the possible edges. */
+static void
+update_ssa_across_eh_edges (basic_block bb)
+{
+ edge e;
+ edge_iterator ei;
+
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ if (!e->dest->aux
+ || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
+ {
+ tree phi;
+
+ gcc_assert (e->flags & EDGE_EH);
+ for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
+ {
+ gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI
+ (PHI_RESULT (phi)));
+ mark_sym_for_renaming
+ (SSA_NAME_VAR (PHI_RESULT (phi)));
+ }
+ }
+}
+
/* Copy edges from BB into its copy constructed earlier, scale profile
accordingly. Edges will be taken care of later. Assume aux
pointers to point to the copies of each BB. */
copy_stmt = bsi_stmt (bsi);
update_stmt (copy_stmt);
+ if (gimple_in_ssa_p (cfun))
+ mark_symbols_for_renaming (copy_stmt);
/* Do this before the possible split_block. */
bsi_next (&bsi);
right at this point; split_block doesn't care. */
{
edge e = split_block (new_bb, copy_stmt);
+
new_bb = e->dest;
+ new_bb->aux = e->src->aux;
bsi = bsi_start (new_bb);
}
make_eh_edges (copy_stmt);
+
+ if (gimple_in_ssa_p (cfun))
+ update_ssa_across_eh_edges (bb_for_stmt (copy_stmt));
+ }
+ }
+}
+
+/* Copy the PHIs. All blocks and edges are copied, and some blocks
+ were possibly split and new outgoing EH edges inserted.
+ BB points to the block of the original function and AUX pointers link
+ the original and newly copied blocks. */
+
+static void
+copy_phis_for_bb (basic_block bb, copy_body_data *id)
+{
+ basic_block new_bb = bb->aux;
+ edge_iterator ei;
+ tree phi;
+
+ for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+ {
+ tree res = PHI_RESULT (phi);
+ tree new_res = res;
+ tree new_phi;
+ edge new_edge;
+
+ if (is_gimple_reg (res))
+ {
+ walk_tree (&new_res, copy_body_r, id, NULL);
+ SSA_NAME_DEF_STMT (new_res)
+ = new_phi = create_phi_node (new_res, new_bb);
+ FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
+ {
+ edge old_edge = find_edge (new_edge->src->aux, bb);
+ tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
+ tree new_arg = arg;
+
+ walk_tree (&new_arg, copy_body_r, id, NULL);
+ gcc_assert (new_arg);
+ add_phi_arg (new_phi, new_arg, new_edge);
+ }
}
}
}
return remap_decl (decl, (copy_body_data *) data);
}
+/* Build struct function and associated datastructures for the new clone
+ NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
+
+static void
+initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
+ int frequency)
+{
+ struct function *new_cfun
+ = (struct function *) ggc_alloc_cleared (sizeof (struct function));
+ struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
+ int count_scale, frequency_scale;
+
+ if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
+ count_scale = (REG_BR_PROB_BASE * count
+ / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
+ else
+ count_scale = 1;
+
+ if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
+ frequency_scale = (REG_BR_PROB_BASE * frequency
+ /
+ ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
+ else
+ frequency_scale = count_scale;
+
+ /* Register specific tree functions. */
+ tree_register_cfg_hooks ();
+ *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
+ new_cfun->funcdef_no = get_next_funcdef_no ();
+ VALUE_HISTOGRAMS (new_cfun) = NULL;
+ new_cfun->unexpanded_var_list = NULL;
+ new_cfun->cfg = NULL;
+ new_cfun->decl = new_fndecl /*= copy_node (callee_fndecl)*/;
+ DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
+ push_cfun (new_cfun);
+ init_empty_tree_cfg ();
+
+ ENTRY_BLOCK_PTR->count =
+ (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
+ REG_BR_PROB_BASE);
+ ENTRY_BLOCK_PTR->frequency =
+ (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
+ frequency_scale / REG_BR_PROB_BASE);
+ EXIT_BLOCK_PTR->count =
+ (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
+ REG_BR_PROB_BASE);
+ EXIT_BLOCK_PTR->frequency =
+ (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
+ frequency_scale / REG_BR_PROB_BASE);
+ if (src_cfun->eh)
+ init_eh_for_function ();
+
+ if (src_cfun->gimple_df)
+ {
+ init_tree_ssa ();
+ cfun->gimple_df->in_ssa_p = true;
+ init_ssa_operands ();
+ }
+ pop_cfun ();
+}
+
/* Make a copy of the body of FN so that it can be inserted inline in
another function. Walks FN via CFG, returns new fndecl. */
tree callee_fndecl = id->src_fn;
/* Original cfun for the callee, doesn't change. */
struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
- /* Copy, built by this function. */
- struct function *new_cfun;
- /* Place to copy from; when a copy of the function was saved off earlier,
- use that instead of the main copy. */
- struct function *cfun_to_copy =
- (struct function *) ggc_alloc_cleared (sizeof (struct function));
+ struct function *cfun_to_copy;
basic_block bb;
tree new_fndecl = NULL;
int count_scale, frequency_scale;
+ int last;
if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
count_scale = (REG_BR_PROB_BASE * count
gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
(DECL_STRUCT_FUNCTION (callee_fndecl)));
- *cfun_to_copy = *DECL_STRUCT_FUNCTION (callee_fndecl);
-
- id->src_cfun = cfun_to_copy;
-
- /* If requested, create new basic_block_info and label_to_block_maps.
- Otherwise, insert our new blocks and labels into the existing cfg. */
- if (id->transform_new_cfg)
- {
- new_cfun =
- (struct function *) ggc_alloc_cleared (sizeof (struct function));
- *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
- new_cfun->cfg = NULL;
- new_cfun->decl = new_fndecl = copy_node (callee_fndecl);
- new_cfun->ib_boundaries_block = NULL;
- DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
- push_cfun (new_cfun);
- init_empty_tree_cfg ();
-
- ENTRY_BLOCK_PTR->count =
- (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
- REG_BR_PROB_BASE);
- ENTRY_BLOCK_PTR->frequency =
- (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
- frequency_scale / REG_BR_PROB_BASE);
- EXIT_BLOCK_PTR->count =
- (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
- REG_BR_PROB_BASE);
- EXIT_BLOCK_PTR->frequency =
- (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
- frequency_scale / REG_BR_PROB_BASE);
-
- entry_block_map = ENTRY_BLOCK_PTR;
- exit_block_map = EXIT_BLOCK_PTR;
- }
+ cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
+
ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
+ entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
+ exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
/* Duplicate any exception-handling regions. */
if (cfun->eh)
{
- if (id->transform_new_cfg)
- init_eh_for_function ();
id->eh_region_offset
= duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
0, id->eh_region);
}
/* Use aux pointers to map the original blocks to copy. */
FOR_EACH_BB_FN (bb, cfun_to_copy)
- bb->aux = copy_bb (id, bb, frequency_scale, count_scale);
+ {
+ basic_block new = copy_bb (id, bb, frequency_scale, count_scale);
+ bb->aux = new;
+ new->aux = bb;
+ }
+
+ last = last_basic_block;
/* Now that we've duplicated the blocks, duplicate their edges. */
FOR_ALL_BB_FN (bb, cfun_to_copy)
copy_edges_for_bb (bb, count_scale);
+ if (gimple_in_ssa_p (cfun))
+ FOR_ALL_BB_FN (bb, cfun_to_copy)
+ copy_phis_for_bb (bb, id);
FOR_ALL_BB_FN (bb, cfun_to_copy)
- bb->aux = NULL;
-
- if (id->transform_new_cfg)
- pop_cfun ();
+ {
+ ((basic_block)bb->aux)->aux = NULL;
+ bb->aux = NULL;
+ }
+ /* Zero out AUX fields of newly created block during EH edge
+ insertion. */
+ for (; last < last_basic_block; last++)
+ BASIC_BLOCK (last)->aux = NULL;
+ entry_block_map->aux = NULL;
+ exit_block_map->aux = NULL;
return new_fndecl;
}
tree init_stmt;
tree var;
tree var_sub;
+ tree rhs = value ? fold_convert (TREE_TYPE (p), value) : NULL;
+ tree def = (gimple_in_ssa_p (cfun)
+ ? gimple_default_def (id->src_cfun, p) : NULL);
- /* If the parameter is never assigned to, we may not need to
- create a new variable here at all. Instead, we may be able
- to just use the argument value. */
+ /* If the parameter is never assigned to, has no SSA_NAMEs created,
+ we may not need to create a new variable here at all. Instead, we may
+ be able to just use the argument value. */
if (TREE_READONLY (p)
&& !TREE_ADDRESSABLE (p)
- && value && !TREE_SIDE_EFFECTS (value))
+ && value && !TREE_SIDE_EFFECTS (value)
+ && !def)
{
/* We may produce non-gimple trees by adding NOPs or introduce
invalid sharing when operand is not really constant.
here since the type of this decl must be visible to the calling
function. */
var = copy_decl_to_var (p, id);
+ if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
+ {
+ get_var_ann (var);
+ add_referenced_var (var);
+ }
/* See if the frontend wants to pass this by invisible reference. If
so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
TREE_READONLY (var) = 0;
+ /* If there is no setup required and we are in SSA, take the easy route
+ replacing all SSA names representing the function parameter by the
+ SSA name passed to function.
+
+ We need to construct map for the variable anyway as it might be used
+ in different SSA names when parameter is set in function.
+
+ FIXME: This usually kills the last connection in between inlined
+ function parameter and the actual value in debug info. Can we do
+ better here? If we just inserted the statement, copy propagation
+ would kill it anyway as it always did in older versions of GCC.
+
+ We might want to introduce a notion that single SSA_NAME might
+ represent multiple variables for purposes of debugging. */
+ if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
+ && (TREE_CODE (rhs) == SSA_NAME
+ || is_gimple_min_invariant (rhs))
+ && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
+ {
+ insert_decl_map (id, def, rhs);
+ return;
+ }
+
/* Initialize this VAR_DECL from the equivalent argument. Convert
the argument to the proper type in case it was promoted. */
if (value)
{
- tree rhs = fold_convert (TREE_TYPE (var), value);
block_stmt_iterator bsi = bsi_last (bb);
if (rhs == error_mark_node)
- return;
-
+ {
+ insert_decl_map (id, p, var_sub);
+ return;
+ }
+
STRIP_USELESS_TYPE_CONVERSION (rhs);
- /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
+ /* We want to use GIMPLE_MODIFY_STMT, not INIT_EXPR here so that we
keep our trees in gimple form. */
- init_stmt = build2 (MODIFY_EXPR, TREE_TYPE (var), var, rhs);
+ if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
+ {
+ def = remap_ssa_name (def, id);
+ init_stmt = build_gimple_modify_stmt (def, rhs);
+ SSA_NAME_DEF_STMT (def) = init_stmt;
+ SSA_NAME_IS_DEFAULT_DEF (def) = 0;
+ set_default_def (var, NULL);
+ }
+ else
+ init_stmt = build_gimple_modify_stmt (var, rhs);
/* If we did not create a gimple value and we did not create a gimple
cast of a gimple value, then we will need to gimplify INIT_STMTS
at the end. Note that is_gimple_cast only checks the outer
tree code, not its operand. Thus the explicit check that its
operand is a gimple value. */
- if (!is_gimple_val (rhs)
+ if ((!is_gimple_val (rhs)
&& (!is_gimple_cast (rhs)
|| !is_gimple_val (TREE_OPERAND (rhs, 0))))
- gimplify_stmt (&init_stmt);
+ || !is_gimple_reg (var))
+ {
+ tree_stmt_iterator i;
+
+ push_gimplify_context ();
+ gimplify_stmt (&init_stmt);
+ if (gimple_in_ssa_p (cfun)
+ && init_stmt && TREE_CODE (init_stmt) == STATEMENT_LIST)
+ {
+ /* The replacement can expose previously unreferenced
+ variables. */
+ for (i = tsi_start (init_stmt); !tsi_end_p (i); tsi_next (&i))
+ find_new_referenced_vars (tsi_stmt_ptr (i));
+ }
+ pop_gimplify_context (NULL);
+ }
/* If VAR represents a zero-sized variable, it's possible that the
assignment statment may result in no gimple statements. */
if (init_stmt)
bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
+ if (gimple_in_ssa_p (cfun))
+ for (;!bsi_end_p (bsi); bsi_next (&bsi))
+ mark_symbols_for_renaming (bsi_stmt (bsi));
}
}
/* Generate code to initialize the parameters of the function at the
- top of the stack in ID from the ARGS (presented as a TREE_LIST). */
+ top of the stack in ID from the CALL_EXPR EXP. */
static void
-initialize_inlined_parameters (copy_body_data *id, tree args, tree static_chain,
+initialize_inlined_parameters (copy_body_data *id, tree exp,
tree fn, basic_block bb)
{
tree parms;
tree a;
tree p;
tree vars = NULL_TREE;
- int argnum = 0;
+ call_expr_arg_iterator iter;
+ tree static_chain = CALL_EXPR_STATIC_CHAIN (exp);
/* Figure out what the parameters are. */
parms = DECL_ARGUMENTS (fn);
/* Loop through the parameter declarations, replacing each with an
equivalent VAR_DECL, appropriately initialized. */
- for (p = parms, a = args; p;
- a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
- {
- tree value;
-
- ++argnum;
-
- /* Find the initializer. */
- value = lang_hooks.tree_inlining.convert_parm_for_inlining
- (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);
-
- setup_one_parameter (id, p, value, fn, bb, &vars);
- }
+ for (p = parms, a = first_call_expr_arg (exp, &iter); p;
+ a = next_call_expr_arg (&iter), p = TREE_CHAIN (p))
+ setup_one_parameter (id, p, a, fn, bb, &vars);
/* Initialize the static chain. */
p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
The USE_STMT is filled to contain a use of the declaration to
indicate the return value of the function.
- RETURN_SLOT_ADDR, if non-null, was a fake parameter that
- took the address of the result. MODIFY_DEST, if non-null, was the LHS of
- the MODIFY_EXPR to which this call is the RHS.
+ RETURN_SLOT, if non-null is place where to store the result. It
+ is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
+ was the LHS of the GIMPLE_MODIFY_STMT to which this call is the RHS.
The return value is a (possibly null) value that is the result of the
function as seen by the callee. *USE_P is a (possibly null) value that
holds the result as seen by the caller. */
static tree
-declare_return_variable (copy_body_data *id, tree return_slot_addr,
- tree modify_dest, tree *use_p)
+declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
+ tree *use_p)
{
tree callee = id->src_fn;
tree caller = id->dst_fn;
/* If there was a return slot, then the return value is the
dereferenced address of that object. */
- if (return_slot_addr)
+ if (return_slot)
{
- /* The front end shouldn't have used both return_slot_addr and
+ /* The front end shouldn't have used both return_slot and
a modify expression. */
gcc_assert (!modify_dest);
if (DECL_BY_REFERENCE (result))
- var = return_slot_addr;
+ {
+ tree return_slot_addr = build_fold_addr_expr (return_slot);
+ STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
+
+ /* We are going to construct *&return_slot and we can't do that
+ for variables believed to be not addressable.
+
+ FIXME: This check possibly can match, because values returned
+ via return slot optimization are not believed to have address
+ taken by alias analysis. */
+ gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
+ if (gimple_in_ssa_p (cfun))
+ {
+ HOST_WIDE_INT bitsize;
+ HOST_WIDE_INT bitpos;
+ tree offset;
+ enum machine_mode mode;
+ int unsignedp;
+ int volatilep;
+ tree base;
+ base = get_inner_reference (return_slot, &bitsize, &bitpos,
+ &offset,
+ &mode, &unsignedp, &volatilep,
+ false);
+ if (TREE_CODE (base) == INDIRECT_REF)
+ base = TREE_OPERAND (base, 0);
+ if (TREE_CODE (base) == SSA_NAME)
+ base = SSA_NAME_VAR (base);
+ mark_sym_for_renaming (base);
+ }
+ var = return_slot_addr;
+ }
else
- var = build_fold_indirect_ref (return_slot_addr);
- if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
- && !DECL_COMPLEX_GIMPLE_REG_P (result)
+ {
+ var = return_slot;
+ gcc_assert (TREE_CODE (var) != SSA_NAME);
+ }
+ if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
+ && !DECL_GIMPLE_REG_P (result)
&& DECL_P (var))
- DECL_COMPLEX_GIMPLE_REG_P (var) = 0;
+ DECL_GIMPLE_REG_P (var) = 0;
use = NULL;
goto done;
}
gcc_assert (!TREE_ADDRESSABLE (callee_type));
/* Attempt to avoid creating a new temporary variable. */
- if (modify_dest)
+ if (modify_dest
+ && TREE_CODE (modify_dest) != SSA_NAME)
{
bool use_it = false;
use_it = false;
else if (is_global_var (base_m))
use_it = false;
- else if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
- && !DECL_COMPLEX_GIMPLE_REG_P (result)
- && DECL_COMPLEX_GIMPLE_REG_P (base_m))
+ else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
+ && !DECL_GIMPLE_REG_P (result)
+ && DECL_GIMPLE_REG_P (base_m))
use_it = false;
else if (!TREE_ADDRESSABLE (base_m))
use_it = true;
gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
- var = copy_decl_to_var (result, id);
+ var = copy_result_decl_to_var (result, id);
+ if (gimple_in_ssa_p (cfun))
+ {
+ get_var_ann (var);
+ add_referenced_var (var);
+ }
DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
not be visible to the user. */
TREE_NO_WARNING (var) = 1;
+ declare_inline_vars (id->block, var);
+
/* Build the use expr. If the return type of the function was
promoted, convert it back to the expected type. */
use = var;
STRIP_USELESS_TYPE_CONVERSION (use);
+ if (DECL_BY_REFERENCE (result))
+ var = build_fold_addr_expr (var);
+
done:
/* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
way, when the RESULT_DECL is encountered, it will be
return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
}
+/* Arguments for estimate_num_insns_1, passed via walk_tree's DATA
+   pointer.  */
+
+struct eni_data
+{
+ /* Used to return the accumulated number of insns. */
+ int count;
+
+ /* Weights of various constructs (calls, division/modulo, switches,
+    OpenMP directives). */
+ eni_weights *weights;
+};
+
/* Used by estimate_num_insns. Estimate number of instructions seen
by given statement. */
static tree
estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
{
- int *count = (int *) data;
+ struct eni_data *d = data;
tree x = *tp;
+ unsigned cost;
if (IS_TYPE_OR_DECL_P (x))
{
*walk_subtrees = 0;
return NULL;
+ /* CHANGE_DYNAMIC_TYPE_EXPR explicitly expands to nothing. */
+ case CHANGE_DYNAMIC_TYPE_EXPR:
+ *walk_subtrees = 0;
+ return NULL;
+
/* Try to estimate the cost of assignments. We have three cases to
deal with:
1) Simple assignments to registers;
3) TARGET_EXPRs.
Let us look at the first two cases, assuming we have "a = b + C":
- <modify_expr <var_decl "a"> <plus_expr <var_decl "b"> <constant C>>
+ <GIMPLE_MODIFY_STMT <var_decl "a">
+ <plus_expr <var_decl "b"> <constant C>>
If "a" is a GIMPLE register, the assignment to it is free on almost
any target, because "a" usually ends up in a real register. Hence
the only cost of this expression comes from the PLUS_EXPR, and we
- can ignore the MODIFY_EXPR.
+ can ignore the GIMPLE_MODIFY_STMT.
If "a" is not a GIMPLE register, the assignment to "a" will most
- likely be a real store, so the cost of the MODIFY_EXPR is the cost
+ likely be a real store, so the cost of the GIMPLE_MODIFY_STMT is the cost
of moving something into "a", which we compute using the function
estimate_move_cost.
The third case deals with TARGET_EXPRs, for which the semantics are
that a temporary is assigned, unless the TARGET_EXPR itself is being
assigned to something else. In the latter case we do not need the
- temporary. E.g. in <modify_expr <var_decl "a"> <target_expr>>, the
- MODIFY_EXPR is free. */
+ temporary. E.g. in:
+ <GIMPLE_MODIFY_STMT <var_decl "a"> <target_expr>>, the
+ GIMPLE_MODIFY_STMT is free. */
case INIT_EXPR:
- case MODIFY_EXPR:
+ case GIMPLE_MODIFY_STMT:
/* Is the right and side a TARGET_EXPR? */
- if (TREE_CODE (TREE_OPERAND (x, 1)) == TARGET_EXPR)
+ if (TREE_CODE (GENERIC_TREE_OPERAND (x, 1)) == TARGET_EXPR)
break;
/* ... fall through ... */
case TARGET_EXPR:
- x = TREE_OPERAND (x, 0);
+ x = GENERIC_TREE_OPERAND (x, 0);
/* Is this an assignments to a register? */
if (is_gimple_reg (x))
break;
/* Otherwise it's a store, so fall through to compute the move cost. */
case CONSTRUCTOR:
- *count += estimate_move_cost (TREE_TYPE (x));
+ d->count += estimate_move_cost (TREE_TYPE (x));
break;
/* Assign cost of 1 to usual operations.
case VEC_COND_EXPR:
case PLUS_EXPR:
+ case POINTER_PLUS_EXPR:
case MINUS_EXPR:
case MULT_EXPR:
case FIX_TRUNC_EXPR:
- case FIX_CEIL_EXPR:
- case FIX_FLOOR_EXPR:
- case FIX_ROUND_EXPR:
case NEGATE_EXPR:
case FLOAT_EXPR:
case POSTDECREMENT_EXPR:
case POSTINCREMENT_EXPR:
- case SWITCH_EXPR:
-
case ASM_EXPR:
case REALIGN_LOAD_EXPR:
case REDUC_PLUS_EXPR:
case WIDEN_SUM_EXPR:
case DOT_PROD_EXPR:
+ case VEC_WIDEN_MULT_HI_EXPR:
+ case VEC_WIDEN_MULT_LO_EXPR:
+ case VEC_UNPACK_HI_EXPR:
+ case VEC_UNPACK_LO_EXPR:
+ case VEC_UNPACK_FLOAT_HI_EXPR:
+ case VEC_UNPACK_FLOAT_LO_EXPR:
+ case VEC_PACK_TRUNC_EXPR:
+ case VEC_PACK_SAT_EXPR:
+ case VEC_PACK_FIX_TRUNC_EXPR:
case WIDEN_MULT_EXPR:
+ case VEC_EXTRACT_EVEN_EXPR:
+ case VEC_EXTRACT_ODD_EXPR:
+ case VEC_INTERLEAVE_HIGH_EXPR:
+ case VEC_INTERLEAVE_LOW_EXPR:
+
case RESX_EXPR:
- *count += 1;
+ d->count += 1;
+ break;
+
+ case SWITCH_EXPR:
+ /* TODO: Cost of a switch should be derived from the number of
+ branches. */
+ d->count += d->weights->switch_cost;
break;
/* Few special cases of expensive operations. This is useful
case FLOOR_MOD_EXPR:
case ROUND_MOD_EXPR:
case RDIV_EXPR:
- *count += 10;
+ d->count += d->weights->div_mod_cost;
break;
case CALL_EXPR:
{
tree decl = get_callee_fndecl (x);
- tree arg;
+ cost = d->weights->call_cost;
if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
switch (DECL_FUNCTION_CODE (decl))
{
return NULL_TREE;
case BUILT_IN_EXPECT:
return NULL_TREE;
+ /* Prefetch instruction is not expensive. */
+ case BUILT_IN_PREFETCH:
+ cost = 1;
+ break;
default:
break;
}
that does use function declaration to figure out the arguments. */
if (!decl)
{
- for (arg = TREE_OPERAND (x, 1); arg; arg = TREE_CHAIN (arg))
- *count += estimate_move_cost (TREE_TYPE (TREE_VALUE (arg)));
+ tree a;
+ call_expr_arg_iterator iter;
+ FOR_EACH_CALL_EXPR_ARG (a, iter, x)
+ d->count += estimate_move_cost (TREE_TYPE (a));
}
else
{
+ tree arg;
for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
- *count += estimate_move_cost (TREE_TYPE (arg));
+ d->count += estimate_move_cost (TREE_TYPE (arg));
}
- *count += PARAM_VALUE (PARAM_INLINE_CALL_COST);
+ d->count += cost;
break;
}
case OMP_CRITICAL:
case OMP_ATOMIC:
/* OpenMP directives are generally very expensive. */
- *count += 40;
+ d->count += d->weights->omp_cost;
break;
default:
return NULL;
}
-/* Estimate number of instructions that will be created by expanding EXPR. */
+/* Estimate number of instructions that will be created by expanding EXPR.
+ WEIGHTS contains weights attributed to various constructs. */
int
-estimate_num_insns (tree expr)
+estimate_num_insns (tree expr, eni_weights *weights)
{
- int num = 0;
struct pointer_set_t *visited_nodes;
basic_block bb;
block_stmt_iterator bsi;
struct function *my_function;
+ struct eni_data data;
+
+ data.count = 0;
+ data.weights = weights;
/* If we're given an entire function, walk the CFG. */
if (TREE_CODE (expr) == FUNCTION_DECL)
bsi_next (&bsi))
{
walk_tree (bsi_stmt_ptr (bsi), estimate_num_insns_1,
- &num, visited_nodes);
+ &data, visited_nodes);
}
}
pointer_set_destroy (visited_nodes);
}
else
- walk_tree_without_duplicates (&expr, estimate_num_insns_1, &num);
+ walk_tree_without_duplicates (&expr, estimate_num_insns_1, &data);
- return num;
+ return data.count;
+}
+
+/* Initializes weights used by estimate_num_insns. */
+
+void
+init_inline_once (void)
+{
+ /* Weights used by the inlining heuristics; the call cost comes from
+    the --param inline-call-cost knob.  */
+ eni_inlining_weights.call_cost = PARAM_VALUE (PARAM_INLINE_CALL_COST);
+ eni_inlining_weights.div_mod_cost = 10;
+ eni_inlining_weights.switch_cost = 1;
+ eni_inlining_weights.omp_cost = 40;
+
+ /* Weights used when estimating the size of the produced code.  */
+ eni_size_weights.call_cost = 1;
+ eni_size_weights.div_mod_cost = 1;
+ eni_size_weights.switch_cost = 10;
+ eni_size_weights.omp_cost = 40;
+
+ /* Weights used when estimating the time necessary to execute the
+    produced code.  Estimating time for call is difficult, since we have
+    no idea what the called function does.  In the current uses of
+    eni_time_weights, underestimating the cost does less harm than
+    overestimating it, so we choose a rather small value here.  */
+ eni_time_weights.call_cost = 10;
+ eni_time_weights.div_mod_cost = 10;
+ eni_time_weights.switch_cost = 4;
+ eni_time_weights.omp_cost = 40;
}
typedef struct function *function_p;
tree t;
tree use_retvar;
tree fn;
- splay_tree st;
- tree args;
- tree return_slot_addr;
+ struct pointer_map_t *st;
+ tree return_slot;
tree modify_dest;
location_t saved_location;
struct cgraph_edge *cg_edge;
edge e;
block_stmt_iterator bsi, stmt_bsi;
bool successfully_inlined = FALSE;
+ bool purge_dead_abnormal_edges;
tree t_step;
tree var;
- tree decl;
/* See what we've got. */
id = (copy_body_data *) data;
(incorrect node sharing is most common reason for missing edges. */
gcc_assert (dest->needed || !flag_unit_at_a_time);
cgraph_create_edge (id->dst_node, dest, stmt,
- bb->count, bb->loop_depth)->inline_failed
+ bb->count, CGRAPH_FREQ_BASE,
+ bb->loop_depth)->inline_failed
= N_("originally indirect function call not considered for inlining");
+ if (dump_file)
+ {
+ fprintf (dump_file, "Created new direct edge to %s",
+ cgraph_node_name (dest));
+ }
goto egress;
}
#endif
/* We will be inlining this callee. */
-
id->eh_region = lookup_stmt_eh_region (stmt);
/* Split the block holding the CALL_EXPR. */
-
e = split_block (bb, stmt);
bb = e->src;
return_block = e->dest;
remove_edge (e);
- /* split_block splits before the statement, work around this by moving
- the call into the first half_bb. Not pretty, but seems easier than
- doing the CFG manipulation by hand when the CALL_EXPR is in the last
- statement in BB. */
+ /* split_block splits after the statement; work around this by
+ moving the call into the second block manually. Not pretty,
+ but seems easier than doing the CFG manipulation by hand
+ when the CALL_EXPR is in the last statement of BB. */
stmt_bsi = bsi_last (bb);
+ bsi_remove (&stmt_bsi, false);
+
+ /* If the CALL_EXPR was in the last statement of BB, it may have
+ been the source of abnormal edges. In this case, schedule
+ the removal of dead abnormal edges. */
bsi = bsi_start (return_block);
- if (!bsi_end_p (bsi))
- bsi_move_before (&stmt_bsi, &bsi);
- else
+ if (bsi_end_p (bsi))
{
- tree stmt = bsi_stmt (stmt_bsi);
- bsi_remove (&stmt_bsi, false);
bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
+ purge_dead_abnormal_edges = true;
+ }
+ else
+ {
+ bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
+ purge_dead_abnormal_edges = false;
}
+
stmt_bsi = bsi_start (return_block);
/* Build a block containing code to initialize the arguments, the
/* Local declarations will be replaced by their equivalents in this
map. */
st = id->decl_map;
- id->decl_map = splay_tree_new (splay_tree_compare_pointers,
- NULL, NULL);
-
- /* Initialize the parameters. */
- args = TREE_OPERAND (t, 1);
+ id->decl_map = pointer_map_create ();
/* Record the function we are about to inline. */
id->src_fn = fn;
id->src_node = cg_edge->callee;
+ id->src_cfun = DECL_STRUCT_FUNCTION (fn);
- initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2), fn, bb);
+ initialize_inlined_parameters (id, t, fn, bb);
if (DECL_INITIAL (fn))
add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
/* Find the lhs to which the result of this call is assigned. */
- return_slot_addr = NULL;
- if (TREE_CODE (stmt) == MODIFY_EXPR)
+ return_slot = NULL;
+ if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
{
- modify_dest = TREE_OPERAND (stmt, 0);
+ modify_dest = GIMPLE_STMT_OPERAND (stmt, 0);
/* The function which we are inlining might not return a value,
in which case we should issue a warning that the function
TREE_NO_WARNING (modify_dest) = 1;
if (CALL_EXPR_RETURN_SLOT_OPT (t))
{
- return_slot_addr = build_fold_addr_expr (modify_dest);
- STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
+ return_slot = modify_dest;
modify_dest = NULL;
}
}
modify_dest = NULL;
/* Declare the return variable for the function. */
- decl = declare_return_variable (id, return_slot_addr,
- modify_dest, &use_retvar);
- /* Do this only if declare_return_variable created a new one. */
- if (decl && !return_slot_addr && decl != modify_dest)
- declare_inline_vars (id->block, decl);
+ declare_return_variable (id, return_slot,
+ modify_dest, &use_retvar);
/* This is it. Duplicate the callee body. Assume callee is
pre-gimplified. Note that we must not alter the caller
}
/* Clean up. */
- splay_tree_delete (id->decl_map);
+ pointer_map_destroy (id->decl_map);
id->decl_map = st;
/* If the inlined function returns a result that we care about,
if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
{
*tp = use_retvar;
+ if (gimple_in_ssa_p (cfun))
+ {
+ update_stmt (stmt);
+ mark_symbols_for_renaming (stmt);
+ }
maybe_clean_or_replace_eh_stmt (stmt, stmt);
}
else
/* We're modifying a TSI owned by gimple_expand_calls_inline();
tsi_delink() will leave the iterator in a sane state. */
- bsi_remove (&stmt_bsi, true);
+ {
+ /* Handle the case of inlining a function that is missing a return
+ statement, so its return value becomes undefined. */
+ if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
+ && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME)
+ {
+ /* STMT is a GIMPLE tuple; use GIMPLE_STMT_OPERAND (not
+ TREE_OPERAND) to access its operands. */
+ tree name = GIMPLE_STMT_OPERAND (stmt, 0);
+ tree var = SSA_NAME_VAR (name);
+ tree def = gimple_default_def (cfun, var);
- bsi_next (&bsi);
- if (bsi_end_p (bsi))
- tree_purge_dead_eh_edges (return_block);
+ /* If the variable already has an undefined default definition,
+ make this name undefined via a move from it. */
+ if (def)
+ {
+ GIMPLE_STMT_OPERAND (stmt, 1) = def;
+ update_stmt (stmt);
+ }
+ /* Otherwise drop the statement and make NAME itself the
+ variable's (undefined) default definition. */
+ else
+ {
+ bsi_remove (&stmt_bsi, true);
+ set_default_def (var, name);
+ SSA_NAME_DEF_STMT (name) = build_empty_stmt ();
+ }
+ }
+ else
+ bsi_remove (&stmt_bsi, true);
+ }
+
+ if (purge_dead_abnormal_edges)
+ tree_purge_dead_abnormal_call_edges (return_block);
/* If the value of the new expression is ignored, that's OK. We
don't warn about this for CALL_EXPRs, so we shouldn't warn about
/* Expand call statements reachable from STMT_P.
We can only have CALL_EXPRs as the "toplevel" tree code or nested
- in a MODIFY_EXPR. See tree-gimple.c:get_call_expr_in(). We can
+ in a GIMPLE_MODIFY_STMT. See tree-gimple.c:get_call_expr_in(). We can
unfortunately not use that function here because we need a pointer
to the CALL_EXPR, not the tree itself. */
tree *expr_p = bsi_stmt_ptr (bsi);
tree stmt = *expr_p;
- if (TREE_CODE (*expr_p) == MODIFY_EXPR)
- expr_p = &TREE_OPERAND (*expr_p, 1);
+ if (TREE_CODE (*expr_p) == GIMPLE_MODIFY_STMT)
+ expr_p = &GIMPLE_STMT_OPERAND (*expr_p, 1);
if (TREE_CODE (*expr_p) == WITH_SIZE_EXPR)
expr_p = &TREE_OPERAND (*expr_p, 0);
if (TREE_CODE (*expr_p) == CALL_EXPR)
return false;
}
+/* Walk all basic blocks created after FIRST and try to fold every statement
+   in the STATEMENTS pointer set.  Folded statements are updated in place;
+   when folding changes a statement's EH semantics, the now-dead EH edges
+   of its block are purged as well.  */
+static void
+fold_marked_statements (int first, struct pointer_set_t *statements)
+{
+ for (;first < n_basic_blocks;first++)
+ if (BASIC_BLOCK (first))
+ {
+ block_stmt_iterator bsi;
+ for (bsi = bsi_start (BASIC_BLOCK (first));
+ !bsi_end_p (bsi); bsi_next (&bsi))
+ if (pointer_set_contains (statements, bsi_stmt (bsi)))
+ {
+ /* Remember the pre-fold statement so EH changes can be
+ detected below. */
+ tree old_stmt = bsi_stmt (bsi);
+ if (fold_stmt (bsi_stmt_ptr (bsi)))
+ {
+ update_stmt (bsi_stmt (bsi));
+ if (maybe_clean_or_replace_eh_stmt (old_stmt, bsi_stmt (bsi)))
+ tree_purge_dead_eh_edges (BASIC_BLOCK (first));
+ }
+ }
+ }
+}
+
+/* Return true if BB has at least one abnormal outgoing edge, i.e. one
+   successor edge flagged EDGE_ABNORMAL.  */
+
+static inline bool
+has_abnormal_outgoing_edge_p (basic_block bb)
+{
+ edge e;
+ edge_iterator ei;
+
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ if (e->flags & EDGE_ABNORMAL)
+ return true;
+
+ return false;
+}
+
+/* When a call with side effects that can make an abnormal (non-local)
+   goto gets inlined into a function with non-local labels, the call
+   becomes a potential non-local goto, so we need to add the appropriate
+   abnormal edges.  */
+
+static void
+make_nonlocal_label_edges (void)
+{
+ block_stmt_iterator bsi;
+ basic_block bb;
+
+ FOR_EACH_BB (bb)
+ {
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ {
+ tree stmt = bsi_stmt (bsi);
+ if (tree_can_make_abnormal_goto (stmt))
+ {
+ /* If the statement ends the block, only add the abnormal
+ edges when the block does not already have one. */
+ if (stmt == bsi_stmt (bsi_last (bb)))
+ {
+ if (!has_abnormal_outgoing_edge_p (bb))
+ make_abnormal_goto_edges (bb, true);
+ }
+ else
+ {
+ /* The statement is in the middle of BB: split so the
+ statement ends its block, then add the edges there. */
+ edge e = split_block (bb, stmt);
+ bb = e->src;
+ make_abnormal_goto_edges (bb, true);
+ }
+ break;
+ }
+
+ /* Update PHIs on nonlocal goto receivers we (possibly)
+ just created new edges into. */
+ if (TREE_CODE (stmt) == LABEL_EXPR
+ && gimple_in_ssa_p (cfun))
+ {
+ tree target = LABEL_EXPR_LABEL (stmt);
+ if (DECL_NONLOCAL (target))
+ {
+ tree phi;
+
+ for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+ {
+ gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI
+ (PHI_RESULT (phi)));
+ mark_sym_for_renaming
+ (SSA_NAME_VAR (PHI_RESULT (phi)));
+ }
+ }
+ }
+ }
+ }
+}
+
/* Expand calls to inline functions in the body of FN. */
-void
+unsigned int
optimize_inline_calls (tree fn)
{
copy_body_data id;
tree prev_fn;
basic_block bb;
+ int last = n_basic_blocks;
/* There is no point in performing inlining if errors have already
occurred -- and we might crash if we try to inline invalid
code. */
if (errorcount || sorrycount)
- return;
+ return 0;
/* Clear out ID. */
memset (&id, 0, sizeof (id));
id.transform_new_cfg = false;
id.transform_return_to_modify = true;
id.transform_lang_insert_block = false;
+ id.statements_to_fold = pointer_set_create ();
push_gimplify_context ();
+ /* We make no attempts to keep dominance info up-to-date. */
+ free_dominance_info (CDI_DOMINATORS);
+ free_dominance_info (CDI_POST_DOMINATORS);
+
/* Reach the trees by walking over the CFG, and note the
enclosing basic-blocks in the call edges. */
/* We walk the blocks going forward, because inlined function bodies
gcc_assert (e->inline_failed);
}
#endif
- /* We need to rescale frequencies again to peak at REG_BR_PROB_BASE
- as inlining loops might increase the maximum. */
- if (ENTRY_BLOCK_PTR->count)
- counts_to_freqs ();
+
+ /* We are not going to maintain the cgraph edges up to date.
+ Kill it so it won't confuse us. */
+ cgraph_node_remove_callees (id.dst_node);
+
+ fold_marked_statements (last, id.statements_to_fold);
+ pointer_set_destroy (id.statements_to_fold);
fold_cond_expr_cond ();
+ if (current_function_has_nonlocal_label)
+ make_nonlocal_label_edges ();
+ /* It would be nice to check SSA/CFG/statement consistency here, but it is
+ not possible yet - the IPA passes might make various functions to not
+ throw and they don't care to proactively update local EH info. This is
+ done later in fixup_cfg pass that also execute the verification. */
+ return (TODO_update_ssa | TODO_cleanup_cfg
+ | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
+ | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
}
/* FN is a function that has a complete body, and CLONE is a function whose
id.src_fn = fn;
id.dst_fn = clone;
id.src_cfun = DECL_STRUCT_FUNCTION (fn);
- id.decl_map = (splay_tree)arg_map;
+ id.decl_map = (struct pointer_map_t *)arg_map;
id.copy_decl = copy_decl_no_change;
id.transform_call_graph_edges = CB_CGE_DUPLICATE;
copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
enum tree_code code = TREE_CODE (*tp);
+ enum tree_code_class cl = TREE_CODE_CLASS (code);
/* We make copies of most nodes. */
- if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
+ if (IS_EXPR_CODE_CLASS (cl)
+ || IS_GIMPLE_STMT_CODE_CLASS (cl)
|| code == TREE_LIST
|| code == TREE_VEC
|| code == TYPE_DECL
{
/* Because the chain gets clobbered when we make a copy, we save it
here. */
- tree chain = TREE_CHAIN (*tp);
- tree new;
+ tree chain = NULL_TREE, new;
+
+ if (!GIMPLE_TUPLE_P (*tp))
+ chain = TREE_CHAIN (*tp);
/* Copy the node. */
new = copy_node (*tp);
static void
remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
{
- splay_tree st = (splay_tree) st_;
- splay_tree_node n;
+ struct pointer_map_t *st = (struct pointer_map_t *) st_;
+ tree *n;
tree t;
/* See if we already encountered this SAVE_EXPR. */
- n = splay_tree_lookup (st, (splay_tree_key) *tp);
+ n = (tree *) pointer_map_contains (st, *tp);
/* If we didn't already remap this SAVE_EXPR, do so now. */
if (!n)
t = copy_node (*tp);
/* Remember this SAVE_EXPR. */
- splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
+ *pointer_map_insert (st, *tp) = t;
/* Make sure we don't remap an already-remapped SAVE_EXPR. */
- splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
+ *pointer_map_insert (st, t) = t;
}
else
{
/* We've already walked into this SAVE_EXPR; don't do it again. */
*walk_subtrees = 0;
- t = (tree) n->value;
+ t = *n;
}
/* Replace this SAVE_EXPR with the copy. */
unsave_r (tree *tp, int *walk_subtrees, void *data)
{
copy_body_data *id = (copy_body_data *) data;
- splay_tree st = id->decl_map;
- splay_tree_node n;
+ struct pointer_map_t *st = id->decl_map;
+ tree *n;
/* Only a local declaration (variable or label). */
if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
|| TREE_CODE (*tp) == LABEL_DECL)
{
/* Lookup the declaration. */
- n = splay_tree_lookup (st, (splay_tree_key) *tp);
+ n = (tree *) pointer_map_contains (st, *tp);
/* If it's there, remap it. */
if (n)
- *tp = (tree) n->value;
+ *tp = *n;
}
else if (TREE_CODE (*tp) == STATEMENT_LIST)
memset (&id, 0, sizeof (id));
id.src_fn = current_function_decl;
id.dst_fn = current_function_decl;
- id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
+ id.decl_map = pointer_map_create ();
id.copy_decl = copy_decl_no_change;
id.transform_call_graph_edges = CB_CGE_DUPLICATE;
walk_tree (&expr, unsave_r, &id, NULL);
/* Clean up. */
- splay_tree_delete (id.decl_map);
+ pointer_map_destroy (id.decl_map);
return expr;
}
TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
TREE_READONLY (copy) = TREE_READONLY (decl);
TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
- DECL_COMPLEX_GIMPLE_REG_P (copy) = DECL_COMPLEX_GIMPLE_REG_P (decl);
+ DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
+ DECL_NO_TBAA_P (copy) = DECL_NO_TBAA_P (decl);
return copy_decl_for_dup_finish (id, decl, copy);
}
+/* Like copy_decl_to_var, but create a return slot object instead of a
+   pointer variable for return by invisible reference.  DECL is the
+   PARM_DECL or RESULT_DECL being copied; ID supplies the duplication
+   context passed through to copy_decl_for_dup_finish.  */
+
+static tree
+copy_result_decl_to_var (tree decl, copy_body_data *id)
+{
+ tree copy, type;
+
+ gcc_assert (TREE_CODE (decl) == PARM_DECL
+ || TREE_CODE (decl) == RESULT_DECL);
+
+ type = TREE_TYPE (decl);
+ /* For return by invisible reference the decl's type is a pointer;
+ strip one level so the new VAR_DECL is the pointed-to object. */
+ if (DECL_BY_REFERENCE (decl))
+ type = TREE_TYPE (type);
+
+ copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
+ TREE_READONLY (copy) = TREE_READONLY (decl);
+ TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
+ /* These flags describe the pointed-to object only when not returning
+ by reference, so copy them only in that case. */
+ if (!DECL_BY_REFERENCE (decl))
+ {
+ TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
+ DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
+ DECL_NO_TBAA_P (copy) = DECL_NO_TBAA_P (decl);
+ }
+
+ return copy_decl_for_dup_finish (id, decl, copy);
+}
+
+
static tree
copy_decl_no_change (tree decl, copy_body_data *id)
{
struct cgraph_node *old_version_node;
struct cgraph_node *new_version_node;
copy_body_data id;
- tree p, new_fndecl;
+ tree p;
unsigned i;
struct ipa_replace_map *replace_info;
basic_block old_entry_block;
tree t_step;
+ tree old_current_function_decl = current_function_decl;
gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
&& TREE_CODE (new_decl) == FUNCTION_DECL);
old_version_node = cgraph_node (old_decl);
new_version_node = cgraph_node (new_decl);
- allocate_struct_function (new_decl);
- /* Cfun points to the new allocated function struct at this point. */
- cfun->function_end_locus = DECL_SOURCE_LOCATION (new_decl);
-
DECL_ARTIFICIAL (new_decl) = 1;
DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
+ /* Prepare the data structures for the tree copy. */
+ memset (&id, 0, sizeof (id));
+
/* Generate a new name for the new version. */
if (!update_clones)
- DECL_NAME (new_decl) = create_tmp_var_name (NULL);
- /* Create a new SYMBOL_REF rtx for the new name. */
- if (DECL_RTL (old_decl) != NULL)
{
- SET_DECL_RTL (new_decl, copy_rtx (DECL_RTL (old_decl)));
- XEXP (DECL_RTL (new_decl), 0) =
- gen_rtx_SYMBOL_REF (GET_MODE (XEXP (DECL_RTL (old_decl), 0)),
- IDENTIFIER_POINTER (DECL_NAME (new_decl)));
+ DECL_NAME (new_decl) = create_tmp_var_name (NULL);
+ SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
+ SET_DECL_RTL (new_decl, NULL_RTX);
+ id.statements_to_fold = pointer_set_create ();
}
-
- /* Prepare the data structures for the tree copy. */
- memset (&id, 0, sizeof (id));
- id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
+ id.decl_map = pointer_map_create ();
id.src_fn = old_decl;
id.dst_fn = new_decl;
id.src_node = old_version_node;
id.transform_lang_insert_block = false;
current_function_decl = new_decl;
+ old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
+ (DECL_STRUCT_FUNCTION (old_decl));
+ initialize_cfun (new_decl, old_decl,
+ old_entry_block->count,
+ old_entry_block->frequency);
+ push_cfun (DECL_STRUCT_FUNCTION (new_decl));
/* Copy the function's static chain. */
p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
}
/* Copy the Function's body. */
- old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
- (DECL_STRUCT_FUNCTION (old_decl));
- new_fndecl = copy_body (&id,
- old_entry_block->count,
- old_entry_block->frequency, NULL, NULL);
+ copy_body (&id, old_entry_block->count, old_entry_block->frequency, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
- DECL_SAVED_TREE (new_decl) = DECL_SAVED_TREE (new_fndecl);
-
- DECL_STRUCT_FUNCTION (new_decl)->cfg =
- DECL_STRUCT_FUNCTION (new_fndecl)->cfg;
- DECL_STRUCT_FUNCTION (new_decl)->eh = DECL_STRUCT_FUNCTION (new_fndecl)->eh;
- DECL_STRUCT_FUNCTION (new_decl)->ib_boundaries_block =
- DECL_STRUCT_FUNCTION (new_fndecl)->ib_boundaries_block;
- DECL_STRUCT_FUNCTION (new_decl)->last_label_uid =
- DECL_STRUCT_FUNCTION (new_fndecl)->last_label_uid;
-
if (DECL_RESULT (old_decl) != NULL_TREE)
{
tree *res_decl = &DECL_RESULT (old_decl);
lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
}
- current_function_decl = NULL;
/* Renumber the lexical scoping (non-code) blocks consecutively. */
number_blocks (new_decl);
/* Clean up. */
- splay_tree_delete (id.decl_map);
- fold_cond_expr_cond ();
+ pointer_map_destroy (id.decl_map);
+ if (!update_clones)
+ {
+ fold_marked_statements (0, id.statements_to_fold);
+ pointer_set_destroy (id.statements_to_fold);
+ fold_cond_expr_cond ();
+ }
+ if (gimple_in_ssa_p (cfun))
+ {
+ free_dominance_info (CDI_DOMINATORS);
+ free_dominance_info (CDI_POST_DOMINATORS);
+ if (!update_clones)
+ delete_unreachable_blocks ();
+ update_ssa (TODO_update_ssa);
+ if (!update_clones)
+ {
+ fold_cond_expr_cond ();
+ if (need_ssa_update_p ())
+ update_ssa (TODO_update_ssa);
+ }
+ }
+ free_dominance_info (CDI_DOMINATORS);
+ free_dominance_info (CDI_POST_DOMINATORS);
+ pop_cfun ();
+ current_function_decl = old_current_function_decl;
+ gcc_assert (!current_function_decl
+ || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
return;
}
id.src_fn = current_function_decl;
id.dst_fn = current_function_decl;
id.src_cfun = cfun;
- id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
+ id.decl_map = pointer_map_create ();
type = remap_type_1 (type, &id);
- splay_tree_delete (id.decl_map);
+ pointer_map_destroy (id.decl_map);
return type;
}