tree callee;
/* FUNCTION_DECL for function being inlined into. */
tree caller;
- /* struct function for function being inlined. Usually this is the same
- as DECL_STRUCT_FUNCTION (callee), but can be different if saved_cfg
- and saved_eh are in use. */
+ /* struct function for function being inlined. */
struct function *callee_cfun;
/* The VAR_DECL for the return value. */
tree retvar;
distinguish between those two situations. This flag is true if
we are cloning, rather than inlining. */
bool cloning_p;
- /* Similarly for saving function body. */
- bool saving_p;
/* Versioning function is slightly different from inlining. */
bool versioning_p;
+ /* If set, the call_stmt of edges in clones of caller functions will
+ be updated. */
+ bool update_clones_p;
/* Callgraph node of function we are inlining into. */
struct cgraph_node *node;
/* Callgraph node of currently inlined function. */
}
static tree
-remap_type (tree type, inline_data *id)
+remap_type_1 (tree type, inline_data *id)
{
- splay_tree_node node;
tree new, t;
- if (type == NULL)
- return type;
-
- /* See if we have remapped this type. */
- node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
- if (node)
- return (tree) node->value;
-
- /* The type only needs remapping if it's variably modified. */
- if (! variably_modified_type_p (type, id->callee))
- {
- insert_decl_map (id, type, type);
- return type;
- }
-
/* We do need a copy. build and register it now. If this is a pointer or
reference type, remap the designated type and make a new pointer or
reference type. */
case RECORD_TYPE:
case UNION_TYPE:
case QUAL_UNION_TYPE:
- walk_tree (&TYPE_FIELDS (new), copy_body_r, id, NULL);
+ {
+ tree f, nf = NULL;
+
+ for (f = TYPE_FIELDS (new); f ; f = TREE_CHAIN (f))
+ {
+ t = remap_decl (f, id);
+ DECL_CONTEXT (t) = new;
+ TREE_CHAIN (t) = nf;
+ nf = t;
+ }
+ TYPE_FIELDS (new) = nreverse (nf);
+ }
break;
case OFFSET_TYPE:
}
+/* Remap TYPE through ID->decl_map, caching the result.  Types that are
+   not variably modified are mapped to themselves; otherwise the real
+   copying work is done by remap_type_1.  */
static tree
+remap_type (tree type, inline_data *id)
+{
+ splay_tree_node node;
+
+ if (type == NULL)
+ return type;
+
+ /* See if we have remapped this type. */
+ node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
+ if (node)
+ return (tree) node->value;
+
+ /* The type only needs remapping if it's variably modified. */
+ if (! variably_modified_type_p (type, id->callee))
+ {
+ insert_decl_map (id, type, type);
+ return type;
+ }
+
+ return remap_type_1 (type, id);
+}
+
+static tree
remap_decls (tree decls, inline_data *id)
{
tree old_var;
else /* Else the RETURN_EXPR returns no value. */
{
*tp = NULL;
- return (void *)1;
+ return (tree) (void *)1;
}
}
n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
if (n)
{
+ tree new;
/* If we happen to get an ADDR_EXPR in n->value, strip
it manually here as we'll eventually get ADDR_EXPRs
which lie about their types pointed to. In this case
but we absolutely rely on that. As fold_indirect_ref
does other useful transformations, try that first, though. */
tree type = TREE_TYPE (TREE_TYPE ((tree)n->value));
- *tp = fold_indirect_ref_1 (type, (tree)n->value);
+ new = unshare_expr ((tree)n->value);
+ *tp = fold_indirect_ref_1 (type, new);
if (! *tp)
{
- if (TREE_CODE ((tree)n->value) == ADDR_EXPR)
- *tp = TREE_OPERAND ((tree)n->value, 0);
+ if (TREE_CODE (new) == ADDR_EXPR)
+ *tp = TREE_OPERAND (new, 0);
else
- *tp = build1 (INDIRECT_REF, type, (tree)n->value);
+ *tp = build1 (INDIRECT_REF, type, new);
}
*walk_subtrees = 0;
return NULL;
else if (TREE_CODE (*tp) == ADDR_EXPR)
{
walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
- recompute_tree_invarant_for_addr_expr (*tp);
+ recompute_tree_invariant_for_addr_expr (*tp);
*walk_subtrees = 0;
}
}
/* create_basic_block() will append every new block to
basic_block_info automatically. */
- copy_basic_block = create_basic_block (NULL, (void *) 0, bb->prev_bb->aux);
+ copy_basic_block = create_basic_block (NULL, (void *) 0,
+ (basic_block) bb->prev_bb->aux);
copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
copy_basic_block->frequency = (bb->frequency
* frequency_scale / REG_BR_PROB_BASE);
callgraph edges and update or duplicate them. */
if (call && (decl = get_callee_fndecl (call)))
{
- if (id->saving_p)
- {
- struct cgraph_node *node;
- struct cgraph_edge *edge;
-
- /* We're saving a copy of the body, so we'll update the
- callgraph nodes in place. Note that we avoid
- altering the original callgraph node; we begin with
- the first clone. */
- for (node = id->node->next_clone;
- node;
- node = node->next_clone)
- {
- edge = cgraph_edge (node, orig_stmt);
- gcc_assert (edge);
- edge->call_stmt = stmt;
- }
- }
- else
+ if (!id->versioning_p)
{
struct cgraph_edge *edge;
/* We're cloning or inlining this body; duplicate the
associate callgraph nodes. */
- if (!id->versioning_p)
- {
- edge = cgraph_edge (id->current_node, orig_stmt);
- if (edge)
- cgraph_clone_edge (edge, id->node, stmt,
- REG_BR_PROB_BASE, 1, true);
- }
+ edge = cgraph_edge (id->current_node, orig_stmt);
+ if (edge)
+ cgraph_clone_edge (edge, id->node, stmt,
+ REG_BR_PROB_BASE, 1, true);
}
- if (id->versioning_p)
+ else
{
/* Update the call_expr on the edges from the new version
to its callees. */
struct cgraph_edge *edge;
edge = cgraph_edge (id->node, orig_stmt);
if (edge)
- edge->call_stmt = stmt;
+ {
+ edge->call_stmt = stmt;
+ if (id->update_clones_p)
+ {
+ struct cgraph_node *n;
+ for (n = id->node->next_clone; n; n = n->next_clone)
+ cgraph_edge (n, orig_stmt)->call_stmt = stmt;
+ }
+ }
}
}
/* If you think we can abort here, you are wrong.
static void
copy_edges_for_bb (basic_block bb, int count_scale)
{
- basic_block new_bb = bb->aux;
+ basic_block new_bb = (basic_block) bb->aux;
edge_iterator ei;
edge old_edge;
block_stmt_iterator bsi;
if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
&& old_edge->dest->aux != EXIT_BLOCK_PTR)
flags |= EDGE_FALLTHRU;
- new = make_edge (new_bb, old_edge->dest->aux, flags);
+ new = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
new->probability = old_edge->probability;
}
+/* Wrapper for remap_decl usable as a generic callback (e.g. for
+   duplicate_eh_regions); DATA is really the inline_data context.  */
static tree
remap_decl_1 (tree decl, void *data)
{
- return remap_decl (decl, data);
+ return remap_decl (decl, (inline_data *) data);
}
/* Make a copy of the body of FN so that it can be inserted inline in
(struct function *) ggc_alloc_cleared (sizeof (struct function));
basic_block bb;
tree new_fndecl = NULL;
- bool saving_or_cloning;
+ bool versioning_or_cloning;
int count_scale, frequency_scale;
if (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count)
*cfun_to_copy = *DECL_STRUCT_FUNCTION (callee_fndecl);
- /* If there is a saved_cfg+saved_args lurking in the
- struct function, a copy of the callee body was saved there, and
- the 'struct cgraph edge' nodes have been fudged to point into the
- saved body. Accordingly, we want to copy that saved body so the
- callgraph edges will be recognized and cloned properly. */
- if (cfun_to_copy->saved_cfg)
- {
- cfun_to_copy->cfg = cfun_to_copy->saved_cfg;
- cfun_to_copy->eh = cfun_to_copy->saved_eh;
- }
id->callee_cfun = cfun_to_copy;
- /* If saving or cloning a function body, create new basic_block_info
+ /* If versioning or cloning a function body, create new basic_block_info
and label_to_block_maps. Otherwise, we're duplicating a function
body for inlining; insert our new blocks and labels into the
existing varrays. */
- saving_or_cloning = (id->saving_p || id->cloning_p || id->versioning_p);
- if (saving_or_cloning)
+ versioning_or_cloning = (id->cloning_p || id->versioning_p);
+ if (versioning_or_cloning)
{
new_cfun =
(struct function *) ggc_alloc_cleared (sizeof (struct function));
/* Duplicate any exception-handling regions. */
if (cfun->eh)
{
- if (saving_or_cloning)
+ if (versioning_or_cloning)
init_eh_for_function ();
id->eh_region_offset = duplicate_eh_regions (cfun_to_copy,
remap_decl_1,
FOR_ALL_BB_FN (bb, cfun_to_copy)
bb->aux = NULL;
- if (saving_or_cloning)
+ if (versioning_or_cloning)
pop_cfun ();
return new_fndecl;
/* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
keep our trees in gimple form. */
- init_stmt = build (MODIFY_EXPR, TREE_TYPE (var), var, rhs);
+ init_stmt = build2 (MODIFY_EXPR, TREE_TYPE (var), var, rhs);
/* If we did not create a gimple value and we did not create a gimple
cast of a gimple value, then we will need to gimplify INIT_STMTS
&& (!is_gimple_cast (rhs)
|| !is_gimple_val (TREE_OPERAND (rhs, 0))))
gimplify_stmt (&init_stmt);
- bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
+
+ /* If VAR represents a zero-sized variable, it's possible that the
+ assignment statement may result in no gimple statements. */
+ if (init_stmt)
+ bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
}
}
/* Figure out what the parameters are. */
parms = DECL_ARGUMENTS (fn);
- if (fn == current_function_decl)
- parms = cfun->saved_args;
/* Loop through the parameter declarations, replacing each with an
equivalent VAR_DECL, appropriately initialized. */
/* Initialize the static chain. */
p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
- if (fn == current_function_decl)
- p = DECL_STRUCT_FUNCTION (fn)->saved_static_chain_decl;
+ gcc_assert (fn != current_function_decl);
if (p)
{
/* No static chain? Seems like a bug in tree-nested.c. */
var = return_slot_addr;
else
var = build_fold_indirect_ref (return_slot_addr);
+ if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
+ && !DECL_COMPLEX_GIMPLE_REG_P (result)
+ && DECL_P (var))
+ DECL_COMPLEX_GIMPLE_REG_P (var) = 0;
use = NULL;
goto done;
}
/* If the callee cannot possibly modify MODIFY_DEST, then we can
reuse it as the result of the call directly. Don't do this if
it would promote MODIFY_DEST to addressable. */
- else if (!TREE_STATIC (modify_dest)
- && !TREE_ADDRESSABLE (modify_dest)
- && !TREE_ADDRESSABLE (result))
- use_it = true;
+ else if (TREE_ADDRESSABLE (result))
+ use_it = false;
+ else
+ {
+ tree base_m = get_base_address (modify_dest);
+
+ /* If the base isn't a decl, then it's a pointer, and we don't
+ know where that's going to go. */
+ if (!DECL_P (base_m))
+ use_it = false;
+ else if (is_global_var (base_m))
+ use_it = false;
+ else if (!TREE_ADDRESSABLE (base_m))
+ use_it = true;
+ }
if (use_it)
{
static tree
estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
{
- int *count = data;
+ int *count = (int *) data;
tree x = *tp;
if (IS_TYPE_OR_DECL_P (x))
}
goto egress;
}
+ fn = cg_edge->callee->decl;
#ifdef ENABLE_CHECKING
if (cg_edge->callee->decl != id->node->decl)
else
{
tree stmt = bsi_stmt (stmt_bsi);
- bsi_remove (&stmt_bsi);
+ bsi_remove (&stmt_bsi, false);
bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
}
stmt_bsi = bsi_start (return_block);
/* Record the function we are about to inline. */
id->callee = fn;
- if (DECL_STRUCT_FUNCTION (fn)->saved_blocks)
- add_lexical_block (id->block, remap_blocks (DECL_STRUCT_FUNCTION (fn)->saved_blocks, id));
- else if (DECL_INITIAL (fn))
+ if (DECL_INITIAL (fn))
add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
/* Return statements in the function body will be replaced by jumps
/* Add local vars in this inlined callee to caller. */
t_step = id->callee_cfun->unexpanded_var_list;
- if (id->callee_cfun->saved_unexpanded_var_list)
- t_step = id->callee_cfun->saved_unexpanded_var_list;
for (; t_step; t_step = TREE_CHAIN (t_step))
{
var = TREE_VALUE (t_step);
else
/* We're modifying a TSI owned by gimple_expand_calls_inline();
tsi_delink() will leave the iterator in a sane state. */
- bsi_remove (&stmt_bsi);
+ bsi_remove (&stmt_bsi, true);
bsi_next (&bsi);
if (bsi_end_p (bsi))
append_to_statement_list_force (copy_generic_body (&id), &DECL_SAVED_TREE (clone));
}
-/* Save duplicate body in FN. MAP is used to pass around splay tree
- used to update arguments in restore_body. */
-
-/* Make and return duplicate of body in FN. Put copies of DECL_ARGUMENTS
- in *arg_copy and of the static chain, if any, in *sc_copy. */
-
-void
-save_body (tree fn, tree *arg_copy, tree *sc_copy)
-{
- inline_data id;
- tree newdecl, *parg;
- basic_block fn_entry_block;
- tree t_step;
-
- memset (&id, 0, sizeof (id));
- id.callee = fn;
- id.callee_cfun = DECL_STRUCT_FUNCTION (fn);
- id.caller = fn;
- id.node = cgraph_node (fn);
- id.saving_p = true;
- id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
- *arg_copy = DECL_ARGUMENTS (fn);
-
- for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
- {
- tree new = copy_node (*parg);
-
- lang_hooks.dup_lang_specific_decl (new);
- DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*parg);
- insert_decl_map (&id, *parg, new);
- TREE_CHAIN (new) = TREE_CHAIN (*parg);
- *parg = new;
- }
-
- *sc_copy = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
- if (*sc_copy)
- {
- tree new = copy_node (*sc_copy);
-
- lang_hooks.dup_lang_specific_decl (new);
- DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*sc_copy);
- insert_decl_map (&id, *sc_copy, new);
- TREE_CHAIN (new) = TREE_CHAIN (*sc_copy);
- *sc_copy = new;
- }
-
- /* We're not inside any EH region. */
- id.eh_region = -1;
-
- insert_decl_map (&id, DECL_RESULT (fn), DECL_RESULT (fn));
-
- DECL_STRUCT_FUNCTION (fn)->saved_blocks
- = remap_blocks (DECL_INITIAL (fn), &id);
- for (t_step = id.callee_cfun->unexpanded_var_list;
- t_step;
- t_step = TREE_CHAIN (t_step))
- {
- tree var = TREE_VALUE (t_step);
- if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
- cfun->saved_unexpanded_var_list
- = tree_cons (NULL_TREE, var, cfun->saved_unexpanded_var_list);
- else
- cfun->saved_unexpanded_var_list
- = tree_cons (NULL_TREE, remap_decl (var, &id),
- cfun->saved_unexpanded_var_list);
- }
-
- /* Actually copy the body, including a new (struct function *) and CFG.
- EH info is also duplicated so its labels point into the copied
- CFG, not the original. */
- fn_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fn));
- newdecl = copy_body (&id, fn_entry_block->count, fn_entry_block->frequency,
- NULL, NULL);
- DECL_STRUCT_FUNCTION (fn)->saved_cfg = DECL_STRUCT_FUNCTION (newdecl)->cfg;
- DECL_STRUCT_FUNCTION (fn)->saved_eh = DECL_STRUCT_FUNCTION (newdecl)->eh;
-
- /* Clean up. */
- splay_tree_delete (id.decl_map);
-}
-
/* Passed to walk_tree. Copies the node pointed to, if appropriate. */
tree
DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
/* The new variable/label has no RTL, yet. */
- if (!TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
+ if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
+ && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
SET_DECL_RTL (copy, NULL_RTX);
/* These args would always appear unused, if not for this. */
respectively. In case we want to replace a DECL
tree with another tree while duplicating the function's
body, TREE_MAP represents the mapping between these
- trees. */
+ trees. If UPDATE_CLONES is set, the call_stmt fields
+ of edges of clones of the function will be updated. */
void
-tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map)
+tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map,
+ bool update_clones)
{
struct cgraph_node *old_version_node;
struct cgraph_node *new_version_node;
DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
/* Generate a new name for the new version. */
- DECL_NAME (new_decl) =
- create_tmp_var_name (NULL);
+ if (!update_clones)
+ DECL_NAME (new_decl) =
+ create_tmp_var_name (NULL);
/* Create a new SYMBOL_REF rtx for the new name. */
if (DECL_RTL (old_decl) != NULL)
{
id.current_node = cgraph_node (old_decl);
id.versioning_p = true;
+ id.update_clones_p = update_clones;
id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
id.caller = new_decl;
id.callee = old_decl;
+/* Return true when ID describes a true inline operation, as opposed to
+   cloning or versioning a function body.  */
static inline bool
inlining_p (inline_data * id)
{
- return (!id->saving_p && !id->cloning_p && !id->versioning_p);
+ return (!id->cloning_p && !id->versioning_p);
+}
+
+/* Duplicate a type, fields and all. */
+
+tree
+build_duplicate_type (tree type)
+{
+ inline_data id;
+
+ memset (&id, 0, sizeof (id));
+ /* Remap entirely within the current function.  */
+ id.callee = current_function_decl;
+ id.caller = current_function_decl;
+ id.callee_cfun = cfun;
+ id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
+
+ /* Call remap_type_1 directly to force a copy even for types that are
+ not variably modified.  */
+ type = remap_type_1 (type, &id);
+
+ splay_tree_delete (id.decl_map);
+
+ return type;
}