#include "except.h"
#include "debug.h"
#include "pointer-set.h"
-#include "integrate.h"
+#include "ipa-prop.h"
/* I'm not real happy about this, but we need to handle gimple and
non-gimple trees. */
tree callee;
/* FUNCTION_DECL for function being inlined into. */
tree caller;
- /* struct function for function being inlined. Usually this is the same
- as DECL_STRUCT_FUNCTION (callee), but can be different if saved_cfg
- and saved_eh are in use. */
+ /* struct function for function being inlined. */
struct function *callee_cfun;
/* The VAR_DECL for the return value. */
tree retvar;
distinguish between those two situations. This flag is true if
we are cloning, rather than inlining. */
bool cloning_p;
- /* Similarly for saving function body. */
- bool saving_p;
+ /* Versioning function is slightly different from inlining. */
+ bool versioning_p;
+ /* If set, the call_stmt of edges in clones of caller functions will
+ be updated. */
+ bool update_clones_p;
/* Callgraph node of function we are inlining into. */
struct cgraph_node *node;
/* Callgraph node of currently inlined function. */
struct cgraph_node *current_node;
/* Current BLOCK. */
tree block;
+ varray_type ipa_info;
/* Exception region the inlined call lie in. */
int eh_region;
/* Take region number in the function being copied, add this value and
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
-
-static inline bool inlining_p (inline_data *id);
+static bool replace_ref_tree (inline_data *, tree *);
+static inline bool inlining_p (inline_data *);
+static void add_lexical_block (tree current_block, tree new_block);
/* Insert a tree->tree mapping for ID. Despite the name suggests
that the trees should be variables, it is used for more than that. */
{
/* Make a copy of the variable or label. */
tree t;
- t = copy_decl_for_inlining (decl, fn, id->caller);
-
+ t = copy_decl_for_dup (decl, fn, id->caller, id->versioning_p);
+
/* Remember it, so that if we encounter this local entity again
we can reuse this copy. Do this early because remap_type may
need this decl for TYPE_STUB_DECL. */
TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
if (TREE_CODE (t) == TYPE_DECL)
DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
- else if (TREE_CODE (t) == PARM_DECL)
- DECL_ARG_TYPE_AS_WRITTEN (t)
- = remap_type (DECL_ARG_TYPE_AS_WRITTEN (t), id);
/* Remap sizes as necessary. */
walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
}
#endif
- /* If we are inlining and this is a variable (not a label), declare the
- remapped variable in the callers' body. */
- if (inlining_p (id)
- && (TREE_CODE (t) == VAR_DECL
- || TREE_CODE (t) == PARM_DECL))
- declare_inline_vars (id->block, t);
-
/* Remember it, so that if we encounter this local entity
again we can reuse this copy. */
insert_decl_map (id, decl, t);
}
static tree
-remap_type (tree type, inline_data *id)
+remap_type_1 (tree type, inline_data *id)
{
- splay_tree_node node;
tree new, t;
- if (type == NULL)
- return type;
-
- /* See if we have remapped this type. */
- node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
- if (node)
- return (tree) node->value;
-
- /* The type only needs remapping if it's variably modified. */
- if (! variably_modified_type_p (type, id->callee))
- {
- insert_decl_map (id, type, type);
- return type;
- }
-
/* We do need a copy. build and register it now. If this is a pointer or
reference type, remap the designated type and make a new pointer or
reference type. */
case RECORD_TYPE:
case UNION_TYPE:
case QUAL_UNION_TYPE:
- walk_tree (&TYPE_FIELDS (new), copy_body_r, id, NULL);
+ {
+ tree f, nf = NULL;
+
+ for (f = TYPE_FIELDS (new); f ; f = TREE_CHAIN (f))
+ {
+ t = remap_decl (f, id);
+ DECL_CONTEXT (t) = new;
+ TREE_CHAIN (t) = nf;
+ nf = t;
+ }
+ TYPE_FIELDS (new) = nreverse (nf);
+ }
break;
case OFFSET_TYPE:
}
static tree
+remap_type (tree type, inline_data *id)
+{
+ splay_tree_node node;
+
+ if (type == NULL)
+ return type;
+
+ /* See if we have remapped this type. */
+ node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
+ if (node)
+ return (tree) node->value;
+
+ /* The type only needs remapping if it's variably modified. */
+ if (! variably_modified_type_p (type, id->callee))
+ {
+ insert_decl_map (id, type, type);
+ return type;
+ }
+
+ return remap_type_1 (type, id);
+}
+
+static tree
remap_decls (tree decls, inline_data *id)
{
tree old_var;
new_block = make_node (BLOCK);
TREE_USED (new_block) = TREE_USED (old_block);
BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
+ BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
*block = new_block;
/* Remap its variables. */
BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);
fn = id->caller;
-#if 1
- /* FIXME! It shouldn't be so hard to manage blocks. Rebuilding them in
- rest_of_compilation is a good start. */
if (id->cloning_p)
/* We're building a clone; DECL_INITIAL is still
error_mark_node, and current_binding_level is the parm
binding level. */
lang_hooks.decls.insert_block (new_block);
- else
- {
- /* Attach this new block after the DECL_INITIAL block for the
- function into which this block is being inlined. In
- rest_of_compilation we will straighten out the BLOCK tree. */
- tree *first_block;
- if (DECL_INITIAL (fn))
- first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
- else
- first_block = &DECL_INITIAL (fn);
- BLOCK_CHAIN (new_block) = *first_block;
- *first_block = new_block;
- }
-#endif
/* Remember the remapped block. */
insert_decl_map (id, old_block, new_block);
}
+/* Copy the whole block tree and root it in id->block. */
+static tree
+remap_blocks (tree block, inline_data *id)
+{
+ tree t;
+ tree new = block;
+
+ if (!block)
+ return NULL;
+
+ remap_block (&new, id);
+ gcc_assert (new != block);
+ for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
+ add_lexical_block (new, remap_blocks (t, id));
+ return new;
+}
+
static void
copy_statement_list (tree *tp)
{
{
inline_data *id = (inline_data *) data;
tree fn = id->callee;
+ tree new_block;
/* Begin by recognizing trees that we'll completely rewrite for the
inlining context. Our output for these trees is completely
else /* Else the RETURN_EXPR returns no value. */
{
*tp = NULL;
- return (void *)1;
+ return (tree) (void *)1;
}
}
}
}
}
- else if (TREE_CODE (*tp) == INDIRECT_REF)
+ else if (TREE_CODE (*tp) == INDIRECT_REF
+ && !id->versioning_p)
{
/* Get rid of *& from inline substitutions that can happen when a
pointer argument is an ADDR_EXPR. */
n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
if (n)
{
+ tree new;
/* If we happen to get an ADDR_EXPR in n->value, strip
it manually here as we'll eventually get ADDR_EXPRs
which lie about their types pointed to. In this case
but we absolutely rely on that. As fold_indirect_ref
does other useful transformations, try that first, though. */
tree type = TREE_TYPE (TREE_TYPE ((tree)n->value));
- *tp = fold_indirect_ref_1 (type, (tree)n->value);
+ new = unshare_expr ((tree)n->value);
+ *tp = fold_indirect_ref_1 (type, new);
if (! *tp)
{
- if (TREE_CODE ((tree)n->value) == ADDR_EXPR)
- *tp = TREE_OPERAND ((tree)n->value, 0);
+ if (TREE_CODE (new) == ADDR_EXPR)
+ *tp = TREE_OPERAND (new, 0);
else
- *tp = build1 (INDIRECT_REF, type, (tree)n->value);
+ *tp = build1 (INDIRECT_REF, type, new);
}
*walk_subtrees = 0;
return NULL;
/* Here is the "usual case". Copy this tree node, and then
tweak some special cases. */
- copy_tree_r (tp, walk_subtrees, NULL);
- if (id->block
- && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (*tp))))
- TREE_BLOCK (*tp) = id->block;
+ copy_tree_r (tp, walk_subtrees, id->versioning_p ? data : NULL);
+
+ /* If EXPR has block defined, map it to newly constructed block.
+ When inlining we want EXPRs without block appear in the block
+ of function call. */
+ if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (*tp))))
+ {
+ new_block = id->block;
+ if (TREE_BLOCK (*tp))
+ {
+ splay_tree_node n;
+ n = splay_tree_lookup (id->decl_map,
+ (splay_tree_key) TREE_BLOCK (*tp));
+ gcc_assert (n);
+ new_block = (tree) n->value;
+ }
+ TREE_BLOCK (*tp) = new_block;
+ }
if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
TREE_OPERAND (*tp, 0) =
else if (TREE_CODE (*tp) == ADDR_EXPR)
{
walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
- recompute_tree_invarant_for_addr_expr (*tp);
+ recompute_tree_invariant_for_addr_expr (*tp);
*walk_subtrees = 0;
}
}
/* create_basic_block() will append every new block to
basic_block_info automatically. */
- copy_basic_block = create_basic_block (NULL, (void *) 0, bb->prev_bb->aux);
+ copy_basic_block = create_basic_block (NULL, (void *) 0,
+ (basic_block) bb->prev_bb->aux);
copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
copy_basic_block->frequency = (bb->frequency
* frequency_scale / REG_BR_PROB_BASE);
callgraph edges and update or duplicate them. */
if (call && (decl = get_callee_fndecl (call)))
{
- if (id->saving_p)
- {
- struct cgraph_node *node;
- struct cgraph_edge *edge;
-
- /* We're saving a copy of the body, so we'll update the
- callgraph nodes in place. Note that we avoid
- altering the original callgraph node; we begin with
- the first clone. */
- for (node = id->node->next_clone;
- node;
- node = node->next_clone)
- {
- edge = cgraph_edge (node, orig_stmt);
- gcc_assert (edge);
- edge->call_stmt = stmt;
- }
- }
- else
+ if (!id->versioning_p)
{
struct cgraph_edge *edge;
edge = cgraph_edge (id->current_node, orig_stmt);
if (edge)
cgraph_clone_edge (edge, id->node, stmt,
- REG_BR_PROB_BASE, 1);
+ REG_BR_PROB_BASE, 1, true);
+ }
+ else
+ {
+ /* Update the call_expr on the edges from the new version
+ to its callees. */
+ struct cgraph_edge *edge;
+ edge = cgraph_edge (id->node, orig_stmt);
+ if (edge)
+ {
+ edge->call_stmt = stmt;
+ if (id->update_clones_p)
+ {
+ struct cgraph_node *n;
+ for (n = id->node->next_clone; n; n = n->next_clone)
+ cgraph_edge (n, orig_stmt)->call_stmt = stmt;
+ }
+ }
}
}
/* If you think we can abort here, you are wrong.
static void
copy_edges_for_bb (basic_block bb, int count_scale)
{
- basic_block new_bb = bb->aux;
+ basic_block new_bb = (basic_block) bb->aux;
edge_iterator ei;
edge old_edge;
block_stmt_iterator bsi;
if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
&& old_edge->dest->aux != EXIT_BLOCK_PTR)
flags |= EDGE_FALLTHRU;
- new = make_edge (new_bb, old_edge->dest->aux, flags);
+ new = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
new->probability = old_edge->probability;
}
static tree
remap_decl_1 (tree decl, void *data)
{
- return remap_decl (decl, data);
+ return remap_decl (decl, (inline_data *) data);
}
/* Make a copy of the body of FN so that it can be inserted inline in
(struct function *) ggc_alloc_cleared (sizeof (struct function));
basic_block bb;
tree new_fndecl = NULL;
- bool saving_or_cloning;
+ bool versioning_or_cloning;
int count_scale, frequency_scale;
if (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count)
*cfun_to_copy = *DECL_STRUCT_FUNCTION (callee_fndecl);
- /* If there is a saved_cfg+saved_args lurking in the
- struct function, a copy of the callee body was saved there, and
- the 'struct cgraph edge' nodes have been fudged to point into the
- saved body. Accordingly, we want to copy that saved body so the
- callgraph edges will be recognized and cloned properly. */
- if (cfun_to_copy->saved_cfg)
- {
- cfun_to_copy->cfg = cfun_to_copy->saved_cfg;
- cfun_to_copy->eh = cfun_to_copy->saved_eh;
- }
id->callee_cfun = cfun_to_copy;
/* If saving or cloning a function body, create new basic_block_info
and label_to_block_maps. Otherwise, we're duplicating a function
body for inlining; insert our new blocks and labels into the
existing varrays. */
- saving_or_cloning = (id->saving_p || id->cloning_p);
- if (saving_or_cloning)
+ versioning_or_cloning = (id->cloning_p || id->versioning_p);
+ if (versioning_or_cloning)
{
new_cfun =
(struct function *) ggc_alloc_cleared (sizeof (struct function));
/* Duplicate any exception-handling regions. */
if (cfun->eh)
{
- if (saving_or_cloning)
+ if (versioning_or_cloning)
init_eh_for_function ();
id->eh_region_offset = duplicate_eh_regions (cfun_to_copy,
remap_decl_1,
FOR_ALL_BB_FN (bb, cfun_to_copy)
bb->aux = NULL;
- if (saving_or_cloning)
+ if (versioning_or_cloning)
pop_cfun ();
return new_fndecl;
/* Make an equivalent VAR_DECL. Note that we must NOT remap the type
here since the type of this decl must be visible to the calling
function. */
- var = copy_decl_for_inlining (p, fn, id->caller);
+ var = copy_decl_for_dup (p, fn, id->caller, /*versioning=*/false);
/* See if the frontend wants to pass this by invisible reference. If
so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
/* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
keep our trees in gimple form. */
- init_stmt = build (MODIFY_EXPR, TREE_TYPE (var), var, rhs);
+ init_stmt = build2 (MODIFY_EXPR, TREE_TYPE (var), var, rhs);
/* If we did not create a gimple value and we did not create a gimple
cast of a gimple value, then we will need to gimplify INIT_STMTS
&& (!is_gimple_cast (rhs)
|| !is_gimple_val (TREE_OPERAND (rhs, 0))))
gimplify_stmt (&init_stmt);
- bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
+
+ /* If VAR represents a zero-sized variable, it's possible that the
+ assignment statement may result in no gimple statements. */
+ if (init_stmt)
+ bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
}
}
/* Figure out what the parameters are. */
parms = DECL_ARGUMENTS (fn);
- if (fn == current_function_decl)
- parms = cfun->saved_args;
/* Loop through the parameter declarations, replacing each with an
equivalent VAR_DECL, appropriately initialized. */
/* Initialize the static chain. */
p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
- if (fn == current_function_decl)
- p = DECL_STRUCT_FUNCTION (fn)->saved_static_chain_decl;
+ gcc_assert (fn != current_function_decl);
if (p)
{
/* No static chain? Seems like a bug in tree-nested.c. */
var = return_slot_addr;
else
var = build_fold_indirect_ref (return_slot_addr);
+ if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
+ && !DECL_COMPLEX_GIMPLE_REG_P (result)
+ && DECL_P (var))
+ DECL_COMPLEX_GIMPLE_REG_P (var) = 0;
use = NULL;
goto done;
}
/* If the callee cannot possibly modify MODIFY_DEST, then we can
reuse it as the result of the call directly. Don't do this if
it would promote MODIFY_DEST to addressable. */
- else if (!TREE_STATIC (modify_dest)
- && !TREE_ADDRESSABLE (modify_dest)
- && !TREE_ADDRESSABLE (result))
- use_it = true;
+ else if (TREE_ADDRESSABLE (result))
+ use_it = false;
+ else
+ {
+ tree base_m = get_base_address (modify_dest);
+
+ /* If the base isn't a decl, then it's a pointer, and we don't
+ know where that's going to go. */
+ if (!DECL_P (base_m))
+ use_it = false;
+ else if (is_global_var (base_m))
+ use_it = false;
+ else if (!TREE_ADDRESSABLE (base_m))
+ use_it = true;
+ }
if (use_it)
{
gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
- var = copy_decl_for_inlining (result, callee, caller);
+ var = copy_decl_for_dup (result, callee, caller, /*versioning=*/false);
DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
&& !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
{
inline_forbidden_reason
- = G_("%Jfunction %qF can never be inlined because it uses "
+ = G_("function %q+F can never be inlined because it uses "
"alloca (override using the always_inline attribute)");
return node;
}
if (setjmp_call_p (t))
{
inline_forbidden_reason
- = G_("%Jfunction %qF can never be inlined because it uses setjmp");
+ = G_("function %q+F can never be inlined because it uses setjmp");
return node;
}
case BUILT_IN_NEXT_ARG:
case BUILT_IN_VA_END:
inline_forbidden_reason
- = G_("%Jfunction %qF can never be inlined because it "
+ = G_("function %q+F can never be inlined because it "
"uses variable argument lists");
return node;
function calling __builtin_longjmp to be inlined into the
function calling __builtin_setjmp, Things will Go Awry. */
inline_forbidden_reason
- = G_("%Jfunction %qF can never be inlined because "
+ = G_("function %q+F can never be inlined because "
"it uses setjmp-longjmp exception handling");
return node;
case BUILT_IN_NONLOCAL_GOTO:
/* Similarly. */
inline_forbidden_reason
- = G_("%Jfunction %qF can never be inlined because "
+ = G_("function %q+F can never be inlined because "
"it uses non-local goto");
return node;
been inlined into. Similarly __builtin_return would
return from the function the inline has been inlined into. */
inline_forbidden_reason
- = G_("%Jfunction %qF can never be inlined because "
+ = G_("function %q+F can never be inlined because "
"it uses __builtin_return or __builtin_apply_args");
return node;
if (TREE_CODE (t) != LABEL_DECL)
{
inline_forbidden_reason
- = G_("%Jfunction %qF can never be inlined "
+ = G_("function %q+F can never be inlined "
"because it contains a computed goto");
return node;
}
because we cannot remap the destination label used in the
function that is performing the non-local goto. */
inline_forbidden_reason
- = G_("%Jfunction %qF can never be inlined "
+ = G_("function %q+F can never be inlined "
"because it receives a non-local goto");
return node;
}
if (variably_modified_type_p (TREE_TYPE (t), NULL))
{
inline_forbidden_reason
- = G_("%Jfunction %qF can never be inlined "
+ = G_("function %q+F can never be inlined "
"because it uses variable sized variables");
return node;
}
&& !DECL_IN_SYSTEM_HEADER (fn));
if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
- sorry (inline_forbidden_reason, fn, fn);
+ sorry (inline_forbidden_reason, fn);
else if (do_warning)
- warning (0, inline_forbidden_reason, fn, fn);
+ warning (OPT_Winline, inline_forbidden_reason, fn);
inlinable = false;
}
static tree
estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
{
- int *count = data;
+ int *count = (int *) data;
tree x = *tp;
if (IS_TYPE_OR_DECL_P (x))
;
*blk_p = new_block;
BLOCK_SUPERCONTEXT (new_block) = current_block;
- BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
}
/* If *TP is a CALL_EXPR, replace it with its inline expansion. */
inlining. */
if (!cgraph_inline_p (cg_edge, &reason))
{
- if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
+ if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
+ /* Avoid warnings during early inline pass. */
+ && (!flag_unit_at_a_time || cgraph_global_info_ready))
{
- sorry ("%Jinlining failed in call to %qF: %s", fn, fn, reason);
+ sorry ("inlining failed in call to %q+F: %s", fn, reason);
sorry ("called from here");
}
else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
/* Avoid warnings during early inline pass. */
&& (!flag_unit_at_a_time || cgraph_global_info_ready))
{
- warning (OPT_Winline, "%Jinlining failed in call to %qF: %s",
- fn, fn, reason);
+ warning (OPT_Winline, "inlining failed in call to %q+F: %s",
+ fn, reason);
warning (OPT_Winline, "called from here");
}
goto egress;
}
+ fn = cg_edge->callee->decl;
#ifdef ENABLE_CHECKING
if (cg_edge->callee->decl != id->node->decl)
else
{
tree stmt = bsi_stmt (stmt_bsi);
- bsi_remove (&stmt_bsi);
+ bsi_remove (&stmt_bsi, false);
bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
}
stmt_bsi = bsi_start (return_block);
statement expression is the return type of the function call. */
id->block = make_node (BLOCK);
BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
+ BLOCK_SOURCE_LOCATION (id->block) = input_location;
add_lexical_block (TREE_BLOCK (stmt), id->block);
-
/* Local declarations will be replaced by their equivalents in this
map. */
st = id->decl_map;
/* Record the function we are about to inline. */
id->callee = fn;
+ if (DECL_INITIAL (fn))
+ add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
+
/* Return statements in the function body will be replaced by jumps
to the RET_LABEL. */
copy_body (id, bb->count, bb->frequency, bb, return_block);
id->current_node = old_node;
+ /* Add local vars in this inlined callee to caller. */
+ t_step = id->callee_cfun->unexpanded_var_list;
+ for (; t_step; t_step = TREE_CHAIN (t_step))
+ {
+ var = TREE_VALUE (t_step);
+ if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
+ cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
+ cfun->unexpanded_var_list);
+ else
+ cfun->unexpanded_var_list = tree_cons (NULL_TREE, remap_decl (var, id),
+ cfun->unexpanded_var_list);
+ }
+
/* Clean up. */
splay_tree_delete (id->decl_map);
id->decl_map = st;
else
/* We're modifying a TSI owned by gimple_expand_calls_inline();
tsi_delink() will leave the iterator in a sane state. */
- bsi_remove (&stmt_bsi);
+ bsi_remove (&stmt_bsi, true);
bsi_next (&bsi);
if (bsi_end_p (bsi))
/* Declare the 'auto' variables added with this inlined body. */
record_vars (BLOCK_VARS (id->block));
id->block = NULL_TREE;
-
- /* Add local static vars in this inlined callee to caller. */
- for (t_step = id->callee_cfun->unexpanded_var_list;
- t_step;
- t_step = TREE_CHAIN (t_step))
- {
- var = TREE_VALUE (t_step);
- if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
- record_vars (var);
- }
successfully_inlined = TRUE;
egress:
append_to_statement_list_force (copy_generic_body (&id), &DECL_SAVED_TREE (clone));
}
-/* Save duplicate body in FN. MAP is used to pass around splay tree
- used to update arguments in restore_body. */
-
-/* Make and return duplicate of body in FN. Put copies of DECL_ARGUMENTS
- in *arg_copy and of the static chain, if any, in *sc_copy. */
-
-void
-save_body (tree fn, tree *arg_copy, tree *sc_copy)
-{
- inline_data id;
- tree newdecl, *parg;
- basic_block fn_entry_block;
-
- memset (&id, 0, sizeof (id));
- id.callee = fn;
- id.callee_cfun = DECL_STRUCT_FUNCTION (fn);
- id.caller = fn;
- id.node = cgraph_node (fn);
- id.saving_p = true;
- id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
- *arg_copy = DECL_ARGUMENTS (fn);
-
- for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
- {
- tree new = copy_node (*parg);
-
- lang_hooks.dup_lang_specific_decl (new);
- DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*parg);
- insert_decl_map (&id, *parg, new);
- TREE_CHAIN (new) = TREE_CHAIN (*parg);
- *parg = new;
- }
-
- *sc_copy = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
- if (*sc_copy)
- {
- tree new = copy_node (*sc_copy);
-
- lang_hooks.dup_lang_specific_decl (new);
- DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*sc_copy);
- insert_decl_map (&id, *sc_copy, new);
- TREE_CHAIN (new) = TREE_CHAIN (*sc_copy);
- *sc_copy = new;
- }
-
- /* We're not inside any EH region. */
- id.eh_region = -1;
-
- insert_decl_map (&id, DECL_RESULT (fn), DECL_RESULT (fn));
-
- /* Actually copy the body, including a new (struct function *) and CFG.
- EH info is also duplicated so its labels point into the copied
- CFG, not the original. */
- fn_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fn));
- newdecl = copy_body (&id, fn_entry_block->count, fn_entry_block->frequency, NULL, NULL);
- DECL_STRUCT_FUNCTION (fn)->saved_cfg = DECL_STRUCT_FUNCTION (newdecl)->cfg;
- DECL_STRUCT_FUNCTION (fn)->saved_eh = DECL_STRUCT_FUNCTION (newdecl)->eh;
-
- /* Clean up. */
- splay_tree_delete (id.decl_map);
-}
-
/* Passed to walk_tree. Copies the node pointed to, if appropriate. */
tree
copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
enum tree_code code = TREE_CODE (*tp);
+ inline_data *id = (inline_data *) data;
/* We make copies of most nodes. */
if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
tree chain = TREE_CHAIN (*tp);
tree new;
+ if (id && id->versioning_p && replace_ref_tree (id, tp))
+ {
+ *walk_subtrees = 0;
+ return NULL_TREE;
+ }
/* Copy the node. */
new = copy_node (*tp);
if (TREE_CODE (*tp) == BIND_EXPR)
BIND_EXPR_BLOCK (*tp) = NULL_TREE;
}
+ else if (code == CONSTRUCTOR)
+ {
+ /* CONSTRUCTOR nodes need special handling because
+ we need to duplicate the vector of elements. */
+ tree new;
+ new = copy_node (*tp);
+
+ /* Propagate mudflap marked-ness. */
+ if (flag_mudflap && mf_marked_p (*tp))
+ mf_mark (new);
+
+ CONSTRUCTOR_ELTS (new) = VEC_copy (constructor_elt, gc,
+ CONSTRUCTOR_ELTS (*tp));
+ *tp = new;
+ }
else if (TREE_CODE_CLASS (code) == tcc_type)
*walk_subtrees = 0;
else if (TREE_CODE_CLASS (code) == tcc_declaration)
/* Copy the decl and remember the copy. */
insert_decl_map (id, decl,
- copy_decl_for_inlining (decl, DECL_CONTEXT (decl),
- DECL_CONTEXT (decl)));
+ copy_decl_for_dup (decl, DECL_CONTEXT (decl),
+ DECL_CONTEXT (decl), /*versioning=*/false));
}
return NULL_TREE;
BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
}
-/* Returns true if we're inlining. */
+
+/* Copy NODE (which must be a DECL). The DECL originally was in the FROM_FN,
+ but now it will be in the TO_FN. VERSIONING means that this function
+ is used by the versioning utility (not inlining or cloning). */
+
+tree
+copy_decl_for_dup (tree decl, tree from_fn, tree to_fn, bool versioning)
+{
+ tree copy;
+
+ gcc_assert (DECL_P (decl));
+ /* Copy the declaration. */
+ if (!versioning
+ && (TREE_CODE (decl) == PARM_DECL
+ || TREE_CODE (decl) == RESULT_DECL))
+ {
+ tree type = TREE_TYPE (decl);
+
+ /* For a parameter or result, we must make an equivalent VAR_DECL,
+ not a new PARM_DECL. */
+ copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
+ TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
+ TREE_READONLY (copy) = TREE_READONLY (decl);
+ TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
+ DECL_COMPLEX_GIMPLE_REG_P (copy) = DECL_COMPLEX_GIMPLE_REG_P (decl);
+ }
+ else
+ {
+ copy = copy_node (decl);
+ /* The COPY is not abstract; it will be generated in TO_FN. */
+ DECL_ABSTRACT (copy) = 0;
+ lang_hooks.dup_lang_specific_decl (copy);
+
+ /* TREE_ADDRESSABLE isn't used to indicate that a label's
+ address has been taken; it's for internal bookkeeping in
+ expand_goto_internal. */
+ if (TREE_CODE (copy) == LABEL_DECL)
+ {
+ TREE_ADDRESSABLE (copy) = 0;
+ LABEL_DECL_UID (copy) = -1;
+ }
+ }
+
+ /* Don't generate debug information for the copy if we wouldn't have
+ generated it for the original. */
+ DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
+ DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
+
+ /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
+ declaration inspired this copy. */
+ DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
+
+ /* The new variable/label has no RTL, yet. */
+ if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
+ && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
+ SET_DECL_RTL (copy, NULL_RTX);
+
+ /* These args would always appear unused, if not for this. */
+ TREE_USED (copy) = 1;
+
+ /* Set the context for the new declaration. */
+ if (!DECL_CONTEXT (decl))
+ /* Globals stay global. */
+ ;
+ else if (DECL_CONTEXT (decl) != from_fn)
+ /* Things that weren't in the scope of the function we're inlining
+ from aren't in the scope we're inlining to, either. */
+ ;
+ else if (TREE_STATIC (decl))
+ /* Function-scoped static variables should stay in the original
+ function. */
+ ;
+ else
+ /* Ordinary automatic local variables are now in the scope of the
+ new function. */
+ DECL_CONTEXT (copy) = to_fn;
+
+ return copy;
+}
+
+/* Return a copy of the function's argument tree. */
+static tree
+copy_arguments_for_versioning (tree orig_parm, inline_data * id)
+{
+ tree *arg_copy, *parg;
+
+ arg_copy = &orig_parm;
+ for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
+ {
+ tree new = remap_decl (*parg, id);
+ lang_hooks.dup_lang_specific_decl (new);
+ TREE_CHAIN (new) = TREE_CHAIN (*parg);
+ *parg = new;
+ }
+ return orig_parm;
+}
+
+/* Return a copy of the function's static chain. */
+static tree
+copy_static_chain (tree static_chain, inline_data * id)
+{
+ tree *chain_copy, *pvar;
+
+ chain_copy = &static_chain;
+ for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
+ {
+ tree new = remap_decl (*pvar, id);
+ lang_hooks.dup_lang_specific_decl (new);
+ TREE_CHAIN (new) = TREE_CHAIN (*pvar);
+ *pvar = new;
+ }
+ return static_chain;
+}
+
+/* Return true if the function is allowed to be versioned.
+ This is a guard for the versioning functionality. */
+bool
+tree_versionable_function_p (tree fndecl)
+{
+ if (fndecl == NULL_TREE)
+ return false;
+ /* ??? There are cases where a function is
+ uninlinable but can be versioned. */
+ if (!tree_inlinable_function_p (fndecl))
+ return false;
+
+ return true;
+}
+
+/* Create a copy of a function's tree.
+ OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
+ of the original function and the new copied function
+ respectively. In case we want to replace a DECL
+ tree with another tree while duplicating the function's
+ body, TREE_MAP represents the mapping between these
+ trees. If UPDATE_CLONES is set, the call_stmt fields
+ of edges of clones of the function will be updated. */
+void
+tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map,
+ bool update_clones)
+{
+ struct cgraph_node *old_version_node;
+ struct cgraph_node *new_version_node;
+ inline_data id;
+ tree p, new_fndecl;
+ unsigned i;
+ struct ipa_replace_map *replace_info;
+ basic_block old_entry_block;
+ tree t_step;
+
+ gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
+ && TREE_CODE (new_decl) == FUNCTION_DECL);
+ DECL_POSSIBLY_INLINED (old_decl) = 1;
+
+ old_version_node = cgraph_node (old_decl);
+ new_version_node = cgraph_node (new_decl);
+
+ allocate_struct_function (new_decl);
+ /* Cfun points to the new allocated function struct at this point. */
+ cfun->function_end_locus = DECL_SOURCE_LOCATION (new_decl);
+
+ DECL_ARTIFICIAL (new_decl) = 1;
+ DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
+
+ /* Generate a new name for the new version. */
+ if (!update_clones)
+ DECL_NAME (new_decl) =
+ create_tmp_var_name (NULL);
+ /* Create a new SYMBOL_REF rtx for the new name. */
+ if (DECL_RTL (old_decl) != NULL)
+ {
+ SET_DECL_RTL (new_decl, copy_rtx (DECL_RTL (old_decl)));
+ XEXP (DECL_RTL (new_decl), 0) =
+ gen_rtx_SYMBOL_REF (GET_MODE (XEXP (DECL_RTL (old_decl), 0)),
+ IDENTIFIER_POINTER (DECL_NAME (new_decl)));
+ }
+
+ /* Prepare the data structures for the tree copy. */
+ memset (&id, 0, sizeof (id));
+
+ /* The new version. */
+ id.node = new_version_node;
+
+ /* The old version. */
+ id.current_node = cgraph_node (old_decl);
+
+ id.versioning_p = true;
+ id.update_clones_p = update_clones;
+ id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
+ id.caller = new_decl;
+ id.callee = old_decl;
+ id.callee_cfun = DECL_STRUCT_FUNCTION (old_decl);
+
+ current_function_decl = new_decl;
+
+ /* Copy the function's static chain. */
+ p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
+ if (p)
+ DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
+ copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
+ &id);
+ /* Copy the function's arguments. */
+ if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
+ DECL_ARGUMENTS (new_decl) =
+ copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id);
+
+ /* If there's a tree_map, prepare for substitution. */
+ if (tree_map)
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
+ {
+ replace_info = VARRAY_GENERIC_PTR (tree_map, i);
+ if (replace_info->replace_p && !replace_info->ref_p)
+ insert_decl_map (&id, replace_info->old_tree,
+ replace_info->new_tree);
+ else if (replace_info->replace_p && replace_info->ref_p)
+ id.ipa_info = tree_map;
+ }
+
+ DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.callee), &id);
+
+ /* Renumber the lexical scoping (non-code) blocks consecutively. */
+ number_blocks (id.caller);
+
+ if (DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list != NULL_TREE)
+ /* Add local vars. */
+ for (t_step = DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list;
+ t_step; t_step = TREE_CHAIN (t_step))
+ {
+ tree var = TREE_VALUE (t_step);
+ if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
+ cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
+ cfun->unexpanded_var_list);
+ else
+ cfun->unexpanded_var_list =
+ tree_cons (NULL_TREE, remap_decl (var, &id),
+ cfun->unexpanded_var_list);
+ }
+
+ /* Copy the Function's body. */
+ old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
+ (DECL_STRUCT_FUNCTION (old_decl));
+ new_fndecl = copy_body (&id,
+ old_entry_block->count,
+ old_entry_block->frequency, NULL, NULL);
+
+ DECL_SAVED_TREE (new_decl) = DECL_SAVED_TREE (new_fndecl);
+
+ DECL_STRUCT_FUNCTION (new_decl)->cfg =
+ DECL_STRUCT_FUNCTION (new_fndecl)->cfg;
+ DECL_STRUCT_FUNCTION (new_decl)->eh = DECL_STRUCT_FUNCTION (new_fndecl)->eh;
+ DECL_STRUCT_FUNCTION (new_decl)->ib_boundaries_block =
+ DECL_STRUCT_FUNCTION (new_fndecl)->ib_boundaries_block;
+ DECL_STRUCT_FUNCTION (new_decl)->last_label_uid =
+ DECL_STRUCT_FUNCTION (new_fndecl)->last_label_uid;
+
+ if (DECL_RESULT (old_decl) != NULL_TREE)
+ {
+ tree *res_decl = &DECL_RESULT (old_decl);
+ DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
+ lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
+ }
+
+ current_function_decl = NULL;
+ /* Renumber the lexical scoping (non-code) blocks consecutively. */
+ number_blocks (new_decl);
+
+ /* Clean up. */
+ splay_tree_delete (id.decl_map);
+ fold_cond_expr_cond ();
+ return;
+}
+
+/* Replace an INDIRECT_REF tree of a given DECL tree with a new
+ given tree.
+ ID->ipa_info keeps the old tree and the new tree.
+ TP points to the INDIRECT REF tree. Return true if
+ the trees were replaced. */
+static bool
+replace_ref_tree (inline_data * id, tree * tp)
+{
+ bool replaced = false;
+ tree new;
+
+ if (id->ipa_info && VARRAY_ACTIVE_SIZE (id->ipa_info) > 0)
+ {
+ unsigned i;
+
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (id->ipa_info); i++)
+ {
+ struct ipa_replace_map *replace_info;
+ replace_info = VARRAY_GENERIC_PTR (id->ipa_info, i);
+
+ if (replace_info->replace_p && replace_info->ref_p)
+ {
+ tree old_tree = replace_info->old_tree;
+ tree new_tree = replace_info->new_tree;
+
+ if (TREE_CODE (*tp) == INDIRECT_REF
+ && TREE_OPERAND (*tp, 0) == old_tree)
+ {
+ new = copy_node (new_tree);
+ *tp = new;
+ replaced = true;
+ }
+ }
+ }
+ }
+ return replaced;
+}
+
+/* Return true if we are inlining. */
static inline bool
-inlining_p (inline_data *id)
+inlining_p (inline_data * id)
+{
+ return (!id->cloning_p && !id->versioning_p);
+}
+
+/* Duplicate a type, fields and all. */
+
+tree
+build_duplicate_type (tree type)
{
- return (!id->saving_p && !id->cloning_p);
+ inline_data id;
+
+ memset (&id, 0, sizeof (id));
+ id.callee = current_function_decl;
+ id.caller = current_function_decl;
+ id.callee_cfun = cfun;
+ id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
+
+ type = remap_type_1 (type, &id);
+
+ splay_tree_delete (id.decl_map);
+
+ return type;
}