/* Tree inlining.
- Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
+ Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
Free Software Foundation, Inc.
Contributed by Alexandre Oliva <aoliva@redhat.com>
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
-#include "ggc.h"
#include "tree-flow.h"
#include "diagnostic.h"
#include "except.h"
calls? */
-/* Weights that estimate_num_insns uses for heuristics in inlining. */
-
-eni_weights eni_inlining_weights;
-
/* Weights that estimate_num_insns uses to estimate the size of the
produced code. */
/* Prototypes. */
-static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
+static tree declare_return_variable (copy_body_data *, tree, tree);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
*pointer_map_insert (id->debug_map, key) = value;
}
+/* If nonzero, we're remapping the contents of inlined debug
+ statements. If negative, an error has occurred, such as a
+ reference to a variable that isn't available in the inlined
+ context. */
+static int processing_debug_stmt = 0;
+
/* Construct new SSA name for old NAME. ID is the inline context. */
static tree
if (n)
return unshare_expr (*n);
+ if (processing_debug_stmt)
+ {
+ processing_debug_stmt = -1;
+ return name;
+ }
+
/* Do not set DEF_STMT yet as statement is not copied yet. We do that
in copy_bb. */
new_tree = remap_decl (SSA_NAME_VAR (name), id);
/* We might've substituted constant or another SSA_NAME for
- the variable.
+ the variable.
Replace the SSA name representing RESULT_DECL by variable during
inlining: this saves us from need to introduce PHI node in a case
&& (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
|| !id->transform_return_to_modify))
{
+ struct ptr_info_def *pi;
new_tree = make_ssa_name (new_tree, NULL);
insert_decl_map (id, name, new_tree);
SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
+ /* At least IPA points-to info can be directly transferred. */
+ if (id->src_cfun->gimple_df
+ && id->src_cfun->gimple_df->ipa_pta
+ && (pi = SSA_NAME_PTR_INFO (name))
+ && !pi->pt.anything)
+ {
+ struct ptr_info_def *new_pi = get_ptr_info (new_tree);
+ new_pi->pt = pi->pt;
+ }
if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
{
/* By inlining function having uninitialized variable, we might
{
gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
gimple init_stmt;
-
+
init_stmt = gimple_build_assign (new_tree,
fold_convert (TREE_TYPE (new_tree),
integer_zero_node));
return new_tree;
}
-/* If nonzero, we're remapping the contents of inlined debug
- statements. If negative, an error has occurred, such as a
- reference to a variable that isn't available in the inlined
- context. */
-int processing_debug_stmt = 0;
-
/* Remap DECL during the copying of the BLOCK tree for the function. */
tree
remap_decl (tree decl, copy_body_data *id)
{
tree *n;
- tree fn;
/* We only remap local variables in the current function. */
- fn = id->src_fn;
/* See if we have remapped this declaration. */
{
/* Make a copy of the variable or label. */
tree t = id->copy_decl (decl, id);
-
+
/* Remember it, so that if we encounter this local entity again
we can reuse this copy. Do this early because remap_type may
need this decl for TYPE_STUB_DECL. */
&& (TREE_CODE (t) == VAR_DECL
|| TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
{
- tree def = gimple_default_def (id->src_cfun, decl);
get_var_ann (t);
- if (TREE_CODE (decl) != PARM_DECL && def)
- {
- tree map = remap_ssa_name (def, id);
- /* Watch out RESULT_DECLs whose SSA names map directly
- to them. */
- if (TREE_CODE (map) == SSA_NAME
- && gimple_nop_p (SSA_NAME_DEF_STMT (map)))
- set_default_def (t, map);
- }
add_referenced_var (t);
}
return t;
/* The type only needs remapping if it's variably modified. */
/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
-
+
static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
{
tree new_var;
- tree origin_var = DECL_ORIGIN (old_var);
if (can_be_nonlocal (old_var, id))
{
if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
&& !DECL_IGNORED_P (old_var)
&& nonlocalized_list)
- VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
+ VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
continue;
}
/* If we didn't remap this variable, we can't mess with its
TREE_CHAIN. If we remapped this variable to the return slot, it's
already declared somewhere else, so don't declare it here. */
-
+
if (new_var == id->retvar)
;
else if (!new_var)
if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
&& !DECL_IGNORED_P (old_var)
&& nonlocalized_list)
- VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
+ VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
}
else
{
{
tree old_block;
tree new_block;
- tree fn;
/* Make the new block. */
old_block = *block;
&BLOCK_NONLOCALIZED_VARS (new_block),
id);
- fn = id->dst_fn;
-
if (id->transform_lang_insert_block)
id->transform_lang_insert_block (new_block);
}
if (BIND_EXPR_VARS (*tp))
- /* This will remap a lot of the same decls again, but this should be
- harmless. */
- BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
+ {
+ tree t;
+
+ /* This will remap a lot of the same decls again, but this should be
+ harmless. */
+ BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
+
+ /* Also copy value-expressions. */
+ for (t = BIND_EXPR_VARS (*tp); t; t = TREE_CHAIN (t))
+ if (TREE_CODE (t) == VAR_DECL
+ && DECL_HAS_VALUE_EXPR_P (t))
+ {
+ tree tem = DECL_VALUE_EXPR (t);
+ walk_tree (&tem, copy_tree_body_r, id, NULL);
+ SET_DECL_VALUE_EXPR (t, tem);
+ }
+ }
}
/* Create a new gimple_seq by remapping all the statements in BODY
using the inlining information in ID. */
-gimple_seq
+static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
gimple_stmt_iterator si;
&& id->remapping_type_depth == 0
&& !processing_debug_stmt)
add_referenced_var (*tp);
-
+
/* If EXPR has block defined, map it to newly constructed block.
When inlining we want EXPRs without block appear in the block
- of function call. */
+ of function call if we are not remapping a type. */
if (EXPR_P (*tp))
{
- new_block = id->block;
+ new_block = id->remapping_type_depth == 0 ? id->block : NULL;
if (TREE_BLOCK (*tp))
{
tree *n;
case GIMPLE_TRY:
s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
- copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
+ copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
break;
case GIMPLE_WITH_CLEANUP_EXPR:
default:
break;
}
+
+ /* Reset alias info if we didn't apply measures to
+ keep it valid over inlining by setting DECL_PT_UID. */
+ if (!id->src_cfun->gimple_df
+ || !id->src_cfun->gimple_df->ipa_pta)
+ gimple_call_reset_alias_info (copy);
}
break;
if (skip_first)
walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
else
- walk_gimple_op (copy, remap_gimple_op_r, &wi);
+ walk_gimple_op (copy, remap_gimple_op_r, &wi);
/* Clear the copied virtual operands. We are not remapping them here
but are going to recreate them from scratch. */
/* We could also just rescale the frequency, but
doing so would introduce roundoff errors and make
verifier unhappy. */
- edge->frequency
+ edge->frequency
= compute_call_stmt_bb_frequency (id->dst_node->decl,
copy_basic_block);
if (dump_file
bb->frequency,
copy_basic_block->frequency);
}
+ stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
}
break;
/* Constant propagation on argument done during inlining
may create new direct call. Produce an edge for it. */
- if ((!edge
- || (edge->indirect_call
+ if ((!edge
+ || (edge->indirect_inlining_edge
&& id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
- && is_gimple_call (stmt)
&& (fn = gimple_call_fndecl (stmt)) != NULL)
{
struct cgraph_node *dest = cgraph_node (fn);
other cases we hit a bug (incorrect node sharing is the
most common reason for missing edges). */
gcc_assert (dest->needed || !dest->analyzed
+ || dest->address_taken
|| !id->src_node->analyzed);
if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
cgraph_create_edge_including_clones
- (id->dst_node, dest, stmt, bb->count,
- compute_call_stmt_bb_frequency (id->dst_node->decl,
+ (id->dst_node, dest, orig_stmt, stmt, bb->count,
+ compute_call_stmt_bb_frequency (id->dst_node->decl,
copy_basic_block),
bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL);
else
cgraph_create_edge (id->dst_node, dest, stmt,
- bb->count, CGRAPH_FREQ_BASE,
+ bb->count,
+ compute_call_stmt_bb_frequency
+ (id->dst_node->decl, copy_basic_block),
bb->loop_depth)->inline_failed
= CIF_ORIGINALLY_INDIRECT_CALL;
if (dump_file)
new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
gsi_insert_seq_on_edge_immediate (new_edge, stmts);
}
- add_phi_arg (new_phi, new_arg, new_edge,
+ add_phi_arg (new_phi, new_arg, new_edge,
gimple_phi_arg_location_from_edge (phi, old_edge));
}
}
cfun->last_verified = src_cfun->last_verified;
cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
- cfun->function_frequency = src_cfun->function_frequency;
cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
cfun->stdarg = src_cfun->stdarg;
cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
gcc_assert (TREE_CODE (*n) == VAR_DECL);
t = *n;
}
+ else if (TREE_CODE (t) == VAR_DECL
+ && !TREE_STATIC (t)
+ && gimple_in_ssa_p (cfun)
+ && !pointer_map_contains (id->decl_map, t)
+ && !var_ann (t))
+ /* T is a non-localized variable. */;
else
walk_tree (&t, remap_gimple_op_r, &wi, NULL);
is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
was the LHS of the MODIFY_EXPR to which this call is the RHS.
- The return value is a (possibly null) value that is the result of the
- function as seen by the callee. *USE_P is a (possibly null) value that
- holds the result as seen by the caller. */
+ The return value is a (possibly null) value that holds the result
+ as seen by the caller. */
static tree
-declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
- tree *use_p)
+declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest)
{
tree callee = id->src_fn;
tree caller = id->dst_fn;
tree result = DECL_RESULT (callee);
tree callee_type = TREE_TYPE (result);
- tree caller_type = TREE_TYPE (TREE_TYPE (callee));
+ tree caller_type;
tree var, use;
+ /* Handle type-mismatches in the function declaration return type
+ vs. the call expression. */
+ if (modify_dest)
+ caller_type = TREE_TYPE (modify_dest);
+ else
+ caller_type = TREE_TYPE (TREE_TYPE (callee));
+
/* We don't need to do anything for functions that don't return
anything. */
if (!result || VOID_TYPE_P (callee_type))
- {
- *use_p = NULL_TREE;
- return NULL_TREE;
- }
+ return NULL_TREE;
/* If there was a return slot, then the return value is the
dereferenced address of that object. */
STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
/* We are going to construct *&return_slot and we can't do that
- for variables believed to be not addressable.
+ for variables believed to be not addressable.
FIXME: This check possibly can match, because values returned
via return slot optimization are not believed to have address
use = var;
if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
use = fold_convert (caller_type, var);
-
+
STRIP_USELESS_TYPE_CONVERSION (use);
if (DECL_BY_REFERENCE (result))
/* Remember this so we can ignore it in remap_decls. */
id->retvar = var;
- *use_p = use;
- return var;
+ return use;
}
/* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
return NULL_TREE;
}
-/* Callback through walk_tree. Determine if we've got an aggregate
- type that we can't support; return non-null if so. */
-
-static tree
-cannot_copy_type_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
- void *data ATTRIBUTE_UNUSED)
-{
- tree t, node = *nodep;
-
- if (TREE_CODE (node) == RECORD_TYPE || TREE_CODE (node) == UNION_TYPE)
- {
- /* We cannot inline a function of the form
-
- void F (int i) { struct S { int ar[i]; } s; }
-
- Attempting to do so produces a catch-22.
- If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
- UNION_TYPE nodes, then it goes into infinite recursion on a
- structure containing a pointer to its own type. If it doesn't,
- then the type node for S doesn't get adjusted properly when
- F is inlined.
-
- ??? This is likely no longer true, but it's too late in the 4.0
- cycle to try to find out. This should be checked for 4.1. */
- for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
- if (variably_modified_type_p (TREE_TYPE (t), NULL))
- return node;
- }
-
- return NULL_TREE;
-}
-
-
/* Determine if the function can be copied. If so return NULL. If
not return a string describing the reason for failure. */
"address of local label in a static variable");
goto fail;
}
-
- if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
- && variably_modified_type_p (TREE_TYPE (decl), NULL)
- && walk_tree_without_duplicates (&TREE_TYPE (decl),
- cannot_copy_type_1, NULL))
- {
- reason = G_("function %q+F can never be copied "
- "because it uses variable sized variables");
- goto fail;
- }
}
fail:
case GIMPLE_SWITCH:
/* Take into account cost of the switch + guess 2 conditional jumps for
- each case label.
+ each case label.
TODO: once the switch expansion logic is sufficiently separated, we can
do better job on estimating cost of the switch. */
cost = weights->target_builtin_call_cost;
else
cost = weights->call_cost;
-
+
if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
switch (DECL_FUNCTION_CODE (decl))
{
+ /* Builtins that expand to constants. */
case BUILT_IN_CONSTANT_P:
- return 0;
case BUILT_IN_EXPECT:
+ case BUILT_IN_OBJECT_SIZE:
+ case BUILT_IN_UNREACHABLE:
+ /* Simple register moves or loads from stack. */
+ case BUILT_IN_RETURN_ADDRESS:
+ case BUILT_IN_EXTRACT_RETURN_ADDR:
+ case BUILT_IN_FROB_RETURN_ADDR:
+ case BUILT_IN_RETURN:
+ case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
+ case BUILT_IN_FRAME_ADDRESS:
+ case BUILT_IN_VA_END:
+ case BUILT_IN_STACK_SAVE:
+ case BUILT_IN_STACK_RESTORE:
+ /* Exception state returns or moves registers around. */
+ case BUILT_IN_EH_FILTER:
+ case BUILT_IN_EH_POINTER:
+ case BUILT_IN_EH_COPY_VALUES:
return 0;
- /* Prefetch instruction is not expensive. */
+    /* Builtins that are not expensive (that is, they are most probably
+       expanded inline into reasonably simple code). */
+ case BUILT_IN_ABS:
+ case BUILT_IN_ALLOCA:
+ case BUILT_IN_BSWAP32:
+ case BUILT_IN_BSWAP64:
+ case BUILT_IN_CLZ:
+ case BUILT_IN_CLZIMAX:
+ case BUILT_IN_CLZL:
+ case BUILT_IN_CLZLL:
+ case BUILT_IN_CTZ:
+ case BUILT_IN_CTZIMAX:
+ case BUILT_IN_CTZL:
+ case BUILT_IN_CTZLL:
+ case BUILT_IN_FFS:
+ case BUILT_IN_FFSIMAX:
+ case BUILT_IN_FFSL:
+ case BUILT_IN_FFSLL:
+ case BUILT_IN_IMAXABS:
+ case BUILT_IN_FINITE:
+ case BUILT_IN_FINITEF:
+ case BUILT_IN_FINITEL:
+ case BUILT_IN_FINITED32:
+ case BUILT_IN_FINITED64:
+ case BUILT_IN_FINITED128:
+ case BUILT_IN_FPCLASSIFY:
+ case BUILT_IN_ISFINITE:
+ case BUILT_IN_ISINF_SIGN:
+ case BUILT_IN_ISINF:
+ case BUILT_IN_ISINFF:
+ case BUILT_IN_ISINFL:
+ case BUILT_IN_ISINFD32:
+ case BUILT_IN_ISINFD64:
+ case BUILT_IN_ISINFD128:
+ case BUILT_IN_ISNAN:
+ case BUILT_IN_ISNANF:
+ case BUILT_IN_ISNANL:
+ case BUILT_IN_ISNAND32:
+ case BUILT_IN_ISNAND64:
+ case BUILT_IN_ISNAND128:
+ case BUILT_IN_ISNORMAL:
+ case BUILT_IN_ISGREATER:
+ case BUILT_IN_ISGREATEREQUAL:
+ case BUILT_IN_ISLESS:
+ case BUILT_IN_ISLESSEQUAL:
+ case BUILT_IN_ISLESSGREATER:
+ case BUILT_IN_ISUNORDERED:
+ case BUILT_IN_VA_ARG_PACK:
+ case BUILT_IN_VA_ARG_PACK_LEN:
+ case BUILT_IN_VA_COPY:
+ case BUILT_IN_TRAP:
+ case BUILT_IN_SAVEREGS:
+ case BUILT_IN_POPCOUNTL:
+ case BUILT_IN_POPCOUNTLL:
+ case BUILT_IN_POPCOUNTIMAX:
+ case BUILT_IN_POPCOUNT:
+ case BUILT_IN_PARITYL:
+ case BUILT_IN_PARITYLL:
+ case BUILT_IN_PARITYIMAX:
+ case BUILT_IN_PARITY:
+ case BUILT_IN_LABS:
+ case BUILT_IN_LLABS:
case BUILT_IN_PREFETCH:
cost = weights->target_builtin_call_cost;
break;
struct cgraph_edge *cs;
cs = cgraph_edge (node, stmt);
- if (cs)
+ if (cs && !cs->indirect_unknown_callee)
return cs->callee->decl;
return NULL_TREE;
static bool
expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
{
- tree retvar, use_retvar;
+ tree use_retvar;
tree fn;
struct pointer_map_t *st, *dst;
tree return_slot;
/* If this call was originally indirect, we do not want to emit any
inlining related warnings or sorry messages because there are no
guarantees regarding those. */
- if (cg_edge->indirect_call)
+ if (cg_edge->indirect_inlining_edge)
goto egress;
if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
}
/* Declare the return variable for the function. */
- retvar = declare_return_variable (id, return_slot, modify_dest, &use_retvar);
+ use_retvar = declare_return_variable (id, return_slot, modify_dest);
/* Add local vars in this inlined callee to caller. */
t_step = id->src_cfun->local_decls;
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Inlining ");
- print_generic_expr (dump_file, id->src_fn, 0);
+ print_generic_expr (dump_file, id->src_fn, 0);
fprintf (dump_file, " to ");
- print_generic_expr (dump_file, id->dst_fn, 0);
+ print_generic_expr (dump_file, id->dst_fn, 0);
fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
}
cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
bb, return_block);
- /* Reset the escaped and callused solutions. */
+ /* Reset the escaped solution. */
if (cfun->gimple_df)
- {
- pt_solution_reset (&cfun->gimple_df->escaped);
- pt_solution_reset (&cfun->gimple_df->callused);
- }
+ pt_solution_reset (&cfun->gimple_df->escaped);
/* Clean up. */
if (id->debug_map)
(*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
/* Update callgraph if needed. */
- if (cg_edge->callee->clone_of
- && !cg_edge->callee->clone_of->next_sibling_clone
- && !cg_edge->callee->analyzed)
- cgraph_remove_node (cg_edge->callee);
cgraph_remove_node (cg_edge->callee);
id->block = NULL_TREE;
optimize_inline_calls (tree fn)
{
copy_body_data id;
- tree prev_fn;
basic_block bb;
int last = n_basic_blocks;
struct gimplify_ctx gctx;
id.src_node = id.dst_node = cgraph_node (fn);
id.dst_fn = fn;
/* Or any functions that aren't finished yet. */
- prev_fn = NULL_TREE;
if (current_function_decl)
- {
- id.dst_fn = current_function_decl;
- prev_fn = current_function_decl;
- }
+ id.dst_fn = current_function_decl;
id.copy_decl = copy_decl_maybe_to_var;
id.transform_call_graph_edges = CB_CGE_DUPLICATE;
gcc_assert (e->inline_failed);
}
#endif
-
+
/* Fold the statements before compacting/renumbering the basic blocks. */
fold_marked_statements (last, id.statements_to_fold);
pointer_set_destroy (id.statements_to_fold);
-
+
gcc_assert (!id.debug_stmts);
/* Renumber the (code) basic_blocks consecutively. */
DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
/* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
- declaration inspired this copy. */
+ declaration inspired this copy. */
DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
/* The new variable/label has no RTL, yet. */
if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
&& !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
SET_DECL_RTL (copy, NULL_RTX);
-
+
/* These args would always appear unused, if not for this. */
TREE_USED (copy) = 1;
copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
VAR_DECL, DECL_NAME (decl), type);
+ if (DECL_PT_UID_SET_P (decl))
+ SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
TREE_READONLY (copy) = TREE_READONLY (decl);
TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
VAR_DECL, DECL_NAME (decl), type);
+ if (DECL_PT_UID_SET_P (decl))
+ SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
TREE_READONLY (copy) = TREE_READONLY (decl);
TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
if (!DECL_BY_REFERENCE (decl))
else
cgraph_remove_edge (e);
}
-
+
if (node->clones)
node = node->clones;
else if (node->next_sibling_clone)
/* Create a copy of a function's tree.
OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
of the original function and the new copied function
- respectively. In case we want to replace a DECL
- tree with another tree while duplicating the function's
- body, TREE_MAP represents the mapping between these
+ respectively. In case we want to replace a DECL
+ tree with another tree while duplicating the function's
+ body, TREE_MAP represents the mapping between these
trees. If UPDATE_CLONES is set, the call_stmt fields
of edges of clones of the function will be updated. */
void
old_transforms_to_apply,
i));
}
-
+
id.copy_decl = copy_decl_no_change;
id.transform_call_graph_edges
= update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
(DECL_STRUCT_FUNCTION (old_decl));
initialize_cfun (new_decl, old_decl,
old_entry_block->count);
+ DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
+ = id.src_cfun->gimple_df->ipa_pta;
push_cfun (DECL_STRUCT_FUNCTION (new_decl));
-
+
/* Copy the function's static chain. */
p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
if (p)
DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
&id);
-
+
/* If there's a tree_map, prepare for substitution. */
if (tree_map)
for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
op = TREE_OPERAND (op, 0);
-
+
if (TREE_CODE (op) == ADDR_EXPR)
{
op = TREE_OPERAND (op, 0);
DECL_ARGUMENTS (new_decl) =
copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
args_to_skip, &vars);
-
+
DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
-
+
/* Renumber the lexical scoping (non-code) blocks consecutively. */
number_blocks (id.dst_fn);
-
+
declare_inline_vars (DECL_INITIAL (new_decl), vars);
if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
tree_cons (NULL_TREE, remap_decl (var, &id),
cfun->local_decls);
}
-
+
/* Copy the Function's body. */
copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
-
+
if (DECL_RESULT (old_decl) != NULL_TREE)
{
tree *res_decl = &DECL_RESULT (old_decl);
DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
}
-
+
/* Renumber the lexical scoping (non-code) blocks consecutively. */
number_blocks (new_decl);
pointer_set_destroy (id.statements_to_fold);
fold_cond_expr_cond ();
delete_unreachable_blocks_update_callgraph (&id);
+ if (id.dst_node->analyzed)
+ cgraph_rebuild_references ();
update_ssa (TODO_update_ssa);
free_dominance_info (CDI_DOMINATORS);
free_dominance_info (CDI_POST_DOMINATORS);
return false;
}
#endif
- tree caller, callee;
+ tree caller, callee, lhs;
caller = e->caller->decl;
callee = e->callee->decl;
return false;
}
+  /* Do not inline calls where we cannot trivially work around mismatches
+ in argument or return types. */
if (e->call_stmt
- && !gimple_check_call_args (e->call_stmt))
+ && ((DECL_RESULT (callee)
+ && !DECL_BY_REFERENCE (DECL_RESULT (callee))
+ && (lhs = gimple_call_lhs (e->call_stmt)) != NULL_TREE
+ && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
+ TREE_TYPE (lhs))
+ && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
+ || !gimple_check_call_args (e->call_stmt)))
{
e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
gimple_call_set_cannot_inline (e->call_stmt, true);