diff --git a/gcc/tree-inline.c b/gcc/tree-inline.c
index d9654f4d52c..0f7ea978b72 100644
--- a/gcc/tree-inline.c
+++ b/gcc/tree-inline.c
@@ -16,8 +16,8 @@ GNU General Public License for more details.
 You should have received a copy of the GNU General Public License
 along with GCC; see the file COPYING.  If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA.  */
+the Free Software Foundation, 51 Franklin Street, Fifth Floor,
+Boston, MA 02110-1301, USA.  */
 
 #include "config.h"
 #include "system.h"
@@ -49,7 +49,7 @@ Boston, MA 02111-1307, USA.  */
 #include "except.h"
 #include "debug.h"
 #include "pointer-set.h"
-#include "integrate.h"
+#include "ipa-prop.h"
 
 /* I'm not real happy about this, but we need to handle gimple and
    non-gimple trees.  */
@@ -127,12 +127,15 @@ typedef struct inline_data
   bool cloning_p;
   /* Similarly for saving function body.  */
   bool saving_p;
+  /* Versioning function is slightly different from inlining.  */
+  bool versioning_p;
   /* Callgraph node of function we are inlining into.  */
   struct cgraph_node *node;
   /* Callgraph node of currently inlined function.  */
   struct cgraph_node *current_node;
   /* Current BLOCK.  */
   tree block;
+  varray_type ipa_info;
   /* Exception region the inlined call lie in.  */
   int eh_region;
   /* Take region number in the function being copied, add this value and
@@ -157,8 +160,9 @@ static void unsave_expr_1 (tree);
 static tree unsave_r (tree *, int *, void *);
 static void declare_inline_vars (tree, tree);
 static void remap_save_expr (tree *, void *, int *);
-
-static inline bool inlining_p (inline_data *id);
+static bool replace_ref_tree (inline_data *, tree *);
+static inline bool inlining_p (inline_data *);
+static void add_lexical_block (tree current_block, tree new_block);
 
 /* Insert a tree->tree mapping for ID.  Despite the name suggests
    that the trees should be variables, it is used for more than that.  */
@@ -197,8 +201,8 @@ remap_decl (tree decl, inline_data *id)
     {
       /* Make a copy of the variable or label.  */
      tree t;
-      t = copy_decl_for_inlining (decl, fn, id->caller);
-
+      t = copy_decl_for_dup (decl, fn, id->caller, id->versioning_p);
+
      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);
@@ -208,9 +212,6 @@ remap_decl (tree decl, inline_data *id)
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
	DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
-      else if (TREE_CODE (t) == PARM_DECL)
-	DECL_ARG_TYPE_AS_WRITTEN (t)
-	  = remap_type (DECL_ARG_TYPE_AS_WRITTEN (t), id);
 
      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
@@ -246,13 +247,6 @@ remap_decl (tree decl, inline_data *id)
	}
 #endif
 
-  /* If we are inlining and this is a variable (not a label), declare the
-     remapped variable in the callers' body.  */
-  if (inlining_p (id)
-      && (TREE_CODE (t) == VAR_DECL
-	  || TREE_CODE (t) == PARM_DECL))
-    declare_inline_vars (id->block, t);
-
  /* Remember it, so that if we encounter this local entity
     again we can reuse this copy.
     */
  insert_decl_map (id, decl, t);
@@ -263,26 +257,10 @@ remap_decl (tree decl, inline_data *id)
 }
 
 static tree
-remap_type (tree type, inline_data *id)
+remap_type_1 (tree type, inline_data *id)
 {
-  splay_tree_node node;
   tree new, t;
 
-  if (type == NULL)
-    return type;
-
-  /* See if we have remapped this type.  */
-  node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
-  if (node)
-    return (tree) node->value;
-
-  /* The type only needs remapping if it's variably modified.  */
-  if (! variably_modified_type_p (type, id->callee))
-    {
-      insert_decl_map (id, type, type);
-      return type;
-    }
-
  /* We do need a copy.  build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
@@ -359,7 +337,18 @@
     case RECORD_TYPE:
     case UNION_TYPE:
     case QUAL_UNION_TYPE:
-      walk_tree (&TYPE_FIELDS (new), copy_body_r, id, NULL);
+      {
+	tree f, nf = NULL;
+
+	for (f = TYPE_FIELDS (new); f ; f = TREE_CHAIN (f))
+	  {
+	    t = remap_decl (f, id);
+	    DECL_CONTEXT (t) = new;
+	    TREE_CHAIN (t) = nf;
+	    nf = t;
+	  }
+	TYPE_FIELDS (new) = nreverse (nf);
+      }
      break;
 
    case OFFSET_TYPE:
@@ -375,6 +364,29 @@ remap_type (tree type, inline_data *id)
 }
 
 static tree
+remap_type (tree type, inline_data *id)
+{
+  splay_tree_node node;
+
+  if (type == NULL)
+    return type;
+
+  /* See if we have remapped this type.  */
+  node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
+  if (node)
+    return (tree) node->value;
+
+  /* The type only needs remapping if it's variably modified.  */
+  if (! variably_modified_type_p (type, id->callee))
+    {
+      insert_decl_map (id, type, type);
+      return type;
+    }
+
+  return remap_type_1 (type, id);
+}
+
+static tree
 remap_decls (tree decls, inline_data *id)
 {
   tree old_var;
@@ -385,6 +397,17 @@
    {
      tree new_var;
 
+      /* We can not chain the local static declarations into the
+	 unexpanded_var_list as we can't duplicate them or break one decl
+	 rule.  Go ahead and link them into unexpanded_var_list.  */
+      if (!lang_hooks.tree_inlining.auto_var_in_fn_p (old_var, id->callee)
+	  && !DECL_EXTERNAL (old_var))
+	{
+	  cfun->unexpanded_var_list = tree_cons (NULL_TREE, old_var,
+						 cfun->unexpanded_var_list);
+	  continue;
+	}
+
      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);
@@ -419,38 +442,39 @@ remap_block (tree *block, inline_data *id)
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
+  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  *block = new_block;
 
  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);
 
  fn = id->caller;
-#if 1
-  /* FIXME!  It shouldn't be so hard to manage blocks.  Rebuilding them in
-     rest_of_compilation is a good start.  */
  if (id->cloning_p)
    /* We're building a clone; DECL_INITIAL is still error_mark_node,
       and current_binding_level is the parm binding level.  */
    lang_hooks.decls.insert_block (new_block);
-  else
-    {
-      /* Attach this new block after the DECL_INITIAL block for the
-	 function into which this block is being inlined.  In
-	 rest_of_compilation we will straighten out the BLOCK tree.  */
-      tree *first_block;
-      if (DECL_INITIAL (fn))
-	first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
-      else
-	first_block = &DECL_INITIAL (fn);
-      BLOCK_CHAIN (new_block) = *first_block;
-      *first_block = new_block;
-    }
-#endif
 
  /* Remember the remapped block.
     */
  insert_decl_map (id, old_block, new_block);
 }
 
+/* Copy the whole block tree and root it in id->block.  */
+static tree
+remap_blocks (tree block, inline_data *id)
+{
+  tree t;
+  tree new = block;
+
+  if (!block)
+    return NULL;
+
+  remap_block (&new, id);
+  gcc_assert (new != block);
+  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
+    add_lexical_block (new, remap_blocks (t, id));
+  return new;
+}
+
 static void
 copy_statement_list (tree *tp)
 {
@@ -492,6 +516,7 @@ copy_body_r (tree *tp, int *walk_subtrees, void *data)
 {
  inline_data *id = (inline_data *) data;
  tree fn = id->callee;
+  tree new_block;
 
  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
@@ -543,9 +568,11 @@
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
-  else if (TREE_CODE (*tp) == LABEL_DECL)
+  else if (TREE_CODE (*tp) == LABEL_DECL
+	   && (! DECL_CONTEXT (*tp)
+	       || decl_function_context (*tp) == id->callee))
    /* These may need to be remapped for EH handling.  */
-    remap_decl (*tp, id);
+    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
@@ -575,8 +602,6 @@
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
-      tree old_node = *tp;
-
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
@@ -603,7 +628,8 @@
	    }
	}
    }
-      else if (TREE_CODE (*tp) == INDIRECT_REF)
+      else if (TREE_CODE (*tp) == INDIRECT_REF
+	       && !id->versioning_p)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
@@ -613,7 +639,21 @@ copy_body_r (tree *tp, int *walk_subtrees, void *data)
	  tree decl = TREE_OPERAND (*tp, 0), *n;
	  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
	  if (n)
	    {
-	      *tp = build_fold_indirect_ref ((tree)n->value);
+	      /* If we happen to get an ADDR_EXPR in n->value, strip
+	         it manually here as we'll eventually get ADDR_EXPRs
+		 which lie about their types pointed to.  In this case
+		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
+		 but we absolutely rely on that.  As fold_indirect_ref
+	         does other useful transformations, try that first, though.  */
+	      tree type = TREE_TYPE (TREE_TYPE ((tree)n->value));
+	      *tp = fold_indirect_ref_1 (type, (tree)n->value);
+	      if (! *tp)
+	        {
+		  if (TREE_CODE ((tree)n->value) == ADDR_EXPR)
+		    *tp = TREE_OPERAND ((tree)n->value, 0);
+	          else
+		    *tp = build1 (INDIRECT_REF, type, (tree)n->value);
+		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
@@ -621,45 +661,26 @@
 
      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
-      copy_tree_r (tp, walk_subtrees, NULL);
-      if (id->block
-	  && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (*tp))))
-	TREE_BLOCK (*tp) = id->block;
-
-      /* We're duplicating a CALL_EXPR.  Find any corresponding
-	 callgraph edges and update or duplicate them.  */
-      if (TREE_CODE (*tp) == CALL_EXPR && id->node && get_callee_fndecl (*tp))
+      copy_tree_r (tp, walk_subtrees, id->versioning_p ? data : NULL);
+
+      /* If EXPR has block defined, map it to newly constructed block.
+         When inlining we want EXPRs without block appear in the block
+	 of function call.
+	 */
+      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (*tp))))
	{
-	  if (id->saving_p)
-	    {
-	      struct cgraph_node *node;
-	      struct cgraph_edge *edge;
-
-	      /* We're saving a copy of the body, so we'll update the
-		 callgraph nodes in place.  Note that we avoid
-		 altering the original callgraph node; we begin with
-		 the first clone.  */
-	      for (node = id->node->next_clone;
-		   node;
-		   node = node->next_clone)
-		{
-		  edge = cgraph_edge (node, old_node);
-		  gcc_assert (edge);
-		  edge->call_expr = *tp;
-		}
-	    }
-	  else
+	  new_block = id->block;
+	  if (TREE_BLOCK (*tp))
	    {
-	      struct cgraph_edge *edge;
-
-	      /* We're cloning or inlining this body; duplicate the
-		 associate callgraph nodes.  */
-	      edge = cgraph_edge (id->current_node, old_node);
-	      if (edge)
-		cgraph_clone_edge (edge, id->node, *tp);
+	      splay_tree_node n;
+	      n = splay_tree_lookup (id->decl_map,
+				     (splay_tree_key) TREE_BLOCK (*tp));
+	      gcc_assert (n);
+	      new_block = (tree) n->value;
	    }
+	  TREE_BLOCK (*tp) = new_block;
	}
-      else if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
+
+      if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
	TREE_OPERAND (*tp, 0) =
	  build_int_cst
	    (NULL_TREE,
@@ -719,7 +740,55 @@ copy_bb (inline_data *id, basic_block bb, int frequency_scale, int count_scale)
	 this is signalled by making stmt pointer NULL.  */
      if (stmt)
	{
+	  tree call, decl;
	  bsi_insert_after (&copy_bsi, stmt, BSI_NEW_STMT);
+	  call = get_call_expr_in (stmt);
+	  /* We're duplicating a CALL_EXPR.  Find any corresponding
+	     callgraph edges and update or duplicate them.  */
+	  if (call && (decl = get_callee_fndecl (call)))
+	    {
+	      if (id->saving_p)
+		{
+		  struct cgraph_node *node;
+		  struct cgraph_edge *edge;
+
+		  /* We're saving a copy of the body, so we'll update the
+		     callgraph nodes in place.  Note that we avoid
+		     altering the original callgraph node; we begin with
+		     the first clone.  */
+		  for (node = id->node->next_clone;
+		       node;
+		       node = node->next_clone)
+		    {
+		      edge = cgraph_edge (node, orig_stmt);
+		      gcc_assert (edge);
+		      edge->call_stmt = stmt;
+		    }
+		}
+	      else
+		{
+		  struct cgraph_edge *edge;
+
+		  /* We're cloning or inlining this body; duplicate the
+		     associate callgraph nodes.  */
+		  if (!id->versioning_p)
+		    {
+		      edge = cgraph_edge (id->current_node, orig_stmt);
+		      if (edge)
+			cgraph_clone_edge (edge, id->node, stmt,
+					   REG_BR_PROB_BASE, 1, true);
+		    }
+		}
+	      if (id->versioning_p)
+		{
+		  /* Update the call_expr on the edges from the new version
+		     to its callees.  */
+		  struct cgraph_edge *edge;
+		  edge = cgraph_edge (id->node, orig_stmt);
+		  if (edge)
+		    edge->call_stmt = stmt;
+		}
+	    }
	  /* If you think we can abort here, you are wrong.
	     There is no region 0 in tree land.  */
	  gcc_assert (lookup_stmt_eh_region_fn (id->callee_cfun, orig_stmt)
@@ -765,24 +834,24 @@
  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
-    {
-      edge new;
+    if (!(old_edge->flags & EDGE_EH))
+      {
+	edge new;
 
-      flags = old_edge->flags;
+	flags = old_edge->flags;
 
-      /* Return edges do get a FALLTHRU flag when the get inlined.  */
-      if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
-	  && old_edge->dest->aux != EXIT_BLOCK_PTR)
-	flags |= EDGE_FALLTHRU;
-      new = make_edge (new_bb, old_edge->dest->aux, flags);
-      new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
-      new->probability = old_edge->probability;
-    }
+	/* Return edges do get a FALLTHRU flag when the get inlined.
+	   */
+	if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
+	    && old_edge->dest->aux != EXIT_BLOCK_PTR)
+	  flags |= EDGE_FALLTHRU;
+	new = make_edge (new_bb, old_edge->dest->aux, flags);
+	new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
+	new->probability = old_edge->probability;
+      }
 
  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return;
 
-  tree_purge_dead_eh_edges (new_bb);
  for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
    {
      tree copy_stmt;
@@ -804,9 +873,7 @@ copy_edges_for_bb (basic_block bb, int count_scale)
	 into a COMPONENT_REF which doesn't.  If the copy
	 can throw, the original could also throw.  */
 
-      if (TREE_CODE (copy_stmt) == RESX_EXPR
-	  || (tree_could_throw_p (copy_stmt)
-	      && lookup_stmt_eh_region (copy_stmt) > 0))
+      if (tree_can_throw_internal (copy_stmt))
	{
	  if (!bsi_end_p (bsi))
	    /* Note that bb's predecessor edges aren't necessarily
@@ -888,7 +955,7 @@ copy_cfg_body (inline_data * id, gcov_type count, int frequency,
     and label_to_block_maps.  Otherwise, we're duplicating a
     function body for inlining; insert our new blocks and labels
     into the existing varrays.  */
-  saving_or_cloning = (id->saving_p || id->cloning_p);
+  saving_or_cloning = (id->saving_p || id->cloning_p || id->versioning_p);
  if (saving_or_cloning)
    {
      new_cfun =
@@ -1028,7 +1095,7 @@ setup_one_parameter (inline_data *id, tree p, tree value, tree fn,
  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
-  var = copy_decl_for_inlining (p, fn, id->caller);
+  var = copy_decl_for_dup (p, fn, id->caller, /*versioning=*/false);
 
  /* See if the frontend wants to pass this by invisible reference.  If
     so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
@@ -1211,10 +1278,21 @@ declare_return_variable (inline_data *id, tree return_slot_addr,
      /* If the callee cannot possibly modify MODIFY_DEST, then we can
	 reuse it as the result of the call directly.  Don't do this if
	 it would promote MODIFY_DEST to addressable.  */
-      else if (!TREE_STATIC (modify_dest)
-	       && !TREE_ADDRESSABLE (modify_dest)
-	       && !TREE_ADDRESSABLE (result))
-	use_it = true;
+      else if (TREE_ADDRESSABLE (result))
+	use_it = false;
+      else
+	{
+	  tree base_m = get_base_address (modify_dest);
+
+	  /* If the base isn't a decl, then it's a pointer, and we don't
+	     know where that's going to go.
+	     */
+	  if (!DECL_P (base_m))
+	    use_it = false;
+	  else if (is_global_var (base_m))
+	    use_it = false;
+	  else if (!TREE_ADDRESSABLE (base_m))
+	    use_it = true;
+	}
 
      if (use_it)
	{
@@ -1226,7 +1304,7 @@
 
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
 
-  var = copy_decl_for_inlining (result, callee, caller);
+  var = copy_decl_for_dup (result, callee, caller, /*versioning=*/false);
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
  DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
@@ -1286,7 +1364,7 @@ inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
	{
	  inline_forbidden_reason
-	    = N_("%Jfunction %qF can never be inlined because it uses "
+	    = G_("function %q+F can never be inlined because it uses "
		 "alloca (override using the always_inline attribute)");
	  return node;
	}
@@ -1298,7 +1376,7 @@ inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
      if (setjmp_call_p (t))
	{
	  inline_forbidden_reason
-	    = N_("%Jfunction %qF can never be inlined because it uses setjmp");
+	    = G_("function %q+F can never be inlined because it uses setjmp");
	  return node;
	}
 
@@ -1312,7 +1390,7 @@ inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
	case BUILT_IN_NEXT_ARG:
	case BUILT_IN_VA_END:
	  inline_forbidden_reason
-	    = N_("%Jfunction %qF can never be inlined because it "
+	    = G_("function %q+F can never be inlined because it "
		 "uses variable argument lists");
	  return node;
 
@@ -1323,14 +1401,14 @@ inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
	     function calling __builtin_longjmp to be inlined into the
	     function calling __builtin_setjmp, Things will Go Awry.  */
	  inline_forbidden_reason
-	    = N_("%Jfunction %qF can never be inlined because "
+	    = G_("function %q+F can never be inlined because "
		 "it uses setjmp-longjmp exception handling");
	  return node;
 
	case BUILT_IN_NONLOCAL_GOTO:
	  /* Similarly.  */
	  inline_forbidden_reason
-	    = N_("%Jfunction %qF can never be inlined because "
+	    = G_("function %q+F can never be inlined because "
		 "it uses non-local goto");
	  return node;
 
@@ -1341,7 +1419,7 @@ inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
	     been inlined into.  Similarly __builtin_return would return from
	     the function the inline has been inlined into.  */
	  inline_forbidden_reason
-	    = N_("%Jfunction %qF can never be inlined because "
+	    = G_("function %q+F can never be inlined because "
		 "it uses __builtin_return or __builtin_apply_args");
	  return node;
 
@@ -1360,7 +1438,7 @@ inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
      if (TREE_CODE (t) != LABEL_DECL)
	{
	  inline_forbidden_reason
-	    = N_("%Jfunction %qF can never be inlined "
+	    = G_("function %q+F can never be inlined "
		 "because it contains a computed goto");
	  return node;
	}
@@ -1374,7 +1452,7 @@ inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
	     because we cannot remap the destination label used in the
	     function that is performing the non-local goto.
	     */
	  inline_forbidden_reason
-	    = N_("%Jfunction %qF can never be inlined "
+	    = G_("function %q+F can never be inlined "
		 "because it receives a non-local goto");
	  return node;
	}
@@ -1399,7 +1477,7 @@ inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
      if (variably_modified_type_p (TREE_TYPE (t), NULL))
	{
	  inline_forbidden_reason
-	    = N_("%Jfunction %qF can never be inlined "
+	    = G_("function %q+F can never be inlined "
		 "because it uses variable sized variables");
	  return node;
	}
@@ -1494,9 +1572,9 @@ inlinable_function_p (tree fn)
			  && !DECL_IN_SYSTEM_HEADER (fn));
 
      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
-	sorry (inline_forbidden_reason, fn, fn);
+	sorry (inline_forbidden_reason, fn);
      else if (do_warning)
-	warning (0, inline_forbidden_reason, fn, fn);
+	warning (OPT_Winline, inline_forbidden_reason, fn);
 
      inlinable = false;
    }
@@ -1668,6 +1746,8 @@ estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
+    case VEC_LSHIFT_EXPR:
+    case VEC_RSHIFT_EXPR:
 
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
@@ -1712,6 +1792,10 @@ estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
 
    case REALIGN_LOAD_EXPR:
 
+    case REDUC_MAX_EXPR:
+    case REDUC_MIN_EXPR:
+    case REDUC_PLUS_EXPR:
+
    case RESX_EXPR:
      *count += 1;
      break;
@@ -1804,28 +1888,25 @@ estimate_num_insns (tree expr)
  return num;
 }
 
+typedef struct function *function_p;
+
+DEF_VEC_P(function_p);
+DEF_VEC_ALLOC_P(function_p,heap);
+
 /* Initialized with NOGC, making this poisonous to the garbage collector.  */
-static varray_type cfun_stack;
+static VEC(function_p,heap) *cfun_stack;
 
 void
 push_cfun (struct function *new_cfun)
 {
-  static bool initialized = false;
-
-  if (!initialized)
-    {
-      VARRAY_GENERIC_PTR_NOGC_INIT (cfun_stack, 20, "cfun_stack");
-      initialized = true;
-    }
-  VARRAY_PUSH_GENERIC_PTR (cfun_stack, cfun);
+  VEC_safe_push (function_p, heap, cfun_stack, cfun);
  cfun = new_cfun;
 }
 
 void
 pop_cfun (void)
 {
-  cfun = (struct function *)VARRAY_TOP_GENERIC_PTR (cfun_stack);
-  VARRAY_POP (cfun_stack);
+  cfun = VEC_pop (function_p, cfun_stack);
 }
 
 /* Install new lexical TREE_BLOCK underneath 'current_block'.  */
@@ -1841,7 +1922,6 @@ add_lexical_block (tree current_block, tree new_block)
    ;
  *blk_p = new_block;
  BLOCK_SUPERCONTEXT (new_block) = current_block;
-  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
 }
 
 /* If *TP is a CALL_EXPR, replace it with its inline expansion.  */
@@ -1909,7 +1989,7 @@ expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
  if (!id->current_node->analyzed)
    goto egress;
 
-  cg_edge = cgraph_edge (id->current_node, t);
+  cg_edge = cgraph_edge (id->current_node, stmt);
 
  /* Constant propagation on argument done during previous inlining
     may create new direct call.  Produce an edge for it.  */
@@ -1922,7 +2002,8 @@ expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
	 constant propagating arguments.  In all other cases we hit a bug
	 (incorrect node sharing is most common reason for missing edges.  */
      gcc_assert (dest->needed || !flag_unit_at_a_time);
-      cgraph_create_edge (id->node, dest, t)->inline_failed
+      cgraph_create_edge (id->node, dest, stmt,
+			  bb->count, bb->loop_depth)->inline_failed
	= N_("originally indirect function call not considered for inlining");
      goto egress;
    }
@@ -1931,18 +2012,23 @@ expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
  /* Don't try to inline functions that are not well-suited to
     inlining.
     */
  if (!cgraph_inline_p (cg_edge, &reason))
    {
-      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
+      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
+	  /* Avoid warnings during early inline pass. */
+	  && (!flag_unit_at_a_time || cgraph_global_info_ready))
	{
-	  sorry ("%Jinlining failed in call to %qF: %s", fn, fn, reason);
+	  sorry ("inlining failed in call to %q+F: %s", fn, reason);
	  sorry ("called from here");
	}
      else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
	       && !DECL_IN_SYSTEM_HEADER (fn)
	       && strlen (reason)
-	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn)))
+	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
+	       /* Avoid warnings during early inline pass. */
+	       && (!flag_unit_at_a_time || cgraph_global_info_ready))
	{
-	  warning (0, "%Jinlining failed in call to %qF: %s", fn, fn, reason);
-	  warning (0, "called from here");
+	  warning (OPT_Winline, "inlining failed in call to %q+F: %s",
+		   fn, reason);
+	  warning (OPT_Winline, "called from here");
	}
      goto egress;
    }
@@ -1985,9 +2071,9 @@ expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
     statement expression is the return type of the function call.  */
  id->block = make_node (BLOCK);
  BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
+  BLOCK_SOURCE_LOCATION (id->block) = input_location;
  add_lexical_block (TREE_BLOCK (stmt), id->block);
-
  /* Local declarations will be replaced by their equivalents in this
     map.  */
  st = id->decl_map;
@@ -1996,19 +2082,17 @@ expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
 
  /* Initialize the parameters.  */
  args = TREE_OPERAND (t, 1);
-  if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (t))
-    {
-      return_slot_addr = TREE_VALUE (args);
-      args = TREE_CHAIN (args);
-    }
-  else
-    return_slot_addr = NULL_TREE;
 
  initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2), fn, bb);
 
  /* Record the function we are about to inline.  */
  id->callee = fn;
 
+  if (DECL_STRUCT_FUNCTION (fn)->saved_blocks)
+    add_lexical_block (id->block, remap_blocks (DECL_STRUCT_FUNCTION (fn)->saved_blocks, id));
+  else if (DECL_INITIAL (fn))
+    add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
+
  /* Return statements in the function body will be replaced by jumps
     to the RET_LABEL.  */
@@ -2016,10 +2100,10 @@ expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
  gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
 
  /* Find the lhs to which the result of this call is assigned.  */
-  modify_dest = stmt;
-  if (TREE_CODE (modify_dest) == MODIFY_EXPR)
+  return_slot_addr = NULL;
+  if (TREE_CODE (stmt) == MODIFY_EXPR)
    {
-      modify_dest = TREE_OPERAND (modify_dest, 0);
+      modify_dest = TREE_OPERAND (stmt, 0);
 
      /* The function which we are inlining might not return a value,
	 in which case we should issue a warning that the function
@@ -2029,6 +2113,11 @@ expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
	 uninitialized variable.  */
      if (DECL_P (modify_dest))
	TREE_NO_WARNING (modify_dest) = 1;
+      if (CALL_EXPR_RETURN_SLOT_OPT (t))
+	{
+	  return_slot_addr = build_fold_addr_expr (modify_dest);
+	  modify_dest = NULL;
+	}
    }
  else
    modify_dest = NULL;
@@ -2057,6 +2146,21 @@ expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
  copy_body (id, bb->count, bb->frequency, bb, return_block);
  id->current_node = old_node;
 
+  /* Add local vars in this inlined callee to caller.
+     */
+  t_step = id->callee_cfun->unexpanded_var_list;
+  if (id->callee_cfun->saved_unexpanded_var_list)
+    t_step = id->callee_cfun->saved_unexpanded_var_list;
+  for (; t_step; t_step = TREE_CHAIN (t_step))
+    {
+      var = TREE_VALUE (t_step);
+      if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
+	cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
+					       cfun->unexpanded_var_list);
+      else
+	cfun->unexpanded_var_list = tree_cons (NULL_TREE, remap_decl (var, id),
+					       cfun->unexpanded_var_list);
+    }
+
  /* Clean up.  */
  splay_tree_delete (id->decl_map);
  id->decl_map = st;
@@ -2094,16 +2198,6 @@ expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
  /* Declare the 'auto' variables added with this inlined body.  */
  record_vars (BLOCK_VARS (id->block));
  id->block = NULL_TREE;
-
-  /* Add local static vars in this inlined callee to caller.  */
-  for (t_step = id->callee_cfun->unexpanded_var_list;
-       t_step;
-       t_step = TREE_CHAIN (t_step))
-    {
-      var = TREE_VALUE (t_step);
-      if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
-	record_vars (var);
-    }
  successfully_inlined = TRUE;
 
 egress:
@@ -2242,6 +2336,7 @@ save_body (tree fn, tree *arg_copy, tree *sc_copy)
  inline_data id;
  tree newdecl, *parg;
  basic_block fn_entry_block;
+  tree t_step;
 
  memset (&id, 0, sizeof (id));
  id.callee = fn;
@@ -2280,11 +2375,28 @@ save_body (tree fn, tree *arg_copy, tree *sc_copy)
 
  insert_decl_map (&id, DECL_RESULT (fn), DECL_RESULT (fn));
 
+  DECL_STRUCT_FUNCTION (fn)->saved_blocks
+    = remap_blocks (DECL_INITIAL (fn), &id);
+  for (t_step = id.callee_cfun->unexpanded_var_list;
+       t_step;
+       t_step = TREE_CHAIN (t_step))
+    {
+      tree var = TREE_VALUE (t_step);
+      if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
+	cfun->saved_unexpanded_var_list
+	  = tree_cons (NULL_TREE, var, cfun->saved_unexpanded_var_list);
+      else
+	cfun->saved_unexpanded_var_list
+	  = tree_cons (NULL_TREE, remap_decl (var, &id),
+		       cfun->saved_unexpanded_var_list);
+    }
+
  /* Actually copy the body, including a new (struct function *) and CFG.
     EH info is also duplicated so its labels point into the copied
     CFG, not the original.  */
  fn_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fn));
-  newdecl = copy_body (&id, fn_entry_block->count, fn_entry_block->frequency, NULL, NULL);
+  newdecl = copy_body (&id, fn_entry_block->count, fn_entry_block->frequency,
+		       NULL, NULL);
  DECL_STRUCT_FUNCTION (fn)->saved_cfg = DECL_STRUCT_FUNCTION (newdecl)->cfg;
  DECL_STRUCT_FUNCTION (fn)->saved_eh = DECL_STRUCT_FUNCTION (newdecl)->eh;
@@ -2298,6 +2410,7 @@
 tree
 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
 {
  enum tree_code code = TREE_CODE (*tp);
+  inline_data *id = (inline_data *) data;
 
  /* We make copies of most nodes.  */
  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
@@ -2310,6 +2423,11 @@ copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
      tree chain = TREE_CHAIN (*tp);
      tree new;
 
+      if (id && id->versioning_p && replace_ref_tree (id, tp))
+	{
+	  *walk_subtrees = 0;
+	  return NULL_TREE;
+	}
      /* Copy the node.  */
      new = copy_node (*tp);
@@ -2329,7 +2447,22 @@ copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
	  if (TREE_CODE (*tp) == BIND_EXPR)
	    BIND_EXPR_BLOCK (*tp) = NULL_TREE;
	}
+      else if (code == CONSTRUCTOR)
+	{
+	  /* CONSTRUCTOR nodes need special handling because
+	     we need to duplicate the vector of elements.  */
+	  tree new;
+	  new = copy_node (*tp);
+
+	  /* Propagate mudflap marked-ness.
+	     */
+	  if (flag_mudflap && mf_marked_p (*tp))
+	    mf_mark (new);
+
+	  CONSTRUCTOR_ELTS (new) = VEC_copy (constructor_elt, gc,
+					     CONSTRUCTOR_ELTS (*tp));
+	  *tp = new;
+	}
      else if (TREE_CODE_CLASS (code) == tcc_type)
	*walk_subtrees = 0;
      else if (TREE_CODE_CLASS (code) == tcc_declaration)
@@ -2397,8 +2530,8 @@ mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
      /* Copy the decl and remember the copy.  */
      insert_decl_map (id, decl,
-		       copy_decl_for_inlining (decl, DECL_CONTEXT (decl),
-					       DECL_CONTEXT (decl)));
+		       copy_decl_for_dup (decl, DECL_CONTEXT (decl),
+					  DECL_CONTEXT (decl), /*versioning=*/false));
    }
 
  return NULL_TREE;
@@ -2532,9 +2665,335 @@ declare_inline_vars (tree block, tree vars)
  BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
 }
 
-/* Returns true if we're inlining.  */
+
+/* Copy NODE (which must be a DECL).  The DECL originally was in the FROM_FN,
+   but now it will be in the TO_FN.  VERSIONING means that this function
+   is used by the versioning utility (not inlining or cloning).  */
+
+tree
+copy_decl_for_dup (tree decl, tree from_fn, tree to_fn, bool versioning)
+{
+  tree copy;
+
+  gcc_assert (DECL_P (decl));
+  /* Copy the declaration.  */
+  if (!versioning
+      && (TREE_CODE (decl) == PARM_DECL
+	  || TREE_CODE (decl) == RESULT_DECL))
+    {
+      tree type = TREE_TYPE (decl);
+
+      /* For a parameter or result, we must make an equivalent VAR_DECL,
+	 not a new PARM_DECL.  */
+      copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
+      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
+      TREE_READONLY (copy) = TREE_READONLY (decl);
+      TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
+      DECL_COMPLEX_GIMPLE_REG_P (copy) = DECL_COMPLEX_GIMPLE_REG_P (decl);
+    }
+  else
+    {
+      copy = copy_node (decl);
+      /* The COPY is not abstract; it will be generated in TO_FN.  */
+      DECL_ABSTRACT (copy) = 0;
+      lang_hooks.dup_lang_specific_decl (copy);
+
+      /* TREE_ADDRESSABLE isn't used to indicate that a label's
+	 address has been taken; it's for internal bookkeeping in
+	 expand_goto_internal.  */
+      if (TREE_CODE (copy) == LABEL_DECL)
+	{
+	  TREE_ADDRESSABLE (copy) = 0;
+	  LABEL_DECL_UID (copy) = -1;
+	}
+    }
+
+  /* Don't generate debug information for the copy if we wouldn't have
+     generated it for the copy either.  */
+  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
+  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
+
+  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
+     declaration inspired this copy.  */
+  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
+
+  /* The new variable/label has no RTL, yet.  */
+  if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
+      && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
+    SET_DECL_RTL (copy, NULL_RTX);
+
+  /* These args would always appear unused, if not for this.  */
+  TREE_USED (copy) = 1;
+
+  /* Set the context for the new declaration.  */
+  if (!DECL_CONTEXT (decl))
+    /* Globals stay global.  */
+    ;
+  else if (DECL_CONTEXT (decl) != from_fn)
+    /* Things that weren't in the scope of the function we're inlining
+       from aren't in the scope we're inlining to, either.  */
+    ;
+  else if (TREE_STATIC (decl))
+    /* Function-scoped static variables should stay in the original
+       function.  */
+    ;
+  else
+    /* Ordinary automatic local variables are now in the scope of the
+       new function.  */
+    DECL_CONTEXT (copy) = to_fn;
+
+  return copy;
+}
+
+/* Return a copy of the function's argument tree.
+   */
+static tree
+copy_arguments_for_versioning (tree orig_parm, inline_data * id)
+{
+  tree *arg_copy, *parg;
+
+  arg_copy = &orig_parm;
+  for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
+    {
+      tree new = remap_decl (*parg, id);
+      lang_hooks.dup_lang_specific_decl (new);
+      TREE_CHAIN (new) = TREE_CHAIN (*parg);
+      *parg = new;
+    }
+  return orig_parm;
+}
+
+/* Return a copy of the function's static chain.  */
+static tree
+copy_static_chain (tree static_chain, inline_data * id)
+{
+  tree *chain_copy, *pvar;
+
+  chain_copy = &static_chain;
+  for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
+    {
+      tree new = remap_decl (*pvar, id);
+      lang_hooks.dup_lang_specific_decl (new);
+      TREE_CHAIN (new) = TREE_CHAIN (*pvar);
+      *pvar = new;
+    }
+  return static_chain;
+}
+
+/* Return true if the function is allowed to be versioned.
+   This is a guard for the versioning functionality.  */
+bool
+tree_versionable_function_p (tree fndecl)
+{
+  if (fndecl == NULL_TREE)
+    return false;
+  /* ??? There are cases where a function is
+     uninlinable but can be versioned.  */
+  if (!tree_inlinable_function_p (fndecl))
+    return false;
+
+  return true;
+}
+
+/* Create a copy of a function's tree.
+   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
+   of the original function and the new copied function
+   respectively.  In case we want to replace a DECL
+   tree with another tree while duplicating the function's
+   body, TREE_MAP represents the mapping between these
+   trees.  */
+void
+tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map)
+{
+  struct cgraph_node *old_version_node;
+  struct cgraph_node *new_version_node;
+  inline_data id;
+  tree p, new_fndecl;
+  unsigned i;
+  struct ipa_replace_map *replace_info;
+  basic_block old_entry_block;
+  tree t_step;
+
+  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
+	      && TREE_CODE (new_decl) == FUNCTION_DECL);
+  DECL_POSSIBLY_INLINED (old_decl) = 1;
+
+  old_version_node = cgraph_node (old_decl);
+  new_version_node = cgraph_node (new_decl);
+
+  allocate_struct_function (new_decl);
+  /* Cfun points to the new allocated function struct at this point.  */
+  cfun->function_end_locus = DECL_SOURCE_LOCATION (new_decl);
+
+  DECL_ARTIFICIAL (new_decl) = 1;
+  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
+
+  /* Generate a new name for the new version.  */
+  DECL_NAME (new_decl) =
+    create_tmp_var_name (NULL);
+  /* Create a new SYMBOL_REF rtx for the new name.  */
+  if (DECL_RTL (old_decl) != NULL)
+    {
+      SET_DECL_RTL (new_decl, copy_rtx (DECL_RTL (old_decl)));
+      XEXP (DECL_RTL (new_decl), 0) =
+	gen_rtx_SYMBOL_REF (GET_MODE (XEXP (DECL_RTL (old_decl), 0)),
+			    IDENTIFIER_POINTER (DECL_NAME (new_decl)));
+    }
+
+  /* Prepare the data structures for the tree copy.  */
+  memset (&id, 0, sizeof (id));
+
+  /* The new version.  */
+  id.node = new_version_node;
+
+  /* The old version.  */
+  id.current_node = cgraph_node (old_decl);
+
+  id.versioning_p = true;
+  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
+  id.caller = new_decl;
+  id.callee = old_decl;
+  id.callee_cfun = DECL_STRUCT_FUNCTION (old_decl);
+
+  current_function_decl = new_decl;
+
+  /* Copy the function's static chain.  */
+  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
+  if (p)
+    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
+      copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
+			 &id);
+  /* Copy the function's arguments.
+     */
+  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
+    DECL_ARGUMENTS (new_decl) =
+      copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id);
+
+  /* If there's a tree_map, prepare for substitution.  */
+  if (tree_map)
+    for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
+      {
+	replace_info = VARRAY_GENERIC_PTR (tree_map, i);
+	if (replace_info->replace_p && !replace_info->ref_p)
+	  insert_decl_map (&id, replace_info->old_tree,
+			   replace_info->new_tree);
+	else if (replace_info->replace_p && replace_info->ref_p)
+	  id.ipa_info = tree_map;
+      }
+
+  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.callee), &id);
+
+  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
+  number_blocks (id.caller);
+
+  if (DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list != NULL_TREE)
+    /* Add local vars.  */
+    for (t_step = DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list;
+	 t_step; t_step = TREE_CHAIN (t_step))
+      {
+	tree var = TREE_VALUE (t_step);
+	if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
+	  cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
+						 cfun->unexpanded_var_list);
+	else
+	  cfun->unexpanded_var_list =
+	    tree_cons (NULL_TREE, remap_decl (var, &id),
+		       cfun->unexpanded_var_list);
+      }
+
+  /* Copy the Function's body.  */
+  old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
+    (DECL_STRUCT_FUNCTION (old_decl));
+  new_fndecl = copy_body (&id,
+			  old_entry_block->count,
+			  old_entry_block->frequency, NULL, NULL);
+
+  DECL_SAVED_TREE (new_decl) = DECL_SAVED_TREE (new_fndecl);
+
+  DECL_STRUCT_FUNCTION (new_decl)->cfg =
+    DECL_STRUCT_FUNCTION (new_fndecl)->cfg;
+  DECL_STRUCT_FUNCTION (new_decl)->eh = DECL_STRUCT_FUNCTION (new_fndecl)->eh;
+  DECL_STRUCT_FUNCTION (new_decl)->ib_boundaries_block =
+    DECL_STRUCT_FUNCTION (new_fndecl)->ib_boundaries_block;
+  DECL_STRUCT_FUNCTION (new_decl)->last_label_uid =
+    DECL_STRUCT_FUNCTION (new_fndecl)->last_label_uid;
+
+  if (DECL_RESULT (old_decl) != NULL_TREE)
+    {
+      tree *res_decl = &DECL_RESULT (old_decl);
+      DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
+      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
+    }
+
+  current_function_decl = NULL;
+  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
+  number_blocks (new_decl);
+
+  /* Clean up.  */
+  splay_tree_delete (id.decl_map);
+  fold_cond_expr_cond ();
+  return;
+}
+
+/* Replace an INDIRECT_REF tree of a given DECL tree with a new
+   given tree.
+   ID->ipa_info keeps the old tree and the new tree.
+   TP points to the INDIRECT REF tree.  Return true if
+   the trees were replaced.  */
+static bool
+replace_ref_tree (inline_data * id, tree * tp)
+{
+  bool replaced = false;
+  tree new;
+
+  if (id->ipa_info && VARRAY_ACTIVE_SIZE (id->ipa_info) > 0)
+    {
+      unsigned i;
+
+      for (i = 0; i < VARRAY_ACTIVE_SIZE (id->ipa_info); i++)
+	{
+	  struct ipa_replace_map *replace_info;
+	  replace_info = VARRAY_GENERIC_PTR (id->ipa_info, i);
+
+	  if (replace_info->replace_p && replace_info->ref_p)
+	    {
+	      tree old_tree = replace_info->old_tree;
+	      tree new_tree = replace_info->new_tree;
+
+	      if (TREE_CODE (*tp) == INDIRECT_REF
+		  && TREE_OPERAND (*tp, 0) == old_tree)
+		{
+		  new = copy_node (new_tree);
+		  *tp = new;
+		  replaced = true;
+		}
+	    }
+	}
    }
+  return replaced;
+}
+
+/* Return true if we are inlining.  */
 static inline bool
-inlining_p (inline_data *id)
+inlining_p (inline_data * id)
 {
-  return (!id->saving_p && !id->cloning_p);
+  return (!id->saving_p && !id->cloning_p && !id->versioning_p);
 }
+
+/* Duplicate a type, fields and all.
+   */
+
+tree
+build_duplicate_type (tree type)
+{
+  inline_data id;
+
+  memset (&id, 0, sizeof (id));
+  id.callee = current_function_decl;
+  id.caller = current_function_decl;
+  id.callee_cfun = cfun;
+  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
+
+  type = remap_type_1 (type, &id);
+
+  splay_tree_delete (id.decl_map);
+
+  return type;
+}
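
Editor's note (not part of the patch): the bulk of this change introduces function versioning. tree_function_versioning duplicates a function body through the same copy_body machinery used for inlining, copy_decl_for_dup generalizes the old copy_decl_for_inlining, and replace_ref_tree substitutes an INDIRECT_REF of a parameter with a known value recorded in an ipa_replace_map (hence the new ipa-prop.h include). The sketch below illustrates, at the C source level, the effect this machinery is meant to achieve for a pass such as interprocedural constant propagation. It is a hand-written picture under assumed names -- compute, compute_v0, and g are hypothetical and appear nowhere in the patch -- not GCC code.

    /* Minimal sketch of what a "version" produced by
       tree_function_versioning conceptually looks like.
       All names are hypothetical.  */

    #include <stdio.h>

    static int g = 40;

    /* The original, fully general function.  */
    static int compute (int *p, int k)
    {
      return *p + k;
    }

    /* The specialized version an IPA pass could create when every
       caller passes &g and 2: the parameters are gone and *p has
       collapsed to g, the same *& elimination copy_body_r performs
       (INDIRECT_REF of an ADDR_EXPR) while copying the body.  */
    static int compute_v0 (void)
    {
      return g + 2;
    }

    int main (void)
    {
      /* The clone must be observationally equivalent at its call
         sites; both calls print 42.  */
      printf ("%d %d\n", compute (&g, 2), compute_v0 ());
      return 0;
    }

A design point worth noting in the patch itself: versioning reuses inlining's decl-remapping table (decl_map) and block remapping (remap_blocks) wholesale, which is why inline_data simply grows a versioning_p flag and an ipa_info field rather than gaining a separate body copier.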