/* Control and data flow functions for trees.
- Copyright 2001, 2002, 2003 Free Software Foundation, Inc.
+ Copyright 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
Contributed by Alexandre Oliva <aoliva@redhat.com>
This file is part of GCC.
#include "langhooks.h"
#include "cgraph.h"
#include "intl.h"
-
+#include "diagnostic.h"
+#include "function.h"
/* This should eventually be generalized to other languages, but
this would require a shared function-as-trees infrastructure. */
int in_target_cleanup_p;
/* A list of the functions current function has inlined. */
varray_type inlined_fns;
- /* The approximate number of instructions we have inlined in the
- current call stack. */
- int inlined_insns;
/* We use the same mechanism to build clones that we do to perform
inlining. However, there are a few places where we need to
distinguish between those two situations. This flag is true if
we are cloning, rather than inlining. */
bool cloning_p;
+ /* Similarly for saving function body. */
+ bool saving_p;
/* Hash table used to prevent walk_tree from visiting the same node
umpteen million times. */
htab_t tree_pruner;
- /* Decl of function we are inlining into. */
- tree decl;
- tree current_decl;
+ /* Callgraph node of function we are inlining into. */
+ struct cgraph_node *node;
+ /* Callgraph node of currently inlined function. */
+ struct cgraph_node *current_node;
} inline_data;
/* Prototypes. */
static tree expand_call_inline (tree *, int *, void *);
static void expand_calls_inline (tree *, inline_data *);
static bool inlinable_function_p (tree);
-static int limits_allow_inlining (tree, inline_data *);
static tree remap_decl (tree, inline_data *);
+static tree remap_type (tree, inline_data *);
#ifndef INLINER_FOR_JAVA
static tree initialize_inlined_parameters (inline_data *, tree, tree);
static void remap_block (tree, tree, inline_data *);
static tree add_stmt_to_compound (tree, tree, tree);
#endif /* INLINER_FOR_JAVA */
+/* Record a KEY -> VALUE mapping in ID's decl_map.  Despite the name
+   suggesting that the trees should be decls, the map is also used for
+   types, blocks, labels and other nodes.  */
+
+static void
+insert_decl_map (inline_data *id, tree key, tree value)
+{
+ splay_tree_insert (id->decl_map, (splay_tree_key) key,
+ (splay_tree_value) value);
+
+ /* Also insert an identity mapping for VALUE itself.  If we see this
+ same new node again, we won't want to duplicate it a second time.  */
+ if (key != value)
+ splay_tree_insert (id->decl_map, (splay_tree_key) value,
+ (splay_tree_value) value);
+}
+
/* Remap DECL during the copying of the BLOCK tree for the function. */
static tree
/* We only remap local variables in the current function. */
fn = VARRAY_TOP_TREE (id->fns);
- if (! (*lang_hooks.tree_inlining.auto_var_in_fn_p) (decl, fn))
+ if (! lang_hooks.tree_inlining.auto_var_in_fn_p (decl, fn))
return NULL_TREE;
/* See if we have remapped this declaration. */
n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
+
/* If we didn't already have an equivalent for this declaration,
create one now. */
if (!n)
tree t;
/* Make a copy of the variable or label. */
- t = copy_decl_for_inlining (decl, fn,
- VARRAY_TREE (id->fns, 0));
-
- /* The decl T could be a dynamic array or other variable size type,
- in which case some fields need to be remapped because they may
- contain SAVE_EXPRs. */
- if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE
- && TYPE_DOMAIN (TREE_TYPE (t)))
- {
- TREE_TYPE (t) = copy_node (TREE_TYPE (t));
- TYPE_DOMAIN (TREE_TYPE (t))
- = copy_node (TYPE_DOMAIN (TREE_TYPE (t)));
- walk_tree (&TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (t))),
- copy_body_r, id, NULL);
- }
+ t = copy_decl_for_inlining (decl, fn, VARRAY_TREE (id->fns, 0));
+
+ /* Remap types, if necessary. */
+ TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
+ if (TREE_CODE (t) == TYPE_DECL)
+ DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
+ else if (TREE_CODE (t) == PARM_DECL)
+ DECL_ARG_TYPE_AS_WRITTEN (t)
+ = remap_type (DECL_ARG_TYPE_AS_WRITTEN (t), id);
+
+ /* Remap sizes as necessary. */
+ walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
+ walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
#ifndef INLINER_FOR_JAVA
if (! DECL_NAME (t) && TREE_TYPE (t)
- && (*lang_hooks.tree_inlining.anon_aggr_type_p) (TREE_TYPE (t)))
+ && lang_hooks.tree_inlining.anon_aggr_type_p (TREE_TYPE (t)))
{
/* For a VAR_DECL of anonymous type, we must also copy the
- member VAR_DECLS here and rechain the
- DECL_ANON_UNION_ELEMS. */
+ member VAR_DECLS here and rechain the DECL_ANON_UNION_ELEMS. */
tree members = NULL;
tree src;
/* Remember it, so that if we encounter this local entity
again we can reuse this copy. */
- n = splay_tree_insert (id->decl_map,
- (splay_tree_key) decl,
- (splay_tree_value) t);
+ insert_decl_map (id, decl, t);
+ return t;
}
return (tree) n->value;
}
+/* Remap TYPE through ID->decl_map, returning the (possibly copied)
+   equivalent.  Only variably modified types need an actual copy;
+   everything else is registered as mapping to itself.  */
+
+static tree
+remap_type (tree type, inline_data *id)
+{
+ splay_tree_node node;
+ tree new, t;
+
+ if (type == NULL)
+ return type;
+
+ /* See if we have already remapped this type.  */
+ node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
+ if (node)
+ return (tree) node->value;
+
+ /* The type only needs remapping if it's variably modified.  */
+ if (! variably_modified_type_p (type))
+ {
+ insert_decl_map (id, type, type);
+ return type;
+ }
+
+ /* We do need a copy.  Build and register it now, before recursing,
+ so self-referential types terminate.  */
+ new = copy_node (type);
+ insert_decl_map (id, type, new);
+
+ /* This is a new type, not a copy of an old type.  Need to reassociate
+ variants.  We can handle everything except the main variant lazily.  */
+ t = TYPE_MAIN_VARIANT (type);
+ if (type != t)
+ {
+ t = remap_type (t, id);
+ TYPE_MAIN_VARIANT (new) = t;
+ /* NOTE(review): T is itself a main variant here, so
+ TYPE_MAIN_VARIANT (t) == t and the next two stores appear to
+ link T and NEW into a two-node cycle, dropping any existing
+ variants; later GCC uses TYPE_NEXT_VARIANT (t) on the first
+ store -- verify this is intended.  */
+ TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
+ TYPE_NEXT_VARIANT (t) = new;
+ }
+ else
+ {
+ /* TYPE was itself a main variant; the copy becomes its own
+ main variant with no other variants yet.  */
+ TYPE_MAIN_VARIANT (new) = new;
+ TYPE_NEXT_VARIANT (new) = NULL;
+ }
+
+ /* Lazily create pointer and reference types.  */
+ TYPE_POINTER_TO (new) = NULL;
+ TYPE_REFERENCE_TO (new) = NULL;
+
+ switch (TREE_CODE (new))
+ {
+ case INTEGER_TYPE:
+ case REAL_TYPE:
+ case ENUMERAL_TYPE:
+ case BOOLEAN_TYPE:
+ case CHAR_TYPE:
+ /* Only non-constant bounds can contain expressions that need
+ remapping.  */
+ t = TYPE_MIN_VALUE (new);
+ if (t && TREE_CODE (t) != INTEGER_CST)
+ walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);
+
+ t = TYPE_MAX_VALUE (new);
+ if (t && TREE_CODE (t) != INTEGER_CST)
+ walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
+ return new;
+
+ case POINTER_TYPE:
+ TREE_TYPE (new) = t = remap_type (TREE_TYPE (new), id);
+ TYPE_NEXT_PTR_TO (new) = TYPE_POINTER_TO (t);
+ TYPE_POINTER_TO (t) = new;
+ return new;
+
+ case REFERENCE_TYPE:
+ TREE_TYPE (new) = t = remap_type (TREE_TYPE (new), id);
+ TYPE_NEXT_REF_TO (new) = TYPE_REFERENCE_TO (t);
+ TYPE_REFERENCE_TO (t) = new;
+ return new;
+
+ case METHOD_TYPE:
+ case FUNCTION_TYPE:
+ TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
+ walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
+ return new;
+
+ case ARRAY_TYPE:
+ TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
+ TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
+ break;
+
+ case RECORD_TYPE:
+ case UNION_TYPE:
+ case QUAL_UNION_TYPE:
+ walk_tree (&TYPE_FIELDS (new), copy_body_r, id, NULL);
+ break;
+
+ case FILE_TYPE:
+ case SET_TYPE:
+ case OFFSET_TYPE:
+ default:
+ /* Shouldn't have been thought variably sized.  */
+ abort ();
+ }
+
+ /* Array and aggregate types fall through here so their size
+ expressions are remapped as well.  */
+ walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
+ walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);
+
+ return new;
+}
+
#ifndef INLINER_FOR_JAVA
/* Copy the SCOPE_STMT_BLOCK associated with SCOPE_STMT to contain
remapped versions of the variables therein. And hook the new block
/* We're building a clone; DECL_INITIAL is still
error_mark_node, and current_binding_level is the parm
binding level. */
- (*lang_hooks.decls.insert_block) (new_block);
+ lang_hooks.decls.insert_block (new_block);
else
{
/* Attach this new block after the DECL_INITIAL block for the
*first_block = new_block;
}
/* Remember the remapped block. */
- splay_tree_insert (id->decl_map,
- (splay_tree_key) old_block,
- (splay_tree_value) new_block);
+ insert_decl_map (id, old_block, new_block);
}
/* If this is the end of a scope, set the SCOPE_STMT_BLOCK to be the
remapped block. */
/* If this is a RETURN_STMT, change it into an EXPR_STMT and a
GOTO_STMT with the RET_LABEL as its target. */
#ifndef INLINER_FOR_JAVA
- if (TREE_CODE (*tp) == RETURN_STMT && id->ret_label)
+ if (TREE_CODE (*tp) == RETURN_STMT && id->ret_label && !id->saving_p)
#else /* INLINER_FOR_JAVA */
- if (TREE_CODE (*tp) == RETURN_EXPR && id->ret_label)
+ if (TREE_CODE (*tp) == RETURN_EXPR && id->ret_label && !id->saving_p)
#endif /* INLINER_FOR_JAVA */
{
tree return_stmt = *tp;
/* If we're not returning anything just do the jump. */
else
*tp = goto_stmt;
+
+ /* We can't replace return label while inlining function
+ because it is in the outer function. */
+ insert_decl_map (id, id->ret_label, id->ret_label);
}
/* Local variables and labels need to be replaced by equivalent
variables. We don't want to copy static variables; there's only
one of those, no matter how many times we inline the containing
function. */
- else if ((*lang_hooks.tree_inlining.auto_var_in_fn_p) (*tp, fn))
+ else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
{
tree new_decl;
will refer to it, so save a copy ready for remapping. We
save it in the decl_map, although it isn't a decl. */
tree new_block = copy_node (*tp);
- splay_tree_insert (id->decl_map,
- (splay_tree_key) *tp,
- (splay_tree_value) new_block);
+ insert_decl_map (id, *tp, new_block);
*tp = new_block;
}
else if (TREE_CODE (*tp) == EXIT_BLOCK_EXPR)
TREE_OPERAND (*tp, 0) = (tree) n->value;
}
#endif /* INLINER_FOR_JAVA */
+ /* Types may need remapping as well. */
+ else if (TYPE_P (*tp))
+ *tp = remap_type (*tp, id);
+
/* Otherwise, just copy the node. Note that copy_tree_r already
knows not to copy VAR_DECLs, etc., so this is safe. */
else
{
+ tree old_node = *tp;
+
if (TREE_CODE (*tp) == MODIFY_EXPR
&& TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
- && ((*lang_hooks.tree_inlining.auto_var_in_fn_p)
+ && (lang_hooks.tree_inlining.auto_var_in_fn_p
(TREE_OPERAND (*tp, 0), fn)))
{
/* Some assignments VAR = VAR; don't generate any rtl code
}
}
else if (TREE_CODE (*tp) == ADDR_EXPR
- && ((*lang_hooks.tree_inlining.auto_var_in_fn_p)
+ && (lang_hooks.tree_inlining.auto_var_in_fn_p
(TREE_OPERAND (*tp, 0), fn)))
{
/* Get rid of &* from inline substitutions. It can occur when
copy_tree_r (tp, walk_subtrees, NULL);
+ if (TREE_CODE (*tp) == CALL_EXPR && id->node && get_callee_fndecl (*tp))
+ {
+ if (id->saving_p)
+ {
+ struct cgraph_node *node;
+ struct cgraph_edge *edge;
+
+ for (node = id->node->next_clone; node; node = node->next_clone)
+ {
+ edge = cgraph_edge (node, old_node);
+ if (edge)
+ edge->call_expr = *tp;
+ else
+ abort ();
+ }
+ }
+ else if (!id->cloning_p)
+ {
+ struct cgraph_edge *edge;
+
+ edge = cgraph_edge (id->current_node, old_node);
+ if (edge)
+ cgraph_clone_edge (edge, id->node, *tp);
+ }
+ }
+
+ TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
+
/* The copied TARGET_EXPR has never been expanded, even if the
original node was expanded already. */
if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
copy_body (inline_data *id)
{
tree body;
+ tree fndecl = VARRAY_TOP_TREE (id->fns);
- body = DECL_SAVED_TREE (VARRAY_TOP_TREE (id->fns));
+ if (fndecl == current_function_decl
+ && cfun->saved_tree)
+ body = cfun->saved_tree;
+ else
+ body = DECL_SAVED_TREE (fndecl);
walk_tree (&body, copy_body_r, id, NULL);
return body;
#ifdef INLINER_FOR_JAVA
tree vars = NULL_TREE;
#endif /* INLINER_FOR_JAVA */
+ int argnum = 0;
/* Figure out what the parameters are. */
parms = DECL_ARGUMENTS (fn);
+ if (fn == current_function_decl && cfun->saved_args)
+ parms = cfun->saved_args;
/* Start with no initializations whatsoever. */
init_stmts = NULL_TREE;
tree value;
tree var_sub;
+ ++argnum;
+
/* Find the initializer. */
- value = (*lang_hooks.tree_inlining.convert_parm_for_inlining)
- (p, a ? TREE_VALUE (a) : NULL_TREE, fn);
+ value = lang_hooks.tree_inlining.convert_parm_for_inlining
+ (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);
/* If the parameter is never assigned to, we may not need to
create a new variable here at all. Instead, we may be able
else if (TREE_TYPE (value) != TREE_TYPE (p))
value = fold (build1 (NOP_EXPR, TREE_TYPE (p), value));
- splay_tree_insert (id->decl_map,
- (splay_tree_key) p,
- (splay_tree_value) value);
+ insert_decl_map (id, p, value);
continue;
}
}
/* Register the VAR_DECL as the equivalent for the PARM_DECL;
that way, when the PARM_DECL is encountered, it will be
automatically replaced by the VAR_DECL. */
- splay_tree_insert (id->decl_map,
- (splay_tree_key) p,
- (splay_tree_value) var_sub);
+ insert_decl_map (id, p, var_sub);
/* Declare this new variable. */
#ifndef INLINER_FOR_JAVA
}
/* See if we need to clean up the declaration. */
- cleanup = (*lang_hooks.maybe_build_cleanup) (var);
+ cleanup = lang_hooks.maybe_build_cleanup (var);
if (cleanup)
{
tree cleanup_stmt;
}
#ifndef INLINER_FOR_JAVA
- var = ((*lang_hooks.tree_inlining.copy_res_decl_for_inlining)
+ var = (lang_hooks.tree_inlining.copy_res_decl_for_inlining
(result, fn, VARRAY_TREE (id->fns, 0), id->decl_map,
&need_return_decl, return_slot_addr));
/* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
way, when the RESULT_DECL is encountered, it will be
automatically replaced by the VAR_DECL. */
- splay_tree_insert (id->decl_map,
- (splay_tree_key) result,
- (splay_tree_value) var);
+ insert_decl_map (id, result, var);
/* Build the USE_STMT. If the return type of the function was
promoted, convert it back to the expected type. */
if (need_return_decl)
return build_stmt (DECL_STMT, var);
#else /* INLINER_FOR_JAVA */
- *var = ((*lang_hooks.tree_inlining.copy_res_decl_for_inlining)
+ *var = (lang_hooks.tree_inlining.copy_res_decl_for_inlining
(result, fn, VARRAY_TREE (id->fns, 0), id->decl_map,
&need_return_decl, return_slot_addr));
switch (TREE_CODE (node))
{
case CALL_EXPR:
- /* Refuse to inline alloca call unless user explicitly forced so as this
- may change program's memory overhead drastically when the function
- using alloca is called in loop. In GCC present in SPEC2000 inlining
- into schedule_block cause it to require 2GB of ram instead of 256MB. */
+ /* Refuse to inline alloca call unless user explicitly forced so as
+ this may change program's memory overhead drastically when the
+ function using alloca is called in loop. In GCC present in
+ SPEC2000 inlining into schedule_block cause it to require 2GB of
+ RAM instead of 256MB. */
if (alloca_call_p (node)
&& !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
{
return node;
}
- switch (DECL_FUNCTION_CODE (t))
- {
- /* We cannot inline functions that take a variable number of
- arguments. */
- case BUILT_IN_VA_START:
- case BUILT_IN_STDARG_START:
+ if (DECL_BUILT_IN (t))
+ switch (DECL_FUNCTION_CODE (t))
{
- inline_forbidden_reason
- = N_("%Jfunction '%F' can never be inlined because it "
- "uses variable argument lists");
- return node;
- }
- case BUILT_IN_LONGJMP:
- {
- /* We can't inline functions that call __builtin_longjmp at all.
- The non-local goto machinery really requires the destination
- be in a different function. If we allow the function calling
- __builtin_longjmp to be inlined into the function calling
- __builtin_setjmp, Things will Go Awry. */
- /* ??? Need front end help to identify "regular" non-local goto. */
- if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
- {
- inline_forbidden_reason
- = N_("%Jfunction '%F' can never be inlined "
- "because it uses setjmp-longjmp exception handling");
- return node;
- }
- }
+ /* We cannot inline functions that take a variable number of
+ arguments. */
+ case BUILT_IN_VA_START:
+ case BUILT_IN_STDARG_START:
+ case BUILT_IN_NEXT_ARG:
+ case BUILT_IN_VA_END:
+ {
+ inline_forbidden_reason
+ = N_("%Jfunction '%F' can never be inlined because it "
+ "uses variable argument lists");
+ return node;
+ }
+ case BUILT_IN_LONGJMP:
+ {
+ /* We can't inline functions that call __builtin_longjmp at
+ all. The non-local goto machinery really requires the
+ destination be in a different function. If we allow the
+ function calling __builtin_longjmp to be inlined into the
+ function calling __builtin_setjmp, Things will Go Awry. */
+ /* ??? Need front end help to identify "regular" non-local
+ goto. */
+ if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
+ {
+ inline_forbidden_reason
+ = N_("%Jfunction '%F' can never be inlined because "
+ "it uses setjmp-longjmp exception handling");
+ return node;
+ }
+ }
- default:
- break;
- }
+ default:
+ break;
+ }
break;
#ifndef INLINER_FOR_JAVA
in C++ it may result in template instantiation.)
If the function is not inlinable for language-specific reasons,
it is left up to the langhook to explain why. */
- inlinable = !(*lang_hooks.tree_inlining.cannot_inline_tree_fn) (&fn);
+ inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);
/* If we don't have the function body available, we can't inline it.
However, this should not be recorded since we also get here for
&& DECL_DECLARED_INLINE_P (fn)
&& !DECL_IN_SYSTEM_HEADER (fn));
- if (do_warning)
+ if (lookup_attribute ("always_inline",
+ DECL_ATTRIBUTES (fn)))
+ sorry (inline_forbidden_reason, fn, fn);
+ else if (do_warning)
warning (inline_forbidden_reason, fn, fn);
inlinable = false;
return inlinable;
}
-/* We can't inline functions that are too big. Only allow a single
- function to be of MAX_INLINE_INSNS_SINGLE size. Make special
- allowance for extern inline functions, though.
-
- Return nonzero if the function FN can be inlined into the inlining
- context ID. */
-
-static int
-limits_allow_inlining (tree fn, inline_data *id)
-{
- int estimated_insns = 0;
- size_t i;
-
- /* Don't even bother if the function is not inlinable. */
- if (!inlinable_function_p (fn))
- return 0;
-
- /* Investigate the size of the function. Return at once
- if the function body size is too large. */
- if (!(*lang_hooks.tree_inlining.disregard_inline_limits) (fn))
- {
- int currfn_max_inline_insns;
-
- /* If we haven't already done so, get an estimate of the number of
- instructions that will be produces when expanding this function. */
- if (!DECL_ESTIMATED_INSNS (fn))
- DECL_ESTIMATED_INSNS (fn)
- = (*lang_hooks.tree_inlining.estimate_num_insns) (fn);
- estimated_insns = DECL_ESTIMATED_INSNS (fn);
-
- /* We may be here either because fn is declared inline or because
- we use -finline-functions. For the second case, we are more
- restrictive.
-
- FIXME: -finline-functions should imply -funit-at-a-time, it's
- about equally expensive but unit-at-a-time produces
- better code. */
- currfn_max_inline_insns = DECL_DECLARED_INLINE_P (fn) ?
- MAX_INLINE_INSNS_SINGLE : MAX_INLINE_INSNS_AUTO;
-
- /* If the function is too big to be inlined, adieu. */
- if (estimated_insns > currfn_max_inline_insns)
- return 0;
-
- /* We now know that we don't disregard the inlining limits and that
- we basically should be able to inline this function.
- We always allow inlining functions if we estimate that they are
- smaller than MIN_INLINE_INSNS. Otherwise, investigate further. */
- if (estimated_insns > MIN_INLINE_INSNS)
- {
- int sum_insns = (id ? id->inlined_insns : 0) + estimated_insns;
-
- /* In the extreme case that we have exceeded the recursive inlining
- limit by a huge factor (128), we just say no.
-
- FIXME: Should not happen in real life, but people have reported
- that it actually does!? */
- if (sum_insns > MAX_INLINE_INSNS * 128)
- return 0;
-
- /* If we did not hit the extreme limit, we use a linear function
- with slope -1/MAX_INLINE_SLOPE to exceedingly decrease the
- allowable size. */
- else if (sum_insns > MAX_INLINE_INSNS)
- {
- if (estimated_insns > currfn_max_inline_insns
- - (sum_insns - MAX_INLINE_INSNS) / MAX_INLINE_SLOPE)
- return 0;
- }
- }
- }
-
- /* Don't allow recursive inlining. */
- for (i = 0; i < VARRAY_ACTIVE_SIZE (id->fns); ++i)
- if (VARRAY_TREE (id->fns, i) == fn)
- return 0;
-
- if (DECL_INLINED_FNS (fn))
- {
- int j;
- tree inlined_fns = DECL_INLINED_FNS (fn);
-
- for (j = 0; j < TREE_VEC_LENGTH (inlined_fns); ++j)
- if (TREE_VEC_ELT (inlined_fns, j) == VARRAY_TREE (id->fns, 0))
- return 0;
- }
-
- /* Go ahead, this function can be inlined. */
- return 1;
-}
-
/* If *TP is a CALL_EXPR, replace it with its inline expansion. */
static tree
splay_tree st;
tree args;
tree return_slot_addr;
+ struct cgraph_edge *edge;
+ const char *reason;
/* See what we've got. */
id = (inline_data *) data;
return NULL_TREE;
/* Turn forward declarations into real ones. */
- if (flag_unit_at_a_time)
- fn = cgraph_node (fn)->decl;
+ fn = cgraph_node (fn)->decl;
/* If fn is a declaration of a function in a nested scope that was
globally declared inline, we don't set its DECL_INITIAL.
&& DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
fn = DECL_ABSTRACT_ORIGIN (fn);
+ /* Objective C and Fortran still call tree_rest_of_compilation directly.
+ Kill this check once this is fixed.  */
+ if (!id->current_node->analyzed)
+ return NULL_TREE;
+
+ edge = cgraph_edge (id->current_node, t);
+
+ /* Constant propagation on argument done during previous inlining
+ may create new direct call. Produce an edge for it. */
+ if (!edge)
+ {
+ struct cgraph_node *dest = cgraph_node (fn);
+
+ /* FN must have address taken so it can be passed as argument. */
+ if (!dest->needed)
+ abort ();
+ cgraph_create_edge (id->node, dest, t)->inline_failed
+ = N_("originally indirect function call not considered for inlining");
+ return NULL_TREE;
+ }
+
/* Don't try to inline functions that are not well-suited to
inlining. */
- if ((flag_unit_at_a_time
- && (!DECL_SAVED_TREE (fn) || !cgraph_inline_p (id->current_decl, fn)))
- || (!flag_unit_at_a_time && !limits_allow_inlining (fn, id)))
+ if (!cgraph_inline_p (edge, &reason))
{
- if (warn_inline && DECL_INLINE (fn) && DECL_DECLARED_INLINE_P (fn)
- && !DECL_IN_SYSTEM_HEADER (fn))
+ if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
+ {
+ sorry ("%Jinlining failed in call to '%F': %s", fn, fn, reason);
+ sorry ("called from here");
+ }
+ else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
+ && !DECL_IN_SYSTEM_HEADER (fn)
+ && strlen (reason))
{
- warning ("%Jinlining failed in call to '%F'", fn, fn);
+ warning ("%Jinlining failed in call to '%F': %s", fn, fn, reason);
warning ("called from here");
}
return NULL_TREE;
}
- if (! (*lang_hooks.tree_inlining.start_inlining) (fn))
+#ifdef ENABLE_CHECKING
+ if (edge->callee->decl != id->node->decl)
+ verify_cgraph_node (edge->callee);
+#endif
+
+ if (! lang_hooks.tree_inlining.start_inlining (fn))
return NULL_TREE;
/* Set the current filename and line number to the function we are
line numbers corresponding to the function we are calling. We
wrap the whole inlined body in an EXPR_WITH_FILE_AND_LINE as well
because individual statements don't record the filename. */
- push_srcloc (TREE_FILENAME (fn), TREE_LINENO (fn));
+ push_srcloc (DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn));
#ifndef INLINER_FOR_JAVA
/* Build a statement-expression containing code to initialize the
statements within the function to jump to. The type of the
statement expression is the return type of the function call. */
stmt = NULL;
- expr = build (BLOCK, TREE_TYPE (TREE_TYPE (fn)), stmt);
+ expr = build (BLOCK, TREE_TYPE (TREE_TYPE (fn)));
#endif /* INLINER_FOR_JAVA */
/* Local declarations will be replaced by their equivalents in this
/* After we've initialized the parameters, we insert the body of the
function itself. */
+ {
+ struct cgraph_node *old_node = id->current_node;
+
+ id->current_node = edge->callee;
#ifndef INLINER_FOR_JAVA
- inlined_body = &COMPOUND_BODY (stmt);
- while (*inlined_body)
- inlined_body = &TREE_CHAIN (*inlined_body);
- *inlined_body = copy_body (id);
+ inlined_body = &COMPOUND_BODY (stmt);
+ while (*inlined_body)
+ inlined_body = &TREE_CHAIN (*inlined_body);
+ *inlined_body = copy_body (id);
#else /* INLINER_FOR_JAVA */
- {
- tree new_body;
- java_inlining_map_static_initializers (fn, id->decl_map);
- new_body = copy_body (id);
- TREE_TYPE (new_body) = TREE_TYPE (TREE_TYPE (fn));
- BLOCK_EXPR_BODY (expr)
- = add_stmt_to_compound (BLOCK_EXPR_BODY (expr),
- TREE_TYPE (new_body), new_body);
- inlined_body = &BLOCK_EXPR_BODY (expr);
- }
+ {
+ tree new_body;
+ java_inlining_map_static_initializers (fn, id->decl_map);
+ new_body = copy_body (id);
+ TREE_TYPE (new_body) = TREE_TYPE (TREE_TYPE (fn));
+ BLOCK_EXPR_BODY (expr)
+ = add_stmt_to_compound (BLOCK_EXPR_BODY (expr),
+ TREE_TYPE (new_body), new_body);
+ inlined_body = &BLOCK_EXPR_BODY (expr);
+ }
#endif /* INLINER_FOR_JAVA */
+ id->current_node = old_node;
+ }
/* After the body of the function comes the RET_LABEL. This must come
before we evaluate the returned value below, because that evaluation
#ifndef INLINER_FOR_JAVA
chain = TREE_CHAIN (*tp);
#endif /* INLINER_FOR_JAVA */
- *tp = build_expr_wfl (expr, TREE_FILENAME (fn), TREE_LINENO (fn),
+ *tp = build_expr_wfl (expr, DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn),
/*col=*/0);
EXPR_WFL_EMIT_LINE_NOTE (*tp) = 1;
#ifndef INLINER_FOR_JAVA
the equivalent inlined version either. */
TREE_USED (*tp) = 1;
- /* Our function now has more statements than it did before. */
- DECL_ESTIMATED_INSNS (VARRAY_TREE (id->fns, 0)) += DECL_ESTIMATED_INSNS (fn);
- /* For accounting, subtract one for the saved call/ret. */
- id->inlined_insns += DECL_ESTIMATED_INSNS (fn) - 1;
-
/* Update callgraph if needed. */
- if (id->decl && flag_unit_at_a_time)
- {
- cgraph_remove_call (id->decl, fn);
- cgraph_create_edges (id->decl, *inlined_body);
- }
+ cgraph_remove_node (edge->callee);
/* Recurse into the body of the just inlined function. */
- {
- tree old_decl = id->current_decl;
- id->current_decl = fn;
- expand_calls_inline (inlined_body, id);
- id->current_decl = old_decl;
- }
+ expand_calls_inline (inlined_body, id);
VARRAY_POP (id->fns);
- /* If we've returned to the top level, clear out the record of how
- much inlining has been done. */
- if (VARRAY_ACTIVE_SIZE (id->fns) == id->first_inlined_fn)
- id->inlined_insns = 0;
-
/* Don't walk into subtrees. We've already handled them above. */
*walk_subtrees = 0;
- (*lang_hooks.tree_inlining.end_inlining) (fn);
+ lang_hooks.tree_inlining.end_inlining (fn);
/* Keep iterating. */
return NULL_TREE;
inline_data id;
tree prev_fn;
+ /* There is no point in performing inlining if errors have already
+ occurred -- and we might crash if we try to inline invalid
+ code. */
+ if (errorcount || sorrycount)
+ return;
+
/* Clear out ID. */
memset (&id, 0, sizeof (id));
- id.decl = fn;
- id.current_decl = fn;
+ id.current_node = id.node = cgraph_node (fn);
/* Don't allow recursion into FN. */
VARRAY_TREE_INIT (id.fns, 32, "fns");
VARRAY_PUSH_TREE (id.fns, fn);
- if (!DECL_ESTIMATED_INSNS (fn))
- DECL_ESTIMATED_INSNS (fn)
- = (*lang_hooks.tree_inlining.estimate_num_insns) (fn);
/* Or any functions that aren't finished yet. */
prev_fn = NULL_TREE;
if (current_function_decl)
prev_fn = current_function_decl;
}
- prev_fn = ((*lang_hooks.tree_inlining.add_pending_fn_decls)
+ prev_fn = (lang_hooks.tree_inlining.add_pending_fn_decls
(&id.fns, prev_fn));
/* Create the list of functions this call will inline. */
VARRAY_ACTIVE_SIZE (id.inlined_fns) * sizeof (tree));
DECL_INLINED_FNS (fn) = ifn;
}
+#ifdef ENABLE_CHECKING
+ {
+ struct cgraph_edge *e;
+
+ verify_cgraph_node (id.node);
+
+ /* Double check that we inlined everything we are supposed to inline. */
+ for (e = id.node->callees; e; e = e->next_callee)
+ if (!e->inline_failed)
+ abort ();
+ }
+#endif
}
/* FN is a function that has a complete body, and CLONE is a function
TREE_CHAIN (DECL_SAVED_TREE (clone)) = copy_body (&id);
}
+/* Make and return a duplicate of the body of FN, for later reuse
+   (e.g. after expansion consumes the original).  On return *ARG_COPY
+   points to a freshly copied PARM_DECL chain mirroring
+   DECL_ARGUMENTS (FN); the splay tree built here maps each original
+   parm to its copy so argument references can be updated when the
+   body is restored.  */
+tree
+save_body (tree fn, tree *arg_copy)
+{
+ inline_data id;
+ tree body, *parg;
+
+ memset (&id, 0, sizeof (id));
+ VARRAY_TREE_INIT (id.fns, 1, "fns");
+ VARRAY_PUSH_TREE (id.fns, fn);
+ id.node = cgraph_node (fn);
+ id.saving_p = true;
+ id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
+ *arg_copy = DECL_ARGUMENTS (fn);
+ /* Replace each PARM_DECL in the copied chain with a duplicate and
+ record the mapping, so references inside the copied body are
+ redirected to the copies.  */
+ for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
+ {
+ tree new = copy_node (*parg);
+ lang_hooks.dup_lang_specific_decl (new);
+ DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*parg);
+ insert_decl_map (&id, *parg, new);
+ TREE_CHAIN (new) = TREE_CHAIN (*parg);
+ *parg = new;
+ }
+ /* The RESULT_DECL is shared with the saved body, not copied.  */
+ insert_decl_map (&id, DECL_RESULT (fn), DECL_RESULT (fn));
+
+ /* Actually copy the body.  */
+ body = copy_body (&id);
+ if (lang_hooks.update_decl_after_saving)
+ lang_hooks.update_decl_after_saving (fn, id.decl_map);
+
+ /* Clean up.  */
+ splay_tree_delete (id.decl_map);
+ return body;
+}
+
/* Apply FUNC to all the sub-trees of TP in a pre-order traversal.
FUNC is called with the DATA and the address of each sub-tree. If
FUNC returns a non-NULL value, the traversal is aborted, and the
if (!walk_subtrees)
{
if (STATEMENT_CODE_P (code) || code == TREE_LIST
- || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp))
+ || lang_hooks.tree_inlining.tree_chain_matters_p (*tp))
/* But we still need to check our siblings. */
WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
else
WALK_SUBTREE (DECL_INITIAL (DECL_STMT_DECL (*tp)));
WALK_SUBTREE (DECL_SIZE (DECL_STMT_DECL (*tp)));
WALK_SUBTREE (DECL_SIZE_UNIT (DECL_STMT_DECL (*tp)));
+ WALK_SUBTREE (TREE_TYPE (*tp));
}
/* This can be tail-recursion optimized if we write it this way. */
/* Also examine various special fields, below. */
}
- result = (*lang_hooks.tree_inlining.walk_subtrees) (tp, &walk_subtrees, func,
- data, htab);
+ result = lang_hooks.tree_inlining.walk_subtrees (tp, &walk_subtrees, func,
+ data, htab);
if (result || ! walk_subtrees)
return result;
case BLOCK:
case RECORD_TYPE:
case CHAR_TYPE:
- /* None of thse have subtrees other than those already walked
+ case PLACEHOLDER_EXPR:
+ /* None of these have subtrees other than those already walked
above. */
break;
|| TREE_CODE_CLASS (code) == 'c'
|| code == TREE_LIST
|| code == TREE_VEC
- || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp))
+ || code == TYPE_DECL
+ || lang_hooks.tree_inlining.tree_chain_matters_p (*tp))
{
/* Because the chain gets clobbered when we make a copy, we save it
here. */
walk_tree to walk into the chain as well. */
if (code == PARM_DECL || code == TREE_LIST
#ifndef INLINER_FOR_JAVA
- || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp)
+ || lang_hooks.tree_inlining.tree_chain_matters_p (*tp)
|| STATEMENT_CODE_P (code))
TREE_CHAIN (*tp) = chain;
if (TREE_CODE (*tp) == SCOPE_STMT)
SCOPE_STMT_BLOCK (*tp) = NULL_TREE;
#else /* INLINER_FOR_JAVA */
- || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp))
+ || lang_hooks.tree_inlining.tree_chain_matters_p (*tp))
TREE_CHAIN (*tp) = chain;
#endif /* INLINER_FOR_JAVA */
}
- else if (TREE_CODE_CLASS (code) == 't' && !variably_modified_type_p (*tp))
- /* Types only need to be copied if they are variably modified. */
+ else if (TREE_CODE_CLASS (code) == 't')
*walk_subtrees = 0;
return NULL_TREE;
{
splay_tree st = (splay_tree) st_;
splay_tree_node n;
+ tree t;
/* See if we already encountered this SAVE_EXPR. */
n = splay_tree_lookup (st, (splay_tree_key) *tp);
/* If we didn't already remap this SAVE_EXPR, do so now. */
if (!n)
{
- tree t = copy_node (*tp);
+ t = copy_node (*tp);
/* The SAVE_EXPR is now part of the function into which we
are inlining this body. */
/* And we haven't evaluated it yet. */
SAVE_EXPR_RTL (t) = NULL_RTX;
/* Remember this SAVE_EXPR. */
- n = splay_tree_insert (st,
- (splay_tree_key) *tp,
- (splay_tree_value) t);
+ splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
/* Make sure we don't remap an already-remapped SAVE_EXPR. */
- splay_tree_insert (st, (splay_tree_key) t,
- (splay_tree_value) error_mark_node);
+ splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
}
else
- /* We've already walked into this SAVE_EXPR, so we needn't do it
- again. */
- *walk_subtrees = 0;
+ {
+ /* We've already walked into this SAVE_EXPR; don't do it again. */
+ *walk_subtrees = 0;
+ t = (tree) n->value;
+ }
/* Replace this SAVE_EXPR with the copy. */
- *tp = (tree) n->value;
+ *tp = t;
}
#ifdef INLINER_FOR_JAVA