/* Tree inlining.
- Copyright 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
+ Copyright 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
Contributed by Alexandre Oliva <aoliva@redhat.com>
This file is part of GCC.
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
+#include "tree-flow.h"
#include "function.h"
#include "diagnostic.h"
+#include "debug.h"
/* I'm not real happy about this, but we need to handle gimple and
non-gimple trees. */
/* Nonzero if we are currently within the cleanup for a
TARGET_EXPR. */
int in_target_cleanup_p;
- /* A list of the functions current function has inlined. */
- varray_type inlined_fns;
/* We use the same mechanism to build clones that we do to perform
inlining. However, there are a few places where we need to
distinguish between those two situations. This flag is true if
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree bind_expr, tree vars);
+static void remap_save_expr (tree *, void *, int *);
/* Insert a tree->tree mapping for ID. Despite the name suggests
that the trees should be variables, it is used for more than that. */
break;
case FILE_TYPE:
- case SET_TYPE:
case OFFSET_TYPE:
default:
/* Shouldn't have been thought variable sized. */
/* Replace this variable with the copy. */
STRIP_TYPE_NOPS (new_decl);
*tp = new_decl;
+ *walk_subtrees = 0;
}
else if (TREE_CODE (*tp) == STATEMENT_LIST)
copy_statement_list (tp);
remap_save_expr (tp, id->decl_map, walk_subtrees);
else if (TREE_CODE (*tp) == BIND_EXPR)
copy_bind_expr (tp, walk_subtrees, id);
- else if (TREE_CODE (*tp) == LABELED_BLOCK_EXPR)
- {
- /* We need a new copy of this labeled block; the EXIT_BLOCK_EXPR
- will refer to it, so save a copy ready for remapping. We
- save it in the decl_map, although it isn't a decl. */
- tree new_block = copy_node (*tp);
- insert_decl_map (id, *tp, new_block);
- *tp = new_block;
- }
- else if (TREE_CODE (*tp) == EXIT_BLOCK_EXPR)
- {
- splay_tree_node n
- = splay_tree_lookup (id->decl_map,
- (splay_tree_key) TREE_OPERAND (*tp, 0));
- /* We _must_ have seen the enclosing LABELED_BLOCK_EXPR. */
- gcc_assert (n);
- *tp = copy_node (*tp);
- TREE_OPERAND (*tp, 0) = (tree) n->value;
- }
/* Types may need remapping as well. */
else if (TYPE_P (*tp))
*tp = remap_type (*tp, id);
+ /* If this is a constant, we have to copy the node iff the type will be
+ remapped. copy_tree_r will not copy a constant. */
+ else if (TREE_CODE_CLASS (TREE_CODE (*tp)) == tcc_constant)
+ {
+ tree new_type = remap_type (TREE_TYPE (*tp), id);
+
+ if (new_type == TREE_TYPE (*tp))
+ *walk_subtrees = 0;
+
+ else if (TREE_CODE (*tp) == INTEGER_CST)
+ *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
+ TREE_INT_CST_HIGH (*tp));
+ else
+ {
+ *tp = copy_node (*tp);
+ TREE_TYPE (*tp) = new_type;
+ }
+ }
+
/* Otherwise, just copy the node. Note that copy_tree_r already
knows not to copy VAR_DECLs, etc., so this is safe. */
else
STRIP_TYPE_NOPS (value);
if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
{
- *tp = value;
+ *tp = build_empty_stmt ();
return copy_body_r (tp, walk_subtrees, data);
}
}
return body;
}
+/* Return true if VALUE is an ADDR_EXPR of an automatic variable
+   defined in function FN, or of a data member thereof.  */
+
+static bool
+self_inlining_addr_expr (tree value, tree fn)
+{
+  tree var;
+
+  /* Only ADDR_EXPRs can name a variable's storage; anything else
+     cannot be a self-referential address.  */
+  if (TREE_CODE (value) != ADDR_EXPR)
+    return false;
+
+  /* NOTE(review): presumably strips handled components (COMPONENT_REF,
+     ARRAY_REF, ...) down to the underlying decl — confirm against the
+     get_base_address documentation.  */
+  var = get_base_address (TREE_OPERAND (value, 0));
+
+  /* A NULL base (no underlying decl found) yields false; otherwise ask
+     the front end whether VAR is an automatic variable of FN.  */
+  return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
+}
+
static void
setup_one_parameter (inline_data *id, tree p, tree value, tree fn,
tree *init_stmts, tree *vars, bool *gimplify_init_stmts_p)
It is not big deal to prohibit constant propagation here as
we will constant propagate in DOM1 pass anyway. */
if (is_gimple_min_invariant (value)
- && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p)))
+ && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
+ /* We have to be very careful about ADDR_EXPR. Make sure
+ the base variable isn't a local variable of the inlined
+ function, e.g., when doing recursive inlining, direct or
+ mutually-recursive or whatever, which is why we don't
+ just test whether fn == current_function_decl. */
+ && ! self_inlining_addr_expr (value, fn))
{
insert_decl_map (id, p, value);
return;
/* Initialize the static chain. */
p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
+ if (fn == current_function_decl)
+ p = DECL_STRUCT_FUNCTION (fn)->saved_static_chain_decl;
if (p)
{
/* No static chain? Seems like a bug in tree-nested.c. */
}
if (gimplify_init_stmts_p)
- gimplify_body (&init_stmts, current_function_decl);
+ gimplify_body (&init_stmts, current_function_decl, false);
declare_inline_vars (bind_expr, vars);
return init_stmts;
&& !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
{
inline_forbidden_reason
- = N_("%Jfunction '%F' can never be inlined because it uses "
+ = N_("%Jfunction %qF can never be inlined because it uses "
"alloca (override using the always_inline attribute)");
return node;
}
if (setjmp_call_p (t))
{
inline_forbidden_reason
- = N_("%Jfunction '%F' can never be inlined because it uses setjmp");
+ = N_("%Jfunction %qF can never be inlined because it uses setjmp");
return node;
}
case BUILT_IN_NEXT_ARG:
case BUILT_IN_VA_END:
inline_forbidden_reason
- = N_("%Jfunction '%F' can never be inlined because it "
+ = N_("%Jfunction %qF can never be inlined because it "
"uses variable argument lists");
return node;
function calling __builtin_longjmp to be inlined into the
function calling __builtin_setjmp, Things will Go Awry. */
inline_forbidden_reason
- = N_("%Jfunction '%F' can never be inlined because "
+ = N_("%Jfunction %qF can never be inlined because "
"it uses setjmp-longjmp exception handling");
return node;
case BUILT_IN_NONLOCAL_GOTO:
/* Similarly. */
inline_forbidden_reason
- = N_("%Jfunction '%F' can never be inlined because "
+ = N_("%Jfunction %qF can never be inlined because "
"it uses non-local goto");
return node;
if (TREE_CODE (t) != LABEL_DECL)
{
inline_forbidden_reason
- = N_("%Jfunction '%F' can never be inlined "
+ = N_("%Jfunction %qF can never be inlined "
"because it contains a computed goto");
return node;
}
because we cannot remap the destination label used in the
function that is performing the non-local goto. */
inline_forbidden_reason
- = N_("%Jfunction '%F' can never be inlined "
+ = N_("%Jfunction %qF can never be inlined "
"because it receives a non-local goto");
return node;
}
UNION_TYPE nodes, then it goes into infinite recursion on a
structure containing a pointer to its own type. If it doesn't,
then the type node for S doesn't get adjusted properly when
- F is inlined, and we abort in find_function_data. */
+ F is inlined, and we abort in find_function_data.
+
+ ??? This is likely no longer true, but it's too late in the 4.0
+ cycle to try to find out. This should be checked for 4.1. */
for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
if (variably_modified_type_p (TREE_TYPE (t), NULL))
{
inline_forbidden_reason
- = N_("%Jfunction '%F' can never be inlined "
+ = N_("%Jfunction %qF can never be inlined "
"because it uses variable sized variables");
return node;
}
case COMPONENT_REF:
case BIT_FIELD_REF:
case INDIRECT_REF:
+ case ALIGN_INDIRECT_REF:
+ case MISALIGNED_INDIRECT_REF:
case ARRAY_REF:
case ARRAY_RANGE_REF:
case OBJ_TYPE_REF:
case FILTER_EXPR: /* ??? */
case COMPOUND_EXPR:
case BIND_EXPR:
- case LABELED_BLOCK_EXPR:
case WITH_CLEANUP_EXPR:
case NOP_EXPR:
case VIEW_CONVERT_EXPR:
case SAVE_EXPR:
case ADDR_EXPR:
case COMPLEX_EXPR:
- case EXIT_BLOCK_EXPR:
+ case RANGE_EXPR:
case CASE_LABEL_EXPR:
case SSA_NAME:
case CATCH_EXPR:
case ASM_EXPR:
+ case REALIGN_LOAD_EXPR:
+
case RESX_EXPR:
*count += 1;
break;
{
tree decl = get_callee_fndecl (x);
- if (decl && DECL_BUILT_IN (decl))
+ if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
switch (DECL_FUNCTION_CODE (decl))
{
case BUILT_IN_CONSTANT_P:
if (TREE_CODE (*tp) == TARGET_EXPR)
{
#if 0
- int i, len = first_rtl_op (TARGET_EXPR);
+ int i, len = TREE_CODE_LENGTH (TARGET_EXPR);
/* We're walking our own subtrees. */
*walk_subtrees = 0;
{
if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
{
- sorry ("%Jinlining failed in call to '%F': %s", fn, fn, reason);
+ sorry ("%Jinlining failed in call to %qF: %s", fn, fn, reason);
sorry ("called from here");
}
else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
&& strlen (reason)
&& !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn)))
{
- warning ("%Jinlining failed in call to '%F': %s", fn, fn, reason);
+ warning ("%Jinlining failed in call to %qF: %s", fn, fn, reason);
warning ("called from here");
}
goto egress;
recursing into it. */
VARRAY_PUSH_TREE (id->fns, fn);
- /* Record the function we are about to inline if optimize_function
- has not been called on it yet and we don't have it in the list. */
- if (! DECL_INLINED_FNS (fn))
- {
- int i;
-
- for (i = VARRAY_ACTIVE_SIZE (id->inlined_fns) - 1; i >= 0; i--)
- if (VARRAY_TREE (id->inlined_fns, i) == fn)
- break;
- if (i < 0)
- VARRAY_PUSH_TREE (id->inlined_fns, fn);
- }
-
/* Return statements in the function body will be replaced by jumps
to the RET_LABEL. */
id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
DECL_ARTIFICIAL (id->ret_label) = 1;
+ DECL_IGNORED_P (id->ret_label) = 1;
DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
insert_decl_map (id, id->ret_label, id->ret_label);
/* Find the lhs to which the result of this call is assigned. */
modify_dest = tsi_stmt (id->tsi);
if (TREE_CODE (modify_dest) == MODIFY_EXPR)
- modify_dest = TREE_OPERAND (modify_dest, 0);
+ {
+ modify_dest = TREE_OPERAND (modify_dest, 0);
+
+ /* The function which we are inlining might not return a value,
+ in which case we should issue a warning that the function
+ does not return a value. In that case the optimizers will
+ see that the variable to which the value is assigned was not
+ initialized. We do not want to issue a warning about that
+ uninitialized variable. */
+ if (DECL_P (modify_dest))
+ TREE_NO_WARNING (modify_dest) = 1;
+ }
else
modify_dest = NULL;
function itself. */
{
struct cgraph_node *old_node = id->current_node;
+ tree copy;
id->current_node = edge->callee;
- append_to_statement_list (copy_body (id), &BIND_EXPR_BODY (expr));
+ copy = copy_body (id);
+
+ /* If the function uses a return slot, then it may legitimately
+ fall through while still returning a value, so we have to skip
+ the warning here. */
+ if (warn_return_type
+ && !TREE_NO_WARNING (fn)
+ && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
+ && return_slot_addr == NULL_TREE
+ && block_may_fallthru (copy))
+ {
+ warning ("control may reach end of non-void function %qD being inlined",
+ fn);
+ TREE_NO_WARNING (fn) = 1;
+ }
+
+ append_to_statement_list (copy, &BIND_EXPR_BODY (expr));
id->current_node = old_node;
}
inlined_body = &BIND_EXPR_BODY (expr);
splay_tree_delete (id->decl_map);
id->decl_map = st;
- /* The new expression has side-effects if the old one did. */
- TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (t);
+ /* Although, from the semantic viewpoint, the new expression has
+ side-effects only if the old one did, it is not possible, from
+ the technical viewpoint, to evaluate the body of a function
+ multiple times without serious havoc. */
+ TREE_SIDE_EFFECTS (expr) = 1;
tsi_link_before (&id->tsi, expr, TSI_SAME_STMT);
The easiest solution is to simply recalculate TREE_SIDE_EFFECTS for
the toplevel expression. */
recalculate_side_effects (expr);
+
+ /* Output the inlining info for this abstract function, since it has been
+ inlined. If we don't do this now, we can lose the information about the
+ variables in the function when the blocks get blown away as soon as we
+ remove the cgraph node. */
+ (*debug_hooks->outlining_inline_function) (edge->callee->decl);
/* Update callgraph if needed. */
cgraph_remove_node (edge->callee);
{
inline_data id;
tree prev_fn;
- tree ifn;
/* There is no point in performing inlining if errors have already
occurred -- and we might crash if we try to inline invalid
prev_fn = lang_hooks.tree_inlining.add_pending_fn_decls (&id.fns, prev_fn);
- /* Create the list of functions this call will inline. */
- VARRAY_TREE_INIT (id.inlined_fns, 32, "inlined_fns");
-
/* Keep track of the low-water mark, i.e., the point where the first
real inlining is represented in ID.FNS. */
id.first_inlined_fn = VARRAY_ACTIVE_SIZE (id.fns);
/* Clean up. */
htab_delete (id.tree_pruner);
- ifn = make_tree_vec (VARRAY_ACTIVE_SIZE (id.inlined_fns));
- if (VARRAY_ACTIVE_SIZE (id.inlined_fns))
- memcpy (&TREE_VEC_ELT (ifn, 0), &VARRAY_TREE (id.inlined_fns, 0),
- VARRAY_ACTIVE_SIZE (id.inlined_fns) * sizeof (tree));
- DECL_INLINED_FNS (fn) = ifn;
#ifdef ENABLE_CHECKING
{
}
}
- else if (code != EXIT_BLOCK_EXPR
- && code != SAVE_EXPR
+ else if (code != SAVE_EXPR
&& code != BIND_EXPR
&& IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
{
int i, len;
/* Walk over all the sub-trees of this operand. */
- len = first_rtl_op (code);
+ len = TREE_CODE_LENGTH (code);
/* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
But, we only want to walk once. */
if (code == TARGET_EXPR
case CONSTRUCTOR:
WALK_SUBTREE_TAIL (CONSTRUCTOR_ELTS (*tp));
- case EXIT_BLOCK_EXPR:
- WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 1));
-
case SAVE_EXPR:
WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
information indicating to what new SAVE_EXPR this one should be mapped,
use that one. Otherwise, create a new node and enter it in ST. */
-void
+static void
remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
{
splay_tree st = (splay_tree) st_;