/* Nested function decomposition for GIMPLE.
- Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
+ Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
+ Free Software Foundation, Inc.
This file is part of GCC.
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
-#include "rtl.h"
#include "tm_p.h"
#include "function.h"
#include "tree-dump.h"
#include "tree-iterator.h"
#include "tree-flow.h"
#include "cgraph.h"
-#include "expr.h"
+#include "expr.h" /* FIXME: For STACK_SAVEAREA_MODE and SAVE_NONLOCAL. */
#include "langhooks.h"
#include "pointer-set.h"
-#include "ggc.h"
/* The object of this pass is to lower the representation of a set of nested
nonlocal references. We want to do this sooner rather than later, in
order to give us more freedom in emitting all of the functions in question.
- Back in olden times, when gcc was young, we developed an insanely
+ Back in olden times, when gcc was young, we developed an insanely
complicated scheme whereby variables which were referenced nonlocally
were forced to live in the stack of the declaring function, and then
the nested functions magically discovered where these variables were
placed. In order for this scheme to function properly, it required
- that the outer function be partially expanded, then we switch to
+ that the outer function be partially expanded, then we switch to
compiling the inner function, and once done with those we switch back
to compiling the outer function. Such delicate ordering requirements
- makes it difficult to do whole translation unit optimizations
+ makes it difficult to do whole translation unit optimizations
involving such functions.
The implementation here is much more direct. Everything that can be
referenced by an inner function is a member of an explicitly created
structure herein called the "nonlocal frame struct". The incoming
- static chain for a nested function is a pointer to this struct in
+ static chain for a nested function is a pointer to this struct in
the parent. In this way, we settle on known offsets from a known
base, and so are decoupled from the logic that places objects in the
function's stack frame. More importantly, we don't have to wait for
allocated anywhere. Which means that the outer function is now
inlinable.
- Theory of operation here is very simple. Iterate over all the
- statements in all the functions (depth first) several times,
+ Theory of operation here is very simple. Iterate over all the
+ statements in all the functions (depth first) several times,
allocating structures and fields on demand. In general we want to
examine inner functions first, so that we can avoid making changes
to outer functions which are unnecessary.
struct nesting_info *outer;
struct nesting_info *inner;
struct nesting_info *next;
-
+
struct pointer_map_t *field_map;
struct pointer_map_t *var_map;
+ struct pointer_set_t *mem_refs;
bitmap suppress_expansion;
tree context;
};
+/* Iterate over the nesting tree, starting with ROOT, depth first.  */
+
+/* Return the first node of a depth-first walk over the nesting tree:
+   the innermost, leftmost descendant of ROOT.  */
+
+static inline struct nesting_info *
+iter_nestinfo_start (struct nesting_info *root)
+{
+  while (root->inner)
+    root = root->inner;
+  return root;
+}
+
+/* Return the node that follows NODE in a depth-first walk: the deepest
+   descendant of the next sibling if one exists, otherwise the parent
+   (so a parent is always visited after all of its children).  Returns
+   NULL when the walk is complete.  */
+
+static inline struct nesting_info *
+iter_nestinfo_next (struct nesting_info *node)
+{
+  if (node->next)
+    return iter_nestinfo_start (node->next);
+  return node->outer;
+}
+
+/* Visit every nesting_info reachable from ROOT exactly once, children
+   before their enclosing function.  I is the iteration variable.  */
+#define FOR_EACH_NEST_INFO(I, ROOT) \
+  for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
+
/* Obstack used for the bitmaps in the struct above. */
static struct bitmap_obstack nesting_info_bitmap_obstack;
tmp_var = create_tmp_var_raw (type, prefix);
DECL_CONTEXT (tmp_var) = info->context;
- TREE_CHAIN (tmp_var) = info->new_local_var_chain;
+ DECL_CHAIN (tmp_var) = info->new_local_var_chain;
DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
if (TREE_CODE (type) == COMPLEX_TYPE
|| TREE_CODE (type) == VECTOR_TYPE)
way the properties are for the ADDR_EXPR are computed properly. */
save_context = current_function_decl;
current_function_decl = context;
- retval = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
+ retval = build_fold_addr_expr (exp);
current_function_decl = save_context;
return retval;
}
DECL_CONTEXT (field) = type;
- for (p = &TYPE_FIELDS (type); *p ; p = &TREE_CHAIN (*p))
+ for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
break;
- TREE_CHAIN (field) = *p;
+ DECL_CHAIN (field) = *p;
*p = field;
/* Set correct alignment for frame struct type. */
get_chain_decl (struct nesting_info *info)
{
tree decl = info->chain_decl;
+
if (!decl)
{
tree type;
the construction of this variable is handled specially in
expand_function_start and initialize_inlined_parameters.
Note also that it's represented as a parameter. This is more
- close to the truth, since the initial value does come from
+ close to the truth, since the initial value does come from
the caller. */
- decl = build_decl (PARM_DECL, create_tmp_var_name ("CHAIN"), type);
+ decl = build_decl (DECL_SOURCE_LOCATION (info->context),
+ PARM_DECL, create_tmp_var_name ("CHAIN"), type);
DECL_ARTIFICIAL (decl) = 1;
DECL_IGNORED_P (decl) = 1;
TREE_USED (decl) = 1;
TREE_READONLY (decl) = 1;
info->chain_decl = decl;
+
+ if (dump_file
+ && (dump_flags & TDF_DETAILS)
+ && !DECL_STATIC_CHAIN (info->context))
+ fprintf (dump_file, "Setting static-chain for %s\n",
+ lang_hooks.decl_printable_name (info->context, 2));
+
+ DECL_STATIC_CHAIN (info->context) = 1;
}
return decl;
}
get_chain_field (struct nesting_info *info)
{
tree field = info->chain_field;
+
if (!field)
{
tree type = build_pointer_type (get_frame_type (info->outer));
insert_field_into_struct (get_frame_type (info), field);
info->chain_field = field;
+
+ if (dump_file
+ && (dump_flags & TDF_DETAILS)
+ && !DECL_STATIC_CHAIN (info->context))
+ fprintf (dump_file, "Setting static-chain for %s\n",
+ lang_hooks.decl_printable_name (info->context, 2));
+
+ DECL_STATIC_CHAIN (info->context) = 1;
}
return field;
}
return t;
}
-
+
/* Copy EXP into a temporary. Allocate the temporary in the context of
INFO and insert the initialization statement before GSI. */
static GTY(()) tree trampoline_type;
static tree
-get_trampoline_type (void)
+get_trampoline_type (struct nesting_info *info)
{
unsigned align, size;
tree t;
align = STACK_BOUNDARY;
}
- t = build_index_type (build_int_cst (NULL_TREE, size - 1));
+ t = build_index_type (size_int (size - 1));
t = build_array_type (char_type_node, t);
- t = build_decl (FIELD_DECL, get_identifier ("__data"), t);
+ t = build_decl (DECL_SOURCE_LOCATION (info->context),
+ FIELD_DECL, get_identifier ("__data"), t);
DECL_ALIGN (t) = align;
DECL_USER_ALIGN (t) = 1;
{
tree field = make_node (FIELD_DECL);
DECL_NAME (field) = DECL_NAME (decl);
- TREE_TYPE (field) = get_trampoline_type ();
+ TREE_TYPE (field) = get_trampoline_type (info);
TREE_ADDRESSABLE (field) = 1;
insert_field_into_struct (get_frame_type (info), field);
}
return (tree) *slot;
-}
+}
/* Build or return the field within the non-local frame state that holds
the non-local goto "jmp_buf". The buffer itself is maintained by the
size = size + 1;
type = build_array_type
- (type, build_index_type (build_int_cst (NULL_TREE, size)));
+ (type, build_index_type (size_int (size)));
field = make_node (FIELD_DECL);
DECL_NAME (field) = get_identifier ("__nl_goto_buf");
}
/* Similarly for ROOT and all functions nested underneath, depth first. */
-
+
static void
walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
		    struct nesting_info *root)
{
-  do
-    {
-      if (root->inner)
-	walk_all_functions (callback_stmt, callback_op, root->inner);
-      walk_function (callback_stmt, callback_op, root);
-      root = root->next;
-    }
-  while (root);
+  /* The FOR_EACH_NEST_INFO iterator visits inner functions before the
+     functions containing them, preserving the order of the old
+     explicit recursion it replaces.  */
+  struct nesting_info *n;
+  FOR_EACH_NEST_INFO (n, root)
+    walk_function (callback_stmt, callback_op, n);
}
static bool
check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
{
- struct cgraph_node *cgn = cgraph_node (fndecl);
+ struct cgraph_node *cgn = cgraph_get_node (fndecl);
tree arg;
for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
{
- for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = TREE_CHAIN (arg))
+ for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
return true;
struct nesting_info *info = XCNEW (struct nesting_info);
info->field_map = pointer_map_create ();
info->var_map = pointer_map_create ();
+ info->mem_refs = pointer_set_create ();
info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
info->context = cgn->decl;
{
tree field = get_chain_field (i);
- x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
+ x = build_simple_mem_ref (x);
x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
x = init_tmp_var (info, x, gsi);
}
{
tree field = get_chain_field (i);
- x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
+ x = build_simple_mem_ref (x);
x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
x = init_tmp_var (info, x, gsi);
}
- x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
+ x = build_simple_mem_ref (x);
}
x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
/* A subroutine of convert_nonlocal_reference_op. Create a local variable
in the nested function with DECL_VALUE_EXPR set to reference the true
- variable in the parent function. This is used both for debug info
+ variable in the parent function. This is used both for debug info
and in OpenMP lowering. */
static tree
for (i = info->outer; i->context != target_context; i = i->outer)
{
field = get_chain_field (i);
- x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
+ x = build_simple_mem_ref (x);
x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
}
- x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
+ x = build_simple_mem_ref (x);
}
field = lookup_field_for_decl (i, decl, INSERT);
x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
if (use_pointer_in_frame (decl))
- x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
+ x = build_simple_mem_ref (x);
/* ??? We should be remapping types as well, surely. */
- new_decl = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
+ new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
+ VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
DECL_CONTEXT (new_decl) = info->context;
- DECL_SOURCE_LOCATION (new_decl) = DECL_SOURCE_LOCATION (decl);
DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
*slot = new_decl;
- TREE_CHAIN (new_decl) = info->debug_var_chain;
+ DECL_CHAIN (new_decl) = info->debug_var_chain;
info->debug_var_chain = new_decl;
if (!optimize
if (use_pointer_in_frame (t))
{
x = init_tmp_var (info, x, &wi->gsi);
- x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
+ x = build_simple_mem_ref (x);
}
}
{
bitmap_set_bit (new_suppress, DECL_UID (decl));
OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
- need_chain = true;
+ if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
+ need_chain = true;
}
break;
if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
break;
/* FALLTHRU */
+ case OMP_CLAUSE_FINAL:
case OMP_CLAUSE_IF:
case OMP_CLAUSE_NUM_THREADS:
wi->val_only = true;
case OMP_CLAUSE_COPYIN:
case OMP_CLAUSE_COLLAPSE:
case OMP_CLAUSE_UNTIED:
+ case OMP_CLAUSE_MERGEABLE:
break;
default:
{
tree var;
- for (var = BLOCK_VARS (block); var; var = TREE_CHAIN (var))
+ for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
if (TREE_CODE (var) == VAR_DECL
&& variably_modified_type_p (TREE_TYPE (var), NULL)
&& DECL_HAS_VALUE_EXPR_P (var)
{
tree c, decl;
decl = get_chain_decl (info);
- c = build_omp_clause (OMP_CLAUSE_FIRSTPRIVATE);
+ c = build_omp_clause (gimple_location (stmt),
+ OMP_CLAUSE_FIRSTPRIVATE);
OMP_CLAUSE_DECL (c) = decl;
OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
gimple_omp_taskreg_set_clauses (stmt, c);
*handled_ops_p = false;
return NULL_TREE;
+ case GIMPLE_COND:
+ wi->val_only = true;
+ wi->is_lhs = false;
+ *handled_ops_p = false;
+ return NULL_TREE;
+
default:
/* For every other statement that we are not interested in
handling here, let the walker traverse the operands. */
x = info->frame_decl;
x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
- new_decl = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
+ new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
+ VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
DECL_CONTEXT (new_decl) = info->context;
- DECL_SOURCE_LOCATION (new_decl) = DECL_SOURCE_LOCATION (decl);
DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
*slot = new_decl;
- TREE_CHAIN (new_decl) = info->debug_var_chain;
+ DECL_CHAIN (new_decl) = info->debug_var_chain;
info->debug_var_chain = new_decl;
/* Do not emit debug info twice. */
/* Then the frame decl is now addressable. */
TREE_ADDRESSABLE (info->frame_decl) = 1;
-
+
save_context = current_function_decl;
current_function_decl = info->context;
recompute_tree_invariant_for_addr_expr (t);
wi->val_only = save_val_only;
break;
+ case MEM_REF:
+ save_val_only = wi->val_only;
+ wi->val_only = true;
+ wi->is_lhs = false;
+ walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
+ wi, NULL);
+ /* We need to re-fold the MEM_REF as component references as
+ part of a ADDR_EXPR address are not allowed. But we cannot
+ fold here, as the chain record type is not yet finalized. */
+ if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
+ && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
+ pointer_set_insert (info->mem_refs, tp);
+ wi->val_only = save_val_only;
+ break;
+
case VIEW_CONVERT_EXPR:
/* Just request to look at the subtrees, leaving val_only and lhs
untouched. This might actually be for !val_only + lhs, in which
if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
break;
/* FALLTHRU */
+ case OMP_CLAUSE_FINAL:
case OMP_CLAUSE_IF:
case OMP_CLAUSE_NUM_THREADS:
wi->val_only = true;
case OMP_CLAUSE_COPYIN:
case OMP_CLAUSE_COLLAPSE:
case OMP_CLAUSE_UNTIED:
+ case OMP_CLAUSE_MERGEABLE:
break;
default:
{
tree c;
(void) get_frame_type (info);
- c = build_omp_clause (OMP_CLAUSE_SHARED);
+ c = build_omp_clause (gimple_location (stmt),
+ OMP_CLAUSE_SHARED);
OMP_CLAUSE_DECL (c) = info->frame_decl;
OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
gimple_omp_taskreg_set_clauses (stmt, c);
info, gimple_omp_body (stmt));
break;
+ case GIMPLE_COND:
+ wi->val_only = true;
+ wi->is_lhs = false;
+ *handled_ops_p = false;
+ return NULL_TREE;
+
default:
/* For every other statement that we are not interested in
handling here, let the walker traverse the operands. */
slot = pointer_map_insert (i->var_map, label);
if (*slot == NULL)
{
- new_label = create_artificial_label ();
+ new_label = create_artificial_label (UNKNOWN_LOCATION);
DECL_NONLOCAL (new_label) = 1;
*slot = new_label;
}
else
new_label = (tree) *slot;
-
+
/* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
field = get_nl_goto_field (i);
x = get_frame_field (info, target_context, field, &wi->gsi);
x = build_addr (x, target_context);
x = gsi_gimplify_val (info, x, &wi->gsi);
- call = gimple_build_call (implicit_built_in_decls[BUILT_IN_NONLOCAL_GOTO], 2,
- build_addr (new_label, target_context), x);
+ call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
+ 2, build_addr (new_label, target_context), x);
gsi_replace (&wi->gsi, call, false);
/* We have handled all of STMT's operands, no need to keep going. */
/* If the nested function doesn't use a static chain, then
it doesn't need a trampoline. */
- if (DECL_NO_STATIC_CHAIN (decl))
+ if (!DECL_STATIC_CHAIN (decl))
break;
/* If we don't want a trampoline, then don't build one. */
/* Do machine-specific ugliness. Normally this will involve
computing extra alignment, but it can really be anything. */
- builtin = implicit_built_in_decls[BUILT_IN_ADJUST_TRAMPOLINE];
+ builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
call = gimple_build_call (builtin, 1, x);
x = init_tmp_var_with_call (info, &wi->gsi, call);
switch (gimple_code (stmt))
{
case GIMPLE_CALL:
+ if (gimple_call_chain (stmt))
+ break;
decl = gimple_call_fndecl (stmt);
if (!decl)
break;
target_context = decl_function_context (decl);
- if (target_context && !DECL_NO_STATIC_CHAIN (decl))
+ if (target_context && DECL_STATIC_CHAIN (decl))
{
gimple_call_set_chain (stmt, get_static_chain (info, target_context,
&wi->gsi));
break;
if (c == NULL)
{
- c = build_omp_clause (i ? OMP_CLAUSE_FIRSTPRIVATE
- : OMP_CLAUSE_SHARED);
+ c = build_omp_clause (gimple_location (stmt),
+ i ? OMP_CLAUSE_FIRSTPRIVATE
+ : OMP_CLAUSE_SHARED);
OMP_CLAUSE_DECL (c) = decl;
OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
gimple_omp_taskreg_set_clauses (stmt, c);
return NULL_TREE;
}
-
-/* Walk the nesting tree starting with ROOT, depth first. Convert all
- trampolines and call expressions. On the way back up, determine if
- a nested function actually uses its static chain; if not, remember that. */
+/* Walk the nesting tree starting with ROOT. Convert all trampolines and
+ call expressions. At the same time, determine if a nested function
+ actually uses its static chain; if not, remember that. */
static void
convert_all_function_calls (struct nesting_info *root)
{
-  do
+  unsigned int chain_count = 0, old_chain_count, iter_count;
+  struct nesting_info *n;
+
+  /* First, optimistically clear static_chain for all decls that haven't
+     used the static chain already for variable access.  */
+  FOR_EACH_NEST_INFO (n, root)
    {
-      if (root->inner)
-	convert_all_function_calls (root->inner);
+      tree decl = n->context;
+      /* The outermost function never needs a chain; nested functions
+	 need one only if an earlier walk created a chain decl/field.  */
+      if (!n->outer || (!n->chain_decl && !n->chain_field))
+	{
+	  DECL_STATIC_CHAIN (decl) = 0;
+	  if (dump_file && (dump_flags & TDF_DETAILS))
+	    fprintf (dump_file, "Guessing no static-chain for %s\n",
+		     lang_hooks.decl_printable_name (decl, 2));
+	}
+      else
+	DECL_STATIC_CHAIN (decl) = 1;
+      chain_count += DECL_STATIC_CHAIN (decl);
+    }
-      walk_function (convert_tramp_reference_stmt, convert_tramp_reference_op,
-		     root);
-      walk_function (convert_gimple_call, NULL, root);
+  /* Walk the functions and perform transformations.  Note that these
+     transformations can induce new uses of the static chain, which in turn
+     require re-examining all users of the decl.  */
+  /* ??? It would make sense to try to use the call graph to speed this up,
+     but the call graph hasn't really been built yet.  Even if it did, we
+     would still need to iterate in this loop since address-of references
+     wouldn't show up in the callgraph anyway.  */
+  iter_count = 0;
+  do
+    {
+      old_chain_count = chain_count;
+      chain_count = 0;
+      iter_count++;
-      /* If the function does not use a static chain, then remember that.  */
-      if (root->outer && !root->chain_decl && !root->chain_field)
-	DECL_NO_STATIC_CHAIN (root->context) = 1;
-      else
-	gcc_assert (!DECL_NO_STATIC_CHAIN (root->context));
+      if (dump_file && (dump_flags & TDF_DETAILS))
+	fputc ('\n', dump_file);
-      root = root->next;
+      FOR_EACH_NEST_INFO (n, root)
+	{
+	  tree decl = n->context;
+	  walk_function (convert_tramp_reference_stmt,
+			 convert_tramp_reference_op, n);
+	  walk_function (convert_gimple_call, NULL, n);
+	  chain_count += DECL_STATIC_CHAIN (decl);
+	}
    }
-  while (root);
+  /* Iterate to a fixed point: stop once a full pass adds no new
+     static-chain uses.  */
+  while (chain_count != old_chain_count);
+
+  if (dump_file && (dump_flags & TDF_DETAILS))
+    fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
+	     iter_count);
}
struct nesting_copy_body_data
subblock = BLOCK_CHAIN (subblock))
remap_vla_decls (subblock, root);
- for (var = BLOCK_VARS (block); var; var = TREE_CHAIN (var))
- {
- if (TREE_CODE (var) == VAR_DECL
- && variably_modified_type_p (TREE_TYPE (var), NULL)
- && DECL_HAS_VALUE_EXPR_P (var))
- {
- type = TREE_TYPE (var);
- val = DECL_VALUE_EXPR (var);
- if (walk_tree (&type, contains_remapped_vars, root, NULL) != NULL
- || walk_tree (&val, contains_remapped_vars, root, NULL) != NULL)
- break;
- }
- }
+ for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
+ if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
+ {
+ val = DECL_VALUE_EXPR (var);
+ type = TREE_TYPE (var);
+
+ if (!(TREE_CODE (val) == INDIRECT_REF
+ && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
+ && variably_modified_type_p (type, NULL)))
+ continue;
+
+ if (pointer_map_contains (root->var_map, TREE_OPERAND (val, 0))
+ || walk_tree (&type, contains_remapped_vars, root, NULL))
+ break;
+ }
+
if (var == NULL_TREE)
return;
id.cb.decl_map = pointer_map_create ();
id.root = root;
- for (; var; var = TREE_CHAIN (var))
- if (TREE_CODE (var) == VAR_DECL
- && variably_modified_type_p (TREE_TYPE (var), NULL)
- && DECL_HAS_VALUE_EXPR_P (var))
+ for (; var; var = DECL_CHAIN (var))
+ if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
{
struct nesting_info *i;
- tree newt, t, context;
+ tree newt, context;
+ void **slot;
- t = type = TREE_TYPE (var);
val = DECL_VALUE_EXPR (var);
- if (walk_tree (&type, contains_remapped_vars, root, NULL) == NULL
- && walk_tree (&val, contains_remapped_vars, root, NULL) == NULL)
+ type = TREE_TYPE (var);
+
+ if (!(TREE_CODE (val) == INDIRECT_REF
+ && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
+ && variably_modified_type_p (type, NULL)))
+ continue;
+
+ slot = pointer_map_contains (root->var_map, TREE_OPERAND (val, 0));
+ if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
continue;
context = decl_function_context (var);
if (i == NULL)
continue;
+ /* Fully expand value expressions. This avoids having debug variables
+ only referenced from them and that can be swept during GC. */
+ if (slot)
+ {
+ tree t = (tree) *slot;
+ gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
+ val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
+ }
+
id.cb.src_fn = i->context;
id.cb.dst_fn = i->context;
id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
{
newt = TREE_TYPE (newt);
- t = TREE_TYPE (t);
+ type = TREE_TYPE (type);
}
if (TYPE_NAME (newt)
&& TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
&& DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
- && newt != t
- && TYPE_NAME (newt) == TYPE_NAME (t))
+ && newt != type
+ && TYPE_NAME (newt) == TYPE_NAME (type))
TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
pointer_map_destroy (id.cb.decl_map);
}
+/* Callback for pointer_set_traverse: fold the rewritten MEM_REF tree
+   pointed to by *E in place.  Returns true so the traversal continues
+   over the remaining recorded MEM_REFs.  */
+static bool
+fold_mem_refs (const void *e, void *data ATTRIBUTE_UNUSED)
+{
+  /* The pointer set hands back a const pointer; strip the const so the
+     tree can be updated in place.  */
+  tree *ref_p = CONST_CAST2(tree *, const tree *, (const tree *)e);
+  *ref_p = fold (*ref_p);
+  return true;
+}
+
/* Do "everything else" to clean up or complete state collected by the
various walking passes -- lay out the types and decls, generate code
to initialize the frame decl, store critical expressions in the
expression get substituted in instantiate_virtual_regs(). */
for (adjust = &root->new_local_var_chain;
*adjust != root->frame_decl;
- adjust = &TREE_CHAIN (*adjust))
- gcc_assert (TREE_CHAIN (*adjust));
- *adjust = TREE_CHAIN (*adjust);
+ adjust = &DECL_CHAIN (*adjust))
+ gcc_assert (DECL_CHAIN (*adjust));
+ *adjust = DECL_CHAIN (*adjust);
- TREE_CHAIN (root->frame_decl) = NULL_TREE;
+ DECL_CHAIN (root->frame_decl) = NULL_TREE;
declare_vars (root->frame_decl,
gimple_seq_first_stmt (gimple_body (context)), true);
}
- /* If any parameters were referenced non-locally, then we need to
+ /* If any parameters were referenced non-locally, then we need to
insert a copy. Likewise, if any variables were referenced by
pointer, we need to initialize the address. */
if (root->any_parm_remapped)
{
tree p;
- for (p = DECL_ARGUMENTS (context); p ; p = TREE_CHAIN (p))
+ for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
{
tree field, x, y;
if (!field)
continue;
- if (DECL_NO_STATIC_CHAIN (i->context))
- arg3 = null_pointer_node;
- else
- arg3 = build_addr (root->frame_decl, context);
+ gcc_assert (DECL_STATIC_CHAIN (i->context));
+ arg3 = build_addr (root->frame_decl, context);
arg2 = build_addr (i->context, context);
root->frame_decl, field, NULL_TREE);
arg1 = build_addr (x, context);
- x = implicit_built_in_decls[BUILT_IN_INIT_TRAMPOLINE];
+ x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
gimple_seq_add_stmt (&stmt_list, stmt);
}
remap_vla_decls (DECL_INITIAL (root->context), root);
for (debug_var = root->debug_var_chain; debug_var;
- debug_var = TREE_CHAIN (debug_var))
+ debug_var = DECL_CHAIN (debug_var))
if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
break;
id.cb.decl_map = pointer_map_create ();
id.root = root;
- for (; debug_var; debug_var = TREE_CHAIN (debug_var))
+ for (; debug_var; debug_var = DECL_CHAIN (debug_var))
if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
{
tree type = TREE_TYPE (debug_var);
root->debug_var_chain);
}
+ /* Fold the rewritten MEM_REF trees. */
+ pointer_set_traverse (root->mem_refs, fold_mem_refs, NULL);
+
/* Dump the translated tree function. */
- dump_function (TDI_nested, root->context);
+ if (dump_file)
+ {
+ fputs ("\n\n", dump_file);
+ dump_function_to_file (root->context, dump_file, dump_flags);
+ }
}
static void
finalize_nesting_tree (struct nesting_info *root)
{
-  do
-    {
-      if (root->inner)
-	finalize_nesting_tree (root->inner);
-      finalize_nesting_tree_1 (root);
-      root = root->next;
-    }
-  while (root);
+  /* Depth-first, children before parents, as with the recursion this
+     iterator replaces.  */
+  struct nesting_info *n;
+  FOR_EACH_NEST_INFO (n, root)
+    finalize_nesting_tree_1 (n);
}
/* Unnest the nodes and pass them to cgraph. */
static void
unnest_nesting_tree_1 (struct nesting_info *root)
{
-  struct cgraph_node *node = cgraph_node (root->context);
+  struct cgraph_node *node = cgraph_get_node (root->context);
+  /* NODE is looked up once here and reused below instead of querying
+     the cgraph a second time.  */
  /* For nested functions update the cgraph to reflect unnesting.
     We also delay finalizing of these functions up to this point.  */
  if (node->origin)
    {
-      cgraph_unnest_node (cgraph_node (root->context));
+      cgraph_unnest_node (node);
      cgraph_finalize_function (root->context, true);
    }
}
static void
unnest_nesting_tree (struct nesting_info *root)
{
-  do
-    {
-      if (root->inner)
-	unnest_nesting_tree (root->inner);
-      unnest_nesting_tree_1 (root);
-      root = root->next;
-    }
-  while (root);
+  /* Same depth-first order as the recursive version it replaces.  */
+  struct nesting_info *n;
+  FOR_EACH_NEST_INFO (n, root)
+    unnest_nesting_tree_1 (n);
}
/* Free the data structures allocated during this pass. */
static void
free_nesting_tree (struct nesting_info *root)
{
-  struct nesting_info *next;
+  struct nesting_info *node, *next;
+
+  node = iter_nestinfo_start (root);
  do
    {
-      if (root->inner)
-	free_nesting_tree (root->inner);
-      pointer_map_destroy (root->var_map);
-      pointer_map_destroy (root->field_map);
-      next = root->next;
-      free (root);
-      root = next;
+      /* Compute the successor before freeing NODE: the iterator reads
+	 NODE's links, which are invalid after free.  */
+      next = iter_nestinfo_next (node);
+      pointer_map_destroy (node->var_map);
+      pointer_map_destroy (node->field_map);
+      pointer_set_destroy (node->mem_refs);
+      free (node);
+      node = next;
    }
-  while (root);
+  while (node);
+}
+
+/* Gimplify a function and all its nested functions.  ROOT is the
+   cgraph node of the outermost function; nested functions are reached
+   through its nested/next_nested links.  */
+static void
+gimplify_all_functions (struct cgraph_node *root)
+{
+  struct cgraph_node *iter;
+  /* Only gimplify bodies that have no GIMPLE representation yet.  */
+  if (!gimple_body (root->decl))
+    gimplify_function_tree (root->decl);
+  for (iter = root->nested; iter; iter = iter->next_nested)
+    gimplify_all_functions (iter);
+}
/* Main entry point for this pass. Process FNDECL and all of its nested
struct nesting_info *root;
/* If there are no nested functions, there's nothing to do. */
- cgn = cgraph_node (fndecl);
+ cgn = cgraph_get_node (fndecl);
if (!cgn->nested)
return;
+ gimplify_all_functions (cgn);
+
+ dump_file = dump_begin (TDI_nested, &dump_flags);
+ if (dump_file)
+ fprintf (dump_file, "\n;; Function %s\n\n",
+ lang_hooks.decl_printable_name (fndecl, 2));
+
bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
root = create_nesting_tree (cgn);
+
walk_all_functions (convert_nonlocal_reference_stmt,
convert_nonlocal_reference_op,
root);
root);
walk_all_functions (convert_nl_goto_reference, NULL, root);
walk_all_functions (convert_nl_goto_receiver, NULL, root);
+
convert_all_function_calls (root);
finalize_nesting_tree (root);
unnest_nesting_tree (root);
+
free_nesting_tree (root);
bitmap_obstack_release (&nesting_info_bitmap_obstack);
+
+ if (dump_file)
+ {
+ dump_end (TDI_nested, dump_file);
+ dump_file = NULL;
+ }
}
#include "gt-tree-nested.h"