/* Callgraph based interprocedural optimizations.
- Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008
+ Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
Free Software Foundation, Inc.
Contributed by Jan Hubicka
#include "timevar.h"
#include "params.h"
#include "fibheap.h"
-#include "c-common.h"
#include "intl.h"
#include "function.h"
#include "ipa-prop.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-pass.h"
+#include "tree-dump.h"
#include "output.h"
+#include "coverage.h"
static void cgraph_expand_all_functions (void);
static void cgraph_mark_functions_to_output (void);
static void cgraph_expand_function (struct cgraph_node *);
static void cgraph_output_pending_asms (void);
+static void cgraph_analyze_function (struct cgraph_node *);
static FILE *cgraph_dump_file;
priority = p;
else if (p != priority)
break;
- append_to_statement_list (build_function_call_expr (fn, 0),
+ append_to_statement_list (build_function_call_expr (UNKNOWN_LOCATION,
+ fn, 0),
&body);
++i;
}
either outside this translation unit, something magic in the system
configury. */
-static bool
-decide_is_function_needed (struct cgraph_node *node, tree decl)
+bool
+cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
{
- if (MAIN_NAME_P (DECL_NAME (decl))
- && TREE_PUBLIC (decl))
- {
- node->local.externally_visible = true;
- return true;
- }
-
/* If the user told us it is used, then it must be so. */
if (node->local.externally_visible)
return true;
|| (!optimize && !node->local.disregard_inline_limits
&& !DECL_DECLARED_INLINE_P (decl)
&& !node->origin))
- && !flag_whole_program)
+ && !flag_whole_program
+ && !flag_lto
+ && !flag_whopr)
&& !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
return true;
case CGRAPH_STATE_EXPANSION:
/* Functions created during expansion shall be compiled
directly. */
- node->output = 0;
+ node->process = 0;
cgraph_expand_function (node);
break;
static void
cgraph_reset_node (struct cgraph_node *node)
{
- /* If node->output is set, then we have already begun whole-unit analysis.
+ /* If node->process is set, then we have already begun whole-unit analysis.
This is *not* testing for whether we've already emitted the function.
That case can be sort-of legitimately seen with real function redefinition
errors. I would argue that the front end should never present us with
such a case, but don't enforce that for now. */
- gcc_assert (!node->output);
+ gcc_assert (!node->process);
/* Reset our data structures so we can analyze the function again. */
memset (&node->local, 0, sizeof (node->local));
{
if (node->lowered)
return;
+
+ if (node->nested)
+ lower_nested_functions (node->decl);
+ gcc_assert (!node->nested);
+
tree_lowering_passes (node->decl);
node->lowered = true;
}
notice_global_symbol (decl);
node->local.finalized = true;
node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
+ node->finalized_by_frontend = true;
record_cdtor_fn (node->decl);
- if (node->nested)
- lower_nested_functions (decl);
- gcc_assert (!node->nested);
- if (decide_is_function_needed (node, decl))
+ if (cgraph_decide_is_function_needed (node, decl))
cgraph_mark_needed_node (node);
/* Since we reclaim unreachable nodes at the end of every language
cgraph_mark_if_needed (tree decl)
{
struct cgraph_node *node = cgraph_node (decl);
- if (node->local.finalized && decide_is_function_needed (node, decl))
+ if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
cgraph_mark_needed_node (node);
}
+/* Return TRUE if NODE2 is NODE itself or a (possibly transitive) clone of
+   NODE, found by walking NODE2's clone_of chain upward toward the clone
+   tree root.  Returns FALSE if NODE2 is NULL or the chain ends without
+   reaching NODE.  */
+static bool
+clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
+{
+  while (node != node2 && node2)
+    node2 = node2->clone_of;
+  return node2 != NULL;
+}
+
/* Verify cgraph nodes of given cgraph node. */
void
verify_cgraph_node (struct cgraph_node *node)
{
struct cgraph_edge *e;
- struct cgraph_node *main_clone;
struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
struct function *saved_cfun = cfun;
basic_block this_block;
if (e->aux)
{
error ("aux field set for edge %s->%s",
- cgraph_node_name (e->caller), cgraph_node_name (e->callee));
+ identifier_to_locale (cgraph_node_name (e->caller)),
+ identifier_to_locale (cgraph_node_name (e->callee)));
error_found = true;
}
if (node->count < 0)
error ("Execution count is negative");
error_found = true;
}
+ if (node->global.inlined_to && node->local.externally_visible)
+ {
+ error ("Externally visible inline clone");
+ error_found = true;
+ }
+ if (node->global.inlined_to && node->address_taken)
+ {
+ error ("Inline clone with address taken");
+ error_found = true;
+ }
+ if (node->global.inlined_to && node->needed)
+ {
+ error ("Inline clone is needed");
+ error_found = true;
+ }
for (e = node->callers; e; e = e->next_caller)
{
if (e->count < 0)
error_found = true;
}
- for (main_clone = cgraph_node (node->decl); main_clone;
- main_clone = main_clone->next_clone)
- if (main_clone == node)
- break;
if (!cgraph_node (node->decl))
{
error ("node not found in cgraph_hash");
error_found = true;
}
- if (node->analyzed
+ if (node->clone_of)
+ {
+ struct cgraph_node *n;
+ for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
+ if (n == node)
+ break;
+ if (!n)
+ {
+ error ("node has wrong clone_of");
+ error_found = true;
+ }
+ }
+ if (node->clones)
+ {
+ struct cgraph_node *n;
+ for (n = node->clones; n; n = n->next_sibling_clone)
+ if (n->clone_of != node)
+ break;
+ if (n)
+ {
+ error ("node has wrong clone list");
+ error_found = true;
+ }
+ }
+ if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
+ {
+ error ("node is in clone list but it is not clone");
+ error_found = true;
+ }
+ if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
+ {
+ error ("node has wrong prev_clone pointer");
+ error_found = true;
+ }
+ if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
+ {
+ error ("double linked list of clones corrupted");
+ error_found = true;
+ }
+
+ if (node->analyzed && gimple_has_body_p (node->decl)
&& !TREE_ASM_WRITTEN (node->decl)
- && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
+ && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
+ && !flag_wpa)
{
if (this_cfun->cfg)
{
debug_gimple_stmt (stmt);
error_found = true;
}
- if (e->callee->decl != cgraph_node (decl)->decl
- && e->inline_failed)
+ if (!clone_of_p (cgraph_node (decl), e->callee)
+ && !e->callee->global.inlined_to)
{
error ("edge points to wrong declaration:");
debug_tree (e->callee->decl);
if (!e->aux && !e->indirect_call)
{
error ("edge %s->%s has no corresponding call_stmt",
- cgraph_node_name (e->caller),
- cgraph_node_name (e->callee));
+ identifier_to_locale (cgraph_node_name (e->caller)),
+ identifier_to_locale (cgraph_node_name (e->callee)));
debug_gimple_stmt (e->call_stmt);
error_found = true;
}
}
/* Analyze the function scheduled to be output. */
-void
+static void
cgraph_analyze_function (struct cgraph_node *node)
{
+ tree save = current_function_decl;
tree decl = node->decl;
current_function_decl = decl;
push_cfun (DECL_STRUCT_FUNCTION (decl));
+
+ /* Make sure to gimplify bodies only once. During analyzing a
+ function we lower it, which will require gimplified nested
+ functions, so we can end up here with an already gimplified
+ body. */
+ if (!gimple_body (decl))
+ gimplify_function_tree (decl);
+ dump_function (TDI_generic, decl);
+
cgraph_lower_function (node);
node->analyzed = true;
pop_cfun ();
- current_function_decl = NULL;
+ current_function_decl = save;
}
/* Look for externally_visible and used attributes and mark cgraph nodes
if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
{
if (! TREE_PUBLIC (node->decl))
- warning (OPT_Wattributes,
- "%J%<externally_visible%> attribute have effect only on public objects",
- node->decl);
- else
- {
- if (node->local.finalized)
- cgraph_mark_needed_node (node);
- node->local.externally_visible = true;
- }
+ warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
+ "%<externally_visible%>"
+ " attribute have effect only on public objects");
+ else if (node->local.finalized)
+ cgraph_mark_needed_node (node);
}
}
for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
{
mark_decl_referenced (decl);
+ vnode->force_output = true;
if (vnode->finalized)
varpool_mark_needed_node (vnode);
}
if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
{
if (! TREE_PUBLIC (vnode->decl))
- warning (OPT_Wattributes,
- "%J%<externally_visible%> attribute have effect only on public objects",
- vnode->decl);
- else
- {
- if (vnode->finalized)
- varpool_mark_needed_node (vnode);
- vnode->externally_visible = true;
- }
+ warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
+ "%<externally_visible%>"
+ " attribute have effect only on public objects");
+ else if (vnode->finalized)
+ varpool_mark_needed_node (vnode);
}
}
}
continue;
}
- gcc_assert (!node->analyzed && node->reachable);
- gcc_assert (gimple_body (decl));
-
- cgraph_analyze_function (node);
+ if (!node->analyzed)
+ cgraph_analyze_function (node);
for (edge = node->callees; edge; edge = edge->next_callee)
if (!edge->callee->reachable)
ggc_collect ();
}
+
+/* Emit thunks for every node in the cgraph.
+   FIXME: We really ought to emit thunks only for functions that are needed.  */
+
+static void
+cgraph_emit_thunks (void)
+{
+  struct cgraph_node *n;
+
+  for (n = cgraph_nodes; n; n = n->next)
+    {
+      /* Only emit thunks on functions defined in this TU.
+         Note that this may emit more thunks than strictly necessary.
+         During optimization some nodes may disappear.  It would be
+         nice to emit thunks only for the functions that will be
+         emitted, but we cannot know that until the inliner and other
+         IPA passes have run (see the sequencing of the call to
+         cgraph_mark_functions_to_output in cgraph_optimize).  */
+      if (n->reachable
+          && !DECL_EXTERNAL (n->decl))
+        lang_hooks.callgraph.emit_associated_thunks (n->decl);
+    }
+}
+
+
/* Analyze the whole compilation unit once it is parsed completely. */
void
cgraph_finalize_compilation_unit (void)
{
- if (errorcount || sorrycount)
- return;
+ timevar_push (TV_CGRAPH);
+
+ /* Do not skip analyzing the functions if there were errors, we
+ miss diagnostics for following functions otherwise. */
+
+ /* Emit size functions we didn't inline. */
+ finalize_size_functions ();
+
+ /* Call functions declared with the "constructor" or "destructor"
+ attribute. */
+ cgraph_build_cdtor_fns ();
+ /* Mark alias targets necessary and emit diagnostics. */
finish_aliases_1 ();
if (!quiet_flag)
fflush (stderr);
}
- timevar_push (TV_CGRAPH);
+ /* Gimplify and lower all functions, compute reachability and
+ remove unreachable nodes. */
cgraph_analyze_functions ();
+
+ /* Emit thunks for reachable nodes, if needed. */
+ if (lang_hooks.callgraph.emit_associated_thunks)
+ cgraph_emit_thunks ();
+
+ /* Mark alias targets necessary and emit diagnostics. */
+ finish_aliases_1 ();
+
+ /* Gimplify and lower thunks. */
+ cgraph_analyze_functions ();
+
+ /* Finally drive the pass manager. */
+ cgraph_optimize ();
+
timevar_pop (TV_CGRAPH);
}
+
+
/* Figure out what functions we want to assemble. */
static void
tree decl = node->decl;
struct cgraph_edge *e;
- gcc_assert (!node->output);
+ gcc_assert (!node->process);
for (e = node->callers; e; e = e->next_caller)
if (e->inline_failed)
|| (e && node->reachable))
&& !TREE_ASM_WRITTEN (decl)
&& !DECL_EXTERNAL (decl))
- node->output = 1;
+ node->process = 1;
else
{
/* We should've reclaimed all functions that are not needed. */
gcc_assert (!node->global.inlined_to);
announce_function (decl);
+ node->process = 0;
gcc_assert (node->lowered);
/* Generate RTL for the body of DECL. */
- if (lang_hooks.callgraph.emit_associated_thunks)
- lang_hooks.callgraph.emit_associated_thunks (decl);
tree_rest_of_compilation (decl);
/* Make sure that BE didn't give up on compiling. */
/* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
bool
-cgraph_inline_p (struct cgraph_edge *e, const char **reason)
+cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
{
*reason = e->inline_failed;
return !e->inline_failed;
/* Garbage collector may remove inline clones we eliminate during
optimization. So we must be sure to not reference them. */
for (i = 0; i < order_pos; i++)
- if (order[i]->output)
+ if (order[i]->process)
order[new_order_pos++] = order[i];
for (i = new_order_pos - 1; i >= 0; i--)
{
node = order[i];
- if (node->output)
+ if (node->process)
{
gcc_assert (node->reachable);
- node->output = 0;
+ node->process = 0;
cgraph_expand_function (node);
}
}
/* This is used to sort the node types by the cgraph order number. */
+enum cgraph_order_sort_kind
+{
+ ORDER_UNDEFINED = 0,
+ ORDER_FUNCTION,
+ ORDER_VAR,
+ ORDER_ASM
+};
+
struct cgraph_order_sort
{
- enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
+ enum cgraph_order_sort_kind kind;
union
{
struct cgraph_node *f;
for (pf = cgraph_nodes; pf; pf = pf->next)
{
- if (pf->output)
+ if (pf->process)
{
i = pf->order;
gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
switch (nodes[i].kind)
{
case ORDER_FUNCTION:
- nodes[i].u.f->output = 0;
+ nodes[i].u.f->process = 0;
cgraph_expand_function (nodes[i].u.f);
break;
gcc_assert (cgraph_global_info_ready);
/* Look if there is any clone around. */
- for (node = cgraph_node (decl); node; node = node->next_clone)
- if (node->global.inlined_to)
- return true;
+ node = cgraph_node (decl);
+ if (node->clones)
+ return true;
return false;
}
current_function_decl = NULL;
gimple_register_cfg_hooks ();
bitmap_obstack_initialize (NULL);
- execute_ipa_pass_list (all_ipa_passes);
+
+ if (!in_lto_p)
+ execute_ipa_pass_list (all_small_ipa_passes);
+
+ /* If pass_all_early_optimizations was not scheduled, the state of
+ the cgraph will not be properly updated. Update it now. */
+ if (cgraph_state < CGRAPH_STATE_IPA_SSA)
+ cgraph_state = CGRAPH_STATE_IPA_SSA;
+
+ if (!in_lto_p)
+ {
+ /* Generate coverage variables and constructors. */
+ coverage_finish ();
+
+ /* Process new functions added. */
+ set_cfun (NULL);
+ current_function_decl = NULL;
+ cgraph_process_new_functions ();
+
+ execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
+ }
+ execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
+
+ if (!in_lto_p)
+ ipa_write_summaries ();
+
+ if (!flag_ltrans)
+ execute_ipa_pass_list (all_regular_ipa_passes);
+
bitmap_obstack_release (NULL);
}
+
/* Perform simple optimizations based on callgraph. */
void
verify_cgraph ();
#endif
- /* Call functions declared with the "constructor" or "destructor"
- attribute. */
- cgraph_build_cdtor_fns ();
-
/* Frontend may output common variables after the unit has been finalized.
It is safe to deal with them here as they are always zero initialized. */
varpool_analyze_pending_decls ();
- cgraph_analyze_functions ();
timevar_push (TV_CGRAPHOPT);
if (pre_ipa_mem_report)
if (errorcount == 0 && sorrycount == 0)
ipa_passes ();
+ /* Do nothing else if any IPA pass found errors. */
+ if (errorcount || sorrycount)
+ {
+ timevar_pop (TV_CGRAPHOPT);
+ return;
+ }
+
/* This pass remove bodies of extern inline functions we never inlined.
Do this later so other IPA passes see what is really going on. */
cgraph_remove_unreachable_nodes (false, dump_file);
timevar_pop (TV_CGRAPHOPT);
/* Output everything. */
+ (*debug_hooks->assembly_start) ();
if (!quiet_flag)
fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
verify_cgraph ();
#endif
+ cgraph_materialize_all_clones ();
cgraph_mark_functions_to_output ();
cgraph_state = CGRAPH_STATE_EXPANSION;
varpool_assemble_pending_decls ();
}
- varpool_output_debug_info ();
cgraph_process_new_functions ();
cgraph_state = CGRAPH_STATE_FINISHED;
}
#endif
}
+
+
/* Generate and emit a static constructor or destructor. WHICH must
be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
name = get_file_function_name (which_buf);
- decl = build_decl (FUNCTION_DECL, name,
+ decl = build_decl (input_location, FUNCTION_DECL, name,
build_function_type (void_type_node, void_list_node));
current_function_decl = decl;
- resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
+ resdecl = build_decl (input_location,
+ RESULT_DECL, NULL_TREE, void_type_node);
DECL_ARTIFICIAL (resdecl) = 1;
DECL_RESULT (decl) = resdecl;
+ DECL_CONTEXT (resdecl) = decl;
allocate_struct_function (decl, false);
{
struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
gimple_call_set_fndecl (e->call_stmt, new_version->decl);
- /* Update EH information too, just in case. */
- if (!stmt_could_throw_p (e->call_stmt)
- && lookup_stmt_eh_region_fn (inner_function, e->call_stmt))
- remove_stmt_from_eh_region_fn (inner_function, e->call_stmt);
+ maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
}
}
also cloned. */
for (e = old_version->callees;e; e=e->next_callee)
{
- new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->frequency,
+ new_e = cgraph_clone_edge (e, new_version, e->call_stmt,
+ e->lto_stmt_uid, 0, e->frequency,
e->loop_nest, true);
new_e->count = e->count;
}
struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
VEC(cgraph_edge_p,heap) *redirect_callers,
- varray_type tree_map,
+ VEC (ipa_replace_map_p,gc)* tree_map,
bitmap args_to_skip)
{
tree old_decl = old_version_node->decl;
??? We cannot use COMDAT linkage because there is no
ABI support for this. */
DECL_EXTERNAL (new_version_node->decl) = 0;
- DECL_ONE_ONLY (new_version_node->decl) = 0;
+ DECL_COMDAT_GROUP (new_version_node->decl) = NULL_TREE;
TREE_PUBLIC (new_version_node->decl) = 0;
DECL_COMDAT (new_version_node->decl) = 0;
DECL_WEAK (new_version_node->decl) = 0;
struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
- struct cgraph_node *first_clone;
+ struct cgraph_node *first_clone, *n;
gcc_assert (node == cgraph_node (node->decl));
cgraph_lower_function (node);
- first_clone = node->next_clone;
+ first_clone = node->clones;
first_clone->decl = copy_node (node->decl);
- node->next_clone = NULL;
- first_clone->prev_clone = NULL;
cgraph_insert_node_to_hashtable (first_clone);
gcc_assert (first_clone == cgraph_node (first_clone->decl));
+ if (first_clone->next_sibling_clone)
+ {
+ for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
+ n->clone_of = first_clone;
+ n->clone_of = first_clone;
+ n->next_sibling_clone = first_clone->clones;
+ if (first_clone->clones)
+ first_clone->clones->prev_sibling_clone = n;
+ first_clone->clones = first_clone->next_sibling_clone;
+ first_clone->next_sibling_clone->prev_sibling_clone = NULL;
+ first_clone->next_sibling_clone = NULL;
+ gcc_assert (!first_clone->prev_sibling_clone);
+ }
+ first_clone->clone_of = NULL;
+ node->clones = NULL;
+
+ if (first_clone->clones)
+ for (n = first_clone->clones; n != first_clone;)
+ {
+ gcc_assert (n->decl == node->decl);
+ n->decl = first_clone->decl;
+ if (n->clones)
+ n = n->clones;
+ else if (n->next_sibling_clone)
+ n = n->next_sibling_clone;
+ else
+ {
+ while (n != first_clone && !n->next_sibling_clone)
+ n = n->clone_of;
+ if (n != first_clone)
+ n = n->next_sibling_clone;
+ }
+ }
/* Copy the OLD_VERSION_NODE function tree to the new version. */
tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL);
DECL_EXTERNAL (first_clone->decl) = 0;
- DECL_ONE_ONLY (first_clone->decl) = 0;
+ DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
TREE_PUBLIC (first_clone->decl) = 0;
DECL_COMDAT (first_clone->decl) = 0;
+ VEC_free (ipa_opt_pass, heap,
+ DECL_STRUCT_FUNCTION (first_clone->decl)->ipa_transforms_to_apply);
+ DECL_STRUCT_FUNCTION (first_clone->decl)->ipa_transforms_to_apply = NULL;
- for (node = first_clone->next_clone; node; node = node->next_clone)
- node->decl = first_clone->decl;
#ifdef ENABLE_CHECKING
verify_cgraph_node (first_clone);
#endif
return first_clone;
}
+/* Given virtual clone, turn it into actual clone: copy the body of the
+   clone_of origin into NODE's decl via tree_function_versioning, applying
+   the recorded replacement map and skipped-argument bitmap, then unlink
+   NODE from the clone tree so it stands alone as a real function.  */
+static void
+cgraph_materialize_clone (struct cgraph_node *node)
+{
+  bitmap_obstack_initialize (NULL);
+  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
+  tree_function_versioning (node->clone_of->decl, node->decl,
+                            node->clone.tree_map, true,
+                            node->clone.args_to_skip);
+  if (cgraph_dump_file)
+    {
+      dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
+      dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
+    }
+
+  /* Function is no longer clone: splice NODE out of the origin's
+     doubly-linked sibling-clone list and clear its clone_of link.  */
+  if (node->next_sibling_clone)
+    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
+  if (node->prev_sibling_clone)
+    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
+  else
+    node->clone_of->clones = node->next_sibling_clone;
+  node->next_sibling_clone = NULL;
+  node->prev_sibling_clone = NULL;
+  node->clone_of = NULL;
+  bitmap_obstack_release (NULL);
+}
+
+/* Once all functions from compilation unit are in memory, produce all clones
+ and update all calls.
+ We might also do this on demand if we don't want to bring all functions to
+   memory prior to compilation, but the current WHOPR implementation does that
+   and it is a bit easier to keep everything right in this order. */
+void
+cgraph_materialize_all_clones (void)
+{
+  struct cgraph_node *node;
+  bool stabilized = false;
+
+  if (cgraph_dump_file)
+    fprintf (cgraph_dump_file, "Materializing clones\n");
+#ifdef ENABLE_CHECKING
+  verify_cgraph ();
+#endif
+
+  /* We can also do topological order, but number of iterations should be
+     bounded by number of IPA passes since single IPA pass is probably not
+     going to create clones of clones it created itself.  */
+  while (!stabilized)
+    {
+      stabilized = true;
+      for (node = cgraph_nodes; node; node = node->next)
+        {
+          if (node->clone_of && node->decl != node->clone_of->decl
+              && !gimple_has_body_p (node->decl))
+            {
+              if (gimple_has_body_p (node->clone_of->decl))
+                {
+                  if (cgraph_dump_file)
+                    {
+                      fprintf (cgraph_dump_file, "clonning %s to %s\n",
+                               cgraph_node_name (node->clone_of),
+                               cgraph_node_name (node));
+                      if (node->clone.tree_map)
+                        {
+                          unsigned int i;
+                          fprintf (cgraph_dump_file, " replace map: ");
+                          for (i = 0; i < VEC_length (ipa_replace_map_p,
+                                                      node->clone.tree_map);
+                               i++)
+                            {
+                              struct ipa_replace_map *replace_info;
+                              replace_info = VEC_index (ipa_replace_map_p,
+                                                        node->clone.tree_map,
+                                                        i);
+                              print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
+                              fprintf (cgraph_dump_file, " -> ");
+                              print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
+                              fprintf (cgraph_dump_file, "%s%s;",
+                                       replace_info->replace_p ? "(replace)":"",
+                                       replace_info->ref_p ? "(ref)":"");
+                            }
+                          fprintf (cgraph_dump_file, "\n");
+                        }
+                      if (node->clone.args_to_skip)
+                        {
+                          fprintf (cgraph_dump_file, " args_to_skip: ");
+                          dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
+                        }
+                      /* Guard on combined_args_to_skip, not args_to_skip:
+                         the two bitmaps are independent, and dump_bitmap on
+                         a NULL bitmap would crash.  */
+                      if (node->clone.combined_args_to_skip)
+                        {
+                          fprintf (cgraph_dump_file, " combined_args_to_skip:");
+                          dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
+                        }
+                    }
+                  cgraph_materialize_clone (node);
+                }
+              else
+                /* Origin has no body yet (it is itself an unmaterialized
+                   clone); retry on the next iteration.  */
+                stabilized = false;
+            }
+        }
+    }
+  if (cgraph_dump_file)
+    fprintf (cgraph_dump_file, "Updating call sites\n");
+  for (node = cgraph_nodes; node; node = node->next)
+    if (node->analyzed && gimple_has_body_p (node->decl)
+        && (!node->clone_of || node->clone_of->decl != node->decl))
+      {
+        struct cgraph_edge *e;
+
+        current_function_decl = node->decl;
+        push_cfun (DECL_STRUCT_FUNCTION (node->decl));
+        for (e = node->callees; e; e = e->next_callee)
+          {
+            tree decl = gimple_call_fndecl (e->call_stmt);
+            /* When function gets inlined, indirect inlining might've invented
+               new edge for originally indirect stmt.  Since we are not
+               preserving clones in the original form, we must not update here
+               since other inline clones don't need to contain call to the same
+               call.  Inliner will do the substitution for us later.  */
+            if (decl && decl != e->callee->decl)
+              {
+                gimple new_stmt;
+                gimple_stmt_iterator gsi;
+
+                if (cgraph_dump_file)
+                  {
+                    fprintf (cgraph_dump_file, "updating call of %s in %s:",
+                             cgraph_node_name (node),
+                             cgraph_node_name (e->callee));
+                    print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
+                  }
+
+                if (e->callee->clone.combined_args_to_skip)
+                  new_stmt = gimple_call_copy_skip_args (e->call_stmt,
+                                                         e->callee->clone.combined_args_to_skip);
+                else
+                  new_stmt = e->call_stmt;
+                /* Keep virtual SSA form consistent: the copied statement
+                   must become the defining statement of its VDEF.  */
+                if (gimple_vdef (new_stmt)
+                    && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
+                  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
+                gimple_call_set_fndecl (new_stmt, e->callee->decl);
+
+                gsi = gsi_for_stmt (e->call_stmt);
+                gsi_replace (&gsi, new_stmt, true);
+
+                /* Update EH information too, just in case.  */
+                maybe_clean_or_replace_eh_stmt (e->call_stmt, new_stmt);
+
+                cgraph_set_call_stmt_including_clones (node, e->call_stmt, new_stmt);
+
+                if (cgraph_dump_file)
+                  {
+                    fprintf (cgraph_dump_file, " updated to:");
+                    print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
+                  }
+              }
+          }
+        pop_cfun ();
+        current_function_decl = NULL;
+#ifdef ENABLE_CHECKING
+        verify_cgraph_node (node);
+#endif
+      }
+#ifdef ENABLE_CHECKING
+  verify_cgraph ();
+#endif
+  cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
+}
+
#include "gt-cgraphunit.h"