static void cgraph_mark_functions_to_output (void);
static void cgraph_expand_function (struct cgraph_node *);
static tree record_reference (tree *, int *, void *);
-static void cgraph_analyze_function (struct cgraph_node *node);
+static void cgraph_output_pending_asms (void);
/* Records tree nodes seen in record_reference. Simply using
walk_tree_without_duplicates doesn't guarantee each node is visited
}
/* If the user told us it is used, then it must be so. */
- if (node->local.externally_visible
- || lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
+ if (node->local.externally_visible)
+ return true;
+
+ if (!flag_unit_at_a_time && lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
return true;
/* ??? If the assembler name is set by hand, it is possible to assemble
return true;
/* Externally visible functions must be output. The exception is
- COMDAT functions that must be output only when they are needed. */
- if ((TREE_PUBLIC (decl) && !flag_whole_program)
+ COMDAT functions that must be output only when they are needed.
+
+ When not optimizing, also output the static functions. (see
+ PR24561), but don't do so for always_inline functions, functions
+     declared inline and nested functions.  These were optimized out
+ in the original implementation and it is unclear whether we want
+ to change the behavior here. */
+ if (((TREE_PUBLIC (decl)
+ || (!optimize && !node->local.disregard_inline_limits
+ && !DECL_DECLARED_INLINE_P (decl)
+ && !node->origin))
+ && !flag_whole_program)
&& !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
return true;
|| (!node->local.disregard_inline_limits
/* When declared inline, defer even the uninlinable functions.
This allows them to be eliminated when unused. */
- && !DECL_DECLARED_INLINE_P (decl)
+ && !DECL_DECLARED_INLINE_P (decl)
&& (!node->local.inlinable || !cgraph_default_inline_p (node, NULL))))
return true;
cgraph_varpool_first_unanalyzed_node = cgraph_varpool_first_unanalyzed_node->next_needed;
+ /* Compute the alignment early so function body expanders are
+ already informed about increased alignment. */
+ align_variable (decl, 0);
+
if (DECL_INITIAL (decl))
{
visited_nodes = pointer_set_create ();
- walk_tree (&DECL_INITIAL (decl), record_reference, NULL, visited_nodes);
+ walk_tree (&DECL_INITIAL (decl), record_reference, NULL, visited_nodes);
pointer_set_destroy (visited_nodes);
visited_nodes = NULL;
}
&& TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
|| node->force_output
|| decide_is_variable_needed (node, decl)
- /* ??? Cgraph does not yet rule the world with an iron hand,
+ /* ??? Cgraph does not yet rule the world with an iron hand,
and does not control the emission of debug information.
After a variable has its DECL_RTL set, we must assume that
it may be referenced by the debug information, and we can
if (flag_unit_at_a_time)
return false;
+ cgraph_output_pending_asms ();
+
while (cgraph_nodes_queue)
{
struct cgraph_node *n = cgraph_nodes_queue;
}
}
+ /* Process CGRAPH_EXPAND_QUEUE, these are functions created during
+     the expansion process.  Note that this queue may grow as it is
+ being processed, as the new functions may generate new ones. */
+ while (cgraph_expand_queue)
+ {
+ struct cgraph_node *n = cgraph_expand_queue;
+ cgraph_expand_queue = cgraph_expand_queue->next_needed;
+ n->next_needed = NULL;
+ cgraph_finalize_function (n->decl, false);
+ output = true;
+ }
+
return output;
}
+
+
/* As an GCC extension we allow redefinition of the function. The
semantics when both copies of bodies differ is not well defined.
We replace the old body with new body so in unit at a time mode
/* If node->output is set, then this is a unit-at-a-time compilation
and we have already begun whole-unit analysis. This is *not*
testing for whether we've already emitted the function. That
- case can be sort-of legitimately seen with real function
+ case can be sort-of legitimately seen with real function
redefinition errors. I would argue that the front end should
never present us with such a case, but don't enforce that for now. */
gcc_assert (!node->output);
if (!flag_unit_at_a_time)
{
- struct cgraph_node *n;
+ struct cgraph_node *n, *next;
- for (n = cgraph_nodes; n; n = n->next)
- if (n->global.inlined_to == node)
- cgraph_remove_node (n);
+ for (n = cgraph_nodes; n; n = next)
+ {
+ next = n->next;
+ if (n->global.inlined_to == node)
+ cgraph_remove_node (n);
+ }
}
cgraph_node_remove_callees (node);
}
}
+/* Run the tree lowering passes on NODE's function body.  Idempotent:
+   the node->lowered flag guards against lowering the same body twice.  */
+static void
+cgraph_lower_function (struct cgraph_node *node)
+{
+  if (node->lowered)
+    return;
+  tree_lowering_passes (node->decl);
+  node->lowered = true;
+}
+
/* DECL has been parsed. Take it, queue it, compile it at the whim of the
logic in effect. If NESTED is true, then our caller cannot stand to have
the garbage collector run at the moment. We would need to either create
do_warn_unused_parameter (decl);
}
-void
-cgraph_lower_function (struct cgraph_node *node)
-{
- if (node->lowered)
- return;
- tree_lowering_passes (node->decl);
- node->lowered = true;
-}
-
/* Walk tree and record all calls. Called via walk_tree. */
static tree
record_reference (tree *tp, int *walk_subtrees, void *data)
{
cgraph_varpool_mark_needed_node (cgraph_varpool_node (t));
if (lang_hooks.callgraph.analyze_expr)
- return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees,
+ return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees,
data);
}
break;
tree step;
visited_nodes = pointer_set_create ();
- /* Reach the trees by walking over the CFG, and note the
+ /* Reach the trees by walking over the CFG, and note the
enclosing basic-blocks in the call edges. */
FOR_EACH_BB_FN (bb, this_cfun)
for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
walk_tree (&TREE_OPERAND (stmt, 0),
record_reference, node, visited_nodes);
}
- else
+ else
walk_tree (bsi_stmt_ptr (bsi), record_reference, node, visited_nodes);
}
else if (TREE_CODE (decl) == VAR_DECL && DECL_INITIAL (decl))
walk_tree (&DECL_INITIAL (decl), record_reference, node, visited_nodes);
}
-
+
pointer_set_destroy (visited_nodes);
visited_nodes = NULL;
}
/* Rebuild call edges from current function after a passes not aware
of cgraph updating. */
-static void
+static unsigned int
rebuild_cgraph_edges (void)
{
basic_block bb;
}
initialize_inline_failed (node);
gcc_assert (!node->global.inlined_to);
+ return 0;
}
struct tree_opt_pass pass_rebuild_cgraph_edges =
}
if (!node->callers && node->global.inlined_to)
{
- error ("inlined_to pointer is set but no predecesors found");
+ error ("inlined_to pointer is set but no predecessors found");
error_found = true;
}
if (node->global.inlined_to == node)
main_clone = main_clone->next_clone)
if (main_clone == node)
break;
- if (!node)
+ if (!cgraph_node (node->decl))
{
- error ("node not found in DECL_ASSEMBLER_NAME hash");
+ error ("node not found in cgraph_hash");
error_found = true;
}
-
+
if (node->analyzed
&& DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
&& (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
debug_generic_stmt (stmt);
error_found = true;
}
- if (e->callee->decl != cgraph_node (decl)->decl)
+ if (e->callee->decl != cgraph_node (decl)->decl
+ && e->inline_failed)
{
error ("edge points to wrong declaration:");
debug_tree (e->callee->decl);
verify_cgraph_node (node);
}
+/* Output one variable, if necessary.  Return whether we output it.  */
+static bool
+cgraph_varpool_assemble_decl (struct cgraph_varpool_node *node)
+{
+  tree decl = node->decl;
+
+  /* Skip decls already written, aliases, externals, and variables with
+     a DECL_VALUE_EXPR -- none of those get storage emitted here.  */
+  if (!TREE_ASM_WRITTEN (decl)
+      && !node->alias
+      && !DECL_EXTERNAL (decl)
+      && (TREE_CODE (decl) != VAR_DECL || !DECL_HAS_VALUE_EXPR_P (decl)))
+    {
+      assemble_variable (decl, 0, 1, 0);
+      /* Local static variables are never seen by check_global_declarations
+         so we need to output debug info by hand.  */
+      if (DECL_CONTEXT (decl)
+          && (TREE_CODE (DECL_CONTEXT (decl)) == BLOCK
+              || TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
+          && errorcount == 0 && sorrycount == 0)
+        {
+          timevar_push (TV_SYMOUT);
+          (*debug_hooks->global_decl) (decl);
+          timevar_pop (TV_SYMOUT);
+        }
+      return true;
+    }
+
+  return false;
+}
/* Output all variables enqueued to be assembled. */
bool
if (errorcount || sorrycount)
return false;
-
+
/* EH might mark decls as needed during expansion. This should be safe since
we don't create references to new function, but it should not be used
elsewhere. */
while (cgraph_varpool_nodes_queue)
{
- tree decl = cgraph_varpool_nodes_queue->decl;
struct cgraph_varpool_node *node = cgraph_varpool_nodes_queue;
cgraph_varpool_nodes_queue = cgraph_varpool_nodes_queue->next_needed;
- if (!TREE_ASM_WRITTEN (decl) && !node->alias && !DECL_EXTERNAL (decl))
- {
- assemble_variable (decl, 0, 1, 0);
- /* Local static variables are never seen by check_global_declarations
- so we need to output debug info by hand. */
- if (DECL_CONTEXT (decl)
- && (TREE_CODE (DECL_CONTEXT (decl)) == BLOCK
- || TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
- && errorcount == 0 && sorrycount == 0)
- {
- timevar_push (TV_SYMOUT);
- (*debug_hooks->global_decl) (decl);
- timevar_pop (TV_SYMOUT);
- }
- changed = true;
- }
+ if (cgraph_varpool_assemble_decl (node))
+ changed = true;
node->next_needed = NULL;
}
return changed;
}
-/* Analyze the function scheduled to be output. */
+/* Output all asm statements we have stored up to be output. */
+
static void
+cgraph_output_pending_asms (void)
+{
+  struct cgraph_asm_node *can;
+
+  /* Emit nothing once an error or sorry has been reported.  */
+  if (errorcount || sorrycount)
+    return;
+
+  for (can = cgraph_asm_nodes; can; can = can->next)
+    assemble_asm (can->asm_str);
+  /* Clear the list so a later call does not emit the asms again.  */
+  cgraph_asm_nodes = NULL;
+}
+
+/* Analyze the function scheduled to be output. */
+void
cgraph_analyze_function (struct cgraph_node *node)
{
tree decl = node->decl;
current_function_decl = NULL;
}
+/* Look for externally_visible and used attributes and mark cgraph nodes
+   accordingly.
+
+   We cannot mark the nodes at the point the attributes are processed (in
+   handle_*_attribute) because the copy of the declarations available at that
+   point may not be canonical.  For example, in:
+
+     void f();
+     void f() __attribute__((used));
+
+   the declaration we see in handle_used_attribute will be the second
+   declaration -- but the front end will subsequently merge that declaration
+   with the original declaration and discard the second declaration.
+
+   Furthermore, we can't mark these nodes in cgraph_finalize_function because:
+
+     void f() {}
+     void f() __attribute__((externally_visible));
+
+   is valid.
+
+   So, we walk the nodes at the end of the translation unit, applying the
+   attributes at that point.  */
+
+static void
+process_function_and_variable_attributes (struct cgraph_node *first,
+                                          struct cgraph_varpool_node *first_var)
+{
+  struct cgraph_node *node;
+  struct cgraph_varpool_node *vnode;
+
+  /* Walk only the nodes added since the previous invocation: FIRST and
+     FIRST_VAR are the list heads recorded by that invocation, and new
+     nodes are pushed onto the front of each list.  */
+  for (node = cgraph_nodes; node != first; node = node->next)
+    {
+      tree decl = node->decl;
+      if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
+        {
+          mark_decl_referenced (decl);
+          if (node->local.finalized)
+            cgraph_mark_needed_node (node);
+        }
+      if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
+        {
+          if (node->local.finalized)
+            cgraph_mark_needed_node (node);
+          node->externally_visible = true;
+        }
+    }
+  for (vnode = cgraph_varpool_nodes; vnode != first_var; vnode = vnode->next)
+    {
+      tree decl = vnode->decl;
+      if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
+        {
+          mark_decl_referenced (decl);
+          if (vnode->finalized)
+            cgraph_varpool_mark_needed_node (vnode);
+        }
+      if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
+        {
+          if (vnode->finalized)
+            cgraph_varpool_mark_needed_node (vnode);
+          vnode->externally_visible = true;
+        }
+    }
+}
+
+
/* Analyze the whole compilation unit once it is parsed completely. */
void
cgraph_finalize_compilation_unit (void)
{
- struct cgraph_node *node;
+ struct cgraph_node *node, *next;
/* Keep track of already processed nodes when called multiple times for
intermodule optimization. */
static struct cgraph_node *first_analyzed;
+ static struct cgraph_varpool_node *first_analyzed_var;
+
+ if (errorcount || sorrycount)
+ return;
finish_aliases_1 ();
if (!flag_unit_at_a_time)
{
+ cgraph_output_pending_asms ();
cgraph_assemble_pending_functions ();
return;
}
}
timevar_push (TV_CGRAPH);
+ process_function_and_variable_attributes (first_analyzed, first_analyzed_var);
cgraph_varpool_analyze_pending_decls ();
if (cgraph_dump_file)
{
if (cgraph_dump_file)
fprintf (cgraph_dump_file, "\nReclaiming functions:");
- for (node = cgraph_nodes; node != first_analyzed; node = node->next)
+ for (node = cgraph_nodes; node != first_analyzed; node = next)
{
tree decl = node->decl;
+ next = node->next;
if (node->local.finalized && !DECL_SAVED_TREE (decl))
- cgraph_reset_node (node);
+ cgraph_reset_node (node);
if (!node->reachable && DECL_SAVED_TREE (decl))
{
dump_cgraph (cgraph_dump_file);
}
first_analyzed = cgraph_nodes;
+ first_analyzed_var = cgraph_varpool_nodes;
ggc_collect ();
timevar_pop (TV_CGRAPH);
}
{
tree decl = node->decl;
struct cgraph_edge *e;
-
+
gcc_assert (!node->output);
for (e = node->callers; e; e = e->next_caller)
|| DECL_EXTERNAL (decl));
}
-
+
}
}
cgraph_expand_all_functions (void)
{
struct cgraph_node *node;
- struct cgraph_node **order =
- xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
+ struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
int order_pos = 0, new_order_pos = 0;
int i;
cgraph_expand_function (node);
}
}
+
free (order);
+
+ /* Process CGRAPH_EXPAND_QUEUE, these are functions created during
+     the expansion process.  Note that this queue may grow as it is
+ being processed, as the new functions may generate new ones. */
+ while (cgraph_expand_queue)
+ {
+ node = cgraph_expand_queue;
+ cgraph_expand_queue = cgraph_expand_queue->next_needed;
+ node->next_needed = NULL;
+ node->output = 0;
+ node->lowered = DECL_STRUCT_FUNCTION (node->decl)->cfg != NULL;
+ cgraph_expand_function (node);
+ }
+}
+
+/* This is used to sort the node types by the cgraph order number.  */
+
+struct cgraph_order_sort
+{
+  /* Which union member is valid; ORDER_UNDEFINED marks an unused slot.  */
+  enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
+  union
+  {
+    struct cgraph_node *f;
+    struct cgraph_varpool_node *v;
+    struct cgraph_asm_node *a;
+  } u;
+};
+
+/* Output all functions, variables, and asm statements in the order
+   according to their order fields, which is the order in which they
+   appeared in the file.  This implements -fno-toplevel-reorder.  In
+   this mode we may output functions and variables which don't really
+   need to be output.  */
+
+static void
+cgraph_output_in_order (void)
+{
+  int max;
+  struct cgraph_order_sort *nodes;
+  int i;
+  struct cgraph_node *pf;
+  struct cgraph_varpool_node *pv;
+  struct cgraph_asm_node *pa;
+
+  max = cgraph_order;
+  /* Allocate on the heap rather than with alloca: MAX counts every
+     toplevel entity seen so far and can be arbitrarily large, which
+     would risk stack overflow.  XCNEWVEC zero-fills, so every slot
+     starts out as ORDER_UNDEFINED.  */
+  nodes = XCNEWVEC (struct cgraph_order_sort, max);
+
+  cgraph_varpool_analyze_pending_decls ();
+
+  for (pf = cgraph_nodes; pf; pf = pf->next)
+    {
+      if (pf->output)
+        {
+          i = pf->order;
+          gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
+          nodes[i].kind = ORDER_FUNCTION;
+          nodes[i].u.f = pf;
+        }
+    }
+
+  for (pv = cgraph_varpool_nodes_queue; pv; pv = pv->next_needed)
+    {
+      i = pv->order;
+      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
+      nodes[i].kind = ORDER_VAR;
+      nodes[i].u.v = pv;
+    }
+
+  for (pa = cgraph_asm_nodes; pa; pa = pa->next)
+    {
+      i = pa->order;
+      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
+      nodes[i].kind = ORDER_ASM;
+      nodes[i].u.a = pa;
+    }
+
+  /* Now emit everything in file order.  */
+  for (i = 0; i < max; ++i)
+    {
+      switch (nodes[i].kind)
+        {
+        case ORDER_FUNCTION:
+          nodes[i].u.f->output = 0;
+          cgraph_expand_function (nodes[i].u.f);
+          break;
+
+        case ORDER_VAR:
+          cgraph_varpool_assemble_decl (nodes[i].u.v);
+          break;
+
+        case ORDER_ASM:
+          assemble_asm (nodes[i].u.a->asm_str);
+          break;
+
+        case ORDER_UNDEFINED:
+          break;
+
+        default:
+          gcc_unreachable ();
+        }
+    }
+
+  /* The asm list has been consumed.  */
+  cgraph_asm_nodes = NULL;
+
+  free (nodes);
+}
/* Mark visibility of all functions.
-
+
A local function is one whose calls can occur only in the current
compilation unit and all its calls are explicit, so we can change
its calling convention. We simply mark all static functions whose
cgraph_preserve_function_body_p (tree decl)
{
struct cgraph_node *node;
- /* Keep the body; we're going to dump it. */
- if (dump_enabled_p (TDI_tree_all))
- return true;
if (!cgraph_global_info_ready)
return (DECL_INLINE (decl) && !flag_really_no_inline);
/* Look if there is any clone around. */
void
cgraph_optimize (void)
{
+ if (errorcount || sorrycount)
+ return;
+
#ifdef ENABLE_CHECKING
verify_cgraph ();
#endif
if (!flag_unit_at_a_time)
{
+ cgraph_output_pending_asms ();
cgraph_varpool_assemble_pending_decls ();
return;
}
process_pending_assemble_externals ();
-
+
/* Frontend may output common variables after the unit has been finalized.
It is safe to deal with them here as they are always zero initialized. */
cgraph_varpool_analyze_pending_decls ();
fprintf (cgraph_dump_file, "Marked ");
dump_cgraph (cgraph_dump_file);
}
- ipa_passes ();
+
+ /* Don't run the IPA passes if there was any error or sorry messages. */
+ if (errorcount == 0 && sorrycount == 0)
+ ipa_passes ();
+
/* This pass remove bodies of extern inline functions we never inlined.
Do this later so other IPA passes see what is really going on. */
cgraph_remove_unreachable_nodes (false, dump_file);
#ifdef ENABLE_CHECKING
verify_cgraph ();
#endif
-
+
cgraph_mark_functions_to_output ();
- cgraph_expand_all_functions ();
- cgraph_varpool_remove_unreferenced_decls ();
- cgraph_varpool_assemble_pending_decls ();
+ if (!flag_toplevel_reorder)
+ cgraph_output_in_order ();
+ else
+ {
+ cgraph_output_pending_asms ();
+
+ cgraph_expand_all_functions ();
+ cgraph_varpool_remove_unreferenced_decls ();
+
+ cgraph_varpool_assemble_pending_decls ();
+ }
if (cgraph_dump_file)
{
/* Double check that all inline clones are gone and that all
function bodies have been released from memory. */
if (flag_unit_at_a_time
- && !dump_enabled_p (TDI_tree_all)
&& !(sorrycount || errorcount))
{
struct cgraph_node *node;
for (node = cgraph_nodes; node; node = node->next)
if (node->analyzed
&& (node->global.inlined_to
- || DECL_SAVED_TREE (node->decl)))
+ || DECL_SAVED_TREE (node->decl)))
{
error_found = true;
dump_cgraph_node (stderr, node);
- }
+ }
if (error_found)
internal_error ("nodes with no released memory found");
}
}
/* Generate and emit a static constructor or destructor. WHICH must be
- one of 'I' or 'D'. BODY should be a STATEMENT_LIST containing
+ one of 'I' or 'D'. BODY should be a STATEMENT_LIST containing
GENERIC statements. */
void
}
else
cgraph_finalize_function (decl, 0);
-
+
if (targetm.have_ctors_dtors)
{
void (*fn) (rtx, int);
cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
}
-/* The edges representing the callers of the NEW_VERSION node were
+/* The edges representing the callers of the NEW_VERSION node were
fixed by cgraph_function_versioning (), now the call_expr in their
respective tree code should be updated to call the NEW_VERSION. */
static struct cgraph_node *
cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
- tree new_decl, varray_type redirect_callers)
+ tree new_decl,
+ VEC(cgraph_edge_p,heap) *redirect_callers)
{
struct cgraph_node *new_version;
struct cgraph_edge *e, *new_e;
unsigned i;
gcc_assert (old_version);
-
+
new_version = cgraph_node (new_decl);
new_version->analyzed = true;
next_callee = e->next_callee;
if (e->callee == old_version)
cgraph_redirect_edge_callee (e, new_version);
-
+
if (!next_callee)
break;
}
- if (redirect_callers)
- for (i = 0; i < VARRAY_ACTIVE_SIZE (redirect_callers); i++)
- {
- e = VARRAY_GENERIC_PTR (redirect_callers, i);
- /* Redirect calls to the old version node
- to point to it's new version. */
- cgraph_redirect_edge_callee (e, new_version);
- }
+ for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
+ {
+ /* Redirect calls to the old version node to point to its new
+ version. */
+ cgraph_redirect_edge_callee (e, new_version);
+ }
return new_version;
}
/* Perform function versioning.
- Function versioning includes copying of the tree and
+ Function versioning includes copying of the tree and
a callgraph update (creating a new cgraph node and updating
its callees and callers).
struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
- varray_type redirect_callers,
+ VEC(cgraph_edge_p,heap) *redirect_callers,
varray_type tree_map)
{
tree old_decl = old_version_node->decl;
redirect_callers);
/* Copy the OLD_VERSION_NODE function tree to the new version. */
- tree_function_versioning (old_decl, new_decl, tree_map);
+ tree_function_versioning (old_decl, new_decl, tree_map, false);
/* Update the call_expr on the edges to call the new version node. */
update_call_expr (new_version_node);
- /* Update the new version's properties.
+ /* Update the new version's properties.
Make The new version visible only within this translation unit.
- ??? We cannot use COMDAT linkage because there is no
+ ??? We cannot use COMDAT linkage because there is no
ABI support for this. */
DECL_EXTERNAL (new_version_node->decl) = 0;
DECL_ONE_ONLY (new_version_node->decl) = 0;
new_version_node->lowered = true;
return new_version_node;
}
+
+/* Produce a separate function body for inline clones so the offline copy
+   can be modified without affecting them.  Return the clone node that
+   received the copied body.  */
+struct cgraph_node *
+save_inline_function_body (struct cgraph_node *node)
+{
+  struct cgraph_node *first_clone;
+
+  /* NODE must be the master node registered in the cgraph hashtable.  */
+  gcc_assert (node == cgraph_node (node->decl));
+
+  cgraph_lower_function (node);
+
+  /* In non-unit-at-a-time mode we construct a full-fledged clone that we
+     never output to the assembly file.  This clone is pointed to by the
+     inline_decl of the original function, and the inlining infrastructure
+     knows how to deal with this.  */
+  if (!flag_unit_at_a_time)
+    {
+      struct cgraph_edge *e;
+
+      first_clone = cgraph_clone_node (node, node->count, 0, false);
+      first_clone->needed = 0;
+      first_clone->reachable = 1;
+      /* Recursively clone all bodies.  */
+      for (e = first_clone->callees; e; e = e->next_callee)
+	if (!e->inline_failed)
+	  cgraph_clone_inlined_nodes (e, true, false);
+    }
+  else
+    first_clone = node->next_clone;
+
+  /* Give the clone its own DECL, detach it from NODE's clone list, and
+     register it in the hashtable under the new DECL.  */
+  first_clone->decl = copy_node (node->decl);
+  node->next_clone = NULL;
+  if (!flag_unit_at_a_time)
+    node->inline_decl = first_clone->decl;
+  first_clone->prev_clone = NULL;
+  cgraph_insert_node_to_hashtable (first_clone);
+  gcc_assert (first_clone == cgraph_node (first_clone->decl));
+
+  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
+  tree_function_versioning (node->decl, first_clone->decl, NULL, true);
+
+  /* Make the saved body local to this unit: clear the external, one-only,
+     public and comdat flags on its DECL.  */
+  DECL_EXTERNAL (first_clone->decl) = 0;
+  DECL_ONE_ONLY (first_clone->decl) = 0;
+  TREE_PUBLIC (first_clone->decl) = 0;
+  DECL_COMDAT (first_clone->decl) = 0;
+
+  /* The remaining clones all share the new DECL.  */
+  for (node = first_clone->next_clone; node; node = node->next_clone)
+    node->decl = first_clone->decl;
+#ifdef ENABLE_CHECKING
+  verify_cgraph_node (first_clone);
+#endif
+  return first_clone;
+}
+