/* Callgraph handling code.
- Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008
+ Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
Free Software Foundation, Inc.
Contributed by Jan Hubicka
#include "target.h"
#include "basic-block.h"
#include "cgraph.h"
-#include "varray.h"
#include "output.h"
#include "intl.h"
#include "gimple.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "value-prof.h"
+#include "except.h"
static void cgraph_node_remove_callers (struct cgraph_node *node);
static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
return (hashval_t) DECL_UID (n->decl);
}
+
+/* Return the cgraph node associated with function DECL. If none
+ exists, return NULL. */
+
+struct cgraph_node *
+cgraph_node_for_decl (tree decl)
+{
+ struct cgraph_node *node;
+ void **slot;
+
+ gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
+
+ node = NULL;
+ if (cgraph_hash)
+ {
+ struct cgraph_node key;
+
+ key.decl = decl;
+ slot = htab_find_slot (cgraph_hash, &key, NO_INSERT);
+ if (slot && *slot)
+ node = (struct cgraph_node *) *slot;
+ }
+
+ return node;
+}
+
+
/* Returns nonzero if P1 and P2 are equal. */
static int
return node;
}
+/* Returns the cgraph node assigned to DECL or NULL if no cgraph node
+ is assigned. */
+
+struct cgraph_node *
+cgraph_get_node (tree decl)
+{
+ struct cgraph_node key, *node = NULL, **slot;
+
+ gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
+
+ if (!cgraph_hash)
+ cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);
+
+ key.decl = decl;
+
+ slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
+ NO_INSERT);
+
+ if (slot && *slot)
+ node = *slot;
+ return node;
+}
+
/* Insert already constructed node into hashtable. */
void
}
-/* Change field call_smt of edge E to NEW_STMT. */
+/* Change field call_stmt of edge E to NEW_STMT. */
void
cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt)
}
}
+/* Like cgraph_set_call_stmt but walk the clone tree and update all
+ clones sharing the same function body. */
+
+void
+cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
+ gimple old_stmt, gimple new_stmt)
+{
+ struct cgraph_node *node;
+ struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);
+
+ if (edge)
+ cgraph_set_call_stmt (edge, new_stmt);
+
+ node = orig->clones;
+ if (node)
+ while (node != orig)
+ {
+ struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
+ if (edge)
+ cgraph_set_call_stmt (edge, new_stmt);
+ if (node->clones)
+ node = node->clones;
+ else if (node->next_sibling_clone)
+ node = node->next_sibling_clone;
+ else
+ {
+ while (node != orig && !node->next_sibling_clone)
+ node = node->clone_of;
+ if (node != orig)
+ node = node->next_sibling_clone;
+ }
+ }
+}
+
+/* Like cgraph_create_edge, walk the clone tree and update all clones sharing
+ the same function body.
+
+ TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
+ frequencies of the clones. */
+
+void
+cgraph_create_edge_including_clones (struct cgraph_node *orig,
+ struct cgraph_node *callee,
+ gimple stmt, gcov_type count,
+ int freq, int loop_depth,
+ cgraph_inline_failed_t reason)
+{
+ struct cgraph_node *node;
+ struct cgraph_edge *edge;
+
+ if (!cgraph_edge (orig, stmt))
+ {
+ edge = cgraph_create_edge (orig, callee, stmt, count, freq, loop_depth);
+ edge->inline_failed = reason;
+ }
+
+ node = orig->clones;
+ if (node)
+ while (node != orig)
+ {
+ /* It is possible that we already constant propagated into the clone
+ and turned the indirect call into a direct call. */
+ if (!cgraph_edge (node, stmt))
+ {
+ edge = cgraph_create_edge (node, callee, stmt, count,
+ freq, loop_depth);
+ edge->inline_failed = reason;
+ }
+
+ if (node->clones)
+ node = node->clones;
+ else if (node->next_sibling_clone)
+ node = node->next_sibling_clone;
+ else
+ {
+ while (node != orig && !node->next_sibling_clone)
+ node = node->clone_of;
+ if (node != orig)
+ node = node->next_sibling_clone;
+ }
+ }
+}
+
/* Give initial reasons why inlining would fail on EDGE. This gets either
nullified or usually overwritten by more precise reasons later. */
/* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
- OLD_STMT changed into NEW_STMT. */
+ OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
+ of OLD_STMT if it was previously call statement. */
-void
-cgraph_update_edges_for_call_stmt (gimple old_stmt, gimple new_stmt)
+static void
+cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
+ gimple old_stmt, tree old_call, gimple new_stmt)
{
- tree new_call = (is_gimple_call (new_stmt)) ? gimple_call_fn (new_stmt) : 0;
- tree old_call = (is_gimple_call (old_stmt)) ? gimple_call_fn (old_stmt) : 0;
- struct cgraph_node *node = cgraph_node (cfun->decl);
+ tree new_call = (is_gimple_call (new_stmt)) ? gimple_call_fndecl (new_stmt) : 0;
+ /* If we are seeing an indirect call, there is nothing to update. */
+ if (!new_call && !old_call)
+ return;
+ /* See if we turned an indirect call into a direct call, or folded a call
+ to one builtin into a call to a different builtin. */
if (old_call != new_call)
{
struct cgraph_edge *e = cgraph_edge (node, old_stmt);
struct cgraph_edge *ne = NULL;
- tree new_decl;
+ gcov_type count;
+ int frequency;
+ int loop_nest;
if (e)
{
- gcov_type count = e->count;
- int frequency = e->frequency;
- int loop_nest = e->loop_nest;
-
+ /* See if the call is already there. It might be because indirect
+ inlining has already found it. */
+ if (new_call && e->callee->decl == new_call)
+ return;
+
+ /* Otherwise remove edge and create new one; we can't simply redirect
+ since function has changed, so inline plan and other information
+ attached to edge is invalid. */
cgraph_remove_edge (e);
- if (new_call)
- {
- new_decl = gimple_call_fndecl (new_stmt);
- if (new_decl)
- {
- ne = cgraph_create_edge (node, cgraph_node (new_decl),
- new_stmt, count, frequency,
- loop_nest);
- gcc_assert (ne->inline_failed);
- }
- }
+ count = e->count;
+ frequency = e->frequency;
+ loop_nest = e->loop_nest;
+ }
+ else
+ {
+ /* We are seeing new direct call; compute profile info based on BB. */
+ basic_block bb = gimple_bb (new_stmt);
+ count = bb->count;
+ frequency = compute_call_stmt_bb_frequency (current_function_decl,
+ bb);
+ loop_nest = bb->loop_depth;
+ }
+
+ if (new_call)
+ {
+ ne = cgraph_create_edge (node, cgraph_node (new_call),
+ new_stmt, count, frequency,
+ loop_nest);
+ gcc_assert (ne->inline_failed);
}
}
+ /* We only updated the call statement; update the pointer in the cgraph edge. */
else if (old_stmt != new_stmt)
- {
- struct cgraph_edge *e = cgraph_edge (node, old_stmt);
+ cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
+}
- if (e)
- cgraph_set_call_stmt (e, new_stmt);
- }
+/* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
+ OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
+ of OLD_STMT before it was updated (updating can happen inplace). */
+
+void
+cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
+{
+ struct cgraph_node *orig = cgraph_node (cfun->decl);
+ struct cgraph_node *node;
+
+ cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
+ if (orig->clones)
+ for (node = orig->clones; node != orig;)
+ {
+ cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
+ if (node->clones)
+ node = node->clones;
+ else if (node->next_sibling_clone)
+ node = node->next_sibling_clone;
+ else
+ {
+ while (node != orig && !node->next_sibling_clone)
+ node = node->clone_of;
+ if (node != orig)
+ node = node->next_sibling_clone;
+ }
+ }
}
slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
if (*slot == node)
{
- if (node->next_clone)
- {
- struct cgraph_node *new_node = node->next_clone;
+ struct cgraph_node *next_inline_clone;
- *slot = new_node;
- node->next_clone->prev_clone = NULL;
- }
+ for (next_inline_clone = node->clones;
+ next_inline_clone && next_inline_clone->decl != node->decl;
+ next_inline_clone = next_inline_clone->next_sibling_clone)
+ ;
+
+ /* If there is an inline clone of the node being removed, we need
+ to put it into the position of the removed node and reorganize all
+ other clones to be based on it. */
+ if (next_inline_clone)
+ {
+ struct cgraph_node *n;
+ struct cgraph_node *new_clones;
+
+ *slot = next_inline_clone;
+
+ /* Unlink inline clone from the list of clones of removed node. */
+ if (next_inline_clone->next_sibling_clone)
+ next_inline_clone->next_sibling_clone->prev_sibling_clone
+ = next_inline_clone->prev_sibling_clone;
+ if (next_inline_clone->prev_sibling_clone)
+ {
+ next_inline_clone->prev_sibling_clone->next_sibling_clone
+ = next_inline_clone->next_sibling_clone;
+ }
+ else
+ node->clones = next_inline_clone->next_sibling_clone;
+
+ new_clones = node->clones;
+ node->clones = NULL;
+
+ /* Copy clone info. */
+ next_inline_clone->clone = node->clone;
+
+ /* Now place it into the clone tree at the same level as NODE. */
+ next_inline_clone->clone_of = node->clone_of;
+ next_inline_clone->prev_sibling_clone = NULL;
+ next_inline_clone->next_sibling_clone = NULL;
+ if (node->clone_of)
+ {
+ next_inline_clone->next_sibling_clone = node->clone_of->clones;
+ node->clone_of->clones = next_inline_clone;
+ }
+
+ /* Merge the clone list. */
+ if (new_clones)
+ {
+ if (!next_inline_clone->clones)
+ next_inline_clone->clones = new_clones;
+ else
+ {
+ n = next_inline_clone->clones;
+ while (n->next_sibling_clone)
+ n = n->next_sibling_clone;
+ n->next_sibling_clone = new_clones;
+ new_clones->prev_sibling_clone = n;
+ }
+ }
+
+ /* Update clone_of pointers. */
+ n = new_clones;
+ while (n)
+ {
+ n->clone_of = next_inline_clone;
+ n = n->next_sibling_clone;
+ }
+ }
else
{
htab_clear_slot (cgraph_hash, slot);
kill_body = true;
}
+
}
else
+ gcc_assert (node->clone_of);
+ if (node->prev_sibling_clone)
+ node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
+ else if (node->clone_of)
+ node->clone_of->clones = node->next_sibling_clone;
+ if (node->next_sibling_clone)
+ node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
+ if (node->clones)
{
- node->prev_clone->next_clone = node->next_clone;
- if (node->next_clone)
- node->next_clone->prev_clone = node->prev_clone;
+ struct cgraph_node *n;
+
+ for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
+ n->clone_of = node->clone_of;
+ n->clone_of = node->clone_of;
+ n->next_sibling_clone = node->clone_of->clones;
+ if (node->clone_of->clones)
+ node->clone_of->clones->prev_sibling_clone = n;
+ node->clone_of->clones = node->clones;
}
/* While all the clones are removed after being proceeded, the function
if (!kill_body && *slot)
{
struct cgraph_node *n = (struct cgraph_node *) *slot;
- if (!n->next_clone && !n->global.inlined_to
+ if (!n->clones && !n->clone_of && !n->global.inlined_to
&& (cgraph_global_info_ready
&& (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl))))
kill_body = true;
free_nodes = node;
}
+/* Remove the node from the cgraph, together with all inline clones inlined into it. */
+
+void
+cgraph_remove_node_and_inline_clones (struct cgraph_node *node)
+{
+ struct cgraph_edge *e, *next;
+ for (e = node->callees; e; e = next)
+ {
+ next = e->next_callee;
+ if (!e->inline_failed)
+ cgraph_remove_node_and_inline_clones (e->callee);
+ }
+ cgraph_remove_node (node);
+}
+
/* Notify finalize_compilation_unit that given node is reachable. */
void
cgraph_mark_reachable_node (node);
}
+/* Likewise indicate that a node has its address taken. */
+
+void
+cgraph_mark_address_taken_node (struct cgraph_node *node)
+{
+ node->address_taken = 1;
+ cgraph_mark_needed_node (node);
+}
+
/* Return local info for the compiled function. */
struct cgraph_local_info *
dump_cgraph_node (FILE *f, struct cgraph_node *node)
{
struct cgraph_edge *edge;
- fprintf (f, "%s/%i(%i) [%p]:", cgraph_node_name (node), node->uid,
- node->pid, (void *) node);
+ fprintf (f, "%s/%i(%i)", cgraph_node_name (node), node->uid,
+ node->pid);
+ dump_addr (f, " @", (void *)node);
if (node->global.inlined_to)
fprintf (f, " (inline copy in %s/%i)",
cgraph_node_name (node->global.inlined_to),
node->global.inlined_to->uid);
+ if (node->clone_of)
+ fprintf (f, " (clone of %s/%i)",
+ cgraph_node_name (node->clone_of),
+ node->clone_of->uid);
if (cgraph_function_flags_ready)
fprintf (f, " availability:%s",
cgraph_availability_names [cgraph_function_body_availability (node)]);
if (node->count)
fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
(HOST_WIDEST_INT)node->count);
- if (node->local.inline_summary.self_insns)
- fprintf (f, " %i insns", node->local.inline_summary.self_insns);
- if (node->global.insns && node->global.insns
- != node->local.inline_summary.self_insns)
- fprintf (f, " (%i after inlining)", node->global.insns);
+ if (node->local.inline_summary.self_time)
+ fprintf (f, " %i time, %i benefit", node->local.inline_summary.self_time,
+ node->local.inline_summary.time_inlining_benefit);
+ if (node->global.time && node->global.time
+ != node->local.inline_summary.self_time)
+ fprintf (f, " (%i after inlining)", node->global.time);
+ if (node->local.inline_summary.self_size)
+ fprintf (f, " %i size, %i benefit", node->local.inline_summary.self_size,
+ node->local.inline_summary.size_inlining_benefit);
+ if (node->global.size && node->global.size
+ != node->local.inline_summary.self_size)
+ fprintf (f, " (%i after inlining)", node->global.size);
if (node->local.inline_summary.estimated_self_stack_size)
fprintf (f, " %i bytes stack usage", (int)node->local.inline_summary.estimated_self_stack_size);
if (node->global.estimated_stack_size != node->local.inline_summary.estimated_self_stack_size)
fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
if (node->needed)
fprintf (f, " needed");
+ if (node->address_taken)
+ fprintf (f, " address_taken");
else if (node->reachable)
fprintf (f, " reachable");
if (gimple_has_body_p (node->decl))
by node. */
struct cgraph_node *
cgraph_clone_node (struct cgraph_node *n, gcov_type count, int freq,
- int loop_nest, bool update_original)
+ int loop_nest, bool update_original,
+ VEC(cgraph_edge_p,heap) *redirect_callers)
{
struct cgraph_node *new_node = cgraph_create_node ();
struct cgraph_edge *e;
gcov_type count_scale;
+ unsigned i;
new_node->decl = n->decl;
new_node->origin = n->origin;
new_node->global = n->global;
new_node->rtl = n->rtl;
new_node->count = count;
+ new_node->clone = n->clone;
if (n->count)
{
if (new_node->count > n->count)
n->count = 0;
}
+ for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
+ {
+ /* Redirect calls to the old version node to point to its new
+ version. */
+ cgraph_redirect_edge_callee (e, new_node);
+ }
+
+
for (e = n->callees;e; e=e->next_callee)
cgraph_clone_edge (e, new_node, e->call_stmt, count_scale, freq, loop_nest,
update_original);
- new_node->next_clone = n->next_clone;
- new_node->prev_clone = n;
- n->next_clone = new_node;
- if (new_node->next_clone)
- new_node->next_clone->prev_clone = new_node;
+ new_node->next_sibling_clone = n->clones;
+ if (n->clones)
+ n->clones->prev_sibling_clone = new_node;
+ n->clones = new_node;
+ new_node->clone_of = n;
cgraph_call_node_duplication_hooks (n, new_node);
return new_node;
}
+/* Create a new name for the cloned function. Returns an identifier. */
+
+static GTY(()) unsigned int clone_fn_id_num;
+
+static tree
+clone_function_name (tree decl)
+{
+ tree name = DECL_ASSEMBLER_NAME (decl);
+ size_t len = IDENTIFIER_LENGTH (name);
+ char *tmp_name, *prefix;
+
+ prefix = XALLOCAVEC (char, len + strlen ("_clone") + 1);
+ memcpy (prefix, IDENTIFIER_POINTER (name), len);
+ strcpy (prefix + len, "_clone");
+#ifndef NO_DOT_IN_LABEL
+ prefix[len] = '.';
+#elif !defined NO_DOLLAR_IN_LABEL
+ prefix[len] = '$';
+#endif
+ ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
+ return get_identifier (tmp_name);
+}
+
+/* Create callgraph node clone with new declaration. The actual body will
+ be copied later at compilation stage.
+
+ TODO: after merging in ipa-sra use function call notes instead of args_to_skip
+ bitmap interface.
+ */
+struct cgraph_node *
+cgraph_create_virtual_clone (struct cgraph_node *old_node,
+ VEC(cgraph_edge_p,heap) *redirect_callers,
+ VEC(ipa_replace_map_p,gc) *tree_map,
+ bitmap args_to_skip)
+{
+ tree old_decl = old_node->decl;
+ struct cgraph_node *new_node = NULL;
+ tree new_decl;
+ struct cgraph_node key, **slot;
+
+ gcc_assert (tree_versionable_function_p (old_decl));
+
+ /* Make a new FUNCTION_DECL tree node */
+ if (!args_to_skip)
+ new_decl = copy_node (old_decl);
+ else
+ new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
+ DECL_STRUCT_FUNCTION (new_decl) = NULL;
+
+ /* Generate a new name for the new version. */
+ DECL_NAME (new_decl) = clone_function_name (old_decl);
+ SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
+ SET_DECL_RTL (new_decl, NULL);
+
+ new_node = cgraph_clone_node (old_node, old_node->count,
+ CGRAPH_FREQ_BASE, 0, false,
+ redirect_callers);
+ new_node->decl = new_decl;
+ /* Update the properties.
+ Make the clone visible only within this translation unit. Make sure
+ it is not weak either.
+ ??? We cannot use COMDAT linkage because there is no
+ ABI support for this. */
+ DECL_EXTERNAL (new_node->decl) = 0;
+ DECL_COMDAT_GROUP (new_node->decl) = 0;
+ TREE_PUBLIC (new_node->decl) = 0;
+ DECL_COMDAT (new_node->decl) = 0;
+ DECL_WEAK (new_node->decl) = 0;
+ new_node->clone.tree_map = tree_map;
+ new_node->clone.args_to_skip = args_to_skip;
+ if (!args_to_skip)
+ new_node->clone.combined_args_to_skip = old_node->clone.combined_args_to_skip;
+ else if (old_node->clone.combined_args_to_skip)
+ {
+ int newi = 0, oldi = 0;
+ tree arg;
+ bitmap new_args_to_skip = BITMAP_GGC_ALLOC ();
+ struct cgraph_node *orig_node;
+ for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
+ ;
+ for (arg = DECL_ARGUMENTS (orig_node->decl); arg; arg = TREE_CHAIN (arg), oldi++)
+ {
+ if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
+ {
+ bitmap_set_bit (new_args_to_skip, oldi);
+ continue;
+ }
+ if (bitmap_bit_p (args_to_skip, newi))
+ bitmap_set_bit (new_args_to_skip, oldi);
+ newi++;
+ }
+ new_node->clone.combined_args_to_skip = new_args_to_skip;
+ }
+ else
+ new_node->clone.combined_args_to_skip = args_to_skip;
+ new_node->local.externally_visible = 0;
+ new_node->local.local = 1;
+ new_node->lowered = true;
+ new_node->reachable = true;
+
+ key.decl = new_decl;
+ slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
+ gcc_assert (!*slot);
+ *slot = new_node;
+ if (assembler_name_hash)
+ {
+ void **aslot;
+ tree name = DECL_ASSEMBLER_NAME (new_decl);
+
+ aslot = htab_find_slot_with_hash (assembler_name_hash, name,
+ decl_assembler_name_hash (name),
+ INSERT);
+ gcc_assert (!*aslot);
+ *aslot = new_node;
+ }
+
+ return new_node;
+}
+
/* NODE is no longer nested function; update cgraph accordingly. */
void
cgraph_unnest_node (struct cgraph_node *node)
current_function_decl = NULL;
break;
}
+
+ /* Set a personality if required and we already passed EH lowering. */
+ if (lowered
+ && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
+ == eh_personality_lang))
+ DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
+}
+
+/* Return true if NODE can be made local for API change.
+ Extern inline functions and C++ COMDAT functions can be made local
+ at the expense of possible code size growth if function is used in multiple
+ compilation units. */
+bool
+cgraph_node_can_be_local_p (struct cgraph_node *node)
+{
+ return !node->needed;
+}
+
+/* Bring NODE local. */
+void
+cgraph_make_node_local (struct cgraph_node *node)
+{
+ gcc_assert (cgraph_node_can_be_local_p (node));
+ if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
+ {
+ DECL_COMDAT (node->decl) = 0;
+ DECL_COMDAT_GROUP (node->decl) = 0;
+ TREE_PUBLIC (node->decl) = 0;
+ DECL_WEAK (node->decl) = 0;
+ DECL_EXTERNAL (node->decl) = 0;
+ node->local.externally_visible = false;
+ node->local.local = true;
+ gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
+ }
}
#include "gt-cgraph.h"