/* Callgraph handling code.
- Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
+ Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
Free Software Foundation, Inc.
Contributed by Jan Hubicka
based on DECL_UID. The call-graph nodes are created lazily using
cgraph_node function when called for unknown declaration.
- The callgraph at the moment does not represent indirect calls or calls
- from other compilation unit. Flag NEEDED is set for each node that may
- be accessed in such an invisible way and it shall be considered an
- entry point to the callgraph.
+ The callgraph at the moment does not represent all indirect calls or calls
+ from other compilation units. Flag NEEDED is set for each node that may be
+ accessed in such an invisible way and it shall be considered an entry point
+ to the callgraph.
+
+ On the other hand, the callgraph currently does contain some edges for
+ indirect calls with unknown callees which can be accessed through
+ indirect_calls field of a node. It should be noted however that at the
+ moment only calls which are potential candidates for indirect inlining are
+ added there.
Interprocedural information:
rtl_info used by RTL backend to propagate data from already compiled
functions to their callers.
+ Moreover, each node has a uid which can be used to keep information in
+ on-the-side arrays. UIDs are reused and therefore reasonably dense.
+
Inlining plans:
The function inlining information is decided in advance and maintained
#include "tree-dump.h"
#include "tree-flow.h"
#include "value-prof.h"
+#include "except.h"
+#include "diagnostic.h"
+#include "rtl.h"
static void cgraph_node_remove_callers (struct cgraph_node *node);
static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
}
}
-/* Register HOOK to be called with DATA on each removed node. */
+/* Register HOOK to be called with DATA on each inserted node. */
struct cgraph_node_hook_list *
cgraph_add_function_insertion_hook (cgraph_node_hook hook, void *data)
{
return entry;
}
-/* Remove ENTRY from the list of hooks called on removing nodes. */
+/* Remove ENTRY from the list of hooks called on inserted nodes. */
void
cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *entry)
{
free (entry);
}
-/* Call all node removal hooks. */
+/* Call all node insertion hooks. */
void
cgraph_call_function_insertion_hooks (struct cgraph_node *node)
{
return (hashval_t) DECL_UID (n->decl);
}
+
/* Returns nonzero if P1 and P2 are equal. */
static int
return DECL_UID (n1->decl) == DECL_UID (n2->decl);
}
-/* Allocate new callgraph node and insert it into basic data structures. */
+/* Allocate new callgraph node. */
-static struct cgraph_node *
-cgraph_create_node (void)
+static inline struct cgraph_node *
+cgraph_allocate_node (void)
{
struct cgraph_node *node;
node->uid = cgraph_max_uid++;
}
+ return node;
+}
+
+/* Allocate new callgraph node and insert it into basic data structures. */
+
+static struct cgraph_node *
+cgraph_create_node (void)
+{
+ struct cgraph_node *node = cgraph_allocate_node ();
+
node->next = cgraph_nodes;
node->pid = -1;
node->order = cgraph_order++;
cgraph_nodes->previous = node;
node->previous = NULL;
node->global.estimated_growth = INT_MIN;
+ node->frequency = NODE_FREQUENCY_NORMAL;
cgraph_nodes = node;
cgraph_n_nodes++;
return node;
if (*slot)
{
node = *slot;
+ if (node->same_body_alias)
+ node = node->same_body;
return node;
}
return node;
}
+/* Mark ALIAS as an alias to DECL. */
+
+static struct cgraph_node *
+cgraph_same_body_alias_1 (tree alias, tree decl)
+{
+ struct cgraph_node key, *alias_node, *decl_node, **slot;
+
+ gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
+ gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
+ decl_node = cgraph_node (decl);
+
+ key.decl = alias;
+
+ slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
+
+ /* If the cgraph_node has been already created, fail. */
+ if (*slot)
+ return NULL;
+
+ alias_node = cgraph_allocate_node ();
+ alias_node->decl = alias;
+ alias_node->same_body_alias = 1;
+ alias_node->same_body = decl_node;
+ alias_node->previous = NULL;
+ if (decl_node->same_body)
+ decl_node->same_body->previous = alias_node;
+ alias_node->next = decl_node->same_body;
+ alias_node->thunk.alias = decl;
+ decl_node->same_body = alias_node;
+ *slot = alias_node;
+ return alias_node;
+}
+
+/* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
+ Same body aliases are output whenever the body of DECL is output,
+ and cgraph_node (ALIAS) transparently returns cgraph_node (DECL). */
+
+bool
+cgraph_same_body_alias (tree alias, tree decl)
+{
+#ifndef ASM_OUTPUT_DEF
+ /* If aliases aren't supported by the assembler, fail. */
+ return false;
+#endif
+
+ /*gcc_assert (!assembler_name_hash);*/
+
+ return cgraph_same_body_alias_1 (alias, decl) != NULL;
+}
+
+void
+cgraph_add_thunk (tree alias, tree decl, bool this_adjusting,
+ HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value,
+ tree virtual_offset,
+ tree real_alias)
+{
+ struct cgraph_node *node = cgraph_get_node (alias);
+
+ if (node)
+ {
+ gcc_assert (node->local.finalized);
+ gcc_assert (!node->same_body);
+ cgraph_remove_node (node);
+ }
+
+ node = cgraph_same_body_alias_1 (alias, decl);
+ gcc_assert (node);
+#ifdef ENABLE_CHECKING
+ gcc_assert (!virtual_offset
+ || tree_int_cst_equal (virtual_offset, size_int (virtual_value)));
+#endif
+ node->thunk.fixed_offset = fixed_offset;
+ node->thunk.this_adjusting = this_adjusting;
+ node->thunk.virtual_value = virtual_value;
+ node->thunk.virtual_offset_p = virtual_offset != NULL;
+ node->thunk.alias = real_alias;
+ node->thunk.thunk_p = true;
+}
+
+/* Returns the cgraph node assigned to DECL or NULL if no cgraph node
+ is assigned. */
+
+struct cgraph_node *
+cgraph_get_node (tree decl)
+{
+ struct cgraph_node key, *node = NULL, **slot;
+
+ gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
+
+ if (!cgraph_hash)
+ return NULL;
+
+ key.decl = decl;
+
+ slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
+ NO_INSERT);
+
+ if (slot && *slot)
+ {
+ node = *slot;
+ if (node->same_body_alias)
+ node = node->same_body;
+ }
+ return node;
+}
+
/* Insert already constructed node into hashtable. */
void
it is __builtin_strlen and strlen, for instance. Do we need to
record them all? Original implementation marked just first one
so lets hope for the best. */
- if (*slot)
- continue;
- *slot = node;
+ if (!*slot)
+ *slot = node;
+ if (node->same_body)
+ {
+ struct cgraph_node *alias;
+
+ for (alias = node->same_body; alias; alias = alias->next)
+ {
+ hashval_t hash;
+ name = DECL_ASSEMBLER_NAME (alias->decl);
+ hash = decl_assembler_name_hash (name);
+ slot = htab_find_slot_with_hash (assembler_name_hash, name,
+ hash, INSERT);
+ if (!*slot)
+ *slot = alias;
+ }
+ }
}
}
NO_INSERT);
if (slot)
- return (struct cgraph_node *) *slot;
+ {
+ node = (struct cgraph_node *) *slot;
+ if (node->same_body_alias)
+ node = node->same_body;
+ return node;
+ }
return NULL;
}
return ((const struct cgraph_edge *) x)->call_stmt == y;
}
+/* Add call graph edge E to call site hash of its caller. */
+
+static inline void
+cgraph_add_edge_to_call_site_hash (struct cgraph_edge *e)
+{
+ void **slot;
+ slot = htab_find_slot_with_hash (e->caller->call_site_hash,
+ e->call_stmt,
+ htab_hash_pointer (e->call_stmt),
+ INSERT);
+ gcc_assert (!*slot);
+ *slot = e;
+}
/* Return the callgraph edge representing the GIMPLE_CALL statement
CALL_STMT. */
solution. It is not good idea to add pointer into CALL_EXPR itself
because we want to make possible having multiple cgraph nodes representing
different clones of the same body before the body is actually cloned. */
- for (e = node->callees; e; e= e->next_callee)
+ for (e = node->callees; e; e = e->next_callee)
{
if (e->call_stmt == call_stmt)
break;
n++;
}
+ if (!e)
+ for (e = node->indirect_calls; e; e = e->next_callee)
+ {
+ if (e->call_stmt == call_stmt)
+ break;
+ n++;
+ }
+
if (n > 100)
{
node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
for (e2 = node->callees; e2; e2 = e2->next_callee)
- {
- void **slot;
- slot = htab_find_slot_with_hash (node->call_site_hash,
- e2->call_stmt,
- htab_hash_pointer (e2->call_stmt),
- INSERT);
- gcc_assert (!*slot);
- *slot = e2;
- }
+ cgraph_add_edge_to_call_site_hash (e2);
+ for (e2 = node->indirect_calls; e2; e2 = e2->next_callee)
+ cgraph_add_edge_to_call_site_hash (e2);
}
return e;
void
cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt)
{
+ tree decl;
+
if (e->caller->call_site_hash)
{
htab_remove_elt_with_hash (e->caller->call_site_hash,
e->call_stmt,
htab_hash_pointer (e->call_stmt));
}
+
e->call_stmt = new_stmt;
+ if (e->indirect_unknown_callee
+ && (decl = gimple_call_fndecl (new_stmt)))
+ {
+ /* Constant propagation (and possibly also inlining?) can turn an
+ indirect call into a direct one. */
+ struct cgraph_node *new_callee = cgraph_node (decl);
+
+ cgraph_make_edge_direct (e, new_callee);
+ }
+
push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
e->can_throw_external = stmt_can_throw_external (new_stmt);
pop_cfun ();
if (e->caller->call_site_hash)
- {
- void **slot;
- slot = htab_find_slot_with_hash (e->caller->call_site_hash,
- e->call_stmt,
- htab_hash_pointer
- (e->call_stmt), INSERT);
- gcc_assert (!*slot);
- *slot = e;
- }
+ cgraph_add_edge_to_call_site_hash (e);
}
-/* Like cgraph_set_call_stmt but walk the clone tree and update all clones sharing
- same function body. */
+/* Like cgraph_set_call_stmt but walk the clone tree and update all
+ clones sharing the same function body. */
void
cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
if (edge)
cgraph_set_call_stmt (edge, new_stmt);
- if (orig->clones)
- for (node = orig->clones; node != orig;)
+
+ node = orig->clones;
+ if (node)
+ while (node != orig)
{
struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
if (edge)
}
/* Like cgraph_create_edge walk the clone tree and update all clones sharing
- same function body.
-
+   same function body.  If clones already have an edge for OLD_STMT, only
+   update the edge the same way cgraph_set_call_stmt_including_clones does.
+
TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
- frequencies of the clones.
- */
+ frequencies of the clones. */
void
-cgraph_create_edge_including_clones (struct cgraph_node *orig, struct cgraph_node *callee,
- gimple stmt, gcov_type count, int freq,
- int loop_depth,
+cgraph_create_edge_including_clones (struct cgraph_node *orig,
+ struct cgraph_node *callee,
+ gimple old_stmt,
+ gimple stmt, gcov_type count,
+ int freq, int loop_depth,
cgraph_inline_failed_t reason)
{
struct cgraph_node *node;
+ struct cgraph_edge *edge;
if (!cgraph_edge (orig, stmt))
- cgraph_create_edge (orig, callee, stmt,
- count, freq, loop_depth)->inline_failed = reason;
+ {
+ edge = cgraph_create_edge (orig, callee, stmt, count, freq, loop_depth);
+ edge->inline_failed = reason;
+ }
- if (orig->clones)
- for (node = orig->clones; node != orig;)
+ node = orig->clones;
+ if (node)
+ while (node != orig)
{
- /* It is possible that we already constant propagated into the clone
- and turned indirect call into dirrect call. */
- if (!cgraph_edge (node, stmt))
- cgraph_create_edge (node, callee, stmt, count, freq,
- loop_depth)->inline_failed = reason;
+ struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
+
+	 /* It is possible that clones already contain the edge while
+	    master didn't.  Either we promoted an indirect call into a direct
+	    call in the clone or we are processing clones of an unreachable
+	    master where edges have been removed.  */
+ if (edge)
+ cgraph_set_call_stmt (edge, stmt);
+ else if (!cgraph_edge (node, stmt))
+ {
+ edge = cgraph_create_edge (node, callee, stmt, count,
+ freq, loop_depth);
+ edge->inline_failed = reason;
+ }
if (node->clones)
node = node->clones;
{
struct cgraph_node *callee = e->callee;
- if (!callee->analyzed)
+ if (e->indirect_unknown_callee)
+ e->inline_failed = CIF_INDIRECT_UNKNOWN_CALL;
+ else if (!callee->analyzed)
e->inline_failed = CIF_BODY_NOT_AVAILABLE;
else if (callee->local.redefined_extern_inline)
e->inline_failed = CIF_REDEFINED_EXTERN_INLINE;
else if (!callee->local.inlinable)
e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
- else if (gimple_call_cannot_inline_p (e->call_stmt))
+ else if (e->call_stmt && gimple_call_cannot_inline_p (e->call_stmt))
e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
else
e->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
}
-/* Create edge from CALLER to CALLEE in the cgraph. */
+/* Allocate a cgraph_edge structure and fill it with data according to the
+ parameters of which only CALLEE can be NULL (when creating an indirect call
+ edge). */
-struct cgraph_edge *
-cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
- gimple call_stmt, gcov_type count, int freq, int nest)
+static struct cgraph_edge *
+cgraph_create_edge_1 (struct cgraph_node *caller, struct cgraph_node *callee,
+ gimple call_stmt, gcov_type count, int freq, int nest)
{
struct cgraph_edge *edge;
+ /* LTO does not actually have access to the call_stmt since these
+ have not been loaded yet. */
+ if (call_stmt)
+ {
#ifdef ENABLE_CHECKING
- /* This is rather pricely check possibly trigerring construction of call stmt
- hashtable. */
- gcc_assert (!cgraph_edge (caller, call_stmt));
+      /* This is a rather pricey check possibly triggering construction of
+	 the call stmt hashtable.  */
+ gcc_assert (!cgraph_edge (caller, call_stmt));
#endif
- gcc_assert (is_gimple_call (call_stmt));
+ gcc_assert (is_gimple_call (call_stmt));
+ }
if (free_edges)
{
}
edge->aux = NULL;
-
edge->caller = caller;
edge->callee = callee;
+ edge->prev_caller = NULL;
+ edge->next_caller = NULL;
+ edge->prev_callee = NULL;
+ edge->next_callee = NULL;
+
+ edge->count = count;
+ gcc_assert (count >= 0);
+ edge->frequency = freq;
+ gcc_assert (freq >= 0);
+ gcc_assert (freq <= CGRAPH_FREQ_MAX);
+ edge->loop_nest = nest;
+
edge->call_stmt = call_stmt;
push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
- edge->can_throw_external = stmt_can_throw_external (call_stmt);
+ edge->can_throw_external
+ = call_stmt ? stmt_can_throw_external (call_stmt) : false;
pop_cfun ();
- edge->prev_caller = NULL;
+ edge->call_stmt_cannot_inline_p =
+ (call_stmt ? gimple_call_cannot_inline_p (call_stmt) : false);
+ if (call_stmt && caller->call_site_hash)
+ cgraph_add_edge_to_call_site_hash (edge);
+
+ edge->indirect_info = NULL;
+ edge->indirect_inlining_edge = 0;
+
+ return edge;
+}
+
+/* Create edge from CALLER to CALLEE in the cgraph. */
+
+struct cgraph_edge *
+cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
+ gimple call_stmt, gcov_type count, int freq, int nest)
+{
+ struct cgraph_edge *edge = cgraph_create_edge_1 (caller, callee, call_stmt,
+ count, freq, nest);
+
+ edge->indirect_unknown_callee = 0;
+ initialize_inline_failed (edge);
+
edge->next_caller = callee->callers;
if (callee->callers)
callee->callers->prev_caller = edge;
- edge->prev_callee = NULL;
edge->next_callee = caller->callees;
if (caller->callees)
caller->callees->prev_callee = edge;
caller->callees = edge;
callee->callers = edge;
- edge->count = count;
- gcc_assert (count >= 0);
- edge->frequency = freq;
- gcc_assert (freq >= 0);
- gcc_assert (freq <= CGRAPH_FREQ_MAX);
- edge->loop_nest = nest;
- edge->indirect_call = 0;
- if (caller->call_site_hash)
- {
- void **slot;
- slot = htab_find_slot_with_hash (caller->call_site_hash,
- edge->call_stmt,
- htab_hash_pointer
- (edge->call_stmt),
- INSERT);
- gcc_assert (!*slot);
- *slot = edge;
- }
+ return edge;
+}
+
+
+/* Create an indirect edge with a yet-undetermined callee where the call
+   statement destination is not known.  The param_index field of the
+   associated indirect_info structure is initialized to -1 until it is
+   determined.  */
+
+struct cgraph_edge *
+cgraph_create_indirect_edge (struct cgraph_node *caller, gimple call_stmt,
+ gcov_type count, int freq, int nest)
+{
+ struct cgraph_edge *edge = cgraph_create_edge_1 (caller, NULL, call_stmt,
+ count, freq, nest);
+
+ edge->indirect_unknown_callee = 1;
initialize_inline_failed (edge);
+ edge->indirect_info = GGC_NEW (struct cgraph_indirect_call_info);
+ edge->indirect_info->param_index = -1;
+
+ edge->next_callee = caller->indirect_calls;
+ if (caller->indirect_calls)
+ caller->indirect_calls->prev_callee = edge;
+ caller->indirect_calls = edge;
+
return edge;
}
static inline void
cgraph_edge_remove_callee (struct cgraph_edge *e)
{
+ gcc_assert (!e->indirect_unknown_callee);
if (e->prev_caller)
e->prev_caller->next_caller = e->next_caller;
if (e->next_caller)
if (e->next_callee)
e->next_callee->prev_callee = e->prev_callee;
if (!e->prev_callee)
- e->caller->callees = e->next_callee;
+ {
+ if (e->indirect_unknown_callee)
+ e->caller->indirect_calls = e->next_callee;
+ else
+ e->caller->callees = e->next_callee;
+ }
if (e->caller->call_site_hash)
htab_remove_elt_with_hash (e->caller->call_site_hash,
e->call_stmt,
/* Call all edge removal hooks. */
cgraph_call_edge_removal_hooks (e);
- /* Remove from callers list of the callee. */
- cgraph_edge_remove_callee (e);
+ if (!e->indirect_unknown_callee)
+ /* Remove from callers list of the callee. */
+ cgraph_edge_remove_callee (e);
/* Remove from callees list of the callers. */
cgraph_edge_remove_caller (e);
cgraph_free_edge (e);
}
+/* Set callee of call graph edge E and add it to the corresponding set of
+ callers. */
+
+static void
+cgraph_set_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
+{
+ e->prev_caller = NULL;
+ if (n->callers)
+ n->callers->prev_caller = e;
+ e->next_caller = n->callers;
+ n->callers = e;
+ e->callee = n;
+}
+
/* Redirect callee of E to N. The function does not update underlying
call expression. */
cgraph_edge_remove_callee (e);
/* Insert to callers list of the new callee. */
- e->prev_caller = NULL;
- if (n->callers)
- n->callers->prev_caller = e;
- e->next_caller = n->callers;
- n->callers = e;
- e->callee = n;
+ cgraph_set_edge_callee (e, n);
+}
+
+/* Make an indirect EDGE with an unknown callee an ordinary edge leading to
+ CALLEE. */
+
+void
+cgraph_make_edge_direct (struct cgraph_edge *edge, struct cgraph_node *callee)
+{
+ edge->indirect_unknown_callee = 0;
+
+ /* Get the edge out of the indirect edge list. */
+ if (edge->prev_callee)
+ edge->prev_callee->next_callee = edge->next_callee;
+ if (edge->next_callee)
+ edge->next_callee->prev_callee = edge->prev_callee;
+ if (!edge->prev_callee)
+ edge->caller->indirect_calls = edge->next_callee;
+
+ /* Put it into the normal callee list */
+ edge->prev_callee = NULL;
+ edge->next_callee = edge->caller->callees;
+ if (edge->caller->callees)
+ edge->caller->callees->prev_callee = edge;
+ edge->caller->callees = edge;
+
+ /* Insert to callers list of the new callee. */
+ cgraph_set_edge_callee (edge, callee);
+
+ /* We need to re-determine the inlining status of the edge. */
+ initialize_inline_failed (edge);
}
if (e)
{
- /* See if the call is already there. It might be because of indirect
- inlining already found it. */
- if (new_call && e->callee->decl == new_call)
+ /* See if the edge is already there and has the correct callee. It
+ might be so because of indirect inlining has already updated
+ it. */
+ if (new_call && e->callee && e->callee->decl == new_call)
return;
/* Otherwise remove edge and create new one; we can't simply redirect
since function has changed, so inline plan and other information
attached to edge is invalid. */
- cgraph_remove_edge (e);
count = e->count;
frequency = e->frequency;
loop_nest = e->loop_nest;
+ cgraph_remove_edge (e);
}
else
{
{
f = e->next_callee;
cgraph_call_edge_removal_hooks (e);
- cgraph_edge_remove_callee (e);
+ if (!e->indirect_unknown_callee)
+ cgraph_edge_remove_callee (e);
cgraph_free_edge (e);
}
node->callees = NULL;
pop_cfun();
gimple_set_body (node->decl, NULL);
VEC_free (ipa_opt_pass, heap,
- DECL_STRUCT_FUNCTION (node->decl)->ipa_transforms_to_apply);
+ node->ipa_transforms_to_apply);
/* Struct function hangs a lot of data that would leak if we didn't
removed all pointers to it. */
ggc_free (DECL_STRUCT_FUNCTION (node->decl));
DECL_INITIAL (node->decl) = error_mark_node;
}
+/* Remove same body alias node. */
+
+void
+cgraph_remove_same_body_alias (struct cgraph_node *node)
+{
+ void **slot;
+ int uid = node->uid;
+
+ gcc_assert (node->same_body_alias);
+ if (node->previous)
+ node->previous->next = node->next;
+ else
+ node->same_body->same_body = node->next;
+ if (node->next)
+ node->next->previous = node->previous;
+ node->next = NULL;
+ node->previous = NULL;
+ slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
+ if (*slot == node)
+ htab_clear_slot (cgraph_hash, slot);
+ if (assembler_name_hash)
+ {
+ tree name = DECL_ASSEMBLER_NAME (node->decl);
+ slot = htab_find_slot_with_hash (assembler_name_hash, name,
+ decl_assembler_name_hash (name),
+ NO_INSERT);
+ if (slot && *slot == node)
+ htab_clear_slot (assembler_name_hash, slot);
+ }
+
+ /* Clear out the node to NULL all pointers and add the node to the free
+ list. */
+ memset (node, 0, sizeof(*node));
+ node->uid = uid;
+ NEXT_FREE_NODE (node) = free_nodes;
+ free_nodes = node;
+}
+
/* Remove the node from cgraph. */
void
cgraph_call_node_removal_hooks (node);
cgraph_node_remove_callers (node);
cgraph_node_remove_callees (node);
+ VEC_free (ipa_opt_pass, heap,
+ node->ipa_transforms_to_apply);
/* Incremental inlining access removed nodes stored in the postorder list.
*/
= next_inline_clone->prev_sibling_clone;
if (next_inline_clone->prev_sibling_clone)
{
+ gcc_assert (node->clones != next_inline_clone);
next_inline_clone->prev_sibling_clone->next_sibling_clone
= next_inline_clone->next_sibling_clone;
}
else
- node->clones = next_inline_clone->next_sibling_clone;
+ {
+ gcc_assert (node->clones == next_inline_clone);
+ node->clones = next_inline_clone->next_sibling_clone;
+ }
new_clones = node->clones;
node->clones = NULL;
next_inline_clone->next_sibling_clone = NULL;
if (node->clone_of)
{
+ if (node->clone_of->clones)
+ node->clone_of->clones->prev_sibling_clone = next_inline_clone;
next_inline_clone->next_sibling_clone = node->clone_of->clones;
node->clone_of->clones = next_inline_clone;
}
}
}
- else
- gcc_assert (node->clone_of);
if (node->prev_sibling_clone)
node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
else if (node->clone_of)
node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
if (node->clones)
{
- struct cgraph_node *n;
+ struct cgraph_node *n, *next;
+
+ if (node->clone_of)
+ {
+ for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
+ n->clone_of = node->clone_of;
+ n->clone_of = node->clone_of;
+ n->next_sibling_clone = node->clone_of->clones;
+ if (node->clone_of->clones)
+ node->clone_of->clones->prev_sibling_clone = n;
+ node->clone_of->clones = node->clones;
+ }
+ else
+ {
+	  /* We are removing a node with clones.  This makes the clones
+	     inconsistent, but assume they will be removed subsequently and
+	     just keep the clone tree intact.  This can happen during
+	     unreachable function removal since we remove unreachable
+	     functions in random order, not by a bottom-up walk of clone
+	     trees.  */
+ for (n = node->clones; n; n = next)
+ {
+ next = n->next_sibling_clone;
+ n->next_sibling_clone = NULL;
+ n->prev_sibling_clone = NULL;
+ n->clone_of = NULL;
+ }
+ }
+ }
+
+ while (node->same_body)
+ cgraph_remove_same_body_alias (node->same_body);
- for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
- n->clone_of = node->clone_of;
- n->clone_of = node->clone_of;
- n->next_sibling_clone = node->clone_of->clones;
- if (node->clone_of->clones)
- node->clone_of->clones->prev_sibling_clone = n;
- node->clone_of->clones = node->clones;
+ if (node->same_comdat_group)
+ {
+ struct cgraph_node *prev;
+ for (prev = node->same_comdat_group;
+ prev->same_comdat_group != node;
+ prev = prev->same_comdat_group)
+ ;
+ if (node->same_comdat_group == prev)
+ prev->same_comdat_group = NULL;
+ else
+ prev->same_comdat_group = node->same_comdat_group;
+ node->same_comdat_group = NULL;
}
/* While all the clones are removed after being proceeded, the function
struct cgraph_node *n = (struct cgraph_node *) *slot;
if (!n->clones && !n->clone_of && !n->global.inlined_to
&& (cgraph_global_info_ready
- && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl))))
+ && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl)
+ || n->in_other_partition)))
kill_body = true;
}
if (assembler_name_hash)
cgraph_mark_needed_node (struct cgraph_node *node)
{
node->needed = 1;
+ gcc_assert (!node->global.inlined_to);
cgraph_mark_reachable_node (node);
}
dump_cgraph_node (FILE *f, struct cgraph_node *node)
{
struct cgraph_edge *edge;
+ int indirect_calls_count = 0;
+
fprintf (f, "%s/%i(%i)", cgraph_node_name (node), node->uid,
node->pid);
dump_addr (f, " @", (void *)node);
+ if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
+ fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
if (node->global.inlined_to)
fprintf (f, " (inline copy in %s/%i)",
cgraph_node_name (node->global.inlined_to),
if (cgraph_function_flags_ready)
fprintf (f, " availability:%s",
cgraph_availability_names [cgraph_function_body_availability (node)]);
+ if (node->analyzed)
+ fprintf (f, " analyzed");
+ if (node->in_other_partition)
+ fprintf (f, " in_other_partition");
if (node->count)
fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
(HOST_WIDEST_INT)node->count);
fprintf (f, " address_taken");
else if (node->reachable)
fprintf (f, " reachable");
+ else if (node->reachable_from_other_partition)
+ fprintf (f, " reachable_from_other_partition");
if (gimple_has_body_p (node->decl))
fprintf (f, " body");
if (node->process)
edge->frequency / (double)CGRAPH_FREQ_BASE);
if (!edge->inline_failed)
fprintf(f, "(inlined) ");
- if (edge->indirect_call)
- fprintf(f, "(indirect) ");
+ if (edge->indirect_inlining_edge)
+ fprintf(f, "(indirect_inlining) ");
if (edge->can_throw_external)
fprintf(f, "(can throw external) ");
}
edge->callee->uid);
if (!edge->inline_failed)
fprintf(f, "(inlined) ");
- if (edge->indirect_call)
- fprintf(f, "(indirect) ");
+ if (edge->indirect_inlining_edge)
+ fprintf(f, "(indirect_inlining) ");
if (edge->count)
fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
(HOST_WIDEST_INT)edge->count);
fprintf(f, "(can throw external) ");
}
fprintf (f, "\n");
+
+ for (edge = node->indirect_calls; edge; edge = edge->next_callee)
+ indirect_calls_count++;
+ if (indirect_calls_count)
+ fprintf (f, " has %i outgoing edges for indirect calls.\n",
+ indirect_calls_count);
+
+ if (node->same_body)
+ {
+ struct cgraph_node *n;
+ fprintf (f, " aliases & thunks:");
+ for (n = node->same_body; n; n = n->next)
+ {
+ fprintf (f, " %s/%i", cgraph_node_name (n), n->uid);
+ if (n->thunk.thunk_p)
+ {
+ fprintf (f, " (thunk of %s fixed ofset %i virtual value %i has "
+ "virtual offset %i",
+ lang_hooks.decl_printable_name (n->thunk.alias, 2),
+ (int)n->thunk.fixed_offset,
+ (int)n->thunk.virtual_value,
+ (int)n->thunk.virtual_offset_p);
+ fprintf (f, ")");
+ }
+ if (DECL_ASSEMBLER_NAME_SET_P (n->decl))
+ fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (n->decl)));
+ }
+ fprintf (f, "\n");
+ }
}
/* Create clone of E in the node N represented by CALL_EXPR the callgraph. */
struct cgraph_edge *
cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
- gimple call_stmt, gcov_type count_scale, int freq_scale,
- int loop_nest, bool update_original)
+ gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
+ int freq_scale, int loop_nest, bool update_original)
{
struct cgraph_edge *new_edge;
gcov_type count = e->count * count_scale / REG_BR_PROB_BASE;
- gcov_type freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
+ gcov_type freq;
+ /* We do not want to ignore loop nest after frequency drops to 0. */
+ if (!freq_scale)
+ freq_scale = 1;
+ freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
if (freq > CGRAPH_FREQ_MAX)
freq = CGRAPH_FREQ_MAX;
- new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq,
- e->loop_nest + loop_nest);
+
+ if (e->indirect_unknown_callee)
+ {
+ tree decl;
+
+ if (call_stmt && (decl = gimple_call_fndecl (call_stmt)))
+ {
+ struct cgraph_node *callee = cgraph_node (decl);
+ new_edge = cgraph_create_edge (n, callee, call_stmt, count, freq,
+ e->loop_nest + loop_nest);
+ }
+ else
+ {
+ new_edge = cgraph_create_indirect_edge (n, call_stmt, count, freq,
+ e->loop_nest + loop_nest);
+ new_edge->indirect_info->param_index = e->indirect_info->param_index;
+ }
+ }
+ else
+ new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq,
+ e->loop_nest + loop_nest);
new_edge->inline_failed = e->inline_failed;
- new_edge->indirect_call = e->indirect_call;
+ new_edge->indirect_inlining_edge = e->indirect_inlining_edge;
+ new_edge->lto_stmt_uid = stmt_uid;
if (update_original)
{
e->count -= new_edge->count;
by node. */
struct cgraph_node *
cgraph_clone_node (struct cgraph_node *n, gcov_type count, int freq,
- int loop_nest, bool update_original)
+ int loop_nest, bool update_original,
+ VEC(cgraph_edge_p,heap) *redirect_callers)
{
struct cgraph_node *new_node = cgraph_create_node ();
struct cgraph_edge *e;
gcov_type count_scale;
+ unsigned i;
new_node->decl = n->decl;
new_node->origin = n->origin;
}
new_node->analyzed = n->analyzed;
new_node->local = n->local;
+ new_node->local.externally_visible = false;
+ new_node->local.local = true;
+ new_node->local.vtable_method = false;
new_node->global = n->global;
new_node->rtl = n->rtl;
new_node->count = count;
+ new_node->frequency = n->frequency;
new_node->clone = n->clone;
+ new_node->clone.tree_map = 0;
if (n->count)
{
if (new_node->count > n->count)
n->count = 0;
}
+ for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
+ {
+ /* Redirect calls to the old version node to point to its new
+ version. */
+ cgraph_redirect_edge_callee (e, new_node);
+ }
+
+
for (e = n->callees;e; e=e->next_callee)
- cgraph_clone_edge (e, new_node, e->call_stmt, count_scale, freq, loop_nest,
- update_original);
+ cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
+ count_scale, freq, loop_nest, update_original);
+
+ for (e = n->indirect_calls; e; e = e->next_callee)
+ cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
+ count_scale, freq, loop_nest, update_original);
new_node->next_sibling_clone = n->clones;
if (n->clones)
}
/* Create callgraph node clone with new declaration. The actual body will
- be copied later at compilation stage.
+ be copied later at compilation stage.
TODO: after merging in ipa-sra use function call notes instead of args_to_skip
bitmap interface.
struct cgraph_node *new_node = NULL;
tree new_decl;
struct cgraph_node key, **slot;
- unsigned i;
- struct cgraph_edge *e;
gcc_assert (tree_versionable_function_p (old_decl));
SET_DECL_RTL (new_decl, NULL);
new_node = cgraph_clone_node (old_node, old_node->count,
- CGRAPH_FREQ_BASE, 0, false);
+ CGRAPH_FREQ_BASE, 0, false,
+ redirect_callers);
new_node->decl = new_decl;
/* Update the properties.
Make clone visible only within this translation unit. Make sure
DECL_WEAK (new_node->decl) = 0;
new_node->clone.tree_map = tree_map;
new_node->clone.args_to_skip = args_to_skip;
+ if (!args_to_skip)
+ new_node->clone.combined_args_to_skip = old_node->clone.combined_args_to_skip;
+ else if (old_node->clone.combined_args_to_skip)
+ {
+ int newi = 0, oldi = 0;
+ tree arg;
+ bitmap new_args_to_skip = BITMAP_GGC_ALLOC ();
+ struct cgraph_node *orig_node;
+ for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
+ ;
+ for (arg = DECL_ARGUMENTS (orig_node->decl); arg; arg = TREE_CHAIN (arg), oldi++)
+ {
+ if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
+ {
+ bitmap_set_bit (new_args_to_skip, oldi);
+ continue;
+ }
+ if (bitmap_bit_p (args_to_skip, newi))
+ bitmap_set_bit (new_args_to_skip, oldi);
+ newi++;
+ }
+ new_node->clone.combined_args_to_skip = new_args_to_skip;
+ }
+ else
+ new_node->clone.combined_args_to_skip = args_to_skip;
new_node->local.externally_visible = 0;
new_node->local.local = 1;
new_node->lowered = true;
gcc_assert (!*aslot);
*aslot = new_node;
}
- for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
- {
- /* Redirect calls to the old version node to point to its new
- version. */
- cgraph_redirect_edge_callee (e, new_node);
- }
-
+
return new_node;
}
GIMPLE.
The function is assumed to be reachable and have address taken (so no
- API breaking optimizations are performed on it).
+ API breaking optimizations are performed on it).
Main work done by this function is to enqueue the function for later
processing to avoid need the passes to be re-entrant. */
current_function_decl = NULL;
break;
}
+
+ /* Set a personality if required and we already passed EH lowering. */
+ if (lowered
+ && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
+ == eh_personality_lang))
+ DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
}
/* Return true if NODE can be made local for API change.
bool
cgraph_node_can_be_local_p (struct cgraph_node *node)
{
- return !node->needed;
+ /* NODE must not be "needed" (i.e. an externally reachable entry point
+ to the callgraph), and it must either be a COMDAT that is not tied
+ into a same-comdat group, or already not externally visible.
+ Otherwise references from outside this unit may exist and the ABI
+ of NODE cannot be changed.  */
+ return (!node->needed
+ && ((DECL_COMDAT (node->decl) && !node->same_comdat_group)
+ || !node->local.externally_visible));
+}
+
+/* Make DECL local. FIXME: We shouldn't need to mess with rtl this early,
+ but other code such as notice_global_symbol generates rtl. */
+void
+cgraph_make_decl_local (tree decl)
+{
+ rtx rtl, symbol;
+
+ /* Strip the linkage properties that make DECL visible or mergeable
+ across translation units.  Only variables and functions are
+ expected here.  */
+ if (TREE_CODE (decl) == VAR_DECL)
+ DECL_COMMON (decl) = 0;
+ else if (TREE_CODE (decl) == FUNCTION_DECL)
+ {
+ DECL_COMDAT (decl) = 0;
+ DECL_COMDAT_GROUP (decl) = 0;
+ DECL_WEAK (decl) = 0;
+ DECL_EXTERNAL (decl) = 0;
+ }
+ else
+ gcc_unreachable ();
+ TREE_PUBLIC (decl) = 0;
+ /* If no rtl exists yet there is nothing further to synchronize.  */
+ if (!DECL_RTL_SET_P (decl))
+ return;
+
+ /* Update rtl flags. */
+ make_decl_rtl (decl);
+
+ rtl = DECL_RTL (decl);
+ if (!MEM_P (rtl))
+ return;
+
+ symbol = XEXP (rtl, 0);
+ if (GET_CODE (symbol) != SYMBOL_REF)
+ return;
+
+ /* Re-sync the symbol's weak bit with DECL_WEAK.  For FUNCTION_DECLs
+ DECL_WEAK was zeroed above, so this clears the bit; for VAR_DECLs
+ DECL_WEAK is left untouched and the bit is simply refreshed.  */
+ SYMBOL_REF_WEAK (symbol) = DECL_WEAK (decl);
}
/* Bring NODE local. */
gcc_assert (cgraph_node_can_be_local_p (node));
if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
{
- DECL_COMDAT (node->decl) = 0;
- DECL_COMDAT_GROUP (node->decl) = 0;
- TREE_PUBLIC (node->decl) = 0;
- DECL_WEAK (node->decl) = 0;
- DECL_EXTERNAL (node->decl) = 0;
+ struct cgraph_node *alias;
+ cgraph_make_decl_local (node->decl);
+
+ for (alias = node->same_body; alias; alias = alias->next)
+ cgraph_make_decl_local (alias->decl);
+
node->local.externally_visible = false;
node->local.local = true;
gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
}
}
+/* Set TREE_NOTHROW on NODE's decl and on same_body aliases of NODE
+ if any to NOTHROW. */
+
+void
+cgraph_set_nothrow_flag (struct cgraph_node *node, bool nothrow)
+{
+ struct cgraph_node *alias;
+ TREE_NOTHROW (node->decl) = nothrow;
+ /* Keep the decls of all same-body aliases consistent with the
+ main decl so callers through any alias see the same flag.  */
+ for (alias = node->same_body; alias; alias = alias->next)
+ TREE_NOTHROW (alias->decl) = nothrow;
+}
+
+/* Set TREE_READONLY on NODE's decl and on same_body aliases of NODE
+ if any to READONLY. */
+
+void
+cgraph_set_readonly_flag (struct cgraph_node *node, bool readonly)
+{
+ struct cgraph_node *alias;
+ TREE_READONLY (node->decl) = readonly;
+ /* Propagate the flag to every same-body alias decl as well.  */
+ for (alias = node->same_body; alias; alias = alias->next)
+ TREE_READONLY (alias->decl) = readonly;
+}
+
+/* Set DECL_PURE_P on NODE's decl and on same_body aliases of NODE
+ if any to PURE. */
+
+void
+cgraph_set_pure_flag (struct cgraph_node *node, bool pure)
+{
+ struct cgraph_node *alias;
+ DECL_PURE_P (node->decl) = pure;
+ /* Propagate the flag to every same-body alias decl as well.  */
+ for (alias = node->same_body; alias; alias = alias->next)
+ DECL_PURE_P (alias->decl) = pure;
+}
+
+/* Set DECL_LOOPING_CONST_OR_PURE_P on NODE's decl and on
+ same_body aliases of NODE if any to LOOPING_CONST_OR_PURE. */
+
+void
+cgraph_set_looping_const_or_pure_flag (struct cgraph_node *node,
+ bool looping_const_or_pure)
+{
+ struct cgraph_node *alias;
+ DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping_const_or_pure;
+ /* Propagate the flag to every same-body alias decl as well.  */
+ for (alias = node->same_body; alias; alias = alias->next)
+ DECL_LOOPING_CONST_OR_PURE_P (alias->decl) = looping_const_or_pure;
+}
+
+/* See if the frequency of NODE can be updated based on frequencies of its
+ callers. */
+bool
+cgraph_propagate_frequency (struct cgraph_node *node)
+{
+ bool maybe_unlikely_executed = true, maybe_executed_once = true;
+ struct cgraph_edge *edge;
+ /* Only for local functions are all callers visible, so only then can
+ the frequency be derived from them.  */
+ if (!node->local.local)
+ return false;
+ gcc_assert (node->analyzed);
+ /* HOT nodes are never demoted and UNLIKELY_EXECUTED is already the
+ lowest class this function assigns, so neither can change.  */
+ if (node->frequency == NODE_FREQUENCY_HOT)
+ return false;
+ if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
+ return false;
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, "Processing frequency %s\n", cgraph_node_name (node));
+ /* Scan the callers, stopping early once both candidate promotions
+ have been ruled out.  */
+ for (edge = node->callers;
+ edge && (maybe_unlikely_executed || maybe_executed_once);
+ edge = edge->next_caller)
+ {
+ /* Edges with zero frequency contribute nothing -- presumably
+ call sites that never execute; TODO confirm.  */
+ if (!edge->frequency)
+ continue;
+ switch (edge->caller->frequency)
+ {
+ case NODE_FREQUENCY_UNLIKELY_EXECUTED:
+ break;
+ case NODE_FREQUENCY_EXECUTED_ONCE:
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, " Called by %s that is executed once\n", cgraph_node_name (node));
+ maybe_unlikely_executed = false;
+ /* A call inside a loop of a once-executed caller may still run
+ many times, so only "unlikely" remains possible.  */
+ if (edge->loop_nest)
+ {
+ maybe_executed_once = false;
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, " Called in loop\n");
+ }
+ break;
+ case NODE_FREQUENCY_HOT:
+ case NODE_FREQUENCY_NORMAL:
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, " Called by %s that is normal or hot\n", cgraph_node_name (node));
+ maybe_unlikely_executed = false;
+ maybe_executed_once = false;
+ break;
+ }
+ }
+ /* Promote NODE to the strongest classification all callers permit;
+ return true iff the frequency actually changed.  */
+ if (maybe_unlikely_executed)
+ {
+ node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
+ if (dump_file)
+ fprintf (dump_file, "Node %s promoted to unlikely executed.\n", cgraph_node_name (node));
+ return true;
+ }
+ if (maybe_executed_once && node->frequency != NODE_FREQUENCY_EXECUTED_ONCE)
+ {
+ node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
+ if (dump_file)
+ fprintf (dump_file, "Node %s promoted to executed once.\n", cgraph_node_name (node));
+ return true;
+ }
+ return false;
+}
+
#include "gt-cgraph.h"