#include "tree-dump.h"
#include "tree-flow.h"
#include "value-prof.h"
+#include "except.h"
static void cgraph_node_remove_callers (struct cgraph_node *node);
static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
return (hashval_t) DECL_UID (n->decl);
}
+
/* Returns nonzero if P1 and P2 are equal. */
static int
return DECL_UID (n1->decl) == DECL_UID (n2->decl);
}
-/* Allocate new callgraph node and insert it into basic data structures. */
+/* Allocate new callgraph node. */
-static struct cgraph_node *
-cgraph_create_node (void)
+static inline struct cgraph_node *
+cgraph_allocate_node (void)
{
struct cgraph_node *node;
node->uid = cgraph_max_uid++;
}
+ return node;
+}
+
+/* Allocate new callgraph node and insert it into basic data structures. */
+
+static struct cgraph_node *
+cgraph_create_node (void)
+{
+ struct cgraph_node *node = cgraph_allocate_node ();
+
node->next = cgraph_nodes;
node->pid = -1;
node->order = cgraph_order++;
if (*slot)
{
node = *slot;
+ if (node->same_body_alias)
+ node = node->same_body;
return node;
}
return node;
}
+/* Attempt to mark ALIAS as an alias to DECL.  Return TRUE if successful.
+   Same body aliases are output whenever the body of DECL is output,
+   and cgraph_node (ALIAS) transparently returns cgraph_node (DECL).  */
+
+bool
+cgraph_same_body_alias (tree alias, tree decl)
+{
+  struct cgraph_node key, *alias_node, *decl_node, **slot;
+
+  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
+  gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
+  /* Aliases must be registered before the assembler name hash is
+     built, since same body aliases are entered into that hash only
+     at construction time.  */
+  gcc_assert (!assembler_name_hash);
+
+#ifndef ASM_OUTPUT_DEF
+  /* If aliases aren't supported by the assembler, fail.  */
+  return false;
+#endif
+
+  /* Comdat same body aliases are only supported when comdat groups
+     are supported and the symbols are weak.  */
+  if (DECL_ONE_ONLY (decl) && (!HAVE_COMDAT_GROUP || !DECL_WEAK (decl)))
+    return false;
+
+  /* Get (or create) the node of the function being aliased.  */
+  decl_node = cgraph_node (decl);
+
+  key.decl = alias;
+
+  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
+
+  /* If the cgraph_node has been already created, fail.  */
+  if (*slot)
+    return false;
+
+  /* Allocate a bare node (not linked into the cgraph_nodes list) and
+     splice it at the head of DECL's doubly-linked same_body chain,
+     then register it in the decl hash table.  */
+  alias_node = cgraph_allocate_node ();
+  alias_node->decl = alias;
+  alias_node->same_body_alias = 1;
+  alias_node->same_body = decl_node;
+  alias_node->previous = NULL;
+  if (decl_node->same_body)
+    decl_node->same_body->previous = alias_node;
+  alias_node->next = decl_node->same_body;
+  decl_node->same_body = alias_node;
+  *slot = alias_node;
+  return true;
+}
+
+/* Returns the cgraph node assigned to DECL or NULL if no cgraph node
+   is assigned.  */
+
+struct cgraph_node *
+cgraph_get_node (tree decl)
+{
+  struct cgraph_node key, *node = NULL, **slot;
+
+  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
+
+  /* No hash table means no cgraph nodes have been created yet.  */
+  if (!cgraph_hash)
+    return NULL;
+
+  key.decl = decl;
+
+  /* Pure lookup: unlike cgraph_node, this never creates a node.  */
+  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
+						 NO_INSERT);
+
+  if (slot && *slot)
+    {
+      node = *slot;
+      /* A same body alias transparently resolves to the node of the
+	 function it aliases.  */
+      if (node->same_body_alias)
+	node = node->same_body;
+    }
+  return node;
+}
+
/* Insert already constructed node into hashtable. */
void
it is __builtin_strlen and strlen, for instance. Do we need to
record them all? Original implementation marked just first one
so lets hope for the best. */
- if (*slot)
- continue;
- *slot = node;
+ if (!*slot)
+ *slot = node;
+ if (node->same_body)
+ {
+ struct cgraph_node *alias;
+
+ for (alias = node->same_body; alias; alias = alias->next)
+ {
+ hashval_t hash;
+ name = DECL_ASSEMBLER_NAME (alias->decl);
+ hash = decl_assembler_name_hash (name);
+ slot = htab_find_slot_with_hash (assembler_name_hash, name,
+ hash, INSERT);
+ if (!*slot)
+ *slot = alias;
+ }
+ }
}
}
NO_INSERT);
if (slot)
- return (struct cgraph_node *) *slot;
+ {
+ node = (struct cgraph_node *) *slot;
+ if (node->same_body_alias)
+ node = node->same_body;
+ return node;
+ }
return NULL;
}
}
}
-/* Like cgraph_set_call_stmt but walk the clone tree and update all clones sharing
- same function body. */
+/* Like cgraph_set_call_stmt but walk the clone tree and update all
+ clones sharing the same function body. */
void
cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
if (edge)
cgraph_set_call_stmt (edge, new_stmt);
- if (orig->clones)
- for (node = orig->clones; node != orig;)
+
+ node = orig->clones;
+ if (node)
+ while (node != orig)
{
struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
if (edge)
same function body.
TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
- frequencies of the clones.
- */
+ frequencies of the clones. */
void
-cgraph_create_edge_including_clones (struct cgraph_node *orig, struct cgraph_node *callee,
- gimple stmt, gcov_type count, int freq,
- int loop_depth,
+cgraph_create_edge_including_clones (struct cgraph_node *orig,
+ struct cgraph_node *callee,
+ gimple stmt, gcov_type count,
+ int freq, int loop_depth,
cgraph_inline_failed_t reason)
{
struct cgraph_node *node;
+ struct cgraph_edge *edge;
if (!cgraph_edge (orig, stmt))
- cgraph_create_edge (orig, callee, stmt,
- count, freq, loop_depth)->inline_failed = reason;
+ {
+ edge = cgraph_create_edge (orig, callee, stmt, count, freq, loop_depth);
+ edge->inline_failed = reason;
+ }
- if (orig->clones)
- for (node = orig->clones; node != orig;)
+ node = orig->clones;
+ if (node)
+ while (node != orig)
{
/* It is possible that we already constant propagated into the clone
and turned indirect call into dirrect call. */
if (!cgraph_edge (node, stmt))
- cgraph_create_edge (node, callee, stmt, count, freq,
- loop_depth)->inline_failed = reason;
+ {
+ edge = cgraph_create_edge (node, callee, stmt, count,
+ freq, loop_depth);
+ edge->inline_failed = reason;
+ }
if (node->clones)
node = node->clones;
e->inline_failed = CIF_REDEFINED_EXTERN_INLINE;
else if (!callee->local.inlinable)
e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
- else if (gimple_call_cannot_inline_p (e->call_stmt))
+ else if (e->call_stmt && gimple_call_cannot_inline_p (e->call_stmt))
e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
else
e->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
{
struct cgraph_edge *edge;
+
+ /* LTO does not actually have access to the call_stmt since these
+ have not been loaded yet. */
+ if (call_stmt)
+ {
#ifdef ENABLE_CHECKING
- /* This is rather pricely check possibly trigerring construction of call stmt
- hashtable. */
- gcc_assert (!cgraph_edge (caller, call_stmt));
+      /* This is a rather pricey check, possibly triggering construction
+	 of the call stmt hashtable.  */
+ gcc_assert (!cgraph_edge (caller, call_stmt));
#endif
- gcc_assert (is_gimple_call (call_stmt));
+ gcc_assert (is_gimple_call (call_stmt));
+ }
if (free_edges)
{
edge->callee = callee;
edge->call_stmt = call_stmt;
push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
- edge->can_throw_external = stmt_can_throw_external (call_stmt);
+ edge->can_throw_external
+ = call_stmt ? stmt_can_throw_external (call_stmt) : false;
pop_cfun ();
edge->prev_caller = NULL;
edge->next_caller = callee->callers;
gcc_assert (freq <= CGRAPH_FREQ_MAX);
edge->loop_nest = nest;
edge->indirect_call = 0;
- if (caller->call_site_hash)
+ edge->call_stmt_cannot_inline_p =
+ (call_stmt ? gimple_call_cannot_inline_p (call_stmt) : false);
+ if (call_stmt && caller->call_site_hash)
{
void **slot;
slot = htab_find_slot_with_hash (caller->call_site_hash,
/* Otherwise remove edge and create new one; we can't simply redirect
since function has changed, so inline plan and other information
attached to edge is invalid. */
- cgraph_remove_edge (e);
count = e->count;
frequency = e->frequency;
loop_nest = e->loop_nest;
+ cgraph_remove_edge (e);
}
else
{
pop_cfun();
gimple_set_body (node->decl, NULL);
VEC_free (ipa_opt_pass, heap,
- DECL_STRUCT_FUNCTION (node->decl)->ipa_transforms_to_apply);
+ node->ipa_transforms_to_apply);
/* Struct function hangs a lot of data that would leak if we didn't
removed all pointers to it. */
ggc_free (DECL_STRUCT_FUNCTION (node->decl));
DECL_INITIAL (node->decl) = error_mark_node;
}
+/* Remove same body alias node.  */
+
+void
+cgraph_remove_same_body_alias (struct cgraph_node *node)
+{
+  void **slot;
+  int uid = node->uid;
+
+  gcc_assert (node->same_body_alias);
+  /* Unlink NODE from its function's doubly-linked same_body chain.  */
+  if (node->previous)
+    node->previous->next = node->next;
+  else
+    node->same_body->same_body = node->next;
+  if (node->next)
+    node->next->previous = node->previous;
+  node->next = NULL;
+  node->previous = NULL;
+  /* Remove NODE from the decl hash table, but only if it is the entry
+     actually stored there.  */
+  slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
+  if (*slot == node)
+    htab_clear_slot (cgraph_hash, slot);
+  /* Likewise for the assembler name hash, if it has been built.  */
+  if (assembler_name_hash)
+    {
+      tree name = DECL_ASSEMBLER_NAME (node->decl);
+      slot = htab_find_slot_with_hash (assembler_name_hash, name,
+				       decl_assembler_name_hash (name),
+				       NO_INSERT);
+      if (slot && *slot == node)
+	htab_clear_slot (assembler_name_hash, slot);
+    }
+
+  /* Clear out the node to NULL all pointers and add the node to the free
+     list.  */
+  memset (node, 0, sizeof(*node));
+  /* Restore the uid cleared by the memset so the recycled node keeps
+     a valid uid.  */
+  node->uid = uid;
+  NEXT_FREE_NODE (node) = free_nodes;
+  free_nodes = node;
+}
+
/* Remove the node from cgraph. */
void
cgraph_call_node_removal_hooks (node);
cgraph_node_remove_callers (node);
cgraph_node_remove_callees (node);
+ VEC_free (ipa_opt_pass, heap,
+ node->ipa_transforms_to_apply);
/* Incremental inlining access removed nodes stored in the postorder list.
*/
node->clone_of->clones = node->clones;
}
+ while (node->same_body)
+ cgraph_remove_same_body_alias (node->same_body);
+
/* While all the clones are removed after being proceeded, the function
itself is kept in the cgraph even after it is compiled. Check whether
we are done with this body and reclaim it proactively if this is the case.
cgraph_mark_needed_node (struct cgraph_node *node)
{
node->needed = 1;
+ gcc_assert (!node->global.inlined_to);
cgraph_mark_reachable_node (node);
}
/* Create clone of E in the node N represented by CALL_EXPR the callgraph. */
struct cgraph_edge *
cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
- gimple call_stmt, gcov_type count_scale, int freq_scale,
- int loop_nest, bool update_original)
+ gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
+ int freq_scale, int loop_nest, bool update_original)
{
struct cgraph_edge *new_edge;
gcov_type count = e->count * count_scale / REG_BR_PROB_BASE;
- gcov_type freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
+ gcov_type freq;
+ /* We do not want to ignore loop nest after frequency drops to 0. */
+ if (!freq_scale)
+ freq_scale = 1;
+ freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
if (freq > CGRAPH_FREQ_MAX)
freq = CGRAPH_FREQ_MAX;
new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq,
new_edge->inline_failed = e->inline_failed;
new_edge->indirect_call = e->indirect_call;
+ new_edge->lto_stmt_uid = stmt_uid;
if (update_original)
{
e->count -= new_edge->count;
by node. */
struct cgraph_node *
cgraph_clone_node (struct cgraph_node *n, gcov_type count, int freq,
- int loop_nest, bool update_original)
+ int loop_nest, bool update_original,
+ VEC(cgraph_edge_p,heap) *redirect_callers)
{
struct cgraph_node *new_node = cgraph_create_node ();
struct cgraph_edge *e;
gcov_type count_scale;
+ unsigned i;
new_node->decl = n->decl;
new_node->origin = n->origin;
}
new_node->analyzed = n->analyzed;
new_node->local = n->local;
+ new_node->local.externally_visible = false;
new_node->global = n->global;
new_node->rtl = n->rtl;
new_node->count = count;
n->count = 0;
}
+ for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
+ {
+ /* Redirect calls to the old version node to point to its new
+ version. */
+ cgraph_redirect_edge_callee (e, new_node);
+ }
+
+
for (e = n->callees;e; e=e->next_callee)
- cgraph_clone_edge (e, new_node, e->call_stmt, count_scale, freq, loop_nest,
- update_original);
+ cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
+ count_scale, freq, loop_nest, update_original);
new_node->next_sibling_clone = n->clones;
if (n->clones)
struct cgraph_node *new_node = NULL;
tree new_decl;
struct cgraph_node key, **slot;
- unsigned i;
- struct cgraph_edge *e;
gcc_assert (tree_versionable_function_p (old_decl));
SET_DECL_RTL (new_decl, NULL);
new_node = cgraph_clone_node (old_node, old_node->count,
- CGRAPH_FREQ_BASE, 0, false);
+ CGRAPH_FREQ_BASE, 0, false,
+ redirect_callers);
new_node->decl = new_decl;
/* Update the properties.
Make clone visible only within this translation unit. Make sure
DECL_WEAK (new_node->decl) = 0;
new_node->clone.tree_map = tree_map;
new_node->clone.args_to_skip = args_to_skip;
+ if (!args_to_skip)
+ new_node->clone.combined_args_to_skip = old_node->clone.combined_args_to_skip;
+ else if (old_node->clone.combined_args_to_skip)
+ {
+ int newi = 0, oldi = 0;
+ tree arg;
+ bitmap new_args_to_skip = BITMAP_GGC_ALLOC ();
+ struct cgraph_node *orig_node;
+ for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
+ ;
+ for (arg = DECL_ARGUMENTS (orig_node->decl); arg; arg = TREE_CHAIN (arg), oldi++)
+ {
+ if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
+ {
+ bitmap_set_bit (new_args_to_skip, oldi);
+ continue;
+ }
+ if (bitmap_bit_p (args_to_skip, newi))
+ bitmap_set_bit (new_args_to_skip, oldi);
+ newi++;
+ }
+ new_node->clone.combined_args_to_skip = new_args_to_skip;
+ }
+ else
+ new_node->clone.combined_args_to_skip = args_to_skip;
new_node->local.externally_visible = 0;
new_node->local.local = 1;
new_node->lowered = true;
gcc_assert (!*aslot);
*aslot = new_node;
}
- for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
- {
- /* Redirect calls to the old version node to point to its new
- version. */
- cgraph_redirect_edge_callee (e, new_node);
- }
-
+
return new_node;
}
push_cfun (DECL_STRUCT_FUNCTION (fndecl));
current_function_decl = fndecl;
gimple_register_cfg_hooks ();
- /* C++ Thunks are emitted late via this function, gimplify them. */
- if (!gimple_body (fndecl))
- gimplify_function_tree (fndecl);
tree_lowering_passes (fndecl);
bitmap_obstack_initialize (NULL);
if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
current_function_decl = NULL;
break;
}
+
+ /* Set a personality if required and we already passed EH lowering. */
+ if (lowered
+ && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
+ == eh_personality_lang))
+ DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
}
/* Return true if NODE can be made local for API change.
bool
cgraph_node_can_be_local_p (struct cgraph_node *node)
{
- return !node->needed;
+ return (!node->needed
+ && (DECL_COMDAT (node->decl) || !node->local.externally_visible));
}
/* Bring NODE local. */