#include "except.h"
#include "diagnostic.h"
#include "rtl.h"
+#include "ipa-utils.h"
static void cgraph_node_remove_callers (struct cgraph_node *node);
static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
struct cgraph_edge *
cgraph_create_indirect_edge (struct cgraph_node *caller, gimple call_stmt,
+ int ecf_flags,
gcov_type count, int freq, int nest)
{
struct cgraph_edge *edge = cgraph_create_edge_1 (caller, NULL, call_stmt,
edge->indirect_unknown_callee = 1;
initialize_inline_failed (edge);
- edge->indirect_info = GGC_NEW (struct cgraph_indirect_call_info);
+ edge->indirect_info = GGC_CNEW (struct cgraph_indirect_call_info);
edge->indirect_info->param_index = -1;
+ edge->indirect_info->ecf_flags = ecf_flags;
edge->next_callee = caller->indirect_calls;
if (caller->indirect_calls)
cgraph_edge_remove_callee (e);
cgraph_free_edge (e);
}
+ for (e = node->indirect_calls; e; e = f)
+ {
+ f = e->next_callee;
+ cgraph_call_edge_removal_hooks (e);
+ if (!e->indirect_unknown_callee)
+ cgraph_edge_remove_callee (e);
+ cgraph_free_edge (e);
+ }
+ node->indirect_calls = NULL;
node->callees = NULL;
if (node->call_site_hash)
{
{
if (!node->reachable && node->local.finalized)
{
- notice_global_symbol (node->decl);
+ if (cgraph_global_info_ready)
+ {
+ /* Verify that the function does not appear to be needed out of the blue
+ during the optimization process. This can happen for extern
+ inlines when their bodies were removed after inlining. */
+ gcc_assert ((node->analyzed || DECL_EXTERNAL (node->decl)));
+ }
+ else
+ notice_global_symbol (node->decl);
node->reachable = 1;
- gcc_assert (!cgraph_global_info_ready);
node->next_needed = cgraph_nodes_queue;
cgraph_nodes_queue = node;
/* Mark NODE as having its address taken.  The node is made reachable
   first (a function whose address escapes may be called indirectly),
   then the address_taken flag is set.  */
void
cgraph_mark_address_taken_node (struct cgraph_node *node)
{
+ cgraph_mark_reachable_node (node);
node->address_taken = 1;
- cgraph_mark_needed_node (node);
}
/* Return local info for the compiled function. */
fprintf (f, " always_inline");
else if (node->local.inlinable)
fprintf (f, " inlinable");
+ else if (node->local.versionable)
+ fprintf (f, " versionable");
if (node->local.redefined_extern_inline)
fprintf (f, " redefined_extern_inline");
if (TREE_ASM_WRITTEN (node->decl))
}
else
{
- new_edge = cgraph_create_indirect_edge (n, call_stmt, count, freq,
+ new_edge = cgraph_create_indirect_edge (n, call_stmt,
+ e->indirect_info->ecf_flags,
+ count, freq,
e->loop_nest + loop_nest);
- new_edge->indirect_info->param_index = e->indirect_info->param_index;
+ *new_edge->indirect_info = *e->indirect_info;
}
}
else
/* Create node representing clone of N executed COUNT times. Decrease
the execution counts from original node too.
+ The new clone will have decl set to DECL that may or may not be the same
+ as decl of N.
When UPDATE_ORIGINAL is true, the counts are subtracted from the original
function's profile to reflect the fact that part of execution is handled
by node. */
struct cgraph_node *
-cgraph_clone_node (struct cgraph_node *n, gcov_type count, int freq,
+cgraph_clone_node (struct cgraph_node *n, tree decl, gcov_type count, int freq,
int loop_nest, bool update_original,
VEC(cgraph_edge_p,heap) *redirect_callers)
{
gcov_type count_scale;
unsigned i;
- new_node->decl = n->decl;
+ new_node->decl = decl;
new_node->origin = n->origin;
if (new_node->origin)
{
new_node->clone_of = n;
cgraph_call_node_duplication_hooks (n, new_node);
+ if (n->decl != decl)
+ {
+ struct cgraph_node **slot;
+ slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, new_node, INSERT);
+ gcc_assert (!*slot);
+ *slot = new_node;
+ if (assembler_name_hash)
+ {
+ void **aslot;
+ tree name = DECL_ASSEMBLER_NAME (decl);
+
+ aslot = htab_find_slot_with_hash (assembler_name_hash, name,
+ decl_assembler_name_hash (name),
+ INSERT);
+ gcc_assert (!*aslot);
+ *aslot = new_node;
+ }
+ }
return new_node;
}
tree old_decl = old_node->decl;
struct cgraph_node *new_node = NULL;
tree new_decl;
- struct cgraph_node key, **slot;
+ size_t i;
+ struct ipa_replace_map *map;
- gcc_assert (tree_versionable_function_p (old_decl));
+#ifdef ENABLE_CHECKING
+ if (!flag_wpa)
+ gcc_assert (tree_versionable_function_p (old_decl));
+#endif
/* Make a new FUNCTION_DECL tree node */
if (!args_to_skip)
SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
SET_DECL_RTL (new_decl, NULL);
- new_node = cgraph_clone_node (old_node, old_node->count,
+ new_node = cgraph_clone_node (old_node, new_decl, old_node->count,
CGRAPH_FREQ_BASE, 0, false,
redirect_callers);
- new_node->decl = new_decl;
/* Update the properties.
Make clone visible only within this translation unit. Make sure
that is not weak also.
DECL_WEAK (new_node->decl) = 0;
new_node->clone.tree_map = tree_map;
new_node->clone.args_to_skip = args_to_skip;
+ for (i = 0; VEC_iterate (ipa_replace_map_p, tree_map, i, map); i++)
+ {
+ tree var = map->new_tree;
+
+ STRIP_NOPS (var);
+ if (TREE_CODE (var) != ADDR_EXPR)
+ continue;
+ var = get_base_var (var);
+ if (!var)
+ continue;
+
+ /* Record references of the future statement initializing the constant
+ argument. */
+ if (TREE_CODE (var) == FUNCTION_DECL)
+ ipa_record_reference (new_node, NULL, cgraph_node (var),
+ NULL, IPA_REF_ADDR, NULL);
+ else if (TREE_CODE (var) == VAR_DECL)
+ ipa_record_reference (new_node, NULL, NULL, varpool_node (var),
+ IPA_REF_ADDR, NULL);
+ }
if (!args_to_skip)
new_node->clone.combined_args_to_skip = old_node->clone.combined_args_to_skip;
else if (old_node->clone.combined_args_to_skip)
new_node->lowered = true;
new_node->reachable = true;
- key.decl = new_decl;
- slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
- gcc_assert (!*slot);
- *slot = new_node;
- if (assembler_name_hash)
- {
- void **aslot;
- tree name = DECL_ASSEMBLER_NAME (new_decl);
-
- aslot = htab_find_slot_with_hash (assembler_name_hash, name,
- decl_assembler_name_hash (name),
- INSERT);
- gcc_assert (!*aslot);
- *aslot = new_node;
- }
return new_node;
}
/* Return true if NODE can be made local for the whole program:
   it is not needed, its address is not taken, and it is either a
   COMDAT function that is not part of a same-comdat group, or it is
   not externally visible to begin with.  */
bool
cgraph_node_can_be_local_p (struct cgraph_node *node)
{
- return (!node->needed
+ return (!node->needed && !node->address_taken
&& ((DECL_COMDAT (node->decl) && !node->same_comdat_group)
|| !node->local.externally_visible));
}