#include "diagnostic-core.h"
#include "rtl.h"
#include "ipa-utils.h"
+#include "lto-streamer.h"
+#include "ipa-inline.h"
+
+/* Human-readable names for symbol resolutions, indexed by
+   (int) node->resolution when dumping cgraph nodes.  Presumably this
+   must stay in sync with enum ld_plugin_symbol_resolution in
+   plugin-api.h -- TODO confirm the ordering against that enum.  */
+const char * const ld_plugin_symbol_resolution_names[]=
+{
+ "",
+ "undef",
+ "prevailing_def",
+ "prevailing_def_ironly",
+ "preempted_reg",
+ "preempted_ir",
+ "resolved_ir",
+ "resolved_exec",
+ "resolved_dyn",
+ "prevailing_def_ironly_exp"
+};
static void cgraph_node_remove_callers (struct cgraph_node *node);
static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
/* Maximal uid used in cgraph edges. */
int cgraph_edge_max_uid;
-/* Maximal pid used for profiling */
-int cgraph_max_pid;
-
/* Set when whole unit has been analyzed so we can access global info. */
bool cgraph_global_info_ready = false;
them, to support -fno-toplevel-reorder. */
int cgraph_order;
-/* List of hooks trigerred on cgraph_edge events. */
+/* List of hooks triggered on cgraph_edge events. */
struct cgraph_edge_hook_list {
cgraph_edge_hook hook;
void *data;
struct cgraph_edge_hook_list *next;
};
-/* List of hooks trigerred on cgraph_node events. */
+/* List of hooks triggered on cgraph_node events. */
struct cgraph_node_hook_list {
cgraph_node_hook hook;
void *data;
struct cgraph_node_hook_list *next;
};
-/* List of hooks trigerred on events involving two cgraph_edges. */
+/* List of hooks triggered on events involving two cgraph_edges. */
struct cgraph_2edge_hook_list {
cgraph_2edge_hook hook;
void *data;
struct cgraph_2edge_hook_list *next;
};
-/* List of hooks trigerred on events involving two cgraph_nodes. */
+/* List of hooks triggered on events involving two cgraph_nodes. */
struct cgraph_2node_hook_list {
cgraph_2node_hook hook;
void *data;
Do not GTY((delete)) this list so UIDs gets reliably recycled. */
static GTY(()) struct cgraph_edge *free_edges;
+/* Did process_same_body_aliases run?  */
+bool same_body_aliases_done;
+
/* Macros to access the next item in the list of free cgraph nodes and
edges. */
#define NEXT_FREE_NODE(NODE) (NODE)->next
}
/* Call all node duplication hooks. */
-static void
+void
cgraph_call_node_duplication_hooks (struct cgraph_node *node1,
struct cgraph_node *node2)
{
/* Allocate new callgraph node and insert it into basic data structures. */
static struct cgraph_node *
-cgraph_create_node (void)
+cgraph_create_node_1 (void)
{
struct cgraph_node *node = cgraph_allocate_node ();
node->next = cgraph_nodes;
- node->pid = -1;
node->order = cgraph_order++;
if (cgraph_nodes)
cgraph_nodes->previous = node;
node->previous = NULL;
- node->global.estimated_growth = INT_MIN;
node->frequency = NODE_FREQUENCY_NORMAL;
+ node->count_materialization_scale = REG_BR_PROB_BASE;
ipa_empty_ref_list (&node->ref_list);
cgraph_nodes = node;
cgraph_n_nodes++;
/* Return cgraph node assigned to DECL. Create new one when needed. */
struct cgraph_node *
-cgraph_node (tree decl)
+cgraph_create_node (tree decl)
{
struct cgraph_node key, *node, **slot;
cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);
key.decl = decl;
-
slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
+ gcc_assert (!*slot);
- if (*slot)
- {
- node = *slot;
- if (node->same_body_alias)
- node = node->same_body;
- return node;
- }
-
- node = cgraph_create_node ();
+ node = cgraph_create_node_1 ();
node->decl = decl;
*slot = node;
if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
{
- node->origin = cgraph_node (DECL_CONTEXT (decl));
+ node->origin = cgraph_get_create_node (DECL_CONTEXT (decl));
node->next_nested = node->origin->nested;
node->origin->nested = node;
}
return node;
}
-/* Mark ALIAS as an alias to DECL. */
+/* Try to find a call graph node for declaration DECL and if it does not exist,
+ create it. */
-static struct cgraph_node *
-cgraph_same_body_alias_1 (tree alias, tree decl)
+struct cgraph_node *
+cgraph_get_create_node (tree decl)
{
- struct cgraph_node key, *alias_node, *decl_node, **slot;
+ struct cgraph_node *node;
- gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
- gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
- decl_node = cgraph_node (decl);
+ node = cgraph_get_node (decl);
+ if (node)
+ return node;
- key.decl = alias;
+ return cgraph_create_node (decl);
+}
- slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
+/* Mark ALIAS as an alias to DECL.  DECL_NODE is the cgraph node the
+ function body is associated with (not necessarily cgraph_node (DECL)). */
- /* If the cgraph_node has been already created, fail. */
- if (*slot)
- return NULL;
+struct cgraph_node *
+cgraph_create_function_alias (tree alias, tree decl)
+{
+ struct cgraph_node *alias_node;
- alias_node = cgraph_allocate_node ();
- alias_node->decl = alias;
- alias_node->same_body_alias = 1;
- alias_node->same_body = decl_node;
- alias_node->previous = NULL;
- if (decl_node->same_body)
- decl_node->same_body->previous = alias_node;
- alias_node->next = decl_node->same_body;
+ gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
+ gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
+ alias_node = cgraph_get_create_node (alias);
+ gcc_assert (!alias_node->local.finalized);
alias_node->thunk.alias = decl;
- decl_node->same_body = alias_node;
- *slot = alias_node;
+ alias_node->local.finalized = true;
+ alias_node->alias = 1;
+
+ if ((TREE_PUBLIC (alias) && !DECL_COMDAT (alias) && !DECL_EXTERNAL (alias))
+ || (DECL_VIRTUAL_P (alias)
+ && (DECL_COMDAT (alias) || DECL_EXTERNAL (alias))))
+ cgraph_mark_reachable_node (alias_node);
return alias_node;
}
-/* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
+/* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
+ and NULL otherwise.
Same body aliases are output whenever the body of DECL is output,
- and cgraph_node (ALIAS) transparently returns cgraph_node (DECL). */
+ and cgraph_get_node (ALIAS) transparently returns cgraph_get_node (DECL). */
-bool
-cgraph_same_body_alias (tree alias, tree decl)
+struct cgraph_node *
+cgraph_same_body_alias (struct cgraph_node *decl_node ATTRIBUTE_UNUSED, tree alias, tree decl)
{
+ struct cgraph_node *n;
#ifndef ASM_OUTPUT_DEF
/* If aliases aren't supported by the assembler, fail. */
- return false;
+ return NULL;
#endif
+ /* Langhooks can create same body aliases of symbols not defined.
+ Those are useless. Drop them on the floor. */
+ if (cgraph_global_info_ready)
+ return NULL;
- /*gcc_assert (!assembler_name_hash);*/
-
- return cgraph_same_body_alias_1 (alias, decl) != NULL;
+ n = cgraph_create_function_alias (alias, decl);
+ n->same_body_alias = true;
+ if (same_body_aliases_done)
+ ipa_record_reference (n, NULL, cgraph_get_node (decl), NULL, IPA_REF_ALIAS,
+ NULL);
+ return n;
}
-void
-cgraph_add_thunk (tree alias, tree decl, bool this_adjusting,
+/* Add thunk alias into callgraph. The alias declaration is ALIAS and it
+ aliases DECL with an adjustment made to the first parameter.
+ See comments in thunk_adjust for detail on the parameters. */
+
+struct cgraph_node *
+cgraph_add_thunk (struct cgraph_node *decl_node ATTRIBUTE_UNUSED,
+ tree alias, tree decl,
+ bool this_adjusting,
HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value,
tree virtual_offset,
tree real_alias)
{
- struct cgraph_node *node = cgraph_get_node (alias);
+ struct cgraph_node *node;
+ node = cgraph_get_node (alias);
if (node)
{
gcc_assert (node->local.finalized);
- gcc_assert (!node->same_body);
+ gcc_assert (!node->alias);
+ gcc_assert (!node->thunk.thunk_p);
cgraph_remove_node (node);
}
- node = cgraph_same_body_alias_1 (alias, decl);
- gcc_assert (node);
-#ifdef ENABLE_CHECKING
- gcc_assert (!virtual_offset
- || tree_int_cst_equal (virtual_offset, size_int (virtual_value)));
-#endif
+ node = cgraph_create_node (alias);
+ gcc_checking_assert (!virtual_offset
+ || double_int_equal_p
+ (tree_to_double_int (virtual_offset),
+ shwi_to_double_int (virtual_value)));
node->thunk.fixed_offset = fixed_offset;
node->thunk.this_adjusting = this_adjusting;
node->thunk.virtual_value = virtual_value;
node->thunk.virtual_offset_p = virtual_offset != NULL;
node->thunk.alias = real_alias;
node->thunk.thunk_p = true;
+ node->local.finalized = true;
+
+ if (cgraph_decide_is_function_needed (node, decl))
+ cgraph_mark_needed_node (node);
+
+ if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
+ || (DECL_VIRTUAL_P (decl)
+ && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
+ cgraph_mark_reachable_node (node);
+
+ return node;
}
/* Returns the cgraph node assigned to DECL or NULL if no cgraph node
is assigned. */
struct cgraph_node *
-cgraph_get_node (tree decl)
+cgraph_get_node (const_tree decl)
{
struct cgraph_node key, *node = NULL, **slot;
if (!cgraph_hash)
return NULL;
- key.decl = decl;
+ key.decl = CONST_CAST2 (tree, const_tree, decl);
slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
NO_INSERT);
if (slot && *slot)
- {
- node = *slot;
- if (node->same_body_alias)
- node = node->same_body;
- }
+ node = *slot;
return node;
}
so lets hope for the best. */
if (!*slot)
*slot = node;
- if (node->same_body)
- {
- struct cgraph_node *alias;
-
- for (alias = node->same_body; alias; alias = alias->next)
- {
- hashval_t hash;
- name = DECL_ASSEMBLER_NAME (alias->decl);
- hash = decl_assembler_name_hash (name);
- slot = htab_find_slot_with_hash (assembler_name_hash, name,
- hash, INSERT);
- if (!*slot)
- *slot = alias;
- }
- }
}
}
if (slot)
{
node = (struct cgraph_node *) *slot;
- if (node->same_body_alias)
- node = node->same_body;
return node;
}
return NULL;
{
/* Constant propagation (and possibly also inlining?) can turn an
indirect call into a direct one. */
- struct cgraph_node *new_callee = cgraph_node (decl);
+ struct cgraph_node *new_callee = cgraph_get_node (decl);
+ gcc_checking_assert (new_callee);
cgraph_make_edge_direct (e, new_callee);
}
struct cgraph_node *callee,
gimple old_stmt,
gimple stmt, gcov_type count,
- int freq, int loop_depth,
+ int freq,
cgraph_inline_failed_t reason)
{
struct cgraph_node *node;
if (!cgraph_edge (orig, stmt))
{
- edge = cgraph_create_edge (orig, callee, stmt, count, freq, loop_depth);
+ edge = cgraph_create_edge (orig, callee, stmt, count, freq);
edge->inline_failed = reason;
}
/* It is possible that clones already contain the edge while
master didn't. Either we promoted indirect call into direct
call in the clone or we are processing clones of unreachable
- master where edges has been rmeoved. */
+ master where edges has been removed. */
if (edge)
cgraph_set_call_stmt (edge, stmt);
else if (!cgraph_edge (node, stmt))
{
edge = cgraph_create_edge (node, callee, stmt, count,
- freq, loop_depth);
+ freq);
edge->inline_failed = reason;
}
}
}
-/* Give initial reasons why inlining would fail on EDGE. This gets either
- nullified or usually overwritten by more precise reasons later. */
-
-static void
-initialize_inline_failed (struct cgraph_edge *e)
-{
- struct cgraph_node *callee = e->callee;
-
- if (e->indirect_unknown_callee)
- e->inline_failed = CIF_INDIRECT_UNKNOWN_CALL;
- else if (!callee->analyzed)
- e->inline_failed = CIF_BODY_NOT_AVAILABLE;
- else if (callee->local.redefined_extern_inline)
- e->inline_failed = CIF_REDEFINED_EXTERN_INLINE;
- else if (!callee->local.inlinable)
- e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
- else if (e->call_stmt && gimple_call_cannot_inline_p (e->call_stmt))
- e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
- else
- e->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
-}
-
/* Allocate a cgraph_edge structure and fill it with data according to the
parameters of which only CALLEE can be NULL (when creating an indirect call
edge). */
static struct cgraph_edge *
cgraph_create_edge_1 (struct cgraph_node *caller, struct cgraph_node *callee,
- gimple call_stmt, gcov_type count, int freq, int nest)
+ gimple call_stmt, gcov_type count, int freq)
{
struct cgraph_edge *edge;
have not been loaded yet. */
if (call_stmt)
{
-#ifdef ENABLE_CHECKING
- /* This is rather pricely check possibly trigerring construction of
- call stmt hashtable. */
- gcc_assert (!cgraph_edge (caller, call_stmt));
-#endif
+ /* This is a rather expensive check possibly triggering
+ construction of call stmt hashtable. */
+ gcc_checking_assert (!cgraph_edge (caller, call_stmt));
gcc_assert (is_gimple_call (call_stmt));
}
edge->frequency = freq;
gcc_assert (freq >= 0);
gcc_assert (freq <= CGRAPH_FREQ_MAX);
- edge->loop_nest = nest;
edge->call_stmt = call_stmt;
push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
edge->can_throw_external
= call_stmt ? stmt_can_throw_external (call_stmt) : false;
pop_cfun ();
- edge->call_stmt_cannot_inline_p =
- (call_stmt ? gimple_call_cannot_inline_p (call_stmt) : false);
+ if (call_stmt
+ && callee && callee->decl
+ && !gimple_check_call_matching_types (call_stmt, callee->decl))
+ edge->call_stmt_cannot_inline_p = true;
+ else
+ edge->call_stmt_cannot_inline_p = false;
if (call_stmt && caller->call_site_hash)
cgraph_add_edge_to_call_site_hash (edge);
struct cgraph_edge *
cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
- gimple call_stmt, gcov_type count, int freq, int nest)
+ gimple call_stmt, gcov_type count, int freq)
{
struct cgraph_edge *edge = cgraph_create_edge_1 (caller, callee, call_stmt,
- count, freq, nest);
+ count, freq);
edge->indirect_unknown_callee = 0;
initialize_inline_failed (edge);
return edge;
}
+/* Allocate cgraph_indirect_call_info and set its fields to default values. */
+
+struct cgraph_indirect_call_info *
+cgraph_allocate_init_indirect_info (void)
+{
+ struct cgraph_indirect_call_info *ii;
+
+ /* Garbage-collected, zero-cleared allocation; all fields start at 0
+    except the one set explicitly below.  */
+ ii = ggc_alloc_cleared_cgraph_indirect_call_info ();
+ /* -1 appears to mean "call target not (yet) known to be a formal
+    parameter of the caller" -- NOTE(review): confirm against the
+    ipa-prop consumers of param_index.  */
+ ii->param_index = -1;
+ return ii;
+}
/* Create an indirect edge with a yet-undetermined callee where the call
statement destination is a formal parameter of the caller with index
struct cgraph_edge *
cgraph_create_indirect_edge (struct cgraph_node *caller, gimple call_stmt,
int ecf_flags,
- gcov_type count, int freq, int nest)
+ gcov_type count, int freq)
{
struct cgraph_edge *edge = cgraph_create_edge_1 (caller, NULL, call_stmt,
- count, freq, nest);
+ count, freq);
edge->indirect_unknown_callee = 1;
initialize_inline_failed (edge);
- edge->indirect_info = ggc_alloc_cleared_cgraph_indirect_call_info ();
- edge->indirect_info->param_index = -1;
+ edge->indirect_info = cgraph_allocate_init_indirect_info ();
edge->indirect_info->ecf_flags = ecf_flags;
edge->next_callee = caller->indirect_calls;
}
/* Make an indirect EDGE with an unknown callee an ordinary edge leading to
- CALLEE. */
+ CALLEE. */
void
cgraph_make_edge_direct (struct cgraph_edge *edge, struct cgraph_node *callee)
/* Insert to callers list of the new callee. */
cgraph_set_edge_callee (edge, callee);
+ if (edge->call_stmt)
+ edge->call_stmt_cannot_inline_p
+ = !gimple_check_call_matching_types (edge->call_stmt, callee->decl);
+
/* We need to re-determine the inlining status of the edge. */
initialize_inline_failed (edge);
}
/* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
- of OLD_STMT if it was previously call statement. */
+ of OLD_STMT if it was previously call statement.
+ If NEW_STMT is NULL, the call has been dropped without any
+ replacement. */
static void
cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
- gimple old_stmt, tree old_call, gimple new_stmt)
+ gimple old_stmt, tree old_call,
+ gimple new_stmt)
{
- tree new_call = (is_gimple_call (new_stmt)) ? gimple_call_fndecl (new_stmt) : 0;
+ tree new_call = (new_stmt && is_gimple_call (new_stmt))
+ ? gimple_call_fndecl (new_stmt) : 0;
/* We are seeing indirect calls, then there is nothing to update. */
if (!new_call && !old_call)
return;
/* See if we turned indirect call into direct call or folded call to one builtin
- into different bultin. */
+ into different builtin. */
if (old_call != new_call)
{
struct cgraph_edge *e = cgraph_edge (node, old_stmt);
struct cgraph_edge *ne = NULL;
gcov_type count;
int frequency;
- int loop_nest;
if (e)
{
/* See if the edge is already there and has the correct callee. It
might be so because of indirect inlining has already updated
- it. */
- if (new_call && e->callee && e->callee->decl == new_call)
- return;
+ it. We also might've cloned and redirected the edge. */
+ if (new_call && e->callee)
+ {
+ struct cgraph_node *callee = e->callee;
+ while (callee)
+ {
+ if (callee->decl == new_call
+ || callee->former_clone_of == new_call)
+ return;
+ callee = callee->clone_of;
+ }
+ }
/* Otherwise remove edge and create new one; we can't simply redirect
since function has changed, so inline plan and other information
attached to edge is invalid. */
count = e->count;
frequency = e->frequency;
- loop_nest = e->loop_nest;
cgraph_remove_edge (e);
}
- else
+ else if (new_call)
{
/* We are seeing new direct call; compute profile info based on BB. */
basic_block bb = gimple_bb (new_stmt);
count = bb->count;
frequency = compute_call_stmt_bb_frequency (current_function_decl,
bb);
- loop_nest = bb->loop_depth;
}
if (new_call)
{
- ne = cgraph_create_edge (node, cgraph_node (new_call),
- new_stmt, count, frequency,
- loop_nest);
+ ne = cgraph_create_edge (node, cgraph_get_create_node (new_call),
+ new_stmt, count, frequency);
gcc_assert (ne->inline_failed);
}
}
void
cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
{
- struct cgraph_node *orig = cgraph_node (cfun->decl);
+ struct cgraph_node *orig = cgraph_get_node (cfun->decl);
struct cgraph_node *node;
+ gcc_checking_assert (orig);
cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
if (orig->clones)
for (node = orig->clones; node != orig;)
DECL_INITIAL (node->decl) = error_mark_node;
}
-/* Remove same body alias node. */
-
-void
-cgraph_remove_same_body_alias (struct cgraph_node *node)
-{
- void **slot;
- int uid = node->uid;
-
- gcc_assert (node->same_body_alias);
- if (node->previous)
- node->previous->next = node->next;
- else
- node->same_body->same_body = node->next;
- if (node->next)
- node->next->previous = node->previous;
- node->next = NULL;
- node->previous = NULL;
- slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
- if (*slot == node)
- htab_clear_slot (cgraph_hash, slot);
- if (assembler_name_hash)
- {
- tree name = DECL_ASSEMBLER_NAME (node->decl);
- slot = htab_find_slot_with_hash (assembler_name_hash, name,
- decl_assembler_name_hash (name),
- NO_INSERT);
- if (slot && *slot == node)
- htab_clear_slot (assembler_name_hash, slot);
- }
-
- /* Clear out the node to NULL all pointers and add the node to the free
- list. */
- memset (node, 0, sizeof(*node));
- node->uid = uid;
- NEXT_FREE_NODE (node) = free_nodes;
- free_nodes = node;
-}
-
/* Remove the node from cgraph. */
void
}
}
- while (node->same_body)
- cgraph_remove_same_body_alias (node->same_body);
-
if (node->same_comdat_group)
{
struct cgraph_node *prev;
free_nodes = node;
}
+/* Add NEW_ to the same comdat group that OLD is in.  OLD must be
+   DECL_ONE_ONLY and NEW_ must not already belong to a group. */
+
+void
+cgraph_add_to_same_comdat_group (struct cgraph_node *new_,
+ struct cgraph_node *old)
+{
+ gcc_assert (DECL_ONE_ONLY (old->decl));
+ gcc_assert (!new_->same_comdat_group);
+ gcc_assert (new_ != old);
+
+ DECL_COMDAT_GROUP (new_->decl) = DECL_COMDAT_GROUP (old->decl);
+ /* The same_comdat_group pointers form a circular singly linked list;
+    splice NEW_ into the cycle immediately before OLD.  */
+ new_->same_comdat_group = old;
+ if (!old->same_comdat_group)
+ /* OLD was alone in its group; form a two-element cycle.  */
+ old->same_comdat_group = new_;
+ else
+ {
+ struct cgraph_node *n;
+ /* Walk the cycle to find the node currently pointing back at OLD
+    ...  */
+ for (n = old->same_comdat_group;
+ n->same_comdat_group != old;
+ n = n->same_comdat_group)
+ ;
+ /* ... and redirect it to NEW_, closing the cycle again.  */
+ n->same_comdat_group = new_;
+ }
+}
+
/* Remove the node from cgraph. */
void
void
cgraph_mark_address_taken_node (struct cgraph_node *node)
{
+ gcc_assert (!node->global.inlined_to);
cgraph_mark_reachable_node (node);
+ /* FIXME: address_taken flag is used both as a shortcut for testing whether
+ IPA_REF_ADDR reference exists (and thus it should be set on node
+ representing alias we take address of) and as a test whether address
+ of the object was taken (and thus it should be set on node alias is
+ referring to). We should remove the first use and the remove the
+ following set. */
+ node->address_taken = 1;
+ node = cgraph_function_or_thunk_node (node, NULL);
node->address_taken = 1;
}
struct cgraph_node *node;
gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
- node = cgraph_node (decl);
+ node = cgraph_get_node (decl);
+ if (!node)
+ return NULL;
return &node->local;
}
struct cgraph_node *node;
gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
- node = cgraph_node (decl);
+ node = cgraph_get_node (decl);
+ if (!node)
+ return NULL;
return &node->global;
}
struct cgraph_node *node;
gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
- node = cgraph_node (decl);
- if (decl != current_function_decl
- && !TREE_ASM_WRITTEN (node->decl))
+ node = cgraph_get_node (decl);
+ if (!node
+ || (decl != current_function_decl
+ && !TREE_ASM_WRITTEN (node->decl)))
return NULL;
return &node->rtl;
}
struct cgraph_edge *edge;
int indirect_calls_count = 0;
- fprintf (f, "%s/%i(%i)", cgraph_node_name (node), node->uid,
- node->pid);
+ fprintf (f, "%s/%i", cgraph_node_name (node), node->uid);
dump_addr (f, " @", (void *)node);
if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
fprintf (f, " (inline copy in %s/%i)",
cgraph_node_name (node->global.inlined_to),
node->global.inlined_to->uid);
+ if (node->same_comdat_group)
+ fprintf (f, " (same comdat group as %s/%i)",
+ cgraph_node_name (node->same_comdat_group),
+ node->same_comdat_group->uid);
if (node->clone_of)
fprintf (f, " (clone of %s/%i)",
cgraph_node_name (node->clone_of),
if (node->count)
fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
(HOST_WIDEST_INT)node->count);
- if (node->local.inline_summary.self_time)
- fprintf (f, " %i time, %i benefit", node->local.inline_summary.self_time,
- node->local.inline_summary.time_inlining_benefit);
- if (node->global.time && node->global.time
- != node->local.inline_summary.self_time)
- fprintf (f, " (%i after inlining)", node->global.time);
- if (node->local.inline_summary.self_size)
- fprintf (f, " %i size, %i benefit", node->local.inline_summary.self_size,
- node->local.inline_summary.size_inlining_benefit);
- if (node->global.size && node->global.size
- != node->local.inline_summary.self_size)
- fprintf (f, " (%i after inlining)", node->global.size);
- if (node->local.inline_summary.estimated_self_stack_size)
- fprintf (f, " %i bytes stack usage", (int)node->local.inline_summary.estimated_self_stack_size);
- if (node->global.estimated_stack_size != node->local.inline_summary.estimated_self_stack_size)
- fprintf (f, " %i bytes after inlining", (int)node->global.estimated_stack_size);
if (node->origin)
fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
if (node->needed)
fprintf (f, " local");
if (node->local.externally_visible)
fprintf (f, " externally_visible");
- if (node->local.used_from_object_file)
- fprintf (f, " used_from_object_file");
+ if (node->resolution != LDPR_UNKNOWN)
+ fprintf (f, " %s",
+ ld_plugin_symbol_resolution_names[(int)node->resolution]);
if (node->local.finalized)
fprintf (f, " finalized");
- if (node->local.disregard_inline_limits)
- fprintf (f, " always_inline");
- else if (node->local.inlinable)
- fprintf (f, " inlinable");
- else if (node->local.versionable)
- fprintf (f, " versionable");
if (node->local.redefined_extern_inline)
fprintf (f, " redefined_extern_inline");
if (TREE_ASM_WRITTEN (node->decl))
fprintf (f, " asm_written");
+ if (node->only_called_at_startup)
+ fprintf (f, " only_called_at_startup");
+ if (node->only_called_at_exit)
+ fprintf (f, " only_called_at_exit");
+ else if (node->alias)
+ fprintf (f, " alias");
+ if (node->tm_clone)
+ fprintf (f, " tm_clone");
+
+ fprintf (f, "\n");
+
+ if (node->thunk.thunk_p)
+ {
+ fprintf (f, " thunk of %s (asm: %s) fixed offset %i virtual value %i has "
+ "virtual offset %i)\n",
+ lang_hooks.decl_printable_name (node->thunk.alias, 2),
+ IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)),
+ (int)node->thunk.fixed_offset,
+ (int)node->thunk.virtual_value,
+ (int)node->thunk.virtual_offset_p);
+ }
+ if (node->alias && node->thunk.alias)
+ {
+ fprintf (f, " alias of %s",
+ lang_hooks.decl_printable_name (node->thunk.alias, 2));
+ if (DECL_ASSEMBLER_NAME_SET_P (node->thunk.alias))
+ fprintf (f, " (asm: %s)",
+ IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)));
+ fprintf (f, "\n");
+ }
+
+ fprintf (f, " called by: ");
- fprintf (f, "\n called by: ");
for (edge = node->callers; edge; edge = edge->next_caller)
{
fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
if (edge->frequency)
fprintf (f, "(%.2f per call) ",
edge->frequency / (double)CGRAPH_FREQ_BASE);
- if (edge->loop_nest)
- fprintf (f, "(nested in %i loops) ", edge->loop_nest);
if (edge->can_throw_external)
fprintf(f, "(can throw external) ");
}
if (indirect_calls_count)
fprintf (f, " has %i outgoing edges for indirect calls.\n",
indirect_calls_count);
-
- if (node->same_body)
- {
- struct cgraph_node *n;
- fprintf (f, " aliases & thunks:");
- for (n = node->same_body; n; n = n->next)
- {
- fprintf (f, " %s/%i", cgraph_node_name (n), n->uid);
- if (n->thunk.thunk_p)
- {
- fprintf (f, " (thunk of %s fixed ofset %i virtual value %i has "
- "virtual offset %i",
- lang_hooks.decl_printable_name (n->thunk.alias, 2),
- (int)n->thunk.fixed_offset,
- (int)n->thunk.virtual_value,
- (int)n->thunk.virtual_offset_p);
- fprintf (f, ")");
- }
- if (DECL_ASSEMBLER_NAME_SET_P (n->decl))
- fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (n->decl)));
- }
- fprintf (f, "\n");
- }
}
void
change_decl_assembler_name (tree decl, tree name)
{
- gcc_assert (!assembler_name_hash);
+ struct cgraph_node *node;
+ void **slot;
if (!DECL_ASSEMBLER_NAME_SET_P (decl))
+ SET_DECL_ASSEMBLER_NAME (decl, name);
+ else
{
- SET_DECL_ASSEMBLER_NAME (decl, name);
- return;
- }
- if (name == DECL_ASSEMBLER_NAME (decl))
- return;
+ if (name == DECL_ASSEMBLER_NAME (decl))
+ return;
- if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
- && DECL_RTL_SET_P (decl))
- warning (0, "%D renamed after being referenced in assembly", decl);
+ if (assembler_name_hash
+ && TREE_CODE (decl) == FUNCTION_DECL
+ && (node = cgraph_get_node (decl)) != NULL)
+ {
+ tree old_name = DECL_ASSEMBLER_NAME (decl);
+ slot = htab_find_slot_with_hash (assembler_name_hash, old_name,
+ decl_assembler_name_hash (old_name),
+ NO_INSERT);
+ /* Inline clones are not hashed. */
+ if (slot && *slot == node)
+ htab_clear_slot (assembler_name_hash, slot);
+ }
+ if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
+ && DECL_RTL_SET_P (decl))
+ warning (0, "%D renamed after being referenced in assembly", decl);
- SET_DECL_ASSEMBLER_NAME (decl, name);
+ SET_DECL_ASSEMBLER_NAME (decl, name);
+ }
+ if (assembler_name_hash
+ && TREE_CODE (decl) == FUNCTION_DECL
+ && (node = cgraph_get_node (decl)) != NULL)
+ {
+ slot = htab_find_slot_with_hash (assembler_name_hash, name,
+ decl_assembler_name_hash (name),
+ INSERT);
+ gcc_assert (!*slot);
+ *slot = node;
+ }
}
/* Add a top-level asm statement to the list. */
struct cgraph_edge *
cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
- int freq_scale, int loop_nest, bool update_original)
+ int freq_scale, bool update_original)
{
struct cgraph_edge *new_edge;
gcov_type count = e->count * count_scale / REG_BR_PROB_BASE;
if (call_stmt && (decl = gimple_call_fndecl (call_stmt)))
{
- struct cgraph_node *callee = cgraph_node (decl);
- new_edge = cgraph_create_edge (n, callee, call_stmt, count, freq,
- e->loop_nest + loop_nest);
+ struct cgraph_node *callee = cgraph_get_node (decl);
+ gcc_checking_assert (callee);
+ new_edge = cgraph_create_edge (n, callee, call_stmt, count, freq);
}
else
{
new_edge = cgraph_create_indirect_edge (n, call_stmt,
e->indirect_info->ecf_flags,
- count, freq,
- e->loop_nest + loop_nest);
+ count, freq);
*new_edge->indirect_info = *e->indirect_info;
}
}
else
- new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq,
- e->loop_nest + loop_nest);
+ {
+ new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq);
+ if (e->indirect_info)
+ {
+ new_edge->indirect_info
+ = ggc_alloc_cleared_cgraph_indirect_call_info ();
+ *new_edge->indirect_info = *e->indirect_info;
+ }
+ }
new_edge->inline_failed = e->inline_failed;
new_edge->indirect_inlining_edge = e->indirect_inlining_edge;
new_edge->lto_stmt_uid = stmt_uid;
+ /* Clone flags that depend on call_stmt availability manually. */
+ new_edge->can_throw_external = e->can_throw_external;
+ new_edge->call_stmt_cannot_inline_p = e->call_stmt_cannot_inline_p;
if (update_original)
{
e->count -= new_edge->count;
return new_edge;
}
+
/* Create node representing clone of N executed COUNT times. Decrease
the execution counts from original node too.
The new clone will have decl set to DECL that may or may not be the same
When UPDATE_ORIGINAL is true, the counts are subtracted from the original
function's profile to reflect the fact that part of execution is handled
- by node. */
+ by node.
+ When CALL_DUPLICATION_HOOK is true, the IPA passes are notified about
+ the new clone. Otherwise the caller is responsible for doing so later. */
+
struct cgraph_node *
cgraph_clone_node (struct cgraph_node *n, tree decl, gcov_type count, int freq,
- int loop_nest, bool update_original,
- VEC(cgraph_edge_p,heap) *redirect_callers)
+ bool update_original,
+ VEC(cgraph_edge_p,heap) *redirect_callers,
+ bool call_duplication_hook)
{
- struct cgraph_node *new_node = cgraph_create_node ();
+ struct cgraph_node *new_node = cgraph_create_node_1 ();
struct cgraph_edge *e;
gcov_type count_scale;
unsigned i;
new_node->analyzed = n->analyzed;
new_node->local = n->local;
new_node->local.externally_visible = false;
- new_node->local.used_from_object_file = false;
new_node->local.local = true;
- new_node->local.vtable_method = false;
new_node->global = n->global;
new_node->rtl = n->rtl;
new_node->count = count;
n->count = 0;
}
- for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
+ FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
{
/* Redirect calls to the old version node to point to its new
version. */
for (e = n->callees;e; e=e->next_callee)
cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
- count_scale, freq, loop_nest, update_original);
+ count_scale, freq, update_original);
for (e = n->indirect_calls; e; e = e->next_callee)
cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
- count_scale, freq, loop_nest, update_original);
+ count_scale, freq, update_original);
ipa_clone_references (new_node, NULL, &n->ref_list);
new_node->next_sibling_clone = n->clones;
n->clones = new_node;
new_node->clone_of = n;
- cgraph_call_node_duplication_hooks (n, new_node);
if (n->decl != decl)
{
struct cgraph_node **slot;
*aslot = new_node;
}
}
+
+ if (call_duplication_hook)
+ cgraph_call_node_duplication_hooks (n, new_node);
return new_node;
}
size_t i;
struct ipa_replace_map *map;
-#ifdef ENABLE_CHECKING
if (!flag_wpa)
- gcc_assert (tree_versionable_function_p (old_decl));
-#endif
+ gcc_checking_assert (tree_versionable_function_p (old_decl));
+
+ gcc_assert (old_node->local.can_change_signature || !args_to_skip);
/* Make a new FUNCTION_DECL tree node */
if (!args_to_skip)
new_decl = copy_node (old_decl);
else
- new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
+ new_decl = build_function_decl_skip_args (old_decl, args_to_skip, false);
DECL_STRUCT_FUNCTION (new_decl) = NULL;
/* Generate a new name for the new version. */
SET_DECL_RTL (new_decl, NULL);
new_node = cgraph_clone_node (old_node, new_decl, old_node->count,
- CGRAPH_FREQ_BASE, 0, false,
- redirect_callers);
+ CGRAPH_FREQ_BASE, false,
+ redirect_callers, false);
/* Update the properties.
Make clone visible only within this translation unit. Make sure
that is not weak also.
TREE_PUBLIC (new_node->decl) = 0;
DECL_COMDAT (new_node->decl) = 0;
DECL_WEAK (new_node->decl) = 0;
+ DECL_STATIC_CONSTRUCTOR (new_node->decl) = 0;
+ DECL_STATIC_DESTRUCTOR (new_node->decl) = 0;
new_node->clone.tree_map = tree_map;
new_node->clone.args_to_skip = args_to_skip;
- for (i = 0; VEC_iterate (ipa_replace_map_p, tree_map, i, map); i++)
+ FOR_EACH_VEC_ELT (ipa_replace_map_p, tree_map, i, map)
{
tree var = map->new_tree;
/* Record references of the future statement initializing the constant
argument. */
if (TREE_CODE (var) == FUNCTION_DECL)
- ipa_record_reference (new_node, NULL, cgraph_node (var),
- NULL, IPA_REF_ADDR, NULL);
+ {
+ struct cgraph_node *ref_node = cgraph_get_node (var);
+ gcc_checking_assert (ref_node);
+ ipa_record_reference (new_node, NULL, ref_node, NULL, IPA_REF_ADDR,
+ NULL);
+ }
else if (TREE_CODE (var) == VAR_DECL)
ipa_record_reference (new_node, NULL, NULL, varpool_node (var),
IPA_REF_ADDR, NULL);
struct cgraph_node *orig_node;
for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
;
- for (arg = DECL_ARGUMENTS (orig_node->decl); arg; arg = TREE_CHAIN (arg), oldi++)
+ for (arg = DECL_ARGUMENTS (orig_node->decl); arg; arg = DECL_CHAIN (arg), oldi++)
{
if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
{
else
new_node->clone.combined_args_to_skip = args_to_skip;
new_node->local.externally_visible = 0;
- new_node->local.used_from_object_file = 0;
new_node->local.local = 1;
new_node->lowered = true;
new_node->reachable = true;
+ cgraph_call_node_duplication_hooks (old_node, new_node);
+
return new_node;
}
avail = AVAIL_LOCAL;
else if (!node->local.externally_visible)
avail = AVAIL_AVAILABLE;
- /* Inline functions are safe to be analyzed even if their sybol can
- be overwritten at runtime. It is not meaningful to enfore any sane
+ /* Inline functions are safe to be analyzed even if their symbol can
+ be overwritten at runtime. It is not meaningful to enforce any sane
behaviour on replacing inline function by different body. */
else if (DECL_DECLARED_INLINE_P (node->decl))
avail = AVAIL_AVAILABLE;
AVAIL_AVAILABLE here? That would be good reason to preserve this
bit. */
- else if (DECL_REPLACEABLE_P (node->decl) && !DECL_EXTERNAL (node->decl))
+ else if (decl_replaceable_p (node->decl) && !DECL_EXTERNAL (node->decl))
avail = AVAIL_OVERWRITABLE;
else avail = AVAIL_AVAILABLE;
{
case CGRAPH_STATE_CONSTRUCTION:
/* Just enqueue function to be processed at nearest occurrence. */
- node = cgraph_node (fndecl);
+ node = cgraph_create_node (fndecl);
node->next_needed = cgraph_new_nodes;
if (lowered)
node->lowered = true;
case CGRAPH_STATE_EXPANSION:
/* Bring the function into finalized state and enqueue for later
analyzing and compilation. */
- node = cgraph_node (fndecl);
+ node = cgraph_get_create_node (fndecl);
node->local.local = false;
node->local.finalized = true;
node->reachable = node->needed = true;
case CGRAPH_STATE_FINISHED:
/* At the very end of compilation we have to do all the work up
to expansion. */
+ node = cgraph_create_node (fndecl);
+ if (lowered)
+ node->lowered = true;
+ cgraph_analyze_function (node);
push_cfun (DECL_STRUCT_FUNCTION (fndecl));
current_function_decl = fndecl;
gimple_register_cfg_hooks ();
- if (!lowered)
- tree_lowering_passes (fndecl);
bitmap_obstack_initialize (NULL);
if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
execute_pass_list (pass_early_local_passes.pass.sub);
DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
}
+/* Worker for cgraph_node_can_be_local_p. */
+static bool
+cgraph_node_cannot_be_local_p_1 (struct cgraph_node *node,
+ void *data ATTRIBUTE_UNUSED)
+{
+ return !(!node->needed
+ && ((DECL_COMDAT (node->decl) && !node->same_comdat_group)
+ || !node->local.externally_visible));
+}
+
/* Return true if NODE can be made local for API change.
Extern inline functions and C++ COMDAT functions can be made local
at the expense of possible code size growth if function is used in multiple
bool
cgraph_node_can_be_local_p (struct cgraph_node *node)
{
- return (!node->needed && !node->address_taken
- && ((DECL_COMDAT (node->decl) && !node->same_comdat_group)
- || !node->local.externally_visible));
+ return (!node->address_taken
+ && !cgraph_for_node_and_aliases (node,
+ cgraph_node_cannot_be_local_p_1,
+ NULL, true));
}
/* Make DECL local. FIXME: We shouldn't need to mess with rtl this early,
if (TREE_CODE (decl) == VAR_DECL)
DECL_COMMON (decl) = 0;
- else if (TREE_CODE (decl) == FUNCTION_DECL)
+ else gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
+
+ if (DECL_ONE_ONLY (decl) || DECL_COMDAT (decl))
{
+ /* It is possible that we are linking against a library defining the same
+ COMDAT function. To avoid a conflict we need to rename our local name of
+ the function in case WHOPR partitioning decides to make it hidden to
+ avoid cross partition references. */
+ if (flag_wpa)
+ {
+ const char *old_name;
+
+ old_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
+ if (TREE_CODE (decl) == FUNCTION_DECL)
+ {
+ struct cgraph_node *node = cgraph_get_node (decl);
+ change_decl_assembler_name (decl,
+ clone_function_name (decl, "local"));
+ if (node->local.lto_file_data)
+ lto_record_renamed_decl (node->local.lto_file_data,
+ old_name,
+ IDENTIFIER_POINTER
+ (DECL_ASSEMBLER_NAME (decl)));
+ }
+ else if (TREE_CODE (decl) == VAR_DECL)
+ {
+ struct varpool_node *vnode = varpool_get_node (decl);
+ /* change_decl_assembler_name will warn here on vtables because
+ C++ frontend still sets TREE_SYMBOL_REFERENCED on them. */
+ SET_DECL_ASSEMBLER_NAME (decl,
+ clone_function_name (decl, "local"));
+ if (vnode->lto_file_data)
+ lto_record_renamed_decl (vnode->lto_file_data,
+ old_name,
+ IDENTIFIER_POINTER
+ (DECL_ASSEMBLER_NAME (decl)));
+ }
+ }
+ DECL_SECTION_NAME (decl) = 0;
DECL_COMDAT (decl) = 0;
- DECL_COMDAT_GROUP (decl) = 0;
- DECL_WEAK (decl) = 0;
- DECL_EXTERNAL (decl) = 0;
}
- else
- gcc_unreachable ();
+ DECL_COMDAT_GROUP (decl) = 0;
+ DECL_WEAK (decl) = 0;
+ DECL_EXTERNAL (decl) = 0;
TREE_PUBLIC (decl) = 0;
if (!DECL_RTL_SET_P (decl))
return;
SYMBOL_REF_WEAK (symbol) = DECL_WEAK (decl);
}
-/* Bring NODE local. */
-void
-cgraph_make_node_local (struct cgraph_node *node)
+/* Call callback on NODE, thunks and aliases associated to NODE.
+ When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
+ skipped. */
+
+bool
+cgraph_for_node_thunks_and_aliases (struct cgraph_node *node,
+ bool (*callback) (struct cgraph_node *, void *),
+ void *data,
+ bool include_overwritable)
+{
+ struct cgraph_edge *e;
+ int i;
+ struct ipa_ref *ref;
+
+ /* Visit NODE itself first; a true result from CALLBACK aborts the walk. */
+ if (callback (node, data))
+ return true;
+ /* Thunks show up as callers of NODE with thunk.thunk_p set; recurse on
+ them so thunks-of-thunks and their aliases are covered too. */
+ for (e = node->callers; e; e = e->next_caller)
+ if (e->caller->thunk.thunk_p
+ && (include_overwritable
+ || cgraph_function_body_availability (e->caller) > AVAIL_OVERWRITABLE))
+ if (cgraph_for_node_thunks_and_aliases (e->caller, callback, data,
+ include_overwritable))
+ return true;
+ /* Aliases are recorded as IPA_REF_ALIAS references pointing at NODE. */
+ for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
+ if (ref->use == IPA_REF_ALIAS)
+ {
+ struct cgraph_node *alias = ipa_ref_refering_node (ref);
+ if (include_overwritable
+ || cgraph_function_body_availability (alias) > AVAIL_OVERWRITABLE)
+ if (cgraph_for_node_thunks_and_aliases (alias, callback, data,
+ include_overwritable))
+ return true;
+ }
+ return false;
+}
+
+/* Call callback on NODE and aliases associated to NODE.
+ When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
+ skipped. */
+
+bool
+cgraph_for_node_and_aliases (struct cgraph_node *node,
+ bool (*callback) (struct cgraph_node *, void *),
+ void *data,
+ bool include_overwritable)
{
- gcc_assert (cgraph_node_can_be_local_p (node));
+ int i;
+ struct ipa_ref *ref;
+
+ if (callback (node, data))
+ return true;
+ for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
+ if (ref->use == IPA_REF_ALIAS)
+ {
+ struct cgraph_node *alias = ipa_ref_refering_node (ref);
+ if (include_overwritable
+ || cgraph_function_body_availability (alias) > AVAIL_OVERWRITABLE)
+ if (cgraph_for_node_and_aliases (alias, callback, data,
+ include_overwritable))
+ return true;
+ }
+ return false;
+}
+
+/* Worker to bring NODE local. */
+
+static bool
+cgraph_make_node_local_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
+{
+ gcc_checking_assert (cgraph_node_can_be_local_p (node));
if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
{
- struct cgraph_node *alias;
cgraph_make_decl_local (node->decl);
- for (alias = node->same_body; alias; alias = alias->next)
- cgraph_make_decl_local (alias->decl);
-
node->local.externally_visible = false;
node->local.local = true;
+ node->resolution = LDPR_PREVAILING_DEF_IRONLY;
gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
}
+ return false;
+}
+
+/* Bring NODE local. This localizes NODE itself together with all of its
+ thunks and aliases, including AVAIL_OVERWRITABLE ones (the walker is
+ called with include_overwritable set to true). */
+
+void
+cgraph_make_node_local (struct cgraph_node *node)
+{
+ cgraph_for_node_thunks_and_aliases (node, cgraph_make_node_local_1,
+ NULL, true);
+}
+
+/* Worker to set nothrow flag. DATA is non-NULL when the flag is being
+ set and NULL when it is being cleared. */
+
+static bool
+cgraph_set_nothrow_flag_1 (struct cgraph_node *node, void *data)
+{
+ struct cgraph_edge *e;
+
+ TREE_NOTHROW (node->decl) = data != NULL;
+
+ /* A nothrow callee means calls to it can no longer throw externally. */
+ if (data != NULL)
+ for (e = node->callers; e; e = e->next_caller)
+ e->can_throw_external = false;
+ /* Return false so the walk over thunks and aliases continues. */
+ return false;
}
-/* Set TREE_NOTHROW on NODE's decl and on same_body aliases of NODE
+/* Set TREE_NOTHROW on NODE's decl and on aliases of NODE
if any to NOTHROW. */
void
cgraph_set_nothrow_flag (struct cgraph_node *node, bool nothrow)
{
- struct cgraph_node *alias;
- TREE_NOTHROW (node->decl) = nothrow;
- for (alias = node->same_body; alias; alias = alias->next)
- TREE_NOTHROW (alias->decl) = nothrow;
+ cgraph_for_node_thunks_and_aliases (node, cgraph_set_nothrow_flag_1,
+ (void *)(size_t)nothrow, false);
}
-/* Set TREE_READONLY on NODE's decl and on same_body aliases of NODE
+/* Worker to set const flag. DATA encodes two bits: bit 0 is the readonly
+ flag itself, bit 1 the looping flag (see cgraph_set_const_flag). */
+
+static bool
+cgraph_set_const_flag_1 (struct cgraph_node *node, void *data)
+{
+ /* Static constructors and destructors without a side effect can be
+ optimized out. */
+ if (data && !((size_t)data & 2))
+ {
+ if (DECL_STATIC_CONSTRUCTOR (node->decl))
+ DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
+ if (DECL_STATIC_DESTRUCTOR (node->decl))
+ DECL_STATIC_DESTRUCTOR (node->decl) = 0;
+ }
+ TREE_READONLY (node->decl) = data != NULL;
+ DECL_LOOPING_CONST_OR_PURE_P (node->decl) = ((size_t)data & 2) != 0;
+ return false;
+}
+
+/* Set TREE_READONLY on NODE's decl and on aliases of NODE
if any to READONLY. */
void
-cgraph_set_readonly_flag (struct cgraph_node *node, bool readonly)
+cgraph_set_const_flag (struct cgraph_node *node, bool readonly, bool looping)
{
- struct cgraph_node *alias;
- TREE_READONLY (node->decl) = readonly;
- for (alias = node->same_body; alias; alias = alias->next)
- TREE_READONLY (alias->decl) = readonly;
+ cgraph_for_node_thunks_and_aliases (node, cgraph_set_const_flag_1,
+ (void *)(size_t)(readonly + (int)looping * 2),
+ false);
}
-/* Set DECL_PURE_P on NODE's decl and on same_body aliases of NODE
- if any to PURE. */
+/* Worker to set pure flag. */
-void
-cgraph_set_pure_flag (struct cgraph_node *node, bool pure)
+static bool
+cgraph_set_pure_flag_1 (struct cgraph_node *node, void *data)
{
- struct cgraph_node *alias;
- DECL_PURE_P (node->decl) = pure;
- for (alias = node->same_body; alias; alias = alias->next)
- DECL_PURE_P (alias->decl) = pure;
+ /* Static constructors and destructors without a side effect can be
+ optimized out. */
+ if (data && !((size_t)data & 2))
+ {
+ if (DECL_STATIC_CONSTRUCTOR (node->decl))
+ DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
+ if (DECL_STATIC_DESTRUCTOR (node->decl))
+ DECL_STATIC_DESTRUCTOR (node->decl) = 0;
+ }
+ DECL_PURE_P (node->decl) = data != NULL;
+ DECL_LOOPING_CONST_OR_PURE_P (node->decl) = ((size_t)data & 2) != 0;
+ return false;
}
-/* Set DECL_LOOPING_CONST_OR_PURE_P on NODE's decl and on
- same_body aliases of NODE if any to LOOPING_CONST_OR_PURE. */
+/* Set DECL_PURE_P on NODE's decl and on aliases of NODE
+ if any to PURE. */
void
-cgraph_set_looping_const_or_pure_flag (struct cgraph_node *node,
- bool looping_const_or_pure)
+cgraph_set_pure_flag (struct cgraph_node *node, bool pure, bool looping)
{
- struct cgraph_node *alias;
- DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping_const_or_pure;
- for (alias = node->same_body; alias; alias = alias->next)
- DECL_LOOPING_CONST_OR_PURE_P (alias->decl) = looping_const_or_pure;
+ cgraph_for_node_thunks_and_aliases (node, cgraph_set_pure_flag_1,
+ (void *)(size_t)(pure + (int)looping * 2),
+ false);
}
-/* See if the frequency of NODE can be updated based on frequencies of its
- callers. */
-bool
-cgraph_propagate_frequency (struct cgraph_node *node)
+/* Data used by cgraph_propagate_frequency. */
+
+struct cgraph_propagate_frequency_data
+{
+ /* Each flag starts out true and is cleared by
+ cgraph_propagate_frequency_1 once some caller disproves the
+ corresponding property of the node being analyzed. */
+ bool maybe_unlikely_executed;
+ bool maybe_executed_once;
+ bool only_called_at_startup;
+ bool only_called_at_exit;
+};
+
+/* Worker for cgraph_propagate_frequency_1. */
+
+static bool
+cgraph_propagate_frequency_1 (struct cgraph_node *node, void *data)
{
- bool maybe_unlikely_executed = true, maybe_executed_once = true;
+ struct cgraph_propagate_frequency_data *d;
struct cgraph_edge *edge;
- if (!node->local.local)
- return false;
- gcc_assert (node->analyzed);
- if (node->frequency == NODE_FREQUENCY_HOT)
- return false;
- if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
- return false;
- if (dump_file && (dump_flags & TDF_DETAILS))
- fprintf (dump_file, "Processing frequency %s\n", cgraph_node_name (node));
+
+ d = (struct cgraph_propagate_frequency_data *)data;
for (edge = node->callers;
- edge && (maybe_unlikely_executed || maybe_executed_once);
+ edge && (d->maybe_unlikely_executed || d->maybe_executed_once
+ || d->only_called_at_startup || d->only_called_at_exit);
edge = edge->next_caller)
{
+ if (edge->caller != node)
+ {
+ d->only_called_at_startup &= edge->caller->only_called_at_startup;
+ /* It makes sense to put main() together with the static constructors.
+ It will be executed for sure, but rest of functions called from
+ main are definitely not at startup only. */
+ if (MAIN_NAME_P (DECL_NAME (edge->caller->decl)))
+ d->only_called_at_startup = 0;
+ d->only_called_at_exit &= edge->caller->only_called_at_exit;
+ }
if (!edge->frequency)
continue;
switch (edge->caller->frequency)
break;
case NODE_FREQUENCY_EXECUTED_ONCE:
if (dump_file && (dump_flags & TDF_DETAILS))
- fprintf (dump_file, " Called by %s that is executed once\n", cgraph_node_name (node));
- maybe_unlikely_executed = false;
- if (edge->loop_nest)
+ fprintf (dump_file, " Called by %s that is executed once\n",
+ cgraph_node_name (edge->caller));
+ d->maybe_unlikely_executed = false;
+ if (inline_edge_summary (edge)->loop_depth)
{
- maybe_executed_once = false;
+ d->maybe_executed_once = false;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Called in loop\n");
}
case NODE_FREQUENCY_HOT:
case NODE_FREQUENCY_NORMAL:
if (dump_file && (dump_flags & TDF_DETAILS))
- fprintf (dump_file, " Called by %s that is normal or hot\n", cgraph_node_name (node));
- maybe_unlikely_executed = false;
- maybe_executed_once = false;
+ fprintf (dump_file, " Called by %s that is normal or hot\n",
+ cgraph_node_name (edge->caller));
+ d->maybe_unlikely_executed = false;
+ d->maybe_executed_once = false;
break;
}
}
- if (maybe_unlikely_executed)
- {
- node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
+ return edge != NULL;
+}
+
+/* See if the frequency of NODE can be updated based on frequencies of its
+ callers. */
+bool
+cgraph_propagate_frequency (struct cgraph_node *node)
+{
+ struct cgraph_propagate_frequency_data d = {true, true, true, true};
+ bool changed = false;
+
+ if (!node->local.local)
+ return false;
+ gcc_assert (node->analyzed);
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, "Processing frequency %s\n", cgraph_node_name (node));
+
+ cgraph_for_node_and_aliases (node, cgraph_propagate_frequency_1, &d, true);
+
+ if ((d.only_called_at_startup && !d.only_called_at_exit)
+ && !node->only_called_at_startup)
+ {
+ node->only_called_at_startup = true;
if (dump_file)
- fprintf (dump_file, "Node %s promoted to unlikely executed.\n", cgraph_node_name (node));
- return true;
- }
- if (maybe_executed_once && node->frequency != NODE_FREQUENCY_EXECUTED_ONCE)
- {
- node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
+ fprintf (dump_file, "Node %s promoted to only called at startup.\n",
+ cgraph_node_name (node));
+ changed = true;
+ }
+ if ((d.only_called_at_exit && !d.only_called_at_startup)
+ && !node->only_called_at_exit)
+ {
+ node->only_called_at_exit = true;
if (dump_file)
- fprintf (dump_file, "Node %s promoted to executed once.\n", cgraph_node_name (node));
- return true;
- }
- return false;
+ fprintf (dump_file, "Node %s promoted to only called at exit.\n",
+ cgraph_node_name (node));
+ changed = true;
+ }
+ /* These come either from profile or user hints; never update them. */
+ if (node->frequency == NODE_FREQUENCY_HOT
+ || node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
+ return changed;
+ if (d.maybe_unlikely_executed)
+ {
+ node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
+ if (dump_file)
+ fprintf (dump_file, "Node %s promoted to unlikely executed.\n",
+ cgraph_node_name (node));
+ changed = true;
+ }
+ else if (d.maybe_executed_once && node->frequency != NODE_FREQUENCY_EXECUTED_ONCE)
+ {
+ node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
+ if (dump_file)
+ fprintf (dump_file, "Node %s promoted to executed once.\n",
+ cgraph_node_name (node));
+ changed = true;
+ }
+ return changed;
}
/* Return true when NODE can not return or throw and thus
return cgraph_node_cannot_return (e->callee);
}
+/* Return true when function NODE can be removed from callgraph
+ if all direct calls are eliminated. */
+
+bool
+cgraph_can_remove_if_no_direct_calls_and_refs_p (struct cgraph_node *node)
+{
+ gcc_assert (!node->global.inlined_to);
+ /* Extern inlines can always go, we will use the external definition. */
+ if (DECL_EXTERNAL (node->decl))
+ return true;
+ /* When function is needed, we can not remove it. */
+ if (node->needed || node->reachable_from_other_partition)
+ return false;
+ /* Static constructors and destructors are invoked by the runtime rather
+ than via direct calls, so they must be kept. */
+ if (DECL_STATIC_CONSTRUCTOR (node->decl)
+ || DECL_STATIC_DESTRUCTOR (node->decl))
+ return false;
+ /* Only COMDAT functions can be removed if externally visible. */
+ if (node->local.externally_visible
+ && (!DECL_COMDAT (node->decl)
+ || cgraph_used_from_object_file_p (node)))
+ return false;
+ return true;
+}
+
+/* Worker for cgraph_can_remove_if_no_direct_calls_p. */
+
+static bool
+nonremovable_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
+{
+ return !cgraph_can_remove_if_no_direct_calls_and_refs_p (node);
+}
+
+/* Return true when function NODE and its aliases can be removed from callgraph
+ if all direct calls are eliminated. */
+
+bool
+cgraph_can_remove_if_no_direct_calls_p (struct cgraph_node *node)
+{
+ /* Extern inlines can always go, we will use the external definition. */
+ if (DECL_EXTERNAL (node->decl))
+ return true;
+ /* An address-taken function may still be reached through indirect calls
+ after all direct calls are eliminated. */
+ if (node->address_taken)
+ return false;
+ /* NODE can go only when none of its aliases pins it either. */
+ return !cgraph_for_node_and_aliases (node, nonremovable_p, NULL, true);
+}
+
+/* Worker for cgraph_will_be_removed_from_program_if_no_direct_calls. */
+
+static bool
+used_from_object_file_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
+{
+ return cgraph_used_from_object_file_p (node);
+}
+
+/* Return true when function NODE can be expected to be removed
+ from program when direct calls in this compilation unit are removed.
+
+ As a special case COMDAT functions are
+ cgraph_can_remove_if_no_direct_calls_p while they are not
+ cgraph_only_called_directly_p (it is possible they are called from other
+ unit)
+
+ This function behaves as cgraph_only_called_directly_p because eliminating
+ all uses of COMDAT function does not make it necessarily disappear from
+ the program unless we are compiling whole program or we do LTO. In this
+ case we know we win since dynamic linking will not really discard the
+ linkonce section. */
+
+bool
+cgraph_will_be_removed_from_program_if_no_direct_calls (struct cgraph_node *node)
+{
+ gcc_assert (!node->global.inlined_to);
+ /* A symbol referenced from a non-LTO object file stays regardless of what
+ we do with the direct calls in this unit. */
+ if (cgraph_for_node_and_aliases (node, used_from_object_file_p, NULL, true))
+ return false;
+ /* Without LTO or -fwhole-program other units may still use the symbol,
+ so only the conservative only-called-directly test is safe. */
+ if (!in_lto_p && !flag_whole_program)
+ return cgraph_only_called_directly_p (node);
+ else
+ {
+ if (DECL_EXTERNAL (node->decl))
+ return true;
+ return cgraph_can_remove_if_no_direct_calls_p (node);
+ }
+}
+
+/* Return true when RESOLUTION indicate that linker will use
+ the symbol from non-LTO object files. */
+
+bool
+resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution resolution)
+{
+ /* These four resolutions mean the linker resolved the symbol against a
+ regular (non-IR) object, an executable or a dynamic object; the *_IRONLY
+ and *_IR resolutions stay within LTO IR files. */
+ return (resolution == LDPR_PREVAILING_DEF
+ || resolution == LDPR_PREEMPTED_REG
+ || resolution == LDPR_RESOLVED_EXEC
+ || resolution == LDPR_RESOLVED_DYN);
+}
+
+
+/* Return true when NODE is known to be used from other (non-LTO) object file.
+ Known only when doing LTO via linker plugin. */
+
+bool
+cgraph_used_from_object_file_p (struct cgraph_node *node)
+{
+ gcc_assert (!node->global.inlined_to);
+ /* Non-public symbols can not be referenced from other object files;
+ external declarations have their definition elsewhere anyway. */
+ if (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl))
+ return false;
+ if (resolution_used_from_other_file_p (node->resolution))
+ return true;
+ return false;
+}
+
+/* Worker for cgraph_only_called_directly_p. */
+
+static bool
+cgraph_not_only_called_directly_p_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
+{
+ return !cgraph_only_called_directly_or_aliased_p (node);
+}
+
+/* Return true when function NODE and all its aliases are only called
+ directly.
+ i.e. it is not externally visible, address was not taken and
+ it is not used in any other non-standard way. */
+
+bool
+cgraph_only_called_directly_p (struct cgraph_node *node)
+{
+ gcc_assert (cgraph_function_or_thunk_node (node, NULL) == node);
+ return !cgraph_for_node_and_aliases (node, cgraph_not_only_called_directly_p_1,
+ NULL, true);
+}
+
+
+/* Collect all callers of NODE. Worker for collect_callers_of_node. */
+
+static bool
+collect_callers_of_node_1 (struct cgraph_node *node, void *data)
+{
+ VEC (cgraph_edge_p, heap) ** redirect_callers = (VEC (cgraph_edge_p, heap) **)data;
+ struct cgraph_edge *cs;
+ enum availability avail;
+ cgraph_function_or_thunk_node (node, &avail);
+
+ /* Skip overwritable nodes: calls to them are not known to actually reach
+ NODE. Edges stemming from indirect inlining are also left out. */
+ if (avail > AVAIL_OVERWRITABLE)
+ for (cs = node->callers; cs != NULL; cs = cs->next_caller)
+ if (!cs->indirect_inlining_edge)
+ VEC_safe_push (cgraph_edge_p, heap, *redirect_callers, cs);
+ /* Always return false so every alias gets visited. */
+ return false;
+}
+
+/* Collect all callers of NODE and its aliases that are known to lead to NODE
+ (i.e. are not overwritable). */
+
+VEC (cgraph_edge_p, heap) *
+collect_callers_of_node (struct cgraph_node *node)
+{
+ /* NOTE: the vector is heap-allocated (VEC ... heap); the caller is
+ expected to free it. */
+ VEC (cgraph_edge_p, heap) * redirect_callers = NULL;
+ cgraph_for_node_and_aliases (node, collect_callers_of_node_1,
+ &redirect_callers, false);
+ return redirect_callers;
+}
+
#include "gt-cgraph.h"