node2->aux = edge->next_caller;
else
node2->aux = &last;
- /* Break possible cycles involving always-inline
- functions by ignoring edges from always-inline
- functions to non-always-inline functions. */
- if (edge->caller->local.disregard_inline_limits
- && !edge->callee->local.disregard_inline_limits)
- continue;
if (!edge->caller->aux)
{
if (!edge->caller->callers)
}
}
-/* Add cgraph NODE to queue starting at FIRST.
-
- The queue is linked via AUX pointers and terminated by pointer to 1.
- We enqueue nodes at two occasions: when we find them reachable or when we find
- their bodies needed for further clonning. In the second case we mark them
- by pointer to 2 after processing so they are re-queue when they become
- reachable. */
-
-static void
-enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
-{
- /* Node is still in queue; do nothing. */
- if (node->aux && node->aux != (void *) 2)
- return;
- /* Node was already processed as unreachable, re-enqueue
- only if it became reachable now. */
- if (node->aux == (void *)2 && !node->reachable)
- return;
- node->aux = *first;
- *first = node;
-}
-
-/* Add varpool NODE to queue starting at FIRST. */
-
-static void
-enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
-{
- node->aux = *first;
- *first = node;
-}
-
-/* Process references. */
-
-static void
-process_references (struct ipa_ref_list *list,
- struct cgraph_node **first,
- struct varpool_node **first_varpool,
- bool before_inlining_p)
-{
- int i;
- struct ipa_ref *ref;
- for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
- {
- if (ref->refered_type == IPA_REF_CGRAPH)
- {
- struct cgraph_node *node = ipa_ref_node (ref);
- if (!node->reachable
- && (!DECL_EXTERNAL (node->decl)
- || before_inlining_p))
- {
- node->reachable = true;
- enqueue_cgraph_node (node, first);
- }
- }
- else
- {
- struct varpool_node *node = ipa_ref_varpool_node (ref);
- if (!node->needed)
- {
- varpool_mark_needed_node (node);
- enqueue_varpool_node (node, first_varpool);
- }
- }
- }
-}
-
-/* Return true when function NODE can be removed from callgraph
- if all direct calls are eliminated. */
-
-static inline bool
-varpool_can_remove_if_no_refs (struct varpool_node *node)
-{
- return (!node->force_output && !node->used_from_other_partition
- && (DECL_COMDAT (node->decl) || !node->externally_visible));
-}
-
/* Perform reachability analysis and reclaim all unreachable nodes.
If BEFORE_INLINING_P is true this function is called before inlining
decisions has been made. If BEFORE_INLINING_P is false this function also
cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
- struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
+ struct cgraph_node *processed = (struct cgraph_node *) (void *) 2;
struct cgraph_node *node, *next;
- struct varpool_node *vnode, *vnext;
bool changed = false;
#ifdef ENABLE_CHECKING
#ifdef ENABLE_CHECKING
for (node = cgraph_nodes; node; node = node->next)
gcc_assert (!node->aux);
- for (vnode = varpool_nodes; vnode; vnode = vnode->next)
- gcc_assert (!vnode->aux);
#endif
- varpool_reset_queue ();
for (node = cgraph_nodes; node; node = node->next)
- if (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
+ if (!cgraph_can_remove_if_no_direct_calls_p (node)
&& ((!DECL_EXTERNAL (node->decl))
+ || !node->analyzed
|| before_inlining_p))
{
gcc_assert (!node->global.inlined_to);
- enqueue_cgraph_node (node, &first);
+ node->aux = first;
+ first = node;
node->reachable = true;
}
else
gcc_assert (!node->aux);
node->reachable = false;
}
- for (vnode = varpool_nodes; vnode; vnode = vnode->next)
- {
- vnode->next_needed = NULL;
- vnode->prev_needed = NULL;
- if (!varpool_can_remove_if_no_refs (vnode))
- {
- vnode->needed = false;
- varpool_mark_needed_node (vnode);
- enqueue_varpool_node (vnode, &first_varpool);
- }
- else
- vnode->needed = false;
- }
/* Perform reachability analysis. As a special case do not consider
extern inline functions not inlined as live because we won't output
- them at all.
-
- We maintain two worklist, one for cgraph nodes other for varpools and
- are finished once both are empty. */
-
- while (first != (struct cgraph_node *) (void *) 1
- || first_varpool != (struct varpool_node *) (void *) 1)
+ them at all. */
+ while (first != (void *) 1)
{
- if (first != (struct cgraph_node *) (void *) 1)
- {
- struct cgraph_edge *e;
- node = first;
- first = (struct cgraph_node *) first->aux;
- if (!node->reachable)
- node->aux = (void *)2;
-
- /* If we found this node reachable, first mark on the callees
- reachable too, unless they are direct calls to extern inline functions
- we decided to not inline. */
- if (node->reachable)
- for (e = node->callees; e; e = e->next_callee)
- if (!e->callee->reachable
- && node->analyzed
- && (!e->inline_failed || !e->callee->analyzed
- || (!DECL_EXTERNAL (e->callee->decl))
- || before_inlining_p))
- {
- e->callee->reachable = true;
- enqueue_cgraph_node (e->callee, &first);
- }
-
- /* If any function in a comdat group is reachable, force
- all other functions in the same comdat group to be
- also reachable. */
- if (node->same_comdat_group
- && node->reachable
- && !node->global.inlined_to)
+ struct cgraph_edge *e;
+ node = first;
+ first = (struct cgraph_node *) first->aux;
+ node->aux = processed;
+
+ if (node->reachable)
+ for (e = node->callees; e; e = e->next_callee)
+ if (!e->callee->reachable
+ && node->analyzed
+ && (!e->inline_failed || !e->callee->analyzed
+ || (!DECL_EXTERNAL (e->callee->decl))
+ || before_inlining_p))
{
- for (next = node->same_comdat_group;
- next != node;
- next = next->same_comdat_group)
- if (!next->reachable)
- {
- next->reachable = true;
- enqueue_cgraph_node (next, &first);
- }
+ bool prev_reachable = e->callee->reachable;
+ e->callee->reachable |= node->reachable;
+ if (!e->callee->aux
+ || (e->callee->aux == processed
+ && prev_reachable != e->callee->reachable))
+ {
+ e->callee->aux = first;
+ first = e->callee;
+ }
}
- /* We can freely remove inline clones even if they are cloned, however if
- function is clone of real clone, we must keep it around in order to
- make materialize_clones produce function body with the changes
- applied. */
- while (node->clone_of && !node->clone_of->aux && !gimple_has_body_p (node->decl))
+ /* If any function in a comdat group is reachable, force
+ all other functions in the same comdat group to be
+ also reachable. */
+ if (node->same_comdat_group
+ && node->reachable
+ && !node->global.inlined_to)
+ {
+ for (next = node->same_comdat_group;
+ next != node;
+ next = next->same_comdat_group)
+ if (!next->reachable)
+ {
+ next->aux = first;
+ first = next;
+ next->reachable = true;
+ }
+ }
+
+ /* We can freely remove inline clones even if they are cloned, however if
+ function is clone of real clone, we must keep it around in order to
+ make materialize_clones produce function body with the changes
+ applied. */
+ while (node->clone_of && !node->clone_of->aux && !gimple_has_body_p (node->decl))
+ {
+ bool noninline = node->clone_of->decl != node->decl;
+ node = node->clone_of;
+ if (noninline)
{
- bool noninline = node->clone_of->decl != node->decl;
- node = node->clone_of;
- if (noninline && !node->reachable && !node->aux)
- {
- enqueue_cgraph_node (node, &first);
- break;
- }
+ node->aux = first;
+ first = node;
+ break;
}
- process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
- }
- if (first_varpool != (struct varpool_node *) (void *) 1)
- {
- vnode = first_varpool;
- first_varpool = (struct varpool_node *)first_varpool->aux;
- vnode->aux = NULL;
- process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
}
}
- /* Remove unreachable nodes.
-
- Completely unreachable functions can be fully removed from the callgraph.
- Extern inline functions that we decided to not inline need to become unanalyzed nodes of
- callgraph (so we still have edges to them). We remove function body then.
-
- Also we need to care functions that are unreachable but we need to keep them around
- for later clonning. In this case we also turn them to unanalyzed nodes, but
- keep the body around. */
+ /* Remove unreachable nodes. Extern inline functions need special care:
+ Unreachable extern inline functions shall be removed.
+ Reachable extern inline functions we never inlined shall get their bodies
+ eliminated.
+ Reachable extern inline functions we sometimes inlined will be turned into
+ unanalyzed nodes so they look like true extern functions to the rest
+ of code. The body of such functions is released via remove_node once the
+ inline clones are eliminated. */
for (node = cgraph_nodes; node; node = next)
{
next = node->next;
/* See if there is reachable caller. */
for (e = node->callers; e; e = e->next_caller)
- if (e->caller->reachable)
+ if (e->caller->aux)
break;
/* If so, we need to keep node in the callgraph. */
node->analyzed = false;
node->local.inlinable = false;
}
- else
- gcc_assert (!clone->in_other_partition);
cgraph_node_remove_callees (node);
- ipa_remove_all_references (&node->ref_list);
if (node->prev_sibling_clone)
node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
else if (node->clone_of)
}
node->aux = NULL;
}
- if (file)
- fprintf (file, "\nReclaiming variables:");
- for (vnode = varpool_nodes; vnode; vnode = vnext)
- {
- vnext = vnode->next;
- if (!vnode->needed)
- {
- if (file)
- fprintf (file, " %s", varpool_node_name (vnode));
- varpool_remove_node (vnode);
- }
- }
- if (file)
- fprintf (file, "\nClearing address taken flags:");
- for (node = cgraph_nodes; node; node = node->next)
- if (node->address_taken
- && !node->reachable_from_other_partition)
- {
- int i;
- struct ipa_ref *ref;
- bool found = false;
- for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
- && !found; i++)
- found = true;
- if (!found)
- {
- if (file)
- fprintf (file, " %s", cgraph_node_name (node));
- node->address_taken = false;
- }
- }
-
#ifdef ENABLE_CHECKING
verify_cgraph ();
#endif
return false;
}
-/* Dissolve the same_comdat_group list in which NODE resides. */
-
-static void
-dissolve_same_comdat_group_list (struct cgraph_node *node)
-{
- struct cgraph_node *n = node, *next;
- do
- {
- next = n->same_comdat_group;
- n->same_comdat_group = NULL;
- n = next;
- }
- while (n != node);
-}
-
/* Mark visibility of all functions.
A local function is one whose calls can occur only in the current
and simplifies later passes. */
if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
{
-#ifdef ENABLE_CHECKING
- struct cgraph_node *n;
-
- for (n = node->same_comdat_group;
- n != node;
- n = n->same_comdat_group)
+ struct cgraph_node *n = node, *next;
+ do
+ {
/* If at least one of same comdat group functions is external,
all of them have to be, otherwise it is a front-end bug. */
gcc_assert (DECL_EXTERNAL (n->decl));
-#endif
- dissolve_same_comdat_group_list (node);
+ next = n->same_comdat_group;
+ n->same_comdat_group = NULL;
+ n = next;
+ }
+ while (n != node);
}
gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
|| TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
{
gcc_assert (whole_program || !TREE_PUBLIC (node->decl));
cgraph_make_decl_local (node->decl);
- if (node->same_comdat_group)
- /* cgraph_externally_visible_p has already checked all other nodes
- in the group and they will all be made local. We need to
- dissolve the group at once so that the predicate does not
- segfault though. */
- dissolve_same_comdat_group_list (node);
}
node->local.local = (cgraph_only_called_directly_p (node)
&& node->analyzed
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_remove_functions | TODO_dump_cgraph
- | TODO_ggc_collect /* todo_flags_finish */
+ TODO_remove_functions | TODO_dump_cgraph /* todo_flags_finish */
}
};
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_remove_functions | TODO_dump_cgraph
- | TODO_ggc_collect /* todo_flags_finish */
+ TODO_dump_cgraph | TODO_remove_functions /* todo_flags_finish */
},
NULL, /* generate_summary */
NULL, /* write_summary */
NULL, /* read_summary */
- NULL, /* write_optimization_summary */
- NULL, /* read_optimization_summary */
+ NULL, /* function_read_summary */
NULL, /* stmt_fixup */
0, /* TODOs */
NULL, /* function_transform */
dump_cgraph_node_set (stderr, set);
}
-/* Hash a varpool node set element. */
-
-static hashval_t
-hash_varpool_node_set_element (const void *p)
-{
- const_varpool_node_set_element element = (const_varpool_node_set_element) p;
- return htab_hash_pointer (element->node);
-}
-
-/* Compare two varpool node set elements. */
-
-static int
-eq_varpool_node_set_element (const void *p1, const void *p2)
-{
- const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
- const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;
-
- return e1->node == e2->node;
-}
-
-/* Create a new varpool node set. */
-
-varpool_node_set
-varpool_node_set_new (void)
-{
- varpool_node_set new_node_set;
-
- new_node_set = GGC_NEW (struct varpool_node_set_def);
- new_node_set->hashtab = htab_create_ggc (10,
- hash_varpool_node_set_element,
- eq_varpool_node_set_element,
- NULL);
- new_node_set->nodes = NULL;
- return new_node_set;
-}
-
-/* Add varpool_node NODE to varpool_node_set SET. */
-
-void
-varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
-{
- void **slot;
- varpool_node_set_element element;
- struct varpool_node_set_element_def dummy;
-
- dummy.node = node;
- slot = htab_find_slot (set->hashtab, &dummy, INSERT);
-
- if (*slot != HTAB_EMPTY_ENTRY)
- {
- element = (varpool_node_set_element) *slot;
- gcc_assert (node == element->node
- && (VEC_index (varpool_node_ptr, set->nodes, element->index)
- == node));
- return;
- }
-
- /* Insert node into hash table. */
- element =
- (varpool_node_set_element) GGC_NEW (struct varpool_node_set_element_def);
- element->node = node;
- element->index = VEC_length (varpool_node_ptr, set->nodes);
- *slot = element;
-
- /* Insert into node vector. */
- VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
-}
-
-/* Remove varpool_node NODE from varpool_node_set SET. */
-
-void
-varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
-{
- void **slot, **last_slot;
- varpool_node_set_element element, last_element;
- struct varpool_node *last_node;
- struct varpool_node_set_element_def dummy;
-
- dummy.node = node;
- slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
- if (slot == NULL)
- return;
-
- element = (varpool_node_set_element) *slot;
- gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
- == node);
-
- /* Remove from vector. We do this by swapping node with the last element
- of the vector. */
- last_node = VEC_pop (varpool_node_ptr, set->nodes);
- if (last_node != node)
- {
- dummy.node = last_node;
- last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
- last_element = (varpool_node_set_element) *last_slot;
- gcc_assert (last_element);
-
- /* Move the last element to the original spot of NODE. */
- last_element->index = element->index;
- VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
- last_node);
- }
-
- /* Remove element from hash table. */
- htab_clear_slot (set->hashtab, slot);
- ggc_free (element);
-}
-
-/* Find NODE in SET and return an iterator to it if found. A null iterator
- is returned if NODE is not in SET. */
-
-varpool_node_set_iterator
-varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
-{
- void **slot;
- struct varpool_node_set_element_def dummy;
- varpool_node_set_element element;
- varpool_node_set_iterator vsi;
-
- dummy.node = node;
- slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
- if (slot == NULL)
- vsi.index = (unsigned) ~0;
- else
- {
- element = (varpool_node_set_element) *slot;
- gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
- == node);
- vsi.index = element->index;
- }
- vsi.set = set;
-
- return vsi;
-}
-
-/* Dump content of SET to file F. */
-
-void
-dump_varpool_node_set (FILE *f, varpool_node_set set)
-{
- varpool_node_set_iterator iter;
-
- for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
- {
- struct varpool_node *node = vsi_node (iter);
- dump_varpool_node (f, node);
- }
-}
-
-/* Dump content of SET to stderr. */
-
-void
-debug_varpool_node_set (varpool_node_set set)
-{
- dump_varpool_node_set (stderr, set);
-}
-
-
-/* Simple ipa profile pass propagating frequencies across the callgraph. */
-
-static unsigned int
-ipa_profile (void)
-{
- struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
- struct cgraph_edge *e;
- int order_pos;
- bool something_changed = false;
- int i;
-
- order_pos = cgraph_postorder (order);
- for (i = order_pos - 1; i >= 0; i--)
- {
- if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
- {
- for (e = order[i]->callees; e; e = e->next_callee)
- if (e->callee->local.local && !e->callee->aux)
- {
- something_changed = true;
- e->callee->aux = (void *)1;
- }
- }
- order[i]->aux = NULL;
- }
-
- while (something_changed)
- {
- something_changed = false;
- for (i = order_pos - 1; i >= 0; i--)
- {
- if (order[i]->aux && cgraph_propagate_frequency (order[i]))
- {
- for (e = order[i]->callees; e; e = e->next_callee)
- if (e->callee->local.local && !e->callee->aux)
- {
- something_changed = true;
- e->callee->aux = (void *)1;
- }
- }
- order[i]->aux = NULL;
- }
- }
- free (order);
- return 0;
-}
-
-static bool
-gate_ipa_profile (void)
-{
- return flag_ipa_profile;
-}
-
-struct ipa_opt_pass_d pass_ipa_profile =
-{
- {
- IPA_PASS,
- "ipa-profile", /* name */
- gate_ipa_profile, /* gate */
- ipa_profile, /* execute */
- NULL, /* sub */
- NULL, /* next */
- 0, /* static_pass_number */
- TV_IPA_PROFILE, /* tv_id */
- 0, /* properties_required */
- 0, /* properties_provided */
- 0, /* properties_destroyed */
- 0, /* todo_flags_start */
- 0 /* todo_flags_finish */
- },
- NULL, /* generate_summary */
- NULL, /* write_summary */
- NULL, /* read_summary */
- NULL, /* write_optimization_summary */
- NULL, /* read_optimization_summary */
- NULL, /* stmt_fixup */
- 0, /* TODOs */
- NULL, /* function_transform */
- NULL /* variable_transform */
-};