/* Callgraph handling code.
- Copyright (C) 2003, 2004 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
Contributed by Jan Hubicka
This file is part of GCC.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA. */
/* This file contains basic routines manipulating the call graph and variable pool
The callgraph at the moment does not represent indirect calls or calls
from other compilation units.  Flag NEEDED is set for each node that may
- be accessed in such a invisible way and it shall be considered an
+ be accessed in such an invisible way and it shall be considered an
entry point to the callgraph.
Interprocedural information:
The callgraph is the place to store data needed for interprocedural optimization.
- All datastructures are divided into three components: local_info that
+ All data structures are divided into three components: local_info that
is produced while analyzing the function, global_info that is the result
- of global walkking of the callgraph on the end of compilation and
+ of global walking of the callgraph at the end of compilation and
rtl_info used by the RTL backend to propagate data from already compiled
functions to their callers.
The function inlining information is decided in advance and maintained
in the callgraph as a so-called inline plan.
- For each inlined call, the calle's node is clonned to represent the
- new function copy produced by inlininer.
- Each inlined call gets unque corresponding clone node of the callee
- and the datastructure is updated while inlining is performed, so
- the clones are elliminated and their callee edges redirected to the
+ For each inlined call, the callee's node is cloned to represent the
+ new function copy produced by the inliner.
+ Each inlined call gets a unique corresponding clone node of the callee
+ and the data structure is updated while inlining is performed, so
+ the clones are eliminated and their callee edges redirected to the
caller.
Each edge has an "inline_failed" field.  When the field is set to NULL,
- the call will be inlined. When it is non-NULL it contains an reason
- why inlining wasn't performaned.
+ the call will be inlined. When it is non-NULL it contains a reason
+ why inlining wasn't performed.
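+
+   As an illustrative sketch only (with NODE standing for any struct
+   cgraph_node pointer), a consumer of the inline plan typically walks
+   the callee edges of a node and tests this field:
+
+	struct cgraph_edge *e;
+	for (e = node->callees; e; e = e->next_callee)
+	  if (!e->inline_failed)
+	    ;	/* The call will be inlined according to the plan.  */
+	  else
+	    ;	/* e->inline_failed names the reason it will not be.  */
+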
The varpool data structure:
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
+#include "tree-inline.h"
#include "langhooks.h"
#include "hashtab.h"
#include "toplev.h"
#include "ggc.h"
#include "debug.h"
#include "target.h"
+#include "basic-block.h"
#include "cgraph.h"
#include "varray.h"
#include "output.h"
#include "intl.h"
+#include "tree-gimple.h"
+#include "tree-dump.h"
+
+static void cgraph_node_remove_callers (struct cgraph_node *node);
+static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
+static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
/* Hash table used to convert declarations into nodes. */
static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;
-/* We destructivly update callgraph during inlining and thus we need to
- keep information on whether inlining happent separately. */
-htab_t cgraph_inline_hash;
-
/* The linked list of cgraph nodes. */
struct cgraph_node *cgraph_nodes;
/* Queue of cgraph nodes scheduled to be lowered. */
struct cgraph_node *cgraph_nodes_queue;
+/* Queue of cgraph nodes scheduled to be expanded. This is a
+ secondary queue used during optimization to accommodate passes that
+ may generate new functions that need to be optimized and expanded. */
+struct cgraph_node *cgraph_expand_queue;
+
/* Number of nodes in existence. */
int cgraph_n_nodes;
/* Set when whole unit has been analyzed so we can access global info. */
bool cgraph_global_info_ready = false;
+/* Set when the cgraph is fully built and the basic flags are computed. */
+bool cgraph_function_flags_ready = false;
+
/* Hash table used to convert declarations into nodes. */
static GTY((param_is (struct cgraph_varpool_node))) htab_t cgraph_varpool_hash;
/* Queue of cgraph nodes scheduled to be lowered and output. */
-struct cgraph_varpool_node *cgraph_varpool_nodes_queue;
-
-/* Number of nodes in existence. */
-int cgraph_varpool_n_nodes;
+struct cgraph_varpool_node *cgraph_varpool_nodes_queue, *cgraph_varpool_first_unanalyzed_node;
/* The linked list of cgraph varpool nodes. */
-static GTY(()) struct cgraph_varpool_node *cgraph_varpool_nodes;
+static GTY(()) struct cgraph_varpool_node *cgraph_varpool_nodes;
+
+/* End of the varpool queue. Needs to be GTYed to work with PCH. */
+static GTY(()) struct cgraph_varpool_node *cgraph_varpool_last_needed_node;
+
+/* Linked list of cgraph asm nodes. */
+struct cgraph_asm_node *cgraph_asm_nodes;
+
+/* Last node in cgraph_asm_nodes. */
+static GTY(()) struct cgraph_asm_node *cgraph_asm_last_node;
+
+/* The order index of the next cgraph node to be created. This is
+ used so that we can sort the cgraph nodes in order by when we saw
+ them, to support -fno-toplevel-reorder. */
+int cgraph_order;
static hashval_t hash_node (const void *);
static int eq_node (const void *, const void *);
static hashval_t
hash_node (const void *p)
{
- return ((hashval_t)
- IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME
- (((struct cgraph_node *) p)->decl)));
+ const struct cgraph_node *n = (const struct cgraph_node *) p;
+ return (hashval_t) DECL_UID (n->decl);
}
/* Returns nonzero if P1 and P2 are equal. */
static int
eq_node (const void *p1, const void *p2)
{
- return ((DECL_ASSEMBLER_NAME (((struct cgraph_node *) p1)->decl)) ==
- (tree) p2);
+ const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
+ const struct cgraph_node *n2 = (const struct cgraph_node *) p2;
+ return DECL_UID (n1->decl) == DECL_UID (n2->decl);
}
-/* Allocate new callgraph node and insert it into basic datastructures. */
+/* Allocate new callgraph node and insert it into basic data structures. */
static struct cgraph_node *
cgraph_create_node (void)
{
struct cgraph_node *node;
- node = ggc_alloc_cleared (sizeof (*node));
+ node = GGC_CNEW (struct cgraph_node);
node->next = cgraph_nodes;
node->uid = cgraph_max_uid++;
+ node->order = cgraph_order++;
if (cgraph_nodes)
cgraph_nodes->previous = node;
node->previous = NULL;
+ node->global.estimated_growth = INT_MIN;
cgraph_nodes = node;
cgraph_n_nodes++;
return node;
struct cgraph_node *
cgraph_node (tree decl)
{
- struct cgraph_node *node;
- struct cgraph_node **slot;
+ struct cgraph_node key, *node, **slot;
- if (TREE_CODE (decl) != FUNCTION_DECL)
- abort ();
+ gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
if (!cgraph_hash)
cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);
- slot = (struct cgraph_node **)
- htab_find_slot_with_hash (cgraph_hash, DECL_ASSEMBLER_NAME (decl),
- IDENTIFIER_HASH_VALUE
- (DECL_ASSEMBLER_NAME (decl)), INSERT);
+ key.decl = decl;
+
+ slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
+
if (*slot)
- return *slot;
+ {
+ node = *slot;
+ if (!node->master_clone)
+ node->master_clone = node;
+ return node;
+ }
node = cgraph_create_node ();
node->decl = decl;
node->origin = cgraph_node (DECL_CONTEXT (decl));
node->next_nested = node->origin->nested;
node->origin->nested = node;
+ node->master_clone = node;
}
return node;
}
-/* Return callgraph edge representing CALL_EXPR. */
+/* Insert already constructed node into hashtable. */
+
+void
+cgraph_insert_node_to_hashtable (struct cgraph_node *node)
+{
+ struct cgraph_node **slot;
+
+ slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, node, INSERT);
+
+ gcc_assert (!*slot);
+ *slot = node;
+}
+
+/* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL. */
+
+static bool
+decl_assembler_name_equal (tree decl, tree asmname)
+{
+ tree decl_asmname = DECL_ASSEMBLER_NAME (decl);
+
+ if (decl_asmname == asmname)
+ return true;
+
+ /* If the target assembler name was set by the user, things are trickier.
+ We have a leading '*' to begin with. After that, it's arguable what
+ is the correct thing to do with -fleading-underscore. Arguably, we've
+ historically been doing the wrong thing in assemble_alias by always
+ printing the leading underscore. Since we're not changing that, make
+ sure user_label_prefix follows the '*' before matching. */
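+
+  /* For instance (illustrative only): with user_label_prefix "_", a decl
+     whose assembler name was forced to "*_foo" compares equal to ASMNAME
+     "foo"; with an empty prefix, "*foo" compares equal to "foo".  */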
+ if (IDENTIFIER_POINTER (decl_asmname)[0] == '*')
+ {
+ const char *decl_str = IDENTIFIER_POINTER (decl_asmname) + 1;
+ size_t ulp_len = strlen (user_label_prefix);
+
+ if (ulp_len == 0)
+ ;
+ else if (strncmp (decl_str, user_label_prefix, ulp_len) == 0)
+ decl_str += ulp_len;
+ else
+ return false;
+
+ return strcmp (decl_str, IDENTIFIER_POINTER (asmname)) == 0;
+ }
+
+ return false;
+}
+
+
+/* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
+ Return NULL if there's no such node. */
+
+struct cgraph_node *
+cgraph_node_for_asm (tree asmname)
+{
+ struct cgraph_node *node;
+
+ for (node = cgraph_nodes; node ; node = node->next)
+ if (decl_assembler_name_equal (node->decl, asmname))
+ return node;
+
+ return NULL;
+}
+
+/* Return the callgraph edge representing the call statement CALL_STMT. */
struct cgraph_edge *
-cgraph_edge (struct cgraph_node *node, tree call_expr)
+cgraph_edge (struct cgraph_node *node, tree call_stmt)
{
struct cgraph_edge *e;
/* This loop may turn out to be a performance problem.  In such case adding
   hashtables into call nodes with very many edges is probably the best
-     sollution.  It is not good idea to add pointer into CALL_EXPR itself
+     solution.  It is not a good idea to add a pointer into CALL_EXPR itself
   because we want to make it possible to have multiple cgraph nodes representing
   different clones of the same body before the body is actually cloned. */
for (e = node->callees; e; e= e->next_callee)
- if (e->call_expr == call_expr)
+ if (e->call_stmt == call_stmt)
break;
return e;
}
-/* Try to find existing function for identifier ID. */
-struct cgraph_node *
-cgraph_node_for_identifier (tree id)
-{
- struct cgraph_node **slot;
-
- if (TREE_CODE (id) != IDENTIFIER_NODE)
- abort ();
-
- if (!cgraph_hash)
- return NULL;
-
- slot = (struct cgraph_node **)
- htab_find_slot_with_hash (cgraph_hash, id,
- IDENTIFIER_HASH_VALUE (id), NO_INSERT);
- if (!slot)
- return NULL;
- return *slot;
-}
-
/* Create edge from CALLER to CALLEE in the cgraph. */
struct cgraph_edge *
cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
- tree call_expr)
+ tree call_stmt, gcov_type count, int nest)
{
- struct cgraph_edge *edge = ggc_alloc (sizeof (struct cgraph_edge));
+ struct cgraph_edge *edge = GGC_NEW (struct cgraph_edge);
#ifdef ENABLE_CHECKING
struct cgraph_edge *e;
for (e = caller->callees; e; e = e->next_callee)
- if (e->call_expr == call_expr)
- abort ();
+ gcc_assert (e->call_stmt != call_stmt);
#endif
- if (TREE_CODE (call_expr) != CALL_EXPR)
- abort ();
+ gcc_assert (get_call_expr_in (call_stmt));
if (!DECL_SAVED_TREE (callee->decl))
edge->inline_failed = N_("function body not available");
edge->caller = caller;
edge->callee = callee;
- edge->call_expr = call_expr;
+ edge->call_stmt = call_stmt;
+ edge->prev_caller = NULL;
edge->next_caller = callee->callers;
+ if (callee->callers)
+ callee->callers->prev_caller = edge;
+ edge->prev_callee = NULL;
edge->next_callee = caller->callees;
+ if (caller->callees)
+ caller->callees->prev_callee = edge;
caller->callees = edge;
callee->callers = edge;
+ edge->count = count;
+ edge->loop_nest = nest;
return edge;
}
-/* Remove the edge E the cgraph. */
+/* Remove the edge E from the list of the callers of the callee. */
+
+static inline void
+cgraph_edge_remove_callee (struct cgraph_edge *e)
+{
+ if (e->prev_caller)
+ e->prev_caller->next_caller = e->next_caller;
+ if (e->next_caller)
+ e->next_caller->prev_caller = e->prev_caller;
+ if (!e->prev_caller)
+ e->callee->callers = e->next_caller;
+}
+
+/* Remove the edge E from the list of the callees of the caller. */
+
+static inline void
+cgraph_edge_remove_caller (struct cgraph_edge *e)
+{
+ if (e->prev_callee)
+ e->prev_callee->next_callee = e->next_callee;
+ if (e->next_callee)
+ e->next_callee->prev_callee = e->prev_callee;
+ if (!e->prev_callee)
+ e->caller->callees = e->next_callee;
+}
+
+/* Remove the edge E from the cgraph. */
void
cgraph_remove_edge (struct cgraph_edge *e)
{
- struct cgraph_edge **edge, **edge2;
+ /* Remove from callers list of the callee. */
+ cgraph_edge_remove_callee (e);
- for (edge = &e->callee->callers; *edge && *edge != e;
- edge = &((*edge)->next_caller))
- continue;
- if (!*edge)
- abort ();
- *edge = (*edge)->next_caller;
- for (edge2 = &e->caller->callees; *edge2 && *edge2 != e;
- edge2 = &(*edge2)->next_callee)
- continue;
- if (!*edge2)
- abort ();
- *edge2 = (*edge2)->next_callee;
+ /* Remove from callees list of the caller. */
+ cgraph_edge_remove_caller (e);
}
/* Redirect callee of E to N. The function does not update underlying
void
cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
{
- struct cgraph_edge **edge;
+ /* Remove from callers list of the current callee. */
+ cgraph_edge_remove_callee (e);
- for (edge = &e->callee->callers; *edge && *edge != e;
- edge = &((*edge)->next_caller))
- continue;
- if (!*edge)
- abort ();
- *edge = (*edge)->next_caller;
- e->callee = n;
+ /* Insert to callers list of the new callee. */
+ e->prev_caller = NULL;
+ if (n->callers)
+ n->callers->prev_caller = e;
e->next_caller = n->callers;
n->callers = e;
+ e->callee = n;
+}
+
+/* Remove all callees from the node. */
+
+void
+cgraph_node_remove_callees (struct cgraph_node *node)
+{
+ struct cgraph_edge *e;
+
+ /* It is sufficient to remove the edges from the lists of callers of
+ the callees. The callee list of the node can be zapped with one
+ assignment. */
+ for (e = node->callees; e; e = e->next_callee)
+ cgraph_edge_remove_callee (e);
+ node->callees = NULL;
+}
+
+/* Remove all callers from the node. */
+
+static void
+cgraph_node_remove_callers (struct cgraph_node *node)
+{
+ struct cgraph_edge *e;
+
+ /* It is sufficient to remove the edges from the lists of callees of
+ the callers. The caller list of the node can be zapped with one
+ assignment. */
+ for (e = node->callers; e; e = e->next_caller)
+ cgraph_edge_remove_caller (e);
+ node->callers = NULL;
}
/* Remove the node from cgraph. */
cgraph_remove_node (struct cgraph_node *node)
{
void **slot;
- bool check_dead = 1;
+ bool kill_body = false;
- while (node->callers)
- cgraph_remove_edge (node->callers);
- while (node->callees)
- cgraph_remove_edge (node->callees);
+ cgraph_node_remove_callers (node);
+ cgraph_node_remove_callees (node);
while (node->nested)
cgraph_remove_node (node->nested);
if (node->origin)
cgraph_nodes = node->next;
if (node->next)
node->next->previous = node->previous;
- slot =
- htab_find_slot_with_hash (cgraph_hash, DECL_ASSEMBLER_NAME (node->decl),
- IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME
- (node->decl)), NO_INSERT);
+ slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
if (*slot == node)
{
if (node->next_clone)
- *slot = node->next_clone;
+ {
+ struct cgraph_node *new_node = node->next_clone;
+ struct cgraph_node *n;
+
+ /* Make the next clone be the master clone.  */
+ for (n = new_node; n; n = n->next_clone)
+ n->master_clone = new_node;
+
+ *slot = new_node;
+ node->next_clone->prev_clone = NULL;
+ }
else
{
htab_clear_slot (cgraph_hash, slot);
- if (!dump_enabled_p (TDI_all))
- {
- DECL_SAVED_TREE (node->decl) = NULL;
- DECL_STRUCT_FUNCTION (node->decl) = NULL;
- }
- check_dead = false;
+ kill_body = true;
}
}
else
{
- struct cgraph_node *n;
+ node->prev_clone->next_clone = node->next_clone;
+ if (node->next_clone)
+ node->next_clone->prev_clone = node->prev_clone;
+ }
- for (n = *slot; n->next_clone != node; n = n->next_clone)
- continue;
- n->next_clone = node->next_clone;
+ /* While all the clones are removed after being processed, the function
+    itself is kept in the cgraph even after it is compiled.  Check whether
+    we are done with this body and reclaim it proactively if this is the
+    case.  */
+ if (!kill_body && *slot)
+ {
+ struct cgraph_node *n = (struct cgraph_node *) *slot;
+ if (!n->next_clone && !n->global.inlined_to
+ && (cgraph_global_info_ready
+ && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl))))
+ kill_body = true;
}
- /* Work out whether we still need a function body (either there is inline
- clone or there is out of line function whose body is not written). */
- if (check_dead && flag_unit_at_a_time)
+ if (kill_body && !dump_enabled_p (TDI_tree_all) && flag_unit_at_a_time)
{
- struct cgraph_node *n;
-
- for (n = *slot; n; n = n->next_clone)
- if (n->global.inlined_to
- || (!n->global.inlined_to
- && !TREE_ASM_WRITTEN (n->decl) && !DECL_EXTERNAL (n->decl)))
- break;
- if (!n && !dump_enabled_p (TDI_all))
- {
- DECL_SAVED_TREE (node->decl) = NULL;
- DECL_STRUCT_FUNCTION (node->decl) = NULL;
- }
+ DECL_SAVED_TREE (node->decl) = NULL;
+ DECL_STRUCT_FUNCTION (node->decl) = NULL;
+ DECL_INITIAL (node->decl) = error_mark_node;
}
cgraph_n_nodes--;
/* Do not free the structure itself so the walk over chain can continue. */
{
notice_global_symbol (node->decl);
node->reachable = 1;
+ gcc_assert (!cgraph_global_info_ready);
node->next_needed = cgraph_nodes_queue;
cgraph_nodes_queue = node;
-
- /* At the moment frontend automatically emits all nested functions. */
- if (node->nested)
- {
- struct cgraph_node *node2;
-
- for (node2 = node->nested; node2; node2 = node2->next_nested)
- if (!node2->reachable)
- cgraph_mark_reachable_node (node2);
- }
}
}
cgraph_mark_reachable_node (node);
}
-/* Return true when CALLER_DECL calls CALLEE_DECL. */
-
-bool
-cgraph_calls_p (tree caller_decl, tree callee_decl)
-{
- struct cgraph_node *caller = cgraph_node (caller_decl);
- struct cgraph_node *callee = cgraph_node (callee_decl);
- struct cgraph_edge *edge;
-
- for (edge = callee->callers; edge && (edge)->caller != caller;
- edge = (edge->next_caller))
- continue;
- return edge != NULL;
-}
-
/* Return local info for the compiled function. */
struct cgraph_local_info *
cgraph_local_info (tree decl)
{
struct cgraph_node *node;
- if (TREE_CODE (decl) != FUNCTION_DECL)
- abort ();
+
+ gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
node = cgraph_node (decl);
return &node->local;
}
cgraph_global_info (tree decl)
{
struct cgraph_node *node;
- if (TREE_CODE (decl) != FUNCTION_DECL || !cgraph_global_info_ready)
- abort ();
+
+ gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
node = cgraph_node (decl);
return &node->global;
}
cgraph_rtl_info (tree decl)
{
struct cgraph_node *node;
- if (TREE_CODE (decl) != FUNCTION_DECL)
- abort ();
+
+ gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
node = cgraph_node (decl);
if (decl != current_function_decl
&& !TREE_ASM_WRITTEN (node->decl))
return lang_hooks.decl_printable_name (node->decl, 2);
}
+/* Return name of the varpool node used in debug output. */
+static const char *
+cgraph_varpool_node_name (struct cgraph_varpool_node *node)
+{
+ return lang_hooks.decl_printable_name (node->decl, 2);
+}
+
+/* Names used to print out the availability enum. */
+static const char * const availability_names[] =
+ {"unset", "not_available", "overwrittable", "available", "local"};
+
/* Dump given cgraph node. */
void
dump_cgraph_node (FILE *f, struct cgraph_node *node)
fprintf (f, " (inline copy in %s/%i)",
cgraph_node_name (node->global.inlined_to),
node->global.inlined_to->uid);
+ if (cgraph_function_flags_ready)
+ fprintf (f, " availability:%s",
+ availability_names [cgraph_function_body_availability (node)]);
+ if (node->master_clone && node->master_clone->uid != node->uid)
+ fprintf (f, "(%i)", node->master_clone->uid);
+ if (node->count)
+ fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
+ (HOST_WIDEST_INT)node->count);
if (node->local.self_insns)
fprintf (f, " %i insns", node->local.self_insns);
if (node->global.insns && node->global.insns != node->local.self_insns)
fprintf (f, " tree");
if (node->output)
fprintf (f, " output");
-
if (node->local.local)
fprintf (f, " local");
+ if (node->local.externally_visible)
+ fprintf (f, " externally_visible");
+ if (node->local.finalized)
+ fprintf (f, " finalized");
if (node->local.disregard_inline_limits)
fprintf (f, " always_inline");
else if (node->local.inlinable)
fprintf (f, " inlinable");
+ if (node->local.redefined_extern_inline)
+ fprintf (f, " redefined_extern_inline");
if (TREE_ASM_WRITTEN (node->decl))
fprintf (f, " asm_written");
{
fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
edge->caller->uid);
+ if (edge->count)
+ fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
+ (HOST_WIDEST_INT)edge->count);
if (!edge->inline_failed)
fprintf(f, "(inlined) ");
}
edge->callee->uid);
if (!edge->inline_failed)
fprintf(f, "(inlined) ");
+ if (edge->count)
+ fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
+ (HOST_WIDEST_INT)edge->count);
+ if (edge->loop_nest)
+ fprintf (f, "(nested in %i loops) ", edge->loop_nest);
}
fprintf (f, "\n");
}
dump_cgraph_node (f, node);
}
+/* Dump given cgraph varpool node. */
+void
+dump_cgraph_varpool_node (FILE *f, struct cgraph_varpool_node *node)
+{
+ fprintf (f, "%s:", cgraph_varpool_node_name (node));
+ fprintf (f, " availability:%s", availability_names [cgraph_variable_initializer_availability (node)]);
+ if (DECL_INITIAL (node->decl))
+ fprintf (f, " initialized");
+ if (node->needed)
+ fprintf (f, " needed");
+ if (node->analyzed)
+ fprintf (f, " analyzed");
+ if (node->finalized)
+ fprintf (f, " finalized");
+ if (node->output)
+ fprintf (f, " output");
+ if (node->externally_visible)
+ fprintf (f, " externally_visible");
+ fprintf (f, "\n");
+}
+
+/* Dump the varpool. */
+
+void
+dump_varpool (FILE *f)
+{
+ struct cgraph_varpool_node *node;
+
+ fprintf (f, "variable pool:\n\n");
+ for (node = cgraph_varpool_nodes; node; node = node->next_needed)
+ dump_cgraph_varpool_node (f, node);
+}
+
/* Returns a hash code for P. */
static hashval_t
-cgraph_varpool_hash_node (const void *p)
+hash_varpool_node (const void *p)
{
- return ((hashval_t)
- IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME
- (((struct cgraph_varpool_node *) p)->decl)));
+ const struct cgraph_varpool_node *n = (const struct cgraph_varpool_node *) p;
+ return (hashval_t) DECL_UID (n->decl);
}
/* Returns nonzero if P1 and P2 are equal. */
static int
-eq_cgraph_varpool_node (const void *p1, const void *p2)
+eq_varpool_node (const void *p1, const void *p2)
{
- return ((DECL_ASSEMBLER_NAME (((struct cgraph_varpool_node *) p1)->decl)) ==
- (tree) p2);
+ const struct cgraph_varpool_node *n1 =
+ (const struct cgraph_varpool_node *) p1;
+ const struct cgraph_varpool_node *n2 =
+ (const struct cgraph_varpool_node *) p2;
+ return DECL_UID (n1->decl) == DECL_UID (n2->decl);
}
/* Return cgraph_varpool node assigned to DECL. Create new one when needed. */
struct cgraph_varpool_node *
cgraph_varpool_node (tree decl)
{
- struct cgraph_varpool_node *node;
- struct cgraph_varpool_node **slot;
+ struct cgraph_varpool_node key, *node, **slot;
- if (!DECL_P (decl) || TREE_CODE (decl) == FUNCTION_DECL)
- abort ();
+ gcc_assert (DECL_P (decl) && TREE_CODE (decl) != FUNCTION_DECL);
if (!cgraph_varpool_hash)
- cgraph_varpool_hash = htab_create_ggc (10, cgraph_varpool_hash_node,
- eq_cgraph_varpool_node, NULL);
+ cgraph_varpool_hash = htab_create_ggc (10, hash_varpool_node,
+ eq_varpool_node, NULL);
+ key.decl = decl;
slot = (struct cgraph_varpool_node **)
- htab_find_slot_with_hash (cgraph_varpool_hash, DECL_ASSEMBLER_NAME (decl),
- IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (decl)),
- INSERT);
+ htab_find_slot (cgraph_varpool_hash, &key, INSERT);
if (*slot)
return *slot;
- node = ggc_alloc_cleared (sizeof (*node));
+ node = GGC_CNEW (struct cgraph_varpool_node);
node->decl = decl;
- cgraph_varpool_n_nodes++;
+ node->order = cgraph_order++;
+ node->next = cgraph_varpool_nodes;
cgraph_varpool_nodes = node;
*slot = node;
return node;
}
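+
+/* Return the cgraph_varpool node that has ASMNAME for its
+   DECL_ASSEMBLER_NAME.  Return NULL if there's no such node.  */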
+struct cgraph_varpool_node *
+cgraph_varpool_node_for_asm (tree asmname)
+{
+ struct cgraph_varpool_node *node;
+
+ for (node = cgraph_varpool_nodes; node ; node = node->next)
+ if (decl_assembler_name_equal (node->decl, asmname))
+ return node;
+
+ return NULL;
+}
+
/* Set the DECL_ASSEMBLER_NAME and update cgraph hashtables. */
void
change_decl_assembler_name (tree decl, tree name)
{
- struct cgraph_node *node = NULL;
- struct cgraph_varpool_node *vnode = NULL;
- void **slot;
-
if (!DECL_ASSEMBLER_NAME_SET_P (decl))
{
SET_DECL_ASSEMBLER_NAME (decl, name);
if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
&& DECL_RTL_SET_P (decl))
- warning ("%D renamed after being referenced in assembly", decl);
+ warning (0, "%D renamed after being referenced in assembly", decl);
- if (TREE_CODE (decl) == FUNCTION_DECL && cgraph_hash)
- {
- /* Take a look whether declaration is in the cgraph structure. */
- slot =
- htab_find_slot_with_hash (cgraph_hash, DECL_ASSEMBLER_NAME (decl),
- IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME
- (decl)), NO_INSERT);
- if (slot)
- node = *slot;
-
- /* It is, verify that we are the canonical node for this decl. */
- if (node && node->decl == decl)
- {
- node = *slot;
- htab_clear_slot (cgraph_hash, slot);
- }
- else
- node = NULL;
- }
- if (TREE_CODE (decl) == VAR_DECL && TREE_STATIC (decl) && cgraph_varpool_hash)
- {
- /* Take a look whether declaration is in the cgraph structure. */
- slot =
- htab_find_slot_with_hash (cgraph_varpool_hash, DECL_ASSEMBLER_NAME (decl),
- IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME
- (decl)), NO_INSERT);
- if (slot)
- vnode = *slot;
-
- /* It is, verify that we are the canonical vnode for this decl. */
- if (vnode && vnode->decl == decl)
- {
- vnode = *slot;
- htab_clear_slot (cgraph_varpool_hash, slot);
- }
- else
- vnode = NULL;
- }
SET_DECL_ASSEMBLER_NAME (decl, name);
- if (node)
- {
- slot =
- htab_find_slot_with_hash (cgraph_hash, name,
- IDENTIFIER_HASH_VALUE (name), INSERT);
- if (*slot)
- abort ();
- *slot = node;
- }
- if (vnode)
- {
- slot =
- htab_find_slot_with_hash (cgraph_varpool_hash, name,
- IDENTIFIER_HASH_VALUE (name), INSERT);
- if (*slot)
- abort ();
- *slot = vnode;
- }
}
-/* Try to find existing function for identifier ID. */
-struct cgraph_varpool_node *
-cgraph_varpool_node_for_identifier (tree id)
+/* Helper function for finalization code - add node into lists so it will
+ be analyzed and compiled. */
+void
+cgraph_varpool_enqueue_needed_node (struct cgraph_varpool_node *node)
{
- struct cgraph_varpool_node **slot;
-
- if (TREE_CODE (id) != IDENTIFIER_NODE)
- abort ();
-
- if (!cgraph_varpool_hash)
- return NULL;
+ if (cgraph_varpool_last_needed_node)
+ cgraph_varpool_last_needed_node->next_needed = node;
+ cgraph_varpool_last_needed_node = node;
+ node->next_needed = NULL;
+ if (!cgraph_varpool_nodes_queue)
+ cgraph_varpool_nodes_queue = node;
+ if (!cgraph_varpool_first_unanalyzed_node)
+ cgraph_varpool_first_unanalyzed_node = node;
+ notice_global_symbol (node->decl);
+}
- slot = (struct cgraph_varpool_node **)
- htab_find_slot_with_hash (cgraph_varpool_hash, id,
- IDENTIFIER_HASH_VALUE (id), NO_INSERT);
- if (!slot)
- return NULL;
- return *slot;
+/* Reset the queue of needed nodes. */
+void
+cgraph_varpool_reset_queue (void)
+{
+ cgraph_varpool_last_needed_node = NULL;
+ cgraph_varpool_nodes_queue = NULL;
+ cgraph_varpool_first_unanalyzed_node = NULL;
}
/* Notify finalize_compilation_unit that given node is reachable
cgraph_varpool_mark_needed_node (struct cgraph_varpool_node *node)
{
if (!node->needed && node->finalized)
- {
- node->next_needed = cgraph_varpool_nodes_queue;
- cgraph_varpool_nodes_queue = node;
- notice_global_symbol (node->decl);
- }
+ cgraph_varpool_enqueue_needed_node (node);
node->needed = 1;
}
+/* Determine if variable DECL is needed. That is, visible to something
+ either outside this translation unit, something magic in the system
+ configury, or (if not doing unit-at-a-time) to something we haven't
+ seen yet. */
+
+bool
+decide_is_variable_needed (struct cgraph_varpool_node *node, tree decl)
+{
+ /* If the user told us it is used, then it must be so. */
+ if (node->externally_visible
+ || lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
+ return true;
+
+ /* ??? If the assembler name is set by hand, it is possible to assemble
+ the name later after finalizing the function and the fact is noticed
+ in assemble_name then. This is arguably a bug. */
+ if (DECL_ASSEMBLER_NAME_SET_P (decl)
+ && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
+ return true;
+
+ /* If we decided it was needed before, but at the time we didn't have
+ the definition available, then it's still needed. */
+ if (node->needed)
+ return true;
+
+ /* Externally visible variables must be output. The exception is
+ COMDAT variables that must be output only when they are needed. */
+ if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
+ return true;
+
+ /* When not reordering top level variables, we have to assume that
+ we are going to keep everything. */
+ if (flag_unit_at_a_time && flag_toplevel_reorder)
+ return false;
+
+ /* We want to emit COMDAT variables only when absolutely necessary. */
+ if (DECL_COMDAT (decl))
+ return false;
+ return true;
+}
+
void
cgraph_varpool_finalize_decl (tree decl)
{
or local (in C, has internal linkage). So do nothing more
if this function has already run. */
if (node->finalized)
- return;
- if (node->needed)
{
- node->next_needed = cgraph_varpool_nodes_queue;
- cgraph_varpool_nodes_queue = node;
- notice_global_symbol (decl);
+ if (cgraph_global_info_ready || (!flag_unit_at_a_time && !flag_openmp))
+ cgraph_varpool_assemble_pending_decls ();
+ return;
}
+ if (node->needed)
+ cgraph_varpool_enqueue_needed_node (node);
node->finalized = true;
- if (/* Externally visible variables must be output. The exception are
- COMDAT functions that must be output only when they are needed. */
- (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
- /* Function whose name is output to the assembler file must be produced.
- It is possible to assemble the name later after finalizing the function
- and the fact is noticed in assemble_name then. */
- || (DECL_ASSEMBLER_NAME_SET_P (decl)
- && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))))
- {
- cgraph_varpool_mark_needed_node (node);
- }
+ if (decide_is_variable_needed (node, decl))
+ cgraph_varpool_mark_needed_node (node);
+ /* Since we reclaim unreachable nodes at the end of every language
+ level unit, we need to be conservative about possible entry points
+ there. */
+ else if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
+ cgraph_varpool_mark_needed_node (node);
+ if (cgraph_global_info_ready || (!flag_unit_at_a_time && !flag_openmp))
+ cgraph_varpool_assemble_pending_decls ();
}
-bool
-cgraph_varpool_assemble_pending_decls (void)
-{
- bool changed = false;
-
- while (cgraph_varpool_nodes_queue)
- {
- tree decl = cgraph_varpool_nodes_queue->decl;
- struct cgraph_varpool_node *node = cgraph_varpool_nodes_queue;
+/* Add a top-level asm statement to the list. */
- cgraph_varpool_nodes_queue = cgraph_varpool_nodes_queue->next_needed;
- if (!TREE_ASM_WRITTEN (decl))
- {
- assemble_variable (decl, 0, 1, 0);
- changed = true;
- }
- node->next_needed = NULL;
- }
- return changed;
+struct cgraph_asm_node *
+cgraph_add_asm_node (tree asm_str)
+{
+ struct cgraph_asm_node *node;
+
+ node = GGC_CNEW (struct cgraph_asm_node);
+ node->asm_str = asm_str;
+ node->order = cgraph_order++;
+ node->next = NULL;
+ if (cgraph_asm_nodes == NULL)
+ cgraph_asm_nodes = node;
+ else
+ cgraph_asm_last_node->next = node;
+ cgraph_asm_last_node = node;
+ return node;
}
/* Return true when the DECL can possibly be inlined. */
{
if (!cgraph_global_info_ready)
return (DECL_INLINE (decl) && !flag_really_no_inline);
- if (!cgraph_inline_hash)
- return false;
- return (htab_find_slot (cgraph_inline_hash, DECL_ASSEMBLER_NAME (decl),
- NO_INSERT) != NULL);
+ return DECL_POSSIBLY_INLINED (decl);
}
/* Create a clone of edge E, represented by CALL_STMT, in node N of the callgraph. */
struct cgraph_edge *
-cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n, tree call_expr)
+cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
+ tree call_stmt, gcov_type count_scale, int loop_nest,
+ bool update_original)
{
- struct cgraph_edge *new = cgraph_create_edge (n, e->callee, call_expr);
+ struct cgraph_edge *new;
+
+ new = cgraph_create_edge (n, e->callee, call_stmt,
+ e->count * count_scale / REG_BR_PROB_BASE,
+ e->loop_nest + loop_nest);
new->inline_failed = e->inline_failed;
+ if (update_original)
+ {
+ e->count -= new->count;
+ if (e->count < 0)
+ e->count = 0;
+ }
return new;
}
-/* Create node representing clone of N. */
+/* Create a node representing a clone of N executed COUNT times.  Decrease
+   the execution counts of the original node too.
+
+ When UPDATE_ORIGINAL is true, the counts are subtracted from the original
+ function's profile to reflect the fact that part of execution is handled
+ by node. */
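+
+/* For example (illustrative numbers only): cloning a node whose count is
+   1000 with COUNT 250 gives count_scale = 250 * REG_BR_PROB_BASE / 1000,
+   so a callee edge executed 400 times in the original receives
+   400 * count_scale / REG_BR_PROB_BASE = 100 executions in the clone;
+   with UPDATE_ORIGINAL the remaining 300 stay on the original edge.  */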
struct cgraph_node *
-cgraph_clone_node (struct cgraph_node *n)
+cgraph_clone_node (struct cgraph_node *n, gcov_type count, int loop_nest,
+ bool update_original)
{
struct cgraph_node *new = cgraph_create_node ();
struct cgraph_edge *e;
+ gcov_type count_scale;
new->decl = n->decl;
new->origin = n->origin;
new->local = n->local;
new->global = n->global;
new->rtl = n->rtl;
+ new->master_clone = n->master_clone;
+ new->count = count;
+ if (n->count)
+ count_scale = new->count * REG_BR_PROB_BASE / n->count;
+ else
+ count_scale = 0;
+ if (update_original)
+ {
+ n->count -= count;
+ if (n->count < 0)
+ n->count = 0;
+ }
for (e = n->callees;e; e=e->next_callee)
- cgraph_clone_edge (e, new, e->call_expr);
+ cgraph_clone_edge (e, new, e->call_stmt, count_scale, loop_nest,
+ update_original);
new->next_clone = n->next_clone;
+ new->prev_clone = n;
n->next_clone = new;
+ if (new->next_clone)
+ new->next_clone->prev_clone = new;
return new;
}
+
+/* Return true if N is a master clone (see cgraph_master_clone). */
+
+bool
+cgraph_is_master_clone (struct cgraph_node *n)
+{
+ return (n == cgraph_master_clone (n));
+}
+
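+/* Return the master clone node of N if its body is available (that is,
+   neither AVAIL_NOT_AVAILABLE nor AVAIL_OVERWRITABLE); return NULL
+   otherwise.  */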
+struct cgraph_node *
+cgraph_master_clone (struct cgraph_node *n)
+{
+ enum availability avail = cgraph_function_body_availability (n);
+
+ if (avail == AVAIL_NOT_AVAILABLE || avail == AVAIL_OVERWRITABLE)
+ return NULL;
+
+ if (!n->master_clone)
+ n->master_clone = cgraph_node (n->decl);
+
+ return n->master_clone;
+}
+
+/* NODE is no longer a nested function; update cgraph accordingly. */
+void
+cgraph_unnest_node (struct cgraph_node *node)
+{
+ struct cgraph_node **node2;
+
+ gcc_assert (node->origin);
+ node2 = &node->origin->nested;
+
+ while (*node2 != node)
+ node2 = &(*node2)->next_nested;
+ *node2 = node->next_nested;
+ node->origin = NULL;
+}
+
+/* Return function availability. See cgraph.h for description of individual
+ return values. */
+enum availability
+cgraph_function_body_availability (struct cgraph_node *node)
+{
+ enum availability avail;
+ gcc_assert (cgraph_function_flags_ready);
+ if (!node->analyzed)
+ avail = AVAIL_NOT_AVAILABLE;
+ else if (node->local.local)
+ avail = AVAIL_LOCAL;
+ else if (node->local.externally_visible)
+ avail = AVAIL_AVAILABLE;
+
+ /* If the function can be overwritten, return OVERWRITABLE.  Take
+    care of at least two notable extensions - the COMDAT functions
+    used to share template instantiations in C++ (this is symmetric
+    to the code in cp_cannot_inline_tree_fn and should probably be
+    shared, with the inlinability hooks eliminated completely).
+
+    ??? Does the C++ one definition rule allow us to always return
+    AVAIL_AVAILABLE here?  That would be a good reason to preserve this
+    hook.  Similarly, deal with extern inline functions - this is again
+    necessary to get C++ shared functions with keyed templates
+    right, and in the C extension documentation we should probably
+    document the requirement that both versions of the function (extern
+    inline and offline) have the same side effect characteristics, since
+    enabling good optimization is what this is about. */
+
+ else if (!(*targetm.binds_local_p) (node->decl)
+ && !DECL_COMDAT (node->decl) && !DECL_EXTERNAL (node->decl))
+ avail = AVAIL_OVERWRITABLE;
+ else avail = AVAIL_AVAILABLE;
+
+ return avail;
+}
+
+/* Return variable availability. See cgraph.h for description of individual
+ return values. */
+enum availability
+cgraph_variable_initializer_availability (struct cgraph_varpool_node *node)
+{
+ gcc_assert (cgraph_function_flags_ready);
+ if (!node->finalized)
+ return AVAIL_NOT_AVAILABLE;
+ if (!TREE_PUBLIC (node->decl))
+ return AVAIL_AVAILABLE;
+ /* If the variable can be overwritten, return OVERWRITABLE. Takes
+ care of at least two notable extensions - the COMDAT variables
+ used to share template instantiations in C++. */
+ if (!(*targetm.binds_local_p) (node->decl) && !DECL_COMDAT (node->decl))
+ return AVAIL_OVERWRITABLE;
+ return AVAIL_AVAILABLE;
+}
+
+
+/* Add the function FNDECL to the call graph. FNDECL is assumed to be
+ in low GIMPLE form and ready to be processed by cgraph_finalize_function.
+
+ When operating in unit-at-a-time, a new callgraph node is added to
+ CGRAPH_EXPAND_QUEUE, which is processed after all the original
+ functions in the call graph.
+
+ When not in unit-at-a-time, the new callgraph node is added to
+ CGRAPH_NODES_QUEUE for cgraph_assemble_pending_functions to
+ process. */
+
+void
+cgraph_add_new_function (tree fndecl)
+{
+ struct cgraph_node *n = cgraph_node (fndecl);
+ n->next_needed = cgraph_expand_queue;
+ cgraph_expand_queue = n;
+}
+
#include "gt-cgraph.h"