/* Callgraph handling code.
- Copyright (C) 2003 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
Contributed by Jan Hubicka
This file is part of GCC.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA. */
+
+/* This file contains basic routines manipulating the call graph.
+
+The callgraph:
+
+ The call-graph is a data structure designed for inter-procedural
+ optimization, but it is also used in non-unit-at-a-time compilation to
+ allow easier code sharing.
+
+ The call-graph consists of nodes and edges represented via linked lists.
+ Each function (external or not) corresponds to a unique node.
+
+ The mapping from declarations to call-graph nodes is done using a hash
+ table based on DECL_UID. The call-graph nodes are created lazily by the
+ cgraph_node function when it is called for an unknown declaration.
+
+ The callgraph at the moment does not represent indirect calls or calls
+ from other compilation units. The flag NEEDED is set for each node that
+ may be accessed in such an invisible way, and such a node shall be
+ considered an entry point to the callgraph.
+
+ Interprocedural information:
+
+ The callgraph is the place to store data needed for interprocedural
+ optimization. All data structures are divided into three components:
+ local_info, which is produced while analyzing the function; global_info,
+ which is the result of walking the whole callgraph at the end of
+ compilation; and rtl_info, used by the RTL backend to propagate data from
+ already compiled functions to their callers.
+
+ Inlining plans:
+
+ The function inlining information is decided in advance and maintained
+ in the callgraph as a so-called inline plan.
+ For each inlined call, the callee's node is cloned to represent the
+ new function copy produced by the inliner.
+ Each inlined call gets a unique corresponding clone node of the callee,
+ and the data structure is updated while inlining is performed, so
+ the clones are eliminated and their callee edges redirected to the
+ caller.
+
+ Each edge has an "inline_failed" field. When the field is NULL,
+ the call will be inlined. When it is non-NULL, it contains the reason
+ why inlining wasn't performed. */
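+
+/* A brief usage sketch (hypothetical; CALLER_DECL, CALLEE_DECL and STMT
+ are placeholders, not names defined in this file):
+
+ struct cgraph_node *caller = cgraph_node (caller_decl);
+ struct cgraph_node *callee = cgraph_node (callee_decl);
+ struct cgraph_edge *e
+ = cgraph_create_edge (caller, callee, stmt, 0, 0, 0);
+
+ Afterwards a non-NULL e->inline_failed string explains why the call
+ site is not (or not yet) eligible for inlining. */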
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
+#include "tree-inline.h"
#include "langhooks.h"
#include "hashtab.h"
#include "toplev.h"
#include "ggc.h"
#include "debug.h"
#include "target.h"
+#include "basic-block.h"
#include "cgraph.h"
#include "varray.h"
#include "output.h"
+#include "intl.h"
+#include "tree-gimple.h"
+#include "tree-dump.h"
+#include "tree-flow.h"
+static void cgraph_node_remove_callers (struct cgraph_node *node);
+static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
+static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
/* Hash table used to convert declarations into nodes. */
static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;
/* Queue of cgraph nodes scheduled to be lowered. */
struct cgraph_node *cgraph_nodes_queue;
+/* Queue of cgraph nodes scheduled to be added into cgraph. This is a
+ secondary queue used during optimization to accommodate passes that
+ may generate new functions that need to be optimized and expanded. */
+struct cgraph_node *cgraph_new_nodes;
+
/* Number of nodes in existence. */
int cgraph_n_nodes;
+/* Maximal uid used in cgraph nodes. */
+int cgraph_max_uid;
+
+/* Maximal pid used for profiling. */
+int cgraph_max_pid;
+
/* Set when whole unit has been analyzed so we can access global info. */
bool cgraph_global_info_ready = false;
-/* Hash table used to convert declarations into nodes. */
-static GTY((param_is (struct cgraph_varpool_node))) htab_t cgraph_varpool_hash;
+/* What state the callgraph is in right now. */
+enum cgraph_state cgraph_state = CGRAPH_STATE_CONSTRUCTION;
-/* Queue of cgraph nodes scheduled to be lowered and output. */
-struct cgraph_varpool_node *cgraph_varpool_nodes_queue;
+/* Set when the cgraph is fully built and the basic flags are computed. */
+bool cgraph_function_flags_ready = false;
-/* Number of nodes in existence. */
-int cgraph_varpool_n_nodes;
+/* Linked list of cgraph asm nodes. */
+struct cgraph_asm_node *cgraph_asm_nodes;
+
+/* Last node in cgraph_asm_nodes. */
+static GTY(()) struct cgraph_asm_node *cgraph_asm_last_node;
-/* The linked list of cgraph varpool nodes. */
-static GTY(()) struct cgraph_varpool_node *cgraph_varpool_nodes;
+/* The order index of the next cgraph node to be created. This is
+ used so that we can sort the cgraph nodes in order by when we saw
+ them, to support -fno-toplevel-reorder. */
+int cgraph_order;
-static struct cgraph_edge *create_edge PARAMS ((struct cgraph_node *,
- struct cgraph_node *));
-static void cgraph_remove_edge PARAMS ((struct cgraph_node *, struct cgraph_node *));
-static hashval_t hash_node PARAMS ((const void *));
-static int eq_node PARAMS ((const void *, const void *));
+static hashval_t hash_node (const void *);
+static int eq_node (const void *, const void *);
/* Returns a hash code for P. */
static hashval_t
-hash_node (p)
- const void *p;
+hash_node (const void *p)
{
- return ((hashval_t)
- IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME
- (((struct cgraph_node *) p)->decl)));
+ const struct cgraph_node *n = (const struct cgraph_node *) p;
+ return (hashval_t) DECL_UID (n->decl);
}
/* Returns nonzero if P1 and P2 are equal. */
static int
-eq_node (p1, p2)
- const void *p1;
- const void *p2;
+eq_node (const void *p1, const void *p2)
+{
+ const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
+ const struct cgraph_node *n2 = (const struct cgraph_node *) p2;
+ return DECL_UID (n1->decl) == DECL_UID (n2->decl);
+}
+
+/* Allocate a new callgraph node and insert it into the basic data structures. */
+
+static struct cgraph_node *
+cgraph_create_node (void)
{
- return ((DECL_ASSEMBLER_NAME (((struct cgraph_node *) p1)->decl)) ==
- (tree) p2);
+ struct cgraph_node *node;
+
+ node = GGC_CNEW (struct cgraph_node);
+ node->next = cgraph_nodes;
+ node->uid = cgraph_max_uid++;
+ node->pid = -1;
+ node->order = cgraph_order++;
+ if (cgraph_nodes)
+ cgraph_nodes->previous = node;
+ node->previous = NULL;
+ node->global.estimated_growth = INT_MIN;
+ cgraph_nodes = node;
+ cgraph_n_nodes++;
+ return node;
}
/* Return the cgraph node assigned to DECL. Create a new one when needed. */
+
struct cgraph_node *
-cgraph_node (decl)
- tree decl;
+cgraph_node (tree decl)
{
- struct cgraph_node *node;
- struct cgraph_node **slot;
+ struct cgraph_node key, *node, **slot;
- if (TREE_CODE (decl) != FUNCTION_DECL)
- abort ();
+ gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
if (!cgraph_hash)
cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);
- slot = (struct cgraph_node **)
- htab_find_slot_with_hash (cgraph_hash, DECL_ASSEMBLER_NAME (decl),
- IDENTIFIER_HASH_VALUE
- (DECL_ASSEMBLER_NAME (decl)), 1);
+ key.decl = decl;
+
+ slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
+
if (*slot)
- return *slot;
- node = ggc_alloc_cleared (sizeof (*node));
+ {
+ node = *slot;
+ if (!node->master_clone)
+ node->master_clone = node;
+ return node;
+ }
+
+ node = cgraph_create_node ();
node->decl = decl;
- node->next = cgraph_nodes;
- if (cgraph_nodes)
- cgraph_nodes->previous = node;
- node->previous = NULL;
- cgraph_nodes = node;
- cgraph_n_nodes++;
*slot = node;
if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
{
node->origin = cgraph_node (DECL_CONTEXT (decl));
node->next_nested = node->origin->nested;
node->origin->nested = node;
+ node->master_clone = node;
}
return node;
}
-/* Try to find existing function for identifier ID. */
-struct cgraph_node *
-cgraph_node_for_identifier (id)
- tree id;
+/* Insert the already-constructed node NODE into the hash table. */
+
+void
+cgraph_insert_node_to_hashtable (struct cgraph_node *node)
{
struct cgraph_node **slot;
- if (TREE_CODE (id) != IDENTIFIER_NODE)
- abort ();
+ slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, node, INSERT);
- if (!cgraph_hash)
- return NULL;
+ gcc_assert (!*slot);
+ *slot = node;
+}
- slot = (struct cgraph_node **)
- htab_find_slot_with_hash (cgraph_hash, id,
- IDENTIFIER_HASH_VALUE (id), 0);
- if (!slot)
- return NULL;
- return *slot;
+
+/* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
+ Return NULL if there's no such node. */
+
+struct cgraph_node *
+cgraph_node_for_asm (tree asmname)
+{
+ struct cgraph_node *node;
+
+ for (node = cgraph_nodes; node ; node = node->next)
+ if (decl_assembler_name_equal (node->decl, asmname))
+ return node;
+
+ return NULL;
+}
+
+/* Returns a hash value for X (which really is a cgraph_edge). */
+
+static hashval_t
+edge_hash (const void *x)
+{
+ return htab_hash_pointer (((struct cgraph_edge *) x)->call_stmt);
+}
+
+/* Return nonzero if the call_stmt of cgraph_edge X equals the statement Y. */
+
+static int
+edge_eq (const void *x, const void *y)
+{
+ return ((struct cgraph_edge *) x)->call_stmt == y;
+}
+
+/* Return the callgraph edge representing the CALL_EXPR statement CALL_STMT. */
+struct cgraph_edge *
+cgraph_edge (struct cgraph_node *node, tree call_stmt)
+{
+ struct cgraph_edge *e, *e2;
+ int n = 0;
+
+ if (node->call_site_hash)
+ return htab_find_with_hash (node->call_site_hash, call_stmt,
+ htab_hash_pointer (call_stmt));
+
+ /* This loop may turn out to be a performance problem. In such a case,
+ adding hashtables into call nodes with very many edges is probably the
+ best solution. It is not a good idea to add a pointer into the CALL_EXPR
+ itself, because we want to make it possible to have multiple cgraph nodes
+ representing different clones of the same body before the body is actually
+ cloned. */
+ for (e = node->callees; e; e = e->next_callee)
+ {
+ if (e->call_stmt == call_stmt)
+ break;
+ n++;
+ }
+ if (n > 100)
+ {
+ node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
+ for (e2 = node->callees; e2; e2 = e2->next_callee)
+ {
+ void **slot;
+ slot = htab_find_slot_with_hash (node->call_site_hash,
+ e2->call_stmt,
+ htab_hash_pointer (e2->call_stmt),
+ INSERT);
+ gcc_assert (!*slot);
+ *slot = e2;
+ }
+ }
+ return e;
+}
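+
+/* For illustration, a lookup is simply (NODE and STMT hypothetical):
+
+ struct cgraph_edge *e = cgraph_edge (node, stmt);
+
+ Once a single lookup has to scan past 100 edges, the node's
+ call_site_hash is built, so subsequent lookups on such heavily-called
+ nodes are hash-based rather than linear scans. */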
+
+/* Change the call_stmt of edge E to NEW_STMT. */
+
+void
+cgraph_set_call_stmt (struct cgraph_edge *e, tree new_stmt)
+{
+ if (e->caller->call_site_hash)
+ {
+ htab_remove_elt_with_hash (e->caller->call_site_hash,
+ e->call_stmt,
+ htab_hash_pointer (e->call_stmt));
+ }
+ e->call_stmt = new_stmt;
+ if (e->caller->call_site_hash)
+ {
+ void **slot;
+ slot = htab_find_slot_with_hash (e->caller->call_site_hash,
+ e->call_stmt,
+ htab_hash_pointer
+ (e->call_stmt), INSERT);
+ gcc_assert (!*slot);
+ *slot = e;
+ }
}
/* Create edge from CALLER to CALLEE in the cgraph. */
-static struct cgraph_edge *
-create_edge (caller, callee)
- struct cgraph_node *caller, *callee;
+struct cgraph_edge *
+cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
+ tree call_stmt, gcov_type count, int freq, int nest)
{
- struct cgraph_edge *edge = ggc_alloc (sizeof (struct cgraph_edge));
+ struct cgraph_edge *edge = GGC_NEW (struct cgraph_edge);
+#ifdef ENABLE_CHECKING
+ struct cgraph_edge *e;
+
+ for (e = caller->callees; e; e = e->next_callee)
+ gcc_assert (e->call_stmt != call_stmt);
+#endif
+
+ gcc_assert (get_call_expr_in (call_stmt));
+
+ if (!DECL_SAVED_TREE (callee->decl))
+ edge->inline_failed = N_("function body not available");
+ else if (callee->local.redefined_extern_inline)
+ edge->inline_failed = N_("redefined extern inline functions are not "
+ "considered for inlining");
+ else if (callee->local.inlinable)
+ edge->inline_failed = N_("function not considered for inlining");
+ else
+ edge->inline_failed = N_("function not inlinable");
+
+ edge->aux = NULL;
edge->caller = caller;
edge->callee = callee;
+ edge->call_stmt = call_stmt;
+ edge->prev_caller = NULL;
edge->next_caller = callee->callers;
+ if (callee->callers)
+ callee->callers->prev_caller = edge;
+ edge->prev_callee = NULL;
edge->next_callee = caller->callees;
+ if (caller->callees)
+ caller->callees->prev_callee = edge;
caller->callees = edge;
callee->callers = edge;
+ edge->count = count;
+ gcc_assert (count >= 0);
+ edge->frequency = freq;
+ gcc_assert (freq >= 0);
+ gcc_assert (freq <= CGRAPH_FREQ_MAX);
+ edge->loop_nest = nest;
+ if (caller->call_site_hash)
+ {
+ void **slot;
+ slot = htab_find_slot_with_hash (caller->call_site_hash,
+ edge->call_stmt,
+ htab_hash_pointer
+ (edge->call_stmt),
+ INSERT);
+ gcc_assert (!*slot);
+ *slot = edge;
+ }
return edge;
}
-/* Remove the edge from CALLER to CALLEE in the cgraph. */
+/* Remove the edge E from the list of the callers of the callee. */
+
+static inline void
+cgraph_edge_remove_callee (struct cgraph_edge *e)
+{
+ if (e->prev_caller)
+ e->prev_caller->next_caller = e->next_caller;
+ if (e->next_caller)
+ e->next_caller->prev_caller = e->prev_caller;
+ if (!e->prev_caller)
+ e->callee->callers = e->next_caller;
+}
+
+/* Remove the edge E from the list of the callees of the caller. */
+
+static inline void
+cgraph_edge_remove_caller (struct cgraph_edge *e)
+{
+ if (e->prev_callee)
+ e->prev_callee->next_callee = e->next_callee;
+ if (e->next_callee)
+ e->next_callee->prev_callee = e->prev_callee;
+ if (!e->prev_callee)
+ e->caller->callees = e->next_callee;
+ if (e->caller->call_site_hash)
+ htab_remove_elt_with_hash (e->caller->call_site_hash,
+ e->call_stmt,
+ htab_hash_pointer (e->call_stmt));
+}
+
+/* Remove the edge E in the cgraph. */
+
+void
+cgraph_remove_edge (struct cgraph_edge *e)
+{
+ /* Remove from callers list of the callee. */
+ cgraph_edge_remove_callee (e);
+
+ /* Remove from callees list of the callers. */
+ cgraph_edge_remove_caller (e);
+}
+
+/* Redirect the callee of E to N. The function does not update the
+ underlying call expression. */
+
+void
+cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
+{
+ /* Remove from callers list of the current callee. */
+ cgraph_edge_remove_callee (e);
+
+ /* Insert to callers list of the new callee. */
+ e->prev_caller = NULL;
+ if (n->callers)
+ n->callers->prev_caller = e;
+ e->next_caller = n->callers;
+ n->callers = e;
+ e->callee = n;
+}
+
+/* Remove all callees from the node. */
+
+void
+cgraph_node_remove_callees (struct cgraph_node *node)
+{
+ struct cgraph_edge *e;
+
+ /* It is sufficient to remove the edges from the lists of callers of
+ the callees. The callee list of the node can be zapped with one
+ assignment. */
+ for (e = node->callees; e; e = e->next_callee)
+ cgraph_edge_remove_callee (e);
+ node->callees = NULL;
+ if (node->call_site_hash)
+ {
+ htab_delete (node->call_site_hash);
+ node->call_site_hash = NULL;
+ }
+}
+
+/* Remove all callers from the node. */
static void
-cgraph_remove_edge (caller, callee)
- struct cgraph_node *caller, *callee;
+cgraph_node_remove_callers (struct cgraph_node *node)
{
- struct cgraph_edge **edge, **edge2;
+ struct cgraph_edge *e;
+
+ /* It is sufficient to remove the edges from the lists of callees of
+ the callers. The caller list of the node can be zapped with one
+ assignment. */
+ for (e = node->callers; e; e = e->next_caller)
+ cgraph_edge_remove_caller (e);
+ node->callers = NULL;
+}
+
+/* Release the memory used to represent the body of function NODE. */
- for (edge = &callee->callers; *edge && (*edge)->caller != caller;
- edge = &((*edge)->next_caller))
- continue;
- if (!*edge)
- abort ();
- *edge = (*edge)->next_caller;
- for (edge2 = &caller->callees; *edge2 && (*edge2)->callee != callee;
- edge2 = &(*edge2)->next_callee)
- continue;
- if (!*edge2)
- abort ();
- *edge2 = (*edge2)->next_callee;
+void
+cgraph_release_function_body (struct cgraph_node *node)
+{
+ if (DECL_STRUCT_FUNCTION (node->decl)
+ && DECL_STRUCT_FUNCTION (node->decl)->gimple_df)
+ {
+ tree old_decl = current_function_decl;
+ push_cfun (DECL_STRUCT_FUNCTION (node->decl));
+ current_function_decl = node->decl;
+ delete_tree_ssa ();
+ delete_tree_cfg_annotations ();
+ cfun->eh = NULL;
+ current_function_decl = old_decl;
+ pop_cfun();
+ }
+ DECL_SAVED_TREE (node->decl) = NULL;
+ DECL_STRUCT_FUNCTION (node->decl) = NULL;
+ DECL_INITIAL (node->decl) = error_mark_node;
}
/* Remove the node from cgraph. */
void
-cgraph_remove_node (node)
- struct cgraph_node *node;
+cgraph_remove_node (struct cgraph_node *node)
{
- while (node->callers)
- cgraph_remove_edge (node->callers->caller, node);
- while (node->callees)
- cgraph_remove_edge (node, node->callees->callee);
+ void **slot;
+ bool kill_body = false;
+
+ cgraph_node_remove_callers (node);
+ cgraph_node_remove_callees (node);
+ /* Incremental inlining accesses removed nodes stored in the postorder
+ list. */
+ node->needed = node->reachable = false;
while (node->nested)
cgraph_remove_node (node->nested);
if (node->origin)
if (node->previous)
node->previous->next = node->next;
else
- cgraph_nodes = node;
+ cgraph_nodes = node->next;
if (node->next)
node->next->previous = node->previous;
- DECL_SAVED_TREE (node->decl) = NULL;
- /* Do not free the structure itself so the walk over chain can continue. */
-}
-
-/* Notify finalize_compilation_unit that given node is reachable
- or needed. */
-void
-cgraph_mark_needed_node (node, needed)
- struct cgraph_node *node;
- int needed;
-{
- if (needed)
+ node->next = NULL;
+ node->previous = NULL;
+ slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
+ if (*slot == node)
{
- node->needed = 1;
+ if (node->next_clone)
+ {
+ struct cgraph_node *new_node = node->next_clone;
+ struct cgraph_node *n;
+
+ /* Make the next clone be the master clone. */
+ for (n = new_node; n; n = n->next_clone)
+ n->master_clone = new_node;
+
+ *slot = new_node;
+ node->next_clone->prev_clone = NULL;
+ }
+ else
+ {
+ htab_clear_slot (cgraph_hash, slot);
+ kill_body = true;
+ }
}
- if (!node->reachable)
+ else
{
- node->reachable = 1;
- if (DECL_SAVED_TREE (node->decl))
- {
- node->next_needed = cgraph_nodes_queue;
- cgraph_nodes_queue = node;
- }
+ node->prev_clone->next_clone = node->next_clone;
+ if (node->next_clone)
+ node->next_clone->prev_clone = node->prev_clone;
}
-}
+ /* While all the clones are removed after being processed, the function
+ itself is kept in the cgraph even after it is compiled. Check whether
+ we are done with this body and reclaim it proactively if this is the
+ case. */
+ if (!kill_body && *slot)
+ {
+ struct cgraph_node *n = (struct cgraph_node *) *slot;
+ if (!n->next_clone && !n->global.inlined_to
+ && (cgraph_global_info_ready
+ && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl))))
+ kill_body = true;
+ }
-/* Record call from CALLER to CALLEE */
-
-struct cgraph_edge *
-cgraph_record_call (caller, callee)
- tree caller, callee;
-{
- return create_edge (cgraph_node (caller), cgraph_node (callee));
+ if (kill_body && flag_unit_at_a_time)
+ cgraph_release_function_body (node);
+ node->decl = NULL;
+ if (node->call_site_hash)
+ {
+ htab_delete (node->call_site_hash);
+ node->call_site_hash = NULL;
+ }
+ cgraph_n_nodes--;
+ /* Do not free the structure itself so the walk over chain can continue. */
}
+/* Notify finalize_compilation_unit that the given node is reachable. */
+
void
-cgraph_remove_call (caller, callee)
- tree caller, callee;
+cgraph_mark_reachable_node (struct cgraph_node *node)
{
- cgraph_remove_edge (cgraph_node (caller), cgraph_node (callee));
+ if (!node->reachable && node->local.finalized)
+ {
+ notice_global_symbol (node->decl);
+ node->reachable = 1;
+ gcc_assert (!cgraph_global_info_ready);
+
+ node->next_needed = cgraph_nodes_queue;
+ cgraph_nodes_queue = node;
+ }
}
-/* Return true when CALLER_DECL calls CALLEE_DECL. */
+/* Likewise indicate that a node is needed, i.e. reachable via some
+ external means. */
-bool
-cgraph_calls_p (caller_decl, callee_decl)
- tree caller_decl, callee_decl;
+void
+cgraph_mark_needed_node (struct cgraph_node *node)
{
- struct cgraph_node *caller = cgraph_node (caller_decl);
- struct cgraph_node *callee = cgraph_node (callee_decl);
- struct cgraph_edge *edge;
-
- for (edge = callee->callers; edge && (edge)->caller != caller;
- edge = (edge->next_caller))
- continue;
- return edge != NULL;
+ node->needed = 1;
+ cgraph_mark_reachable_node (node);
}
/* Return local info for the compiled function. */
struct cgraph_local_info *
-cgraph_local_info (decl)
- tree decl;
+cgraph_local_info (tree decl)
{
struct cgraph_node *node;
- if (TREE_CODE (decl) != FUNCTION_DECL)
- abort ();
+
+ gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
node = cgraph_node (decl);
return &node->local;
}
/* Return global info for the compiled function. */
struct cgraph_global_info *
-cgraph_global_info (decl)
- tree decl;
+cgraph_global_info (tree decl)
{
struct cgraph_node *node;
- if (TREE_CODE (decl) != FUNCTION_DECL || !cgraph_global_info_ready)
- abort ();
+
+ gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
node = cgraph_node (decl);
return &node->global;
}
/* Return RTL info for the compiled function. */
struct cgraph_rtl_info *
-cgraph_rtl_info (decl)
- tree decl;
+cgraph_rtl_info (tree decl)
{
struct cgraph_node *node;
- if (TREE_CODE (decl) != FUNCTION_DECL)
- abort ();
+
+ gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
node = cgraph_node (decl);
if (decl != current_function_decl
&& !TREE_ASM_WRITTEN (node->decl))
return NULL;
return &node->rtl;
}
+/* Return the name of the node used in debug output. */
+const char *
+cgraph_node_name (struct cgraph_node *node)
+{
+ return lang_hooks.decl_printable_name (node->decl, 2);
+}
+
+/* Names used to print out the availability enum. */
+const char * const cgraph_availability_names[] =
+ {"unset", "not_available", "overwrittable", "available", "local"};
+
+/* Dump given cgraph node. */
+void
+dump_cgraph_node (FILE *f, struct cgraph_node *node)
+{
+ struct cgraph_edge *edge;
+ fprintf (f, "%s/%i(%i):", cgraph_node_name (node), node->uid, node->pid);
+ if (node->global.inlined_to)
+ fprintf (f, " (inline copy in %s/%i)",
+ cgraph_node_name (node->global.inlined_to),
+ node->global.inlined_to->uid);
+ if (cgraph_function_flags_ready)
+ fprintf (f, " availability:%s",
+ cgraph_availability_names [cgraph_function_body_availability (node)]);
+ if (node->master_clone && node->master_clone->uid != node->uid)
+ fprintf (f, "(%i)", node->master_clone->uid);
+ if (node->count)
+ fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
+ (HOST_WIDEST_INT)node->count);
+ if (node->local.self_insns)
+ fprintf (f, " %i insns", node->local.self_insns);
+ if (node->global.insns && node->global.insns != node->local.self_insns)
+ fprintf (f, " (%i after inlining)", node->global.insns);
+ if (node->local.estimated_self_stack_size)
+ fprintf (f, " %i bytes stack usage", (int)node->local.estimated_self_stack_size);
+ if (node->global.estimated_stack_size != node->local.estimated_self_stack_size)
+ fprintf (f, " %i bytes after inlining", (int)node->global.estimated_stack_size);
+ if (node->origin)
+ fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
+ if (node->needed)
+ fprintf (f, " needed");
+ else if (node->reachable)
+ fprintf (f, " reachable");
+ if (DECL_SAVED_TREE (node->decl))
+ fprintf (f, " tree");
+ if (node->output)
+ fprintf (f, " output");
+ if (node->local.local)
+ fprintf (f, " local");
+ if (node->local.externally_visible)
+ fprintf (f, " externally_visible");
+ if (node->local.finalized)
+ fprintf (f, " finalized");
+ if (node->local.disregard_inline_limits)
+ fprintf (f, " always_inline");
+ else if (node->local.inlinable)
+ fprintf (f, " inlinable");
+ if (node->local.redefined_extern_inline)
+ fprintf (f, " redefined_extern_inline");
+ if (TREE_ASM_WRITTEN (node->decl))
+ fprintf (f, " asm_written");
+
+ fprintf (f, "\n called by: ");
+ for (edge = node->callers; edge; edge = edge->next_caller)
+ {
+ fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
+ edge->caller->uid);
+ if (edge->count)
+ fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
+ (HOST_WIDEST_INT)edge->count);
+ if (edge->frequency)
+ fprintf (f, "(%.2f per call) ",
+ edge->frequency / (double)CGRAPH_FREQ_BASE);
+ if (!edge->inline_failed)
+ fprintf(f, "(inlined) ");
+ }
+
+ fprintf (f, "\n calls: ");
+ for (edge = node->callees; edge; edge = edge->next_callee)
+ {
+ fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
+ edge->callee->uid);
+ if (!edge->inline_failed)
+ fprintf(f, "(inlined) ");
+ if (edge->count)
+ fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
+ (HOST_WIDEST_INT)edge->count);
+ if (edge->frequency)
+ fprintf (f, "(%.2f per call) ",
+ edge->frequency / (double)CGRAPH_FREQ_BASE);
+ if (edge->loop_nest)
+ fprintf (f, "(nested in %i loops) ", edge->loop_nest);
+ }
+ fprintf (f, "\n");
+}
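+
+/* For illustration, the output of dump_cgraph_node might look roughly
+ like this (all names and numbers here are hypothetical):
+
+ foo/3(-1): availability:available 10 insns (12 after inlining)
+ called by: main/0 (2x)
+ calls: bar/5 (inlined) */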
/* Dump the callgraph. */
void
-dump_cgraph (f)
- FILE *f;
+dump_cgraph (FILE *f)
{
struct cgraph_node *node;
- fprintf (f, "\nCallgraph:\n\n");
+ fprintf (f, "callgraph:\n\n");
for (node = cgraph_nodes; node; node = node->next)
+ dump_cgraph_node (f, node);
+}
+
+/* Set the DECL_ASSEMBLER_NAME of DECL to NAME, warning when DECL has
+ already been referenced in assembly under its previous name. */
+void
+change_decl_assembler_name (tree decl, tree name)
+{
+ if (!DECL_ASSEMBLER_NAME_SET_P (decl))
{
- struct cgraph_edge *edge;
- fprintf (f, "%s", IDENTIFIER_POINTER (DECL_NAME (node->decl)));
- if (node->origin)
- fprintf (f, " nested in: %s",
- IDENTIFIER_POINTER (DECL_NAME (node->origin->decl)));
- if (node->needed)
- fprintf (f, " needed");
- else if (node->reachable)
- fprintf (f, " reachable");
- if (DECL_SAVED_TREE (node->decl))
- fprintf (f, " tree");
-
- fprintf (f, "\n called by :");
- for (edge = node->callers; edge; edge = edge->next_caller)
- fprintf (f, "%s ",
- IDENTIFIER_POINTER (DECL_NAME (edge->caller->decl)));
-
- fprintf (f, "\n calls: ");
- for (edge = node->callees; edge; edge = edge->next_callee)
- fprintf (f, "%s ",
- IDENTIFIER_POINTER (DECL_NAME (edge->callee->decl)));
- fprintf (f, "\n");
+ SET_DECL_ASSEMBLER_NAME (decl, name);
+ return;
}
+ if (name == DECL_ASSEMBLER_NAME (decl))
+ return;
+
+ if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
+ && DECL_RTL_SET_P (decl))
+ warning (0, "%D renamed after being referenced in assembly", decl);
+
+ SET_DECL_ASSEMBLER_NAME (decl, name);
}
-/* Returns a hash code for P. */
+/* Add a top-level asm statement to the list. */
-static hashval_t
-cgraph_varpool_hash_node (const PTR p)
+struct cgraph_asm_node *
+cgraph_add_asm_node (tree asm_str)
{
- return ((hashval_t)
- IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME
- (((struct cgraph_varpool_node *) p)->decl)));
+ struct cgraph_asm_node *node;
+
+ node = GGC_CNEW (struct cgraph_asm_node);
+ node->asm_str = asm_str;
+ node->order = cgraph_order++;
+ node->next = NULL;
+ if (cgraph_asm_nodes == NULL)
+ cgraph_asm_nodes = node;
+ else
+ cgraph_asm_last_node->next = node;
+ cgraph_asm_last_node = node;
+ return node;
}
-/* Returns nonzero if P1 and P2 are equal. */
+/* Return true when the DECL can possibly be inlined. */
+bool
+cgraph_function_possibly_inlined_p (tree decl)
+{
+ if (!cgraph_global_info_ready)
+ return (DECL_INLINE (decl) && !flag_really_no_inline);
+ return DECL_POSSIBLY_INLINED (decl);
+}
-static int
-eq_cgraph_varpool_node (const PTR p1, const PTR p2)
+/* Create a clone of edge E in the node N, represented by the statement CALL_STMT. */
+struct cgraph_edge *
+cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
+ tree call_stmt, gcov_type count_scale, int freq_scale,
+ int loop_nest, bool update_original)
{
- return ((DECL_ASSEMBLER_NAME (((struct cgraph_varpool_node *) p1)->decl)) ==
- (tree) p2);
+ struct cgraph_edge *new;
+ gcov_type count = e->count * count_scale / REG_BR_PROB_BASE;
+ gcov_type freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
+
+ if (freq > CGRAPH_FREQ_MAX)
+ freq = CGRAPH_FREQ_MAX;
+ new = cgraph_create_edge (n, e->callee, call_stmt, count, freq,
+ e->loop_nest + loop_nest);
+
+ new->inline_failed = e->inline_failed;
+ if (update_original)
+ {
+ e->count -= new->count;
+ if (e->count < 0)
+ e->count = 0;
+ }
+ return new;
}
-/* Return cgraph_varpool node assigned to DECL. Create new one when needed. */
-struct cgraph_varpool_node *
-cgraph_varpool_node (tree decl)
+/* Create a node representing a clone of N, executed COUNT times.
+
+ When UPDATE_ORIGINAL is true, the counts are subtracted from the original
+ function's profile to reflect the fact that part of the execution is now
+ handled by the clone. */
+struct cgraph_node *
+cgraph_clone_node (struct cgraph_node *n, gcov_type count, int freq, int loop_nest,
+ bool update_original)
{
- struct cgraph_varpool_node *node;
- struct cgraph_varpool_node **slot;
+ struct cgraph_node *new = cgraph_create_node ();
+ struct cgraph_edge *e;
+ gcov_type count_scale;
- if (!DECL_P (decl) || TREE_CODE (decl) == FUNCTION_DECL)
- abort ();
+ new->decl = n->decl;
+ new->origin = n->origin;
+ if (new->origin)
+ {
+ new->next_nested = new->origin->nested;
+ new->origin->nested = new;
+ }
+ new->analyzed = n->analyzed;
+ new->local = n->local;
+ new->global = n->global;
+ new->rtl = n->rtl;
+ new->master_clone = n->master_clone;
+ new->count = count;
+ if (n->count)
+ count_scale = new->count * REG_BR_PROB_BASE / n->count;
+ else
+ count_scale = 0;
+ if (update_original)
+ {
+ n->count -= count;
+ if (n->count < 0)
+ n->count = 0;
+ }
- if (!cgraph_varpool_hash)
- cgraph_varpool_hash = htab_create_ggc (10, cgraph_varpool_hash_node,
- eq_cgraph_varpool_node, NULL);
+ for (e = n->callees; e; e = e->next_callee)
+ cgraph_clone_edge (e, new, e->call_stmt, count_scale, freq, loop_nest,
+ update_original);
+ new->next_clone = n->next_clone;
+ new->prev_clone = n;
+ n->next_clone = new;
+ if (new->next_clone)
+ new->next_clone->prev_clone = new;
- slot = (struct cgraph_varpool_node **)
- htab_find_slot_with_hash (cgraph_varpool_hash, DECL_ASSEMBLER_NAME (decl),
- IDENTIFIER_HASH_VALUE (DECL_ASSEMBLER_NAME (decl)),
- 1);
- if (*slot)
- return *slot;
- node = ggc_alloc_cleared (sizeof (*node));
- node->decl = decl;
- cgraph_varpool_n_nodes++;
- cgraph_varpool_nodes = node;
- *slot = node;
- return node;
+ return new;
}
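+
+/* A worked example of the scaling above (hypothetical numbers): if the
+ original node N has count 1000 and the clone is created with COUNT 250,
+ count_scale becomes 250 * REG_BR_PROB_BASE / 1000, each cloned edge
+ keeps a quarter of its original count, and with UPDATE_ORIGINAL the
+ original node is left with count 750. */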
-/* Try to find existing function for identifier ID. */
-struct cgraph_varpool_node *
-cgraph_varpool_node_for_identifier (tree id)
+/* Return true if N is a master clone (see cgraph_master_clone). */
+
+bool
+cgraph_is_master_clone (struct cgraph_node *n)
{
- struct cgraph_varpool_node **slot;
+ return (n == cgraph_master_clone (n));
+}
- if (TREE_CODE (id) != IDENTIFIER_NODE)
- abort ();
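+/* Return the master clone node of N if it is available; return NULL when
+ the function body is not available or may be overwritten. */
+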
+struct cgraph_node *
+cgraph_master_clone (struct cgraph_node *n)
+{
+ enum availability avail = cgraph_function_body_availability (n);
- if (!cgraph_varpool_hash)
+ if (avail == AVAIL_NOT_AVAILABLE || avail == AVAIL_OVERWRITABLE)
return NULL;
- slot = (struct cgraph_varpool_node **)
- htab_find_slot_with_hash (cgraph_varpool_hash, id,
- IDENTIFIER_HASH_VALUE (id), 0);
- if (!slot)
- return NULL;
- return *slot;
-}
+ if (!n->master_clone)
+ n->master_clone = cgraph_node (n->decl);
-/* Notify finalize_compilation_unit that given node is reachable
- or needed. */
-void
-cgraph_varpool_mark_needed_node (struct cgraph_varpool_node *node)
-{
- if (!node->needed && node->finalized)
- {
- node->next_needed = cgraph_varpool_nodes_queue;
- cgraph_varpool_nodes_queue = node;
- }
- node->needed = 1;
+ return n->master_clone;
}
+/* NODE is no longer a nested function; update the cgraph accordingly. */
void
-cgraph_varpool_finalize_decl (tree decl)
+cgraph_unnest_node (struct cgraph_node *node)
{
- struct cgraph_varpool_node *node = cgraph_varpool_node (decl);
+ struct cgraph_node **node2 = &node->origin->nested;
+ gcc_assert (node->origin);
- if (node->needed && !node->finalized)
- {
- node->next_needed = cgraph_varpool_nodes_queue;
- cgraph_varpool_nodes_queue = node;
- }
- node->finalized = true;
-
- if (/* Externally visible variables must be output. The exception are
- COMDAT functions that must be output only when they are needed. */
- (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
- /* Function whose name is output to the assembler file must be produced.
- It is possible to assemble the name later after finalizing the function
- and the fact is noticed in assemble_name then. */
- || (DECL_ASSEMBLER_NAME_SET_P (decl)
- && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))))
- {
- cgraph_varpool_mark_needed_node (node);
- }
+ while (*node2 != node)
+ node2 = &(*node2)->next_nested;
+ *node2 = node->next_nested;
+ node->origin = NULL;
}
-bool
-cgraph_varpool_assemble_pending_decls ()
+/* Return function availability. See cgraph.h for a description of the
+ individual return values. */
+enum availability
+cgraph_function_body_availability (struct cgraph_node *node)
{
- bool changed = false;
+ enum availability avail;
+ gcc_assert (cgraph_function_flags_ready);
+ if (!node->analyzed)
+ avail = AVAIL_NOT_AVAILABLE;
+ else if (node->local.local)
+ avail = AVAIL_LOCAL;
+ else if (!node->local.externally_visible)
+ avail = AVAIL_AVAILABLE;
+
+ /* If the function can be overwritten, return OVERWRITABLE. Take care
+ of at least two notable extensions - the COMDAT functions used to share
+ template instantiations in C++ (this is symmetric to the code in
+ cp_cannot_inline_tree_fn and probably shall be shared, with the
+ inlinability hooks completely eliminated).
+
+ ??? Does the C++ one-definition rule allow us to always return
+ AVAIL_AVAILABLE here? That would be a good reason to preserve this
+ hook. Similarly, deal with extern inline functions - this is again
+ necessary to get C++ shared functions having keyed templates right,
+ and in the C extension documentation we probably should document the
+ requirement that both versions of the function (extern inline and
+ offline) have the same side-effect characteristics, as good
+ optimization is what this optimization is about. */
+
+ else if (!(*targetm.binds_local_p) (node->decl)
+ && !DECL_COMDAT (node->decl) && !DECL_EXTERNAL (node->decl))
+ avail = AVAIL_OVERWRITABLE;
+ else avail = AVAIL_AVAILABLE;
+
+ return avail;
+}
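+
+/* For example (a hypothetical check in an IPA pass), code that must rely
+ on the analyzed body staying valid would test:
+
+ if (cgraph_function_body_availability (node) >= AVAIL_AVAILABLE)
+ ... the body can be trusted ...
+
+ since AVAIL_OVERWRITABLE bodies may be replaced at link time and can
+ serve only as optimization hints. */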
- while (cgraph_varpool_nodes_queue)
- {
- tree decl = cgraph_varpool_nodes_queue->decl;
- struct cgraph_varpool_node *node = cgraph_varpool_nodes_queue;
+/* Add the function FNDECL to the call graph.
+ Unlike cgraph_finalize_function, this function is intended to be used
+ by the middle end and allows insertion of new functions at an arbitrary
+ point of compilation. The function can be in high, low or SSA GIMPLE
+ form.
- cgraph_varpool_nodes_queue = cgraph_varpool_nodes_queue->next_needed;
- if (!TREE_ASM_WRITTEN (decl))
- {
- assemble_variable (decl, 0, 1, 0);
- changed = true;
- }
- node->next_needed = NULL;
+ The function is assumed to be reachable and to have its address taken
+ (so no API-breaking optimizations are performed on it).
+
+ The main work done by this function is to enqueue the function for later
+ processing, avoiding the need for the passes to be re-entrant. */
+
+void
+cgraph_add_new_function (tree fndecl, bool lowered)
+{
+ struct cgraph_node *node;
+ switch (cgraph_state)
+ {
+ case CGRAPH_STATE_CONSTRUCTION:
+ /* Just enqueue the function to be processed at the nearest occurrence. */
+ node = cgraph_node (fndecl);
+ node->next_needed = cgraph_new_nodes;
+ if (lowered)
+ node->lowered = true;
+ cgraph_new_nodes = node;
+ break;
+
+ case CGRAPH_STATE_IPA:
+ case CGRAPH_STATE_IPA_SSA:
+ case CGRAPH_STATE_EXPANSION:
+ /* Bring the function into finalized state and enqueue for later
+ analyzing and compilation. */
+ node = cgraph_node (fndecl);
+ node->local.local = false;
+ node->local.finalized = true;
+ node->reachable = node->needed = true;
+ if (lowered)
+ node->lowered = true;
+ node->next_needed = cgraph_new_nodes;
+ cgraph_new_nodes = node;
+ break;
+
+ case CGRAPH_STATE_FINISHED:
+ /* At the very end of compilation we have to do all the work up
+ to expansion. */
+ push_cfun (DECL_STRUCT_FUNCTION (fndecl));
+ current_function_decl = fndecl;
+ tree_register_cfg_hooks ();
+ if (!lowered)
+ tree_lowering_passes (fndecl);
+ bitmap_obstack_initialize (NULL);
+ if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)) && optimize)
+ execute_pass_list (pass_early_local_passes.sub);
+ bitmap_obstack_release (NULL);
+ tree_rest_of_compilation (fndecl);
+ pop_cfun ();
+ current_function_decl = NULL;
+ break;
}
- return changed;
}
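+
+/* Sketch of intended use (NEW_FNDECL is a hypothetical FUNCTION_DECL
+ synthesized by a pass and already lowered to GIMPLE):
+
+ cgraph_add_new_function (new_fndecl, true);
+
+ Depending on cgraph_state, the function is either queued on
+ cgraph_new_nodes for later processing or compiled immediately. */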
-
#include "gt-cgraph.h"