X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Fcgraph.c;h=33eb1fe7d2f3f6d7e1dddd3a0d5f5d05c192921b;hb=a3a36c39f62490fc35d320b0d6cebd5358049e1f;hp=6c93ac5f17342e2f3a0eafc7d29add5a99da0d5c;hpb=8ec2a79805715a461ee3ae9078b9f1af304dff1c;p=pf3gnuchains%2Fgcc-fork.git diff --git a/gcc/cgraph.c b/gcc/cgraph.c index 6c93ac5f173..33eb1fe7d2f 100644 --- a/gcc/cgraph.c +++ b/gcc/cgraph.c @@ -1,5 +1,5 @@ /* Callgraph handling code. - Copyright (C) 2003, 2004 Free Software Foundation, Inc. + Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc. Contributed by Jan Hubicka This file is part of GCC. @@ -16,8 +16,8 @@ for more details. You should have received a copy of the GNU General Public License along with GCC; see the file COPYING. If not, write to the Free -Software Foundation, 59 Temple Place - Suite 330, Boston, MA -02111-1307, USA. */ +Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA +02110-1301, USA. */ /* This file contains basic routines manipulating call graph and variable pool @@ -44,7 +44,7 @@ The callgraph: The callgraph at the moment does not represent indirect calls or calls from other compilation unit. Flag NEEDED is set for each node that may - be accessed in such a invisible way and it shall be considered an + be accessed in such an invisible way and it shall be considered an entry point to the callgraph. Intraprocedural information: @@ -84,6 +84,7 @@ The varpool data structure: #include "coretypes.h" #include "tm.h" #include "tree.h" +#include "tree-inline.h" #include "langhooks.h" #include "hashtab.h" #include "toplev.h" @@ -91,10 +92,17 @@ The varpool data structure: #include "ggc.h" #include "debug.h" #include "target.h" +#include "basic-block.h" #include "cgraph.h" #include "varray.h" #include "output.h" #include "intl.h" +#include "tree-gimple.h" +#include "tree-dump.h" + +static void cgraph_node_remove_callers (struct cgraph_node *node); +static inline void cgraph_edge_remove_caller (struct cgraph_edge *e); +static inline void cgraph_edge_remove_callee (struct cgraph_edge *e); /* Hash table used to convert declarations into nodes. */ static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash; @@ -114,17 +122,21 @@ int cgraph_max_uid; /* Set when whole unit has been analyzed so we can access global info. */ bool cgraph_global_info_ready = false; +/* Set when the cgraph is fully build and the basic flags are computed. */ +bool cgraph_function_flags_ready = false; + /* Hash table used to convert declarations into nodes. */ static GTY((param_is (struct cgraph_varpool_node))) htab_t cgraph_varpool_hash; /* Queue of cgraph nodes scheduled to be lowered and output. */ -struct cgraph_varpool_node *cgraph_varpool_nodes_queue; +struct cgraph_varpool_node *cgraph_varpool_nodes_queue, *cgraph_varpool_first_unanalyzed_node; -/* Number of nodes in existence. */ -int cgraph_varpool_n_nodes; /* The linked list of cgraph varpool nodes. */ -static GTY(()) struct cgraph_varpool_node *cgraph_varpool_nodes; +static GTY(()) struct cgraph_varpool_node *cgraph_varpool_nodes; + +/* End of the varpool queue. Needs to be QTYed to work with PCH. 
*/ +static GTY(()) struct cgraph_varpool_node *cgraph_varpool_last_needed_node; static hashval_t hash_node (const void *); static int eq_node (const void *, const void *); @@ -159,6 +171,7 @@ cgraph_create_node (void) if (cgraph_nodes) cgraph_nodes->previous = node; node->previous = NULL; + node->global.estimated_growth = INT_MIN; cgraph_nodes = node; cgraph_n_nodes++; return node; @@ -180,7 +193,12 @@ cgraph_node (tree decl) slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT); if (*slot) - return *slot; + { + node = *slot; + if (!node->master_clone) + node->master_clone = node; + return node; + } node = cgraph_create_node (); node->decl = decl; @@ -190,13 +208,64 @@ cgraph_node (tree decl) node->origin = cgraph_node (DECL_CONTEXT (decl)); node->next_nested = node->origin->nested; node->origin->nested = node; + node->master_clone = node; } return node; } -/* Return callgraph edge representing CALL_EXPR. */ +/* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL. */ + +static bool +decl_assembler_name_equal (tree decl, tree asmname) +{ + tree decl_asmname = DECL_ASSEMBLER_NAME (decl); + + if (decl_asmname == asmname) + return true; + + /* If the target assembler name was set by the user, things are trickier. + We have a leading '*' to begin with. After that, it's arguable what + is the correct thing to do with -fleading-underscore. Arguably, we've + historically been doing the wrong thing in assemble_alias by always + printing the leading underscore. Since we're not changing that, make + sure user_label_prefix follows the '*' before matching. */ + if (IDENTIFIER_POINTER (decl_asmname)[0] == '*') + { + const char *decl_str = IDENTIFIER_POINTER (decl_asmname) + 1; + size_t ulp_len = strlen (user_label_prefix); + + if (ulp_len == 0) + ; + else if (strncmp (decl_str, user_label_prefix, ulp_len) == 0) + decl_str += ulp_len; + else + return false; + + return strcmp (decl_str, IDENTIFIER_POINTER (asmname)) == 0; + } + + return false; +} + + +/* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME. + Return NULL if there's no such node. */ + +struct cgraph_node * +cgraph_node_for_asm (tree asmname) +{ + struct cgraph_node *node; + + for (node = cgraph_nodes; node ; node = node->next) + if (decl_assembler_name_equal (node->decl, asmname)) + return node; + + return NULL; +} + +/* Return callgraph edge representing CALL_EXPR statement. */ struct cgraph_edge * -cgraph_edge (struct cgraph_node *node, tree call_expr) +cgraph_edge (struct cgraph_node *node, tree call_stmt) { struct cgraph_edge *e; @@ -206,7 +275,7 @@ cgraph_edge (struct cgraph_node *node, tree call_expr) because we want to make possible having multiple cgraph nodes representing different clones of the same body before the body is actually cloned. 
*/ for (e = node->callees; e; e= e->next_callee) - if (e->call_expr == call_expr) + if (e->call_stmt == call_stmt) break; return e; } @@ -215,17 +284,17 @@ cgraph_edge (struct cgraph_node *node, tree call_expr) struct cgraph_edge * cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee, - tree call_expr) + tree call_stmt, gcov_type count, int nest) { struct cgraph_edge *edge = ggc_alloc (sizeof (struct cgraph_edge)); #ifdef ENABLE_CHECKING struct cgraph_edge *e; for (e = caller->callees; e; e = e->next_callee) - gcc_assert (e->call_expr != call_expr); + gcc_assert (e->call_stmt != call_stmt); #endif - gcc_assert (TREE_CODE (call_expr) == CALL_EXPR); + gcc_assert (get_call_expr_in (call_stmt)); if (!DECL_SAVED_TREE (callee->decl)) edge->inline_failed = N_("function body not available"); @@ -241,31 +310,58 @@ cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee, edge->caller = caller; edge->callee = callee; - edge->call_expr = call_expr; + edge->call_stmt = call_stmt; + edge->prev_caller = NULL; edge->next_caller = callee->callers; + if (callee->callers) + callee->callers->prev_caller = edge; + edge->prev_callee = NULL; edge->next_callee = caller->callees; + if (caller->callees) + caller->callees->prev_callee = edge; caller->callees = edge; callee->callers = edge; + edge->count = count; + edge->loop_nest = nest; return edge; } -/* Remove the edge E the cgraph. */ +/* Remove the edge E from the list of the callers of the callee. */ + +static inline void +cgraph_edge_remove_callee (struct cgraph_edge *e) +{ + if (e->prev_caller) + e->prev_caller->next_caller = e->next_caller; + if (e->next_caller) + e->next_caller->prev_caller = e->prev_caller; + if (!e->prev_caller) + e->callee->callers = e->next_caller; +} + +/* Remove the edge E from the list of the callees of the caller. */ + +static inline void +cgraph_edge_remove_caller (struct cgraph_edge *e) +{ + if (e->prev_callee) + e->prev_callee->next_callee = e->next_callee; + if (e->next_callee) + e->next_callee->prev_callee = e->prev_callee; + if (!e->prev_callee) + e->caller->callees = e->next_callee; +} + +/* Remove the edge E in the cgraph. */ void cgraph_remove_edge (struct cgraph_edge *e) { - struct cgraph_edge **edge, **edge2; + /* Remove from callers list of the callee. */ + cgraph_edge_remove_callee (e); - for (edge = &e->callee->callers; *edge && *edge != e; - edge = &((*edge)->next_caller)) - continue; - gcc_assert (*edge); - *edge = (*edge)->next_caller; - for (edge2 = &e->caller->callees; *edge2 && *edge2 != e; - edge2 = &(*edge2)->next_callee) - continue; - gcc_assert (*edge2); - *edge2 = (*edge2)->next_callee; + /* Remove from callees list of the callers. */ + cgraph_edge_remove_caller (e); } /* Redirect callee of E to N. The function does not update underlying @@ -274,16 +370,46 @@ cgraph_remove_edge (struct cgraph_edge *e) void cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n) { - struct cgraph_edge **edge; + /* Remove from callers list of the current callee. */ + cgraph_edge_remove_callee (e); - for (edge = &e->callee->callers; *edge && *edge != e; - edge = &((*edge)->next_caller)) - continue; - gcc_assert (*edge); - *edge = (*edge)->next_caller; - e->callee = n; + /* Insert to callers list of the new callee. */ + e->prev_caller = NULL; + if (n->callers) + n->callers->prev_caller = e; e->next_caller = n->callers; n->callers = e; + e->callee = n; +} + +/* Remove all callees from the node. 
*/ + +void +cgraph_node_remove_callees (struct cgraph_node *node) +{ + struct cgraph_edge *e; + + /* It is sufficient to remove the edges from the lists of callers of + the callees. The callee list of the node can be zapped with one + assignment. */ + for (e = node->callees; e; e = e->next_callee) + cgraph_edge_remove_callee (e); + node->callees = NULL; +} + +/* Remove all callers from the node. */ + +static void +cgraph_node_remove_callers (struct cgraph_node *node) +{ + struct cgraph_edge *e; + + /* It is sufficient to remove the edges from the lists of callees of + the callers. The caller list of the node can be zapped with one + assignment. */ + for (e = node->callers; e; e = e->next_caller) + cgraph_edge_remove_caller (e); + node->callers = NULL; } /* Remove the node from cgraph. */ @@ -292,12 +418,10 @@ void cgraph_remove_node (struct cgraph_node *node) { void **slot; - bool check_dead = 1; + bool kill_body = false; - while (node->callers) - cgraph_remove_edge (node->callers); - while (node->callees) - cgraph_remove_edge (node->callees); + cgraph_node_remove_callers (node); + cgraph_node_remove_callees (node); while (node->nested) cgraph_remove_node (node->nested); if (node->origin) @@ -318,44 +442,48 @@ cgraph_remove_node (struct cgraph_node *node) if (*slot == node) { if (node->next_clone) - *slot = node->next_clone; + { + struct cgraph_node *new_node = node->next_clone; + struct cgraph_node *n; + + /* Make the next clone be the master clone */ + for (n = new_node; n; n = n->next_clone) + n->master_clone = new_node; + + *slot = new_node; + node->next_clone->prev_clone = NULL; + } else { htab_clear_slot (cgraph_hash, slot); - if (!dump_enabled_p (TDI_tree_all)) - { - DECL_SAVED_TREE (node->decl) = NULL; - DECL_STRUCT_FUNCTION (node->decl) = NULL; - } - check_dead = false; + kill_body = true; } } else { - struct cgraph_node *n; + node->prev_clone->next_clone = node->next_clone; + if (node->next_clone) + node->next_clone->prev_clone = node->prev_clone; + } - for (n = *slot; n->next_clone != node; n = n->next_clone) - continue; - n->next_clone = node->next_clone; + /* While all the clones are removed after being proceeded, the function + itself is kept in the cgraph even after it is compiled. Check whether + we are done with this body and reclaim it proactively if this is the case. + */ + if (!kill_body && *slot) + { + struct cgraph_node *n = *slot; + if (!n->next_clone && !n->global.inlined_to + && (cgraph_global_info_ready + && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl)))) + kill_body = true; } - /* Work out whether we still need a function body (either there is inline - clone or there is out of line function whose body is not written). */ - if (check_dead && flag_unit_at_a_time) + if (kill_body && !dump_enabled_p (TDI_tree_all) && flag_unit_at_a_time) { - struct cgraph_node *n; - - for (n = *slot; n; n = n->next_clone) - if (n->global.inlined_to - || (!n->global.inlined_to - && !TREE_ASM_WRITTEN (n->decl) && !DECL_EXTERNAL (n->decl))) - break; - if (!n && !dump_enabled_p (TDI_tree_all)) - { - DECL_SAVED_TREE (node->decl) = NULL; - DECL_STRUCT_FUNCTION (node->decl) = NULL; - DECL_INITIAL (node->decl) = error_mark_node; - } + DECL_SAVED_TREE (node->decl) = NULL; + DECL_STRUCT_FUNCTION (node->decl) = NULL; + DECL_INITIAL (node->decl) = error_mark_node; } cgraph_n_nodes--; /* Do not free the structure itself so the walk over chain can continue. 
*/ @@ -370,6 +498,7 @@ cgraph_mark_reachable_node (struct cgraph_node *node) { notice_global_symbol (node->decl); node->reachable = 1; + gcc_assert (!cgraph_global_info_ready); node->next_needed = cgraph_nodes_queue; cgraph_nodes_queue = node; @@ -386,21 +515,6 @@ cgraph_mark_needed_node (struct cgraph_node *node) cgraph_mark_reachable_node (node); } -/* Return true when CALLER_DECL calls CALLEE_DECL. */ - -bool -cgraph_calls_p (tree caller_decl, tree callee_decl) -{ - struct cgraph_node *caller = cgraph_node (caller_decl); - struct cgraph_node *callee = cgraph_node (callee_decl); - struct cgraph_edge *edge; - - for (edge = callee->callers; edge && (edge)->caller != caller; - edge = (edge->next_caller)) - continue; - return edge != NULL; -} - /* Return local info for the compiled function. */ struct cgraph_local_info * @@ -447,6 +561,17 @@ cgraph_node_name (struct cgraph_node *node) return lang_hooks.decl_printable_name (node->decl, 2); } +/* Return name of the node used in debug output. */ +static const char * +cgraph_varpool_node_name (struct cgraph_varpool_node *node) +{ + return lang_hooks.decl_printable_name (node->decl, 2); +} + +/* Names used to print out the availability enum. */ +static const char * const availability_names[] = + {"unset", "not_available", "overwrittable", "available", "local"}; + /* Dump given cgraph node. */ void dump_cgraph_node (FILE *f, struct cgraph_node *node) @@ -457,6 +582,14 @@ dump_cgraph_node (FILE *f, struct cgraph_node *node) fprintf (f, " (inline copy in %s/%i)", cgraph_node_name (node->global.inlined_to), node->global.inlined_to->uid); + if (cgraph_function_flags_ready) + fprintf (f, " availability:%s", + availability_names [cgraph_function_body_availability (node)]); + if (node->master_clone && node->master_clone->uid != node->uid) + fprintf (f, "(%i)", node->master_clone->uid); + if (node->count) + fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x", + (HOST_WIDEST_INT)node->count); if (node->local.self_insns) fprintf (f, " %i insns", node->local.self_insns); if (node->global.insns && node->global.insns != node->local.self_insns) @@ -471,13 +604,18 @@ dump_cgraph_node (FILE *f, struct cgraph_node *node) fprintf (f, " tree"); if (node->output) fprintf (f, " output"); - if (node->local.local) fprintf (f, " local"); + if (node->local.externally_visible) + fprintf (f, " externally_visible"); + if (node->local.finalized) + fprintf (f, " finalized"); if (node->local.disregard_inline_limits) fprintf (f, " always_inline"); else if (node->local.inlinable) fprintf (f, " inlinable"); + if (node->local.redefined_extern_inline) + fprintf (f, " redefined_extern_inline"); if (TREE_ASM_WRITTEN (node->decl)) fprintf (f, " asm_written"); @@ -486,6 +624,9 @@ dump_cgraph_node (FILE *f, struct cgraph_node *node) { fprintf (f, "%s/%i ", cgraph_node_name (edge->caller), edge->caller->uid); + if (edge->count) + fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ", + (HOST_WIDEST_INT)edge->count); if (!edge->inline_failed) fprintf(f, "(inlined) "); } @@ -497,6 +638,11 @@ dump_cgraph_node (FILE *f, struct cgraph_node *node) edge->callee->uid); if (!edge->inline_failed) fprintf(f, "(inlined) "); + if (edge->count) + fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ", + (HOST_WIDEST_INT)edge->count); + if (edge->loop_nest) + fprintf (f, "(nested in %i loops) ", edge->loop_nest); } fprintf (f, "\n"); } @@ -513,6 +659,39 @@ dump_cgraph (FILE *f) dump_cgraph_node (f, node); } +/* Dump given cgraph node. 
*/ +void +dump_cgraph_varpool_node (FILE *f, struct cgraph_varpool_node *node) +{ + fprintf (f, "%s:", cgraph_varpool_node_name (node)); + fprintf (f, " availability:%s", availability_names [cgraph_variable_initializer_availability (node)]); + if (DECL_INITIAL (node->decl)) + fprintf (f, " initialized"); + if (node->needed) + fprintf (f, " needed"); + if (node->analyzed) + fprintf (f, " analyzed"); + if (node->finalized) + fprintf (f, " finalized"); + if (node->output) + fprintf (f, " output"); + if (node->externally_visible) + fprintf (f, " externally_visible"); + fprintf (f, "\n"); +} + +/* Dump the callgraph. */ + +void +dump_varpool (FILE *f) +{ + struct cgraph_varpool_node *node; + + fprintf (f, "variable pool:\n\n"); + for (node = cgraph_varpool_nodes; node; node = node->next_needed) + dump_cgraph_varpool_node (f, node); +} + /* Returns a hash code for P. */ static hashval_t @@ -549,12 +728,24 @@ cgraph_varpool_node (tree decl) return *slot; node = ggc_alloc_cleared (sizeof (*node)); node->decl = decl; - cgraph_varpool_n_nodes++; + node->next = cgraph_varpool_nodes; cgraph_varpool_nodes = node; *slot = node; return node; } +struct cgraph_varpool_node * +cgraph_varpool_node_for_asm (tree asmname) +{ + struct cgraph_varpool_node *node; + + for (node = cgraph_varpool_nodes; node ; node = node->next) + if (decl_assembler_name_equal (node->decl, asmname)) + return node; + + return NULL; +} + /* Set the DECL_ASSEMBLER_NAME and update cgraph hashtables. */ void change_decl_assembler_name (tree decl, tree name) @@ -569,25 +760,88 @@ change_decl_assembler_name (tree decl, tree name) if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)) && DECL_RTL_SET_P (decl)) - warning ("%D renamed after being referenced in assembly", decl); + warning (0, "%D renamed after being referenced in assembly", decl); SET_DECL_ASSEMBLER_NAME (decl, name); } +/* Helper function for finalization code - add node into lists so it will + be analyzed and compiled. */ +void +cgraph_varpool_enqueue_needed_node (struct cgraph_varpool_node *node) +{ + if (cgraph_varpool_last_needed_node) + cgraph_varpool_last_needed_node->next_needed = node; + cgraph_varpool_last_needed_node = node; + node->next_needed = NULL; + if (!cgraph_varpool_nodes_queue) + cgraph_varpool_nodes_queue = node; + if (!cgraph_varpool_first_unanalyzed_node) + cgraph_varpool_first_unanalyzed_node = node; + notice_global_symbol (node->decl); +} + +/* Reset the queue of needed nodes. */ +void +cgraph_varpool_reset_queue (void) +{ + cgraph_varpool_last_needed_node = NULL; + cgraph_varpool_nodes_queue = NULL; + cgraph_varpool_first_unanalyzed_node = NULL; +} + /* Notify finalize_compilation_unit that given node is reachable or needed. */ void cgraph_varpool_mark_needed_node (struct cgraph_varpool_node *node) { if (!node->needed && node->finalized) - { - node->next_needed = cgraph_varpool_nodes_queue; - cgraph_varpool_nodes_queue = node; - notice_global_symbol (node->decl); - } + cgraph_varpool_enqueue_needed_node (node); node->needed = 1; } +/* Determine if variable DECL is needed. That is, visible to something + either outside this translation unit, something magic in the system + configury, or (if not doing unit-at-a-time) to something we haven't + seen yet. */ + +bool +decide_is_variable_needed (struct cgraph_varpool_node *node, tree decl) +{ + /* If the user told us it is used, then it must be so. */ + if (node->externally_visible + || lookup_attribute ("used", DECL_ATTRIBUTES (decl))) + return true; + + /* ??? 
If the assembler name is set by hand, it is possible to assemble + the name later after finalizing the function and the fact is noticed + in assemble_name then. This is arguably a bug. */ + if (DECL_ASSEMBLER_NAME_SET_P (decl) + && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))) + return true; + + /* If we decided it was needed before, but at the time we didn't have + the definition available, then it's still needed. */ + if (node->needed) + return true; + + /* Externally visible variables must be output. The exception is + COMDAT variables that must be output only when they are needed. */ + if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)) + return true; + + if (flag_unit_at_a_time) + return false; + + /* If not doing unit at a time, then we'll only defer this function + if its marked for inlining. Otherwise we want to emit it now. */ + + /* We want to emit COMDAT variables only when absolutely necessary. */ + if (DECL_COMDAT (decl)) + return false; + return true; +} + void cgraph_varpool_finalize_decl (tree decl) { @@ -598,47 +852,24 @@ cgraph_varpool_finalize_decl (tree decl) or local (in C, has internal linkage). So do nothing more if this function has already run. */ if (node->finalized) - return; - if (node->needed) { - node->next_needed = cgraph_varpool_nodes_queue; - cgraph_varpool_nodes_queue = node; - notice_global_symbol (decl); + if (cgraph_global_info_ready || !flag_unit_at_a_time) + cgraph_varpool_assemble_pending_decls (); + return; } + if (node->needed) + cgraph_varpool_enqueue_needed_node (node); node->finalized = true; - if (/* Externally visible variables must be output. The exception are - COMDAT functions that must be output only when they are needed. */ - (TREE_PUBLIC (decl) && !DECL_COMDAT (decl)) - /* Function whose name is output to the assembler file must be produced. - It is possible to assemble the name later after finalizing the function - and the fact is noticed in assemble_name then. */ - || (DECL_ASSEMBLER_NAME_SET_P (decl) - && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))) - { - cgraph_varpool_mark_needed_node (node); - } -} - -bool -cgraph_varpool_assemble_pending_decls (void) -{ - bool changed = false; - - while (cgraph_varpool_nodes_queue) - { - tree decl = cgraph_varpool_nodes_queue->decl; - struct cgraph_varpool_node *node = cgraph_varpool_nodes_queue; - - cgraph_varpool_nodes_queue = cgraph_varpool_nodes_queue->next_needed; - if (!TREE_ASM_WRITTEN (decl)) - { - assemble_variable (decl, 0, 1, 0); - changed = true; - } - node->next_needed = NULL; - } - return changed; + if (decide_is_variable_needed (node, decl)) + cgraph_varpool_mark_needed_node (node); + /* Since we reclaim unreachable nodes at the end of every language + level unit, we need to be conservative about possible entry points + there. */ + else if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)) + cgraph_varpool_mark_needed_node (node); + if (cgraph_global_info_ready || !flag_unit_at_a_time) + cgraph_varpool_assemble_pending_decls (); } /* Return true when the DECL can possibly be inlined. */ @@ -652,20 +883,39 @@ cgraph_function_possibly_inlined_p (tree decl) /* Create clone of E in the node N represented by CALL_EXPR the callgraph. 
*/ struct cgraph_edge * -cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n, tree call_expr) +cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n, + tree call_stmt, gcov_type count_scale, int loop_nest, + bool update_original) { - struct cgraph_edge *new = cgraph_create_edge (n, e->callee, call_expr); + struct cgraph_edge *new; + + new = cgraph_create_edge (n, e->callee, call_stmt, + e->count * count_scale / REG_BR_PROB_BASE, + e->loop_nest + loop_nest); new->inline_failed = e->inline_failed; + if (update_original) + { + e->count -= new->count; + if (e->count < 0) + e->count = 0; + } return new; } -/* Create node representing clone of N. */ +/* Create node representing clone of N executed COUNT times. Decrease + the execution counts from original node too. + + When UPDATE_ORIGINAL is true, the counts are subtracted from the original + function's profile to reflect the fact that part of execution is handled + by node. */ struct cgraph_node * -cgraph_clone_node (struct cgraph_node *n) +cgraph_clone_node (struct cgraph_node *n, gcov_type count, int loop_nest, + bool update_original) { struct cgraph_node *new = cgraph_create_node (); struct cgraph_edge *e; + gcov_type count_scale; new->decl = n->decl; new->origin = n->origin; @@ -678,13 +928,120 @@ cgraph_clone_node (struct cgraph_node *n) new->local = n->local; new->global = n->global; new->rtl = n->rtl; + new->master_clone = n->master_clone; + new->count = count; + if (n->count) + count_scale = new->count * REG_BR_PROB_BASE / n->count; + else + count_scale = 0; + if (update_original) + { + n->count -= count; + if (n->count < 0) + n->count = 0; + } for (e = n->callees;e; e=e->next_callee) - cgraph_clone_edge (e, new, e->call_expr); + cgraph_clone_edge (e, new, e->call_stmt, count_scale, loop_nest, + update_original); new->next_clone = n->next_clone; + new->prev_clone = n; n->next_clone = new; + if (new->next_clone) + new->next_clone->prev_clone = new; return new; } + +/* Return true if N is an master_clone, (see cgraph_master_clone). */ + +bool +cgraph_is_master_clone (struct cgraph_node *n) +{ + return (n == cgraph_master_clone (n)); +} + +struct cgraph_node * +cgraph_master_clone (struct cgraph_node *n) +{ + enum availability avail = cgraph_function_body_availability (n); + + if (avail == AVAIL_NOT_AVAILABLE || avail == AVAIL_OVERWRITABLE) + return NULL; + + if (!n->master_clone) + n->master_clone = cgraph_node (n->decl); + + return n->master_clone; +} + +/* NODE is no longer nested function; update cgraph accordingly. */ +void +cgraph_unnest_node (struct cgraph_node *node) +{ + struct cgraph_node **node2 = &node->origin->nested; + gcc_assert (node->origin); + + while (*node2 != node) + node2 = &(*node2)->next_nested; + *node2 = node->next_nested; + node->origin = NULL; +} + +/* Return function availability. See cgraph.h for description of individual + return values. */ +enum availability +cgraph_function_body_availability (struct cgraph_node *node) +{ + enum availability avail; + gcc_assert (cgraph_function_flags_ready); + if (!node->analyzed) + avail = AVAIL_NOT_AVAILABLE; + else if (node->local.local) + avail = AVAIL_LOCAL; + else if (node->local.externally_visible) + avail = AVAIL_AVAILABLE; + + /* If the function can be overwritten, return OVERWRITABLE. Take + care at least of two notable extensions - the COMDAT functions + used to share template instantiations in C++ (this is symmetric + to code cp_cannot_inline_tree_fn and probably shall be shared and + the inlinability hooks completely eliminated). + + ??? 
Does the C++ one definition rule allow us to always return + AVAIL_AVAILABLE here? That would be good reason to preserve this + hook Similarly deal with extern inline functions - this is again + necessary to get C++ shared functions having keyed templates + right and in the C extension documentation we probably should + document the requirement of both versions of function (extern + inline and offline) having same side effect characteristics as + good optimization is what this optimization is about. */ + + else if (!(*targetm.binds_local_p) (node->decl) + && !DECL_COMDAT (node->decl) && !DECL_EXTERNAL (node->decl)) + avail = AVAIL_OVERWRITABLE; + else avail = AVAIL_AVAILABLE; + + return avail; +} + +/* Return variable availability. See cgraph.h for description of individual + return values. */ +enum availability +cgraph_variable_initializer_availability (struct cgraph_varpool_node *node) +{ + gcc_assert (cgraph_function_flags_ready); + if (!node->finalized) + return AVAIL_NOT_AVAILABLE; + if (!TREE_PUBLIC (node->decl)) + return AVAIL_AVAILABLE; + /* If the variable can be overwritten, return OVERWRITABLE. Takes + care of at least two notable extensions - the COMDAT variables + used to share template instantiations in C++. */ + if (!(*targetm.binds_local_p) (node->decl) && !DECL_COMDAT (node->decl)) + return AVAIL_OVERWRITABLE; + return AVAIL_AVAILABLE; +} + #include "gt-cgraph.h"
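
Illustrative sketch (not part of the patch above): the new cgraph_clone_edge / cgraph_clone_node code splits execution counts between the original node and its clone using fixed-point scaling against REG_BR_PROB_BASE, subtracting the clone's share from the original when update_original is set and clamping at zero. The standalone program below mirrors only that arithmetic; PROB_BASE, split_count and the sample figures are hypothetical stand-ins, not GCC interfaces.

/* Minimal sketch of the profile-count splitting used when cloning a
   cgraph edge.  PROB_BASE plays the role of REG_BR_PROB_BASE; the
   helper and the numbers are invented for illustration.  */
#include <stdio.h>

#define PROB_BASE 10000

typedef long long count_t;

/* Scale an edge count by count_scale / PROB_BASE and, if requested,
   subtract the clone's share from the original, never going below zero
   (the same clamping the patch performs).  */
static count_t
split_count (count_t *original, count_t count_scale, int update_original)
{
  count_t cloned = *original * count_scale / PROB_BASE;

  if (update_original)
    {
      *original -= cloned;
      if (*original < 0)
	*original = 0;
    }
  return cloned;
}

int
main (void)
{
  /* Suppose the original function body executed 1000 times and the
     clone is expected to take 300 of those executions.  */
  count_t node_count = 1000, clone_count = 300;
  count_t scale = clone_count * PROB_BASE / node_count;	/* 3000 */

  /* A call edge inside the body that executed 40 times is split in
     the same proportion: 12 go to the clone, 28 stay behind.  */
  count_t edge_count = 40;
  count_t cloned_edge = split_count (&edge_count, scale, 1);

  printf ("scale=%lld cloned_edge=%lld remaining_edge=%lld\n",
	  scale, cloned_edge, edge_count);
  return 0;
}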