1 /* Callgraph handling code.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file contains basic routines manipulating call graph
26 The call-graph is a data structure designed for intra-procedural optimization
27 but it is also used in non-unit-at-a-time compilation to allow easier code
30 The call-graph consists of nodes and edges represented via linked lists.
31 Each function (external or not) corresponds to the unique node.
33 The mapping from declarations to call-graph nodes is done using hash table
34 based on DECL_UID. The call-graph nodes are created lazily using
35 cgraph_node function when called for unknown declaration.
37 The callgraph at the moment does not represent indirect calls or calls
38 from other compilation unit. Flag NEEDED is set for each node that may
39 be accessed in such an invisible way and it shall be considered an
40 entry point to the callgraph.
42 Interprocedural information:
44 The callgraph is the place to store data needed for interprocedural optimization.
45 All data structures are divided into three components: local_info that
46 is produced while analyzing the function, global_info that is result
47 of global walking of the callgraph on the end of compilation and
48 rtl_info used by RTL backend to propagate data from already compiled
49 functions to their callers.
53 The function inlining information is decided in advance and maintained
54 in the callgraph as so called inline plan.
55 For each inlined call, the callee's node is cloned to represent the
56 new function copy produced by inliner.
57 Each inlined call gets a unique corresponding clone node of the callee
58 and the data structure is updated while inlining is performed, so
59 the clones are eliminated and their callee edges redirected to the
62 Each edge has "inline_failed" field. When the field is set to NULL,
63 the call will be inlined. When it is non-NULL it contains a reason
64 why inlining wasn't performed. */
68 #include "coretypes.h"
71 #include "tree-inline.h"
72 #include "langhooks.h"
79 #include "basic-block.h"
84 #include "tree-dump.h"
85 #include "tree-flow.h"
86 #include "value-prof.h"
88 static void cgraph_node_remove_callers (struct cgraph_node *node);
89 static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
90 static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
92 /* Hash table used to convert declarations into nodes.  Keyed by DECL_UID
   (see hash_node/eq_node below). */
93 static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;
94 /* Hash table used to convert assembler names into nodes.  Built lazily
   by cgraph_node_for_asm. */
95 static GTY((param_is (struct cgraph_node))) htab_t assembler_name_hash;
97 /* The linked list of cgraph nodes. */
98 struct cgraph_node *cgraph_nodes;
100 /* Queue of cgraph nodes scheduled to be lowered. */
101 struct cgraph_node *cgraph_nodes_queue;
103 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
104 secondary queue used during optimization to accommodate passes that
105 may generate new functions that need to be optimized and expanded. */
106 struct cgraph_node *cgraph_new_nodes;
108 /* Number of nodes in existence. */
111 /* Maximal uid used in cgraph nodes. */
114 /* Maximal uid used in cgraph edges. */
115 int cgraph_edge_max_uid;
117 /* Maximal pid used for profiling. */
120 /* Set when whole unit has been analyzed so we can access global info. */
121 bool cgraph_global_info_ready = false;
123 /* What state callgraph is in right now. */
124 enum cgraph_state cgraph_state = CGRAPH_STATE_CONSTRUCTION;
126 /* Set when the cgraph is fully built and the basic flags are computed. */
127 bool cgraph_function_flags_ready = false;
129 /* Linked list of cgraph asm nodes. */
130 struct cgraph_asm_node *cgraph_asm_nodes;
132 /* Last node in cgraph_asm_nodes. */
133 static GTY(()) struct cgraph_asm_node *cgraph_asm_last_node;
135 /* The order index of the next cgraph node to be created. This is
136 used so that we can sort the cgraph nodes in order by when we saw
137 them, to support -fno-toplevel-reorder. */
140 /* List of hooks triggered on cgraph_edge events. */
141 struct cgraph_edge_hook_list {
142 cgraph_edge_hook hook;
144 struct cgraph_edge_hook_list *next;
147 /* List of hooks triggered on cgraph_node events. */
148 struct cgraph_node_hook_list {
149 cgraph_node_hook hook;
151 struct cgraph_node_hook_list *next;
154 /* List of hooks triggered on events involving two cgraph_edges. */
155 struct cgraph_2edge_hook_list {
156 cgraph_2edge_hook hook;
158 struct cgraph_2edge_hook_list *next;
161 /* List of hooks triggered on events involving two cgraph_nodes. */
162 struct cgraph_2node_hook_list {
163 cgraph_2node_hook hook;
165 struct cgraph_2node_hook_list *next;
168 /* List of hooks triggered when an edge is removed. */
169 struct cgraph_edge_hook_list *first_cgraph_edge_removal_hook;
170 /* List of hooks triggered when a node is removed. */
171 struct cgraph_node_hook_list *first_cgraph_node_removal_hook;
172 /* List of hooks triggered when an edge is duplicated. */
173 struct cgraph_2edge_hook_list *first_cgraph_edge_duplicated_hook;
174 /* List of hooks triggered when a node is duplicated. */
175 struct cgraph_2node_hook_list *first_cgraph_node_duplicated_hook;
176 /* List of hooks triggered when a function is inserted. */
177 struct cgraph_node_hook_list *first_cgraph_function_insertion_hook;
179 /* Head of a linked list of unused (freed) call graph nodes.
180 Do not GTY((delete)) this list so UIDs get reliably recycled. */
181 static GTY(()) struct cgraph_node *free_nodes;
182 /* Head of a linked list of unused (freed) call graph edges.
183 Do not GTY((delete)) this list so UIDs get reliably recycled. */
184 static GTY(()) struct cgraph_edge *free_edges;
186 /* Macros to access the next item in the list of free cgraph nodes and
   edges respectively.  NEXT_FREE_EDGE reuses the prev_caller field as the
   free-list link. */
188 #define NEXT_FREE_NODE(NODE) (NODE)->next
189 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
191 /* Register HOOK to be called with DATA on each removed edge. */
192 struct cgraph_edge_hook_list *
193 cgraph_add_edge_removal_hook (cgraph_edge_hook hook, void *data)
195 struct cgraph_edge_hook_list *entry;
196 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
198 entry = (struct cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
208 /* Remove ENTRY from the list of hooks called on removing edges. */
210 cgraph_remove_edge_removal_hook (struct cgraph_edge_hook_list *entry)
212 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
214 while (*ptr != entry)
220 /* Call all edge removal hooks. */
222 cgraph_call_edge_removal_hooks (struct cgraph_edge *e)
224 struct cgraph_edge_hook_list *entry = first_cgraph_edge_removal_hook;
227 entry->hook (e, entry->data);
232 /* Register HOOK to be called with DATA on each removed node. */
233 struct cgraph_node_hook_list *
234 cgraph_add_node_removal_hook (cgraph_node_hook hook, void *data)
236 struct cgraph_node_hook_list *entry;
237 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
239 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
249 /* Remove ENTRY from the list of hooks called on removing nodes. */
251 cgraph_remove_node_removal_hook (struct cgraph_node_hook_list *entry)
253 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
255 while (*ptr != entry)
261 /* Call all node removal hooks. */
263 cgraph_call_node_removal_hooks (struct cgraph_node *node)
265 struct cgraph_node_hook_list *entry = first_cgraph_node_removal_hook;
268 entry->hook (node, entry->data);
273 /* Register HOOK to be called with DATA on each inserted function. */
274 struct cgraph_node_hook_list *
275 cgraph_add_function_insertion_hook (cgraph_node_hook hook, void *data)
277 struct cgraph_node_hook_list *entry;
278 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
280 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
290 /* Remove ENTRY from the list of hooks called on inserting functions. */
292 cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *entry)
294 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
296 while (*ptr != entry)
302 /* Call all function insertion hooks. */
304 cgraph_call_function_insertion_hooks (struct cgraph_node *node)
306 struct cgraph_node_hook_list *entry = first_cgraph_function_insertion_hook;
309 entry->hook (node, entry->data);
314 /* Register HOOK to be called with DATA on each duplicated edge. */
315 struct cgraph_2edge_hook_list *
316 cgraph_add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
318 struct cgraph_2edge_hook_list *entry;
319 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
321 entry = (struct cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
331 /* Remove ENTRY from the list of hooks called on duplicating edges. */
333 cgraph_remove_edge_duplication_hook (struct cgraph_2edge_hook_list *entry)
335 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
337 while (*ptr != entry)
343 /* Call all edge duplication hooks. */
345 cgraph_call_edge_duplication_hooks (struct cgraph_edge *cs1,
346 struct cgraph_edge *cs2)
348 struct cgraph_2edge_hook_list *entry = first_cgraph_edge_duplicated_hook;
351 entry->hook (cs1, cs2, entry->data);
356 /* Register HOOK to be called with DATA on each duplicated node. */
357 struct cgraph_2node_hook_list *
358 cgraph_add_node_duplication_hook (cgraph_2node_hook hook, void *data)
360 struct cgraph_2node_hook_list *entry;
361 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
363 entry = (struct cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
373 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
375 cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list *entry)
377 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
379 while (*ptr != entry)
385 /* Call all node duplication hooks. */
387 cgraph_call_node_duplication_hooks (struct cgraph_node *node1,
388 struct cgraph_node *node2)
390 struct cgraph_2node_hook_list *entry = first_cgraph_node_duplicated_hook;
393 entry->hook (node1, node2, entry->data);
398 /* Returns a hash code for P (a cgraph_node), keyed on the DECL_UID of
   its declaration.  Callback for cgraph_hash. */
401 hash_node (const void *p)
403 const struct cgraph_node *n = (const struct cgraph_node *) p;
404 return (hashval_t) DECL_UID (n->decl);
407 /* Returns nonzero if P1 and P2 (cgraph_nodes) refer to declarations
   with equal DECL_UIDs.  Callback for cgraph_hash. */
410 eq_node (const void *p1, const void *p2)
412 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
413 const struct cgraph_node *n2 = (const struct cgraph_node *) p2;
414 return DECL_UID (n1->decl) == DECL_UID (n2->decl);
417 /* Allocate new callgraph node and insert it into basic data structures. */
419 static struct cgraph_node *
420 cgraph_create_node (void)
422 struct cgraph_node *node;
427 free_nodes = NEXT_FREE_NODE (node);
431 node = GGC_CNEW (struct cgraph_node);
432 node->uid = cgraph_max_uid++;
435 node->next = cgraph_nodes;
437 node->order = cgraph_order++;
439 cgraph_nodes->previous = node;
440 node->previous = NULL;
441 node->global.estimated_growth = INT_MIN;
447 /* Return cgraph node assigned to DECL. Create new one when needed. */
450 cgraph_node (tree decl)
452 struct cgraph_node key, *node, **slot;
454 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
457 cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);
461 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
469 node = cgraph_create_node ();
472 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
/* DECL is a nested function: link it onto its containing function's
   list of nested nodes. */
474 node->origin = cgraph_node (DECL_CONTEXT (decl));
475 node->next_nested = node->origin->nested;
476 node->origin->nested = node;
478 if (assembler_name_hash)
481 tree name = DECL_ASSEMBLER_NAME (decl);
483 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
484 decl_assembler_name_hash (name),
486 /* We can have multiple declarations with same assembler name. For C++
487 it is __builtin_strlen and strlen, for instance. Do we need to
488 record them all? Original implementation marked just first one
489 so lets hope for the best. */
496 /* Insert already constructed node into hashtable. */
499 cgraph_insert_node_to_hashtable (struct cgraph_node *node)
501 struct cgraph_node **slot;
503 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, node, INSERT);
509 /* Returns a hash code for P (a cgraph_node), keyed on the assembler name
   of its declaration.  Callback for assembler_name_hash. */
512 hash_node_by_assembler_name (const void *p)
514 const struct cgraph_node *n = (const struct cgraph_node *) p;
515 return (hashval_t) decl_assembler_name_hash (DECL_ASSEMBLER_NAME (n->decl));
518 /* Returns nonzero if P1 (a cgraph_node) has the assembler name P2 (a tree).
   Callback for assembler_name_hash. */
521 eq_assembler_name (const void *p1, const void *p2)
523 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
524 const_tree name = (const_tree)p2;
525 return (decl_assembler_name_equal (n1->decl, name));
528 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
529 Return NULL if there's no such node. */
532 cgraph_node_for_asm (tree asmname)
534 struct cgraph_node *node;
537 if (!assembler_name_hash)
/* First use: build the assembler-name hash lazily from all existing
   nodes, skipping inline clones. */
539 assembler_name_hash =
540 htab_create_ggc (10, hash_node_by_assembler_name, eq_assembler_name,
542 for (node = cgraph_nodes; node; node = node->next)
543 if (!node->global.inlined_to)
545 tree name = DECL_ASSEMBLER_NAME (node->decl);
546 slot = htab_find_slot_with_hash (assembler_name_hash, name,
547 decl_assembler_name_hash (name),
549 /* We can have multiple declarations with same assembler name. For C++
550 it is __builtin_strlen and strlen, for instance. Do we need to
551 record them all? Original implementation marked just first one
552 so lets hope for the best. */
559 slot = htab_find_slot_with_hash (assembler_name_hash, asmname,
560 decl_assembler_name_hash (asmname),
564 return (struct cgraph_node *) *slot;
568 /* Returns a hash value for X (which really is a cgraph_edge), keyed on
   the pointer identity of its call statement.  Callback for call_site_hash. */
571 edge_hash (const void *x)
573 return htab_hash_pointer (((const struct cgraph_edge *) x)->call_stmt);
576 /* Return nonzero if the call_stmt of cgraph_edge X is the statement Y.
   Callback for call_site_hash. */
579 edge_eq (const void *x, const void *y)
581 return ((const struct cgraph_edge *) x)->call_stmt == y;
585 /* Return the callgraph edge representing the GIMPLE_CALL statement
   CALL_STMT in function NODE, or NULL if there is none. */
589 cgraph_edge (struct cgraph_node *node, gimple call_stmt)
591 struct cgraph_edge *e, *e2;
594 if (node->call_site_hash)
595 return (struct cgraph_edge *)
596 htab_find_with_hash (node->call_site_hash, call_stmt,
597 htab_hash_pointer (call_stmt));
599 /* This loop may turn out to be performance problem. In such case adding
600 hashtables into call nodes with very many edges is probably best
601 solution. It is not good idea to add pointer into CALL_EXPR itself
602 because we want to make possible having multiple cgraph nodes representing
603 different clones of the same body before the body is actually cloned. */
604 for (e = node->callees; e; e= e->next_callee)
606 if (e->call_stmt == call_stmt)
/* Linear scan got too long: build the per-node call-site hash so
   subsequent lookups are O(1). */
613 node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
614 for (e2 = node->callees; e2; e2 = e2->next_callee)
617 slot = htab_find_slot_with_hash (node->call_site_hash,
619 htab_hash_pointer (e2->call_stmt),
630 /* Change field call_stmt of edge E to NEW_STMT, keeping the caller's
   call-site hash (if any) in sync. */
633 cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt)
635 if (e->caller->call_site_hash)
637 htab_remove_elt_with_hash (e->caller->call_site_hash,
639 htab_hash_pointer (e->call_stmt));
641 e->call_stmt = new_stmt;
642 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
643 e->can_throw_external = stmt_can_throw_external (new_stmt);
645 if (e->caller->call_site_hash)
648 slot = htab_find_slot_with_hash (e->caller->call_site_hash,
651 (e->call_stmt), INSERT);
657 /* Like cgraph_set_call_stmt but walk the clone tree and update all clones sharing
658 same function body. */
661 cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
662 gimple old_stmt, gimple new_stmt)
664 struct cgraph_node *node;
665 struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);
668 cgraph_set_call_stmt (edge, new_stmt);
/* Depth-first walk over the clone tree rooted at ORIG. */
670 for (node = orig->clones; node != orig;)
672 struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
674 cgraph_set_call_stmt (edge, new_stmt);
677 else if (node->next_sibling_clone)
678 node = node->next_sibling_clone;
681 while (node != orig && !node->next_sibling_clone)
682 node = node->clone_of;
684 node = node->next_sibling_clone;
689 /* Like cgraph_create_edge walk the clone tree and update all clones sharing
692 TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
693 frequencies of the clones. */
697 cgraph_create_edge_including_clones (struct cgraph_node *orig, struct cgraph_node *callee,
698 gimple stmt, gcov_type count, int freq,
700 cgraph_inline_failed_t reason)
702 struct cgraph_node *node;
704 if (!cgraph_edge (orig, stmt))
705 cgraph_create_edge (orig, callee, stmt,
706 count, freq, loop_depth)->inline_failed = reason;
/* Depth-first walk over the clone tree rooted at ORIG. */
709 for (node = orig->clones; node != orig;)
711 /* It is possible that we already constant propagated into the clone
712 and turned indirect call into direct call. */
713 if (!cgraph_edge (node, stmt))
714 cgraph_create_edge (node, callee, stmt, count, freq,
715 loop_depth)->inline_failed = reason;
719 else if (node->next_sibling_clone)
720 node = node->next_sibling_clone;
723 while (node != orig && !node->next_sibling_clone)
724 node = node->clone_of;
726 node = node->next_sibling_clone;
731 /* Give initial reasons why inlining would fail on EDGE. This gets either
732 nullified or usually overwritten by more precise reasons later. */
735 initialize_inline_failed (struct cgraph_edge *e)
737 struct cgraph_node *callee = e->callee;
739 if (!callee->analyzed)
740 e->inline_failed = CIF_BODY_NOT_AVAILABLE;
741 else if (callee->local.redefined_extern_inline)
742 e->inline_failed = CIF_REDEFINED_EXTERN_INLINE;
743 else if (!callee->local.inlinable)
744 e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
745 else if (gimple_call_cannot_inline_p (e->call_stmt))
746 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
748 e->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
751 /* Create edge from CALLER to CALLEE in the cgraph for call statement
   CALL_STMT with profile count COUNT, frequency FREQ and loop nest NEST. */
754 cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
755 gimple call_stmt, gcov_type count, int freq, int nest)
757 struct cgraph_edge *edge;
759 #ifdef ENABLE_CHECKING
760 /* This is rather pricey check possibly triggering construction of the
   call-site hash, so do it under ENABLE_CHECKING only. */
762 gcc_assert (!cgraph_edge (caller, call_stmt));
765 gcc_assert (is_gimple_call (call_stmt));
/* Reuse a recycled edge from the free list when available. */
770 free_edges = NEXT_FREE_EDGE (edge);
774 edge = GGC_NEW (struct cgraph_edge);
775 edge->uid = cgraph_edge_max_uid++;
780 edge->caller = caller;
781 edge->callee = callee;
782 edge->call_stmt = call_stmt;
783 push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
784 edge->can_throw_external = stmt_can_throw_external (call_stmt);
/* Link EDGE into the callee's caller list and the caller's callee list. */
786 edge->prev_caller = NULL;
787 edge->next_caller = callee->callers;
789 callee->callers->prev_caller = edge;
790 edge->prev_callee = NULL;
791 edge->next_callee = caller->callees;
793 caller->callees->prev_callee = edge;
794 caller->callees = edge;
795 callee->callers = edge;
797 gcc_assert (count >= 0);
798 edge->frequency = freq;
799 gcc_assert (freq >= 0);
800 gcc_assert (freq <= CGRAPH_FREQ_MAX);
801 edge->loop_nest = nest;
802 edge->indirect_call = 0;
803 if (caller->call_site_hash)
806 slot = htab_find_slot_with_hash (caller->call_site_hash,
815 initialize_inline_failed (edge);
820 /* Remove the edge E from the list of the callers of the callee. */
823 cgraph_edge_remove_callee (struct cgraph_edge *e)
826 e->prev_caller->next_caller = e->next_caller;
828 e->next_caller->prev_caller = e->prev_caller;
830 e->callee->callers = e->next_caller;
833 /* Remove the edge E from the list of the callees of the caller. */
836 cgraph_edge_remove_caller (struct cgraph_edge *e)
839 e->prev_callee->next_callee = e->next_callee;
841 e->next_callee->prev_callee = e->prev_callee;
843 e->caller->callees = e->next_callee;
844 if (e->caller->call_site_hash)
845 htab_remove_elt_with_hash (e->caller->call_site_hash,
847 htab_hash_pointer (e->call_stmt));
850 /* Put the edge onto the free list. */
853 cgraph_free_edge (struct cgraph_edge *e)
857 /* Clear out the edge so we do not dangle pointers. */
858 memset (e, 0, sizeof (*e));
860 NEXT_FREE_EDGE (e) = free_edges;
864 /* Remove the edge E in the cgraph. */
867 cgraph_remove_edge (struct cgraph_edge *e)
869 /* Call all edge removal hooks. */
870 cgraph_call_edge_removal_hooks (e);
872 /* Remove from callers list of the callee. */
873 cgraph_edge_remove_callee (e);
875 /* Remove from callees list of the callers. */
876 cgraph_edge_remove_caller (e);
878 /* Put the edge onto the free list. */
879 cgraph_free_edge (e);
882 /* Redirect callee of E to N. The function does not update underlying
   call expression. */
886 cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
888 /* Remove from callers list of the current callee. */
889 cgraph_edge_remove_callee (e);
891 /* Insert to callers list of the new callee. */
892 e->prev_caller = NULL;
894 n->callers->prev_caller = e;
895 e->next_caller = n->callers;
901 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
902 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
903 of OLD_STMT if it was previously call statement. */
906 cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
907 gimple old_stmt, tree old_call, gimple new_stmt)
909 tree new_call = (is_gimple_call (new_stmt)) ? gimple_call_fndecl (new_stmt) : 0;
911 /* If both calls are indirect, there is nothing to update. */
912 if (!new_call && !old_call)
914 /* See if we turned indirect call into direct call or folded call to one builtin
915 into different builtin. */
916 if (old_call != new_call)
918 struct cgraph_edge *e = cgraph_edge (node, old_stmt);
919 struct cgraph_edge *ne = NULL;
926 /* See if the call is already there. It might be because of indirect
927 inlining already found it. */
928 if (new_call && e->callee->decl == new_call)
931 /* Otherwise remove edge and create new one; we can't simply redirect
932 since function has changed, so inline plan and other information
933 attached to edge is invalid. */
934 cgraph_remove_edge (e);
936 frequency = e->frequency;
937 loop_nest = e->loop_nest;
941 /* We are seeing new direct call; compute profile info based on BB. */
942 basic_block bb = gimple_bb (new_stmt);
944 frequency = compute_call_stmt_bb_frequency (current_function_decl,
946 loop_nest = bb->loop_depth;
951 ne = cgraph_create_edge (node, cgraph_node (new_call),
952 new_stmt, count, frequency,
954 gcc_assert (ne->inline_failed);
957 /* We only updated the call stmt; update pointer in cgraph edge. */
958 else if (old_stmt != new_stmt)
959 cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
962 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
963 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
964 of OLD_STMT before it was updated (updating can happen inplace). */
967 cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
969 struct cgraph_node *orig = cgraph_node (cfun->decl);
970 struct cgraph_node *node;
972 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
/* Depth-first walk over the clone tree so clones sharing the body are
   updated as well. */
974 for (node = orig->clones; node != orig;)
976 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
979 else if (node->next_sibling_clone)
980 node = node->next_sibling_clone;
983 while (node != orig && !node->next_sibling_clone)
984 node = node->clone_of;
986 node = node->next_sibling_clone;
992 /* Remove all callees from the node. */
995 cgraph_node_remove_callees (struct cgraph_node *node)
997 struct cgraph_edge *e, *f;
999 /* It is sufficient to remove the edges from the lists of callers of
1000 the callees. The callee list of the node can be zapped with one
   assignment. */
1002 for (e = node->callees; e; e = f)
1005 cgraph_call_edge_removal_hooks (e);
1006 cgraph_edge_remove_callee (e);
1007 cgraph_free_edge (e);
1009 node->callees = NULL;
1010 if (node->call_site_hash)
1012 htab_delete (node->call_site_hash);
1013 node->call_site_hash = NULL;
1017 /* Remove all callers from the node. */
1020 cgraph_node_remove_callers (struct cgraph_node *node)
1022 struct cgraph_edge *e, *f;
1024 /* It is sufficient to remove the edges from the lists of callees of
1025 the callers. The caller list of the node can be zapped with one
   assignment. */
1027 for (e = node->callers; e; e = f)
1030 cgraph_call_edge_removal_hooks (e);
1031 cgraph_edge_remove_caller (e);
1032 cgraph_free_edge (e);
1034 node->callers = NULL;
1037 /* Release memory used to represent body of function NODE. */
1040 cgraph_release_function_body (struct cgraph_node *node)
1042 if (DECL_STRUCT_FUNCTION (node->decl))
1044 tree old_decl = current_function_decl;
1045 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1046 if (cfun->gimple_df)
1048 current_function_decl = node->decl;
1050 delete_tree_cfg_annotations ();
1052 current_function_decl = old_decl;
1056 gcc_assert (dom_computed[0] == DOM_NONE);
1057 gcc_assert (dom_computed[1] == DOM_NONE);
1060 if (cfun->value_histograms)
1062 gcc_assert (!current_loops);
1064 gimple_set_body (node->decl, NULL);
1065 VEC_free (ipa_opt_pass, heap,
1066 DECL_STRUCT_FUNCTION (node->decl)->ipa_transforms_to_apply);
1067 /* Struct function hangs a lot of data that would leak if we didn't
1068 remove all pointers to it. */
1069 ggc_free (DECL_STRUCT_FUNCTION (node->decl));
1070 DECL_STRUCT_FUNCTION (node->decl) = NULL;
1072 DECL_SAVED_TREE (node->decl) = NULL;
1073 /* If the node is abstract and needed, then do not clear DECL_INITIAL
1074 of its associated function declaration because it's
1075 needed to emit debug info later. */
1076 if (!node->abstract_and_needed)
1077 DECL_INITIAL (node->decl) = error_mark_node;
1080 /* Remove the node from cgraph, unlinking it from the nested-function
   lists, the clone tree and both hash tables, and recycle it. */
1083 cgraph_remove_node (struct cgraph_node *node)
1086 bool kill_body = false;
1087 struct cgraph_node *n;
1088 int uid = node->uid;
1090 cgraph_call_node_removal_hooks (node);
1091 cgraph_node_remove_callers (node);
1092 cgraph_node_remove_callees (node);
1094 /* Incremental inlining access removed nodes stored in the postorder list. */
1096 node->needed = node->reachable = false;
1097 for (n = node->nested; n; n = n->next_nested)
1099 node->nested = NULL;
/* Unlink NODE from its containing function's nested list, if any. */
1102 struct cgraph_node **node2 = &node->origin->nested;
1104 while (*node2 != node)
1105 node2 = &(*node2)->next_nested;
1106 *node2 = node->next_nested;
/* Unlink NODE from the global doubly-linked node list. */
1109 node->previous->next = node->next;
1111 cgraph_nodes = node->next;
1113 node->next->previous = node->previous;
1115 node->previous = NULL;
1116 slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
1119 struct cgraph_node *next_inline_clone;
1121 for (next_inline_clone = node->clones;
1122 next_inline_clone && next_inline_clone->decl != node->decl;
1123 next_inline_clone = next_inline_clone->next_sibling_clone)
1126 /* If there is inline clone of the node being removed, we need
1127 to put it into the position of removed node and reorganize all
1128 other clones to be based on it. */
1129 if (next_inline_clone)
1131 struct cgraph_node *n;
1132 struct cgraph_node *new_clones;
1134 *slot = next_inline_clone;
1136 /* Unlink inline clone from the list of clones of removed node. */
1137 if (next_inline_clone->next_sibling_clone)
1138 next_inline_clone->next_sibling_clone->prev_sibling_clone
1139 = next_inline_clone->prev_sibling_clone;
1140 if (next_inline_clone->prev_sibling_clone)
1142 next_inline_clone->prev_sibling_clone->next_sibling_clone
1143 = next_inline_clone->next_sibling_clone;
1146 node->clones = next_inline_clone->next_sibling_clone;
1148 new_clones = node->clones;
1149 node->clones = NULL;
1151 /* Copy clone info. */
1152 next_inline_clone->clone = node->clone;
1154 /* Now place it into clone tree at same level at NODE. */
1155 next_inline_clone->clone_of = node->clone_of;
1156 next_inline_clone->prev_sibling_clone = NULL;
1157 next_inline_clone->next_sibling_clone = NULL;
1160 next_inline_clone->next_sibling_clone = node->clone_of->clones;
1161 node->clone_of->clones = next_inline_clone;
1164 /* Merge the clone list. */
1167 if (!next_inline_clone->clones)
1168 next_inline_clone->clones = new_clones;
1171 n = next_inline_clone->clones;
1172 while (n->next_sibling_clone)
1173 n = n->next_sibling_clone;
1174 n->next_sibling_clone = new_clones;
1175 new_clones->prev_sibling_clone = n;
1179 /* Update clone_of pointers. */
1183 n->clone_of = next_inline_clone;
1184 n = n->next_sibling_clone;
1189 htab_clear_slot (cgraph_hash, slot);
/* NODE is itself a clone: unlink it from the clone tree and splice its
   own clones into its parent's clone list. */
1195 gcc_assert (node->clone_of);
1196 if (node->prev_sibling_clone)
1197 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1198 else if (node->clone_of)
1199 node->clone_of->clones = node->next_sibling_clone;
1200 if (node->next_sibling_clone)
1201 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1204 struct cgraph_node *n;
1206 for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
1207 n->clone_of = node->clone_of;
1208 n->clone_of = node->clone_of;
1209 n->next_sibling_clone = node->clone_of->clones;
1210 if (node->clone_of->clones)
1211 node->clone_of->clones->prev_sibling_clone = n;
1212 node->clone_of->clones = node->clones;
1215 /* While all the clones are removed after being processed, the function
1216 itself is kept in the cgraph even after it is compiled. Check whether
1217 we are done with this body and reclaim it proactively if this is the case. */
1219 if (!kill_body && *slot)
1221 struct cgraph_node *n = (struct cgraph_node *) *slot;
1222 if (!n->clones && !n->clone_of && !n->global.inlined_to
1223 && (cgraph_global_info_ready
1224 && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl))))
1227 if (assembler_name_hash)
1229 tree name = DECL_ASSEMBLER_NAME (node->decl);
1230 slot = htab_find_slot_with_hash (assembler_name_hash, name,
1231 decl_assembler_name_hash (name),
1233 /* Inline clones are not hashed. */
1234 if (slot && *slot == node)
1235 htab_clear_slot (assembler_name_hash, slot);
1239 cgraph_release_function_body (node);
1241 if (node->call_site_hash)
1243 htab_delete (node->call_site_hash);
1244 node->call_site_hash = NULL;
1248 /* Clear out the node to NULL all pointers and add the node to the free
   list. */
1250 memset (node, 0, sizeof(*node));
1252 NEXT_FREE_NODE (node) = free_nodes;
1256 /* Remove the node from cgraph. */
1259 cgraph_remove_node_and_inline_clones (struct cgraph_node *node)
1261 struct cgraph_edge *e, *next;
1262 for (e = node->callees; e; e = next)
1264 next = e->next_callee;
1265 if (!e->inline_failed)
1266 cgraph_remove_node_and_inline_clones (e->callee);
1268 cgraph_remove_node (node);
1271 /* Notify finalize_compilation_unit that given node is reachable. */
1274 cgraph_mark_reachable_node (struct cgraph_node *node)
1276 if (!node->reachable && node->local.finalized)
1278 notice_global_symbol (node->decl);
1279 node->reachable = 1;
1280 gcc_assert (!cgraph_global_info_ready);
1282 node->next_needed = cgraph_nodes_queue;
1283 cgraph_nodes_queue = node;
1287 /* Likewise indicate that a node is needed, i.e. reachable via some
1291 cgraph_mark_needed_node (struct cgraph_node *node)
1294 cgraph_mark_reachable_node (node);
/* Likewise indicate that a node is having address taken.  Such a node
   is an entry point of the callgraph even without direct callers.  */

cgraph_mark_address_taken_node (struct cgraph_node *node)
  /* Record the flag first, then keep the node alive via the needed
     machinery.  */
  node->address_taken = 1;
  cgraph_mark_needed_node (node);
1306 /* Return local info for the compiled function. */
1308 struct cgraph_local_info *
1309 cgraph_local_info (tree decl)
1311 struct cgraph_node *node;
1313 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1314 node = cgraph_node (decl);
1315 return &node->local;
1318 /* Return local info for the compiled function. */
1320 struct cgraph_global_info *
1321 cgraph_global_info (tree decl)
1323 struct cgraph_node *node;
1325 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
1326 node = cgraph_node (decl);
1327 return &node->global;
/* Return the RTL info for the function declared by DECL.  RTL info is
   propagated from compiled functions to their callers; it is only
   meaningful for the function currently being compiled or one whose
   assembly has already been written.  */

struct cgraph_rtl_info *
cgraph_rtl_info (tree decl)
  struct cgraph_node *node;

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  node = cgraph_node (decl);
  /* RTL data is not valid for a function that is neither currently
     being compiled nor already emitted.  NOTE(review): the return
     statements of this function are not visible in this excerpt.  */
  if (decl != current_function_decl
      && !TREE_ASM_WRITTEN (node->decl))
/* Return a human-readable string describing the inlining failure
   REASON.  The table of messages is generated from cif-code.def so it
   stays in sync with the cgraph_inline_failed_t enum.  */

cgraph_inline_failed_string (cgraph_inline_failed_t reason)
#define DEFCIFCODE(code, string) string,
  /* One string per CIF code, in enum order.  */
  static const char *cif_string_table[CIF_N_REASONS] = {
#include "cif-code.def"
  /* Signedness of an enum type is implementation defined, so cast it
     to unsigned before testing.  */
  gcc_assert ((unsigned) reason < CIF_N_REASONS);
  return cif_string_table[reason];
/* Return name of the node used in debug output.  */

cgraph_node_name (struct cgraph_node *node)
  /* Delegate to the front end's printable-name hook; the second
     argument selects the verbosity level of the returned name.  */
  return lang_hooks.decl_printable_name (node->decl, 2);
/* Names used to print out the availability enum.  The order must match
   the enum availability declaration (presumably in cgraph.h — confirm
   when editing either one).  */
const char * const cgraph_availability_names[] =
  {"unset", "not_available", "overwritable", "available", "local"};
/* Dump call graph node NODE to file F.  Prints identification, profile
   and inline-summary data, flag bits, and the caller/callee edge lists.
   NOTE(review): several lines of this function (argument lists, braces)
   are truncated in this excerpt; compare with the original source.  */

dump_cgraph_node (FILE *f, struct cgraph_node *node)
  struct cgraph_edge *edge;

  /* Identification: printable name, uid, and node address.  */
  fprintf (f, "%s/%i(%i)", cgraph_node_name (node), node->uid,
  dump_addr (f, " @", (void *)node);
  /* Relation to other nodes: inline-copy host and clone origin.  */
  if (node->global.inlined_to)
    fprintf (f, " (inline copy in %s/%i)",
	     cgraph_node_name (node->global.inlined_to),
	     node->global.inlined_to->uid);
  fprintf (f, " (clone of %s/%i)",
	   cgraph_node_name (node->clone_of),
	   node->clone_of->uid);
  if (cgraph_function_flags_ready)
    fprintf (f, " availability:%s",
	     cgraph_availability_names [cgraph_function_body_availability (node)]);
  /* Profile count of executions.  */
  fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
	   (HOST_WIDEST_INT)node->count);
  /* Inline summary: time and size estimates, before and after
     inlining.  */
  if (node->local.inline_summary.self_time)
    fprintf (f, " %i time, %i benefit", node->local.inline_summary.self_time,
	     node->local.inline_summary.time_inlining_benefit);
  if (node->global.time && node->global.time
      != node->local.inline_summary.self_time)
    fprintf (f, " (%i after inlining)", node->global.time);
  if (node->local.inline_summary.self_size)
    fprintf (f, " %i size, %i benefit", node->local.inline_summary.self_size,
	     node->local.inline_summary.size_inlining_benefit);
  if (node->global.size && node->global.size
      != node->local.inline_summary.self_size)
    fprintf (f, " (%i after inlining)", node->global.size);
  /* Estimated stack usage, self and cumulative.  */
  if (node->local.inline_summary.estimated_self_stack_size)
    fprintf (f, " %i bytes stack usage", (int)node->local.inline_summary.estimated_self_stack_size);
  if (node->global.estimated_stack_size != node->local.inline_summary.estimated_self_stack_size)
    fprintf (f, " %i bytes after inlining", (int)node->global.estimated_stack_size);
  fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
  /* Flag bits describing the node's state.  */
  fprintf (f, " needed");
  if (node->address_taken)
    fprintf (f, " address_taken");
  else if (node->reachable)
    fprintf (f, " reachable");
  if (gimple_has_body_p (node->decl))
    fprintf (f, " body");
  fprintf (f, " process");
  if (node->local.local)
    fprintf (f, " local");
  if (node->local.externally_visible)
    fprintf (f, " externally_visible");
  if (node->local.finalized)
    fprintf (f, " finalized");
  if (node->local.disregard_inline_limits)
    fprintf (f, " always_inline");
  else if (node->local.inlinable)
    fprintf (f, " inlinable");
  if (node->local.redefined_extern_inline)
    fprintf (f, " redefined_extern_inline");
  if (TREE_ASM_WRITTEN (node->decl))
    fprintf (f, " asm_written");

  /* Incoming edges: one entry per caller, with profile annotations.  */
  fprintf (f, "\n  called by: ");
  for (edge = node->callers; edge; edge = edge->next_caller)
    fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
      fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
	       (HOST_WIDEST_INT)edge->count);
      if (edge->frequency)
	fprintf (f, "(%.2f per call) ",
		 edge->frequency / (double)CGRAPH_FREQ_BASE);
      if (!edge->inline_failed)
	fprintf(f, "(inlined) ");
      if (edge->indirect_call)
	fprintf(f, "(indirect) ");
      if (edge->can_throw_external)
	fprintf(f, "(can throw external) ");

  /* Outgoing edges: one entry per callee, with the same annotations.  */
  fprintf (f, "\n  calls: ");
  for (edge = node->callees; edge; edge = edge->next_callee)
    fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
      if (!edge->inline_failed)
	fprintf(f, "(inlined) ");
      if (edge->indirect_call)
	fprintf(f, "(indirect) ");
      fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
	       (HOST_WIDEST_INT)edge->count);
      if (edge->frequency)
	fprintf (f, "(%.2f per call) ",
		 edge->frequency / (double)CGRAPH_FREQ_BASE);
      if (edge->loop_nest)
	fprintf (f, "(nested in %i loops) ", edge->loop_nest);
      if (edge->can_throw_external)
	fprintf(f, "(can throw external) ");
/* Dump call graph node NODE to stderr; convenience entry point meant
   to be called from a debugger.  */

void
debug_cgraph_node (struct cgraph_node *node)
{
  FILE *out = stderr;
  dump_cgraph_node (out, node);
}
1493 /* Dump the callgraph to file F. */
1496 dump_cgraph (FILE *f)
1498 struct cgraph_node *node;
1500 fprintf (f, "callgraph:\n\n");
1501 for (node = cgraph_nodes; node; node = node->next)
1502 dump_cgraph_node (f, node);
/* Dump the call graph to stderr; debugger convenience wrapper.
   NOTE(review): the function's signature line is missing from this
   excerpt — only the body statement is visible.  */

  dump_cgraph (stderr);
/* Set the DECL_ASSEMBLER_NAME of DECL to NAME and update cgraph
   hashtables accordingly.  NOTE(review): some control-flow lines
   (braces, early returns) are missing from this excerpt.  */

change_decl_assembler_name (tree decl, tree name)
  /* The assembler-name hash must not exist yet, otherwise it would go
     stale when the name changes.  */
  gcc_assert (!assembler_name_hash);
  /* First assignment of an assembler name needs no further checks.  */
  if (!DECL_ASSEMBLER_NAME_SET_P (decl))
    SET_DECL_ASSEMBLER_NAME (decl, name);
  /* Renaming to the same name is a no-op.  */
  if (name == DECL_ASSEMBLER_NAME (decl))
  /* Renaming after the name was already referenced in emitted assembly
     is too late; warn the user.  */
  if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
      && DECL_RTL_SET_P (decl))
    warning (0, "%D renamed after being referenced in assembly", decl);

  SET_DECL_ASSEMBLER_NAME (decl, name);
/* Add a top-level asm statement ASM_STR to the list of asm nodes, in
   source order (tracked via cgraph_order).  NOTE(review): the `else`
   keyword and the final return appear to be missing from this
   excerpt.  */

struct cgraph_asm_node *
cgraph_add_asm_node (tree asm_str)
  struct cgraph_asm_node *node;

  /* Allocate a zeroed node in GC-managed memory.  */
  node = GGC_CNEW (struct cgraph_asm_node);
  node->asm_str = asm_str;
  /* Remember the position relative to functions and other asm
     statements so output order can be reproduced.  */
  node->order = cgraph_order++;
  /* Append to the singly linked list of asm nodes.  */
  if (cgraph_asm_nodes == NULL)
    cgraph_asm_nodes = node;
    cgraph_asm_last_node->next = node;
  cgraph_asm_last_node = node;
1555 /* Return true when the DECL can possibly be inlined. */
1557 cgraph_function_possibly_inlined_p (tree decl)
1559 if (!cgraph_global_info_ready)
1560 return !DECL_UNINLINABLE (decl);
1561 return DECL_POSSIBLY_INLINED (decl);
/* Create a clone of edge E attached to node N, calling via CALL_STMT.
   COUNT_SCALE and FREQ_SCALE rescale the profile count and frequency;
   LOOP_NEST is added to the edge's loop depth.  When UPDATE_ORIGINAL,
   the cloned counts are subtracted from E.  NOTE(review): the
   frequency clamping of the original edge and the final return are
   not visible in this excerpt.  */
struct cgraph_edge *
cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
		   gimple call_stmt, gcov_type count_scale, int freq_scale,
		   int loop_nest, bool update_original)
  struct cgraph_edge *new_edge;
  /* Rescale profile data for the clone.  */
  gcov_type count = e->count * count_scale / REG_BR_PROB_BASE;
  gcov_type freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;

  /* Frequencies are capped at CGRAPH_FREQ_MAX.  */
  if (freq > CGRAPH_FREQ_MAX)
    freq = CGRAPH_FREQ_MAX;
  new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq,
				 e->loop_nest + loop_nest);

  /* The clone inherits the inlining and call-kind state of E.  */
  new_edge->inline_failed = e->inline_failed;
  new_edge->indirect_call = e->indirect_call;
  if (update_original)
    /* Move the cloned portion of the profile out of the original.  */
    e->count -= new_edge->count;
  /* Let IPA passes update their per-edge summaries.  */
  cgraph_call_edge_duplication_hooks (e, new_edge);
/* Create node representing clone of N executed COUNT times.  Decrease
   the execution counts from original node too.

   When UPDATE_ORIGINAL is true, the counts are subtracted from the
   original function's profile to reflect the fact that part of
   execution is handled by the clone.  NOTE(review): several branch
   bodies (else arms, count updates, the return) are missing from this
   excerpt.  */
struct cgraph_node *
cgraph_clone_node (struct cgraph_node *n, gcov_type count, int freq,
		   int loop_nest, bool update_original)
  struct cgraph_node *new_node = cgraph_create_node ();
  struct cgraph_edge *e;
  gcov_type count_scale;

  /* The clone shares N's declaration and copies its summaries.  */
  new_node->decl = n->decl;
  new_node->origin = n->origin;
  if (new_node->origin)
      /* Link the clone into its origin's list of nested functions.  */
      new_node->next_nested = new_node->origin->nested;
      new_node->origin->nested = new_node;
  new_node->analyzed = n->analyzed;
  new_node->local = n->local;
  new_node->global = n->global;
  new_node->rtl = n->rtl;
  new_node->count = count;
  new_node->clone = n->clone;

  /* Compute the scale by which caller edge profiles shrink; capped so
     the clone never claims more executions than the original had.  */
  if (new_node->count > n->count)
    count_scale = REG_BR_PROB_BASE;
    count_scale = new_node->count * REG_BR_PROB_BASE / n->count;
  if (update_original)
  /* Clone every outgoing edge of N onto the new node.  */
  for (e = n->callees;e; e=e->next_callee)
    cgraph_clone_edge (e, new_node, e->call_stmt, count_scale, freq, loop_nest,

  /* Record the clone in N's doubly linked list of clones.  */
  new_node->next_sibling_clone = n->clones;
    n->clones->prev_sibling_clone = new_node;
  n->clones = new_node;
  new_node->clone_of = n;

  /* Let IPA passes update their per-node summaries.  */
  cgraph_call_node_duplication_hooks (n, new_node);
/* Create a new name for omp child function.  Returns an identifier
   built from DECL's assembler name plus a "_clone" suffix and a unique
   sequence number.  */

/* Monotonically increasing id used to make clone names unique.  */
static GTY(()) unsigned int clone_fn_id_num;

clone_function_name (tree decl)
  tree name = DECL_ASSEMBLER_NAME (decl);
  size_t len = IDENTIFIER_LENGTH (name);
  char *tmp_name, *prefix;

  /* Build "<assembler-name>_clone" on the stack.  */
  prefix = XALLOCAVEC (char, len + strlen ("_clone") + 1);
  memcpy (prefix, IDENTIFIER_POINTER (name), len);
  strcpy (prefix + len, "_clone");
  /* Pick a separator character supported by the target's labels.
     NOTE(review): the bodies of these preprocessor branches are
     missing from this excerpt.  */
#ifndef NO_DOT_IN_LABEL
#elif !defined NO_DOLLAR_IN_LABEL
  /* Append the unique number in target-private label syntax.  */
  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
  return get_identifier (tmp_name);
/* Create callgraph node clone of OLD_NODE with a new declaration.  The
   actual body will be copied later at compilation stage.  Callers in
   REDIRECT_CALLERS are retargeted to the clone; TREE_MAP lists
   replacement trees and ARGS_TO_SKIP the dropped parameters.

   TODO: after merging in ipa-sra use function call notes instead of
   args_to_skip.  NOTE(review): several lines (branches, hash slot
   stores, the return) are missing from this excerpt.  */

struct cgraph_node *
cgraph_create_virtual_clone (struct cgraph_node *old_node,
			     VEC(cgraph_edge_p,heap) *redirect_callers,
			     VEC(ipa_replace_map_p,gc) *tree_map,
			     bitmap args_to_skip)
  tree old_decl = old_node->decl;
  struct cgraph_node *new_node = NULL;
  struct cgraph_node key, **slot;
  struct cgraph_edge *e;

  /* Only functions that can be versioned may be cloned.  */
  gcc_assert (tree_versionable_function_p (old_decl));

  /* Make a new FUNCTION_DECL tree node, either a plain copy or one
     with the skipped arguments removed from its signature.  */
  new_decl = copy_node (old_decl);
  new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
  DECL_STRUCT_FUNCTION (new_decl) = NULL;

  /* Generate a new name for the new version.  */
  DECL_NAME (new_decl) = clone_function_name (old_decl);
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  SET_DECL_RTL (new_decl, NULL);

  new_node = cgraph_clone_node (old_node, old_node->count,
				CGRAPH_FREQ_BASE, 0, false);
  new_node->decl = new_decl;
  /* Update the properties.
     Make clone visible only within this translation unit.  Make sure
     that is not weak also.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  DECL_EXTERNAL (new_node->decl) = 0;
  DECL_COMDAT_GROUP (new_node->decl) = 0;
  TREE_PUBLIC (new_node->decl) = 0;
  DECL_COMDAT (new_node->decl) = 0;
  DECL_WEAK (new_node->decl) = 0;
  new_node->clone.tree_map = tree_map;
  new_node->clone.args_to_skip = args_to_skip;
  new_node->local.externally_visible = 0;
  new_node->local.local = 1;
  new_node->lowered = true;
  new_node->reachable = true;

  /* Register the clone in the decl->node hash table.  */
  key.decl = new_decl;
  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
  gcc_assert (!*slot);
  /* Also register its assembler name if that hash exists already.  */
  if (assembler_name_hash)
      tree name = DECL_ASSEMBLER_NAME (new_decl);
      aslot = htab_find_slot_with_hash (assembler_name_hash, name,
					decl_assembler_name_hash (name),
      gcc_assert (!*aslot);

  for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
    /* Redirect calls to the old version node to point to its new
       version.  */
    cgraph_redirect_edge_callee (e, new_node);
1749 /* NODE is no longer nested function; update cgraph accordingly. */
1751 cgraph_unnest_node (struct cgraph_node *node)
1753 struct cgraph_node **node2 = &node->origin->nested;
1754 gcc_assert (node->origin);
1756 while (*node2 != node)
1757 node2 = &(*node2)->next_nested;
1758 *node2 = node->next_nested;
1759 node->origin = NULL;
/* Return function availability.  See cgraph.h for description of
   individual availability levels.  NOTE(review): the final return
   statement is missing from this excerpt.  */

cgraph_function_body_availability (struct cgraph_node *node)
  enum availability avail;

  gcc_assert (cgraph_function_flags_ready);
  /* No analyzed body means nothing to rely on.  */
  if (!node->analyzed)
    avail = AVAIL_NOT_AVAILABLE;
  /* Local functions can never be overridden.  */
  else if (node->local.local)
    avail = AVAIL_LOCAL;
  /* Functions not visible outside the unit keep their body.  */
  else if (!node->local.externally_visible)
    avail = AVAIL_AVAILABLE;
  /* Inline functions are safe to be analyzed even if their symbol can
     be overwritten at runtime.  It is not meaningful to enforce any
     sane behaviour on replacing inline function by different body.  */
  else if (DECL_DECLARED_INLINE_P (node->decl))
    avail = AVAIL_AVAILABLE;
  /* If the function can be overwritten, return OVERWRITABLE.  Take
     care at least of two notable extensions - the COMDAT functions
     used to share template instantiations in C++ (this is symmetric
     to code cp_cannot_inline_tree_fn and probably shall be shared and
     the inlinability hooks completely eliminated).

     ??? Does the C++ one definition rule allow us to always return
     AVAIL_AVAILABLE here?  That would be good reason to preserve this
     property.  */
  else if (DECL_REPLACEABLE_P (node->decl) && !DECL_EXTERNAL (node->decl))
    avail = AVAIL_OVERWRITABLE;
  else avail = AVAIL_AVAILABLE;
/* Add the function FNDECL to the call graph.
   Unlike cgraph_finalize_function, this function is intended to be
   used by middle end and allows insertion of new function at arbitrary
   point of compilation.  The function can be either in high, low or
   SSA form.

   The function is assumed to be reachable and have address taken (so
   no API breaking optimizations are performed on it).

   Main work done by this function is to enqueue the function for later
   processing to avoid need the passes to be re-entrant.  NOTE(review):
   braces, break statements and pop_cfun calls appear to be missing
   from this excerpt.  */

cgraph_add_new_function (tree fndecl, bool lowered)
  struct cgraph_node *node;
  switch (cgraph_state)
      case CGRAPH_STATE_CONSTRUCTION:
	/* Just enqueue function to be processed at nearest
	   occurrence.  */
	node = cgraph_node (fndecl);
	node->next_needed = cgraph_new_nodes;
	  node->lowered = true;
	cgraph_new_nodes = node;

      case CGRAPH_STATE_IPA:
      case CGRAPH_STATE_IPA_SSA:
      case CGRAPH_STATE_EXPANSION:
	/* Bring the function into finalized state and enqueue for
	   later analyzing and compilation.  */
	node = cgraph_node (fndecl);
	node->local.local = false;
	node->local.finalized = true;
	node->reachable = node->needed = true;
	/* Late additions during expansion must be lowered here.  */
	if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
	    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	    current_function_decl = fndecl;
	    gimple_register_cfg_hooks ();
	    tree_lowering_passes (fndecl);
	    bitmap_obstack_initialize (NULL);
	    if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	      execute_pass_list (pass_early_local_passes.pass.sub);
	    bitmap_obstack_release (NULL);
	    current_function_decl = NULL;
	node->lowered = true;
	node->next_needed = cgraph_new_nodes;
	cgraph_new_nodes = node;

      case CGRAPH_STATE_FINISHED:
	/* At the very end of compilation we have to do all the work up
	   front: lower, run early local passes and compile.  */
	push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	current_function_decl = fndecl;
	gimple_register_cfg_hooks ();
	tree_lowering_passes (fndecl);
	bitmap_obstack_initialize (NULL);
	if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	  execute_pass_list (pass_early_local_passes.pass.sub);
	bitmap_obstack_release (NULL);
	tree_rest_of_compilation (fndecl);
	current_function_decl = NULL;
1874 /* Return true if NODE can be made local for API change.
1875 Extern inline functions and C++ COMDAT functions can be made local
1876 at the expense of possible code size growth if function is used in multiple
1877 compilation units. */
1879 cgraph_node_can_be_local_p (struct cgraph_node *node)
1881 return !node->needed;
/* Bring NODE local: strip all external linkage properties from its
   declaration and mark it local to this unit.  NOTE(review): the
   braces around the conditional body are missing from this
   excerpt.  */

cgraph_make_node_local (struct cgraph_node *node)
  gcc_assert (cgraph_node_can_be_local_p (node));
  if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
      /* Drop every linkage property that could expose the symbol
	 outside this translation unit.  */
      DECL_COMDAT (node->decl) = 0;
      DECL_COMDAT_GROUP (node->decl) = 0;
      TREE_PUBLIC (node->decl) = 0;
      DECL_WEAK (node->decl) = 0;
      DECL_EXTERNAL (node->decl) = 0;
      node->local.externally_visible = false;
      node->local.local = true;
      /* The result must now be classified as local.  */
      gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
1902 #include "gt-cgraph.h"