1 /* Callgraph handling code.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file contains basic routines manipulating call graph
26 The call-graph is a data structure designed for intra-procedural optimization
27 but it is also used in non-unit-at-a-time compilation to allow easier code
30 The call-graph consist of nodes and edges represented via linked lists.
31 Each function (external or not) corresponds to a unique node.
33 The mapping from declarations to call-graph nodes is done using hash table
34 based on DECL_UID. The call-graph nodes are created lazily using
35 cgraph_node function when called for unknown declaration.
37 The callgraph at the moment does not represent indirect calls or calls
38 from other compilation unit. Flag NEEDED is set for each node that may
39 be accessed in such an invisible way and it shall be considered an
40 entry point to the callgraph.
42 Interprocedural information:
44 The callgraph is a place to store data needed for interprocedural optimization.
45 All data structures are divided into three components: local_info that
46 is produced while analyzing the function, global_info that is result
47 of global walking of the callgraph on the end of compilation and
48 rtl_info used by RTL backend to propagate data from already compiled
49 functions to their callers.
53 The function inlining information is decided in advance and maintained
54 in the callgraph as so called inline plan.
55 For each inlined call, the callee's node is cloned to represent the
56 new function copy produced by inliner.
57 Each inlined call gets a unique corresponding clone node of the callee
58 and the data structure is updated while inlining is performed, so
59 the clones are eliminated and their callee edges redirected to the
62 Each edge has "inline_failed" field. When the field is set to NULL,
63 the call will be inlined. When it is non-NULL it contains a reason
64 why inlining wasn't performed. */
68 #include "coretypes.h"
71 #include "tree-inline.h"
72 #include "langhooks.h"
79 #include "basic-block.h"
84 #include "tree-dump.h"
85 #include "tree-flow.h"
86 #include "value-prof.h"
89 static void cgraph_node_remove_callers (struct cgraph_node *node);
90 static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
91 static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
93 /* Hash table used to convert declarations into nodes. */
94 static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;
95 /* Hash table used to convert assembler names into nodes. */
96 static GTY((param_is (struct cgraph_node))) htab_t assembler_name_hash;
98 /* The linked list of cgraph nodes. */
99 struct cgraph_node *cgraph_nodes;
101 /* Queue of cgraph nodes scheduled to be lowered. */
102 struct cgraph_node *cgraph_nodes_queue;
104 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
105 secondary queue used during optimization to accommodate passes that
106 may generate new functions that need to be optimized and expanded. */
107 struct cgraph_node *cgraph_new_nodes;
109 /* Number of nodes in existence. */
112 /* Maximal uid used in cgraph nodes. */
115 /* Maximal uid used in cgraph edges. */
116 int cgraph_edge_max_uid;
118 /* Maximal pid used for profiling. */
121 /* Set when whole unit has been analyzed so we can access global info. */
122 bool cgraph_global_info_ready = false;
124 /* What state callgraph is in right now. */
125 enum cgraph_state cgraph_state = CGRAPH_STATE_CONSTRUCTION;
127 /* Set when the cgraph is fully built and the basic flags are computed. */
128 bool cgraph_function_flags_ready = false;
130 /* Linked list of cgraph asm nodes. */
131 struct cgraph_asm_node *cgraph_asm_nodes;
133 /* Last node in cgraph_asm_nodes. */
134 static GTY(()) struct cgraph_asm_node *cgraph_asm_last_node;
136 /* The order index of the next cgraph node to be created. This is
137 used so that we can sort the cgraph nodes in order by when we saw
138 them, to support -fno-toplevel-reorder. */
141 /* List of hooks triggered on cgraph_edge events. */
142 struct cgraph_edge_hook_list {
143 cgraph_edge_hook hook;
145 struct cgraph_edge_hook_list *next;
148 /* List of hooks triggered on cgraph_node events. */
149 struct cgraph_node_hook_list {
150 cgraph_node_hook hook;
152 struct cgraph_node_hook_list *next;
155 /* List of hooks triggered on events involving two cgraph_edges. */
156 struct cgraph_2edge_hook_list {
157 cgraph_2edge_hook hook;
159 struct cgraph_2edge_hook_list *next;
162 /* List of hooks triggered on events involving two cgraph_nodes. */
163 struct cgraph_2node_hook_list {
164 cgraph_2node_hook hook;
166 struct cgraph_2node_hook_list *next;
169 /* List of hooks triggered when an edge is removed. */
170 struct cgraph_edge_hook_list *first_cgraph_edge_removal_hook;
171 /* List of hooks triggered when a node is removed. */
172 struct cgraph_node_hook_list *first_cgraph_node_removal_hook;
173 /* List of hooks triggered when an edge is duplicated. */
174 struct cgraph_2edge_hook_list *first_cgraph_edge_duplicated_hook;
175 /* List of hooks triggered when a node is duplicated. */
176 struct cgraph_2node_hook_list *first_cgraph_node_duplicated_hook;
177 /* List of hooks triggered when a function is inserted. */
178 struct cgraph_node_hook_list *first_cgraph_function_insertion_hook;
180 /* Head of a linked list of unused (freed) call graph nodes.
181 Do not GTY((delete)) this list so UIDs get reliably recycled. */
182 static GTY(()) struct cgraph_node *free_nodes;
183 /* Head of a linked list of unused (freed) call graph edges.
184 Do not GTY((delete)) this list so UIDs get reliably recycled. */
185 static GTY(()) struct cgraph_edge *free_edges;
187 /* Macros to access the next item in the list of free cgraph nodes and
189 #define NEXT_FREE_NODE(NODE) (NODE)->next
190 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
192 /* Register HOOK to be called with DATA on each removed edge. */
193 struct cgraph_edge_hook_list *
194 cgraph_add_edge_removal_hook (cgraph_edge_hook hook, void *data)
196 struct cgraph_edge_hook_list *entry;
197 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
199 entry = (struct cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
209 /* Remove ENTRY from the list of hooks called on removing edges. */
211 cgraph_remove_edge_removal_hook (struct cgraph_edge_hook_list *entry)
213 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
215 while (*ptr != entry)
221 /* Call all edge removal hooks. */
223 cgraph_call_edge_removal_hooks (struct cgraph_edge *e)
225 struct cgraph_edge_hook_list *entry = first_cgraph_edge_removal_hook;
228 entry->hook (e, entry->data);
233 /* Register HOOK to be called with DATA on each removed node. */
234 struct cgraph_node_hook_list *
235 cgraph_add_node_removal_hook (cgraph_node_hook hook, void *data)
237 struct cgraph_node_hook_list *entry;
238 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
240 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
250 /* Remove ENTRY from the list of hooks called on removing nodes. */
252 cgraph_remove_node_removal_hook (struct cgraph_node_hook_list *entry)
254 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
256 while (*ptr != entry)
262 /* Call all node removal hooks. */
264 cgraph_call_node_removal_hooks (struct cgraph_node *node)
266 struct cgraph_node_hook_list *entry = first_cgraph_node_removal_hook;
269 entry->hook (node, entry->data);
274 /* Register HOOK to be called with DATA on each removed node. */
275 struct cgraph_node_hook_list *
276 cgraph_add_function_insertion_hook (cgraph_node_hook hook, void *data)
278 struct cgraph_node_hook_list *entry;
279 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
281 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
291 /* Remove ENTRY from the list of hooks called on removing nodes. */
293 cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *entry)
295 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
297 while (*ptr != entry)
303 /* Call all node removal hooks. */
305 cgraph_call_function_insertion_hooks (struct cgraph_node *node)
307 struct cgraph_node_hook_list *entry = first_cgraph_function_insertion_hook;
310 entry->hook (node, entry->data);
315 /* Register HOOK to be called with DATA on each duplicated edge. */
316 struct cgraph_2edge_hook_list *
317 cgraph_add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
319 struct cgraph_2edge_hook_list *entry;
320 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
322 entry = (struct cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
332 /* Remove ENTRY from the list of hooks called on duplicating edges. */
334 cgraph_remove_edge_duplication_hook (struct cgraph_2edge_hook_list *entry)
336 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
338 while (*ptr != entry)
344 /* Call all edge duplication hooks. */
346 cgraph_call_edge_duplication_hooks (struct cgraph_edge *cs1,
347 struct cgraph_edge *cs2)
349 struct cgraph_2edge_hook_list *entry = first_cgraph_edge_duplicated_hook;
352 entry->hook (cs1, cs2, entry->data);
357 /* Register HOOK to be called with DATA on each duplicated node. */
358 struct cgraph_2node_hook_list *
359 cgraph_add_node_duplication_hook (cgraph_2node_hook hook, void *data)
361 struct cgraph_2node_hook_list *entry;
362 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
364 entry = (struct cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
374 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
376 cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list *entry)
378 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
380 while (*ptr != entry)
386 /* Call all node duplication hooks. */
388 cgraph_call_node_duplication_hooks (struct cgraph_node *node1,
389 struct cgraph_node *node2)
391 struct cgraph_2node_hook_list *entry = first_cgraph_node_duplicated_hook;
394 entry->hook (node1, node2, entry->data);
399 /* Returns a hash code for P. */
402 hash_node (const void *p)
404 const struct cgraph_node *n = (const struct cgraph_node *) p;
405 return (hashval_t) DECL_UID (n->decl);
409 /* Return the cgraph node associated with function DECL. If none
410 exists, return NULL. */
413 cgraph_node_for_decl (tree decl)
415 struct cgraph_node *node;
418 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
423 struct cgraph_node key;
426 slot = htab_find_slot (cgraph_hash, &key, NO_INSERT);
428 node = (struct cgraph_node *) *slot;
435 /* Returns nonzero if P1 and P2 are equal. */
438 eq_node (const void *p1, const void *p2)
440 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
441 const struct cgraph_node *n2 = (const struct cgraph_node *) p2;
442 return DECL_UID (n1->decl) == DECL_UID (n2->decl);
445 /* Allocate new callgraph node and insert it into basic data structures. */
447 static struct cgraph_node *
448 cgraph_create_node (void)
450 struct cgraph_node *node;
455 free_nodes = NEXT_FREE_NODE (node);
459 node = GGC_CNEW (struct cgraph_node);
460 node->uid = cgraph_max_uid++;
463 node->next = cgraph_nodes;
465 node->order = cgraph_order++;
467 cgraph_nodes->previous = node;
468 node->previous = NULL;
469 node->global.estimated_growth = INT_MIN;
475 /* Return cgraph node assigned to DECL. Create new one when needed. */
478 cgraph_node (tree decl)
480 struct cgraph_node key, *node, **slot;
482 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
485 cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);
489 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
497 node = cgraph_create_node ();
500 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
502 node->origin = cgraph_node (DECL_CONTEXT (decl));
503 node->next_nested = node->origin->nested;
504 node->origin->nested = node;
506 if (assembler_name_hash)
509 tree name = DECL_ASSEMBLER_NAME (decl);
511 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
512 decl_assembler_name_hash (name),
514 /* We can have multiple declarations with same assembler name. For C++
515 it is __builtin_strlen and strlen, for instance. Do we need to
516 record them all? Original implementation marked just first one
517 so lets hope for the best. */
524 /* Returns the cgraph node assigned to DECL or NULL if no cgraph node
528 cgraph_get_node (tree decl)
530 struct cgraph_node key, *node = NULL, **slot;
532 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
535 cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);
539 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
547 /* Insert already constructed node into hashtable. */
550 cgraph_insert_node_to_hashtable (struct cgraph_node *node)
552 struct cgraph_node **slot;
554 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, node, INSERT);
560 /* Returns a hash code for P. */
563 hash_node_by_assembler_name (const void *p)
565 const struct cgraph_node *n = (const struct cgraph_node *) p;
566 return (hashval_t) decl_assembler_name_hash (DECL_ASSEMBLER_NAME (n->decl));
569 /* Returns nonzero if P1 and P2 are equal. */
572 eq_assembler_name (const void *p1, const void *p2)
574 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
575 const_tree name = (const_tree)p2;
576 return (decl_assembler_name_equal (n1->decl, name));
579 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
580 Return NULL if there's no such node. */
583 cgraph_node_for_asm (tree asmname)
585 struct cgraph_node *node;
588 if (!assembler_name_hash)
590 assembler_name_hash =
591 htab_create_ggc (10, hash_node_by_assembler_name, eq_assembler_name,
593 for (node = cgraph_nodes; node; node = node->next)
594 if (!node->global.inlined_to)
596 tree name = DECL_ASSEMBLER_NAME (node->decl);
597 slot = htab_find_slot_with_hash (assembler_name_hash, name,
598 decl_assembler_name_hash (name),
600 /* We can have multiple declarations with same assembler name. For C++
601 it is __builtin_strlen and strlen, for instance. Do we need to
602 record them all? Original implementation marked just first one
603 so lets hope for the best. */
610 slot = htab_find_slot_with_hash (assembler_name_hash, asmname,
611 decl_assembler_name_hash (asmname),
615 return (struct cgraph_node *) *slot;
619 /* Returns a hash value for X (which really is a die_struct). */
622 edge_hash (const void *x)
624 return htab_hash_pointer (((const struct cgraph_edge *) x)->call_stmt);
627 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
630 edge_eq (const void *x, const void *y)
632 return ((const struct cgraph_edge *) x)->call_stmt == y;
636 /* Return the callgraph edge representing the GIMPLE_CALL statement
640 cgraph_edge (struct cgraph_node *node, gimple call_stmt)
642 struct cgraph_edge *e, *e2;
645 if (node->call_site_hash)
646 return (struct cgraph_edge *)
647 htab_find_with_hash (node->call_site_hash, call_stmt,
648 htab_hash_pointer (call_stmt));
650 /* This loop may turn out to be performance problem. In such case adding
651 hashtables into call nodes with very many edges is probably best
652 solution. It is not good idea to add pointer into CALL_EXPR itself
653 because we want to make possible having multiple cgraph nodes representing
654 different clones of the same body before the body is actually cloned. */
655 for (e = node->callees; e; e= e->next_callee)
657 if (e->call_stmt == call_stmt)
664 node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
665 for (e2 = node->callees; e2; e2 = e2->next_callee)
668 slot = htab_find_slot_with_hash (node->call_site_hash,
670 htab_hash_pointer (e2->call_stmt),
681 /* Change field call_stmt of edge E to NEW_STMT. */
684 cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt)
686 if (e->caller->call_site_hash)
688 htab_remove_elt_with_hash (e->caller->call_site_hash,
690 htab_hash_pointer (e->call_stmt));
692 e->call_stmt = new_stmt;
693 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
694 e->can_throw_external = stmt_can_throw_external (new_stmt);
696 if (e->caller->call_site_hash)
699 slot = htab_find_slot_with_hash (e->caller->call_site_hash,
702 (e->call_stmt), INSERT);
708 /* Like cgraph_set_call_stmt but walk the clone tree and update all
709 clones sharing the same function body. */
712 cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
713 gimple old_stmt, gimple new_stmt)
715 struct cgraph_node *node;
716 struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);
719 cgraph_set_call_stmt (edge, new_stmt);
725 struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
727 cgraph_set_call_stmt (edge, new_stmt);
730 else if (node->next_sibling_clone)
731 node = node->next_sibling_clone;
734 while (node != orig && !node->next_sibling_clone)
735 node = node->clone_of;
737 node = node->next_sibling_clone;
742 /* Like cgraph_create_edge walk the clone tree and update all clones sharing
745 TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
746 frequencies of the clones. */
749 cgraph_create_edge_including_clones (struct cgraph_node *orig,
750 struct cgraph_node *callee,
751 gimple stmt, gcov_type count,
752 int freq, int loop_depth,
753 cgraph_inline_failed_t reason)
755 struct cgraph_node *node;
756 struct cgraph_edge *edge;
758 if (!cgraph_edge (orig, stmt))
760 edge = cgraph_create_edge (orig, callee, stmt, count, freq, loop_depth);
761 edge->inline_failed = reason;
768 /* It is possible that we already constant propagated into the clone
769 and turned the indirect call into a direct call. */
770 if (!cgraph_edge (node, stmt))
772 edge = cgraph_create_edge (node, callee, stmt, count,
774 edge->inline_failed = reason;
779 else if (node->next_sibling_clone)
780 node = node->next_sibling_clone;
783 while (node != orig && !node->next_sibling_clone)
784 node = node->clone_of;
786 node = node->next_sibling_clone;
791 /* Give initial reasons why inlining would fail on EDGE. This gets either
792 nullified or usually overwritten by more precise reasons later. */
795 initialize_inline_failed (struct cgraph_edge *e)
797 struct cgraph_node *callee = e->callee;
799 if (!callee->analyzed)
800 e->inline_failed = CIF_BODY_NOT_AVAILABLE;
801 else if (callee->local.redefined_extern_inline)
802 e->inline_failed = CIF_REDEFINED_EXTERN_INLINE;
803 else if (!callee->local.inlinable)
804 e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
805 else if (gimple_call_cannot_inline_p (e->call_stmt))
806 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
808 e->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
811 /* Create edge from CALLER to CALLEE in the cgraph. */
814 cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
815 gimple call_stmt, gcov_type count, int freq, int nest)
817 struct cgraph_edge *edge;
819 #ifdef ENABLE_CHECKING
820 /* This is a rather pricey check, possibly triggering construction of call stmt
822 gcc_assert (!cgraph_edge (caller, call_stmt));
825 gcc_assert (is_gimple_call (call_stmt));
830 free_edges = NEXT_FREE_EDGE (edge);
834 edge = GGC_NEW (struct cgraph_edge);
835 edge->uid = cgraph_edge_max_uid++;
840 edge->caller = caller;
841 edge->callee = callee;
842 edge->call_stmt = call_stmt;
843 push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
844 edge->can_throw_external = stmt_can_throw_external (call_stmt);
846 edge->prev_caller = NULL;
847 edge->next_caller = callee->callers;
849 callee->callers->prev_caller = edge;
850 edge->prev_callee = NULL;
851 edge->next_callee = caller->callees;
853 caller->callees->prev_callee = edge;
854 caller->callees = edge;
855 callee->callers = edge;
857 gcc_assert (count >= 0);
858 edge->frequency = freq;
859 gcc_assert (freq >= 0);
860 gcc_assert (freq <= CGRAPH_FREQ_MAX);
861 edge->loop_nest = nest;
862 edge->indirect_call = 0;
863 if (caller->call_site_hash)
866 slot = htab_find_slot_with_hash (caller->call_site_hash,
875 initialize_inline_failed (edge);
880 /* Remove the edge E from the list of the callers of the callee. */
883 cgraph_edge_remove_callee (struct cgraph_edge *e)
886 e->prev_caller->next_caller = e->next_caller;
888 e->next_caller->prev_caller = e->prev_caller;
890 e->callee->callers = e->next_caller;
893 /* Remove the edge E from the list of the callees of the caller. */
896 cgraph_edge_remove_caller (struct cgraph_edge *e)
899 e->prev_callee->next_callee = e->next_callee;
901 e->next_callee->prev_callee = e->prev_callee;
903 e->caller->callees = e->next_callee;
904 if (e->caller->call_site_hash)
905 htab_remove_elt_with_hash (e->caller->call_site_hash,
907 htab_hash_pointer (e->call_stmt));
910 /* Put the edge onto the free list. */
913 cgraph_free_edge (struct cgraph_edge *e)
917 /* Clear out the edge so we do not dangle pointers. */
918 memset (e, 0, sizeof (*e));
920 NEXT_FREE_EDGE (e) = free_edges;
924 /* Remove the edge E in the cgraph. */
927 cgraph_remove_edge (struct cgraph_edge *e)
929 /* Call all edge removal hooks. */
930 cgraph_call_edge_removal_hooks (e);
932 /* Remove from callers list of the callee. */
933 cgraph_edge_remove_callee (e);
935 /* Remove from callees list of the callers. */
936 cgraph_edge_remove_caller (e);
938 /* Put the edge onto the free list. */
939 cgraph_free_edge (e);
942 /* Redirect callee of E to N. The function does not update underlying
946 cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
948 /* Remove from callers list of the current callee. */
949 cgraph_edge_remove_callee (e);
951 /* Insert to callers list of the new callee. */
952 e->prev_caller = NULL;
954 n->callers->prev_caller = e;
955 e->next_caller = n->callers;
961 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
962 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
963 of OLD_STMT if it was previously call statement. */
966 cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
967 gimple old_stmt, tree old_call, gimple new_stmt)
969 tree new_call = (is_gimple_call (new_stmt)) ? gimple_call_fndecl (new_stmt) : 0;
971 /* If we are seeing indirect calls, then there is nothing to update. */
972 if (!new_call && !old_call)
974 /* See if we turned indirect call into direct call or folded call to one builtin
975 into a different builtin. */
976 if (old_call != new_call)
978 struct cgraph_edge *e = cgraph_edge (node, old_stmt);
979 struct cgraph_edge *ne = NULL;
986 /* See if the call is already there. It might be because of indirect
987 inlining already found it. */
988 if (new_call && e->callee->decl == new_call)
991 /* Otherwise remove edge and create new one; we can't simply redirect
992 since function has changed, so inline plan and other information
993 attached to edge is invalid. */
994 cgraph_remove_edge (e);
996 frequency = e->frequency;
997 loop_nest = e->loop_nest;
1001 /* We are seeing new direct call; compute profile info based on BB. */
1002 basic_block bb = gimple_bb (new_stmt);
1004 frequency = compute_call_stmt_bb_frequency (current_function_decl,
1006 loop_nest = bb->loop_depth;
1011 ne = cgraph_create_edge (node, cgraph_node (new_call),
1012 new_stmt, count, frequency,
1014 gcc_assert (ne->inline_failed);
1017 /* We only updated the call stmt; update pointer in cgraph edge.. */
1018 else if (old_stmt != new_stmt)
1019 cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
1022 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1023 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1024 of OLD_STMT before it was updated (updating can happen inplace). */
1027 cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
1029 struct cgraph_node *orig = cgraph_node (cfun->decl);
1030 struct cgraph_node *node;
1032 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1034 for (node = orig->clones; node != orig;)
1036 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1038 node = node->clones;
1039 else if (node->next_sibling_clone)
1040 node = node->next_sibling_clone;
1043 while (node != orig && !node->next_sibling_clone)
1044 node = node->clone_of;
1046 node = node->next_sibling_clone;
1052 /* Remove all callees from the node. */
1055 cgraph_node_remove_callees (struct cgraph_node *node)
1057 struct cgraph_edge *e, *f;
1059 /* It is sufficient to remove the edges from the lists of callers of
1060 the callees. The callee list of the node can be zapped with one
1062 for (e = node->callees; e; e = f)
1065 cgraph_call_edge_removal_hooks (e);
1066 cgraph_edge_remove_callee (e);
1067 cgraph_free_edge (e);
1069 node->callees = NULL;
1070 if (node->call_site_hash)
1072 htab_delete (node->call_site_hash);
1073 node->call_site_hash = NULL;
1077 /* Remove all callers from the node. */
1080 cgraph_node_remove_callers (struct cgraph_node *node)
1082 struct cgraph_edge *e, *f;
1084 /* It is sufficient to remove the edges from the lists of callees of
1085 the callers. The caller list of the node can be zapped with one
1087 for (e = node->callers; e; e = f)
1090 cgraph_call_edge_removal_hooks (e);
1091 cgraph_edge_remove_caller (e);
1092 cgraph_free_edge (e);
1094 node->callers = NULL;
1097 /* Release memory used to represent body of function NODE. */
1100 cgraph_release_function_body (struct cgraph_node *node)
1102 if (DECL_STRUCT_FUNCTION (node->decl))
1104 tree old_decl = current_function_decl;
1105 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1106 if (cfun->gimple_df)
1108 current_function_decl = node->decl;
1110 delete_tree_cfg_annotations ();
1112 current_function_decl = old_decl;
1116 gcc_assert (dom_computed[0] == DOM_NONE);
1117 gcc_assert (dom_computed[1] == DOM_NONE);
1120 if (cfun->value_histograms)
1122 gcc_assert (!current_loops);
1124 gimple_set_body (node->decl, NULL);
1125 VEC_free (ipa_opt_pass, heap,
1126 DECL_STRUCT_FUNCTION (node->decl)->ipa_transforms_to_apply);
1127 /* Struct function hangs a lot of data that would leak if we didn't
1128 removed all pointers to it. */
1129 ggc_free (DECL_STRUCT_FUNCTION (node->decl));
1130 DECL_STRUCT_FUNCTION (node->decl) = NULL;
1132 DECL_SAVED_TREE (node->decl) = NULL;
1133 /* If the node is abstract and needed, then do not clear DECL_INITIAL
1134 of its associated function declaration because it's
1135 needed to emit debug info later. */
1136 if (!node->abstract_and_needed)
1137 DECL_INITIAL (node->decl) = error_mark_node;
1140 /* Remove the node from cgraph. */
1143 cgraph_remove_node (struct cgraph_node *node)
1146 bool kill_body = false;
1147 struct cgraph_node *n;
1148 int uid = node->uid;
1150 cgraph_call_node_removal_hooks (node);
1151 cgraph_node_remove_callers (node);
1152 cgraph_node_remove_callees (node);
1154 /* Incremental inlining access removed nodes stored in the postorder list.
1156 node->needed = node->reachable = false;
1157 for (n = node->nested; n; n = n->next_nested)
1159 node->nested = NULL;
1162 struct cgraph_node **node2 = &node->origin->nested;
1164 while (*node2 != node)
1165 node2 = &(*node2)->next_nested;
1166 *node2 = node->next_nested;
1169 node->previous->next = node->next;
1171 cgraph_nodes = node->next;
1173 node->next->previous = node->previous;
1175 node->previous = NULL;
1176 slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
1179 struct cgraph_node *next_inline_clone;
1181 for (next_inline_clone = node->clones;
1182 next_inline_clone && next_inline_clone->decl != node->decl;
1183 next_inline_clone = next_inline_clone->next_sibling_clone)
1186 /* If there is inline clone of the node being removed, we need
1187 to put it into the position of removed node and reorganize all
1188 other clones to be based on it. */
1189 if (next_inline_clone)
1191 struct cgraph_node *n;
1192 struct cgraph_node *new_clones;
1194 *slot = next_inline_clone;
1196 /* Unlink inline clone from the list of clones of removed node. */
1197 if (next_inline_clone->next_sibling_clone)
1198 next_inline_clone->next_sibling_clone->prev_sibling_clone
1199 = next_inline_clone->prev_sibling_clone;
1200 if (next_inline_clone->prev_sibling_clone)
1202 next_inline_clone->prev_sibling_clone->next_sibling_clone
1203 = next_inline_clone->next_sibling_clone;
1206 node->clones = next_inline_clone->next_sibling_clone;
1208 new_clones = node->clones;
1209 node->clones = NULL;
1211 /* Copy clone info. */
1212 next_inline_clone->clone = node->clone;
1214 /* Now place it into clone tree at same level at NODE. */
1215 next_inline_clone->clone_of = node->clone_of;
1216 next_inline_clone->prev_sibling_clone = NULL;
1217 next_inline_clone->next_sibling_clone = NULL;
1220 next_inline_clone->next_sibling_clone = node->clone_of->clones;
1221 node->clone_of->clones = next_inline_clone;
1224 /* Merge the clone list. */
1227 if (!next_inline_clone->clones)
1228 next_inline_clone->clones = new_clones;
1231 n = next_inline_clone->clones;
1232 while (n->next_sibling_clone)
1233 n = n->next_sibling_clone;
1234 n->next_sibling_clone = new_clones;
1235 new_clones->prev_sibling_clone = n;
1239 /* Update clone_of pointers. */
1243 n->clone_of = next_inline_clone;
1244 n = n->next_sibling_clone;
1249 htab_clear_slot (cgraph_hash, slot);
1255 gcc_assert (node->clone_of);
1256 if (node->prev_sibling_clone)
1257 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1258 else if (node->clone_of)
1259 node->clone_of->clones = node->next_sibling_clone;
1260 if (node->next_sibling_clone)
1261 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1264 struct cgraph_node *n;
1266 for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
1267 n->clone_of = node->clone_of;
1268 n->clone_of = node->clone_of;
1269 n->next_sibling_clone = node->clone_of->clones;
1270 if (node->clone_of->clones)
1271 node->clone_of->clones->prev_sibling_clone = n;
1272 node->clone_of->clones = node->clones;
1275 /* While all the clones are removed after being proceeded, the function
1276 itself is kept in the cgraph even after it is compiled. Check whether
1277 we are done with this body and reclaim it proactively if this is the case.
1279 if (!kill_body && *slot)
1281 struct cgraph_node *n = (struct cgraph_node *) *slot;
1282 if (!n->clones && !n->clone_of && !n->global.inlined_to
1283 && (cgraph_global_info_ready
1284 && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl))))
1287 if (assembler_name_hash)
1289 tree name = DECL_ASSEMBLER_NAME (node->decl);
1290 slot = htab_find_slot_with_hash (assembler_name_hash, name,
1291 decl_assembler_name_hash (name),
1293 /* Inline clones are not hashed. */
1294 if (slot && *slot == node)
1295 htab_clear_slot (assembler_name_hash, slot);
1299 cgraph_release_function_body (node);
1301 if (node->call_site_hash)
1303 htab_delete (node->call_site_hash);
1304 node->call_site_hash = NULL;
1308 /* Clear out the node to NULL all pointers and add the node to the free
1310 memset (node, 0, sizeof(*node));
1312 NEXT_FREE_NODE (node) = free_nodes;
1316 /* Remove the node from cgraph. */
1319 cgraph_remove_node_and_inline_clones (struct cgraph_node *node)
1321 struct cgraph_edge *e, *next;
1322 for (e = node->callees; e; e = next)
1324 next = e->next_callee;
1325 if (!e->inline_failed)
1326 cgraph_remove_node_and_inline_clones (e->callee);
1328 cgraph_remove_node (node);
1331 /* Notify finalize_compilation_unit that given node is reachable. */
1334 cgraph_mark_reachable_node (struct cgraph_node *node)
1336 if (!node->reachable && node->local.finalized)
1338 notice_global_symbol (node->decl);
1339 node->reachable = 1;
1340 gcc_assert (!cgraph_global_info_ready);
1342 node->next_needed = cgraph_nodes_queue;
1343 cgraph_nodes_queue = node;
1347 /* Likewise indicate that a node is needed, i.e. reachable via some
1351 cgraph_mark_needed_node (struct cgraph_node *node)
1354 cgraph_mark_reachable_node (node);
1357 /* Likewise indicate that a node is having address taken. */
1360 cgraph_mark_address_taken_node (struct cgraph_node *node)
1362 node->address_taken = 1;
1363 cgraph_mark_needed_node (node);
1366 /* Return local info for the compiled function. */
1368 struct cgraph_local_info *
1369 cgraph_local_info (tree decl)
1371 struct cgraph_node *node;
1373 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1374 node = cgraph_node (decl);
1375 return &node->local;
1378 /* Return local info for the compiled function. */
1380 struct cgraph_global_info *
1381 cgraph_global_info (tree decl)
1383 struct cgraph_node *node;
1385 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
1386 node = cgraph_node (decl);
1387 return &node->global;
1390 /* Return local info for the compiled function. */
1392 struct cgraph_rtl_info *
1393 cgraph_rtl_info (tree decl)
1395 struct cgraph_node *node;
1397 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1398 node = cgraph_node (decl);
1399 if (decl != current_function_decl
1400 && !TREE_ASM_WRITTEN (node->decl))
1405 /* Return a string describing the failure REASON. */
1408 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
1411 #define DEFCIFCODE(code, string) string,
1413 static const char *cif_string_table[CIF_N_REASONS] = {
1414 #include "cif-code.def"
1417 /* Signedness of an enum type is implementation defined, so cast it
1418 to unsigned before testing. */
1419 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1420 return cif_string_table[reason];
1423 /* Return name of the node used in debug output. */
1425 cgraph_node_name (struct cgraph_node *node)
1427 return lang_hooks.decl_printable_name (node->decl, 2);
/* Names used to print out the availability enum.  Order must match
   enum availability in cgraph.h.  */
const char * const cgraph_availability_names[] =
  {"unset", "not_available", "overwritable", "available", "local"};
1435 /* Dump call graph node NODE to file F. */
/* NOTE(review): the embedded original line numbering (1435, 1438, ...) is
   discontinuous, so this listing has lost some lines (return type, braces
   and a few `if' guards).  The code below is kept byte-identical; only
   comments were added.  */
1438 dump_cgraph_node (FILE *f, struct cgraph_node *node)
1440 struct cgraph_edge *edge;
/* Identity: printable name, uid and the node's address.  */
1441 fprintf (f, "%s/%i(%i)", cgraph_node_name (node), node->uid,
1443 dump_addr (f, " @", (void *)node);
/* Where the body lives: inline-copy target and clone origin, if any.  */
1444 if (node->global.inlined_to)
1445 fprintf (f, " (inline copy in %s/%i)",
1446 cgraph_node_name (node->global.inlined_to),
1447 node->global.inlined_to->uid);
1449 fprintf (f, " (clone of %s/%i)",
1450 cgraph_node_name (node->clone_of),
1451 node->clone_of->uid);
1452 if (cgraph_function_flags_ready)
1453 fprintf (f, " availability:%s",
1454 cgraph_availability_names [cgraph_function_body_availability (node)]);
/* Profile count plus the inline-summary time/size estimates; the
   "after inlining" figures come from global info when they differ.  */
1456 fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
1457 (HOST_WIDEST_INT)node->count);
1458 if (node->local.inline_summary.self_time)
1459 fprintf (f, " %i time, %i benefit", node->local.inline_summary.self_time,
1460 node->local.inline_summary.time_inlining_benefit)
1461 if (node->global.time && node->global.time
1462 != node->local.inline_summary.self_time)
1463 fprintf (f, " (%i after inlining)", node->global.time);
1464 if (node->local.inline_summary.self_size)
1465 fprintf (f, " %i size, %i benefit", node->local.inline_summary.self_size,
1466 node->local.inline_summary.size_inlining_benefit);
1467 if (node->global.size && node->global.size
1468 != node->local.inline_summary.self_size)
1469 fprintf (f, " (%i after inlining)", node->global.size);
1470 if (node->local.inline_summary.estimated_self_stack_size)
1471 fprintf (f, " %i bytes stack usage", (int)node->local.inline_summary.estimated_self_stack_size);
1472 if (node->global.estimated_stack_size != node->local.inline_summary.estimated_self_stack_size)
1473 fprintf (f, " %i bytes after inlining", (int)node->global.estimated_stack_size);
/* Flag section: one keyword per set bit on the node.  */
1475 fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
1477 fprintf (f, " needed");
1478 if (node->address_taken)
1479 fprintf (f, " address_taken");
1480 else if (node->reachable)
1481 fprintf (f, " reachable");
1482 if (gimple_has_body_p (node->decl))
1483 fprintf (f, " body");
1485 fprintf (f, " process");
1486 if (node->local.local)
1487 fprintf (f, " local");
1488 if (node->local.externally_visible)
1489 fprintf (f, " externally_visible");
1490 if (node->local.finalized)
1491 fprintf (f, " finalized");
1492 if (node->local.disregard_inline_limits)
1493 fprintf (f, " always_inline");
1494 else if (node->local.inlinable)
1495 fprintf (f, " inlinable");
1496 if (node->local.redefined_extern_inline)
1497 fprintf (f, " redefined_extern_inline");
1498 if (TREE_ASM_WRITTEN (node->decl))
1499 fprintf (f, " asm_written");
/* Caller edges, with per-edge count/frequency/inlining annotations.  */
1501 fprintf (f, "\n called by: ");
1502 for (edge = node->callers; edge; edge = edge->next_caller)
1504 fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
1507 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1508 (HOST_WIDEST_INT)edge->count);
1509 if (edge->frequency)
1510 fprintf (f, "(%.2f per call) ",
1511 edge->frequency / (double)CGRAPH_FREQ_BASE);
1512 if (!edge->inline_failed)
1513 fprintf(f, "(inlined) ");
1514 if (edge->indirect_call)
1515 fprintf(f, "(indirect) ");
1516 if (edge->can_throw_external)
1517 fprintf(f, "(can throw external) ");
/* Callee edges, annotated the same way plus loop nesting depth.  */
1520 fprintf (f, "\n calls: ");
1521 for (edge = node->callees; edge; edge = edge->next_callee)
1523 fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
1525 if (!edge->inline_failed)
1526 fprintf(f, "(inlined) ");
1527 if (edge->indirect_call)
1528 fprintf(f, "(indirect) ");
1530 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1531 (HOST_WIDEST_INT)edge->count);
1532 if (edge->frequency)
1533 fprintf (f, "(%.2f per call) ",
1534 edge->frequency / (double)CGRAPH_FREQ_BASE);
1535 if (edge->loop_nest)
1536 fprintf (f, "(nested in %i loops) ", edge->loop_nest);
1537 if (edge->can_throw_external)
1538 fprintf(f, "(can throw external) ");
1544 /* Dump call graph node NODE to stderr. */
1547 debug_cgraph_node (struct cgraph_node *node)
1549 dump_cgraph_node (stderr, node);
1553 /* Dump the callgraph to file F. */
1556 dump_cgraph (FILE *f)
1558 struct cgraph_node *node;
1560 fprintf (f, "callgraph:\n\n");
1561 for (node = cgraph_nodes; node; node = node->next)
1562 dump_cgraph_node (f, node);
1566 /* Dump the call graph to stderr. */
1571 dump_cgraph (stderr);
1575 /* Set the DECL_ASSEMBLER_NAME and update cgraph hashtables. */
1578 change_decl_assembler_name (tree decl, tree name)
1580 gcc_assert (!assembler_name_hash);
1581 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
1583 SET_DECL_ASSEMBLER_NAME (decl, name);
1586 if (name == DECL_ASSEMBLER_NAME (decl))
1589 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
1590 && DECL_RTL_SET_P (decl))
1591 warning (0, "%D renamed after being referenced in assembly", decl);
1593 SET_DECL_ASSEMBLER_NAME (decl, name);
1596 /* Add a top-level asm statement to the list. */
1598 struct cgraph_asm_node *
1599 cgraph_add_asm_node (tree asm_str)
1601 struct cgraph_asm_node *node;
1603 node = GGC_CNEW (struct cgraph_asm_node);
1604 node->asm_str = asm_str;
1605 node->order = cgraph_order++;
1607 if (cgraph_asm_nodes == NULL)
1608 cgraph_asm_nodes = node;
1610 cgraph_asm_last_node->next = node;
1611 cgraph_asm_last_node = node;
1615 /* Return true when the DECL can possibly be inlined. */
1617 cgraph_function_possibly_inlined_p (tree decl)
1619 if (!cgraph_global_info_ready)
1620 return !DECL_UNINLINABLE (decl);
1621 return DECL_POSSIBLY_INLINED (decl);
1624 /* Create clone of E in the node N represented by CALL_EXPR the callgraph. */
1625 struct cgraph_edge *
1626 cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
1627 gimple call_stmt, gcov_type count_scale, int freq_scale,
1628 int loop_nest, bool update_original)
1630 struct cgraph_edge *new_edge;
1631 gcov_type count = e->count * count_scale / REG_BR_PROB_BASE;
1632 gcov_type freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
1634 if (freq > CGRAPH_FREQ_MAX)
1635 freq = CGRAPH_FREQ_MAX;
1636 new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq,
1637 e->loop_nest + loop_nest);
1639 new_edge->inline_failed = e->inline_failed;
1640 new_edge->indirect_call = e->indirect_call;
1641 if (update_original)
1643 e->count -= new_edge->count;
1647 cgraph_call_edge_duplication_hooks (e, new_edge);
1651 /* Create node representing clone of N executed COUNT times. Decrease
1652 the execution counts from original node too.
1654 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
1655 function's profile to reflect the fact that part of execution is handled
1657 struct cgraph_node *
1658 cgraph_clone_node (struct cgraph_node *n, gcov_type count, int freq,
1659 int loop_nest, bool update_original)
1661 struct cgraph_node *new_node = cgraph_create_node ();
1662 struct cgraph_edge *e;
1663 gcov_type count_scale;
1665 new_node->decl = n->decl;
1666 new_node->origin = n->origin;
1667 if (new_node->origin)
1669 new_node->next_nested = new_node->origin->nested;
1670 new_node->origin->nested = new_node;
1672 new_node->analyzed = n->analyzed;
1673 new_node->local = n->local;
1674 new_node->global = n->global;
1675 new_node->rtl = n->rtl;
1676 new_node->count = count;
1677 new_node->clone = n->clone;
1680 if (new_node->count > n->count)
1681 count_scale = REG_BR_PROB_BASE;
1683 count_scale = new_node->count * REG_BR_PROB_BASE / n->count;
1687 if (update_original)
1694 for (e = n->callees;e; e=e->next_callee)
1695 cgraph_clone_edge (e, new_node, e->call_stmt, count_scale, freq, loop_nest,
1698 new_node->next_sibling_clone = n->clones;
1700 n->clones->prev_sibling_clone = new_node;
1701 n->clones = new_node;
1702 new_node->clone_of = n;
1704 cgraph_call_node_duplication_hooks (n, new_node);
1708 /* Create a new name for omp child function. Returns an identifier. */
1710 static GTY(()) unsigned int clone_fn_id_num;
1713 clone_function_name (tree decl)
1715 tree name = DECL_ASSEMBLER_NAME (decl);
1716 size_t len = IDENTIFIER_LENGTH (name);
1717 char *tmp_name, *prefix;
1719 prefix = XALLOCAVEC (char, len + strlen ("_clone") + 1);
1720 memcpy (prefix, IDENTIFIER_POINTER (name), len);
1721 strcpy (prefix + len, "_clone");
1722 #ifndef NO_DOT_IN_LABEL
1724 #elif !defined NO_DOLLAR_IN_LABEL
1727 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
1728 return get_identifier (tmp_name);
1731 /* Create callgraph node clone with new declaration. The actual body will
1732 be copied later at compilation stage.
1734 TODO: after merging in ipa-sra use function call notes instead of args_to_skip
/* NOTE(review): the embedded original line numbering here is
   discontinuous, so this listing has lost some lines (braces, a few
   guards, the final return).  Code is kept byte-identical; comments
   only were added.  */
1737 struct cgraph_node *
1738 cgraph_create_virtual_clone (struct cgraph_node *old_node,
1739 VEC(cgraph_edge_p,heap) *redirect_callers,
1740 VEC(ipa_replace_map_p,gc) *tree_map,
1741 bitmap args_to_skip)
1743 tree old_decl = old_node->decl;
1744 struct cgraph_node *new_node = NULL;
1746 struct cgraph_node key, **slot;
1748 struct cgraph_edge *e;
1750 gcc_assert (tree_versionable_function_p (old_decl));
1752 /* Make a new FUNCTION_DECL tree node */
/* Presumably the plain copy is taken when ARGS_TO_SKIP is NULL and the
   skip-args variant otherwise -- the selecting `if' is among the
   dropped lines; confirm against the full source.  */
1754 new_decl = copy_node (old_decl);
1756 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
1757 DECL_STRUCT_FUNCTION (new_decl) = NULL;
1759 /* Generate a new name for the new version. */
1760 DECL_NAME (new_decl) = clone_function_name (old_decl);
1761 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
1762 SET_DECL_RTL (new_decl, NULL);
1764 new_node = cgraph_clone_node (old_node, old_node->count,
1765 CGRAPH_FREQ_BASE, 0, false);
1766 new_node->decl = new_decl;
1767 /* Update the properties.
1768 Make clone visible only within this translation unit. Make sure
1769 that is not weak also.
1770 ??? We cannot use COMDAT linkage because there is no
1771 ABI support for this. */
1772 DECL_EXTERNAL (new_node->decl) = 0;
1773 DECL_COMDAT_GROUP (new_node->decl) = 0;
1774 TREE_PUBLIC (new_node->decl) = 0;
1775 DECL_COMDAT (new_node->decl) = 0;
1776 DECL_WEAK (new_node->decl) = 0;
1777 new_node->clone.tree_map = tree_map;
1778 new_node->clone.args_to_skip = args_to_skip;
/* Combine the new args_to_skip with any skipping already applied by
   previous clonings of this node, renumbering indices against the
   original (pre-skip) argument list.  */
1780 new_node->clone.combined_args_to_skip = old_node->clone.combined_args_to_skip;
1781 else if (old_node->clone.combined_args_to_skip)
1783 int newi = 0, oldi = 0;
1785 bitmap new_args_to_skip = BITMAP_GGC_ALLOC ();
1786 struct cgraph_node *orig_node;
1787 for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
1789 for (arg = DECL_ARGUMENTS (orig_node->decl); arg; arg = TREE_CHAIN (arg), oldi++)
1791 if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
1793 bitmap_set_bit (new_args_to_skip, oldi);
1796 if (bitmap_bit_p (args_to_skip, newi))
1797 bitmap_set_bit (new_args_to_skip, oldi);
1800 new_node->clone.combined_args_to_skip = new_args_to_skip;
1803 new_node->clone.combined_args_to_skip = args_to_skip;
1804 new_node->local.externally_visible = 0;
1805 new_node->local.local = 1;
1806 new_node->lowered = true;
1807 new_node->reachable = true;
/* Register the new decl in the cgraph hash and, when present, in the
   assembler-name hash.  */
1809 key.decl = new_decl;
1810 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
1811 gcc_assert (!*slot);
1813 if (assembler_name_hash)
1816 tree name = DECL_ASSEMBLER_NAME (new_decl);
1818 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
1819 decl_assembler_name_hash (name),
1821 gcc_assert (!*aslot);
/* Finally retarget the requested caller edges at the clone.  */
1824 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1826 /* Redirect calls to the old version node to point to its new
1828 cgraph_redirect_edge_callee (e, new_node);
1834 /* NODE is no longer nested function; update cgraph accordingly. */
1836 cgraph_unnest_node (struct cgraph_node *node)
1838 struct cgraph_node **node2 = &node->origin->nested;
1839 gcc_assert (node->origin);
1841 while (*node2 != node)
1842 node2 = &(*node2)->next_nested;
1843 *node2 = node->next_nested;
1844 node->origin = NULL;
1847 /* Return function availability. See cgraph.h for description of individual
1850 cgraph_function_body_availability (struct cgraph_node *node)
1852 enum availability avail;
1853 gcc_assert (cgraph_function_flags_ready);
1854 if (!node->analyzed)
1855 avail = AVAIL_NOT_AVAILABLE;
1856 else if (node->local.local)
1857 avail = AVAIL_LOCAL;
1858 else if (!node->local.externally_visible)
1859 avail = AVAIL_AVAILABLE;
1860 /* Inline functions are safe to be analyzed even if their sybol can
1861 be overwritten at runtime. It is not meaningful to enfore any sane
1862 behaviour on replacing inline function by different body. */
1863 else if (DECL_DECLARED_INLINE_P (node->decl))
1864 avail = AVAIL_AVAILABLE;
1866 /* If the function can be overwritten, return OVERWRITABLE. Take
1867 care at least of two notable extensions - the COMDAT functions
1868 used to share template instantiations in C++ (this is symmetric
1869 to code cp_cannot_inline_tree_fn and probably shall be shared and
1870 the inlinability hooks completely eliminated).
1872 ??? Does the C++ one definition rule allow us to always return
1873 AVAIL_AVAILABLE here? That would be good reason to preserve this
1876 else if (DECL_REPLACEABLE_P (node->decl) && !DECL_EXTERNAL (node->decl))
1877 avail = AVAIL_OVERWRITABLE;
1878 else avail = AVAIL_AVAILABLE;
1883 /* Add the function FNDECL to the call graph.
1884 Unlike cgraph_finalize_function, this function is intended to be used
1885 by middle end and allows insertion of new function at arbitrary point
1886 of compilation. The function can be either in high, low or SSA form
1889 The function is assumed to be reachable and have address taken (so no
1890 API breaking optimizations are performed on it).
1892 Main work done by this function is to enqueue the function for later
1893 processing to avoid need the passes to be re-entrant. */
1896 cgraph_add_new_function (tree fndecl, bool lowered)
1898 struct cgraph_node *node;
1899 switch (cgraph_state)
1901 case CGRAPH_STATE_CONSTRUCTION:
1902 /* Just enqueue function to be processed at nearest occurrence. */
1903 node = cgraph_node (fndecl);
1904 node->next_needed = cgraph_new_nodes;
1906 node->lowered = true;
1907 cgraph_new_nodes = node;
1910 case CGRAPH_STATE_IPA:
1911 case CGRAPH_STATE_IPA_SSA:
1912 case CGRAPH_STATE_EXPANSION:
1913 /* Bring the function into finalized state and enqueue for later
1914 analyzing and compilation. */
1915 node = cgraph_node (fndecl);
1916 node->local.local = false;
1917 node->local.finalized = true;
1918 node->reachable = node->needed = true;
1919 if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
1921 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
1922 current_function_decl = fndecl;
1923 gimple_register_cfg_hooks ();
1924 tree_lowering_passes (fndecl);
1925 bitmap_obstack_initialize (NULL);
1926 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
1927 execute_pass_list (pass_early_local_passes.pass.sub);
1928 bitmap_obstack_release (NULL);
1930 current_function_decl = NULL;
1935 node->lowered = true;
1936 node->next_needed = cgraph_new_nodes;
1937 cgraph_new_nodes = node;
1940 case CGRAPH_STATE_FINISHED:
1941 /* At the very end of compilation we have to do all the work up
1943 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
1944 current_function_decl = fndecl;
1945 gimple_register_cfg_hooks ();
1947 tree_lowering_passes (fndecl);
1948 bitmap_obstack_initialize (NULL);
1949 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
1950 execute_pass_list (pass_early_local_passes.pass.sub);
1951 bitmap_obstack_release (NULL);
1952 tree_rest_of_compilation (fndecl);
1954 current_function_decl = NULL;
1958 /* Set a personality if required and we already passed EH lowering. */
1960 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
1961 == eh_personality_lang))
1962 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
1965 /* Return true if NODE can be made local for API change.
1966 Extern inline functions and C++ COMDAT functions can be made local
1967 at the expense of possible code size growth if function is used in multiple
1968 compilation units. */
1970 cgraph_node_can_be_local_p (struct cgraph_node *node)
1972 return !node->needed;
1975 /* Bring NODE local. */
1977 cgraph_make_node_local (struct cgraph_node *node)
1979 gcc_assert (cgraph_node_can_be_local_p (node));
1980 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
1982 DECL_COMDAT (node->decl) = 0;
1983 DECL_COMDAT_GROUP (node->decl) = 0;
1984 TREE_PUBLIC (node->decl) = 0;
1985 DECL_WEAK (node->decl) = 0;
1986 DECL_EXTERNAL (node->decl) = 0;
1987 node->local.externally_visible = false;
1988 node->local.local = true;
1989 gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
1993 #include "gt-cgraph.h"