1 /* Callgraph handling code.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file contains basic routines manipulating call graph
26 The call-graph is a data structure designed for intra-procedural optimization
27 but it is also used in non-unit-at-a-time compilation to allow easier code
30 The call-graph consists of nodes and edges represented via linked lists.
31 Each function (external or not) corresponds to the unique node.
33 The mapping from declarations to call-graph nodes is done using hash table
34 based on DECL_UID. The call-graph nodes are created lazily using
35 cgraph_node function when called for unknown declaration.
37 The callgraph at the moment does not represent indirect calls or calls
38 from other compilation unit. Flag NEEDED is set for each node that may
39 be accessed in such an invisible way and it shall be considered an
40 entry point to the callgraph.
42 Interprocedural information:
44 Callgraph is place to store data needed for interprocedural optimization.
45 All data structures are divided into three components: local_info that
46 is produced while analyzing the function, global_info that is result
47 of global walking of the callgraph on the end of compilation and
48 rtl_info used by RTL backend to propagate data from already compiled
49 functions to their callers.
53 The function inlining information is decided in advance and maintained
54 in the callgraph as so called inline plan.
55 For each inlined call, the callee's node is cloned to represent the
56 new function copy produced by inliner.
57 Each inlined call gets a unique corresponding clone node of the callee
58 and the data structure is updated while inlining is performed, so
59 the clones are eliminated and their callee edges redirected to the
62 Each edge has "inline_failed" field. When the field is set to NULL,
63 the call will be inlined. When it is non-NULL it contains a reason
64 why inlining wasn't performed. */
68 #include "coretypes.h"
71 #include "tree-inline.h"
72 #include "langhooks.h"
79 #include "basic-block.h"
84 #include "tree-dump.h"
85 #include "tree-flow.h"
86 #include "value-prof.h"
89 static void cgraph_node_remove_callers (struct cgraph_node *node);
90 static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
91 static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
93 /* Hash table used to convert declarations into nodes. */
94 static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;
95 /* Hash table used to convert assembler names into nodes. */
96 static GTY((param_is (struct cgraph_node))) htab_t assembler_name_hash;
98 /* The linked list of cgraph nodes. */
99 struct cgraph_node *cgraph_nodes;
101 /* Queue of cgraph nodes scheduled to be lowered. */
102 struct cgraph_node *cgraph_nodes_queue;
104 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
105 secondary queue used during optimization to accommodate passes that
106 may generate new functions that need to be optimized and expanded. */
107 struct cgraph_node *cgraph_new_nodes;
109 /* Number of nodes in existence. */
112 /* Maximal uid used in cgraph nodes. */
115 /* Maximal uid used in cgraph edges. */
116 int cgraph_edge_max_uid;
118 /* Maximal pid used for profiling */
121 /* Set when whole unit has been analyzed so we can access global info. */
122 bool cgraph_global_info_ready = false;
124 /* What state callgraph is in right now. */
125 enum cgraph_state cgraph_state = CGRAPH_STATE_CONSTRUCTION;
127 /* Set when the cgraph is fully build and the basic flags are computed. */
128 bool cgraph_function_flags_ready = false;
130 /* Linked list of cgraph asm nodes. */
131 struct cgraph_asm_node *cgraph_asm_nodes;
133 /* Last node in cgraph_asm_nodes. */
134 static GTY(()) struct cgraph_asm_node *cgraph_asm_last_node;
136 /* The order index of the next cgraph node to be created. This is
137 used so that we can sort the cgraph nodes in order by when we saw
138 them, to support -fno-toplevel-reorder. */
141 /* List of hooks triggered on cgraph_edge events. */
142 struct cgraph_edge_hook_list {
143 cgraph_edge_hook hook;
145 struct cgraph_edge_hook_list *next;
148 /* List of hooks triggered on cgraph_node events. */
149 struct cgraph_node_hook_list {
150 cgraph_node_hook hook;
152 struct cgraph_node_hook_list *next;
155 /* List of hooks triggered on events involving two cgraph_edges. */
156 struct cgraph_2edge_hook_list {
157 cgraph_2edge_hook hook;
159 struct cgraph_2edge_hook_list *next;
162 /* List of hooks triggered on events involving two cgraph_nodes. */
163 struct cgraph_2node_hook_list {
164 cgraph_2node_hook hook;
166 struct cgraph_2node_hook_list *next;
169 /* List of hooks triggered when an edge is removed. */
170 struct cgraph_edge_hook_list *first_cgraph_edge_removal_hook;
171 /* List of hooks triggered when a node is removed. */
172 struct cgraph_node_hook_list *first_cgraph_node_removal_hook;
173 /* List of hooks triggered when an edge is duplicated. */
174 struct cgraph_2edge_hook_list *first_cgraph_edge_duplicated_hook;
175 /* List of hooks triggered when a node is duplicated. */
176 struct cgraph_2node_hook_list *first_cgraph_node_duplicated_hook;
177 /* List of hooks triggered when a function is inserted. */
178 struct cgraph_node_hook_list *first_cgraph_function_insertion_hook;
180 /* Head of a linked list of unused (freed) call graph nodes.
181 Do not GTY((delete)) this list so UIDs get reliably recycled. */
182 static GTY(()) struct cgraph_node *free_nodes;
183 /* Head of a linked list of unused (freed) call graph edges.
184 Do not GTY((delete)) this list so UIDs get reliably recycled. */
185 static GTY(()) struct cgraph_edge *free_edges;
187 /* Macros to access the next item in the list of free cgraph nodes and
189 #define NEXT_FREE_NODE(NODE) (NODE)->next
190 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
192 /* Register HOOK to be called with DATA on each removed edge. */
193 struct cgraph_edge_hook_list *
194 cgraph_add_edge_removal_hook (cgraph_edge_hook hook, void *data)
196 struct cgraph_edge_hook_list *entry;
197 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
199 entry = (struct cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
209 /* Remove ENTRY from the list of hooks called on removing edges. */
211 cgraph_remove_edge_removal_hook (struct cgraph_edge_hook_list *entry)
213 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
215 while (*ptr != entry)
221 /* Call all edge removal hooks. */
223 cgraph_call_edge_removal_hooks (struct cgraph_edge *e)
225 struct cgraph_edge_hook_list *entry = first_cgraph_edge_removal_hook;
228 entry->hook (e, entry->data);
233 /* Register HOOK to be called with DATA on each removed node. */
234 struct cgraph_node_hook_list *
235 cgraph_add_node_removal_hook (cgraph_node_hook hook, void *data)
237 struct cgraph_node_hook_list *entry;
238 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
240 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
250 /* Remove ENTRY from the list of hooks called on removing nodes. */
252 cgraph_remove_node_removal_hook (struct cgraph_node_hook_list *entry)
254 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
256 while (*ptr != entry)
262 /* Call all node removal hooks. */
264 cgraph_call_node_removal_hooks (struct cgraph_node *node)
266 struct cgraph_node_hook_list *entry = first_cgraph_node_removal_hook;
269 entry->hook (node, entry->data);
274 /* Register HOOK to be called with DATA on each inserted function. */
275 struct cgraph_node_hook_list *
276 cgraph_add_function_insertion_hook (cgraph_node_hook hook, void *data)
278 struct cgraph_node_hook_list *entry;
279 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
281 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
291 /* Remove ENTRY from the list of hooks called on function insertion. */
293 cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *entry)
295 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
297 while (*ptr != entry)
303 /* Call all function insertion hooks. */
305 cgraph_call_function_insertion_hooks (struct cgraph_node *node)
307 struct cgraph_node_hook_list *entry = first_cgraph_function_insertion_hook;
310 entry->hook (node, entry->data);
315 /* Register HOOK to be called with DATA on each duplicated edge. */
316 struct cgraph_2edge_hook_list *
317 cgraph_add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
319 struct cgraph_2edge_hook_list *entry;
320 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
322 entry = (struct cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
332 /* Remove ENTRY from the list of hooks called on duplicating edges. */
334 cgraph_remove_edge_duplication_hook (struct cgraph_2edge_hook_list *entry)
336 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
338 while (*ptr != entry)
344 /* Call all edge duplication hooks. */
346 cgraph_call_edge_duplication_hooks (struct cgraph_edge *cs1,
347 struct cgraph_edge *cs2)
349 struct cgraph_2edge_hook_list *entry = first_cgraph_edge_duplicated_hook;
352 entry->hook (cs1, cs2, entry->data);
357 /* Register HOOK to be called with DATA on each duplicated node. */
358 struct cgraph_2node_hook_list *
359 cgraph_add_node_duplication_hook (cgraph_2node_hook hook, void *data)
361 struct cgraph_2node_hook_list *entry;
362 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
364 entry = (struct cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
374 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
376 cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list *entry)
378 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
380 while (*ptr != entry)
386 /* Call all node duplication hooks. */
388 cgraph_call_node_duplication_hooks (struct cgraph_node *node1,
389 struct cgraph_node *node2)
391 struct cgraph_2node_hook_list *entry = first_cgraph_node_duplicated_hook;
394 entry->hook (node1, node2, entry->data);
399 /* Returns a hash code for P. */
402 hash_node (const void *p)
404 const struct cgraph_node *n = (const struct cgraph_node *) p;
405 return (hashval_t) DECL_UID (n->decl);
409 /* Returns nonzero if P1 and P2 are equal. */
412 eq_node (const void *p1, const void *p2)
414 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
415 const struct cgraph_node *n2 = (const struct cgraph_node *) p2;
416 return DECL_UID (n1->decl) == DECL_UID (n2->decl);
419 /* Allocate new callgraph node. */
421 static inline struct cgraph_node *
422 cgraph_allocate_node (void)
424 struct cgraph_node *node;
429 free_nodes = NEXT_FREE_NODE (node);
433 node = GGC_CNEW (struct cgraph_node);
434 node->uid = cgraph_max_uid++;
440 /* Allocate new callgraph node and insert it into basic data structures. */
442 static struct cgraph_node *
443 cgraph_create_node (void)
445 struct cgraph_node *node = cgraph_allocate_node ();
447 node->next = cgraph_nodes;
449 node->order = cgraph_order++;
451 cgraph_nodes->previous = node;
452 node->previous = NULL;
453 node->global.estimated_growth = INT_MIN;
459 /* Return cgraph node assigned to DECL. Create new one when needed. */
462 cgraph_node (tree decl)
464 struct cgraph_node key, *node, **slot;
466 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
469 cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);
473 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
478 if (node->same_body_alias)
479 node = node->same_body;
483 node = cgraph_create_node ();
486 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
488 node->origin = cgraph_node (DECL_CONTEXT (decl));
489 node->next_nested = node->origin->nested;
490 node->origin->nested = node;
492 if (assembler_name_hash)
495 tree name = DECL_ASSEMBLER_NAME (decl);
497 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
498 decl_assembler_name_hash (name),
500 /* We can have multiple declarations with the same assembler name. For C++
501 it is __builtin_strlen and strlen, for instance. Do we need to
502 record them all? The original implementation marked just the first one,
503 so let's hope for the best. */
510 /* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
511 Same body aliases are output whenever the body of DECL is output,
512 and cgraph_node (ALIAS) transparently returns cgraph_node (DECL). */
515 cgraph_same_body_alias (tree alias, tree decl)
517 struct cgraph_node key, *alias_node, *decl_node, **slot;
519 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
520 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
521 gcc_assert (!assembler_name_hash);
523 #ifndef ASM_OUTPUT_DEF
524 /* If aliases aren't supported by the assembler, fail. */
528 /* Comdat same body aliases are only supported when comdat groups
529 are supported and the symbols are weak. */
530 if (DECL_ONE_ONLY (decl) && (!HAVE_COMDAT_GROUP || !DECL_WEAK (decl)))
533 decl_node = cgraph_node (decl);
537 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
539 /* If the cgraph_node has been already created, fail. */
543 alias_node = cgraph_allocate_node ();
544 alias_node->decl = alias;
545 alias_node->same_body_alias = 1;
546 alias_node->same_body = decl_node;
547 alias_node->previous = NULL;
548 if (decl_node->same_body)
549 decl_node->same_body->previous = alias_node;
550 alias_node->next = decl_node->same_body;
551 decl_node->same_body = alias_node;
556 /* Returns the cgraph node assigned to DECL or NULL if no cgraph node
560 cgraph_get_node (tree decl)
562 struct cgraph_node key, *node = NULL, **slot;
564 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
571 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
577 if (node->same_body_alias)
578 node = node->same_body;
583 /* Insert already constructed node into hashtable. */
586 cgraph_insert_node_to_hashtable (struct cgraph_node *node)
588 struct cgraph_node **slot;
590 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, node, INSERT);
596 /* Returns a hash code for P. */
599 hash_node_by_assembler_name (const void *p)
601 const struct cgraph_node *n = (const struct cgraph_node *) p;
602 return (hashval_t) decl_assembler_name_hash (DECL_ASSEMBLER_NAME (n->decl));
605 /* Returns nonzero if P1 and P2 are equal. */
608 eq_assembler_name (const void *p1, const void *p2)
610 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
611 const_tree name = (const_tree)p2;
612 return (decl_assembler_name_equal (n1->decl, name));
615 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
616 Return NULL if there's no such node. */
619 cgraph_node_for_asm (tree asmname)
621 struct cgraph_node *node;
624 if (!assembler_name_hash)
626 assembler_name_hash =
627 htab_create_ggc (10, hash_node_by_assembler_name, eq_assembler_name,
629 for (node = cgraph_nodes; node; node = node->next)
630 if (!node->global.inlined_to)
632 tree name = DECL_ASSEMBLER_NAME (node->decl);
633 slot = htab_find_slot_with_hash (assembler_name_hash, name,
634 decl_assembler_name_hash (name),
636 /* We can have multiple declarations with the same assembler name. For C++
637 it is __builtin_strlen and strlen, for instance. Do we need to
638 record them all? The original implementation marked just the first one,
639 so let's hope for the best. */
644 struct cgraph_node *alias;
646 for (alias = node->same_body; alias; alias = alias->next)
649 name = DECL_ASSEMBLER_NAME (alias->decl);
650 hash = decl_assembler_name_hash (name);
651 slot = htab_find_slot_with_hash (assembler_name_hash, name,
660 slot = htab_find_slot_with_hash (assembler_name_hash, asmname,
661 decl_assembler_name_hash (asmname),
666 node = (struct cgraph_node *) *slot;
667 if (node->same_body_alias)
668 node = node->same_body;
674 /* Returns a hash value for X (which really is a die_struct). */
677 edge_hash (const void *x)
679 return htab_hash_pointer (((const struct cgraph_edge *) x)->call_stmt);
682 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
685 edge_eq (const void *x, const void *y)
687 return ((const struct cgraph_edge *) x)->call_stmt == y;
691 /* Return the callgraph edge representing the GIMPLE_CALL statement
695 cgraph_edge (struct cgraph_node *node, gimple call_stmt)
697 struct cgraph_edge *e, *e2;
700 if (node->call_site_hash)
701 return (struct cgraph_edge *)
702 htab_find_with_hash (node->call_site_hash, call_stmt,
703 htab_hash_pointer (call_stmt));
705 /* This loop may turn out to be performance problem. In such case adding
706 hashtables into call nodes with very many edges is probably best
707 solution. It is not good idea to add pointer into CALL_EXPR itself
708 because we want to make possible having multiple cgraph nodes representing
709 different clones of the same body before the body is actually cloned. */
710 for (e = node->callees; e; e= e->next_callee)
712 if (e->call_stmt == call_stmt)
719 node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
720 for (e2 = node->callees; e2; e2 = e2->next_callee)
723 slot = htab_find_slot_with_hash (node->call_site_hash,
725 htab_hash_pointer (e2->call_stmt),
736 /* Change field call_stmt of edge E to NEW_STMT. */
739 cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt)
741 if (e->caller->call_site_hash)
743 htab_remove_elt_with_hash (e->caller->call_site_hash,
745 htab_hash_pointer (e->call_stmt));
747 e->call_stmt = new_stmt;
748 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
749 e->can_throw_external = stmt_can_throw_external (new_stmt);
751 if (e->caller->call_site_hash)
754 slot = htab_find_slot_with_hash (e->caller->call_site_hash,
757 (e->call_stmt), INSERT);
763 /* Like cgraph_set_call_stmt but walk the clone tree and update all
764 clones sharing the same function body. */
767 cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
768 gimple old_stmt, gimple new_stmt)
770 struct cgraph_node *node;
771 struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);
774 cgraph_set_call_stmt (edge, new_stmt);
780 struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
782 cgraph_set_call_stmt (edge, new_stmt);
785 else if (node->next_sibling_clone)
786 node = node->next_sibling_clone;
789 while (node != orig && !node->next_sibling_clone)
790 node = node->clone_of;
792 node = node->next_sibling_clone;
797 /* Like cgraph_create_edge walk the clone tree and update all clones sharing
800 TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
801 frequencies of the clones. */
804 cgraph_create_edge_including_clones (struct cgraph_node *orig,
805 struct cgraph_node *callee,
806 gimple stmt, gcov_type count,
807 int freq, int loop_depth,
808 cgraph_inline_failed_t reason)
810 struct cgraph_node *node;
811 struct cgraph_edge *edge;
813 if (!cgraph_edge (orig, stmt))
815 edge = cgraph_create_edge (orig, callee, stmt, count, freq, loop_depth);
816 edge->inline_failed = reason;
823 /* It is possible that we already constant propagated into the clone
824 and turned the indirect call into a direct call. */
825 if (!cgraph_edge (node, stmt))
827 edge = cgraph_create_edge (node, callee, stmt, count,
829 edge->inline_failed = reason;
834 else if (node->next_sibling_clone)
835 node = node->next_sibling_clone;
838 while (node != orig && !node->next_sibling_clone)
839 node = node->clone_of;
841 node = node->next_sibling_clone;
846 /* Give initial reasons why inlining would fail on EDGE. This gets either
847 nullified or usually overwritten by more precise reasons later. */
850 initialize_inline_failed (struct cgraph_edge *e)
852 struct cgraph_node *callee = e->callee;
854 if (!callee->analyzed)
855 e->inline_failed = CIF_BODY_NOT_AVAILABLE;
856 else if (callee->local.redefined_extern_inline)
857 e->inline_failed = CIF_REDEFINED_EXTERN_INLINE;
858 else if (!callee->local.inlinable)
859 e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
860 else if (e->call_stmt && gimple_call_cannot_inline_p (e->call_stmt))
861 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
863 e->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
866 /* Create edge from CALLER to CALLEE in the cgraph. */
869 cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
870 gimple call_stmt, gcov_type count, int freq, int nest)
872 struct cgraph_edge *edge;
875 /* LTO does not actually have access to the call_stmt since these
876 have not been loaded yet. */
879 #ifdef ENABLE_CHECKING
880 /* This is a rather pricey check possibly triggering construction of
881 the call stmt hashtable. */
882 gcc_assert (!cgraph_edge (caller, call_stmt));
885 gcc_assert (is_gimple_call (call_stmt));
/* Reuse an edge from the free list when possible so edge UIDs are recycled. */
891 free_edges = NEXT_FREE_EDGE (edge);
895 edge = GGC_NEW (struct cgraph_edge);
896 edge->uid = cgraph_edge_max_uid++;
901 edge->caller = caller;
902 edge->callee = callee;
903 edge->call_stmt = call_stmt;
904 push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
905 edge->can_throw_external
906 = call_stmt ? stmt_can_throw_external (call_stmt) : false;
/* Link the new edge into the caller's callee list and the callee's
caller list (both doubly linked). */
908 edge->prev_caller = NULL;
909 edge->next_caller = callee->callers;
911 callee->callers->prev_caller = edge;
912 edge->prev_callee = NULL;
913 edge->next_callee = caller->callees;
915 caller->callees->prev_callee = edge;
916 caller->callees = edge;
917 callee->callers = edge;
919 gcc_assert (count >= 0);
920 edge->frequency = freq;
921 gcc_assert (freq >= 0);
922 gcc_assert (freq <= CGRAPH_FREQ_MAX);
923 edge->loop_nest = nest;
924 edge->indirect_call = 0;
925 edge->call_stmt_cannot_inline_p =
926 (call_stmt ? gimple_call_cannot_inline_p (call_stmt) : false);
927 if (call_stmt && caller->call_site_hash)
930 slot = htab_find_slot_with_hash (caller->call_site_hash,
939 initialize_inline_failed (edge);
944 /* Remove the edge E from the list of the callers of the callee. */
947 cgraph_edge_remove_callee (struct cgraph_edge *e)
950 e->prev_caller->next_caller = e->next_caller;
952 e->next_caller->prev_caller = e->prev_caller;
954 e->callee->callers = e->next_caller;
957 /* Remove the edge E from the list of the callees of the caller. */
960 cgraph_edge_remove_caller (struct cgraph_edge *e)
963 e->prev_callee->next_callee = e->next_callee;
965 e->next_callee->prev_callee = e->prev_callee;
967 e->caller->callees = e->next_callee;
968 if (e->caller->call_site_hash)
969 htab_remove_elt_with_hash (e->caller->call_site_hash,
971 htab_hash_pointer (e->call_stmt));
974 /* Put the edge onto the free list. */
977 cgraph_free_edge (struct cgraph_edge *e)
981 /* Clear out the edge so we do not dangle pointers. */
982 memset (e, 0, sizeof (*e));
984 NEXT_FREE_EDGE (e) = free_edges;
988 /* Remove the edge E in the cgraph. */
991 cgraph_remove_edge (struct cgraph_edge *e)
993 /* Call all edge removal hooks. */
994 cgraph_call_edge_removal_hooks (e);
996 /* Remove from callers list of the callee. */
997 cgraph_edge_remove_callee (e);
999 /* Remove from callees list of the callers. */
1000 cgraph_edge_remove_caller (e);
1002 /* Put the edge onto the free list. */
1003 cgraph_free_edge (e);
1006 /* Redirect callee of E to N. The function does not update underlying
1010 cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1012 /* Remove from callers list of the current callee. */
1013 cgraph_edge_remove_callee (e);
1015 /* Insert to callers list of the new callee. */
1016 e->prev_caller = NULL;
1018 n->callers->prev_caller = e;
1019 e->next_caller = n->callers;
1025 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1026 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1027 of OLD_STMT if it was previously a call statement. */
1030 cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
1031 gimple old_stmt, tree old_call, gimple new_stmt)
1033 tree new_call = (is_gimple_call (new_stmt)) ? gimple_call_fndecl (new_stmt) : 0;
1035 /* If we are seeing indirect calls only, there is nothing to update. */
1036 if (!new_call && !old_call)
1038 /* See if we turned an indirect call into a direct call or folded a call to
1039 one builtin into a different builtin. */
1040 if (old_call != new_call)
1042 struct cgraph_edge *e = cgraph_edge (node, old_stmt);
1043 struct cgraph_edge *ne = NULL;
1050 /* See if the call is already there. It might be because indirect
1051 inlining already found it. */
1052 if (new_call && e->callee->decl == new_call)
1055 /* Otherwise remove the edge and create a new one; we can't simply redirect
1056 since the function has changed, so the inline plan and other information
1057 attached to the edge is invalid. */
1059 frequency = e->frequency;
1060 loop_nest = e->loop_nest;
1061 cgraph_remove_edge (e);
1065 /* We are seeing a new direct call; compute profile info based on BB. */
1066 basic_block bb = gimple_bb (new_stmt);
1068 frequency = compute_call_stmt_bb_frequency (current_function_decl,
1070 loop_nest = bb->loop_depth;
1075 ne = cgraph_create_edge (node, cgraph_node (new_call),
1076 new_stmt, count, frequency,
1078 gcc_assert (ne->inline_failed);
1081 /* We only updated the call stmt; update the pointer in the cgraph edge. */
1082 else if (old_stmt != new_stmt)
1083 cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
1086 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1087 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1088 of OLD_STMT before it was updated (updating can happen inplace). */
1091 cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
1093 struct cgraph_node *orig = cgraph_node (cfun->decl);
1094 struct cgraph_node *node;
1096 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1098 for (node = orig->clones; node != orig;)
1100 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1102 node = node->clones;
1103 else if (node->next_sibling_clone)
1104 node = node->next_sibling_clone;
1107 while (node != orig && !node->next_sibling_clone)
1108 node = node->clone_of;
1110 node = node->next_sibling_clone;
1116 /* Remove all callees from the node. */
1119 cgraph_node_remove_callees (struct cgraph_node *node)
1121 struct cgraph_edge *e, *f;
1123 /* It is sufficient to remove the edges from the lists of callers of
1124 the callees. The callee list of the node can be zapped with one
1126 for (e = node->callees; e; e = f)
1129 cgraph_call_edge_removal_hooks (e);
1130 cgraph_edge_remove_callee (e);
1131 cgraph_free_edge (e);
1133 node->callees = NULL;
1134 if (node->call_site_hash)
1136 htab_delete (node->call_site_hash);
1137 node->call_site_hash = NULL;
1141 /* Remove all callers from the node. */
1144 cgraph_node_remove_callers (struct cgraph_node *node)
1146 struct cgraph_edge *e, *f;
1148 /* It is sufficient to remove the edges from the lists of callees of
1149 the callers. The caller list of the node can be zapped with one
1151 for (e = node->callers; e; e = f)
1154 cgraph_call_edge_removal_hooks (e);
1155 cgraph_edge_remove_caller (e);
1156 cgraph_free_edge (e);
1158 node->callers = NULL;
1161 /* Release memory used to represent body of function NODE. */
1164 cgraph_release_function_body (struct cgraph_node *node)
1166 if (DECL_STRUCT_FUNCTION (node->decl))
1168 tree old_decl = current_function_decl;
1169 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1170 if (cfun->gimple_df)
1172 current_function_decl = node->decl;
1174 delete_tree_cfg_annotations ();
1176 current_function_decl = old_decl;
1180 gcc_assert (dom_computed[0] == DOM_NONE);
1181 gcc_assert (dom_computed[1] == DOM_NONE);
1184 if (cfun->value_histograms)
1186 gcc_assert (!current_loops);
1188 gimple_set_body (node->decl, NULL);
1189 VEC_free (ipa_opt_pass, heap,
1190 node->ipa_transforms_to_apply);
1191 /* Struct function hangs a lot of data that would leak if we didn't
1192 removed all pointers to it. */
1193 ggc_free (DECL_STRUCT_FUNCTION (node->decl));
1194 DECL_STRUCT_FUNCTION (node->decl) = NULL;
1196 DECL_SAVED_TREE (node->decl) = NULL;
1197 /* If the node is abstract and needed, then do not clear DECL_INITIAL
1198 of its associated function function declaration because it's
1199 needed to emit debug info later. */
1200 if (!node->abstract_and_needed)
1201 DECL_INITIAL (node->decl) = error_mark_node;
1204 /* Remove same body alias node. */
1207 cgraph_remove_same_body_alias (struct cgraph_node *node)
1210 int uid = node->uid;
1212 gcc_assert (node->same_body_alias);
1214 node->previous->next = node->next;
1216 node->same_body->same_body = node->next;
1218 node->next->previous = node->previous;
1220 node->previous = NULL;
1221 slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
1223 htab_clear_slot (cgraph_hash, slot);
1224 if (assembler_name_hash)
1226 tree name = DECL_ASSEMBLER_NAME (node->decl);
1227 slot = htab_find_slot_with_hash (assembler_name_hash, name,
1228 decl_assembler_name_hash (name),
1230 if (slot && *slot == node)
1231 htab_clear_slot (assembler_name_hash, slot);
1234 /* Clear out the node to NULL all pointers and add the node to the free
1236 memset (node, 0, sizeof(*node));
1238 NEXT_FREE_NODE (node) = free_nodes;
1242 /* Remove the node from cgraph. */
1245 cgraph_remove_node (struct cgraph_node *node)
1248 bool kill_body = false;
1249 struct cgraph_node *n;
1250 int uid = node->uid;
1252 cgraph_call_node_removal_hooks (node);
1253 cgraph_node_remove_callers (node);
1254 cgraph_node_remove_callees (node);
1255 VEC_free (ipa_opt_pass, heap,
1256 node->ipa_transforms_to_apply);
1258 /* Incremental inlining access removed nodes stored in the postorder list.
1260 node->needed = node->reachable = false;
1261 for (n = node->nested; n; n = n->next_nested)
1263 node->nested = NULL;
1266 struct cgraph_node **node2 = &node->origin->nested;
1268 while (*node2 != node)
1269 node2 = &(*node2)->next_nested;
1270 *node2 = node->next_nested;
1273 node->previous->next = node->next;
1275 cgraph_nodes = node->next;
1277 node->next->previous = node->previous;
1279 node->previous = NULL;
1280 slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
1283 struct cgraph_node *next_inline_clone;
1285 for (next_inline_clone = node->clones;
1286 next_inline_clone && next_inline_clone->decl != node->decl;
1287 next_inline_clone = next_inline_clone->next_sibling_clone)
1290 /* If there is inline clone of the node being removed, we need
1291 to put it into the position of removed node and reorganize all
1292 other clones to be based on it. */
1293 if (next_inline_clone)
1295 struct cgraph_node *n;
1296 struct cgraph_node *new_clones;
1298 *slot = next_inline_clone;
1300 /* Unlink inline clone from the list of clones of removed node. */
1301 if (next_inline_clone->next_sibling_clone)
1302 next_inline_clone->next_sibling_clone->prev_sibling_clone
1303 = next_inline_clone->prev_sibling_clone;
1304 if (next_inline_clone->prev_sibling_clone)
1306 next_inline_clone->prev_sibling_clone->next_sibling_clone
1307 = next_inline_clone->next_sibling_clone;
1310 node->clones = next_inline_clone->next_sibling_clone;
1312 new_clones = node->clones;
1313 node->clones = NULL;
1315 /* Copy clone info. */
1316 next_inline_clone->clone = node->clone;
1318 /* Now place it into clone tree at same level at NODE. */
1319 next_inline_clone->clone_of = node->clone_of;
1320 next_inline_clone->prev_sibling_clone = NULL;
1321 next_inline_clone->next_sibling_clone = NULL;
1324 next_inline_clone->next_sibling_clone = node->clone_of->clones;
1325 node->clone_of->clones = next_inline_clone;
1328 /* Merge the clone list. */
1331 if (!next_inline_clone->clones)
1332 next_inline_clone->clones = new_clones;
1335 n = next_inline_clone->clones;
1336 while (n->next_sibling_clone)
1337 n = n->next_sibling_clone;
1338 n->next_sibling_clone = new_clones;
1339 new_clones->prev_sibling_clone = n;
1343 /* Update clone_of pointers. */
1347 n->clone_of = next_inline_clone;
1348 n = n->next_sibling_clone;
1353 htab_clear_slot (cgraph_hash, slot);
1359 gcc_assert (node->clone_of);
1360 if (node->prev_sibling_clone)
1361 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1362 else if (node->clone_of)
1363 node->clone_of->clones = node->next_sibling_clone;
1364 if (node->next_sibling_clone)
1365 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1368 struct cgraph_node *n;
1370 for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
1371 n->clone_of = node->clone_of;
1372 n->clone_of = node->clone_of;
1373 n->next_sibling_clone = node->clone_of->clones;
1374 if (node->clone_of->clones)
1375 node->clone_of->clones->prev_sibling_clone = n;
1376 node->clone_of->clones = node->clones;
1379 while (node->same_body)
1380 cgraph_remove_same_body_alias (node->same_body);
1382 /* While all the clones are removed after being proceeded, the function
1383 itself is kept in the cgraph even after it is compiled. Check whether
1384 we are done with this body and reclaim it proactively if this is the case.
1386 if (!kill_body && *slot)
1388 struct cgraph_node *n = (struct cgraph_node *) *slot;
1389 if (!n->clones && !n->clone_of && !n->global.inlined_to
1390 && (cgraph_global_info_ready
1391 && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl))))
1394 if (assembler_name_hash)
1396 tree name = DECL_ASSEMBLER_NAME (node->decl);
1397 slot = htab_find_slot_with_hash (assembler_name_hash, name,
1398 decl_assembler_name_hash (name),
1400 /* Inline clones are not hashed. */
1401 if (slot && *slot == node)
1402 htab_clear_slot (assembler_name_hash, slot);
1406 cgraph_release_function_body (node);
1408 if (node->call_site_hash)
1410 htab_delete (node->call_site_hash);
1411 node->call_site_hash = NULL;
1415 /* Clear out the node to NULL all pointers and add the node to the free
1417 memset (node, 0, sizeof(*node));
1419 NEXT_FREE_NODE (node) = free_nodes;
1423 /* Remove NODE from the cgraph together with every callee body that was
   inlined into it, recursively.  */
1426 cgraph_remove_node_and_inline_clones (struct cgraph_node *node)
1428 struct cgraph_edge *e, *next;
/* NEXT is cached up front because removing an inlined callee unlinks
   E from NODE's callee list.  */
1429 for (e = node->callees; e; e = next)
1431 next = e->next_callee;
/* inline_failed == NULL marks an edge whose call was inlined; the
   callee node is then an inline copy owned by NODE and must be
   removed along with it.  */
1432 if (!e->inline_failed)
1433 cgraph_remove_node_and_inline_clones (e->callee);
1435 cgraph_remove_node (node);
1438 /* Notify finalize_compilation_unit that given node is reachable. */
1441 cgraph_mark_reachable_node (struct cgraph_node *node)
/* Only finalized nodes that are not already marked get queued.  */
1443 if (!node->reachable && node->local.finalized)
1445 notice_global_symbol (node->decl);
1446 node->reachable = 1;
/* The reachability work queue only exists while the callgraph is
   still being constructed.  */
1447 gcc_assert (!cgraph_global_info_ready);
/* Push NODE onto the head of the global work queue.  */
1449 node->next_needed = cgraph_nodes_queue;
1450 cgraph_nodes_queue = node;
1454 /* Likewise indicate that a node is needed, i.e. reachable via some
   external means (for instance as an entry point or via its address).
   NOTE(review): the statement setting node->needed appears to be elided
   from this excerpt -- confirm against the full source.  */
1458 cgraph_mark_needed_node (struct cgraph_node *node)
/* A needed node can never be an inline copy of another node.  */
1461 gcc_assert (!node->global.inlined_to);
1462 cgraph_mark_reachable_node (node);
1465 /* Likewise indicate that a node is having address taken. */
1468 cgraph_mark_address_taken_node (struct cgraph_node *node)
1470 node->address_taken = 1;
/* An address-taken function may be called invisibly, so it is also
   needed (which in turn marks it reachable).  */
1471 cgraph_mark_needed_node (node);
1474 /* Return local info for the compiled function. */
1476 struct cgraph_local_info *
1477 cgraph_local_info (tree decl)
1479 struct cgraph_node *node;
1481 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
/* cgraph_node lazily creates the node for DECL if none exists yet.  */
1482 node = cgraph_node (decl);
1483 return &node->local;
1486 /* Return global info for the compiled function.  */
1488 struct cgraph_global_info *
1489 cgraph_global_info (tree decl)
1491 struct cgraph_node *node;
/* Global info is only meaningful once whole-callgraph analysis has
   completed, hence the cgraph_global_info_ready check.  */
1493 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
1494 node = cgraph_node (decl);
1495 return &node->global;
1498 /* Return RTL info for the compiled function.  */
1500 struct cgraph_rtl_info *
1501 cgraph_rtl_info (tree decl)
1503 struct cgraph_node *node;
1505 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1506 node = cgraph_node (decl);
/* RTL info is only valid for the function currently being compiled or
   for functions already written out.  NOTE(review): the return
   statements for both branches are elided from this excerpt --
   presumably NULL here and &node->rtl otherwise; confirm.  */
1507 if (decl != current_function_decl
1508 && !TREE_ASM_WRITTEN (node->decl))
1513 /* Return a string describing the failure REASON. */
1516 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
/* The message table is generated from cif-code.def: each DEFCIFCODE
   entry contributes its string in enum order.  */
1519 #define DEFCIFCODE(code, string) string,
1521 static const char *cif_string_table[CIF_N_REASONS] = {
1522 #include "cif-code.def"
1525 /* Signedness of an enum type is implementation defined, so cast it
1526 to unsigned before testing. */
1527 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1528 return cif_string_table[reason];
1531 /* Return name of the node used in debug output. */
1533 cgraph_node_name (struct cgraph_node *node)
/* The 2 is the verbosity argument of the langhook -- presumably a
   moderately qualified name; see decl_printable_name.  */
1535 return lang_hooks.decl_printable_name (node->decl, 2);
1538 /* Names used to print out the availability enum.  NOTE(review): entry
   order must match enum availability -- verify against cgraph.h.  */
1539 const char * const cgraph_availability_names[] =
1540 {"unset", "not_available", "overwritable", "available", "local"};
1543 /* Dump call graph node NODE to file F. */
1546 dump_cgraph_node (FILE *f, struct cgraph_node *node)
1548 struct cgraph_edge *edge;
/* Header: printable name, uid, and node address, followed by
   attribute annotations.  */
1549 fprintf (f, "%s/%i(%i)", cgraph_node_name (node), node->uid,
1551 dump_addr (f, " @", (void *)node);
1552 if (node->global.inlined_to)
1553 fprintf (f, " (inline copy in %s/%i)",
1554 cgraph_node_name (node->global.inlined_to),
1555 node->global.inlined_to->uid);
1557 fprintf (f, " (clone of %s/%i)",
1558 cgraph_node_name (node->clone_of),
1559 node->clone_of->uid);
/* Availability is only computable after function flags are ready.  */
1560 if (cgraph_function_flags_ready)
1561 fprintf (f, " availability:%s",
1562 cgraph_availability_names [cgraph_function_body_availability (node)]);
1564 fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
1565 (HOST_WIDEST_INT)node->count)
1566 if (node->local.inline_summary.self_time)
1567 fprintf (f, " %i time, %i benefit", node->local.inline_summary.self_time,
1568 node->local.inline_summary.time_inlining_benefit);
/* Show the post-inlining time only when it differs from self time.  */
1569 if (node->global.time && node->global.time
1570 != node->local.inline_summary.self_time)
1571 fprintf (f, " (%i after inlining)", node->global.time);
1572 if (node->local.inline_summary.self_size)
1573 fprintf (f, " %i size, %i benefit", node->local.inline_summary.self_size,
1574 node->local.inline_summary.size_inlining_benefit);
1575 if (node->global.size && node->global.size
1576 != node->local.inline_summary.self_size)
1577 fprintf (f, " (%i after inlining)", node->global.size);
1578 if (node->local.inline_summary.estimated_self_stack_size)
1579 fprintf (f, " %i bytes stack usage", (int)node->local.inline_summary.estimated_self_stack_size);
1580 if (node->global.estimated_stack_size != node->local.inline_summary.estimated_self_stack_size)
1581 fprintf (f, " %i bytes after inlining", (int)node->global.estimated_stack_size);
1583 fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
1585 fprintf (f, " needed");
1586 if (node->address_taken)
1587 fprintf (f, " address_taken");
1588 else if (node->reachable)
1589 fprintf (f, " reachable");
1590 if (gimple_has_body_p (node->decl))
1591 fprintf (f, " body");
1593 fprintf (f, " process");
1594 if (node->local.local)
1595 fprintf (f, " local");
1596 if (node->local.externally_visible)
1597 fprintf (f, " externally_visible");
1598 if (node->local.finalized)
1599 fprintf (f, " finalized");
1600 if (node->local.disregard_inline_limits)
1601 fprintf (f, " always_inline");
1602 else if (node->local.inlinable)
1603 fprintf (f, " inlinable");
1604 if (node->local.redefined_extern_inline)
1605 fprintf (f, " redefined_extern_inline");
1606 if (TREE_ASM_WRITTEN (node->decl))
1607 fprintf (f, " asm_written");
/* Dump every caller edge with its count/frequency/flag annotations.  */
1609 fprintf (f, "\n called by: ");
1610 for (edge = node->callers; edge; edge = edge->next_caller)
1612 fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
1615 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1616 (HOST_WIDEST_INT)edge->count);
1617 if (edge->frequency)
1618 fprintf (f, "(%.2f per call) ",
1619 edge->frequency / (double)CGRAPH_FREQ_BASE);
1620 if (!edge->inline_failed)
1621 fprintf(f, "(inlined) ");
1622 if (edge->indirect_call)
1623 fprintf(f, "(indirect) ");
1624 if (edge->can_throw_external)
1625 fprintf(f, "(can throw external) ");
/* Dump every callee edge with the same annotations.  */
1628 fprintf (f, "\n calls: ");
1629 for (edge = node->callees; edge; edge = edge->next_callee)
1631 fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
1633 if (!edge->inline_failed)
1634 fprintf(f, "(inlined) ");
1635 if (edge->indirect_call)
1636 fprintf(f, "(indirect) ");
1638 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1639 (HOST_WIDEST_INT)edge->count);
1640 if (edge->frequency)
1641 fprintf (f, "(%.2f per call) ",
1642 edge->frequency / (double)CGRAPH_FREQ_BASE);
1643 if (edge->loop_nest)
1644 fprintf (f, "(nested in %i loops) ", edge->loop_nest);
1645 if (edge->can_throw_external)
1646 fprintf(f, "(can throw external) ");
1652 /* Dump call graph node NODE to stderr.  Convenience wrapper for use
   from a debugger.  */
1655 debug_cgraph_node (struct cgraph_node *node)
1657 dump_cgraph_node (stderr, node);
1661 /* Dump the callgraph to file F. */
1664 dump_cgraph (FILE *f)
1666 struct cgraph_node *node;
1668 fprintf (f, "callgraph:\n\n");
/* Walk the global list of all callgraph nodes and dump each one.  */
1669 for (node = cgraph_nodes; node; node = node->next)
1670 dump_cgraph_node (f, node);
1674 /* Dump the call graph to stderr.  NOTE(review): the function signature
   (presumably debug_cgraph (void)) is elided from this excerpt.  */
1679 dump_cgraph (stderr);
1683 /* Set the DECL_ASSEMBLER_NAME and update cgraph hashtables. */
1686 change_decl_assembler_name (tree decl, tree name)
/* Renaming after the assembler-name hash has been built would leave a
   stale entry behind.  */
1688 gcc_assert (!assembler_name_hash);
1689 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
1691 SET_DECL_ASSEMBLER_NAME (decl, name);
/* Nothing to do if the name does not actually change.  */
1694 if (name == DECL_ASSEMBLER_NAME (decl))
/* Warn when the symbol was already referenced under the old name.  */
1697 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
1698 && DECL_RTL_SET_P (decl))
1699 warning (0, "%D renamed after being referenced in assembly", decl);
1701 SET_DECL_ASSEMBLER_NAME (decl, name);
1704 /* Add a top-level asm statement to the list. */
1706 struct cgraph_asm_node *
1707 cgraph_add_asm_node (tree asm_str)
1709 struct cgraph_asm_node *node;
1711 node = GGC_CNEW (struct cgraph_asm_node);
1712 node->asm_str = asm_str;
/* Record global ordering so asm statements are emitted in source
   order relative to functions/variables.  */
1713 node->order = cgraph_order++;
/* Append to the singly linked list, tracked by a tail pointer.  */
1715 if (cgraph_asm_nodes == NULL)
1716 cgraph_asm_nodes = node;
1718 cgraph_asm_last_node->next = node;
1719 cgraph_asm_last_node = node;
1723 /* Return true when the DECL can possibly be inlined. */
1725 cgraph_function_possibly_inlined_p (tree decl)
/* Before global info is ready we can only rely on the frontend flag;
   afterwards the inliner has recorded the definitive answer.  */
1727 if (!cgraph_global_info_ready)
1728 return !DECL_UNINLINABLE (decl);
1729 return DECL_POSSIBLY_INLINED (decl);
1732 /* Create clone of E in the node N represented by CALL_EXPR the callgraph. */
1733 struct cgraph_edge *
1734 cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
1735 gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
1736 int freq_scale, int loop_nest, bool update_original)
1738 struct cgraph_edge *new_edge;
/* Scale the profile count of the original edge by COUNT_SCALE
   (expressed relative to REG_BR_PROB_BASE).  */
1739 gcov_type count = e->count * count_scale / REG_BR_PROB_BASE;
1742 /* We do not want to ignore loop nest after frequency drops to 0. */
1745 freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
/* Clamp the scaled frequency into the representable range.  */
1746 if (freq > CGRAPH_FREQ_MAX)
1747 freq = CGRAPH_FREQ_MAX;
1748 new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq,
1749 e->loop_nest + loop_nest);
/* Copy over per-edge state from the original edge.  */
1751 new_edge->inline_failed = e->inline_failed;
1752 new_edge->indirect_call = e->indirect_call;
1753 new_edge->lto_stmt_uid = stmt_uid;
/* When requested, subtract the cloned counts from the original edge
   so the total profile stays consistent.  */
1754 if (update_original)
1756 e->count -= new_edge->count;
1760 cgraph_call_edge_duplication_hooks (e, new_edge);
1764 /* Create node representing clone of N executed COUNT times. Decrease
1765 the execution counts from original node too.
1767 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
1768 function's profile to reflect the fact that part of execution is handled
   by the clone.  */
1770 struct cgraph_node *
1771 cgraph_clone_node (struct cgraph_node *n, gcov_type count, int freq,
1772 int loop_nest, bool update_original,
1773 VEC(cgraph_edge_p,heap) *redirect_callers)
1775 struct cgraph_node *new_node = cgraph_create_node ();
1776 struct cgraph_edge *e;
1777 gcov_type count_scale;
/* Share the declaration and copy per-node state from N.  */
1780 new_node->decl = n->decl;
1781 new_node->origin = n->origin;
/* A clone of a nested function is itself nested in the same origin.  */
1782 if (new_node->origin)
1784 new_node->next_nested = new_node->origin->nested;
1785 new_node->origin->nested = new_node;
1787 new_node->analyzed = n->analyzed;
1788 new_node->local = n->local;
/* The clone is private to this translation unit.  */
1789 new_node->local.externally_visible = false;
1790 new_node->global = n->global;
1791 new_node->rtl = n->rtl;
1792 new_node->count = count;
1793 new_node->clone = n->clone;
/* Compute the edge-count scale factor (relative to REG_BR_PROB_BASE),
   capping at 1.0 when the clone claims more counts than N has.  */
1796 if (new_node->count > n->count)
1797 count_scale = REG_BR_PROB_BASE;
1799 count_scale = new_node->count * REG_BR_PROB_BASE / n->count;
1803 if (update_original)
/* Redirect the requested caller edges to the new node.  */
1810 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1812 /* Redirect calls to the old version node to point to its new
   version.  */
1814 cgraph_redirect_edge_callee (e, new_node);
/* Clone every outgoing call edge with scaled profile data.  */
1818 for (e = n->callees;e; e=e->next_callee)
1819 cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
1820 count_scale, freq, loop_nest, update_original);
/* Link the new node into N's clone tree as its newest child.  */
1822 new_node->next_sibling_clone = n->clones;
1824 n->clones->prev_sibling_clone = new_node;
1825 n->clones = new_node;
1826 new_node->clone_of = n;
1828 cgraph_call_node_duplication_hooks (n, new_node);
1832 /* Create a new name for omp child function. Returns an identifier. */
1834 static GTY(()) unsigned int clone_fn_id_num;
1837 clone_function_name (tree decl)
1839 tree name = DECL_ASSEMBLER_NAME (decl);
1840 size_t len = IDENTIFIER_LENGTH (name);
1841 char *tmp_name, *prefix;
/* Build "<assembler name>_clone" as the private-name prefix.  */
1843 prefix = XALLOCAVEC (char, len + strlen ("_clone") + 1);
1844 memcpy (prefix, IDENTIFIER_POINTER (name), len);
1845 strcpy (prefix + len, "_clone");
/* Pick the label separator the target assembler supports.  */
1846 #ifndef NO_DOT_IN_LABEL
1848 #elif !defined NO_DOLLAR_IN_LABEL
/* Append a unique per-compilation counter to make the name unique.  */
1851 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
1852 return get_identifier (tmp_name);
1855 /* Create callgraph node clone with new declaration. The actual body will
1856 be copied later at compilation stage.
1858 TODO: after merging in ipa-sra use function call notes instead of args_to_skip
   bitmap interface.  */
1861 struct cgraph_node *
1862 cgraph_create_virtual_clone (struct cgraph_node *old_node,
1863 VEC(cgraph_edge_p,heap) *redirect_callers,
1864 VEC(ipa_replace_map_p,gc) *tree_map,
1865 bitmap args_to_skip)
1867 tree old_decl = old_node->decl;
1868 struct cgraph_node *new_node = NULL;
1870 struct cgraph_node key, **slot;
1872 gcc_assert (tree_versionable_function_p (old_decl));
1874 /* Make a new FUNCTION_DECL tree node.  NOTE(review): the branch
   selecting between the two constructions below (presumably on
   args_to_skip being NULL) is elided from this excerpt.  */
1876 new_decl = copy_node (old_decl);
1878 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
1879 DECL_STRUCT_FUNCTION (new_decl) = NULL;
1881 /* Generate a new name for the new version. */
1882 DECL_NAME (new_decl) = clone_function_name (old_decl);
1883 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
1884 SET_DECL_RTL (new_decl, NULL);
1886 new_node = cgraph_clone_node (old_node, old_node->count,
1887 CGRAPH_FREQ_BASE, 0, false,
1889 new_node->decl = new_decl;
1890 /* Update the properties.
1891 Make clone visible only within this translation unit. Make sure
1892 that is not weak also.
1893 ??? We cannot use COMDAT linkage because there is no
1894 ABI support for this. */
1895 DECL_EXTERNAL (new_node->decl) = 0;
1896 DECL_COMDAT_GROUP (new_node->decl) = 0;
1897 TREE_PUBLIC (new_node->decl) = 0;
1898 DECL_COMDAT (new_node->decl) = 0;
1899 DECL_WEAK (new_node->decl) = 0;
1900 new_node->clone.tree_map = tree_map;
1901 new_node->clone.args_to_skip = args_to_skip;
/* Combine the new args_to_skip with whatever the old node already
   skipped, translating bit positions through the original argument
   list of the root clone.  */
1903 new_node->clone.combined_args_to_skip = old_node->clone.combined_args_to_skip;
1904 else if (old_node->clone.combined_args_to_skip)
1906 int newi = 0, oldi = 0;
1908 bitmap new_args_to_skip = BITMAP_GGC_ALLOC ();
1909 struct cgraph_node *orig_node;
/* Find the root of the clone tree to get the original arguments.  */
1910 for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
1912 for (arg = DECL_ARGUMENTS (orig_node->decl); arg; arg = TREE_CHAIN (arg), oldi++)
/* OLDI indexes original arguments, NEWI the already-reduced list.  */
1914 if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
1916 bitmap_set_bit (new_args_to_skip, oldi);
1919 if (bitmap_bit_p (args_to_skip, newi))
1920 bitmap_set_bit (new_args_to_skip, oldi);
1923 new_node->clone.combined_args_to_skip = new_args_to_skip;
1926 new_node->clone.combined_args_to_skip = args_to_skip;
1927 new_node->local.externally_visible = 0;
1928 new_node->local.local = 1;
1929 new_node->lowered = true;
1930 new_node->reachable = true;
/* Register the new declaration in the decl->node hash table.  */
1932 key.decl = new_decl;
1933 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
1934 gcc_assert (!*slot);
/* Also register it in the assembler-name hash if one exists.  */
1936 if (assembler_name_hash)
1939 tree name = DECL_ASSEMBLER_NAME (new_decl);
1941 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
1942 decl_assembler_name_hash (name),
1944 gcc_assert (!*aslot);
1951 /* NODE is no longer nested function; update cgraph accordingly. */
1953 cgraph_unnest_node (struct cgraph_node *node)
1955 struct cgraph_node **node2 = &node->origin->nested;
1956 gcc_assert (node->origin);
1958 while (*node2 != node)
1959 node2 = &(*node2)->next_nested;
1960 *node2 = node->next_nested;
1961 node->origin = NULL;
1964 /* Return function availability. See cgraph.h for description of individual
   availability levels.  */
1967 cgraph_function_body_availability (struct cgraph_node *node)
1969 enum availability avail;
1970 gcc_assert (cgraph_function_flags_ready);
/* No analyzed body means nothing to rely on.  */
1971 if (!node->analyzed)
1972 avail = AVAIL_NOT_AVAILABLE;
1973 else if (node->local.local)
1974 avail = AVAIL_LOCAL;
/* Not externally visible: the body cannot be replaced at link time.  */
1975 else if (!node->local.externally_visible)
1976 avail = AVAIL_AVAILABLE;
1977 /* Inline functions are safe to be analyzed even if their symbol can
1978 be overwritten at runtime. It is not meaningful to enforce any sane
1979 behaviour on replacing inline function by different body. */
1980 else if (DECL_DECLARED_INLINE_P (node->decl))
1981 avail = AVAIL_AVAILABLE;
1983 /* If the function can be overwritten, return OVERWRITABLE. Take
1984 care at least of two notable extensions - the COMDAT functions
1985 used to share template instantiations in C++ (this is symmetric
1986 to code cp_cannot_inline_tree_fn and probably shall be shared and
1987 the inlinability hooks completely eliminated).
1989 ??? Does the C++ one definition rule allow us to always return
1990 AVAIL_AVAILABLE here? That would be good reason to preserve this
   property.  */
1993 else if (DECL_REPLACEABLE_P (node->decl) && !DECL_EXTERNAL (node->decl))
1994 avail = AVAIL_OVERWRITABLE;
1995 else avail = AVAIL_AVAILABLE;
2000 /* Add the function FNDECL to the call graph.
2001 Unlike cgraph_finalize_function, this function is intended to be used
2002 by middle end and allows insertion of new function at arbitrary point
2003 of compilation. The function can be either in high, low or SSA form
   of GIMPLE.
2006 The function is assumed to be reachable and have address taken (so no
2007 API breaking optimizations are performed on it).
2009 Main work done by this function is to enqueue the function for later
2010 processing to avoid need the passes to be re-entrant. */
2013 cgraph_add_new_function (tree fndecl, bool lowered)
2015 struct cgraph_node *node;
/* How much work is needed depends on how far compilation has
   progressed overall.  */
2016 switch (cgraph_state)
2018 case CGRAPH_STATE_CONSTRUCTION:
2019 /* Just enqueue function to be processed at nearest occurrence. */
2020 node = cgraph_node (fndecl);
2021 node->next_needed = cgraph_new_nodes;
2023 node->lowered = true;
2024 cgraph_new_nodes = node;
2027 case CGRAPH_STATE_IPA:
2028 case CGRAPH_STATE_IPA_SSA:
2029 case CGRAPH_STATE_EXPANSION:
2030 /* Bring the function into finalized state and enqueue for later
2031 analyzing and compilation. */
2032 node = cgraph_node (fndecl);
2033 node->local.local = false;
2034 node->local.finalized = true;
2035 node->reachable = node->needed = true;
/* If the body is not lowered yet, run lowering and the early local
   passes now, inside the function's own cfun context.  */
2036 if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
2038 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
2039 current_function_decl = fndecl;
2040 gimple_register_cfg_hooks ();
2041 tree_lowering_passes (fndecl);
2042 bitmap_obstack_initialize (NULL);
2043 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
2044 execute_pass_list (pass_early_local_passes.pass.sub);
2045 bitmap_obstack_release (NULL);
2047 current_function_decl = NULL;
2052 node->lowered = true;
2053 node->next_needed = cgraph_new_nodes;
2054 cgraph_new_nodes = node;
2057 case CGRAPH_STATE_FINISHED:
2058 /* At the very end of compilation we have to do all the work up
   to expansion immediately.  */
2060 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
2061 current_function_decl = fndecl;
2062 gimple_register_cfg_hooks ();
2064 tree_lowering_passes (fndecl);
2065 bitmap_obstack_initialize (NULL);
2066 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
2067 execute_pass_list (pass_early_local_passes.pass.sub);
2068 bitmap_obstack_release (NULL);
2069 tree_rest_of_compilation (fndecl);
2071 current_function_decl = NULL;
2075 /* Set a personality if required and we already passed EH lowering.
   NOTE(review): the opening of this condition is elided from this
   excerpt -- confirm against the full source.  */
2077 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
2078 == eh_personality_lang))
2079 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
2082 /* Return true if NODE can be made local for API change.
2083 Extern inline functions and C++ COMDAT functions can be made local
2084 at the expense of possible code size growth if function is used in multiple
2085 compilation units. */
2087 cgraph_node_can_be_local_p (struct cgraph_node *node)
/* A node is localizable if it is not needed externally and is either
   COMDAT or already not externally visible.  */
2089 return (!node->needed
2090 && (DECL_COMDAT (node->decl) || !node->local.externally_visible));
2093 /* Bring NODE local. */
2095 cgraph_make_node_local (struct cgraph_node *node)
2097 gcc_assert (cgraph_node_can_be_local_p (node));
/* Strip every linkage property that makes the symbol visible or
   replaceable outside this translation unit.  */
2098 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2100 DECL_COMDAT (node->decl) = 0;
2101 DECL_COMDAT_GROUP (node->decl) = 0;
2102 TREE_PUBLIC (node->decl) = 0;
2103 DECL_WEAK (node->decl) = 0;
2104 DECL_EXTERNAL (node->decl) = 0;
2105 node->local.externally_visible = false;
2106 node->local.local = true;
/* Sanity check: the node must now report local availability.  */
2107 gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
2111 #include "gt-cgraph.h"