1 /* Callgraph handling code.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file contains basic routines manipulating call graph
26 The call-graph is data structure designed for intra-procedural optimization
27 but it is also used in non-unit-at-a-time compilation to allow easier code
30 The call-graph consist of nodes and edges represented via linked lists.
31 Each function (external or not) corresponds to the unique node.
33 The mapping from declarations to call-graph nodes is done using hash table
34 based on DECL_UID. The call-graph nodes are created lazily using
35 cgraph_node function when called for unknown declaration.
37 The callgraph at the moment does not represent all indirect calls or calls
38 from other compilation units. Flag NEEDED is set for each node that may be
39 accessed in such an invisible way and it shall be considered an entry point
42 On the other hand, the callgraph currently does contain some edges for
43 indirect calls with unknown callees which can be accessed through
44 indirect_calls field of a node. It should be noted however that at the
45 moment only calls which are potential candidates for indirect inlining are
48 Interprocedural information:
50 Callgraph is place to store data needed for interprocedural optimization.
51 All data structures are divided into three components: local_info that
52 is produced while analyzing the function, global_info that is result
53 of global walking of the callgraph on the end of compilation and
54 rtl_info used by RTL backend to propagate data from already compiled
55 functions to their callers.
57 Moreover, each node has a uid which can be used to keep information in
58 on-the-side arrays. UIDs are reused and therefore reasonably dense.
62 The function inlining information is decided in advance and maintained
63 in the callgraph as so called inline plan.
64 For each inlined call, the callee's node is cloned to represent the
65 new function copy produced by inliner.
66 Each inlined call gets a unique corresponding clone node of the callee
67 and the data structure is updated while inlining is performed, so
68 the clones are eliminated and their callee edges redirected to the
71 Each edge has "inline_failed" field. When the field is set to NULL,
72 the call will be inlined. When it is non-NULL it contains a reason
73 why inlining wasn't performed. */
77 #include "coretypes.h"
80 #include "tree-inline.h"
81 #include "langhooks.h"
88 #include "basic-block.h"
93 #include "tree-dump.h"
94 #include "tree-flow.h"
95 #include "value-prof.h"
97 #include "diagnostic-core.h"
99 #include "ipa-utils.h"
101 static void cgraph_node_remove_callers (struct cgraph_node *node);
102 static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
103 static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
105 /* Hash table used to convert declarations into nodes. */
106 static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;
107 /* Hash table used to convert assembler names into nodes. */
108 static GTY((param_is (struct cgraph_node))) htab_t assembler_name_hash;
110 /* The linked list of cgraph nodes. */
111 struct cgraph_node *cgraph_nodes;
113 /* Queue of cgraph nodes scheduled to be lowered. */
114 struct cgraph_node *cgraph_nodes_queue;
116 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
117 secondary queue used during optimization to accommodate passes that
118 may generate new functions that need to be optimized and expanded. */
119 struct cgraph_node *cgraph_new_nodes;
121 /* Number of nodes in existence. */
124 /* Maximal uid used in cgraph nodes. */
127 /* Maximal uid used in cgraph edges. */
128 int cgraph_edge_max_uid;
130 /* Maximal pid used for profiling */
133 /* Set when whole unit has been analyzed so we can access global info. */
134 bool cgraph_global_info_ready = false;
136 /* What state callgraph is in right now. */
137 enum cgraph_state cgraph_state = CGRAPH_STATE_CONSTRUCTION;
139 /* Set when the cgraph is fully built and the basic flags are computed. */
140 bool cgraph_function_flags_ready = false;
142 /* Linked list of cgraph asm nodes. */
143 struct cgraph_asm_node *cgraph_asm_nodes;
145 /* Last node in cgraph_asm_nodes. */
146 static GTY(()) struct cgraph_asm_node *cgraph_asm_last_node;
148 /* The order index of the next cgraph node to be created. This is
149 used so that we can sort the cgraph nodes in order by when we saw
150 them, to support -fno-toplevel-reorder. */
153 /* List of hooks triggered on cgraph_edge events. */
154 struct cgraph_edge_hook_list {
155 cgraph_edge_hook hook;
157 struct cgraph_edge_hook_list *next;
160 /* List of hooks triggered on cgraph_node events. */
161 struct cgraph_node_hook_list {
162 cgraph_node_hook hook;
164 struct cgraph_node_hook_list *next;
167 /* List of hooks triggered on events involving two cgraph_edges. */
168 struct cgraph_2edge_hook_list {
169 cgraph_2edge_hook hook;
171 struct cgraph_2edge_hook_list *next;
174 /* List of hooks triggered on events involving two cgraph_nodes. */
175 struct cgraph_2node_hook_list {
176 cgraph_2node_hook hook;
178 struct cgraph_2node_hook_list *next;
181 /* List of hooks triggered when an edge is removed. */
182 struct cgraph_edge_hook_list *first_cgraph_edge_removal_hook;
183 /* List of hooks triggered when a node is removed. */
184 struct cgraph_node_hook_list *first_cgraph_node_removal_hook;
185 /* List of hooks triggered when an edge is duplicated. */
186 struct cgraph_2edge_hook_list *first_cgraph_edge_duplicated_hook;
187 /* List of hooks triggered when a node is duplicated. */
188 struct cgraph_2node_hook_list *first_cgraph_node_duplicated_hook;
189 /* List of hooks triggered when a function is inserted. */
190 struct cgraph_node_hook_list *first_cgraph_function_insertion_hook;
192 /* Head of a linked list of unused (freed) call graph nodes.
193 Do not GTY((delete)) this list so UIDs get reliably recycled. */
194 static GTY(()) struct cgraph_node *free_nodes;
195 /* Head of a linked list of unused (freed) call graph edges.
196 Do not GTY((delete)) this list so UIDs get reliably recycled. */
197 static GTY(()) struct cgraph_edge *free_edges;
199 /* Macros to access the next item in the list of free cgraph nodes and
201 #define NEXT_FREE_NODE(NODE) (NODE)->next
202 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
204 /* Register HOOK to be called with DATA on each removed edge. */
205 struct cgraph_edge_hook_list *
206 cgraph_add_edge_removal_hook (cgraph_edge_hook hook, void *data)
208 struct cgraph_edge_hook_list *entry;
209 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
211 entry = (struct cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
221 /* Remove ENTRY from the list of hooks called on removing edges. */
223 cgraph_remove_edge_removal_hook (struct cgraph_edge_hook_list *entry)
225 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
227 while (*ptr != entry)
233 /* Call all edge removal hooks. */
235 cgraph_call_edge_removal_hooks (struct cgraph_edge *e)
237 struct cgraph_edge_hook_list *entry = first_cgraph_edge_removal_hook;
240 entry->hook (e, entry->data);
245 /* Register HOOK to be called with DATA on each removed node. */
246 struct cgraph_node_hook_list *
247 cgraph_add_node_removal_hook (cgraph_node_hook hook, void *data)
249 struct cgraph_node_hook_list *entry;
250 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
252 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
262 /* Remove ENTRY from the list of hooks called on removing nodes. */
264 cgraph_remove_node_removal_hook (struct cgraph_node_hook_list *entry)
266 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
268 while (*ptr != entry)
274 /* Call all node removal hooks. */
276 cgraph_call_node_removal_hooks (struct cgraph_node *node)
278 struct cgraph_node_hook_list *entry = first_cgraph_node_removal_hook;
281 entry->hook (node, entry->data);
286 /* Register HOOK to be called with DATA on each inserted node. */
287 struct cgraph_node_hook_list *
288 cgraph_add_function_insertion_hook (cgraph_node_hook hook, void *data)
290 struct cgraph_node_hook_list *entry;
291 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
293 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
303 /* Remove ENTRY from the list of hooks called on inserted nodes. */
305 cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *entry)
307 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
309 while (*ptr != entry)
315 /* Call all node insertion hooks. */
317 cgraph_call_function_insertion_hooks (struct cgraph_node *node)
319 struct cgraph_node_hook_list *entry = first_cgraph_function_insertion_hook;
322 entry->hook (node, entry->data);
327 /* Register HOOK to be called with DATA on each duplicated edge. */
328 struct cgraph_2edge_hook_list *
329 cgraph_add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
331 struct cgraph_2edge_hook_list *entry;
332 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
334 entry = (struct cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
344 /* Remove ENTRY from the list of hooks called on duplicating edges. */
346 cgraph_remove_edge_duplication_hook (struct cgraph_2edge_hook_list *entry)
348 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
350 while (*ptr != entry)
356 /* Call all edge duplication hooks. */
358 cgraph_call_edge_duplication_hooks (struct cgraph_edge *cs1,
359 struct cgraph_edge *cs2)
361 struct cgraph_2edge_hook_list *entry = first_cgraph_edge_duplicated_hook;
364 entry->hook (cs1, cs2, entry->data);
369 /* Register HOOK to be called with DATA on each duplicated node. */
370 struct cgraph_2node_hook_list *
371 cgraph_add_node_duplication_hook (cgraph_2node_hook hook, void *data)
373 struct cgraph_2node_hook_list *entry;
374 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
376 entry = (struct cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
386 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
388 cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list *entry)
390 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
392 while (*ptr != entry)
398 /* Call all node duplication hooks. */
400 cgraph_call_node_duplication_hooks (struct cgraph_node *node1,
401 struct cgraph_node *node2)
403 struct cgraph_2node_hook_list *entry = first_cgraph_node_duplicated_hook;
406 entry->hook (node1, node2, entry->data);
411 /* Returns a hash code for P. */
/* P is really a struct cgraph_node *; nodes hash on the DECL_UID of
   their declaration, matching the equality test in eq_node.  */
414 hash_node (const void *p)
416 const struct cgraph_node *n = (const struct cgraph_node *) p;
417 return (hashval_t) DECL_UID (n->decl);
421 /* Returns nonzero if P1 and P2 are equal. */
/* Both arguments are really struct cgraph_node *; two nodes compare
   equal when their declarations share the same DECL_UID.  */
424 eq_node (const void *p1, const void *p2)
426 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
427 const struct cgraph_node *n2 = (const struct cgraph_node *) p2;
428 return DECL_UID (n1->decl) == DECL_UID (n2->decl);
431 /* Allocate new callgraph node. */
433 static inline struct cgraph_node *
434 cgraph_allocate_node (void)
436 struct cgraph_node *node;
441 free_nodes = NEXT_FREE_NODE (node);
445 node = ggc_alloc_cleared_cgraph_node ();
446 node->uid = cgraph_max_uid++;
452 /* Allocate new callgraph node and insert it into basic data structures. */
454 static struct cgraph_node *
455 cgraph_create_node (void)
457 struct cgraph_node *node = cgraph_allocate_node ();
459 node->next = cgraph_nodes;
461 node->order = cgraph_order++;
463 cgraph_nodes->previous = node;
464 node->previous = NULL;
465 node->global.estimated_growth = INT_MIN;
466 node->frequency = NODE_FREQUENCY_NORMAL;
467 ipa_empty_ref_list (&node->ref_list);
473 /* Return cgraph node assigned to DECL. Create new one when needed. */
476 cgraph_node (tree decl)
478 struct cgraph_node key, *node, **slot;
480 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
483 cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);
487 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
492 if (node->same_body_alias)
493 node = node->same_body;
497 node = cgraph_create_node ();
500 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
502 node->origin = cgraph_node (DECL_CONTEXT (decl));
503 node->next_nested = node->origin->nested;
504 node->origin->nested = node;
506 if (assembler_name_hash)
509 tree name = DECL_ASSEMBLER_NAME (decl);
511 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
512 decl_assembler_name_hash (name),
514 /* We can have multiple declarations with the same assembler name. For C++
515 it is __builtin_strlen and strlen, for instance. Do we need to
516 record them all? The original implementation marked just the first one,
517 so let's hope for the best. */
524 /* Mark ALIAS as an alias to DECL. */
526 static struct cgraph_node *
527 cgraph_same_body_alias_1 (tree alias, tree decl)
529 struct cgraph_node key, *alias_node, *decl_node, **slot;
531 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
532 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
533 decl_node = cgraph_node (decl);
537 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
539 /* If the cgraph_node has been already created, fail. */
543 alias_node = cgraph_allocate_node ();
544 alias_node->decl = alias;
545 alias_node->same_body_alias = 1;
546 alias_node->same_body = decl_node;
547 alias_node->previous = NULL;
548 if (decl_node->same_body)
549 decl_node->same_body->previous = alias_node;
550 alias_node->next = decl_node->same_body;
551 alias_node->thunk.alias = decl;
552 decl_node->same_body = alias_node;
557 /* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
558 Same body aliases are output whenever the body of DECL is output,
559 and cgraph_node (ALIAS) transparently returns cgraph_node (DECL). */
562 cgraph_same_body_alias (tree alias, tree decl)
564 #ifndef ASM_OUTPUT_DEF
565 /* If aliases aren't supported by the assembler, fail. */
569 /*gcc_assert (!assembler_name_hash);*/
571 return cgraph_same_body_alias_1 (alias, decl) != NULL;
575 cgraph_add_thunk (tree alias, tree decl, bool this_adjusting,
576 HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value,
580 struct cgraph_node *node = cgraph_get_node (alias);
584 gcc_assert (node->local.finalized);
585 gcc_assert (!node->same_body);
586 cgraph_remove_node (node);
589 node = cgraph_same_body_alias_1 (alias, decl);
591 #ifdef ENABLE_CHECKING
592 gcc_assert (!virtual_offset
593 || tree_int_cst_equal (virtual_offset, size_int (virtual_value)));
595 node->thunk.fixed_offset = fixed_offset;
596 node->thunk.this_adjusting = this_adjusting;
597 node->thunk.virtual_value = virtual_value;
598 node->thunk.virtual_offset_p = virtual_offset != NULL;
599 node->thunk.alias = real_alias;
600 node->thunk.thunk_p = true;
603 /* Returns the cgraph node assigned to DECL or NULL if no cgraph node
607 cgraph_get_node_or_alias (tree decl)
609 struct cgraph_node key, *node = NULL, **slot;
611 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
618 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
626 /* Returns the cgraph node assigned to DECL or NULL if no cgraph node
630 cgraph_get_node (tree decl)
632 struct cgraph_node key, *node = NULL, **slot;
634 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
641 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
647 if (node->same_body_alias)
648 node = node->same_body;
653 /* Insert already constructed node into hashtable. */
656 cgraph_insert_node_to_hashtable (struct cgraph_node *node)
658 struct cgraph_node **slot;
660 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, node, INSERT);
666 /* Returns a hash code for P. */
669 hash_node_by_assembler_name (const void *p)
671 const struct cgraph_node *n = (const struct cgraph_node *) p;
672 return (hashval_t) decl_assembler_name_hash (DECL_ASSEMBLER_NAME (n->decl));
675 /* Returns nonzero if P1 and P2 are equal. */
678 eq_assembler_name (const void *p1, const void *p2)
680 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
681 const_tree name = (const_tree)p2;
682 return (decl_assembler_name_equal (n1->decl, name));
685 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
686 Return NULL if there's no such node. */
689 cgraph_node_for_asm (tree asmname)
691 struct cgraph_node *node;
694 if (!assembler_name_hash)
696 assembler_name_hash =
697 htab_create_ggc (10, hash_node_by_assembler_name, eq_assembler_name,
699 for (node = cgraph_nodes; node; node = node->next)
700 if (!node->global.inlined_to)
702 tree name = DECL_ASSEMBLER_NAME (node->decl);
703 slot = htab_find_slot_with_hash (assembler_name_hash, name,
704 decl_assembler_name_hash (name),
706 /* We can have multiple declarations with the same assembler name. For C++
707 it is __builtin_strlen and strlen, for instance. Do we need to
708 record them all? The original implementation marked just the first one,
709 so let's hope for the best. */
714 struct cgraph_node *alias;
716 for (alias = node->same_body; alias; alias = alias->next)
719 name = DECL_ASSEMBLER_NAME (alias->decl);
720 hash = decl_assembler_name_hash (name);
721 slot = htab_find_slot_with_hash (assembler_name_hash, name,
730 slot = htab_find_slot_with_hash (assembler_name_hash, asmname,
731 decl_assembler_name_hash (asmname),
736 node = (struct cgraph_node *) *slot;
737 if (node->same_body_alias)
738 node = node->same_body;
744 /* Returns a hash value for X (which really is a cgraph_edge), hashing on its call_stmt. */
747 edge_hash (const void *x)
749 return htab_hash_pointer (((const struct cgraph_edge *) x)->call_stmt);
752 /* Return nonzero if the call_stmt of cgraph_edge X is the statement pointed to by Y. */
755 edge_eq (const void *x, const void *y)
757 return ((const struct cgraph_edge *) x)->call_stmt == y;
760 /* Add call graph edge E to call site hash of its caller. */
763 cgraph_add_edge_to_call_site_hash (struct cgraph_edge *e)
766 slot = htab_find_slot_with_hash (e->caller->call_site_hash,
768 htab_hash_pointer (e->call_stmt),
774 /* Return the callgraph edge representing the GIMPLE_CALL statement
778 cgraph_edge (struct cgraph_node *node, gimple call_stmt)
780 struct cgraph_edge *e, *e2;
783 if (node->call_site_hash)
784 return (struct cgraph_edge *)
785 htab_find_with_hash (node->call_site_hash, call_stmt,
786 htab_hash_pointer (call_stmt));
788 /* This loop may turn out to be a performance problem. In that case, adding
789 hashtables to call nodes with very many edges is probably the best
790 solution. It is not a good idea to add a pointer into the CALL_EXPR itself,
791 because we want to make it possible to have multiple cgraph nodes representing
792 different clones of the same body before the body is actually cloned. */
793 for (e = node->callees; e; e = e->next_callee)
795 if (e->call_stmt == call_stmt)
801 for (e = node->indirect_calls; e; e = e->next_callee)
803 if (e->call_stmt == call_stmt)
810 node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
811 for (e2 = node->callees; e2; e2 = e2->next_callee)
812 cgraph_add_edge_to_call_site_hash (e2);
813 for (e2 = node->indirect_calls; e2; e2 = e2->next_callee)
814 cgraph_add_edge_to_call_site_hash (e2);
821 /* Change field call_stmt of edge E to NEW_STMT. */
824 cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt)
828 if (e->caller->call_site_hash)
830 htab_remove_elt_with_hash (e->caller->call_site_hash,
832 htab_hash_pointer (e->call_stmt));
835 e->call_stmt = new_stmt;
836 if (e->indirect_unknown_callee
837 && (decl = gimple_call_fndecl (new_stmt)))
839 /* Constant propagation (and possibly also inlining?) can turn an
840 indirect call into a direct one. */
841 struct cgraph_node *new_callee = cgraph_node (decl);
843 cgraph_make_edge_direct (e, new_callee);
846 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
847 e->can_throw_external = stmt_can_throw_external (new_stmt);
849 if (e->caller->call_site_hash)
850 cgraph_add_edge_to_call_site_hash (e);
853 /* Like cgraph_set_call_stmt but walk the clone tree and update all
854 clones sharing the same function body. */
857 cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
858 gimple old_stmt, gimple new_stmt)
860 struct cgraph_node *node;
861 struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);
864 cgraph_set_call_stmt (edge, new_stmt);
870 struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
872 cgraph_set_call_stmt (edge, new_stmt);
875 else if (node->next_sibling_clone)
876 node = node->next_sibling_clone;
879 while (node != orig && !node->next_sibling_clone)
880 node = node->clone_of;
882 node = node->next_sibling_clone;
887 /* Like cgraph_create_edge walk the clone tree and update all clones sharing
888 same function body. If clones already have edge for OLD_STMT; only
889 update the edge same way as cgraph_set_call_stmt_including_clones does.
891 TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
892 frequencies of the clones. */
895 cgraph_create_edge_including_clones (struct cgraph_node *orig,
896 struct cgraph_node *callee,
898 gimple stmt, gcov_type count,
899 int freq, int loop_depth,
900 cgraph_inline_failed_t reason)
902 struct cgraph_node *node;
903 struct cgraph_edge *edge;
905 if (!cgraph_edge (orig, stmt))
907 edge = cgraph_create_edge (orig, callee, stmt, count, freq, loop_depth);
908 edge->inline_failed = reason;
915 struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
917 /* It is possible that clones already contain the edge while
918 the master didn't. Either we promoted an indirect call into a direct
919 call in the clone, or we are processing clones of an unreachable
920 master where edges have been removed. */
922 cgraph_set_call_stmt (edge, stmt);
923 else if (!cgraph_edge (node, stmt))
925 edge = cgraph_create_edge (node, callee, stmt, count,
927 edge->inline_failed = reason;
932 else if (node->next_sibling_clone)
933 node = node->next_sibling_clone;
936 while (node != orig && !node->next_sibling_clone)
937 node = node->clone_of;
939 node = node->next_sibling_clone;
944 /* Give initial reasons why inlining would fail on EDGE. This gets either
945 nullified or usually overwritten by more precise reasons later. */
948 initialize_inline_failed (struct cgraph_edge *e)
950 struct cgraph_node *callee = e->callee;
/* The tests are ordered from hardest failure to the soft default: an
   indirect edge has no known callee at all, then the callee body may be
   unavailable or explicitly not inlinable, and only as a last resort is
   the neutral "not considered" reason recorded.  */
952 if (e->indirect_unknown_callee)
953 e->inline_failed = CIF_INDIRECT_UNKNOWN_CALL;
954 else if (!callee->analyzed)
955 e->inline_failed = CIF_BODY_NOT_AVAILABLE;
956 else if (callee->local.redefined_extern_inline)
957 e->inline_failed = CIF_REDEFINED_EXTERN_INLINE;
958 else if (!callee->local.inlinable)
959 e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
/* e->call_stmt may be NULL -- per the comment in cgraph_create_edge_1,
   LTO has no access to call statements before they are loaded.  */
960 else if (e->call_stmt && gimple_call_cannot_inline_p (e->call_stmt))
961 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
963 e->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
966 /* Allocate a cgraph_edge structure and fill it with data according to the
967 parameters of which only CALLEE can be NULL (when creating an indirect call
970 static struct cgraph_edge *
971 cgraph_create_edge_1 (struct cgraph_node *caller, struct cgraph_node *callee,
972 gimple call_stmt, gcov_type count, int freq, int nest)
974 struct cgraph_edge *edge;
976 /* LTO does not actually have access to the call_stmt since these
977 have not been loaded yet. */
980 #ifdef ENABLE_CHECKING
981 /* This is a rather pricey check, possibly triggering construction of
982 the call stmt hashtable. */
983 gcc_assert (!cgraph_edge (caller, call_stmt));
986 gcc_assert (is_gimple_call (call_stmt));
992 free_edges = NEXT_FREE_EDGE (edge);
996 edge = ggc_alloc_cgraph_edge ();
997 edge->uid = cgraph_edge_max_uid++;
1001 edge->caller = caller;
1002 edge->callee = callee;
1003 edge->prev_caller = NULL;
1004 edge->next_caller = NULL;
1005 edge->prev_callee = NULL;
1006 edge->next_callee = NULL;
1008 edge->count = count;
1009 gcc_assert (count >= 0);
1010 edge->frequency = freq;
1011 gcc_assert (freq >= 0);
1012 gcc_assert (freq <= CGRAPH_FREQ_MAX);
1013 edge->loop_nest = nest;
1015 edge->call_stmt = call_stmt;
1016 push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
1017 edge->can_throw_external
1018 = call_stmt ? stmt_can_throw_external (call_stmt) : false;
1020 edge->call_stmt_cannot_inline_p =
1021 (call_stmt ? gimple_call_cannot_inline_p (call_stmt) : false);
1022 if (call_stmt && caller->call_site_hash)
1023 cgraph_add_edge_to_call_site_hash (edge);
1025 edge->indirect_info = NULL;
1026 edge->indirect_inlining_edge = 0;
1031 /* Create edge from CALLER to CALLEE in the cgraph. */
1033 struct cgraph_edge *
1034 cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
1035 gimple call_stmt, gcov_type count, int freq, int nest)
1037 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, callee, call_stmt,
1040 edge->indirect_unknown_callee = 0;
1041 initialize_inline_failed (edge);
1043 edge->next_caller = callee->callers;
1044 if (callee->callers)
1045 callee->callers->prev_caller = edge;
1046 edge->next_callee = caller->callees;
1047 if (caller->callees)
1048 caller->callees->prev_callee = edge;
1049 caller->callees = edge;
1050 callee->callers = edge;
1056 /* Create an indirect edge with a yet-undetermined callee where the call
1057 statement destination is a formal parameter of the caller with index
1060 struct cgraph_edge *
1061 cgraph_create_indirect_edge (struct cgraph_node *caller, gimple call_stmt,
1063 gcov_type count, int freq, int nest)
1065 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, NULL, call_stmt,
1068 edge->indirect_unknown_callee = 1;
1069 initialize_inline_failed (edge);
1071 edge->indirect_info = ggc_alloc_cleared_cgraph_indirect_call_info ();
1072 edge->indirect_info->param_index = -1;
1073 edge->indirect_info->ecf_flags = ecf_flags;
1075 edge->next_callee = caller->indirect_calls;
1076 if (caller->indirect_calls)
1077 caller->indirect_calls->prev_callee = edge;
1078 caller->indirect_calls = edge;
1083 /* Remove the edge E from the list of the callers of the callee. */
1086 cgraph_edge_remove_callee (struct cgraph_edge *e)
/* Only direct edges sit on a callee's callers list; an indirect edge
   has no known callee to unlink from.  */
1088 gcc_assert (!e->indirect_unknown_callee);
/* Unlink E from the doubly-linked callers list of its callee.  */
1090 e->prev_caller->next_caller = e->next_caller;
1092 e->next_caller->prev_caller = e->prev_caller;
/* E was the head of the list: advance the callee's head pointer.  */
1093 if (!e->prev_caller)
1094 e->callee->callers = e->next_caller;
1097 /* Remove the edge E from the list of the callees of the caller. */
1100 cgraph_edge_remove_caller (struct cgraph_edge *e)
/* Unlink E from the doubly-linked callee list of its caller.  */
1103 e->prev_callee->next_callee = e->next_callee;
1105 e->next_callee->prev_callee = e->prev_callee;
/* If E was the list head: the caller keeps indirect and direct edges on
   separate lists (indirect_calls vs. callees), so update whichever head
   E was on.  */
1106 if (!e->prev_callee)
1108 if (e->indirect_unknown_callee)
1109 e->caller->indirect_calls = e->next_callee;
1111 e->caller->callees = e->next_callee;
/* Keep the caller's call-site hash consistent with the edge lists.  */
1113 if (e->caller->call_site_hash)
1114 htab_remove_elt_with_hash (e->caller->call_site_hash,
1116 htab_hash_pointer (e->call_stmt));
1119 /* Put the edge onto the free list. */
1122 cgraph_free_edge (struct cgraph_edge *e)
1126 /* Clear out the edge so we do not dangle pointers. */
1127 memset (e, 0, sizeof (*e));
/* Chain E onto the free list; NEXT_FREE_EDGE reuses the prev_caller
   field as the link, so freed edges (and their UIDs) are recycled by
   cgraph_create_edge_1.  */
1129 NEXT_FREE_EDGE (e) = free_edges;
1133 /* Remove the edge E in the cgraph. */
1136 cgraph_remove_edge (struct cgraph_edge *e)
1138 /* Call all edge removal hooks. */
1139 cgraph_call_edge_removal_hooks (e);
/* Indirect edges are not linked into any callee's callers list, so
   only direct edges need the callee-side unlink.  */
1141 if (!e->indirect_unknown_callee)
1142 /* Remove from callers list of the callee. */
1143 cgraph_edge_remove_callee (e);
1145 /* Remove from callees list of the callers. */
1146 cgraph_edge_remove_caller (e);
1148 /* Put the edge onto the free list. */
1149 cgraph_free_edge (e);
1152 /* Set callee of call graph edge E and add it to the corresponding set of
/* Pushes E at the head of N's callers list.  NOTE(review): the
   assignment of e->callee = n and the update of n->callers are
   presumably on lines not visible in this fragment -- confirm.  */
1156 cgraph_set_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1158 e->prev_caller = NULL;
1160 n->callers->prev_caller = e;
1161 e->next_caller = n->callers;
1166 /* Redirect callee of E to N. The function does not update underlying
/* Moves E from its current callee's callers list onto N's.  Per the
   (truncated) header comment, the underlying statement is NOT updated;
   presumably the caller must fix up the call itself -- confirm.  */
1170 cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1172 /* Remove from callers list of the current callee. */
1173 cgraph_edge_remove_callee (e);
1175 /* Insert to callers list of the new callee. */
1176 cgraph_set_edge_callee (e, n);
1179 /* Make an indirect EDGE with an unknown callee an ordinary edge leading to
1183 cgraph_make_edge_direct (struct cgraph_edge *edge, struct cgraph_node *callee)
/* From here on the edge has a known callee.  */
1185 edge->indirect_unknown_callee = 0;
1187 /* Get the edge out of the indirect edge list. */
1188 if (edge->prev_callee)
1189 edge->prev_callee->next_callee = edge->next_callee;
1190 if (edge->next_callee)
1191 edge->next_callee->prev_callee = edge->prev_callee;
/* EDGE was the head of the caller's indirect_calls list.  */
1192 if (!edge->prev_callee)
1193 edge->caller->indirect_calls = edge->next_callee;
1195 /* Put it into the normal callee list */
1196 edge->prev_callee = NULL;
1197 edge->next_callee = edge->caller->callees;
1198 if (edge->caller->callees)
1199 edge->caller->callees->prev_callee = edge;
1200 edge->caller->callees = edge;
1202 /* Insert to callers list of the new callee. */
1203 cgraph_set_edge_callee (edge, callee);
/* The old CIF_INDIRECT_UNKNOWN_CALL reason no longer applies.  */
1205 /* We need to re-determine the inlining status of the edge. */
1206 initialize_inline_failed (edge);
1210 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1211 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1212 of OLD_STMT if it was previously call statement. */
1215 cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
1216 gimple old_stmt, tree old_call, gimple new_stmt)
1218 tree new_call = (is_gimple_call (new_stmt)) ? gimple_call_fndecl (new_stmt) : 0;
1220 /* If we are only seeing indirect calls, there is nothing to update. */
1221 if (!new_call && !old_call)
1223 /* See if we turned an indirect call into a direct call, or folded a call
1224 to one builtin into a different builtin. */
1225 if (old_call != new_call)
1227 struct cgraph_edge *e = cgraph_edge (node, old_stmt);
1228 struct cgraph_edge *ne = NULL;
1235 /* See if the edge is already there and has the correct callee. It
1236 might be so because of indirect inlining has already updated
1238 if (new_call && e->callee && e->callee->decl == new_call)
1241 /* Otherwise remove edge and create new one; we can't simply redirect
1242 since function has changed, so inline plan and other information
1243 attached to edge is invalid. */
1245 frequency = e->frequency;
1246 loop_nest = e->loop_nest;
1247 cgraph_remove_edge (e);
1251 /* We are seeing new direct call; compute profile info based on BB. */
1252 basic_block bb = gimple_bb (new_stmt);
1254 frequency = compute_call_stmt_bb_frequency (current_function_decl,
1256 loop_nest = bb->loop_depth;
1261 ne = cgraph_create_edge (node, cgraph_node (new_call),
1262 new_stmt, count, frequency,
1264 gcc_assert (ne->inline_failed);
1267 /* We only updated the call stmt; update pointer in cgraph edge.. */
1268 else if (old_stmt != new_stmt)
1269 cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
1272 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1273 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1274 of OLD_STMT before it was updated (updating can happen inplace). */
1277 cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
1279 struct cgraph_node *orig = cgraph_node (cfun->decl);
1280 struct cgraph_node *node;
/* First fix up the node for the current function itself ...  */
1282 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
/* ... then walk the whole clone tree of ORIG (pre-order traversal via
   clones / next_sibling_clone / clone_of links) and update each clone.
   NOTE(review): loop-body braces lost in extraction.  */
1284 for (node = orig->clones; node != orig;)
1286 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1288 node = node->clones;
1289 else if (node->next_sibling_clone)
1290 node = node->next_sibling_clone;
/* Climb back up the clone tree until a sibling subtree is found.  */
1293 while (node != orig && !node->next_sibling_clone)
1294 node = node->clone_of;
1296 node = node->next_sibling_clone;
1302 /* Remove all callees from the node. */
1305 cgraph_node_remove_callees (struct cgraph_node *node)
1307 struct cgraph_edge *e, *f;
1309 /* It is sufficient to remove the edges from the lists of callers of
1310 the callees. The callee list of the node can be zapped with one
1312 for (e = node->callees; e; e = f)
/* NOTE(review): the statement caching e->next_callee into F was lost in
   extraction; F must be set before E is freed.  */
1315 cgraph_call_edge_removal_hooks (e);
1316 if (!e->indirect_unknown_callee)
1317 cgraph_edge_remove_callee (e);
1318 cgraph_free_edge (e);
/* Indirect-call edges are unlinked and freed the same way.  */
1320 for (e = node->indirect_calls; e; e = f)
1323 cgraph_call_edge_removal_hooks (e);
1324 if (!e->indirect_unknown_callee)
1325 cgraph_edge_remove_callee (e);
1326 cgraph_free_edge (e);
1328 node->indirect_calls = NULL;
1329 node->callees = NULL;
/* The per-statement lookup cache is now stale; drop it.  */
1330 if (node->call_site_hash)
1332 htab_delete (node->call_site_hash);
1333 node->call_site_hash = NULL;
1337 /* Remove all callers from the node. */
1340 cgraph_node_remove_callers (struct cgraph_node *node)
1342 struct cgraph_edge *e, *f;
1344 /* It is sufficient to remove the edges from the lists of callees of
1345 the callers. The caller list of the node can be zapped with one
1347 for (e = node->callers; e; e = f)
/* NOTE(review): the statement saving e->next_caller into F was lost in
   extraction; it must run before cgraph_free_edge.  */
1350 cgraph_call_edge_removal_hooks (e);
1351 cgraph_edge_remove_caller (e);
1352 cgraph_free_edge (e);
1354 node->callers = NULL;
1357 /* Release memory used to represent body of function NODE.
   Frees the gimple body, CFG annotations and struct function, and
   poisons DECL_INITIAL unless debug info still needs it.  */
1360 cgraph_release_function_body (struct cgraph_node *node)
1362 if (DECL_STRUCT_FUNCTION (node->decl))
1364 tree old_decl = current_function_decl;
/* Enter the function's cfun so the teardown helpers see the right context.  */
1365 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1366 if (cfun->gimple_df)
1368 current_function_decl = node->decl;
1370 delete_tree_cfg_annotations ();
1372 current_function_decl = old_decl;
/* Dominator info must already have been freed by earlier passes.  */
1376 gcc_assert (dom_computed[0] == DOM_NONE);
1377 gcc_assert (dom_computed[1] == DOM_NONE);
1380 if (cfun->value_histograms)
1382 gcc_assert (!current_loops);
1384 gimple_set_body (node->decl, NULL);
1385 VEC_free (ipa_opt_pass, heap,
1386 node->ipa_transforms_to_apply);
1387 /* Struct function hangs a lot of data that would leak if we didn't
1388 remove all pointers to it. */
1389 ggc_free (DECL_STRUCT_FUNCTION (node->decl));
1390 DECL_STRUCT_FUNCTION (node->decl) = NULL;
1392 DECL_SAVED_TREE (node->decl) = NULL;
1393 /* If the node is abstract and needed, then do not clear DECL_INITIAL
1394 of its associated function declaration because it's
1395 needed to emit debug info later. */
1396 if (!node->abstract_and_needed)
1397 DECL_INITIAL (node->decl) = error_mark_node;
1400 /* Remove same body alias node.
   Unlinks NODE from its same_body chain and both hash tables, then
   recycles the node onto the free list.  */
1403 cgraph_remove_same_body_alias (struct cgraph_node *node)
1406 int uid = node->uid;
1408 gcc_assert (node->same_body_alias);
/* Unlink from the doubly linked same_body list.
   NOTE(review): the guarding if-conditions around these unlink statements
   were lost in extraction.  */
1410 node->previous->next = node->next;
1412 node->same_body->same_body = node->next;
1414 node->next->previous = node->previous;
1416 node->previous = NULL;
1417 slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
1419 htab_clear_slot (cgraph_hash, slot);
1420 if (assembler_name_hash)
1422 tree name = DECL_ASSEMBLER_NAME (node->decl);
1423 slot = htab_find_slot_with_hash (assembler_name_hash, name,
1424 decl_assembler_name_hash (name),
/* Only clear the slot if it still points at this very node.  */
1426 if (slot && *slot == node)
1427 htab_clear_slot (assembler_name_hash, slot);
1430 /* Clear out the node to NULL all pointers and add the node to the free
1432 memset (node, 0, sizeof(*node));
1434 NEXT_FREE_NODE (node) = free_nodes;
1438 /* Remove the node from cgraph.
   Tears down all edges and references, splices NODE out of the nested-
   function, sibling, clone-tree and comdat-group lists, fixes up the
   DECL_UID hash so a surviving inline clone can take NODE's place, and
   finally recycles the node.  */
1441 cgraph_remove_node (struct cgraph_node *node)
1444 bool kill_body = false;
1445 struct cgraph_node *n;
1446 int uid = node->uid;
1448 cgraph_call_node_removal_hooks (node);
1449 cgraph_node_remove_callers (node);
1450 cgraph_node_remove_callees (node);
1451 ipa_remove_all_references (&node->ref_list);
1452 ipa_remove_all_refering (&node->ref_list);
1453 VEC_free (ipa_opt_pass, heap,
1454 node->ipa_transforms_to_apply);
1456 /* Incremental inlining access removed nodes stored in the postorder list.
1458 node->needed = node->reachable = false;
/* Detach all nested functions; they keep existing, just lose the origin.  */
1459 for (n = node->nested; n; n = n->next_nested)
1461 node->nested = NULL;
/* Remove NODE from its origin's nested list (singly linked walk).  */
1464 struct cgraph_node **node2 = &node->origin->nested;
1466 while (*node2 != node)
1467 node2 = &(*node2)->next_nested;
1468 *node2 = node->next_nested;
/* Unlink from the global doubly linked cgraph_nodes list.  */
1471 node->previous->next = node->next;
1473 cgraph_nodes = node->next;
1475 node->next->previous = node->previous;
1477 node->previous = NULL;
1478 slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
1481 struct cgraph_node *next_inline_clone;
/* Find an inline clone sharing NODE's decl, if any.  */
1483 for (next_inline_clone = node->clones;
1484 next_inline_clone && next_inline_clone->decl != node->decl;
1485 next_inline_clone = next_inline_clone->next_sibling_clone)
1488 /* If there is inline clone of the node being removed, we need
1489 to put it into the position of removed node and reorganize all
1490 other clones to be based on it. */
1491 if (next_inline_clone)
1493 struct cgraph_node *n;
1494 struct cgraph_node *new_clones;
1496 *slot = next_inline_clone;
1498 /* Unlink inline clone from the list of clones of removed node. */
1499 if (next_inline_clone->next_sibling_clone)
1500 next_inline_clone->next_sibling_clone->prev_sibling_clone
1501 = next_inline_clone->prev_sibling_clone;
1502 if (next_inline_clone->prev_sibling_clone)
1504 gcc_assert (node->clones != next_inline_clone);
1505 next_inline_clone->prev_sibling_clone->next_sibling_clone
1506 = next_inline_clone->next_sibling_clone;
1510 gcc_assert (node->clones == next_inline_clone);
1511 node->clones = next_inline_clone->next_sibling_clone;
1514 new_clones = node->clones;
1515 node->clones = NULL;
1517 /* Copy clone info. */
1518 next_inline_clone->clone = node->clone;
1520 /* Now place it into clone tree at same level at NODE. */
1521 next_inline_clone->clone_of = node->clone_of;
1522 next_inline_clone->prev_sibling_clone = NULL;
1523 next_inline_clone->next_sibling_clone = NULL;
1526 if (node->clone_of->clones)
1527 node->clone_of->clones->prev_sibling_clone = next_inline_clone;
1528 next_inline_clone->next_sibling_clone = node->clone_of->clones;
1529 node->clone_of->clones = next_inline_clone;
1532 /* Merge the clone list. */
1535 if (!next_inline_clone->clones)
1536 next_inline_clone->clones = new_clones;
/* Append NODE's remaining clones after the surviving clone's own list.  */
1539 n = next_inline_clone->clones;
1540 while (n->next_sibling_clone)
1541 n = n->next_sibling_clone;
1542 n->next_sibling_clone = new_clones;
1543 new_clones->prev_sibling_clone = n;
1547 /* Update clone_of pointers. */
1551 n->clone_of = next_inline_clone;
1552 n = n->next_sibling_clone;
/* No replacement clone exists: just drop the hash entry.  */
1557 htab_clear_slot (cgraph_hash, slot);
/* Splice NODE out of its parent's sibling-clone list.  */
1562 if (node->prev_sibling_clone)
1563 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1564 else if (node->clone_of)
1565 node->clone_of->clones = node->next_sibling_clone;
1566 if (node->next_sibling_clone)
1567 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1570 struct cgraph_node *n, *next;
/* Re-parent NODE's clones one level up, onto node->clone_of.  */
1574 for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
1575 n->clone_of = node->clone_of;
1576 n->clone_of = node->clone_of;
1577 n->next_sibling_clone = node->clone_of->clones;
1578 if (node->clone_of->clones)
1579 node->clone_of->clones->prev_sibling_clone = n;
1580 node->clone_of->clones = node->clones;
1584 /* We are removing node with clones.  This makes clones inconsistent,
1585 but assume they will be removed subsequently and just keep clone
1586 tree intact. This can happen in unreachable function removal since
1587 we remove unreachable functions in random order, not by bottom-up
1588 walk of clone trees. */
1589 for (n = node->clones; n; n = next)
1591 next = n->next_sibling_clone;
1592 n->next_sibling_clone = NULL;
1593 n->prev_sibling_clone = NULL;
/* Same-body aliases cannot outlive their primary node.  */
1599 while (node->same_body)
1600 cgraph_remove_same_body_alias (node->same_body);
1602 if (node->same_comdat_group)
1604 struct cgraph_node *prev;
/* Walk the circular comdat group to find NODE's predecessor.  */
1605 for (prev = node->same_comdat_group;
1606 prev->same_comdat_group != node;
1607 prev = prev->same_comdat_group)
1609 if (node->same_comdat_group == prev)
1610 prev->same_comdat_group = NULL;
1612 prev->same_comdat_group = node->same_comdat_group;
1613 node->same_comdat_group = NULL;
1616 /* While all the clones are removed after being processed, the function
1617 itself is kept in the cgraph even after it is compiled. Check whether
1618 we are done with this body and reclaim it proactively if this is the case.
1620 if (!kill_body && *slot)
1622 struct cgraph_node *n = (struct cgraph_node *) *slot;
1623 if (!n->clones && !n->clone_of && !n->global.inlined_to
1624 && (cgraph_global_info_ready
1625 && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl)
1626 || n->in_other_partition)))
1629 if (assembler_name_hash)
1631 tree name = DECL_ASSEMBLER_NAME (node->decl);
1632 slot = htab_find_slot_with_hash (assembler_name_hash, name,
1633 decl_assembler_name_hash (name),
1635 /* Inline clones are not hashed. */
1636 if (slot && *slot == node)
1637 htab_clear_slot (assembler_name_hash, slot);
1641 cgraph_release_function_body (node);
1643 if (node->call_site_hash)
1645 htab_delete (node->call_site_hash);
1646 node->call_site_hash = NULL;
1650 /* Clear out the node to NULL all pointers and add the node to the free
1652 memset (node, 0, sizeof(*node));
1654 NEXT_FREE_NODE (node) = free_nodes;
1658 /* Remove the node from cgraph, together with every callee that was
   inlined into it (recursing through !inline_failed edges).  */
1661 cgraph_remove_node_and_inline_clones (struct cgraph_node *node)
1663 struct cgraph_edge *e, *next;
1664 for (e = node->callees; e; e = next)
/* Cache the successor first: removing the callee frees E.  */
1666 next = e->next_callee;
1667 if (!e->inline_failed)
1668 cgraph_remove_node_and_inline_clones (e->callee);
1670 cgraph_remove_node (node);
1673 /* Notify finalize_compilation_unit that given node is reachable.
   Marks NODE reachable and pushes it on cgraph_nodes_queue the first
   time it is seen (only finalized nodes qualify).  */
1676 cgraph_mark_reachable_node (struct cgraph_node *node)
1678 if (!node->reachable && node->local.finalized)
1680 if (cgraph_global_info_ready)
1682 /* Verify that function does not appear to be needed out of blue
1683 during the optimization process. This can happen for extern
1684 inlines when bodies were removed after inlining. */
1685 gcc_assert ((node->analyzed || node->in_other_partition
1686 || DECL_EXTERNAL (node->decl)));
1689 notice_global_symbol (node->decl);
1690 node->reachable = 1;
/* Enqueue for later analysis by the cgraph driver.  */
1692 node->next_needed = cgraph_nodes_queue;
1693 cgraph_nodes_queue = node;
1697 /* Likewise indicate that a node is needed, i.e. reachable via some
   external means (the NEEDED flag makes it an entry point).  */
1701 cgraph_mark_needed_node (struct cgraph_node *node)
/* A function inlined into another cannot itself be an entry point.  */
1704 gcc_assert (!node->global.inlined_to);
1705 cgraph_mark_reachable_node (node);
1708 /* Likewise indicate that a node is having address taken.
   Implies reachability; the flag restricts API-changing optimizations.  */
1711 cgraph_mark_address_taken_node (struct cgraph_node *node)
1713 cgraph_mark_reachable_node (node);
1714 node->address_taken = 1;
1717 /* Return local info for the compiled function DECL
   (convenience accessor: looks up the cgraph node and returns &local).  */
1719 struct cgraph_local_info *
1720 cgraph_local_info (tree decl)
1722 struct cgraph_node *node;
1724 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1725 node = cgraph_node (decl);
1726 return &node->local;
1729 /* Return global info for the compiled function DECL.
   Only valid once interprocedural info is ready.  */
1731 struct cgraph_global_info *
1732 cgraph_global_info (tree decl)
1734 struct cgraph_node *node;
1736 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
1737 node = cgraph_node (decl);
1738 return &node->global;
1741 /* Return RTL info for the compiled function DECL.
   NOTE(review): the tail of this function (NULL return for not-yet-
   compiled functions and the final return) was lost in extraction.  */
1743 struct cgraph_rtl_info *
1744 cgraph_rtl_info (tree decl)
1746 struct cgraph_node *node;
1748 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1749 node = cgraph_node (decl);
1750 if (decl != current_function_decl
1751 && !TREE_ASM_WRITTEN (node->decl))
1756 /* Return a string describing the failure REASON.
   The table is generated from cif-code.def via the DEFCIFCODE macro.  */
1759 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
1762 #define DEFCIFCODE(code, string) string,
1764 static const char *cif_string_table[CIF_N_REASONS] = {
1765 #include "cif-code.def"
1768 /* Signedness of an enum type is implementation defined, so cast it
1769 to unsigned before testing. */
1770 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1771 return cif_string_table[reason];
1774 /* Return name of the node used in debug output
   (language-specific printable name, verbosity level 2).  */
1776 cgraph_node_name (struct cgraph_node *node)
1778 return lang_hooks.decl_printable_name (node->decl, 2);
1781 /* Names used to print out the availability enum
   (indexed by enum availability; order must match cgraph.h).  */
1782 const char * const cgraph_availability_names[] =
1783 {"unset", "not_available", "overwritable", "available", "local"};
1786 /* Dump call graph node NODE to file F.
   Prints identity, availability, profile/inline summaries, flags,
   caller and callee edge lists, IPA references, indirect-call count,
   and any same-body aliases/thunks.  */
1789 dump_cgraph_node (FILE *f, struct cgraph_node *node)
1791 struct cgraph_edge *edge;
1792 int indirect_calls_count = 0;
/* Header: name/uid and, where known, assembler name and clone origin.  */
1794 fprintf (f, "%s/%i(%i)", cgraph_node_name (node), node->uid,
1796 dump_addr (f, " @", (void *)node);
1797 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
1798 fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
1799 if (node->global.inlined_to)
1800 fprintf (f, " (inline copy in %s/%i)",
1801 cgraph_node_name (node->global.inlined_to),
1802 node->global.inlined_to->uid);
1804 fprintf (f, " (clone of %s/%i)",
1805 cgraph_node_name (node->clone_of),
1806 node->clone_of->uid);
1807 if (cgraph_function_flags_ready)
1808 fprintf (f, " availability:%s",
1809 cgraph_availability_names [cgraph_function_body_availability (node)]);
1811 fprintf (f, " analyzed");
1812 if (node->in_other_partition)
1813 fprintf (f, " in_other_partition");
/* Profile counts and inline-summary time/size estimates.  */
1815 fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
1816 (HOST_WIDEST_INT)node->count);
1817 if (node->local.inline_summary.self_time)
1818 fprintf (f, " %i time, %i benefit", node->local.inline_summary.self_time,
1819 node->local.inline_summary.time_inlining_benefit)
1820 if (node->global.time && node->global.time
1821 != node->local.inline_summary.self_time)
1822 fprintf (f, " (%i after inlining)", node->global.time);
1823 if (node->local.inline_summary.self_size)
1824 fprintf (f, " %i size, %i benefit", node->local.inline_summary.self_size,
1825 node->local.inline_summary.size_inlining_benefit);
1826 if (node->global.size && node->global.size
1827 != node->local.inline_summary.self_size)
1828 fprintf (f, " (%i after inlining)", node->global.size);
1829 if (node->local.inline_summary.estimated_self_stack_size)
1830 fprintf (f, " %i bytes stack usage", (int)node->local.inline_summary.estimated_self_stack_size);
1831 if (node->global.estimated_stack_size != node->local.inline_summary.estimated_self_stack_size)
1832 fprintf (f, " %i bytes after inlining", (int)node->global.estimated_stack_size);
1834 fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
/* Boolean status flags, one keyword each.  */
1836 fprintf (f, " needed");
1837 if (node->address_taken)
1838 fprintf (f, " address_taken");
1839 else if (node->reachable)
1840 fprintf (f, " reachable");
1841 else if (node->reachable_from_other_partition)
1842 fprintf (f, " reachable_from_other_partition");
1843 if (gimple_has_body_p (node->decl))
1844 fprintf (f, " body");
1846 fprintf (f, " process");
1847 if (node->local.local)
1848 fprintf (f, " local");
1849 if (node->local.externally_visible)
1850 fprintf (f, " externally_visible");
1851 if (node->local.used_from_object_file)
1852 fprintf (f, " used_from_object_file");
1853 if (node->local.finalized)
1854 fprintf (f, " finalized");
1855 if (node->local.disregard_inline_limits)
1856 fprintf (f, " always_inline");
1857 else if (node->local.inlinable)
1858 fprintf (f, " inlinable");
1859 else if (node->local.versionable)
1860 fprintf (f, " versionable");
1861 if (node->local.redefined_extern_inline)
1862 fprintf (f, " redefined_extern_inline");
1863 if (TREE_ASM_WRITTEN (node->decl))
1864 fprintf (f, " asm_written");
/* Incoming edges.  */
1866 fprintf (f, "\n called by: ");
1867 for (edge = node->callers; edge; edge = edge->next_caller)
1869 fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
1872 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1873 (HOST_WIDEST_INT)edge->count);
1874 if (edge->frequency)
1875 fprintf (f, "(%.2f per call) ",
1876 edge->frequency / (double)CGRAPH_FREQ_BASE);
1877 if (!edge->inline_failed)
1878 fprintf(f, "(inlined) ");
1879 if (edge->indirect_inlining_edge)
1880 fprintf(f, "(indirect_inlining) ");
1881 if (edge->can_throw_external)
1882 fprintf(f, "(can throw external) ");
/* Outgoing edges.  */
1885 fprintf (f, "\n calls: ");
1886 for (edge = node->callees; edge; edge = edge->next_callee)
1888 fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
1890 if (!edge->inline_failed)
1891 fprintf(f, "(inlined) ");
1892 if (edge->indirect_inlining_edge)
1893 fprintf(f, "(indirect_inlining) ");
1895 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1896 (HOST_WIDEST_INT)edge->count);
1897 if (edge->frequency)
1898 fprintf (f, "(%.2f per call) ",
1899 edge->frequency / (double)CGRAPH_FREQ_BASE);
1900 if (edge->loop_nest)
1901 fprintf (f, "(nested in %i loops) ", edge->loop_nest);
1902 if (edge->can_throw_external)
1903 fprintf(f, "(can throw external) ");
1906 fprintf (f, " References: ");
1907 ipa_dump_references (f, &node->ref_list);
1908 fprintf (f, " Refering this function: ");
1909 ipa_dump_refering (f, &node->ref_list);
/* Indirect calls are only counted, not listed individually.  */
1911 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
1912 indirect_calls_count++;
1913 if (indirect_calls_count)
1914 fprintf (f, " has %i outgoing edges for indirect calls.\n",
1915 indirect_calls_count);
1917 if (node->same_body)
1919 struct cgraph_node *n;
1920 fprintf (f, " aliases & thunks:");
1921 for (n = node->same_body; n; n = n->next)
1923 fprintf (f, " %s/%i", cgraph_node_name (n), n->uid);
1924 if (n->thunk.thunk_p)
1926 fprintf (f, " (thunk of %s fixed ofset %i virtual value %i has "
1927 "virtual offset %i",
1928 lang_hooks.decl_printable_name (n->thunk.alias, 2),
1929 (int)n->thunk.fixed_offset,
1930 (int)n->thunk.virtual_value,
1931 (int)n->thunk.virtual_offset_p);
1934 if (DECL_ASSEMBLER_NAME_SET_P (n->decl))
1935 fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (n->decl)));
1942 /* Dump call graph node NODE to stderr (debugger convenience wrapper).  */
1945 debug_cgraph_node (struct cgraph_node *node)
1947 dump_cgraph_node (stderr, node);
1951 /* Dump the callgraph to file F: every node in cgraph_nodes order.  */
1954 dump_cgraph (FILE *f)
1956 struct cgraph_node *node;
1958 fprintf (f, "callgraph:\n\n");
1959 for (node = cgraph_nodes; node; node = node->next)
1960 dump_cgraph_node (f, node);
1964 /* Dump the call graph to stderr (debugger convenience wrapper).
   NOTE(review): the function signature line was lost in extraction.  */
1969 dump_cgraph (stderr);
1973 /* Set the DECL_ASSEMBLER_NAME and update cgraph hashtables.
   Warns if DECL was already referenced from assembly under the old name.  */
1976 change_decl_assembler_name (tree decl, tree name)
/* The assembler-name hash must not exist yet, otherwise it would go stale.  */
1978 gcc_assert (!assembler_name_hash);
1979 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
1981 SET_DECL_ASSEMBLER_NAME (decl, name);
/* Renaming to the same name is a no-op.  */
1984 if (name == DECL_ASSEMBLER_NAME (decl))
1987 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
1988 && DECL_RTL_SET_P (decl))
1989 warning (0, "%D renamed after being referenced in assembly", decl);
1991 SET_DECL_ASSEMBLER_NAME (decl, name);
1994 /* Add a top-level asm statement to the list.
   Appends a new cgraph_asm_node for ASM_STR, preserving source order
   via cgraph_order.  */
1996 struct cgraph_asm_node *
1997 cgraph_add_asm_node (tree asm_str)
1999 struct cgraph_asm_node *node;
2001 node = ggc_alloc_cleared_cgraph_asm_node ();
2002 node->asm_str = asm_str;
2003 node->order = cgraph_order++;
/* Link at the tail of the singly linked asm-node list.  */
2005 if (cgraph_asm_nodes == NULL)
2006 cgraph_asm_nodes = node;
2008 cgraph_asm_last_node->next = node;
2009 cgraph_asm_last_node = node;
2013 /* Return true when the DECL can possibly be inlined.
   Before global info is ready only DECL_UNINLINABLE is trustworthy;
   afterwards the recorded inlining decision is used.  */
2015 cgraph_function_possibly_inlined_p (tree decl)
2017 if (!cgraph_global_info_ready)
2018 return !DECL_UNINLINABLE (decl);
2019 return DECL_POSSIBLY_INLINED (decl);
2022 /* Create clone of E in the node N represented by CALL_EXPR the callgraph.
   Scales profile count by COUNT_SCALE and frequency by FREQ_SCALE; when
   UPDATE_ORIGINAL, the cloned counts are subtracted from E.  */
2023 struct cgraph_edge *
2024 cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
2025 gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
2026 int freq_scale, int loop_nest, bool update_original)
2028 struct cgraph_edge *new_edge;
2029 gcov_type count = e->count * count_scale / REG_BR_PROB_BASE;
2032 /* We do not want to ignore loop nest after frequency drops to 0. */
2035 freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
2036 if (freq > CGRAPH_FREQ_MAX)
2037 freq = CGRAPH_FREQ_MAX;
2039 if (e->indirect_unknown_callee)
/* An indirect edge may have been turned direct if CALL_STMT now has a
   known callee decl; otherwise clone it as an indirect edge.  */
2043 if (call_stmt && (decl = gimple_call_fndecl (call_stmt)))
2045 struct cgraph_node *callee = cgraph_node (decl);
2046 new_edge = cgraph_create_edge (n, callee, call_stmt, count, freq,
2047 e->loop_nest + loop_nest);
2051 new_edge = cgraph_create_indirect_edge (n, call_stmt,
2052 e->indirect_info->ecf_flags,
2054 e->loop_nest + loop_nest);
2055 *new_edge->indirect_info = *e->indirect_info;
2059 new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq,
2060 e->loop_nest + loop_nest);
2062 new_edge->inline_failed = e->inline_failed;
2063 new_edge->indirect_inlining_edge = e->indirect_inlining_edge;
2064 new_edge->lto_stmt_uid = stmt_uid;
2066 if (update_original)
2067 e->count -= new_edge->count;
2071 cgraph_call_edge_duplication_hooks (e, new_edge);
2075 /* Create node representing clone of N executed COUNT times. Decrease
2076 the execution counts from original node too.
2077 The new clone will have decl set to DECL that may or may not be the same
2080 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
2081 function's profile to reflect the fact that part of execution is handled
2083 struct cgraph_node *
2084 cgraph_clone_node (struct cgraph_node *n, tree decl, gcov_type count, int freq,
2085 int loop_nest, bool update_original,
2086 VEC(cgraph_edge_p,heap) *redirect_callers)
2088 struct cgraph_node *new_node = cgraph_create_node ();
2089 struct cgraph_edge *e;
2090 gcov_type count_scale;
/* Copy identity and summaries from N, then reset the fields that must
   differ for a fresh local clone.  */
2093 new_node->decl = decl;
2094 new_node->origin = n->origin;
2095 if (new_node->origin)
2097 new_node->next_nested = new_node->origin->nested;
2098 new_node->origin->nested = new_node;
2100 new_node->analyzed = n->analyzed;
2101 new_node->local = n->local;
2102 new_node->local.externally_visible = false;
2103 new_node->local.used_from_object_file = false;
2104 new_node->local.local = true;
2105 new_node->local.vtable_method = false;
2106 new_node->global = n->global;
2107 new_node->rtl = n->rtl;
2108 new_node->count = count;
2109 new_node->frequency = n->frequency;
2110 new_node->clone = n->clone;
2111 new_node->clone.tree_map = 0;
/* Compute the ratio used to scale edge counts (clamped to 100%).  */
2114 if (new_node->count > n->count)
2115 count_scale = REG_BR_PROB_BASE;
2117 count_scale = new_node->count * REG_BR_PROB_BASE / n->count;
2121 if (update_original)
/* Redirect the requested caller edges to the new clone.  */
2128 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
2130 /* Redirect calls to the old version node to point to its new
2132 cgraph_redirect_edge_callee (e, new_node);
/* Clone all outgoing direct and indirect edges with scaled profiles.  */
2136 for (e = n->callees;e; e=e->next_callee)
2137 cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
2138 count_scale, freq, loop_nest, update_original);
2140 for (e = n->indirect_calls; e; e = e->next_callee)
2141 cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
2142 count_scale, freq, loop_nest, update_original);
2143 ipa_clone_references (new_node, NULL, &n->ref_list);
/* Insert the clone at the head of N's clone list.  */
2145 new_node->next_sibling_clone = n->clones;
2147 n->clones->prev_sibling_clone = new_node;
2148 n->clones = new_node;
2149 new_node->clone_of = n;
2151 cgraph_call_node_duplication_hooks (n, new_node);
/* A clone with a distinct decl needs its own hash-table entries.  */
2152 if (n->decl != decl)
2154 struct cgraph_node **slot;
2155 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, new_node, INSERT);
2156 gcc_assert (!*slot);
2158 if (assembler_name_hash)
2161 tree name = DECL_ASSEMBLER_NAME (decl);
2163 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
2164 decl_assembler_name_hash (name),
2166 gcc_assert (!*aslot);
2173 /* Create a new name for clone of DECL, add SUFFIX. Returns an identifier.
   Builds "<asmname><sep><suffix>" and makes it unique with a counter.  */
2175 static GTY(()) unsigned int clone_fn_id_num;
2178 clone_function_name (tree decl, const char *suffix)
2180 tree name = DECL_ASSEMBLER_NAME (decl);
2181 size_t len = IDENTIFIER_LENGTH (name);
2182 char *tmp_name, *prefix;
/* +2: one separator character plus the terminating NUL.  */
2184 prefix = XALLOCAVEC (char, len + strlen (suffix) + 2);
2185 memcpy (prefix, IDENTIFIER_POINTER (name), len);
2186 strcpy (prefix + len + 1, suffix);
/* Separator is '.' where the assembler allows it, else '$', else '_'.
   NOTE(review): the lines assigning prefix[len] were lost in extraction.  */
2187 #ifndef NO_DOT_IN_LABEL
2189 #elif !defined NO_DOLLAR_IN_LABEL
2194 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
2195 return get_identifier (tmp_name);
2198 /* Create callgraph node clone with new declaration. The actual body will
2199 be copied later at compilation stage.
2201 TODO: after merging in ipa-sra use function call notes instead of args_to_skip
   OLD_NODE is cloned under a fresh local decl; REDIRECT_CALLERS are
   retargeted to the clone; TREE_MAP records constant replacements and
   ARGS_TO_SKIP the dropped parameters; SUFFIX names the clone.  */
2204 struct cgraph_node *
2205 cgraph_create_virtual_clone (struct cgraph_node *old_node,
2206 VEC(cgraph_edge_p,heap) *redirect_callers,
2207 VEC(ipa_replace_map_p,gc) *tree_map,
2208 bitmap args_to_skip,
2209 const char * suffix)
2211 tree old_decl = old_node->decl;
2212 struct cgraph_node *new_node = NULL;
2215 struct ipa_replace_map *map;
2217 #ifdef ENABLE_CHECKING
2219 gcc_assert (tree_versionable_function_p (old_decl));
2222 /* Make a new FUNCTION_DECL tree node */
2224 new_decl = copy_node (old_decl);
/* When arguments are dropped, build a decl with the reduced signature.  */
2226 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
2227 DECL_STRUCT_FUNCTION (new_decl) = NULL;
2229 /* Generate a new name for the new version. */
2230 DECL_NAME (new_decl) = clone_function_name (old_decl, suffix);
2231 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2232 SET_DECL_RTL (new_decl, NULL);
2234 new_node = cgraph_clone_node (old_node, new_decl, old_node->count,
2235 CGRAPH_FREQ_BASE, 0, false,
2237 /* Update the properties.
2238 Make clone visible only within this translation unit. Make sure
2239 that is not weak also.
2240 ??? We cannot use COMDAT linkage because there is no
2241 ABI support for this. */
2242 DECL_EXTERNAL (new_node->decl) = 0;
2243 if (DECL_ONE_ONLY (old_decl))
2244 DECL_SECTION_NAME (new_node->decl) = NULL;
2245 DECL_COMDAT_GROUP (new_node->decl) = 0;
2246 TREE_PUBLIC (new_node->decl) = 0;
2247 DECL_COMDAT (new_node->decl) = 0;
2248 DECL_WEAK (new_node->decl) = 0;
2249 new_node->clone.tree_map = tree_map;
2250 new_node->clone.args_to_skip = args_to_skip;
/* Record IPA references implied by the constant-replacement map.  */
2251 for (i = 0; VEC_iterate (ipa_replace_map_p, tree_map, i, map); i++)
2253 tree var = map->new_tree;
2256 if (TREE_CODE (var) != ADDR_EXPR)
2258 var = get_base_var (var);
2262 /* Record references of the future statement initializing the constant
2264 if (TREE_CODE (var) == FUNCTION_DECL)
2265 ipa_record_reference (new_node, NULL, cgraph_node (var),
2266 NULL, IPA_REF_ADDR, NULL);
2267 else if (TREE_CODE (var) == VAR_DECL)
2268 ipa_record_reference (new_node, NULL, NULL, varpool_node (var),
2269 IPA_REF_ADDR, NULL);
2272 new_node->clone.combined_args_to_skip = old_node->clone.combined_args_to_skip;
/* Combine the ancestor's skipped args with the newly skipped ones:
   NEWI indexes the already-reduced signature, OLDI the original one.  */
2273 else if (old_node->clone.combined_args_to_skip)
2275 int newi = 0, oldi = 0;
2277 bitmap new_args_to_skip = BITMAP_GGC_ALLOC ();
2278 struct cgraph_node *orig_node;
2279 for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
2281 for (arg = DECL_ARGUMENTS (orig_node->decl); arg; arg = TREE_CHAIN (arg), oldi++)
2283 if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
2285 bitmap_set_bit (new_args_to_skip, oldi);
2288 if (bitmap_bit_p (args_to_skip, newi))
2289 bitmap_set_bit (new_args_to_skip, oldi);
2292 new_node->clone.combined_args_to_skip = new_args_to_skip;
2295 new_node->clone.combined_args_to_skip = args_to_skip;
2296 new_node->local.externally_visible = 0;
2297 new_node->local.used_from_object_file = 0;
2298 new_node->local.local = 1;
2299 new_node->lowered = true;
2300 new_node->reachable = true;
2306 /* NODE is no longer nested function; update cgraph accordingly.
   Unlinks NODE from its origin's singly linked nested list.  */
2308 cgraph_unnest_node (struct cgraph_node *node)
2310 struct cgraph_node **node2 = &node->origin->nested;
2311 gcc_assert (node->origin);
2313 while (*node2 != node)
2314 node2 = &(*node2)->next_nested;
2315 *node2 = node->next_nested;
2316 node->origin = NULL;
2319 /* Return function availability. See cgraph.h for description of individual
   availability levels (ordered from least to most available).  */
2322 cgraph_function_body_availability (struct cgraph_node *node)
2324 enum availability avail;
2325 gcc_assert (cgraph_function_flags_ready);
2326 if (!node->analyzed)
2327 avail = AVAIL_NOT_AVAILABLE;
2328 else if (node->local.local)
2329 avail = AVAIL_LOCAL;
2330 else if (!node->local.externally_visible)
2331 avail = AVAIL_AVAILABLE;
2332 /* Inline functions are safe to be analyzed even if their symbol can
2333 be overwritten at runtime. It is not meaningful to enforce any sane
2334 behaviour on replacing inline function by different body. */
2335 else if (DECL_DECLARED_INLINE_P (node->decl))
2336 avail = AVAIL_AVAILABLE;
2338 /* If the function can be overwritten, return OVERWRITABLE. Take
2339 care at least of two notable extensions - the COMDAT functions
2340 used to share template instantiations in C++ (this is symmetric
2341 to code cp_cannot_inline_tree_fn and probably shall be shared and
2342 the inlinability hooks completely eliminated).
2344 ??? Does the C++ one definition rule allow us to always return
2345 AVAIL_AVAILABLE here? That would be good reason to preserve this
2348 else if (DECL_REPLACEABLE_P (node->decl) && !DECL_EXTERNAL (node->decl))
2349 avail = AVAIL_OVERWRITABLE;
2350 else avail = AVAIL_AVAILABLE;
2355 /* Add the function FNDECL to the call graph.
2356 Unlike cgraph_finalize_function, this function is intended to be used
2357 by middle end and allows insertion of new function at arbitrary point
2358 of compilation. The function can be either in high, low or SSA form
2361 The function is assumed to be reachable and have address taken (so no
2362 API breaking optimizations are performed on it).
2364 Main work done by this function is to enqueue the function for later
2365 processing to avoid need the passes to be re-entrant. */
2368 cgraph_add_new_function (tree fndecl, bool lowered)
2370 struct cgraph_node *node;
/* How much work is needed depends on how far compilation has progressed.  */
2371 switch (cgraph_state)
2373 case CGRAPH_STATE_CONSTRUCTION:
2374 /* Just enqueue function to be processed at nearest occurrence. */
2375 node = cgraph_node (fndecl);
2376 node->next_needed = cgraph_new_nodes;
2378 node->lowered = true;
2379 cgraph_new_nodes = node;
2382 case CGRAPH_STATE_IPA:
2383 case CGRAPH_STATE_IPA_SSA:
2384 case CGRAPH_STATE_EXPANSION:
2385 /* Bring the function into finalized state and enqueue for later
2386 analyzing and compilation. */
2387 node = cgraph_node (fndecl);
2388 node->local.local = false;
2389 node->local.finalized = true;
2390 node->reachable = node->needed = true;
2391 if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
/* Run lowering and early local passes now, inside FNDECL's cfun.  */
2393 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
2394 current_function_decl = fndecl;
2395 gimple_register_cfg_hooks ();
2396 tree_lowering_passes (fndecl);
2397 bitmap_obstack_initialize (NULL);
2398 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
2399 execute_pass_list (pass_early_local_passes.pass.sub);
2400 bitmap_obstack_release (NULL);
2402 current_function_decl = NULL;
2407 node->lowered = true;
2408 node->next_needed = cgraph_new_nodes;
2409 cgraph_new_nodes = node;
2412 case CGRAPH_STATE_FINISHED:
2413 /* At the very end of compilation we have to do all the work up
2415 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
2416 current_function_decl = fndecl;
2417 gimple_register_cfg_hooks ();
2419 tree_lowering_passes (fndecl);
2420 bitmap_obstack_initialize (NULL);
2421 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
2422 execute_pass_list (pass_early_local_passes.pass.sub);
2423 bitmap_obstack_release (NULL);
2424 tree_rest_of_compilation (fndecl);
2426 current_function_decl = NULL;
2430 /* Set a personality if required and we already passed EH lowering. */
2432 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
2433 == eh_personality_lang))
2434 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
2437 /* Return true if NODE can be made local for API change.
2438 Extern inline functions and C++ COMDAT functions can be made local
2439 at the expense of possible code size growth if function is used in multiple
2440 compilation units. */
2442 cgraph_node_can_be_local_p (struct cgraph_node *node)
/* Neither entry points nor address-taken functions may be localized.  */
2444 return (!node->needed && !node->address_taken
2445 && ((DECL_COMDAT (node->decl) && !node->same_comdat_group)
2446 || !node->local.externally_visible));
/* Make DECL local.  FIXME: We shouldn't need to mess with rtl this early,
   but other code such as notice_global_symbol generates rtl.  */
cgraph_make_decl_local (tree decl)
  /* A local variable must not be common; anything else reaching here must
     be a function.  */
  if (TREE_CODE (decl) == VAR_DECL)
    DECL_COMMON (decl) = 0;
  else gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  /* Drop COMDAT markers: a local symbol is never shared between
     translation units.  */
  if (DECL_COMDAT (decl))
    DECL_SECTION_NAME (decl) = 0;
  DECL_COMDAT (decl) = 0;
  DECL_COMDAT_GROUP (decl) = 0;
  /* Clear every property implying inter-unit linkage.  */
  DECL_WEAK (decl) = 0;
  DECL_EXTERNAL (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  /* NOTE(review): several lines are elided in this excerpt below; the
     DECL_RTL_SET_P test presumably bails out early when no rtl exists yet --
     confirm against the full source.  */
  if (!DECL_RTL_SET_P (decl))
  /* Update rtl flags.  */
  make_decl_rtl (decl);
  rtl = DECL_RTL (decl);
  symbol = XEXP (rtl, 0);
  /* Only a SYMBOL_REF mirrors decl flags; refresh its weak bit from the
     now-cleared DECL_WEAK.  */
  if (GET_CODE (symbol) != SYMBOL_REF)
  SYMBOL_REF_WEAK (symbol) = DECL_WEAK (decl);
2486 /* Bring NODE local. */
2488 cgraph_make_node_local (struct cgraph_node *node)
2490 gcc_assert (cgraph_node_can_be_local_p (node));
2491 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2493 struct cgraph_node *alias;
2494 cgraph_make_decl_local (node->decl);
2496 for (alias = node->same_body; alias; alias = alias->next)
2497 cgraph_make_decl_local (alias->decl);
2499 node->local.externally_visible = false;
2500 node->local.local = true;
2501 gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
2505 /* Set TREE_NOTHROW on NODE's decl and on same_body aliases of NODE
2506 if any to NOTHROW. */
2509 cgraph_set_nothrow_flag (struct cgraph_node *node, bool nothrow)
2511 struct cgraph_node *alias;
2512 TREE_NOTHROW (node->decl) = nothrow;
2513 for (alias = node->same_body; alias; alias = alias->next)
2514 TREE_NOTHROW (alias->decl) = nothrow;
2517 /* Set TREE_READONLY on NODE's decl and on same_body aliases of NODE
2518 if any to READONLY. */
2521 cgraph_set_readonly_flag (struct cgraph_node *node, bool readonly)
2523 struct cgraph_node *alias;
2524 TREE_READONLY (node->decl) = readonly;
2525 for (alias = node->same_body; alias; alias = alias->next)
2526 TREE_READONLY (alias->decl) = readonly;
2529 /* Set DECL_PURE_P on NODE's decl and on same_body aliases of NODE
2533 cgraph_set_pure_flag (struct cgraph_node *node, bool pure)
2535 struct cgraph_node *alias;
2536 DECL_PURE_P (node->decl) = pure;
2537 for (alias = node->same_body; alias; alias = alias->next)
2538 DECL_PURE_P (alias->decl) = pure;
2541 /* Set DECL_LOOPING_CONST_OR_PURE_P on NODE's decl and on
2542 same_body aliases of NODE if any to LOOPING_CONST_OR_PURE. */
2545 cgraph_set_looping_const_or_pure_flag (struct cgraph_node *node,
2546 bool looping_const_or_pure)
2548 struct cgraph_node *alias;
2549 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping_const_or_pure;
2550 for (alias = node->same_body; alias; alias = alias->next)
2551 DECL_LOOPING_CONST_OR_PURE_P (alias->decl) = looping_const_or_pure;
/* See if the frequency of NODE can be updated based on frequencies of its
   callers.
   NOTE(review): braces, `break;` and early `return;` lines are elided in
   this excerpt; comments below describe the visible logic only.  */
cgraph_propagate_frequency (struct cgraph_node *node)
  bool maybe_unlikely_executed = true, maybe_executed_once = true;
  struct cgraph_edge *edge;
  /* Demotion is only safe for local functions, where every caller is
     visible in this compilation unit.  */
  if (!node->local.local)
  gcc_assert (node->analyzed);
  /* Nothing to do for hot nodes or nodes already marked unlikely.  */
  if (node->frequency == NODE_FREQUENCY_HOT)
  if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Processing frequency %s\n", cgraph_node_name (node));
  /* Scan the callers; the loop can stop early once both candidate
     demotions have been disproved.  */
  for (edge = node->callers;
       edge && (maybe_unlikely_executed || maybe_executed_once);
       edge = edge->next_caller)
    /* Zero-frequency edges carry no information.  */
    if (!edge->frequency)
    switch (edge->caller->frequency)
      /* An unlikely-executed caller constrains nothing further.  */
      case NODE_FREQUENCY_UNLIKELY_EXECUTED:
      /* A caller executed once rules out "unlikely"; if the call site
         additionally sits inside a loop it also rules out
         "executed once".  */
      case NODE_FREQUENCY_EXECUTED_ONCE:
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " Called by %s that is executed once\n", cgraph_node_name (node));
      maybe_unlikely_executed = false;
      if (edge->loop_nest)
      maybe_executed_once = false;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " Called in loop\n");
      /* A normal or hot caller rules out both demotions at once.  */
      case NODE_FREQUENCY_HOT:
      case NODE_FREQUENCY_NORMAL:
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " Called by %s that is normal or hot\n", cgraph_node_name (node));
      maybe_unlikely_executed = false;
      maybe_executed_once = false;
  /* Every caller with nonzero frequency was unlikely executed:
     demote NODE to unlikely executed.  */
  if (maybe_unlikely_executed)
    node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
    fprintf (dump_file, "Node %s promoted to unlikely executed.\n", cgraph_node_name (node));
  /* Every caller runs at most once and calls NODE outside of loops:
     demote NODE to executed once.  */
  if (maybe_executed_once && node->frequency != NODE_FREQUENCY_EXECUTED_ONCE)
    node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
    fprintf (dump_file, "Node %s promoted to executed once.\n", cgraph_node_name (node))
2617 /* Return true when NODE can not return or throw and thus
2618 it is safe to ignore its side effects for IPA analysis. */
2621 cgraph_node_cannot_return (struct cgraph_node *node)
2623 int flags = flags_from_decl_or_type (node->decl);
2624 if (!flag_exceptions)
2625 return (flags & ECF_NORETURN) != 0;
2627 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2628 == (ECF_NORETURN | ECF_NOTHROW));
2631 /* Return true when call of E can not lead to return from caller
2632 and thus it is safe to ignore its side effects for IPA analysis
2633 when computing side effects of the caller.
2634 FIXME: We could actually mark all edges that have no reaching
2635 patch to EXIT_BLOCK_PTR or throw to get better results. */
2637 cgraph_edge_cannot_lead_to_return (struct cgraph_edge *e)
2639 if (cgraph_node_cannot_return (e->caller))
2641 if (e->indirect_unknown_callee)
2643 int flags = e->indirect_info->ecf_flags;
2644 if (!flag_exceptions)
2645 return (flags & ECF_NORETURN) != 0;
2647 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2648 == (ECF_NORETURN | ECF_NOTHROW));
2651 return cgraph_node_cannot_return (e->callee);
/* Return true when function NODE can be expected to be removed
   from the program when direct calls in this compilation unit are removed.

   As a special case COMDAT functions are
   cgraph_can_remove_if_no_direct_calls_p while they are not
   cgraph_only_called_directly_p (it is possible they are called from other
   units).

   This function behaves as cgraph_only_called_directly_p because eliminating
   all uses of a COMDAT function does not necessarily make it disappear from
   the program unless we are compiling the whole program or we do LTO.  In that
   case we know we win since dynamic linking will not really discard the
   linkonce section.  */
cgraph_will_be_removed_from_program_if_no_direct_calls (struct cgraph_node *node)
  /* A symbol referenced from an already-linked object file can never be
     removed.  NOTE(review): the branch body is elided in this excerpt.  */
  if (node->local.used_from_object_file)
  /* Outside whole-program/LTO mode COMDATs may still be referenced from
     other units, so require that all calls are direct.  */
  if (!in_lto_p && !flag_whole_program)
    return cgraph_only_called_directly_p (node);
  return cgraph_can_remove_if_no_direct_calls_p (node);
2679 #include "gt-cgraph.h"