1 /* Callgraph handling code.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file contains basic routines manipulating call graph
26 The call-graph is a data structure designed for intra-procedural optimization
27 but it is also used in non-unit-at-a-time compilation to allow easier code
30 The call-graph consists of nodes and edges represented via linked lists.
31 Each function (external or not) corresponds to the unique node.
33 The mapping from declarations to call-graph nodes is done using hash table
34 based on DECL_UID. The call-graph nodes are created lazily using
35 cgraph_node function when called for unknown declaration.
37 The callgraph at the moment does not represent indirect calls or calls
38 from other compilation unit. Flag NEEDED is set for each node that may
39 be accessed in such an invisible way and it shall be considered an
40 entry point to the callgraph.
42 Interprocedural information:
44 Callgraph is the place to store data needed for interprocedural optimization.
45 All data structures are divided into three components: local_info that
46 is produced while analyzing the function, global_info that is result
47 of global walking of the callgraph on the end of compilation and
48 rtl_info used by RTL backend to propagate data from already compiled
49 functions to their callers.
53 The function inlining information is decided in advance and maintained
54 in the callgraph as so called inline plan.
55 For each inlined call, the callee's node is cloned to represent the
56 new function copy produced by inliner.
57 Each inlined call gets a unique corresponding clone node of the callee
58 and the data structure is updated while inlining is performed, so
59 the clones are eliminated and their callee edges redirected to the
62 Each edge has "inline_failed" field. When the field is set to NULL,
63 the call will be inlined. When it is non-NULL it contains a reason
64 why inlining wasn't performed. */
68 #include "coretypes.h"
71 #include "tree-inline.h"
72 #include "langhooks.h"
79 #include "basic-block.h"
84 #include "tree-dump.h"
85 #include "tree-flow.h"
86 #include "value-prof.h"
88 static void cgraph_node_remove_callers (struct cgraph_node *node);
89 static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
90 static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
92 /* Hash table used to convert declarations into nodes. */
93 static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;
94 /* Hash table used to convert assembler names into nodes. */
95 static GTY((param_is (struct cgraph_node))) htab_t assembler_name_hash;
97 /* The linked list of cgraph nodes. */
98 struct cgraph_node *cgraph_nodes;
100 /* Queue of cgraph nodes scheduled to be lowered. */
101 struct cgraph_node *cgraph_nodes_queue;
103 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
104 secondary queue used during optimization to accommodate passes that
105 may generate new functions that need to be optimized and expanded. */
106 struct cgraph_node *cgraph_new_nodes;
108 /* Number of nodes in existence. */
111 /* Maximal uid used in cgraph nodes. */
114 /* Maximal uid used in cgraph edges. */
115 int cgraph_edge_max_uid;
117 /* Maximal pid used for profiling */
120 /* Set when whole unit has been analyzed so we can access global info. */
121 bool cgraph_global_info_ready = false;
123 /* What state callgraph is in right now. */
124 enum cgraph_state cgraph_state = CGRAPH_STATE_CONSTRUCTION;
126 /* Set when the cgraph is fully build and the basic flags are computed. */
127 bool cgraph_function_flags_ready = false;
129 /* Linked list of cgraph asm nodes. */
130 struct cgraph_asm_node *cgraph_asm_nodes;
132 /* Last node in cgraph_asm_nodes. */
133 static GTY(()) struct cgraph_asm_node *cgraph_asm_last_node;
135 /* The order index of the next cgraph node to be created. This is
136 used so that we can sort the cgraph nodes in order by when we saw
137 them, to support -fno-toplevel-reorder. */
140 /* List of hooks trigerred on cgraph_edge events. */
141 struct cgraph_edge_hook_list {
142 cgraph_edge_hook hook;
144 struct cgraph_edge_hook_list *next;
147 /* List of hooks trigerred on cgraph_node events. */
148 struct cgraph_node_hook_list {
149 cgraph_node_hook hook;
151 struct cgraph_node_hook_list *next;
154 /* List of hooks trigerred on events involving two cgraph_edges. */
155 struct cgraph_2edge_hook_list {
156 cgraph_2edge_hook hook;
158 struct cgraph_2edge_hook_list *next;
161 /* List of hooks trigerred on events involving two cgraph_nodes. */
162 struct cgraph_2node_hook_list {
163 cgraph_2node_hook hook;
165 struct cgraph_2node_hook_list *next;
168 /* List of hooks triggered when an edge is removed. */
169 struct cgraph_edge_hook_list *first_cgraph_edge_removal_hook;
170 /* List of hooks triggered when a node is removed. */
171 struct cgraph_node_hook_list *first_cgraph_node_removal_hook;
172 /* List of hooks triggered when an edge is duplicated. */
173 struct cgraph_2edge_hook_list *first_cgraph_edge_duplicated_hook;
174 /* List of hooks triggered when a node is duplicated. */
175 struct cgraph_2node_hook_list *first_cgraph_node_duplicated_hook;
176 /* List of hooks triggered when an function is inserted. */
177 struct cgraph_node_hook_list *first_cgraph_function_insertion_hook;
179 /* Head of a linked list of unused (freed) call graph nodes.
180 Do not GTY((delete)) this list so UIDs gets reliably recycled. */
181 static GTY(()) struct cgraph_node *free_nodes;
182 /* Head of a linked list of unused (freed) call graph edges.
183 Do not GTY((delete)) this list so UIDs gets reliably recycled. */
184 static GTY(()) struct cgraph_edge *free_edges;
186 /* Macros to access the next item in the list of free cgraph nodes and
188 #define NEXT_FREE_NODE(NODE) (NODE)->next
189 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
191 /* Register HOOK to be called with DATA on each removed edge. */
192 struct cgraph_edge_hook_list *
193 cgraph_add_edge_removal_hook (cgraph_edge_hook hook, void *data)
195 struct cgraph_edge_hook_list *entry;
196 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
198 entry = (struct cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
208 /* Remove ENTRY from the list of hooks called on removing edges. */
210 cgraph_remove_edge_removal_hook (struct cgraph_edge_hook_list *entry)
212 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
214 while (*ptr != entry)
220 /* Call all edge removal hooks. */
222 cgraph_call_edge_removal_hooks (struct cgraph_edge *e)
224 struct cgraph_edge_hook_list *entry = first_cgraph_edge_removal_hook;
227 entry->hook (e, entry->data);
232 /* Register HOOK to be called with DATA on each removed node. */
233 struct cgraph_node_hook_list *
234 cgraph_add_node_removal_hook (cgraph_node_hook hook, void *data)
236 struct cgraph_node_hook_list *entry;
237 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
239 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
249 /* Remove ENTRY from the list of hooks called on removing nodes. */
251 cgraph_remove_node_removal_hook (struct cgraph_node_hook_list *entry)
253 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
255 while (*ptr != entry)
261 /* Call all node removal hooks. */
263 cgraph_call_node_removal_hooks (struct cgraph_node *node)
265 struct cgraph_node_hook_list *entry = first_cgraph_node_removal_hook;
268 entry->hook (node, entry->data);
273 /* Register HOOK to be called with DATA on each removed node. */
274 struct cgraph_node_hook_list *
275 cgraph_add_function_insertion_hook (cgraph_node_hook hook, void *data)
277 struct cgraph_node_hook_list *entry;
278 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
280 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
290 /* Remove ENTRY from the list of hooks called on removing nodes. */
292 cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *entry)
294 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
296 while (*ptr != entry)
302 /* Call all node removal hooks. */
304 cgraph_call_function_insertion_hooks (struct cgraph_node *node)
306 struct cgraph_node_hook_list *entry = first_cgraph_function_insertion_hook;
309 entry->hook (node, entry->data);
314 /* Register HOOK to be called with DATA on each duplicated edge. */
315 struct cgraph_2edge_hook_list *
316 cgraph_add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
318 struct cgraph_2edge_hook_list *entry;
319 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
321 entry = (struct cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
331 /* Remove ENTRY from the list of hooks called on duplicating edges. */
333 cgraph_remove_edge_duplication_hook (struct cgraph_2edge_hook_list *entry)
335 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
337 while (*ptr != entry)
343 /* Call all edge duplication hooks. */
345 cgraph_call_edge_duplication_hooks (struct cgraph_edge *cs1,
346 struct cgraph_edge *cs2)
348 struct cgraph_2edge_hook_list *entry = first_cgraph_edge_duplicated_hook;
351 entry->hook (cs1, cs2, entry->data);
356 /* Register HOOK to be called with DATA on each duplicated node. */
357 struct cgraph_2node_hook_list *
358 cgraph_add_node_duplication_hook (cgraph_2node_hook hook, void *data)
360 struct cgraph_2node_hook_list *entry;
361 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
363 entry = (struct cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
373 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
375 cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list *entry)
377 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
379 while (*ptr != entry)
385 /* Call all node duplication hooks. */
387 cgraph_call_node_duplication_hooks (struct cgraph_node *node1,
388 struct cgraph_node *node2)
390 struct cgraph_2node_hook_list *entry = first_cgraph_node_duplicated_hook;
393 entry->hook (node1, node2, entry->data);
398 /* Returns a hash code for P. */
401 hash_node (const void *p)
403 const struct cgraph_node *n = (const struct cgraph_node *) p;
404 return (hashval_t) DECL_UID (n->decl);
407 /* Returns nonzero if P1 and P2 are equal. */
410 eq_node (const void *p1, const void *p2)
412 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
413 const struct cgraph_node *n2 = (const struct cgraph_node *) p2;
414 return DECL_UID (n1->decl) == DECL_UID (n2->decl);
417 /* Allocate new callgraph node and insert it into basic data structures. */
419 static struct cgraph_node *
420 cgraph_create_node (void)
422 struct cgraph_node *node;
427 free_nodes = NEXT_FREE_NODE (node);
431 node = GGC_CNEW (struct cgraph_node);
432 node->uid = cgraph_max_uid++;
435 node->next = cgraph_nodes;
437 node->order = cgraph_order++;
439 cgraph_nodes->previous = node;
440 node->previous = NULL;
441 node->global.estimated_growth = INT_MIN;
447 /* Return cgraph node assigned to DECL. Create new one when needed. */
450 cgraph_node (tree decl)
452 struct cgraph_node key, *node, **slot;
454 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
457 cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);
461 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
469 node = cgraph_create_node ();
472 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
474 node->origin = cgraph_node (DECL_CONTEXT (decl));
475 node->next_nested = node->origin->nested;
476 node->origin->nested = node;
478 if (assembler_name_hash)
481 tree name = DECL_ASSEMBLER_NAME (decl);
483 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
484 decl_assembler_name_hash (name),
486 /* We can have multiple declarations with same assembler name. For C++
487 it is __builtin_strlen and strlen, for instance. Do we need to
488 record them all? Original implementation marked just first one
489 so lets hope for the best. */
496 /* Insert already constructed node into hashtable. */
499 cgraph_insert_node_to_hashtable (struct cgraph_node *node)
501 struct cgraph_node **slot;
503 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, node, INSERT);
509 /* Returns a hash code for P. */
512 hash_node_by_assembler_name (const void *p)
514 const struct cgraph_node *n = (const struct cgraph_node *) p;
515 return (hashval_t) decl_assembler_name_hash (DECL_ASSEMBLER_NAME (n->decl));
518 /* Returns nonzero if P1 and P2 are equal. */
521 eq_assembler_name (const void *p1, const void *p2)
523 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
524 const_tree name = (const_tree)p2;
525 return (decl_assembler_name_equal (n1->decl, name));
528 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
529 Return NULL if there's no such node. */
532 cgraph_node_for_asm (tree asmname)
534 struct cgraph_node *node;
537 if (!assembler_name_hash)
539 assembler_name_hash =
540 htab_create_ggc (10, hash_node_by_assembler_name, eq_assembler_name,
542 for (node = cgraph_nodes; node; node = node->next)
543 if (!node->global.inlined_to)
545 tree name = DECL_ASSEMBLER_NAME (node->decl);
546 slot = htab_find_slot_with_hash (assembler_name_hash, name,
547 decl_assembler_name_hash (name),
549 /* We can have multiple declarations with same assembler name. For C++
550 it is __builtin_strlen and strlen, for instance. Do we need to
551 record them all? Original implementation marked just first one
552 so lets hope for the best. */
559 slot = htab_find_slot_with_hash (assembler_name_hash, asmname,
560 decl_assembler_name_hash (asmname),
564 return (struct cgraph_node *) *slot;
568 /* Returns a hash value for X (which really is a die_struct). */
571 edge_hash (const void *x)
573 return htab_hash_pointer (((const struct cgraph_edge *) x)->call_stmt);
576 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
579 edge_eq (const void *x, const void *y)
581 return ((const struct cgraph_edge *) x)->call_stmt == y;
585 /* Return the callgraph edge representing the GIMPLE_CALL statement
589 cgraph_edge (struct cgraph_node *node, gimple call_stmt)
591 struct cgraph_edge *e, *e2;
594 if (node->call_site_hash)
595 return (struct cgraph_edge *)
596 htab_find_with_hash (node->call_site_hash, call_stmt,
597 htab_hash_pointer (call_stmt));
599 /* This loop may turn out to be performance problem. In such case adding
600 hashtables into call nodes with very many edges is probably best
601 solution. It is not good idea to add pointer into CALL_EXPR itself
602 because we want to make possible having multiple cgraph nodes representing
603 different clones of the same body before the body is actually cloned. */
604 for (e = node->callees; e; e= e->next_callee)
606 if (e->call_stmt == call_stmt)
613 node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
614 for (e2 = node->callees; e2; e2 = e2->next_callee)
617 slot = htab_find_slot_with_hash (node->call_site_hash,
619 htab_hash_pointer (e2->call_stmt),
630 /* Change field call_stmt of edge E to NEW_STMT. */
633 cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt)
635 if (e->caller->call_site_hash)
637 htab_remove_elt_with_hash (e->caller->call_site_hash,
639 htab_hash_pointer (e->call_stmt));
641 e->call_stmt = new_stmt;
642 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
643 e->can_throw_external = stmt_can_throw_external (new_stmt);
645 if (e->caller->call_site_hash)
648 slot = htab_find_slot_with_hash (e->caller->call_site_hash,
651 (e->call_stmt), INSERT);
657 /* Like cgraph_set_call_stmt but walk the clone tree and update all clones sharing
658 same function body. */
661 cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
662 gimple old_stmt, gimple new_stmt)
664 struct cgraph_node *node;
665 struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);
668 cgraph_set_call_stmt (edge, new_stmt);
670 for (node = orig->clones; node != orig;)
672 struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
674 cgraph_set_call_stmt (edge, new_stmt);
677 else if (node->next_sibling_clone)
678 node = node->next_sibling_clone;
681 while (node != orig && !node->next_sibling_clone)
682 node = node->clone_of;
684 node = node->next_sibling_clone;
689 /* Like cgraph_create_edge walk the clone tree and update all clones sharing
692 TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
693 frequencies of the clones.
697 cgraph_create_edge_including_clones (struct cgraph_node *orig, struct cgraph_node *callee,
698 gimple stmt, gcov_type count, int freq,
700 cgraph_inline_failed_t reason)
702 struct cgraph_node *node;
704 cgraph_create_edge (orig, callee, stmt, count, freq, loop_depth)->inline_failed =
708 for (node = orig->clones; node != orig;)
710 /* It is possible that we already constant propagated into the clone
711 and turned indirect call into dirrect call. */
712 if (!cgraph_edge (node, stmt))
713 cgraph_create_edge (node, callee, stmt, count, freq,
714 loop_depth)->inline_failed = reason;
718 else if (node->next_sibling_clone)
719 node = node->next_sibling_clone;
722 while (node != orig && !node->next_sibling_clone)
723 node = node->clone_of;
725 node = node->next_sibling_clone;
730 /* Give initial reasons why inlining would fail on EDGE. This gets either
731 nullified or usually overwritten by more precise reasons later. */
734 initialize_inline_failed (struct cgraph_edge *e)
736 struct cgraph_node *callee = e->callee;
738 if (!callee->analyzed)
739 e->inline_failed = CIF_BODY_NOT_AVAILABLE;
740 else if (callee->local.redefined_extern_inline)
741 e->inline_failed = CIF_REDEFINED_EXTERN_INLINE;
742 else if (!callee->local.inlinable)
743 e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
744 else if (gimple_call_cannot_inline_p (e->call_stmt))
745 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
747 e->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
750 /* Create edge from CALLER to CALLEE in the cgraph. */
753 cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
754 gimple call_stmt, gcov_type count, int freq, int nest)
756 struct cgraph_edge *edge;
758 #ifdef ENABLE_CHECKING
759 /* This is rather pricely check possibly trigerring construction of call stmt
761 gcc_assert (!cgraph_edge (caller, call_stmt));
764 gcc_assert (is_gimple_call (call_stmt));
769 free_edges = NEXT_FREE_EDGE (edge);
773 edge = GGC_NEW (struct cgraph_edge);
774 edge->uid = cgraph_edge_max_uid++;
779 edge->caller = caller;
780 edge->callee = callee;
781 edge->call_stmt = call_stmt;
782 push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
783 edge->can_throw_external = stmt_can_throw_external (call_stmt);
785 edge->prev_caller = NULL;
786 edge->next_caller = callee->callers;
788 callee->callers->prev_caller = edge;
789 edge->prev_callee = NULL;
790 edge->next_callee = caller->callees;
792 caller->callees->prev_callee = edge;
793 caller->callees = edge;
794 callee->callers = edge;
796 gcc_assert (count >= 0);
797 edge->frequency = freq;
798 gcc_assert (freq >= 0);
799 gcc_assert (freq <= CGRAPH_FREQ_MAX);
800 edge->loop_nest = nest;
801 edge->indirect_call = 0;
802 if (caller->call_site_hash)
805 slot = htab_find_slot_with_hash (caller->call_site_hash,
814 initialize_inline_failed (edge);
819 /* Remove the edge E from the list of the callers of the callee. */
822 cgraph_edge_remove_callee (struct cgraph_edge *e)
825 e->prev_caller->next_caller = e->next_caller;
827 e->next_caller->prev_caller = e->prev_caller;
829 e->callee->callers = e->next_caller;
832 /* Remove the edge E from the list of the callees of the caller. */
835 cgraph_edge_remove_caller (struct cgraph_edge *e)
838 e->prev_callee->next_callee = e->next_callee;
840 e->next_callee->prev_callee = e->prev_callee;
842 e->caller->callees = e->next_callee;
843 if (e->caller->call_site_hash)
844 htab_remove_elt_with_hash (e->caller->call_site_hash,
846 htab_hash_pointer (e->call_stmt));
849 /* Put the edge onto the free list. */
852 cgraph_free_edge (struct cgraph_edge *e)
856 /* Clear out the edge so we do not dangle pointers. */
857 memset (e, 0, sizeof (*e));
859 NEXT_FREE_EDGE (e) = free_edges;
863 /* Remove the edge E in the cgraph. */
866 cgraph_remove_edge (struct cgraph_edge *e)
868 /* Call all edge removal hooks. */
869 cgraph_call_edge_removal_hooks (e);
871 /* Remove from callers list of the callee. */
872 cgraph_edge_remove_callee (e);
874 /* Remove from callees list of the callers. */
875 cgraph_edge_remove_caller (e);
877 /* Put the edge onto the free list. */
878 cgraph_free_edge (e);
881 /* Redirect callee of E to N. The function does not update underlying
885 cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
887 /* Remove from callers list of the current callee. */
888 cgraph_edge_remove_callee (e);
890 /* Insert to callers list of the new callee. */
891 e->prev_caller = NULL;
893 n->callers->prev_caller = e;
894 e->next_caller = n->callers;
900 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
901 OLD_STMT changed into NEW_STMT. */
904 cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
905 gimple old_stmt, gimple new_stmt)
907 tree new_call = (is_gimple_call (new_stmt)) ? gimple_call_fn (new_stmt) : 0;
908 tree old_call = (is_gimple_call (old_stmt)) ? gimple_call_fn (old_stmt) : 0;
910 if (old_call != new_call)
912 struct cgraph_edge *e = cgraph_edge (node, old_stmt);
913 struct cgraph_edge *ne = NULL;
918 gcov_type count = e->count;
919 int frequency = e->frequency;
920 int loop_nest = e->loop_nest;
922 cgraph_remove_edge (e);
925 new_decl = gimple_call_fndecl (new_stmt);
928 ne = cgraph_create_edge (node, cgraph_node (new_decl),
929 new_stmt, count, frequency,
931 gcc_assert (ne->inline_failed);
936 else if (old_stmt != new_stmt)
938 struct cgraph_edge *e = cgraph_edge (node, old_stmt);
941 cgraph_set_call_stmt (e, new_stmt);
945 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
946 OLD_STMT changed into NEW_STMT. */
949 cgraph_update_edges_for_call_stmt (gimple old_stmt, gimple new_stmt)
951 struct cgraph_node *orig = cgraph_node (cfun->decl);
952 struct cgraph_node *node;
954 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, new_stmt);
956 for (node = orig->clones; node != orig;)
958 cgraph_update_edges_for_call_stmt_node (node, old_stmt, new_stmt);
961 else if (node->next_sibling_clone)
962 node = node->next_sibling_clone;
965 while (node != orig && !node->next_sibling_clone)
966 node = node->clone_of;
968 node = node->next_sibling_clone;
974 /* Remove all callees from the node. */
977 cgraph_node_remove_callees (struct cgraph_node *node)
979 struct cgraph_edge *e, *f;
981 /* It is sufficient to remove the edges from the lists of callers of
982 the callees. The callee list of the node can be zapped with one
984 for (e = node->callees; e; e = f)
987 cgraph_call_edge_removal_hooks (e);
988 cgraph_edge_remove_callee (e);
989 cgraph_free_edge (e);
991 node->callees = NULL;
992 if (node->call_site_hash)
994 htab_delete (node->call_site_hash);
995 node->call_site_hash = NULL;
999 /* Remove all callers from the node. */
1002 cgraph_node_remove_callers (struct cgraph_node *node)
1004 struct cgraph_edge *e, *f;
1006 /* It is sufficient to remove the edges from the lists of callees of
1007 the callers. The caller list of the node can be zapped with one
1009 for (e = node->callers; e; e = f)
1012 cgraph_call_edge_removal_hooks (e);
1013 cgraph_edge_remove_caller (e);
1014 cgraph_free_edge (e);
1016 node->callers = NULL;
1019 /* Release memory used to represent body of function NODE. */
1022 cgraph_release_function_body (struct cgraph_node *node)
1024 if (DECL_STRUCT_FUNCTION (node->decl))
1026 tree old_decl = current_function_decl;
1027 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1028 if (cfun->gimple_df)
1030 current_function_decl = node->decl;
1032 delete_tree_cfg_annotations ();
1034 current_function_decl = old_decl;
1038 gcc_assert (dom_computed[0] == DOM_NONE);
1039 gcc_assert (dom_computed[1] == DOM_NONE);
1042 if (cfun->value_histograms)
1044 gcc_assert (!current_loops);
1046 gimple_set_body (node->decl, NULL);
1047 VEC_free (ipa_opt_pass, heap,
1048 DECL_STRUCT_FUNCTION (node->decl)->ipa_transforms_to_apply);
1049 /* Struct function hangs a lot of data that would leak if we didn't
1050 removed all pointers to it. */
1051 ggc_free (DECL_STRUCT_FUNCTION (node->decl));
1052 DECL_STRUCT_FUNCTION (node->decl) = NULL;
1054 DECL_SAVED_TREE (node->decl) = NULL;
1055 /* If the node is abstract and needed, then do not clear DECL_INITIAL
1056 of its associated function function declaration because it's
1057 needed to emit debug info later. */
1058 if (!node->abstract_and_needed)
1059 DECL_INITIAL (node->decl) = error_mark_node;
1062 /* Remove the node from cgraph. */
1065 cgraph_remove_node (struct cgraph_node *node)
1068 bool kill_body = false;
1069 struct cgraph_node *n;
1070 int uid = node->uid;
1072 cgraph_call_node_removal_hooks (node);
1073 cgraph_node_remove_callers (node);
1074 cgraph_node_remove_callees (node);
1076 /* Incremental inlining access removed nodes stored in the postorder list.
1078 node->needed = node->reachable = false;
1079 for (n = node->nested; n; n = n->next_nested)
1081 node->nested = NULL;
1084 struct cgraph_node **node2 = &node->origin->nested;
1086 while (*node2 != node)
1087 node2 = &(*node2)->next_nested;
1088 *node2 = node->next_nested;
1091 node->previous->next = node->next;
1093 cgraph_nodes = node->next;
1095 node->next->previous = node->previous;
1097 node->previous = NULL;
1098 slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
1101 struct cgraph_node *next_inline_clone;
1103 for (next_inline_clone = node->clones;
1104 next_inline_clone && next_inline_clone->decl != node->decl;
1105 next_inline_clone = next_inline_clone->next_sibling_clone)
1108 /* If there is inline clone of the node being removed, we need
1109 to put it into the position of removed node and reorganize all
1110 other clones to be based on it. */
1111 if (next_inline_clone)
1113 struct cgraph_node *n;
1114 struct cgraph_node *new_clones;
1116 *slot = next_inline_clone;
1118 /* Unlink inline clone from the list of clones of removed node. */
1119 if (next_inline_clone->next_sibling_clone)
1120 next_inline_clone->next_sibling_clone->prev_sibling_clone
1121 = next_inline_clone->prev_sibling_clone;
1122 if (next_inline_clone->prev_sibling_clone)
1124 next_inline_clone->prev_sibling_clone->next_sibling_clone
1125 = next_inline_clone->next_sibling_clone;
1128 node->clones = next_inline_clone->next_sibling_clone;
1130 new_clones = node->clones;
1131 node->clones = NULL;
1133 /* Copy clone info. */
1134 next_inline_clone->clone = node->clone;
1136 /* Now place it into clone tree at same level at NODE. */
1137 next_inline_clone->clone_of = node->clone_of;
1138 next_inline_clone->prev_sibling_clone = NULL;
1139 next_inline_clone->next_sibling_clone = NULL;
1142 next_inline_clone->next_sibling_clone = node->clone_of->clones;
1143 node->clone_of->clones = next_inline_clone;
1146 /* Merge the clone list. */
1149 if (!next_inline_clone->clones)
1150 next_inline_clone->clones = new_clones;
1153 n = next_inline_clone->clones;
1154 while (n->next_sibling_clone)
1155 n = n->next_sibling_clone;
1156 n->next_sibling_clone = new_clones;
1157 new_clones->prev_sibling_clone = n;
1161 /* Update clone_of pointers. */
1165 n->clone_of = next_inline_clone;
1166 n = n->next_sibling_clone;
1171 htab_clear_slot (cgraph_hash, slot);
1177 gcc_assert (node->clone_of);
1178 if (node->prev_sibling_clone)
1179 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1180 else if (node->clone_of)
1181 node->clone_of->clones = node->next_sibling_clone;
1182 if (node->next_sibling_clone)
1183 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1186 struct cgraph_node *n;
1188 for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
1189 n->clone_of = node->clone_of;
1190 n->clone_of = node->clone_of;
1191 n->next_sibling_clone = node->clone_of->clones;
1192 if (node->clone_of->clones)
1193 node->clone_of->clones->prev_sibling_clone = n;
1194 node->clone_of->clones = node->clones;
1197 /* While all the clones are removed after being proceeded, the function
1198 itself is kept in the cgraph even after it is compiled. Check whether
1199 we are done with this body and reclaim it proactively if this is the case.
1201 if (!kill_body && *slot)
1203 struct cgraph_node *n = (struct cgraph_node *) *slot;
1204 if (!n->clones && !n->clone_of && !n->global.inlined_to
1205 && (cgraph_global_info_ready
1206 && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl))))
1209 if (assembler_name_hash)
1211 tree name = DECL_ASSEMBLER_NAME (node->decl);
1212 slot = htab_find_slot_with_hash (assembler_name_hash, name,
1213 decl_assembler_name_hash (name),
1215 /* Inline clones are not hashed. */
1216 if (slot && *slot == node)
1217 htab_clear_slot (assembler_name_hash, slot);
1221 cgraph_release_function_body (node);
1223 if (node->call_site_hash)
1225 htab_delete (node->call_site_hash);
1226 node->call_site_hash = NULL;
1230 /* Clear out the node to NULL all pointers and add the node to the free
1232 memset (node, 0, sizeof(*node));
1234 NEXT_FREE_NODE (node) = free_nodes;
1238 /* Remove the node from cgraph. */
1241 cgraph_remove_node_and_inline_clones (struct cgraph_node *node)
1243 struct cgraph_edge *e, *next;
1244 for (e = node->callees; e; e = next)
1246 next = e->next_callee;
1247 if (!e->inline_failed)
1248 cgraph_remove_node_and_inline_clones (e->callee);
1250 cgraph_remove_node (node);
1253 /* Notify finalize_compilation_unit that given node is reachable. */
1256 cgraph_mark_reachable_node (struct cgraph_node *node)
1258 if (!node->reachable && node->local.finalized)
1260 notice_global_symbol (node->decl);
1261 node->reachable = 1;
1262 gcc_assert (!cgraph_global_info_ready);
1264 node->next_needed = cgraph_nodes_queue;
1265 cgraph_nodes_queue = node;
1269 /* Likewise indicate that a node is needed, i.e. reachable via some
1273 cgraph_mark_needed_node (struct cgraph_node *node)
1276 cgraph_mark_reachable_node (node);
1279 /* Return local info for the compiled function. */
1281 struct cgraph_local_info *
1282 cgraph_local_info (tree decl)
1284 struct cgraph_node *node;
1286 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1287 node = cgraph_node (decl);
1288 return &node->local;
1291 /* Return local info for the compiled function. */
1293 struct cgraph_global_info *
1294 cgraph_global_info (tree decl)
1296 struct cgraph_node *node;
1298 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
1299 node = cgraph_node (decl);
1300 return &node->global;
1303 /* Return local info for the compiled function. */
1305 struct cgraph_rtl_info *
1306 cgraph_rtl_info (tree decl)
1308 struct cgraph_node *node;
1310 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1311 node = cgraph_node (decl);
1312 if (decl != current_function_decl
1313 && !TREE_ASM_WRITTEN (node->decl))
1318 /* Return a string describing the failure REASON. */
1321 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
/* Build the message table once: each DEFCIFCODE entry in cif-code.def
   expands to its human-readable string, indexed by its CIF code value.  */
1324 #define DEFCIFCODE(code, string) string,
1326 static const char *cif_string_table[CIF_N_REASONS] = {
1327 #include "cif-code.def"
1330 /* Signedness of an enum type is implementation defined, so cast it
1331 to unsigned before testing. */
1332 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1333 return cif_string_table[reason];
1336 /* Return name of the node used in debug output. */
1338 cgraph_node_name (struct cgraph_node *node)
1340 return lang_hooks.decl_printable_name (node->decl, 2);
1343 /* Names used to print out the availability enum. */
/* NOTE(review): entry order must match enum availability
   (AVAIL_UNSET .. AVAIL_LOCAL) since it is indexed by
   cgraph_function_body_availability () — verify against cgraph.h.  */
1344 const char * const cgraph_availability_names[] =
1345 {"unset", "not_available", "overwritable", "available", "local"};
1348 /* Dump call graph node NODE to file F. */
/* NOTE(review): this excerpt is missing several interior lines of the
   function (braces, a few fprintf arguments such as edge->caller->uid,
   and some condition lines); the code below is kept byte-identical.  */
1351 dump_cgraph_node (FILE *f, struct cgraph_node *node)
1353 struct cgraph_edge *edge;
/* Header: name/uid(pid) and node address.  */
1354 fprintf (f, "%s/%i(%i) [%p]:", cgraph_node_name (node), node->uid,
1355 node->pid, (void *) node);
1356 if (node->global.inlined_to)
1357 fprintf (f, " (inline copy in %s/%i)",
1358 cgraph_node_name (node->global.inlined_to),
1359 node->global.inlined_to->uid);
1361 fprintf (f, " (clone of %s/%i)",
1362 cgraph_node_name (node->clone_of),
1363 node->clone_of->uid);
1364 if (cgraph_function_flags_ready)
1365 fprintf (f, " availability:%s",
1366 cgraph_availability_names [cgraph_function_body_availability (node)]);
/* Profile and size summaries.  */
1368 fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
1369 (HOST_WIDEST_INT)node->count);
1370 if (node->local.inline_summary.self_insns)
1371 fprintf (f, " %i insns", node->local.inline_summary.self_insns)
1372 if (node->global.insns && node->global.insns
1373 != node->local.inline_summary.self_insns)
1374 fprintf (f, " (%i after inlining)", node->global.insns);
1375 if (node->local.inline_summary.estimated_self_stack_size)
1376 fprintf (f, " %i bytes stack usage", (int)node->local.inline_summary.estimated_self_stack_size);
1377 if (node->global.estimated_stack_size != node->local.inline_summary.estimated_self_stack_size)
1378 fprintf (f, " %i bytes after inlining", (int)node->global.estimated_stack_size);
1380 fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
/* Status flags of the node.  */
1382 fprintf (f, " needed");
1383 else if (node->reachable)
1384 fprintf (f, " reachable");
1385 if (gimple_has_body_p (node->decl))
1386 fprintf (f, " body");
1388 fprintf (f, " process");
1389 if (node->local.local)
1390 fprintf (f, " local");
1391 if (node->local.externally_visible)
1392 fprintf (f, " externally_visible");
1393 if (node->local.finalized)
1394 fprintf (f, " finalized");
1395 if (node->local.disregard_inline_limits)
1396 fprintf (f, " always_inline");
1397 else if (node->local.inlinable)
1398 fprintf (f, " inlinable");
1399 if (node->local.redefined_extern_inline)
1400 fprintf (f, " redefined_extern_inline");
1401 if (TREE_ASM_WRITTEN (node->decl))
1402 fprintf (f, " asm_written");
/* Incoming edges (callers).  */
1404 fprintf (f, "\n called by: ");
1405 for (edge = node->callers; edge; edge = edge->next_caller)
1407 fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
1410 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1411 (HOST_WIDEST_INT)edge->count);
1412 if (edge->frequency)
1413 fprintf (f, "(%.2f per call) ",
1414 edge->frequency / (double)CGRAPH_FREQ_BASE);
1415 if (!edge->inline_failed)
1416 fprintf(f, "(inlined) ");
1417 if (edge->indirect_call)
1418 fprintf(f, "(indirect) ");
1419 if (edge->can_throw_external)
1420 fprintf(f, "(can throw external) ");
/* Outgoing edges (callees).  */
1423 fprintf (f, "\n calls: ");
1424 for (edge = node->callees; edge; edge = edge->next_callee)
1426 fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
1428 if (!edge->inline_failed)
1429 fprintf(f, "(inlined) ");
1430 if (edge->indirect_call)
1431 fprintf(f, "(indirect) ");
1433 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1434 (HOST_WIDEST_INT)edge->count);
1435 if (edge->frequency)
1436 fprintf (f, "(%.2f per call) ",
1437 edge->frequency / (double)CGRAPH_FREQ_BASE);
1438 if (edge->loop_nest)
1439 fprintf (f, "(nested in %i loops) ", edge->loop_nest);
1440 if (edge->can_throw_external)
1441 fprintf(f, "(can throw external) ");
/* Dump call graph node NODE to stderr — convenience wrapper around
   dump_cgraph_node for use from a debugger.  */
void
debug_cgraph_node (struct cgraph_node *node)
{
  dump_cgraph_node (stderr, node);
}
1456 /* Dump the callgraph to file F. */
1459 dump_cgraph (FILE *f)
1461 struct cgraph_node *node;
1463 fprintf (f, "callgraph:\n\n");
1464 for (node = cgraph_nodes; node; node = node->next)
1465 dump_cgraph_node (f, node);
1469 /* Dump the call graph to stderr. */
/* NOTE(review): the signature of this debugger helper (presumably
   `void debug_cgraph (void)` plus braces) is missing from this excerpt;
   only the body statement survives.  Confirm against the original.  */
1474 dump_cgraph (stderr);
1478 /* Set the DECL_ASSEMBLER_NAME and update cgraph hashtables. */
1481 change_decl_assembler_name (tree decl, tree name)
1483 gcc_assert (!assembler_name_hash);
1484 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
1486 SET_DECL_ASSEMBLER_NAME (decl, name);
1489 if (name == DECL_ASSEMBLER_NAME (decl))
1492 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
1493 && DECL_RTL_SET_P (decl))
1494 warning (0, "%D renamed after being referenced in assembly", decl);
1496 SET_DECL_ASSEMBLER_NAME (decl, name);
1499 /* Add a top-level asm statement to the list. */
1501 struct cgraph_asm_node *
1502 cgraph_add_asm_node (tree asm_str)
1504 struct cgraph_asm_node *node;
1506 node = GGC_CNEW (struct cgraph_asm_node);
1507 node->asm_str = asm_str;
1508 node->order = cgraph_order++;
1510 if (cgraph_asm_nodes == NULL)
1511 cgraph_asm_nodes = node;
1513 cgraph_asm_last_node->next = node;
1514 cgraph_asm_last_node = node;
1518 /* Return true when the DECL can possibly be inlined. */
1520 cgraph_function_possibly_inlined_p (tree decl)
1522 if (!cgraph_global_info_ready)
1523 return !DECL_UNINLINABLE (decl);
1524 return DECL_POSSIBLY_INLINED (decl);
1527 /* Create clone of E in the node N represented by CALL_EXPR the callgraph. */
/* NOTE(review): several interior lines (frequency assignment, the rest of
   the update_original block, and the return statement) are missing from
   this excerpt; code kept byte-identical.  */
1528 struct cgraph_edge *
1529 cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
1530 gimple call_stmt, gcov_type count_scale, int freq_scale,
1531 int loop_nest, bool update_original)
1533 struct cgraph_edge *new_edge;
/* Scale profile count and frequency of the clone by the caller's ratio.  */
1534 gcov_type count = e->count * count_scale / REG_BR_PROB_BASE;
1535 gcov_type freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
/* Clamp frequency to the representable maximum.  */
1537 if (freq > CGRAPH_FREQ_MAX)
1538 freq = CGRAPH_FREQ_MAX;
1539 new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq,
1540 e->loop_nest + loop_nest);
1542 new_edge->inline_failed = e->inline_failed;
1543 new_edge->indirect_call = e->indirect_call;
1544 if (update_original)
/* Move the cloned portion of the profile out of the original edge.  */
1546 e->count -= new_edge->count;
1550 cgraph_call_edge_duplication_hooks (e, new_edge);
1554 /* Create node representing clone of N executed COUNT times. Decrease
1555 the execution counts from original node too.
1557 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
1558 function's profile to reflect the fact that part of execution is handled
   by the clone.  NOTE(review): this excerpt is missing interior lines
   (the zero-count guard for count_scale, the update_original body, and
   the return statement); code kept byte-identical.  */
1560 struct cgraph_node *
1561 cgraph_clone_node (struct cgraph_node *n, gcov_type count, int freq,
1562 int loop_nest, bool update_original)
1564 struct cgraph_node *new_node = cgraph_create_node ();
1565 struct cgraph_edge *e;
1566 gcov_type count_scale;
/* Share the declaration and mirror the nesting of the original.  */
1568 new_node->decl = n->decl;
1569 new_node->origin = n->origin;
1570 if (new_node->origin)
1572 new_node->next_nested = new_node->origin->nested;
1573 new_node->origin->nested = new_node;
/* Copy analysis results and per-node info wholesale.  */
1575 new_node->analyzed = n->analyzed;
1576 new_node->local = n->local;
1577 new_node->global = n->global;
1578 new_node->rtl = n->rtl;
1579 new_node->count = count;
1580 new_node->clone = n->clone;
/* Derive the edge-count scaling ratio, capped at 1.0.  */
1583 if (new_node->count > n->count)
1584 count_scale = REG_BR_PROB_BASE;
1586 count_scale = new_node->count * REG_BR_PROB_BASE / n->count;
1590 if (update_original)
/* Clone every outgoing edge with the scaled profile.  */
1597 for (e = n->callees;e; e=e->next_callee)
1598 cgraph_clone_edge (e, new_node, e->call_stmt, count_scale, freq, loop_nest,
/* Link the clone into N's doubly-linked list of sibling clones.  */
1601 new_node->next_sibling_clone = n->clones;
1603 n->clones->prev_sibling_clone = new_node;
1604 n->clones = new_node;
1605 new_node->clone_of = n;
1607 cgraph_call_node_duplication_hooks (n, new_node);
1611 /* Create a new name for omp child function. Returns an identifier. */
/* Monotone counter making each generated clone name unique.  */
1613 static GTY(()) unsigned int clone_fn_id_num;
/* NOTE(review): the bodies of the NO_DOT_IN_LABEL / NO_DOLLAR_IN_LABEL
   preprocessor branches (which choose the separator appended to the
   prefix) are missing from this excerpt; code kept byte-identical.  */
1616 clone_function_name (tree decl)
1618 tree name = DECL_ASSEMBLER_NAME (decl);
1619 size_t len = IDENTIFIER_LENGTH (name);
1620 char *tmp_name, *prefix;
/* Build "<assembler name>_clone" on the stack.  */
1622 prefix = XALLOCAVEC (char, len + strlen ("_clone") + 1);
1623 memcpy (prefix, IDENTIFIER_POINTER (name), len);
1624 strcpy (prefix + len, "_clone");
1625 #ifndef NO_DOT_IN_LABEL
1627 #elif !defined NO_DOLLAR_IN_LABEL
/* Let the target format a private (non-exported) label from the prefix
   and the unique counter.  */
1630 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
1631 return get_identifier (tmp_name);
1634 /* Create callgraph node clone with new declaration. The actual body will
1635 be copied later at compilation stage.
1637 TODO: after merging in ipa-sra use function call notes instead of args_to_skip
   bitmap interface.
   NOTE(review): this excerpt is missing interior lines (local variable
   declarations such as new_decl/i/aslot, the if/else around copy_node,
   hash-slot assignments, and the return statement); code kept
   byte-identical.  */
1640 struct cgraph_node *
1641 cgraph_create_virtual_clone (struct cgraph_node *old_node,
1642 VEC(cgraph_edge_p,heap) *redirect_callers,
1643 VEC(ipa_replace_map_p,gc) *tree_map,
1644 bitmap args_to_skip)
1646 tree old_decl = old_node->decl;
1647 struct cgraph_node *new_node = NULL;
1649 struct cgraph_node key, **slot;
1651 struct cgraph_edge *e;
/* Only versionable functions may be cloned this way.  */
1653 gcc_assert (tree_versionable_function_p (old_decl));
1655 /* Make a new FUNCTION_DECL tree node */
1657 new_decl = copy_node (old_decl);
1659 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
1660 DECL_STRUCT_FUNCTION (new_decl) = NULL;
1662 /* Generate a new name for the new version. */
1663 DECL_NAME (new_decl) = clone_function_name (old_decl);
1664 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
1665 SET_DECL_RTL (new_decl, NULL);
1667 new_node = cgraph_clone_node (old_node, old_node->count,
1668 CGRAPH_FREQ_BASE, 0, false);
1669 new_node->decl = new_decl;
1670 /* Update the properties.
1671 Make clone visible only within this translation unit. Make sure
1672 that is not weak also.
1673 ??? We cannot use COMDAT linkage because there is no
1674 ABI support for this. */
1675 DECL_EXTERNAL (new_node->decl) = 0;
1676 DECL_ONE_ONLY (new_node->decl) = 0;
1677 TREE_PUBLIC (new_node->decl) = 0;
1678 DECL_COMDAT (new_node->decl) = 0;
1679 DECL_WEAK (new_node->decl) = 0;
1680 new_node->clone.tree_map = tree_map;
1681 new_node->clone.args_to_skip = args_to_skip;
1682 new_node->local.externally_visible = 0;
1683 new_node->local.local = 1;
1684 new_node->lowered = true;
1685 new_node->reachable = true;
/* Register the new declaration in the DECL->node hash.  */
1687 key.decl = new_decl;
1688 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
1689 gcc_assert (!*slot);
/* Keep the assembler-name hash in sync if it already exists.  */
1691 if (assembler_name_hash)
1694 tree name = DECL_ASSEMBLER_NAME (new_decl);
1696 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
1697 decl_assembler_name_hash (name),
1699 gcc_assert (!*aslot);
1702 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1704 /* Redirect calls to the old version node to point to its new
1706 cgraph_redirect_edge_callee (e, new_node);
1712 /* NODE is no longer nested function; update cgraph accordingly. */
1714 cgraph_unnest_node (struct cgraph_node *node)
1716 struct cgraph_node **node2 = &node->origin->nested;
1717 gcc_assert (node->origin);
1719 while (*node2 != node)
1720 node2 = &(*node2)->next_nested;
1721 *node2 = node->next_nested;
1722 node->origin = NULL;
1725 /* Return function availability. See cgraph.h for description of individual
1728 cgraph_function_body_availability (struct cgraph_node *node)
1730 enum availability avail;
1731 gcc_assert (cgraph_function_flags_ready);
1732 if (!node->analyzed)
1733 avail = AVAIL_NOT_AVAILABLE;
1734 else if (node->local.local)
1735 avail = AVAIL_LOCAL;
1736 else if (!node->local.externally_visible)
1737 avail = AVAIL_AVAILABLE;
1738 /* Inline functions are safe to be analyzed even if their sybol can
1739 be overwritten at runtime. It is not meaningful to enfore any sane
1740 behaviour on replacing inline function by different body. */
1741 else if (DECL_DECLARED_INLINE_P (node->decl))
1742 avail = AVAIL_AVAILABLE;
1744 /* If the function can be overwritten, return OVERWRITABLE. Take
1745 care at least of two notable extensions - the COMDAT functions
1746 used to share template instantiations in C++ (this is symmetric
1747 to code cp_cannot_inline_tree_fn and probably shall be shared and
1748 the inlinability hooks completely eliminated).
1750 ??? Does the C++ one definition rule allow us to always return
1751 AVAIL_AVAILABLE here? That would be good reason to preserve this
1754 else if (DECL_REPLACEABLE_P (node->decl) && !DECL_EXTERNAL (node->decl))
1755 avail = AVAIL_OVERWRITABLE;
1756 else avail = AVAIL_AVAILABLE;
1761 /* Add the function FNDECL to the call graph.
1762 Unlike cgraph_finalize_function, this function is intended to be used
1763 by middle end and allows insertion of new function at arbitrary point
1764 of compilation. The function can be either in high, low or SSA form
   as indicated by LOWERED.
1767 The function is assumed to be reachable and have address taken (so no
1768 API breaking optimizations are performed on it).
1770 Main work done by this function is to enqueue the function for later
1771 processing to avoid need the passes to be re-entrant. */
/* NOTE(review): this excerpt is missing interior lines of the switch
   (the `break;` statements, closing braces, the pop_cfun calls and the
   default/assert case); code kept byte-identical.  */
1774 cgraph_add_new_function (tree fndecl, bool lowered)
1776 struct cgraph_node *node;
1777 switch (cgraph_state)
1779 case CGRAPH_STATE_CONSTRUCTION:
1780 /* Just enqueue function to be processed at nearest occurrence. */
1781 node = cgraph_node (fndecl);
1782 node->next_needed = cgraph_new_nodes;
1784 node->lowered = true;
1785 cgraph_new_nodes = node;
1788 case CGRAPH_STATE_IPA:
1789 case CGRAPH_STATE_IPA_SSA:
1790 case CGRAPH_STATE_EXPANSION:
1791 /* Bring the function into finalized state and enqueue for later
1792 analyzing and compilation. */
1793 node = cgraph_node (fndecl);
1794 node->local.local = false;
1795 node->local.finalized = true;
1796 node->reachable = node->needed = true;
/* If we are already expanding, lower the new function immediately and
   run the early local passes on it.  */
1797 if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
1799 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
1800 current_function_decl = fndecl;
1801 gimple_register_cfg_hooks ();
1802 tree_lowering_passes (fndecl);
1803 bitmap_obstack_initialize (NULL);
1804 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
1805 execute_pass_list (pass_early_local_passes.pass.sub);
1806 bitmap_obstack_release (NULL);
1808 current_function_decl = NULL;
1813 node->lowered = true;
1814 node->next_needed = cgraph_new_nodes;
1815 cgraph_new_nodes = node;
1818 case CGRAPH_STATE_FINISHED:
1819 /* At the very end of compilation we have to do all the work up
1821 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
1822 current_function_decl = fndecl;
1823 gimple_register_cfg_hooks ();
1825 tree_lowering_passes (fndecl);
1826 bitmap_obstack_initialize (NULL);
1827 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
1828 execute_pass_list (pass_early_local_passes.pass.sub);
1829 bitmap_obstack_release (NULL);
/* Fully compile the freshly added function right away.  */
1830 tree_rest_of_compilation (fndecl);
1832 current_function_decl = NULL;
1837 #include "gt-cgraph.h"