1 /* Callgraph construction.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
27 #include "tree-flow.h"
28 #include "langhooks.h"
29 #include "pointer-set.h"
33 #include "tree-pass.h"
34 #include "ipa-utils.h"
36 #include "ipa-inline.h"
38 /* Context of record_reference. */
39 struct record_reference_ctx
/* NOTE(review): listing is truncated here (original lines 40-41 and the
   closing lines are missing).  record_references_in_initializer below
   builds a ctx with two initializers ({false, NULL}) and assigns
   ctx.only_vars, so a boolean field presumably precedes this one --
   confirm against the complete file.  */
42 struct varpool_node *varpool_node;
45 /* Walk tree and record all calls and references to functions/variables.
46 Called via walk_tree: TP is pointer to tree to be examined.
47 When DATA is non-null, record references to callgraph.
/* NOTE(review): this listing is truncated -- the return type (presumably a
   walk_tree callback returning tree), the declarations of locals `t' and
   `decl', the switch's case labels, braces, and the final return are all
   missing.  Comments below describe only what the visible lines show.  */
51 record_reference (tree *tp, int *walk_subtrees, void *data)
/* DATA carries the record_reference_ctx set up by the caller.  */
55 struct record_reference_ctx *ctx = (struct record_reference_ctx *)data;
/* Fold the examined value to its canonical constructor form first.  */
57 t = canonicalize_constructor_val (t);
63 switch (TREE_CODE (t))
/* (case labels missing from listing -- presumably the ADDR_EXPR arm.)  */
72 /* Record dereferences to the functions. This makes the
73 functions reachable unconditionally. */
74 decl = get_base_var (*tp);
75 if (TREE_CODE (decl) == FUNCTION_DECL)
/* Taking a function's address: create/get its cgraph node, mark the
   address as taken, and record an IPA reference from the variable
   being analyzed (ctx->varpool_node) to that function.  */
77 struct cgraph_node *node = cgraph_get_create_node (decl);
79 cgraph_mark_address_taken_node (node);
80 ipa_record_reference (NULL, ctx->varpool_node, node, NULL,
84 if (TREE_CODE (decl) == VAR_DECL)
/* Reference to a variable: get its varpool node, let the language
   front end inspect the expression, and mark the variable needed.  */
86 struct varpool_node *vnode = varpool_node (decl);
87 if (lang_hooks.callgraph.analyze_expr)
88 lang_hooks.callgraph.analyze_expr (&decl, walk_subtrees);
89 varpool_mark_needed_node (vnode);
/* Redirect aliases with an extra name to that name's node.  */
90 if (vnode->alias && vnode->extra_name)
91 vnode = vnode->extra_name;
92 ipa_record_reference (NULL, ctx->varpool_node,
100 /* Save some cycles by not walking types and declaration as we
101 won't find anything useful there anyway. */
102 if (IS_TYPE_OR_DECL_P (*tp))
/* Codes the middle end does not know about are delegated to the
   language-specific analyzer.  */
108 if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
109 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees);
116 /* Record references to typeinfos in the type list LIST. */
/* NOTE(review): listing truncated -- return type, braces, and the trailing
   arguments of the ipa_record_reference call are missing.  */
119 record_type_list (struct cgraph_node *node, tree list)
/* Walk the TREE_LIST chain; each element's value is a type (or NULL).  */
121 for (; list; list = TREE_CHAIN (list))
123 tree type = TREE_VALUE (list);
/* Map the front-end type to its runtime (typeinfo) representation.  */
126 type = lookup_type_for_runtime (type);
128 if (TREE_CODE (type) == ADDR_EXPR)
130 type = TREE_OPERAND (type, 0);
/* If the typeinfo is a variable, keep it alive and record the
   reference from NODE's EH tables.  */
131 if (TREE_CODE (type) == VAR_DECL)
133 struct varpool_node *vnode = varpool_node (type);
134 varpool_mark_needed_node (vnode);
135 ipa_record_reference (node, NULL,
143 /* Record all references we will introduce by producing EH tables
/* NOTE(review): listing truncated -- the declaration of region pointer `i',
   the switch statement over region types, several case arms, and the
   tree-walk stepping code are missing.  */
147 record_eh_tables (struct cgraph_node *node, struct function *fun)
/* A function with a personality routine references it from its EH
   table: record an address-taken reference to that routine's node.  */
151 if (DECL_FUNCTION_PERSONALITY (node->decl))
153 struct cgraph_node *per_node;
155 per_node = cgraph_get_create_node (DECL_FUNCTION_PERSONALITY (node->decl));
156 ipa_record_reference (node, NULL, per_node, NULL, IPA_REF_ADDR, NULL);
157 cgraph_mark_address_taken_node (per_node);
/* Walk the EH region tree, recording the typeinfo lists each region
   kind references.  */
160 i = fun->eh->region_tree;
169 case ERT_MUST_NOT_THROW:
/* try regions reference the type list of every catch handler.  */
175 for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
176 record_type_list (node, c->type_list);
180 case ERT_ALLOWED_EXCEPTIONS:
181 record_type_list (node, i->u.allowed.type_list);
184 /* If there are sub-regions, process them. */
187 /* If there are peers, process them. */
188 else if (i->next_peer)
190 /* Otherwise, step back up the tree to the next peer. */
199 while (i->next_peer == NULL);
205 /* Reset inlining information of all incoming call edges of NODE. */
/* NOTE(review): return type and braces are missing from this listing.  */
208 reset_inline_failed (struct cgraph_node *node)
210 struct cgraph_edge *e;
/* For every caller edge: forget any inlined_to decision on the callee
   and recompute the edge's inline-failed reason.  */
212 for (e = node->callers; e; e = e->next_caller)
214 e->callee->global.inlined_to = NULL;
215 initialize_inline_failed (e);
219 /* Computes the frequency of the call statement so that it can be stored in
220 cgraph_edge. BB is the basic block of the call statement. */
/* NOTE(review): return type, braces, the condition guarding line 232, and
   the final return statement are missing from this listing.  */
222 compute_call_stmt_bb_frequency (tree decl, basic_block bb)
/* Scale BB's frequency against the entry block frequency of DECL's
   function body.  */
224 int entry_freq = ENTRY_BLOCK_PTR_FOR_FUNCTION
225 (DECL_STRUCT_FUNCTION (decl))->frequency;
226 int freq = bb->frequency;
/* With no profile data, fall back to the neutral base frequency.  */
228 if (profile_status_for_function (DECL_STRUCT_FUNCTION (decl)) == PROFILE_ABSENT)
229 return CGRAPH_FREQ_BASE;
/* Presumably guards against a zero entry frequency (divide-by-zero on
   line 234) -- the guarding condition is missing here; confirm.  */
232 entry_freq = 1, freq++;
234 freq = freq * CGRAPH_FREQ_BASE / entry_freq;
/* Clamp to the representable maximum.  */
235 if (freq > CGRAPH_FREQ_MAX)
236 freq = CGRAPH_FREQ_MAX;
241 /* Mark address taken in STMT. */
/* Callback for walk_stmt_load_store_addr_ops; DATA is the caller's
   cgraph node.  NOTE(review): return type, braces, the declaration of
   walk_subtrees, and trailing call arguments are missing from this
   listing.  */
244 mark_address (gimple stmt, tree addr, void *data)
246 addr = get_base_address (addr);
/* Address of a function: mark its node address-taken and record an
   IPA_REF_ADDR-style reference from the current function.  */
247 if (TREE_CODE (addr) == FUNCTION_DECL)
249 struct cgraph_node *node = cgraph_get_create_node (addr);
250 cgraph_mark_address_taken_node (node);
251 ipa_record_reference ((struct cgraph_node *)data, NULL,
/* Address of a static/external variable: mark it needed and record the
   reference, resolving extra-name aliases first.  */
255 else if (addr && TREE_CODE (addr) == VAR_DECL
256 && (TREE_STATIC (addr) || DECL_EXTERNAL (addr)))
258 struct varpool_node *vnode = varpool_node (addr);
261 if (lang_hooks.callgraph.analyze_expr)
262 lang_hooks.callgraph.analyze_expr (&addr, &walk_subtrees);
263 varpool_mark_needed_node (vnode);
264 if (vnode->alias && vnode->extra_name)
265 vnode = vnode->extra_name;
266 ipa_record_reference ((struct cgraph_node *)data, NULL,
274 /* Mark load of T. */
/* Callback for walk_stmt_load_store_addr_ops; DATA is the caller's
   cgraph node.  NOTE(review): return type, braces, the walk_subtrees
   declaration, and trailing call arguments are missing from this
   listing.  */
277 mark_load (gimple stmt, tree t, void *data)
279 t = get_base_address (t);
280 if (t && TREE_CODE (t) == FUNCTION_DECL)
282 /* ??? This can happen on platforms with descriptors when these are
283 directly manipulated in the code. Pretend that it's an address. */
284 struct cgraph_node *node = cgraph_get_create_node (t);
285 cgraph_mark_address_taken_node (node);
286 ipa_record_reference ((struct cgraph_node *)data, NULL,
/* Load from a static/external variable: mark it needed and record the
   reference, resolving extra-name aliases first.  */
290 else if (t && TREE_CODE (t) == VAR_DECL
291 && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
293 struct varpool_node *vnode = varpool_node (t);
296 if (lang_hooks.callgraph.analyze_expr)
297 lang_hooks.callgraph.analyze_expr (&t, &walk_subtrees);
298 varpool_mark_needed_node (vnode);
299 if (vnode->alias && vnode->extra_name)
300 vnode = vnode->extra_name;
301 ipa_record_reference ((struct cgraph_node *)data, NULL,
308 /* Mark store of T. */
/* Callback for walk_stmt_load_store_addr_ops; DATA is the caller's
   cgraph node.  NOTE(review): return type, braces, and the walk_subtrees
   declaration are missing from this listing.  Unlike mark_load, only
   variables (not functions) are handled here, and the reference kind is
   IPA_REF_STORE.  */
311 mark_store (gimple stmt, tree t, void *data)
313 t = get_base_address (t);
314 if (t && TREE_CODE (t) == VAR_DECL
315 && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
317 struct varpool_node *vnode = varpool_node (t);
320 if (lang_hooks.callgraph.analyze_expr)
321 lang_hooks.callgraph.analyze_expr (&t, &walk_subtrees);
322 varpool_mark_needed_node (vnode);
/* Resolve extra-name aliases to the canonical node.  */
323 if (vnode->alias && vnode->extra_name)
324 vnode = vnode->extra_name;
325 ipa_record_reference ((struct cgraph_node *)data, NULL,
327 IPA_REF_STORE, stmt);
332 /* Create cgraph edges for function calls.
333 Also look for functions and variables having addresses taken. */
/* Pass execution function for pass_build_cgraph_edges.  NOTE(review):
   this listing is truncated -- the return type, FOR_EACH_BB loop header,
   basic-block variable, several braces, if-conditions, trailing call
   arguments, and the final return are missing.  Comments describe only
   the visible lines.  */
336 build_cgraph_edges (void)
339 struct cgraph_node *node = cgraph_get_node (current_function_decl);
/* visited_nodes is created and destroyed here; any use in between is
   not visible in this listing -- confirm against the full file.  */
340 struct pointer_set_t *visited_nodes = pointer_set_create ();
341 gimple_stmt_iterator gsi;
345 /* Create the callgraph edges and record the nodes referenced by the function.
/* Iterate over every statement of (presumably) each basic block.  */
349 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
351 gimple stmt = gsi_stmt (gsi);
/* Call statements become cgraph edges: direct if the callee decl is
   known, indirect otherwise.  */
354 if (is_gimple_call (stmt))
356 int freq = compute_call_stmt_bb_frequency (current_function_decl,
358 decl = gimple_call_fndecl (stmt);
360 cgraph_create_edge (node, cgraph_get_create_node (decl),
361 stmt, bb->count, freq);
363 cgraph_create_indirect_edge (node, stmt,
364 gimple_call_flags (stmt),
/* Record every load, store, and address-taken operand of the stmt.  */
367 walk_stmt_load_store_addr_ops (stmt, node, mark_load,
368 mark_store, mark_address);
/* OMP parallel/task statements reference their outlined child (and
   copy) functions; record those as address references.  */
369 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
370 && gimple_omp_parallel_child_fn (stmt))
372 tree fn = gimple_omp_parallel_child_fn (stmt);
373 ipa_record_reference (node, NULL, cgraph_get_create_node (fn),
374 NULL, IPA_REF_ADDR, stmt);
376 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
378 tree fn = gimple_omp_task_child_fn (stmt);
380 ipa_record_reference (node, NULL, cgraph_get_create_node (fn),
381 NULL, IPA_REF_ADDR, stmt);
382 fn = gimple_omp_task_copy_fn (stmt);
384 ipa_record_reference (node, NULL, cgraph_get_create_node (fn),
385 NULL, IPA_REF_ADDR, stmt);
/* PHI nodes can also reference variables/functions.  */
388 for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
389 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node,
390 mark_load, mark_store, mark_address);
393 /* Look for initializers of constant variables and private statics. */
394 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
395 if (TREE_CODE (decl) == VAR_DECL
396 && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl)))
397 varpool_finalize_decl (decl);
/* Finally record references introduced by this function's EH tables.  */
398 record_eh_tables (node, cfun);
400 pointer_set_destroy (visited_nodes);
404 struct gimple_opt_pass pass_build_cgraph_edges =
/* Pass descriptor running build_cgraph_edges over each function.
   NOTE(review): several initializer fields (opt_pass header, gate, tv_id,
   properties, closing braces) are missing from this truncated listing.  */
408 "*build_cgraph_edges", /* name */
410 build_cgraph_edges, /* execute */
413 0, /* static_pass_number */
415 PROP_cfg, /* properties_required */
416 0, /* properties_provided */
417 0, /* properties_destroyed */
418 0, /* todo_flags_start */
419 0 /* todo_flags_finish */
423 /* Record references to functions and other variables present in the
424 initial value of DECL, a variable.
425 When ONLY_VARS is true, we mark needed only variables, not functions. */
/* NOTE(review): return type and braces are missing from this listing.  */
428 record_references_in_initializer (tree decl, bool only_vars)
/* visited_nodes lets walk_tree skip already-seen subtrees.  */
430 struct pointer_set_t *visited_nodes = pointer_set_create ();
431 struct varpool_node *node = varpool_node (decl);
432 struct record_reference_ctx ctx = {false, NULL};
434 ctx.varpool_node = node;
435 ctx.only_vars = only_vars;
/* Walk the initializer, recording references via record_reference.  */
436 walk_tree (&DECL_INITIAL (decl), record_reference,
437 &ctx, visited_nodes);
438 pointer_set_destroy (visited_nodes);
441 /* Rebuild cgraph edges for current function node. This needs to be run after
442 passes that don't update the cgraph. */
/* NOTE(review): this listing is truncated -- return type, the FOR_EACH_BB
   loop header, basic-block variable, braces, if-conditions, trailing call
   arguments, and the final return are missing.  Structure parallels
   build_cgraph_edges minus the OMP and local-decl handling.  */
445 rebuild_cgraph_edges (void)
448 struct cgraph_node *node = cgraph_get_node (current_function_decl);
449 gimple_stmt_iterator gsi;
/* Drop all stale callees and references before rebuilding.  */
451 cgraph_node_remove_callees (node);
452 ipa_remove_all_references (&node->ref_list);
454 node->count = ENTRY_BLOCK_PTR->count;
458 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
460 gimple stmt = gsi_stmt (gsi);
/* Recreate a direct or indirect edge for each call statement.  */
463 if (is_gimple_call (stmt))
465 int freq = compute_call_stmt_bb_frequency (current_function_decl,
467 decl = gimple_call_fndecl (stmt);
469 cgraph_create_edge (node, cgraph_get_create_node (decl), stmt,
472 cgraph_create_indirect_edge (node, stmt,
473 gimple_call_flags (stmt),
/* Re-record load/store/address references for every statement.  */
476 walk_stmt_load_store_addr_ops (stmt, node, mark_load,
477 mark_store, mark_address);
/* PHI nodes may reference variables/functions too.  */
480 for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
481 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node,
482 mark_load, mark_store, mark_address);
484 record_eh_tables (node, cfun);
/* A node being rebuilt must not itself be inlined anywhere.  */
485 gcc_assert (!node->global.inlined_to);
490 /* Rebuild cgraph edges for current function node. This needs to be run after
491 passes that don't update the cgraph. */
/* NOTE(review): the comment above appears copied from rebuild_cgraph_edges;
   judging by the visible body, this variant rebuilds only the IPA
   references (no cgraph_node_remove_callees / edge creation).  Listing is
   truncated: return type, FOR_EACH_BB header, and braces are missing.  */
494 cgraph_rebuild_references (void)
497 struct cgraph_node *node = cgraph_get_node (current_function_decl);
498 gimple_stmt_iterator gsi;
/* Drop stale references only; call edges are left untouched.  */
500 ipa_remove_all_references (&node->ref_list);
502 node->count = ENTRY_BLOCK_PTR->count;
506 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
508 gimple stmt = gsi_stmt (gsi);
510 walk_stmt_load_store_addr_ops (stmt, node, mark_load,
511 mark_store, mark_address);
/* PHI nodes may also carry references.  */
514 for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
515 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node,
516 mark_load, mark_store, mark_address);
518 record_eh_tables (node, cfun);
521 struct gimple_opt_pass pass_rebuild_cgraph_edges =
/* Pass descriptor running rebuild_cgraph_edges.  NOTE(review): several
   initializer fields (opt_pass header, gate, closing braces) are missing
   from this truncated listing.  */
525 "*rebuild_cgraph_edges", /* name */
527 rebuild_cgraph_edges, /* execute */
530 0, /* static_pass_number */
531 TV_CGRAPH, /* tv_id */
532 PROP_cfg, /* properties_required */
533 0, /* properties_provided */
534 0, /* properties_destroyed */
535 0, /* todo_flags_start */
536 0, /* todo_flags_finish */
/* Remove all callee edges of the current function's cgraph node.
   NOTE(review): return type, braces, and any return statement are
   missing from this truncated listing.  */
542 remove_cgraph_callee_edges (void)
544 cgraph_node_remove_callees (cgraph_get_node (current_function_decl));
548 struct gimple_opt_pass pass_remove_cgraph_callee_edges =
/* Pass descriptor running remove_cgraph_callee_edges.  NOTE(review):
   several initializer fields (opt_pass header, gate, tv_id, closing
   braces) are missing from this truncated listing.  */
552 "*remove_cgraph_callee_edges", /* name */
554 remove_cgraph_callee_edges, /* execute */
557 0, /* static_pass_number */
559 0, /* properties_required */
560 0, /* properties_provided */
561 0, /* properties_destroyed */
562 0, /* todo_flags_start */
563 0, /* todo_flags_finish */