1 /* Top-level control of tree optimizations.
2 Copyright 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
33 #include "diagnostic.h"
34 #include "basic-block.h"
36 #include "tree-flow.h"
37 #include "tree-dump.h"
40 #include "langhooks.h"
44 #include "tree-inline.h"
45 #include "tree-mudflap.h"
46 #include "tree-pass.h"
54 /* Gate: execute, or not, all of the non-trivial optimizations. */
/* NOTE(review): the embedded listing numbers jump (54 -> 57 -> 60), so the
   return type, opening brace and the start of the return expression
   (presumably "return (optimize >= 1", which the trailing "&&" below
   continues) are missing from this excerpt -- recover from upstream.  */
57 gate_all_optimizations (void)
60 /* Don't bother doing anything if the program has errors.
61 We have to pass down the queue if we already went into SSA */
62 && (!(errorcount || sorrycount) || gimple_in_ssa_p (cfun)));
/* Pass descriptor: container whose gate (gate_all_optimizations) controls
   all of the non-trivial tree optimizations hung below it as sub-passes.
   NOTE(review): listing numbers jump (65 -> 68 -> 72 -> 74), so the opening
   brace and several initializer fields (name, execute, sub/next, tv_id,
   letter, closing brace) are missing from this excerpt.  */
65 struct tree_opt_pass pass_all_optimizations =
68 gate_all_optimizations, /* gate */
72 0, /* static_pass_number */
74 0, /* properties_required */
75 0, /* properties_provided */
76 0, /* properties_destroyed */
77 0, /* todo_flags_start */
78 0, /* todo_flags_finish */
82 /* Gate: execute, or not, the early local passes.  (Comment previously
      duplicated the gate_all_optimizations one verbatim.)  */
/* NOTE(review): the return type and braces are missing from this excerpt
   (listing numbers jump 85 -> 87).  */
85 gate_all_early_local_passes (void)
87 /* Don't bother doing anything if the program has errors. */
88 return (!errorcount && !sorrycount);
/* Pass descriptor for the per-function early local cleanups; its
   todo_flags_finish removes now-unreferenced functions.
   NOTE(review): listing numbers jump (91 -> 93, 94 -> 98, 98 -> 100), so
   the opening brace and several initializer fields (execute, sub/next,
   tv_id, letter, closing brace) are missing from this excerpt.  */
91 struct tree_opt_pass pass_early_local_passes =
93 "early_local_cleanups", /* name */
94 gate_all_early_local_passes, /* gate */
98 0, /* static_pass_number */
100 0, /* properties_required */
101 0, /* properties_provided */
102 0, /* properties_destroyed */
103 0, /* todo_flags_start */
104 TODO_remove_functions, /* todo_flags_finish */
/* Execute callback: when compiling in unit-at-a-time mode, advance the
   callgraph state to IPA-with-SSA once the early local passes run.
   NOTE(review): the return type, braces and return value are missing from
   this excerpt (listing numbers jump 109 -> 111, and the body ends at
   112 with no closing lines shown).  */
109 execute_early_local_optimizations (void)
111 if (flag_unit_at_a_time)
112 cgraph_state = CGRAPH_STATE_IPA_SSA;
116 /* Gate: execute, or not, the early (pre-IPA) optimizations.  (Comment
       previously duplicated the gate_all_optimizations one verbatim.)  */
/* NOTE(review): the return type and braces are missing from this excerpt
   (listing numbers jump 119 -> 121).  */
119 gate_all_early_optimizations (void)
121 return (optimize >= 1
122 /* Don't bother doing anything if the program has errors. */
123 && !(errorcount || sorrycount));
/* Pass descriptor for the container of the early optimizations run
   before IPA, gated on -O1+ and a clean error count.
   NOTE(review): listing numbers jump (126 -> 128, 130 -> 133), so the
   opening brace and several initializer fields (sub/next, tv_id, letter,
   closing brace) are missing from this excerpt.  */
126 struct tree_opt_pass pass_all_early_optimizations =
128 "early_optimizations", /* name */
129 gate_all_early_optimizations, /* gate */
130 execute_early_local_optimizations, /* execute */
133 0, /* static_pass_number */
135 0, /* properties_required */
136 0, /* properties_provided */
137 0, /* properties_destroyed */
138 0, /* todo_flags_start */
139 0, /* todo_flags_finish */
143 /* Pass: cleanup the CFG just before expanding trees to RTL.
144 This is just a round of label cleanups and case node grouping
145 because after the tree optimizers have run such cleanups may
   be necessary.  NOTE(review): the original terminator of this comment
   (line 146+) is missing from the excerpt; also note this instance runs
   pre-IPA despite the "before expanding trees to RTL" wording, which
   appears copy-pasted -- verify against upstream.  */
/* NOTE(review): the return type, braces and body (presumably a
   cleanup_tree_cfg () call plus return) are missing from this excerpt
   (listing numbers jump 149 -> 155).  */
149 execute_cleanup_cfg_pre_ipa (void)
/* Pass descriptor for the pre-IPA CFG cleanup; requires an existing CFG
   (PROP_cfg) and dumps the function afterwards.
   NOTE(review): listing numbers jump (155 -> 157, 157 -> 159, 159 -> 162),
   so the opening brace and several initializer fields (gate, sub/next,
   tv_id, letter, closing brace) are missing from this excerpt.  */
155 struct tree_opt_pass pass_cleanup_cfg =
157 "cleanup_cfg", /* name */
159 execute_cleanup_cfg_pre_ipa, /* execute */
162 0, /* static_pass_number */
164 PROP_cfg, /* properties_required */
165 0, /* properties_provided */
166 0, /* properties_destroyed */
167 0, /* todo_flags_start */
168 TODO_dump_func, /* todo_flags_finish */
173 /* Pass: cleanup the CFG just before expanding trees to RTL.
174 This is just a round of label cleanups and case node grouping
175 because after the tree optimizers have run such cleanups may
   be necessary.  NOTE(review): the original terminator of this comment
   (line 176+) is missing from the excerpt.  */
/* NOTE(review): the return type, braces, a line between 181 and 183
   (presumably cleanup_tree_cfg ()) and the return statement are missing
   from this excerpt (listing numbers jump 179 -> 181 -> 183).  */
179 execute_cleanup_cfg_post_optimizing (void)
181 fold_cond_expr_cond ();
183 cleanup_dead_labels ();
184 group_case_labels ();
/* Pass descriptor for the final CFG cleanup after all tree optimizers,
   just before RTL expansion; requires PROP_cfg and dumps the function.
   NOTE(review): listing numbers jump (188 -> 190, 192 -> 195), so the
   opening brace and several initializer fields (gate, sub/next, tv_id,
   letter, closing brace) are missing from this excerpt.  */
188 struct tree_opt_pass pass_cleanup_cfg_post_optimizing =
190 "final_cleanup", /* name */
192 execute_cleanup_cfg_post_optimizing, /* execute */
195 0, /* static_pass_number */
197 PROP_cfg, /* properties_required */
198 0, /* properties_provided */
199 0, /* properties_destroyed */
200 0, /* todo_flags_start */
201 TODO_dump_func, /* todo_flags_finish */
205 /* Pass: do the actions required to finish with tree-ssa optimization
   NOTE(review): this comment's continuation and terminator (line 206+)
   are missing from the excerpt.  */
/* NOTE(review): the return type and braces are missing (numbers jump
   209 -> 211); likewise the code following the 216-217 comment
   (presumably the SSA teardown call) and the return statement.  */
209 execute_free_datastructures (void)
211 /* ??? This isn't the right place for this. Worse, it got computed
212 more or less at random in various passes. */
213 free_dominance_info (CDI_DOMINATORS);
214 free_dominance_info (CDI_POST_DOMINATORS);
216 /* Remove the ssa structures. Do it here since this includes statement
217 annotations that need to be intact during disband_implicit_edges. */
/* Pass descriptor wrapping execute_free_datastructures; requires PROP_cfg.
   NOTE(review): listing numbers jump (223 -> 227, 227 -> 230, 230 -> 232),
   so the opening brace and several initializer fields (name, gate,
   sub/next, tv_id, letter, closing brace) are missing from this excerpt.  */
223 struct tree_opt_pass pass_free_datastructures =
227 execute_free_datastructures, /* execute */
230 0, /* static_pass_number */
232 PROP_cfg, /* properties_required */
233 0, /* properties_provided */
234 0, /* properties_destroyed */
235 0, /* todo_flags_start */
236 0, /* todo_flags_finish */
239 /* Pass: free cfg annotations. */
/* NOTE(review): the return type, braces and trailing lines (presumably
   the return statement) are missing from this excerpt (listing numbers
   jump 242 -> 244 and stop at 248).  */
242 execute_free_cfg_annotations (void)
244 /* Emit gotos for implicit jumps. */
245 disband_implicit_edges ();
247 /* And get rid of annotations we no longer need. */
248 delete_tree_cfg_annotations ();
/* Pass descriptor wrapping execute_free_cfg_annotations; requires PROP_cfg.
   NOTE(review): listing numbers jump (253 -> 257, 257 -> 260, 260 -> 262),
   so the opening brace and several initializer fields (name, gate,
   sub/next, tv_id, letter, closing brace) are missing from this excerpt.  */
253 struct tree_opt_pass pass_free_cfg_annotations =
257 execute_free_cfg_annotations, /* execute */
260 0, /* static_pass_number */
262 PROP_cfg, /* properties_required */
263 0, /* properties_provided */
264 0, /* properties_destroyed */
265 0, /* todo_flags_start */
266 0, /* todo_flags_finish */
270 /* Return true if BB has at least one abnormal outgoing edge. */
/* NOTE(review): the return type, braces, local declarations (the `e' and
   iterator used by FOR_EACH_EDGE) and both return statements are missing
   from this excerpt (listing numbers jump 273 -> 278 and stop at 279).  */
273 has_abnormal_outgoing_edge_p (basic_block bb)
278 FOR_EACH_EDGE (e, ei, bb->succs)
279 if (e->flags & EDGE_ABNORMAL)
285 /* Pass: fixup_cfg. IPA passes, compilation of earlier functions or inlining
286 might have changed some properties, such as marked functions nothrow or
287 added calls that can potentially go to non-local labels. Remove redundant
288 edges and basic blocks, and create new ones if necessary. */
/* NOTE(review): this excerpt is heavily gutted -- the listing numbers skip
   the return type and braces (288 -> 291 -> 294), the basic_block
   declaration and the two FOR_EACH_BB loop headers (297 -> 302 and
   326 -> 330), many block braces, the statements between 305/306 tests,
   the `phi' declaration before line 359, the operand of the gcc_assert
   at 362, and the final "return todo;".  Recover from upstream before
   editing this function.  */
291 execute_fixup_cfg (void)
294 block_stmt_iterator bsi;
/* Accumulate TODO flags; verify SSA if we are already in SSA form.  */
295 int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
297 cfun->after_inlining = true;
/* First walk: strip bogus side effects / EH info from each statement.  */
302 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
304 tree stmt = bsi_stmt (bsi);
305 tree call = get_call_expr_in (stmt);
306 tree decl = call ? get_callee_fndecl (call) : NULL;
/* Const/pure calls cannot have side effects; clear the stale bit.  */
308 if (decl && call_expr_flags (call) & (ECF_CONST | ECF_PURE)
309 && TREE_SIDE_EFFECTS (call))
311 if (gimple_in_ssa_p (cfun))
313 todo |= TODO_update_ssa;
316 TREE_SIDE_EFFECTS (call) = 0;
/* Propagate nothrow from the callee decl onto the call itself.  */
318 if (decl && TREE_NOTHROW (decl))
319 TREE_NOTHROW (call) = 1;
320 if (!tree_could_throw_p (stmt) && lookup_stmt_eh_region (stmt))
321 remove_stmt_from_eh_region (stmt);
323 tree_purge_dead_eh_edges (bb);
/* Second walk (only with nonlocal labels): add the abnormal edges that
   computed gotos / nonlocal gotos may need.  */
326 if (current_function_has_nonlocal_label)
330 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
332 tree stmt = bsi_stmt (bsi);
333 if (tree_can_make_abnormal_goto (stmt))
335 if (stmt == bsi_stmt (bsi_last (bb)))
337 if (!has_abnormal_outgoing_edge_p (bb))
338 make_abnormal_goto_edges (bb, true);
/* Statement is mid-block: split so the abnormal edges leave from
   the statement's own block.  */
342 edge e = split_block (bb, stmt);
344 make_abnormal_goto_edges (bb, true);
349 /* Update PHIs on nonlocal goto receivers we (possibly)
350 just created new edges into. */
351 if (TREE_CODE (stmt) == LABEL_EXPR
352 && gimple_in_ssa_p (cfun))
354 tree target = LABEL_EXPR_LABEL (stmt);
355 if (DECL_NONLOCAL (target))
359 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
361 todo |= TODO_update_ssa;
362 gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI
364 mark_sym_for_renaming
365 (SSA_NAME_VAR (PHI_RESULT (phi)));
373 /* Dump a textual representation of the flowgraph. */
375 dump_tree_cfg (dump_file, dump_flags);
/* Pass descriptor wrapping execute_fixup_cfg; requires PROP_cfg and
   finishes with CFG cleanup, GC, dump and flow/statement verification.
   NOTE(review): listing numbers jump (380 -> 382, 384 -> 387), so the
   opening brace and several initializer fields (gate, sub/next, tv_id,
   letter, closing brace) are missing from this excerpt.  */
380 struct tree_opt_pass pass_fixup_cfg =
382 "fixupcfg", /* name */
384 execute_fixup_cfg, /* execute */
387 0, /* static_pass_number */
389 PROP_cfg, /* properties_required */
390 0, /* properties_provided */
391 0, /* properties_destroyed */
392 0, /* todo_flags_start */
393 TODO_cleanup_cfg | TODO_ggc_collect
394 | TODO_dump_func | TODO_verify_flow
395 | TODO_verify_stmts,/* todo_flags_finish */
398 /* Do the actions required to initialize internal data structures used
399 in tree-ssa optimization passes. */
/* NOTE(review): the return type, braces and the body that the comment at
   404 introduces (plus the return statement) are missing from this
   excerpt (listing numbers jump 402 -> 404 and stop there).  */
402 execute_init_datastructures (void)
404 /* Allocate hash tables, arrays and other structures. */
409 /* Gate: initialize or not the SSA datastructures. */
/* NOTE(review): the return type and braces are missing from this excerpt
   (listing numbers jump 412 -> 414).  Runs only when optimizing.  */
412 gate_init_datastructures (void)
414 return (optimize >= 1);
/* Pass descriptor wrapping execute_init_datastructures, gated on -O1+;
   requires an existing CFG.
   NOTE(review): listing numbers jump (417 -> 420, 421 -> 424), so the
   opening brace and several initializer fields (name, sub/next, tv_id,
   letter, closing brace) are missing from this excerpt.  */
417 struct tree_opt_pass pass_init_datastructures =
420 gate_init_datastructures, /* gate */
421 execute_init_datastructures, /* execute */
424 0, /* static_pass_number */
426 PROP_cfg, /* properties_required */
427 0, /* properties_provided */
428 0, /* properties_destroyed */
429 0, /* todo_flags_start */
430 0, /* todo_flags_finish */
/* Run the GIMPLE lowering passes over function FN (and, once IPA
   information is ready and we are optimizing, the early local
   sub-passes), then free dominance info and restore the previously
   current function.
   NOTE(review): the return type, braces and the trailing lines between
   447/449 and after 450 (presumably pop_cfun () and related bookkeeping)
   are missing from this excerpt -- recover from upstream.  */
435 tree_lowering_passes (tree fn)
437 tree saved_current_function_decl = current_function_decl;
439 current_function_decl = fn;
440 push_cfun (DECL_STRUCT_FUNCTION (fn));
441 tree_register_cfg_hooks ();
442 bitmap_obstack_initialize (NULL);
443 execute_pass_list (all_lowering_passes);
/* Early local passes only make sense when the cgraph is complete.  */
444 if (optimize && cgraph_global_info_ready)
445 execute_pass_list (pass_early_local_passes.sub);
446 free_dominance_info (CDI_POST_DOMINATORS);
447 free_dominance_info (CDI_DOMINATORS);
449 current_function_decl = saved_current_function_decl;
450 bitmap_obstack_release (NULL);
454 /* Update recursively all inlined_to pointers of functions
455 inlined into NODE to INLINED_TO. */
/* NOTE(review): the return type, braces (including the body braces of
   the for/if) are missing from this excerpt (listing numbers jump
   458 -> 460, 461 -> 463 and stop at 466).  Recursion only descends
   into callees that are themselves inlined somewhere.  */
457 update_inlined_to_pointers (struct cgraph_node *node,
458 struct cgraph_node *inlined_to)
460 struct cgraph_edge *e;
461 for (e = node->callees; e; e = e->next_callee)
463 if (e->callee->global.inlined_to)
465 e->callee->global.inlined_to = inlined_to;
466 update_inlined_to_pointers (e->callee, inlined_to);
472 /* For functions-as-trees languages, this performs all optimization and
473 compilation for FNDECL. */
/* NOTE(review): this excerpt is heavily gutted -- listing numbers skip
   the return type and braces (473 -> 476 -> 478), many block braces and
   statements inside the flag_inline_trees and warn_larger_than sections,
   and the function's closing lines after 591.  Additionally,
   "®_obstack" at former lines 536 and 540 is mojibake for
   "&reg_obstack" (an HTML "&reg;" entity was decoded) -- restore the
   original spelling when reconstructing.  */
476 tree_rest_of_compilation (tree fndecl)
478 location_t saved_loc;
479 struct cgraph_node *node;
481 timevar_push (TV_EXPAND);
483 gcc_assert (!flag_unit_at_a_time || cgraph_global_info_ready);
485 node = cgraph_node (fndecl);
487 /* Initialize the default bitmap obstack. */
488 bitmap_obstack_initialize (NULL);
490 /* We might need the body of this function so that we can expand
491 it inline somewhere else. */
492 if (cgraph_preserve_function_body_p (fndecl))
493 save_inline_function_body (node);
495 /* Initialize the RTL code for the function. */
496 current_function_decl = fndecl;
497 saved_loc = input_location;
498 input_location = DECL_SOURCE_LOCATION (fndecl);
499 init_function_start (fndecl);
501 /* Even though we're inside a function body, we still don't want to
502 call expand_expr to calculate the size of a variable-sized array.
503 We haven't necessarily assigned RTL to all variables yet, so it's
504 not safe to try to expand expressions involving them. */
505 cfun->x_dont_save_pending_sizes_p = 1;
507 tree_register_cfg_hooks ();
/* Perform tree inlining when requested; lines between 513 and 517
   (presumably the loop body / break and the guard around the timevar
   block) are missing from this excerpt.  */
509 if (flag_inline_trees)
511 struct cgraph_edge *e;
512 for (e = node->callees; e; e = e->next_callee)
513 if (!e->inline_failed || warn_inline)
517 timevar_push (TV_INTEGRATION);
518 optimize_inline_calls (fndecl);
519 timevar_pop (TV_INTEGRATION);
522 /* In non-unit-at-a-time we must mark all referenced functions as needed.
524 if (!flag_unit_at_a_time)
526 struct cgraph_edge *e;
527 for (e = node->callees; e; e = e->next_callee)
528 if (e->callee->analyzed)
529 cgraph_mark_needed_node (e->callee);
532 /* We are not going to maintain the cgraph edges up to date.
533 Kill it so it won't confuse us. */
534 cgraph_node_remove_callees (node);
536 bitmap_obstack_initialize (®_obstack); /* FIXME, only at RTL generation*/
537 /* Perform all tree transforms and optimizations. */
538 execute_pass_list (all_passes);
540 bitmap_obstack_release (®_obstack);
542 /* Release the default bitmap obstack. */
543 bitmap_obstack_release (NULL);
/* The GENERIC body is no longer needed once passes have run.  */
545 DECL_SAVED_TREE (fndecl) = NULL;
548 /* If requested, warn about function definitions where the function will
549 return a value (usually of some struct or union type) which itself will
550 take up a lot of stack space. */
551 if (warn_larger_than && !DECL_EXTERNAL (fndecl) && TREE_TYPE (fndecl))
553 tree ret_type = TREE_TYPE (TREE_TYPE (fndecl));
555 if (ret_type && TYPE_SIZE_UNIT (ret_type)
556 && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
557 && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
560 unsigned int size_as_int
561 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
/* Report the exact size when it fits in an unsigned int, otherwise
   fall back to the threshold value (the else branch's introduction is
   among the missing lines 562/566).  */
563 if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
564 warning (0, "size of return value of %q+D is %u bytes",
565 fndecl, size_as_int);
567 warning (0, "size of return value of %q+D is larger than %wd bytes",
568 fndecl, larger_than_size);
572 if (!flag_inline_trees)
574 DECL_SAVED_TREE (fndecl) = NULL;
575 if (DECL_STRUCT_FUNCTION (fndecl) == 0
576 && !cgraph_node (fndecl)->origin)
578 /* Stop pointing to the local nodes about to be freed.
579 But DECL_INITIAL must remain nonzero so we know this
580 was an actual function definition.
581 For a nested function, this is done in c_pop_function_context.
582 If rest_of_compilation set this to 0, leave it 0. */
583 if (DECL_INITIAL (fndecl) != 0)
584 DECL_INITIAL (fndecl) = error_mark_node;
588 input_location = saved_loc;
591 timevar_pop (TV_EXPAND);