1 /* Top level of GCC compilers (cc1, cc1plus, etc.)
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This is the top level of cc1/c++.
23 It parses command args, opens files, invokes the various passes
24 in the proper order, and counts the time used by each.
25 Error messages and low-level interface to malloc also handled here. */
28 #undef FLOAT /* This is for hpux. They should change hpux. */
29 #undef FFS /* Some systems define this in param.h. */
31 #include "coretypes.h"
35 #ifdef HAVE_SYS_RESOURCE_H
36 # include <sys/resource.h>
39 #ifdef HAVE_SYS_TIMES_H
40 # include <sys/times.h>
49 #include "insn-attr.h"
50 #include "insn-config.h"
51 #include "insn-flags.h"
52 #include "hard-reg-set.h"
59 #include "basic-block.h"
65 #include "diagnostic.h"
68 #include "dwarf2asm.h"
69 #include "integrate.h"
73 #include "langhooks.h"
74 #include "cfglayout.h"
76 #include "hosthooks.h"
80 #include "value-prof.h"
81 #include "alloc-pool.h"
82 #include "tree-pass.h"
83 #include "tree-dump.h"
85 #if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
86 #include "dwarf2out.h"
89 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
93 #ifdef SDB_DEBUGGING_INFO
97 #ifdef XCOFF_DEBUGGING_INFO
98 #include "xcoffout.h" /* Needed for external data
99 declarations for e.g. AIX 4.x. */
102 #ifndef HAVE_conditional_execution
103 #define HAVE_conditional_execution 0
106 /* Format to use to print dumpfile index value */
107 #ifndef DUMPFILE_FORMAT
108 #define DUMPFILE_FORMAT ".%02d."
/* Nonzero while the dump file for the current pass is being created for
   the first time this compilation; consulted by close_dump_file to decide
   whether graph dump files need cleaning.  Set by open_dump_file.  */
111 static int initializing_dump = 0;
113 /* Routine to open a dump file. Return true if the dump file is enabled. */
/* Open the dump file for pass INDEX and print a header naming DECL.
   Does nothing unless dumping is enabled for INDEX; aborts compilation
   via fatal_error if the file cannot be opened.  Timed under TV_DUMP.
   NOTE(review): this extract drops interior source lines (the embedded
   line numbering skips), so the body below is incomplete; it is kept
   byte-identical to the extract.  */
116 open_dump_file (enum tree_dump_index index, tree decl)
118 if (! dump_enabled_p (index))
121 timevar_push (TV_DUMP);
123 if (dump_file != NULL || dump_file_name != NULL)
126 dump_file_name = get_dump_file_name (index);
/* Remember whether this is the first dump for INDEX this run.  */
127 initializing_dump = !dump_initialized_p (index);
128 dump_file = dump_begin (index, NULL);
130 if (dump_file == NULL)
131 fatal_error ("can't open %s: %m", dump_file_name);
/* Header annotates the function as hot / unlikely-executed based on
   the profile-derived cfun->function_frequency.  */
134 fprintf (dump_file, "\n;; Function %s%s\n\n",
135 lang_hooks.decl_printable_name (decl, 2),
136 cfun->function_frequency == FUNCTION_FREQUENCY_HOT
138 : cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED
139 ? " (unlikely executed)"
142 timevar_pop (TV_DUMP);
146 /* Routine to close a dump file. */
/* Close the dump file for pass INDEX.  If FUNC is non-null it is invoked
   to print INSNS into the dump; a graph (.vcg-style) dump is also emitted
   when graph dumping is enabled.  Frees and clears dump_file_name.
   Timed under TV_DUMP.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
149 close_dump_file (enum tree_dump_index index,
150 void (*func) (FILE *, rtx),
156 timevar_push (TV_DUMP);
158 && graph_dump_format != no_graph)
160 /* If we've not initialized the files, do so now. */
161 if (initializing_dump)
162 clean_graph_dump_file (dump_file_name);
164 print_rtl_graph_with_bb (dump_file_name, insns);
168 func (dump_file, insns);
170 dump_end (index, dump_file);
/* dump_file_name was allocated by get_dump_file_name in open_dump_file;
   ownership ends here.  */
171 free ((char *) dump_file_name);
174 dump_file_name = NULL;
175 timevar_pop (TV_DUMP);
178 /* This is called from various places for FUNCTION_DECL, VAR_DECL,
181 This does nothing for local (non-static) variables, unless the
182 variable is a register variable with DECL_ASSEMBLER_NAME set. In
183 that case, or if the variable is not an automatic, it sets up the
184 RTL and outputs any assembler code (label definition, storage
185 allocation and initialization).
187 DECL is the declaration. TOP_LEVEL is nonzero
188 if this declaration is not within a function. */
/* Back-end processing for one declaration DECL (FUNCTION_DECL, VAR_DECL,
   TYPE_DECL, ...): emit any deferred "alias" attribute, create RTL for
   named register variables, assemble static/external variables (possibly
   deferring to the callgraph in unit-at-a-time mode), and emit debug info
   for type declarations.  TOP_LEVEL is nonzero outside any function.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
191 rest_of_decl_compilation (tree decl,
195 /* We deferred calling assemble_alias so that we could collect
196 other attributes such as visibility. Emit the alias now. */
199 alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
202 alias = TREE_VALUE (TREE_VALUE (alias));
203 alias = get_identifier (TREE_STRING_POINTER (alias));
204 assemble_alias (decl, alias);
208 /* Can't defer this, because it needs to happen before any
209 later function definitions are processed. */
210 if (DECL_REGISTER (decl) && DECL_ASSEMBLER_NAME_SET_P (decl))
211 make_decl_rtl (decl);
213 /* Forward declarations for nested functions are not "external",
214 but we need to treat them as if they were. */
215 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl)
216 || TREE_CODE (decl) == FUNCTION_DECL)
218 timevar_push (TV_VARCONST);
220 /* Don't output anything when a tentative file-scope definition
221 is seen. But at end of compilation, do output code for them.
223 We do output all variables when unit-at-a-time is active and rely on
224 callgraph code to defer them except for forward declarations
225 (see gcc.c-torture/compile/920624-1.c) */
227 || !DECL_DEFER_OUTPUT (decl)
228 || (flag_unit_at_a_time && DECL_INITIAL (decl)))
229 && !DECL_EXTERNAL (decl))
/* In unit-at-a-time mode, variables go through the varpool so the
   callgraph can decide when (and whether) to emit them.  */
231 if (flag_unit_at_a_time && !cgraph_global_info_ready
232 && TREE_CODE (decl) != FUNCTION_DECL && top_level)
233 cgraph_varpool_finalize_decl (decl);
235 assemble_variable (decl, top_level, at_end, 0);
238 #ifdef ASM_FINISH_DECLARE_OBJECT
239 if (decl == last_assemble_variable_decl)
241 ASM_FINISH_DECLARE_OBJECT (asm_out_file, decl,
246 timevar_pop (TV_VARCONST);
/* TYPE_DECLs produce no code, only debug information.  */
248 else if (TREE_CODE (decl) == TYPE_DECL)
250 timevar_push (TV_SYMOUT);
251 debug_hooks->type_decl (decl, !top_level);
252 timevar_pop (TV_SYMOUT);
256 /* Called after finishing a record, union or enumeral type. */
/* Emit debug information for a freshly-finished record/union/enum TYPE
   via its TYPE_STUB_DECL.  TOPLEV is nonzero at file scope.  Skipped
   entirely once any error or sorry has been reported, to avoid confusing
   the debug back ends with erroneous types.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
259 rest_of_type_compilation (tree type, int toplev)
261 /* Avoid confusing the debug information machinery when there are
263 if (errorcount != 0 || sorrycount != 0)
266 timevar_push (TV_SYMOUT);
267 debug_hooks->type_decl (TYPE_STUB_DECL (type), !toplev);
268 timevar_pop (TV_SYMOUT);
271 /* Turn the RTL into assembly. */
/* Final pass: turn the function's RTL into assembly output.  Emits the
   function header/footer, runs final(), outputs the exception table
   (before or after assemble_end_function depending on TARGET_UNWIND_INFO),
   flushes the assembler file, releases flow data, and emits per-function
   debug info.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
273 rest_of_handle_final (void)
275 timevar_push (TV_FINAL);
280 /* Get the function's name, as described by its RTL. This may be
281 different from the DECL_NAME name used in the source file. */
283 x = DECL_RTL (current_function_decl);
287 if (GET_CODE (x) != SYMBOL_REF)
289 fnname = XSTR (x, 0);
291 assemble_start_function (current_function_decl, fnname);
292 final_start_function (get_insns (), asm_out_file, optimize);
293 final (get_insns (), asm_out_file, optimize, 0);
294 final_end_function ();
296 #ifdef TARGET_UNWIND_INFO
297 /* ??? The IA-64 ".handlerdata" directive must be issued before
298 the ".endp" directive that closes the procedure descriptor. */
299 output_function_exception_table ();
302 assemble_end_function (current_function_decl, fnname);
304 #ifndef TARGET_UNWIND_INFO
305 /* Otherwise, it feels unclean to switch sections in the middle. */
306 output_function_exception_table ();
309 user_defined_section_attribute = false;
312 fflush (asm_out_file);
314 /* Release all memory allocated by flow. */
315 free_basic_block_vars ();
318 /* Write DBX symbols if requested. */
320 /* Note that for those inline functions where we don't initially
321 know for certain that we will be generating an out-of-line copy,
322 the first invocation of this routine (rest_of_compilation) will
323 skip over this code by doing a `goto exit_rest_of_compilation;'.
324 Later on, wrapup_global_declarations will (indirectly) call
325 rest_of_compilation again for those inline functions that need
326 to have out-of-line copies generated. During that call, we
327 *will* be routed past here. */
329 timevar_push (TV_SYMOUT);
330 (*debug_hooks->function_decl) (current_function_decl);
331 timevar_pop (TV_SYMOUT);
334 timevar_pop (TV_FINAL);
338 /* Run delay slot optimization. */
/* Delay-slot scheduling pass: fill branch delay slots via dbr_schedule,
   dumping under DFI_dbr, timed under TV_DBR_SCHED.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
340 rest_of_handle_delay_slots (void)
342 timevar_push (TV_DBR_SCHED);
343 open_dump_file (DFI_dbr, current_function_decl);
345 dbr_schedule (get_insns (), dump_file);
347 close_dump_file (DFI_dbr, print_rtl, get_insns ());
351 timevar_pop (TV_DBR_SCHED);
356 /* Convert register usage from flat register file usage to a stack
/* Convert flat register-file usage into stack-register form (reg-stack,
   e.g. for the x87 FP stack).  When reg_to_stack changed anything and we
   are optimizing, clean up the CFG and possibly reorder basic blocks.
   On HAVE_ATTR_length targets, pre-split insns when the second scheduling
   pass won't run, since final cannot split in that configuration.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
359 rest_of_handle_stack_regs (void)
361 #if defined (HAVE_ATTR_length)
362 /* If flow2 creates new instructions which need splitting
363 and scheduling after reload is not done, they might not be
364 split until final which doesn't allow splitting
365 if HAVE_ATTR_length. */
366 #ifdef INSN_SCHEDULING
367 if (optimize && !flag_schedule_insns_after_reload)
372 timevar_push (TV_SHORTEN_BRANCH);
374 timevar_pop (TV_SHORTEN_BRANCH);
378 timevar_push (TV_REG_STACK);
379 open_dump_file (DFI_stack, current_function_decl);
381 if (reg_to_stack (dump_file) && optimize)
383 if (cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_POST_REGSTACK
384 | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0))
385 && (flag_reorder_blocks || flag_reorder_blocks_and_partition))
387 reorder_basic_blocks (0);
388 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_POST_REGSTACK);
392 close_dump_file (DFI_stack, print_rtl_with_bb, get_insns ());
395 timevar_pop (TV_REG_STACK);
399 /* Track the variables, i.e. compute where the variable is stored at each position in function. */
/* Variable-tracking pass: compute where each user variable lives at every
   point in the function (for debug info).  Dumped under DFI_vartrack,
   timed under TV_VAR_TRACKING.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
401 rest_of_handle_variable_tracking (void)
403 timevar_push (TV_VAR_TRACKING);
404 open_dump_file (DFI_vartrack, current_function_decl);
406 variable_tracking_main ();
408 close_dump_file (DFI_vartrack, print_rtl_with_bb, get_insns ());
409 timevar_pop (TV_VAR_TRACKING);
412 /* Machine dependent reorg pass. */
/* Target-specific machine-dependent reorganization pass, delegated to
   targetm.machine_dependent_reorg.  Dumped under DFI_mach, timed under
   TV_MACH_DEP.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
414 rest_of_handle_machine_reorg (void)
416 timevar_push (TV_MACH_DEP);
417 open_dump_file (DFI_mach, current_function_decl);
419 targetm.machine_dependent_reorg ();
421 close_dump_file (DFI_mach, print_rtl, get_insns ());
424 timevar_pop (TV_MACH_DEP);
428 /* Run new register allocator. Return TRUE if we must exit
429 rest_of_compilation upon return. */
/* Run the new register allocator; per the comment above, it returns TRUE
   when rest_of_compilation must exit.  Deletes trivially dead insns,
   runs the allocator under TV_LOCAL_ALLOC/TV_GLOBAL_ALLOC, reloads, and
   marks reload_completed.  Dumps under DFI_lreg and DFI_greg.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
431 rest_of_handle_new_regalloc (void)
435 timevar_push (TV_LOCAL_ALLOC);
436 open_dump_file (DFI_lreg, current_function_decl);
438 delete_trivially_dead_insns (get_insns (), max_reg_num ());
441 timevar_pop (TV_LOCAL_ALLOC);
442 close_dump_file (DFI_lreg, NULL, NULL);
444 /* XXX clean up the whole mess to bring live info in shape again. */
445 timevar_push (TV_GLOBAL_ALLOC);
446 open_dump_file (DFI_greg, current_function_decl);
448 build_insn_chain (get_insns ());
449 failure = reload (get_insns (), 0);
451 timevar_pop (TV_GLOBAL_ALLOC);
455 if (dump_enabled_p (DFI_greg))
457 timevar_push (TV_DUMP);
458 dump_global_regs (dump_file);
459 timevar_pop (TV_DUMP);
460 close_dump_file (DFI_greg, print_rtl_with_bb, get_insns ());
466 reload_completed = 1;
471 /* Run old register allocator. Return TRUE if we must exit
472 rest_of_compilation upon return. */
/* Run the classic (local + global) register allocator; per the comment
   above, it returns TRUE when rest_of_compilation must exit.  Performs
   local allocation (rebuilding jump labels if local_alloc turned indirect
   jumps direct), then global allocation and reload.  Dumps under DFI_lreg
   and DFI_greg.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
474 rest_of_handle_old_regalloc (void)
479 timevar_push (TV_LOCAL_ALLOC);
480 open_dump_file (DFI_lreg, current_function_decl);
482 /* Allocate the reg_renumber array. */
483 allocate_reg_info (max_regno, FALSE, TRUE);
485 /* And the reg_equiv_memory_loc array. */
486 VARRAY_GROW (reg_equiv_memory_loc_varray, max_regno);
487 reg_equiv_memory_loc = &VARRAY_RTX (reg_equiv_memory_loc_varray, 0);
489 allocate_initial_values (reg_equiv_memory_loc);
491 regclass (get_insns (), max_reg_num (), dump_file);
492 rebuild_notes = local_alloc ();
494 timevar_pop (TV_LOCAL_ALLOC);
496 /* Local allocation may have turned an indirect jump into a direct
497 jump. If so, we must rebuild the JUMP_LABEL fields of jumping
501 timevar_push (TV_JUMP);
503 rebuild_jump_labels (get_insns ());
504 purge_all_dead_edges (0);
506 timevar_pop (TV_JUMP);
509 if (dump_enabled_p (DFI_lreg))
511 timevar_push (TV_DUMP);
512 dump_flow_info (dump_file);
513 dump_local_alloc (dump_file);
514 timevar_pop (TV_DUMP);
517 close_dump_file (DFI_lreg, print_rtl_with_bb, get_insns ());
521 timevar_push (TV_GLOBAL_ALLOC);
522 open_dump_file (DFI_greg, current_function_decl);
524 /* If optimizing, allocate remaining pseudo-regs. Do the reload
525 pass fixing up any insns that are invalid. */
528 failure = global_alloc (dump_file);
/* Non-optimizing path (presumably): reload directly without
   global_alloc — TODO confirm against the dropped conditional.  */
531 build_insn_chain (get_insns ());
532 failure = reload (get_insns (), 0);
535 if (dump_enabled_p (DFI_greg))
537 timevar_push (TV_DUMP);
538 dump_global_regs (dump_file);
539 timevar_pop (TV_DUMP);
541 close_dump_file (DFI_greg, print_rtl_with_bb, get_insns ());
546 timevar_pop (TV_GLOBAL_ALLOC);
551 /* Run the regrename and cprop passes. */
/* Post-reload register renaming and hard-register copy propagation,
   each gated on its own flag.  Dumped under DFI_rnreg, timed under
   TV_RENAME_REGISTERS.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
553 rest_of_handle_regrename (void)
555 timevar_push (TV_RENAME_REGISTERS);
556 open_dump_file (DFI_rnreg, current_function_decl);
558 if (flag_rename_registers)
559 regrename_optimize ();
560 if (flag_cprop_registers)
561 copyprop_hardreg_forward ();
563 close_dump_file (DFI_rnreg, print_rtl_with_bb, get_insns ());
564 timevar_pop (TV_RENAME_REGISTERS);
567 /* Reorder basic blocks. */
/* Late basic-block reordering: a last CFG cleanup, optional trace
   formation (tracer), block reordering, and a follow-up cleanup.  On
   conditional-execution targets, liveness is updated once at the end
   rather than incrementally.  Dumped under DFI_bbro.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
569 rest_of_handle_reorder_blocks (void)
572 unsigned int liveness_flags;
574 open_dump_file (DFI_bbro, current_function_decl);
576 /* Last attempt to optimize CFG, as scheduling, peepholing and insn
577 splitting possibly introduced more crossjumping opportunities. */
578 liveness_flags = (!HAVE_conditional_execution ? CLEANUP_UPDATE_LIFE : 0);
579 changed = cleanup_cfg (CLEANUP_EXPENSIVE | liveness_flags);
581 if (flag_sched2_use_traces && flag_schedule_insns_after_reload)
582 tracer (liveness_flags);
583 if (flag_reorder_blocks || flag_reorder_blocks_and_partition)
584 reorder_basic_blocks (liveness_flags);
585 if (flag_reorder_blocks || flag_reorder_blocks_and_partition
586 || (flag_sched2_use_traces && flag_schedule_insns_after_reload))
587 changed |= cleanup_cfg (CLEANUP_EXPENSIVE | liveness_flags);
589 /* On conditional execution targets we can not update the life cheaply, so
590 we deffer the updating to after both cleanups. This may lose some cases
591 but should not be terribly bad. */
592 if (changed && HAVE_conditional_execution)
593 update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
595 close_dump_file (DFI_bbro, print_rtl_with_bb, get_insns ());
598 /* Partition hot and cold basic blocks. */
/* Partition basic blocks into hot and cold sections, then rebuild
   register life data globally (removing stale notes).
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
600 rest_of_handle_partition_blocks (void)
603 partition_hot_cold_basic_blocks ();
604 allocate_reg_life_data ();
605 update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
606 PROP_LOG_LINKS | PROP_REG_INFO | PROP_DEATH_NOTES);
610 #ifdef INSN_SCHEDULING
611 /* Run instruction scheduler. */
612 /* Perform SMS module scheduling. */
/* Swing modulo scheduling (software pipelining) pass.  SMS may create
   new pseudos, so register info and dirty-block liveness are recomputed
   afterwards.  Dumped under DFI_sms, timed under TV_SMS.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
614 rest_of_handle_sms (void)
616 timevar_push (TV_SMS);
617 open_dump_file (DFI_sms, current_function_decl);
619 /* We want to be able to create new pseudos. */
621 sms_schedule (dump_file);
622 close_dump_file (DFI_sms, print_rtl, get_insns ());
625 /* Update the life information, because we add pseudos. */
626 max_regno = max_reg_num ();
627 allocate_reg_info (max_regno, FALSE, FALSE);
628 update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
630 | PROP_KILL_DEAD_CODE
631 | PROP_SCAN_DEAD_CODE));
635 timevar_pop (TV_SMS);
638 /* Run instruction scheduler. */
/* First instruction-scheduling pass (before register allocation).
   Dumped under DFI_sched, timed under TV_SCHED.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
640 rest_of_handle_sched (void)
642 timevar_push (TV_SCHED);
644 /* Print function header into sched dump now
645 because doing the sched analysis makes some of the dump. */
646 open_dump_file (DFI_sched, current_function_decl);
648 /* Do control and data sched analysis,
649 and write some of the results to dump file. */
651 schedule_insns (dump_file);
653 close_dump_file (DFI_sched, print_rtl_with_bb, get_insns ());
656 timevar_pop (TV_SCHED);
659 /* Run second scheduling pass after reload. */
/* Second scheduling pass, after reload.  Uses extended-basic-block or
   trace scheduling when requested (then scrubs death notes and cleans
   the CFG, since liveness is not updated), otherwise normal scheduling.
   Dumped under DFI_sched2, timed under TV_SCHED2.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
661 rest_of_handle_sched2 (void)
663 timevar_push (TV_SCHED2);
664 open_dump_file (DFI_sched2, current_function_decl);
666 /* Do control and data sched analysis again,
667 and write some more of the results to dump file. */
671 if (flag_sched2_use_superblocks || flag_sched2_use_traces)
673 schedule_ebbs (dump_file);
674 /* No liveness updating code yet, but it should be easy to do.
675 reg-stack recomputes the liveness when needed for now. */
676 count_or_remove_death_notes (NULL, 1);
677 cleanup_cfg (CLEANUP_EXPENSIVE);
680 schedule_insns (dump_file);
682 close_dump_file (DFI_sched2, print_rtl_with_bb, get_insns ());
686 timevar_pop (TV_SCHED2);
/* Global CSE after reload (gcse2): run gcse_after_reload_main, rebuild
   jump labels, and delete trivially dead insns.  Dumped under DFI_gcse2,
   timed under TV_GCSE_AFTER_RELOAD.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
691 rest_of_handle_gcse2 (void)
693 timevar_push (TV_GCSE_AFTER_RELOAD);
694 open_dump_file (DFI_gcse2, current_function_decl);
696 gcse_after_reload_main (get_insns ());
697 rebuild_jump_labels (get_insns ());
698 delete_trivially_dead_insns (get_insns (), max_reg_num ());
699 close_dump_file (DFI_gcse2, print_rtl_with_bb, get_insns ());
703 #ifdef ENABLE_CHECKING
707 timevar_pop (TV_GCSE_AFTER_RELOAD);
710 /* Register allocation pre-pass, to reduce number of moves necessary
711 for two-address machines. */
/* Register-move pre-pass: reduce moves needed on two-address machines
   via regmove_optimize, then clean the CFG with liveness update.
   Dumped under DFI_regmove, timed under TV_REGMOVE.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
713 rest_of_handle_regmove (void)
715 timevar_push (TV_REGMOVE);
716 open_dump_file (DFI_regmove, current_function_decl);
718 regmove_optimize (get_insns (), max_reg_num (), dump_file);
720 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE);
721 close_dump_file (DFI_regmove, print_rtl_with_bb, get_insns ());
724 timevar_pop (TV_REGMOVE);
/* Tracer pass wrapper: dump flow info, clean the CFG, and rescan
   registers.  Dumped under DFI_tracer.
   NOTE(review): extract is missing interior lines (the tracer () call
   itself appears to have been dropped); kept byte-identical.  */
729 rest_of_handle_tracer (void)
731 open_dump_file (DFI_tracer, current_function_decl);
733 dump_flow_info (dump_file);
735 cleanup_cfg (CLEANUP_EXPENSIVE);
736 reg_scan (get_insns (), max_reg_num (), 0);
737 close_dump_file (DFI_tracer, print_rtl_with_bb, get_insns ());
740 /* If-conversion and CFG cleanup. */
/* First if-conversion pass plus CFG cleanup.  When flag_if_conversion is
   set, the converted CFG is cleaned and registers rescanned; otherwise
   only a jump-timed cleanup and rescan run.  Dumped under DFI_ce1, timed
   under TV_IFCVT.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
742 rest_of_handle_if_conversion (void)
744 timevar_push (TV_IFCVT);
745 open_dump_file (DFI_ce1, current_function_decl);
747 if (flag_if_conversion)
750 dump_flow_info (dump_file);
751 cleanup_cfg (CLEANUP_EXPENSIVE);
752 reg_scan (get_insns (), max_reg_num (), 0);
756 timevar_push (TV_JUMP);
757 cleanup_cfg (CLEANUP_EXPENSIVE);
758 reg_scan (get_insns (), max_reg_num (), 0);
759 timevar_pop (TV_JUMP);
761 close_dump_file (DFI_ce1, print_rtl_with_bb, get_insns ());
762 timevar_pop (TV_IFCVT);
765 /* Rerun if-conversion, as combine may have simplified things enough
766 to now meet sequence length restrictions. */
/* Rerun if-conversion after combine (see comment above): combine may
   have shortened sequences enough to satisfy conversion limits.
   Dumped under DFI_ce2, timed under TV_IFCVT.
   NOTE(review): extract is missing interior lines (the if_convert call
   itself was dropped); kept byte-identical.  */
768 rest_of_handle_if_after_combine (void)
770 timevar_push (TV_IFCVT);
771 open_dump_file (DFI_ce2, current_function_decl);
777 close_dump_file (DFI_ce2, print_rtl_with_bb, get_insns ());
778 timevar_pop (TV_IFCVT);
/* Third if-conversion pass, after reload: a final CFG cleanup (with
   liveness update and optional crossjumping), then if-conversion when
   flag_if_conversion2 is set.  Dumped under DFI_ce3, timed under
   TV_IFCVT2.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
782 rest_of_handle_if_after_reload (void)
784 timevar_push (TV_IFCVT2);
785 open_dump_file (DFI_ce3, current_function_decl);
787 /* Last attempt to optimize CFG, as scheduling, peepholing and insn
788 splitting possibly introduced more crossjumping opportunities. */
789 cleanup_cfg (CLEANUP_EXPENSIVE
790 | CLEANUP_UPDATE_LIFE
791 | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0));
792 if (flag_if_conversion2)
794 close_dump_file (DFI_ce3, print_rtl_with_bb, get_insns ());
795 timevar_pop (TV_IFCVT2);
/* Web construction pass: split independent uses of pseudos (the web_main
   call itself appears dropped from this extract), then delete dead insns,
   clean the CFG, and rescan registers.  Dumped under DFI_web, timed
   under TV_WEB.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
799 rest_of_handle_web (void)
801 open_dump_file (DFI_web, current_function_decl);
802 timevar_push (TV_WEB);
804 delete_trivially_dead_insns (get_insns (), max_reg_num ());
805 cleanup_cfg (CLEANUP_EXPENSIVE);
807 timevar_pop (TV_WEB);
808 close_dump_file (DFI_web, print_rtl_with_bb, get_insns ());
809 reg_scan (get_insns (), max_reg_num (), 0);
812 /* Do branch profiling and static profile estimation passes. */
/* Branch profiling and static profile estimation: discover natural
   loops, optionally estimate branch probabilities heuristically when no
   profile data exists, then free loop and dominance structures.
   Dumped under DFI_bp, timed under TV_BRANCH_PROB.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
814 rest_of_handle_branch_prob (void)
818 timevar_push (TV_BRANCH_PROB);
819 open_dump_file (DFI_bp, current_function_decl);
821 if (profile_arc_flag || flag_test_coverage || flag_branch_probabilities)
824 /* Discover and record the loop depth at the head of each basic
825 block. The loop infrastructure does the real job for us. */
826 flow_loops_find (&loops, LOOP_TREE);
829 flow_loops_dump (&loops, dump_file, NULL, 0);
831 /* Estimate using heuristics if no profiling info is available. */
832 if (flag_guess_branch_prob)
833 estimate_probability (&loops);
835 flow_loops_free (&loops);
836 free_dominance_info (CDI_DOMINATORS);
837 close_dump_file (DFI_bp, print_rtl_with_bb, get_insns ());
838 timevar_pop (TV_BRANCH_PROB);
841 /* Do optimizations based on expression value profiles. */
/* Value-profile-driven transformations; if any were applied, clean the
   CFG.  Dumped under DFI_vpt, timed under TV_VPT.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
843 rest_of_handle_value_profile_transformations (void)
845 open_dump_file (DFI_vpt, current_function_decl);
846 timevar_push (TV_VPT);
848 if (value_profile_transformations ())
849 cleanup_cfg (CLEANUP_EXPENSIVE);
851 timevar_pop (TV_VPT);
852 close_dump_file (DFI_vpt, print_rtl_with_bb, get_insns ());
855 /* Do control and data flow analysis; write some of the results to the
/* Control/data-flow analysis pass: clean the CFG (with optional jump
   threading) and, under the profiling-related conditions spelled out in
   the long comment below, mark the function constant after a reg_scan
   (alias analysis needs it).  Dumped under DFI_cfg.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
858 rest_of_handle_cfg (void)
860 open_dump_file (DFI_cfg, current_function_decl);
862 dump_flow_info (dump_file);
864 cleanup_cfg (CLEANUP_EXPENSIVE
865 | (flag_thread_jumps ? CLEANUP_THREADING : 0));
867 /* It may make more sense to mark constant functions after dead code is
868 eliminated by life_analysis, but we need to do it early, as -fprofile-arcs
869 may insert code making function non-constant, but we still must consider
870 it as constant, otherwise -fbranch-probabilities will not read data back.
872 life_analysis rarely eliminates modification of external memory.
874 FIXME: now with tree based profiling we are in the trap described above
875 again. It seems to be easiest to disable the optimization for time
876 being before the problem is either solved by moving the transformation
877 to the IPA level (we need the CFG for this) or the very early optimization
878 passes are made to ignore the const/pure flags so code does not change. */
880 && (!flag_tree_based_profiling
881 || (!profile_arc_flag && !flag_branch_probabilities)))
883 /* Alias analysis depends on this information and mark_constant_function
884 depends on alias analysis. */
885 reg_scan (get_insns (), max_reg_num (), 1);
886 mark_constant_function ();
889 close_dump_file (DFI_cfg, print_rtl_with_bb, get_insns ());
892 /* Perform jump bypassing and control flow optimizations. */
/* Jump-bypassing pass (a form of jump threading using GCSE machinery):
   clean the CFG, rescan registers, and if bypass_jumps changed anything,
   rebuild jump labels, re-clean the CFG, and delete dead insns.
   Dumped under DFI_bypass, timed under TV_BYPASS.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
894 rest_of_handle_jump_bypass (void)
896 timevar_push (TV_BYPASS);
897 open_dump_file (DFI_bypass, current_function_decl);
899 cleanup_cfg (CLEANUP_EXPENSIVE);
900 reg_scan (get_insns (), max_reg_num (), 1);
902 if (bypass_jumps (dump_file))
904 rebuild_jump_labels (get_insns ());
905 cleanup_cfg (CLEANUP_EXPENSIVE);
906 delete_trivially_dead_insns (get_insns (), max_reg_num ());
909 close_dump_file (DFI_bypass, print_rtl_with_bb, get_insns ());
910 timevar_pop (TV_BYPASS);
914 #ifdef ENABLE_CHECKING
919 /* Try combining insns through substitution. */
/* Instruction combination pass: combine_instructions may fold insns via
   substitution; if it turned an indirect jump direct, jump labels are
   rebuilt and the CFG cleaned with liveness update.  Dumped under
   DFI_combine, timed under TV_COMBINE.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
921 rest_of_handle_combine (void)
923 int rebuild_jump_labels_after_combine = 0;
925 timevar_push (TV_COMBINE);
926 open_dump_file (DFI_combine, current_function_decl);
928 rebuild_jump_labels_after_combine
929 = combine_instructions (get_insns (), max_reg_num ());
931 /* Combining insns may have turned an indirect jump into a
932 direct jump. Rebuild the JUMP_LABEL fields of jumping
934 if (rebuild_jump_labels_after_combine)
936 timevar_push (TV_JUMP);
937 rebuild_jump_labels (get_insns ());
938 timevar_pop (TV_JUMP);
940 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE);
943 close_dump_file (DFI_combine, print_rtl_with_bb, get_insns ());
944 timevar_pop (TV_COMBINE);
949 /* Perform life analysis. */
/* Life analysis pass: run life_analysis with PROP_FINAL, clean the CFG,
   warn about setjmp-clobbered variables/arguments, and (under the old
   allocator) initialize uninitialized subregs — recomputing life data if
   that inserted insns.  Dumped under DFI_life.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
951 rest_of_handle_life (void)
953 open_dump_file (DFI_life, current_function_decl);
956 #ifdef ENABLE_CHECKING
959 life_analysis (dump_file, PROP_FINAL);
961 cleanup_cfg ((optimize ? CLEANUP_EXPENSIVE : 0) | CLEANUP_UPDATE_LIFE
963 | (flag_thread_jumps ? CLEANUP_THREADING : 0));
967 setjmp_vars_warning (DECL_INITIAL (current_function_decl));
968 setjmp_args_warning ();
973 if (!flag_new_regalloc && initialize_uninitialized_subregs ())
975 /* Insns were inserted, and possibly pseudos created, so
976 things might look a bit different. */
977 allocate_reg_life_data ();
978 update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
979 PROP_LOG_LINKS | PROP_REG_INFO | PROP_DEATH_NOTES);
985 close_dump_file (DFI_life, print_rtl_with_bb, get_insns ());
990 /* Perform common subexpression elimination. Nonzero value from
991 `cse_main' means that jumps were simplified and some code may now
992 be unreachable, so do jump optimization again. */
/* First common-subexpression-elimination pass.  A nonzero result from
   cse_main means jumps were simplified, so unreachable blocks are purged
   and the CFG re-cleaned.  Also sets cse_not_expected for later passes.
   Dumped under DFI_cse, timed under TV_CSE.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
994 rest_of_handle_cse (void)
998 open_dump_file (DFI_cse, current_function_decl);
1000 dump_flow_info (dump_file);
1001 timevar_push (TV_CSE);
1003 reg_scan (get_insns (), max_reg_num (), 1);
1005 tem = cse_main (get_insns (), max_reg_num (), dump_file);
1007 rebuild_jump_labels (get_insns ());
1008 if (purge_all_dead_edges (0))
1009 delete_unreachable_blocks ();
1011 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1013 /* If we are not running more CSE passes, then we are no longer
1014 expecting CSE to be run. But always rerun it in a cheap mode. */
1015 cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
1017 if (tem || optimize > 1)
1018 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1020 timevar_pop (TV_CSE);
1021 close_dump_file (DFI_cse, print_rtl_with_bb, get_insns ());
1026 /* Run second CSE pass after loop optimizations. */
/* Second CSE pass, after loop optimization.  Also eliminates duplicate
   condition-code-register assignments (which must follow bypass_jumps),
   purges dead edges, deletes dead insns, rebuilds jump labels, and
   re-cleans the CFG.  Dumped under DFI_cse2, timed under TV_CSE2.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
1028 rest_of_handle_cse2 (void)
1032 timevar_push (TV_CSE2);
1033 open_dump_file (DFI_cse2, current_function_decl);
1035 dump_flow_info (dump_file);
1036 /* CFG is no longer maintained up-to-date. */
1037 tem = cse_main (get_insns (), max_reg_num (), dump_file);
1039 /* Run a pass to eliminate duplicated assignments to condition code
1040 registers. We have to run this after bypass_jumps, because it
1041 makes it harder for that pass to determine whether a jump can be
1043 cse_condition_code_reg ();
1045 purge_all_dead_edges (0);
1046 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1050 timevar_push (TV_JUMP);
1051 rebuild_jump_labels (get_insns ());
1052 cleanup_cfg (CLEANUP_EXPENSIVE);
1053 timevar_pop (TV_JUMP);
1055 reg_scan (get_insns (), max_reg_num (), 0);
1056 close_dump_file (DFI_cse2, print_rtl_with_bb, get_insns ());
1057 timevar_pop (TV_CSE2);
1062 /* Perform global cse. */
/* Global CSE pass (gcse_main), followed by jump-label rebuild and dead
   insn removal.  With -fexpensive-optimizations, classic CSE reruns
   (with cse-skip-blocks/cse-follow-jumps temporarily disabled and later
   restored) to clean up after GCSE; if any jumps changed, jump labels
   and the CFG are refreshed and CSE may run once more.  Dumped under
   DFI_gcse, timed under TV_GCSE.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
1064 rest_of_handle_gcse (void)
1066 int save_csb, save_cfj;
1069 timevar_push (TV_GCSE);
1070 open_dump_file (DFI_gcse, current_function_decl);
1072 tem = gcse_main (get_insns (), dump_file);
1073 rebuild_jump_labels (get_insns ());
1074 delete_trivially_dead_insns (get_insns (), max_reg_num ());
/* Save and suppress the follow-jumps/skip-blocks variants of CSE for
   the cleanup runs below; restored at the end of the function.  */
1076 save_csb = flag_cse_skip_blocks;
1077 save_cfj = flag_cse_follow_jumps;
1078 flag_cse_skip_blocks = flag_cse_follow_jumps = 0;
1080 /* If -fexpensive-optimizations, re-run CSE to clean up things done
1082 if (flag_expensive_optimizations)
1084 timevar_push (TV_CSE);
1085 reg_scan (get_insns (), max_reg_num (), 1);
1086 tem2 = cse_main (get_insns (), max_reg_num (), dump_file);
1087 purge_all_dead_edges (0);
1088 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1089 timevar_pop (TV_CSE);
1090 cse_not_expected = !flag_rerun_cse_after_loop;
1093 /* If gcse or cse altered any jumps, rerun jump optimizations to clean
1094 things up. Then possibly re-run CSE again. */
1098 timevar_push (TV_JUMP);
1099 rebuild_jump_labels (get_insns ());
1100 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1101 timevar_pop (TV_JUMP);
1103 if (flag_expensive_optimizations)
1105 timevar_push (TV_CSE);
1106 reg_scan (get_insns (), max_reg_num (), 1);
1107 tem2 = cse_main (get_insns (), max_reg_num (), dump_file);
1108 purge_all_dead_edges (0);
1109 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1110 timevar_pop (TV_CSE);
1114 close_dump_file (DFI_gcse, print_rtl_with_bb, get_insns ());
1115 timevar_pop (TV_GCSE);
1118 flag_cse_skip_blocks = save_csb;
1119 flag_cse_follow_jumps = save_cfj;
1120 #ifdef ENABLE_CHECKING
1121 verify_flow_info ();
1125 /* Move constant computations out of loops. */
/* Classic RTL loop optimizer: hoist invariants out of loops (with
   optional prefetching).  Drops CFG maintenance (free_bb_for_insn) while
   loop_optimize runs, optionally runs loop_optimize twice (unrolling
   only once), deletes trivially dead insns, and rebuilds basic blocks.
   Dumped under DFI_loop, timed under TV_LOOP.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
1127 rest_of_handle_loop_optimize (void)
1131 timevar_push (TV_LOOP);
1132 delete_dead_jumptables ();
1133 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1134 open_dump_file (DFI_loop, current_function_decl);
1136 /* CFG is no longer maintained up-to-date. */
1137 free_bb_for_insn ();
1138 profile_status = PROFILE_ABSENT;
1140 do_prefetch = flag_prefetch_loop_arrays ? LOOP_PREFETCH : 0;
1142 if (flag_rerun_loop_opt)
1144 cleanup_barriers ();
1146 /* We only want to perform unrolling once. */
1147 loop_optimize (get_insns (), dump_file, 0);
1149 /* The first call to loop_optimize makes some instructions
1150 trivially dead. We delete those instructions now in the
1151 hope that doing so will make the heuristics in loop work
1152 better and possibly speed up compilation. */
1153 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1155 /* The regscan pass is currently necessary as the alias
1156 analysis code depends on this information. */
1157 reg_scan (get_insns (), max_reg_num (), 1);
1159 cleanup_barriers ();
1160 loop_optimize (get_insns (), dump_file, do_prefetch);
1162 /* Loop can create trivially dead instructions. */
1163 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1164 find_basic_blocks (get_insns (), max_reg_num (), dump_file);
1165 close_dump_file (DFI_loop, print_rtl, get_insns ());
1166 timevar_pop (TV_LOOP);
1171 /* Perform loop optimizations. It might be better to do them a bit
1172 sooner, but we want the profile feedback to work more
/* New (CFG-based) loop optimizer: invariant motion, loop unswitching,
   unrolling/peeling, and doloop optimization, all operating on the
   cfglayout representation.  Bails out early when none of the relevant
   flags are set.  Dumped under DFI_loop2, timed under TV_LOOP.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
1175 rest_of_handle_loop2 (void)
1177 struct loops *loops;
1180 if (!flag_move_loop_invariants
1181 && !flag_unswitch_loops
1183 && !flag_unroll_loops
1184 && !flag_branch_on_count_reg)
1187 timevar_push (TV_LOOP);
1188 open_dump_file (DFI_loop2, current_function_decl);
1190 dump_flow_info (dump_file);
1192 /* Initialize structures for layout changes. */
1193 cfg_layout_initialize (0);
1195 loops = loop_optimizer_init (dump_file);
1199 /* The optimizations: */
1200 if (flag_move_loop_invariants)
1201 move_loop_invariants (loops);
1203 if (flag_unswitch_loops)
1204 unswitch_loops (loops);
1206 if (flag_peel_loops || flag_unroll_loops)
1207 unroll_and_peel_loops (loops,
1208 (flag_peel_loops ? UAP_PEEL : 0) |
1209 (flag_unroll_loops ? UAP_UNROLL : 0) |
1210 (flag_unroll_all_loops ? UAP_UNROLL_ALL : 0));
1212 #ifdef HAVE_doloop_end
1213 if (flag_branch_on_count_reg && HAVE_doloop_end)
1214 doloop_optimize_loops (loops);
1215 #endif /* HAVE_doloop_end */
1217 loop_optimizer_finalize (loops, dump_file);
1220 free_dominance_info (CDI_DOMINATORS);
1222 /* Finalize layout changes. */
/* Chain each block to its linear successor before leaving cfglayout
   mode; presumably inside a FOR_EACH_BB loop dropped by the extract.  */
1224 if (bb->next_bb != EXIT_BLOCK_PTR)
1225 bb->rbi->next = bb->next_bb;
1226 cfg_layout_finalize ();
1228 cleanup_cfg (CLEANUP_EXPENSIVE);
1229 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1230 reg_scan (get_insns (), max_reg_num (), 0);
1232 dump_flow_info (dump_file);
1233 close_dump_file (DFI_loop2, print_rtl_with_bb, get_insns ());
1234 timevar_pop (TV_LOOP);
/* Branch-target-register load optimization.  Warns (once, via the local
   static `warned`) when both btr-load passes are enabled, since running
   the pass twice shares one dump file.  Dumped under
   DFI_branch_target_load.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
1239 rest_of_handle_branch_target_load_optimize (void)
1241 static int warned = 0;
1243 /* Leave this a warning for now so that it is possible to experiment
1244 with running this pass twice. In 3.6, we should either make this
1245 an error, or use separate dump files. */
1246 if (flag_branch_target_load_optimize
1247 && flag_branch_target_load_optimize2
1250 warning ("branch target register load optimization is not intended "
1256 open_dump_file (DFI_branch_target_load, current_function_decl);
1257 branch_target_load_optimize (epilogue_completed);
1258 close_dump_file (DFI_branch_target_load, print_rtl_with_bb, get_insns ());
1262 #ifdef OPTIMIZE_MODE_SWITCHING
/* Mode-switching optimization (only compiled on OPTIMIZE_MODE_SWITCHING
   targets).  Timed under TV_MODE_SWITCH.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
1264 rest_of_handle_mode_switching (void)
1266 timevar_push (TV_MODE_SWITCH);
1269 optimize_mode_switching (NULL);
1272 timevar_pop (TV_MODE_SWITCH);
/* Early jump pass (dumped under DFI_sibling): delete unreachable blocks,
   verify flow info under ENABLE_CHECKING, and fix up tail calls emitted
   during expansion.  Timed under TV_JUMP.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
1277 rest_of_handle_jump (void)
1281 timevar_push (TV_JUMP);
1282 open_dump_file (DFI_sibling, current_function_decl);
1284 delete_unreachable_blocks ();
1285 #ifdef ENABLE_CHECKING
1286 verify_flow_info ();
1289 if (cfun->tail_call_emit)
1290 fixup_tail_calls ();
1292 close_dump_file (DFI_sibling, print_rtl, get_insns ());
1293 timevar_pop (TV_JUMP);
/* Complete exception-handling code generation: initialize insn locators,
   clean the CFG (without deleting insns), run finish_eh_generation, and
   clean up again.  Dumped under DFI_eh, timed under TV_JUMP.
   NOTE(review): extract is missing interior lines; kept byte-identical.  */
1297 rest_of_handle_eh (void)
1299 insn_locators_initialize ();
1300 /* Complete generation of exception handling code. */
1303 timevar_push (TV_JUMP);
1304 open_dump_file (DFI_eh, current_function_decl);
1306 cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
1308 finish_eh_generation ();
1310 cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
1312 close_dump_file (DFI_eh, print_rtl, get_insns ());
1313 timevar_pop (TV_JUMP);
/* Post-reload stack adjustment combining.  Recomputes life info, then
   runs combine_stack_adjustments on targets without PUSH_ROUNDING
   when outgoing args are not accumulated.  Called from
   rest_of_handle_flow2 after prologue/epilogue threading.  */
1318 rest_of_handle_stack_adjustments (void)
1320 life_analysis (dump_file, PROP_POSTRELOAD);
1321 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE
1322 | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0));
1324 /* This is kind of a heuristic. We need to run combine_stack_adjustments
1325 even for machines with possibly nonzero RETURN_POPS_ARGS
1326 and ACCUMULATE_OUTGOING_ARGS. We expect that only ports having
1327 push instructions will have popping returns. */
1328 #ifndef PUSH_ROUNDING
1329 if (!ACCUMULATE_OUTGOING_ARGS)
1331 combine_stack_adjustments ();
/* Second flow pass, run after register allocation: split insns,
   optionally run the first branch-target-load run, thread the
   prologue/epilogue, and redo stack adjustments.  Sets
   epilogue_completed and flow2_completed.  Dumped under DFI_flow2.  */
1335 rest_of_handle_flow2 (void)
1337 timevar_push (TV_FLOW2);
1338 open_dump_file (DFI_flow2, current_function_decl);
1340 /* Re-create the death notes which were deleted during reload. */
1341 #ifdef ENABLE_CHECKING
1342 verify_flow_info ();
1345 /* If optimizing, then go ahead and split insns now. */
1349 split_all_insns (0);
/* First of (up to) two runs; the second happens near the end of
   rest_of_compilation under flag_branch_target_load_optimize2.  */
1351 if (flag_branch_target_load_optimize)
1352 rest_of_handle_branch_target_load_optimize ();
1355 cleanup_cfg (CLEANUP_EXPENSIVE);
1357 /* On some machines, the prologue and epilogue code, or parts thereof,
1358 can be represented as RTL. Doing so lets us schedule insns between
1359 it and the rest of the code and also allows delayed branch
1360 scheduling to operate in the epilogue. */
1361 thread_prologue_and_epilogue_insns (get_insns ());
1362 epilogue_completed = 1;
1365 rest_of_handle_stack_adjustments ();
1367 flow2_completed = 1;
1369 close_dump_file (DFI_flow2, print_rtl_with_bb, get_insns ());
1370 timevar_pop (TV_FLOW2);
/* Second jump pass, run after virtual-register instantiation:
   converts expected-value notes to branch probabilities, deletes dead
   insns, cleans up the CFG (with jump threading if enabled), creates
   loop notes, and renumbers insns to shrink the UID space.  Dumped
   under DFI_jump, timed under TV_JUMP.  */
1377 rest_of_handle_jump2 (void)
1379 open_dump_file (DFI_jump, current_function_decl);
1381 /* Always do one jump optimization pass to ensure that JUMP_LABEL fields
1382 are initialized and to compute whether control can drop off the end
1385 timevar_push (TV_JUMP);
1386 /* Turn NOTE_INSN_EXPECTED_VALUE into REG_BR_PROB. Do this
1387 before jump optimization switches branch directions. */
1388 if (flag_guess_branch_prob)
1389 expected_value_to_br_prob ();
1391 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1392 reg_scan (get_insns (), max_reg_num (), 0);
1394 dump_flow_info (dump_file);
1395 cleanup_cfg ((optimize ? CLEANUP_EXPENSIVE : 0) | CLEANUP_PRE_LOOP
1396 | (flag_thread_jumps ? CLEANUP_THREADING : 0));
1398 create_loop_notes ();
1400 purge_line_number_notes (get_insns ());
1403 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1405 /* Jump optimization, and the removal of NULL pointer checks, may
1406 have reduced the number of instructions substantially. CSE, and
1407 future passes, allocate arrays whose dimensions involve the
1408 maximum instruction UID, so if we can reduce the maximum UID
1409 we'll save big on memory. */
1410 renumber_insns (dump_file);
1412 close_dump_file (DFI_jump, print_rtl_with_bb, get_insns ());
1413 timevar_pop (TV_JUMP);
1418 #ifdef HAVE_peephole2
/* Run the machine-description-driven peephole2 optimizer.  Only
   compiled in when the target provides peephole2 patterns.  Dumped
   under DFI_peephole2, timed under TV_PEEPHOLE2.  */
1420 rest_of_handle_peephole2 (void)
1422 timevar_push (TV_PEEPHOLE2);
1423 open_dump_file (DFI_peephole2, current_function_decl);
1425 peephole2_optimize (dump_file);
1427 close_dump_file (DFI_peephole2, print_rtl_with_bb, get_insns ());
1428 timevar_pop (TV_PEEPHOLE2);
/* Post-reload CSE over hard registers, plus removal of EH edges that
   became dead when trapping MEMs were eliminated.  Dumped under
   DFI_postreload, timed under TV_RELOAD_CSE_REGS.  */
1433 rest_of_handle_postreload (void)
1435 timevar_push (TV_RELOAD_CSE_REGS);
1436 open_dump_file (DFI_postreload, current_function_decl);
1438 /* Do a very simple CSE pass over just the hard registers. */
1439 reload_cse_regs (get_insns ());
1440 /* reload_cse_regs can eliminate potentially-trapping MEMs.
1441 Remove any EH edges associated with them. */
1442 if (flag_non_call_exceptions)
1443 purge_all_dead_edges (0);
1445 close_dump_file (DFI_postreload, print_rtl_with_bb, get_insns ());
1446 timevar_pop (TV_RELOAD_CSE_REGS);
/* Compute insn lengths and shorten branches where possible, timed
   under TV_SHORTEN_BRANCH.  */
1450 rest_of_handle_shorten_branches (void)
1452 /* Shorten branches. */
1453 timevar_push (TV_SHORTEN_BRANCH);
1454 shorten_branches (get_insns ());
1455 timevar_pop (TV_SHORTEN_BRANCH);
/* Tear down per-function state after compilation (or after an early
   exit on errors): decompose the insn chain so the GC can reclaim it,
   reset global pass-completion flags, free CFG data, record the
   preferred incoming stack boundary for the callgraph, and release
   the function's memory.  After this, cfun's contents are gone.  */
1459 rest_of_clean_state (void)
1462 coverage_end_function ();
1464 /* It is very important to decompose the RTL instruction chain here:
1465 debug information keeps pointing into CODE_LABEL insns inside the function
1466 body. If these remain pointing to the other insns, we end up preserving
1467 whole RTL chain and attached detailed debug info in memory. */
1468 for (insn = get_insns (); insn; insn = next)
1470 next = NEXT_INSN (insn);
1471 NEXT_INSN (insn) = NULL;
1472 PREV_INSN (insn) = NULL;
1475 /* In case the function was not output,
1476 don't leave any temporary anonymous types
1477 queued up for sdb output. */
1478 #ifdef SDB_DEBUGGING_INFO
1479 if (write_symbols == SDB_DEBUG)
1480 sdbout_types (NULL_TREE);
/* Reset the pass-phase globals so the next function starts clean.  */
1483 reload_completed = 0;
1484 epilogue_completed = 0;
1485 flow2_completed = 0;
1488 timevar_push (TV_FINAL);
1490 /* Clear out the insn_length contents now that they are no
1492 init_insn_lengths ();
1494 /* Show no temporary slots allocated. */
1497 free_basic_block_vars ();
1498 free_bb_for_insn ();
1500 timevar_pop (TV_FINAL);
/* For local functions, record the incoming stack boundary the RTL
   actually needs, so callers in this unit can rely on it.  */
1502 if (targetm.binds_local_p (current_function_decl))
1504 int pref = cfun->preferred_stack_boundary;
1505 if (cfun->recursive_call_emit
1506 && cfun->stack_alignment_needed > cfun->preferred_stack_boundary)
1507 pref = cfun->stack_alignment_needed;
1508 cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
1512 /* Make sure volatile mem refs aren't considered valid operands for
1513 arithmetic insns. We must call this here if this is a nested inline
1514 function, since the above code leaves us in the init_recog state
1515 (from final.c), and the function context push/pop code does not
1516 save/restore volatile_ok.
1518 ??? Maybe it isn't necessary for expand_start_function to call this
1519 anymore if we do it here? */
1521 init_recog_no_volatile ();
1523 /* We're done with this function. Free up memory if we can. */
1524 free_after_parsing (cfun);
1525 free_after_compilation (cfun);
1529 /* This function is called from the pass manager in tree-optimize.c
1530 after all tree passes have finished for a single function, and we
1531 have expanded the function body from trees to RTL.
1532 Once we are here, we have decided that we're supposed to output
1533 that function, i.e. that we should write assembler code for it.
1535 We run a series of low-level passes here on the function's RTL
1536 representation. Each pass is called via a rest_of_* function. */
1539 rest_of_compilation (void)
1541 /* Convert from NOTE_INSN_EH_REGION style notes, and do other
1542 sorts of eh initialization. */
1543 convert_from_eh_region_ranges ();
1545 /* If we're emitting a nested function, make sure its parent gets
1546 emitted as well. Doing otherwise confuses debug info. */
1549 for (parent = DECL_CONTEXT (current_function_decl);
1550 parent != NULL_TREE;
1551 parent = get_containing_scope (parent))
1552 if (TREE_CODE (parent) == FUNCTION_DECL)
1553 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
1556 /* We are now committed to emitting code for this function. Do any
1557 preparation, such as emitting abstract debug info for the inline
1558 before it gets mangled by optimization. */
1559 if (cgraph_function_possibly_inlined_p (current_function_decl))
1560 (*debug_hooks->outlining_inline_function) (current_function_decl);
1562 /* Remove any notes we don't need. That will make iterating
1563 over the instruction sequence faster, and allow the garbage
1564 collector to reclaim the memory used by the notes. */
1565 remove_unnecessary_notes ();
1567 /* Initialize some variables used by the optimizers. */
1568 init_function_for_compilation ();
1570 TREE_ASM_WRITTEN (current_function_decl) = 1;
1572 /* Early return if there were errors. We can run afoul of our
1573 consistency checks, and there's not really much point in fixing them. */
1574 if (rtl_dump_and_exit || flag_syntax_only || errorcount || sorrycount)
1575 goto exit_rest_of_compilation;
1577 rest_of_handle_jump ();
1579 rest_of_handle_eh ();
1581 /* Delay emitting hard_reg_initial_value sets until after EH landing pad
1582 generation, which might create new sets. */
1583 emit_initial_value_sets ();
1586 /* If we are doing position-independent code generation, now
1587 is the time to output special prologues and epilogues.
1588 We do not want to do this earlier, because it just clutters
1589 up inline functions with meaningless insns. */
1594 /* Copy any shared structure that should not be shared. */
1597 #ifdef SETJMP_VIA_SAVE_AREA
1598 /* This must be performed before virtual register instantiation.
1599 Please be aware that everything in the compiler that can look
1600 at the RTL up to this point must understand that REG_SAVE_AREA
1601 is just like a use of the REG contained inside. */
1602 if (current_function_calls_alloca)
1603 optimize_save_area_alloca ();
1606 /* Instantiate all virtual registers. */
1607 instantiate_virtual_regs ();
1609 rest_of_handle_jump2 ();
1612 rest_of_handle_cse ();
1617 rest_of_handle_gcse ();
1619 if (flag_loop_optimize)
1620 rest_of_handle_loop_optimize ();
1623 rest_of_handle_jump_bypass ();
1626 timevar_push (TV_FLOW);
1627 rest_of_handle_cfg ();
/* Profiling instrumentation / branch probability estimation, unless
   profiling was already handled at the tree level.  */
1629 if (!flag_tree_based_profiling
1630 && (optimize > 0 || profile_arc_flag
1631 || flag_test_coverage || flag_branch_probabilities))
1633 rtl_register_profile_hooks ();
1634 rtl_register_value_prof_hooks ();
1635 rest_of_handle_branch_prob ();
1637 if (flag_branch_probabilities
1638 && flag_profile_values
1639 && (flag_value_profile_transformations
1640 || flag_speculative_prefetching))
1641 rest_of_handle_value_profile_transformations ();
1643 /* Remove the death notes created for vpt. */
1644 if (flag_profile_values)
1645 count_or_remove_death_notes (NULL, 1);
1649 rest_of_handle_if_conversion ();
1651 if (optimize > 0 && flag_tracer)
1652 rest_of_handle_tracer ();
1655 && flag_loop_optimize2)
1656 rest_of_handle_loop2 ();
1658 if (optimize > 0 && flag_web)
1659 rest_of_handle_web ();
1661 if (optimize > 0 && flag_rerun_cse_after_loop)
1662 rest_of_handle_cse2 ();
/* From here on, no pass may assume CSE will run again.  */
1664 cse_not_expected = 1;
1666 rest_of_handle_life ();
1667 timevar_pop (TV_FLOW);
1670 rest_of_handle_combine ();
1672 if (optimize > 0 && flag_if_conversion)
1673 rest_of_handle_if_after_combine ();
1675 /* The optimization to partition hot/cold basic blocks into separate
1676 sections of the .o file does not work well with linkonce or with
1677 user defined section attributes. Don't call it if either case
1680 if (flag_reorder_blocks_and_partition
1681 && !DECL_ONE_ONLY (current_function_decl)
1682 && !user_defined_section_attribute)
1683 rest_of_handle_partition_blocks ();
1685 if (optimize > 0 && (flag_regmove || flag_expensive_optimizations))
1686 rest_of_handle_regmove ();
1688 /* Do unconditional splitting before register allocation to allow machine
1689 description to add extra information not needed previously. */
1690 split_all_insns (1);
1692 #ifdef OPTIMIZE_MODE_SWITCHING
1693 rest_of_handle_mode_switching ();
1696 /* Any of the several passes since flow1 will have munged register
1697 lifetime data a bit. We need it to be up to date for scheduling
1698 (see handling of reg_known_equiv in init_alias_analysis). */
1699 recompute_reg_usage (get_insns (), !optimize_size);
1701 #ifdef INSN_SCHEDULING
1702 if (optimize > 0 && flag_modulo_sched)
1703 rest_of_handle_sms ();
1705 if (flag_schedule_insns)
1706 rest_of_handle_sched ();
1709 /* Determine if the current function is a leaf before running reload
1710 since this can impact optimizations done by the prologue and
1711 epilogue thus changing register elimination offsets. */
1712 current_function_is_leaf = leaf_function_p ();
/* Register allocation: either the new allocator or the classic
   local/global allocator; both bail out of compilation on failure.  */
1714 if (flag_new_regalloc)
1716 if (rest_of_handle_new_regalloc ())
1717 goto exit_rest_of_compilation;
1721 if (rest_of_handle_old_regalloc ())
1722 goto exit_rest_of_compilation;
1726 rest_of_handle_postreload ();
1728 if (optimize > 0 && flag_gcse_after_reload)
1729 rest_of_handle_gcse2 ();
1731 rest_of_handle_flow2 ();
1733 #ifdef HAVE_peephole2
1734 if (optimize > 0 && flag_peephole2)
1735 rest_of_handle_peephole2 ();
1739 rest_of_handle_if_after_reload ();
1743 if (flag_rename_registers || flag_cprop_registers)
1744 rest_of_handle_regrename ();
1746 rest_of_handle_reorder_blocks ();
/* Second branch-target-load run; the first is inside flow2.  */
1749 if (flag_branch_target_load_optimize2)
1750 rest_of_handle_branch_target_load_optimize ();
1752 #ifdef LEAF_REGISTERS
1753 current_function_uses_only_leaf_regs
1754 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
1757 #ifdef INSN_SCHEDULING
1758 if (optimize > 0 && flag_schedule_insns_after_reload)
1759 rest_of_handle_sched2 ();
1763 rest_of_handle_stack_regs ();
1766 compute_alignments ();
1768 if (flag_var_tracking)
1769 rest_of_handle_variable_tracking ();
1771 /* CFG is no longer maintained up-to-date. */
1772 free_bb_for_insn ();
1774 if (targetm.machine_dependent_reorg != 0)
1775 rest_of_handle_machine_reorg ();
1777 purge_line_number_notes (get_insns ());
1778 cleanup_barriers ();
1781 if (flag_delayed_branch)
1782 rest_of_handle_delay_slots ();
1785 #if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
1786 timevar_push (TV_SHORTEN_BRANCH);
1787 split_all_insns_noflow ();
1788 timevar_pop (TV_SHORTEN_BRANCH);
1791 convert_to_eh_region_ranges ();
1793 rest_of_handle_shorten_branches ();
1795 set_nothrow_function_flags ();
/* Final assembly output.  */
1797 rest_of_handle_final ();
/* Reached on errors and after regalloc failure as well as on the
   normal path; always tears down per-function state.  */
1799 exit_rest_of_compilation:
1801 rest_of_clean_state ();
/* Per-compilation-unit cleanup for the RTL passes: flush the
   branch-probability and combine summary dumps, and finalize any
   graph-format dump files.  Timed under TV_DUMP.  */
1805 finish_optimization_passes (void)
1807 enum tree_dump_index i;
1808 struct dump_file_info *dfi;
1811 timevar_push (TV_DUMP);
1812 if (profile_arc_flag || flag_test_coverage || flag_branch_probabilities)
1814 open_dump_file (DFI_bp, NULL);
1816 close_dump_file (DFI_bp, NULL, NULL_RTX);
/* Emit cumulative combine statistics when optimizing and the
   combine dump is enabled.  */
1819 if (optimize > 0 && open_dump_file (DFI_combine, NULL))
1821 dump_combine_total_stats (dump_file);
1822 close_dump_file (DFI_combine, NULL, NULL_RTX);
1825 /* Do whatever is necessary to finish printing the graphs. */
1826 if (graph_dump_format != no_graph)
1827 for (i = DFI_MIN; (dfi = get_dump_file_info (i)) != NULL; ++i)
1828 if (dump_initialized_p (i)
1829 && (dfi->flags & TDF_RTL) != 0
1830 && (name = get_dump_file_name (i)) != NULL)
1832 finish_graph_dump_file (name);
1836 timevar_pop (TV_DUMP);
1839 struct tree_opt_pass pass_rest_of_compilation =
1843 rest_of_compilation, /* execute */
1846 0, /* static_pass_number */
1847 TV_REST_OF_COMPILATION, /* tv_id */
1848 PROP_rtl, /* properties_required */
1849 0, /* properties_provided */
1850 PROP_rtl, /* properties_destroyed */
1851 0, /* todo_flags_start */
1852 TODO_ggc_collect, /* todo_flags_finish */