* gcc/tree-vrp.c (execute_vrp): Return value.
* gcc/regrename.c (rest_of_handle_regrename): Ditto.
* gcc/tree-into-ssa.c (rewrite_into_ssa): Ditto.
* gcc/tree-complex.c (tree_lower_complex): Ditto.
(tree_lower_complex_O0): Ditto.
* gcc/tracer.c (rest_of_handle_tracer): Ditto.
* gcc/postreload-gcse.c (rest_of_handle_gcse2): Ditto.
* gcc/postreload.c (rest_of_handle_postreload): Ditto.
* gcc/tree-tailcall.c (execute_tail_recursion): Ditto.
(execute_tail_calls): Ditto.
* gcc/tree-ssa-loop-ch.c (copy_loop_headers): Ditto.
* gcc/tree.h (init_function_for_compilation): Ditto.
* gcc/ipa-cp.c (ipcp_driver): Ditto.
* gcc/tree-scalar-evolution.c (scev_const_prop): Ditto.
* gcc/tree-scalar-evolution.h (scev_const_prop): Ditto.
* gcc/final.c (compute_alignments): Ditto.
(rest_of_handle_final): Ditto.
(rest_of_handle_shorten_branches): Ditto.
(rest_of_clean_state): Ditto.
* gcc/omp-low.c (execute_expand_omp): Ditto.
(execute_lower_omp): Ditto.
* gcc/tree-ssa-dse.c (tree_ssa_dse): Ditto.
* gcc/ipa-reference.c (static_execute): Ditto.
* gcc/tree-ssa-uncprop.c (tree_ssa_uncprop): Ditto.
* gcc/reorg.c (rest_of_handle_delay_slots): Ditto.
(rest_of_handle_machine_reorg): Ditto.
* gcc/cgraphunit.c (rebuild_cgraph_edges): Ditto.
* gcc/flow.c (recompute_reg_usage): Ditto.
(rest_of_handle_remove_death_notes): Ditto.
(rest_of_handle_life): Ditto.
(rest_of_handle_flow2): Ditto.
* gcc/tree-ssa-copyrename.c (rename_ssa_copies): Ditto.
* gcc/tree-ssa-ccp.c (do_ssa_ccp): Ditto.
(do_ssa_store_ccp): Ditto.
(execute_fold_all_builtins): Ditto.
* gcc/mode-switching.c (rest_of_handle_mode_switching): Ditto.
* gcc/modulo-sched.c (rest_of_handle_sms): Ditto.
* gcc/ipa-pure-const.c (static_execute): Ditto.
* gcc/cse.c (rest_of_handle_cse): Ditto.
(rest_of_handle_cse2): Ditto.
* gcc/web.c (rest_of_handle_web): Ditto.
* gcc/tree-stdarg.c (execute_optimize_stdarg): Ditto.
* gcc/tree-ssa-math-opts.c (execute_cse_reciprocals): Ditto.
* gcc/tree-ssa-dom.c (tree_ssa_dominator_optimize): Ditto.
* gcc/tree-nrv.c (tree_nrv): Ditto.
(execute_return_slot_opt): Ditto.
* gcc/tree-ssa-alias.c (compute_may_aliases): Ditto.
(create_structure_vars): Ditto.
* gcc/loop-init.c (rtl_loop_init): Ditto.
(rtl_loop_done): Ditto.
(rtl_move_loop_invariants): Ditto.
(rtl_unswitch): Ditto.
(rtl_unroll_and_peel_loops): Ditto.
(rtl_doloop): Ditto.
* gcc/gimple-low.c (lower_function_body): Ditto.
(mark_used_blocks): Ditto.
* gcc/tree-ssa-sink.c (execute_sink_code): Ditto.
* gcc/ipa-inline.c (cgraph_decide_inlining): Ditto.
(cgraph_early_inlining): Ditto.
* gcc/global.c (rest_of_handle_global_alloc): Ditto.
* gcc/jump.c (cleanup_barriers): Ditto.
(purge_line_number_notes): Ditto.
* gcc/ifcvt.c (rest_of_handle_if_conversion): Ditto.
(rest_of_handle_if_after_reload): Ditto.
* gcc/tree-ssa-loop.c (tree_ssa_loop_init): Ditto.
(tree_ssa_loop_im): Ditto.
(tree_ssa_loop_unswitch): Ditto.
(tree_vectorize): Ditto.
(tree_linear_transform): Ditto.
(tree_ssa_loop_ivcanon): Ditto.
(tree_ssa_empty_loop): Ditto.
(tree_ssa_loop_bounds): Ditto.
(tree_complete_unroll): Ditto.
(tree_ssa_loop_prefetch): Ditto.
(tree_ssa_loop_ivopts): Ditto.
(tree_ssa_loop_done): Ditto.
* gcc/predict.c (tree_estimate_probability): Ditto.
* gcc/recog.c (split_all_insns_noflow): Ditto.
(rest_of_handle_peephole2): Ditto.
(rest_of_handle_split_all_insns): Ditto.
* gcc/tree-eh.c (lower_eh_constructs): Ditto.
* gcc/regmove.c (rest_of_handle_regmove): Ditto.
(rest_of_handle_stack_adjustments): Ditto.
* gcc/local-alloc.c (rest_of_handle_local_alloc): Ditto.
* gcc/function.c (instantiate_virtual_regs): Ditto.
(init_function_for_compilation): Ditto.
(rest_of_handle_check_leaf_regs): Ditto.
* gcc/gcse.c (rest_of_handle_jump_bypass): Ditto.
(rest_of_handle_gcse): Ditto.
* gcc/ipa-type-escape.c (type_escape_execute): Ditto.
* gcc/alias.c (rest_of_handle_cfg): Ditto.
* gcc/tree-if-conv.c (main_tree_if_conversion): Ditto.
* gcc/profile.c (rest_of_handle_branch_prob): Ditto.
* gcc/tree-ssa-phiopt.c (tree_ssa_phiopt): Ditto.
* gcc/rtl-factoring.c (rest_of_rtl_seqabstr): Ditto.
* gcc/bt-load.c (rest_of_handle_branch_target_load_optimize): Ditto.
* gcc/tree-dfa.c (find_referenced_vars): Ditto.
* gcc/except.c (set_nothrow_function_flags): Ditto.
(convert_to_eh_region_ranges): Ditto.
(rest_of_handle_eh): Ditto.
* gcc/emit-rtl.c (unshare_all_rtl): Ditto.
(remove_unnecessary_notes): Ditto.
* gcc/except.h (set_nothrow_function_flags): Ditto.
(convert_to_eh_region_ranges): Ditto.
* gcc/cfgexpand.c (tree_expand_cfg): Ditto.
* gcc/tree-cfgcleanup.c (merge_phi_nodes): Ditto.
* gcc/tree-ssa-pre.c (do_pre): Ditto.
(execute_fre): Ditto.
* gcc/cfgcleanup.c (rest_of_handle_jump): Ditto.
(rest_of_handle_jump2): Ditto.
* gcc/tree-sra.c (tree_sra): Ditto.
* gcc/tree-mudflap.c (execute_mudflap_function_ops): Ditto.
(execute_mudflap_function_decls): Ditto.
* gcc/tree-ssa-copy.c (do_copy_prop): Ditto.
(do_store_copy_prop): Ditto.
* gcc/ipa-prop.h (ipcp_driver): Ditto.
* gcc/cfglayout.c (insn_locators_initialize): Ditto.
* gcc/tree-ssa-forwprop.c
(tree_ssa_forward_propagate_single_use_vars): Ditto.
* gcc/cfglayout.h (insn_locators_initialize): Ditto.
* gcc/tree-ssa-dce.c (tree_ssa_dce): Ditto.
* gcc/tree-ssa.c (execute_early_warn_uninitialized): Ditto.
(execute_late_warn_uninitialized): Ditto.
* gcc/rtl.h (cleanup_barriers): Ditto.
(split_all_insns_noflow): Ditto.
(purge_line_number_notes): Ditto.
(unshare_all_rtl): Ditto.
(remove_unnecessary_notes): Ditto.
(recompute_reg_usage): Ditto.
(variable_tracking_main): Ditto.
* gcc/integrate.c (emit_initial_value_sets): Ditto.
* gcc/integrate.h (emit_initial_value_sets): Ditto.
* gcc/tree-optimize.c (execute_free_datastructures): Ditto.
(execute_free_cfg_annotations): Ditto.
(execute_fixup_cfg): Ditto.
(execute_cleanup_cfg_pre_ipa): Ditto.
(execute_cleanup_cfg_post_optimizing): Ditto.
(execute_init_datastructures): Ditto.
* gcc/tree-object-size.c (compute_object_sizes): Ditto.
* gcc/combine.c (rest_of_handle_combine): Ditto.
* gcc/tree-outof-ssa.c (rewrite_out_of_ssa): Ditto.
* gcc/bb-reorder.c (duplicate_computed_gotos): Ditto.
(rest_of_handle_reorder_blocks): Ditto.
(rest_of_handle_partition_blocks): Ditto.
* gcc/var-tracking.c (variable_tracking_main): Ditto.
* gcc/tree-profile.c (tree_profiling): Ditto.
* gcc/tree-vect-generic.c (expand_vector_operations): Ditto.
* gcc/reg-stack.c (rest_of_handle_stack_regs): Ditto.
* gcc/sched-rgn.c (rest_of_handle_sched): Ditto.
(rest_of_handle_sched2): Ditto.
* gcc/basic-block.h (free_bb_insn): Ditto.
* gcc/tree-ssa-structalias.c (ipa_pta_execute): Ditto.
* gcc/tree-cfg.c (execute_build_cfg): Ditto.
(remove_useless_stmts): Ditto.
(split_critical_edges): Ditto.
(execute_warn_function_return): Ditto.
(execute_warn_function_noreturn): Ditto.
* gcc/tree-ssa-reassoc.c (execute_reassoc): Ditto.
* gcc/cfgrtl.c (free_bb_for_insn): Ditto.
* gcc/passes.c (execute_one_pass): Run additional
todos returned by execute function.
* gcc/tree-pass.h (struct tree_opt_pass): Make execute
return a value.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@111643 138bc75d-0d04-0410-961f-82ee72b054a4
+2006-03-02 Daniel Berlin <dberlin@dberlin.org>
+
+ * gcc/tree-vrp.c (execute_vrp): Return value.
+ * gcc/regrename.c (rest_of_handle_regrename): Ditto.
+ * gcc/tree-into-ssa.c (rewrite_into_ssa): Ditto.
+ * gcc/tree-complex.c (tree_lower_complex): Ditto.
+ (tree_lower_complex_O0): Ditto.
+ * gcc/tracer.c (rest_of_handle_tracer): Ditto.
+ * gcc/postreload-gcse.c (rest_of_handle_gcse2): Ditto.
+ * gcc/postreload.c (rest_of_handle_postreload): Ditto.
+ * gcc/tree-tailcall.c (execute_tail_recursion): Ditto.
+ (execute_tail_calls): Ditto.
+ * gcc/tree-ssa-loop-ch.c (copy_loop_headers): Ditto.
+ * gcc/tree.h (init_function_for_compilation): Ditto.
+ * gcc/ipa-cp.c (ipcp_driver): Ditto.
+ * gcc/tree-scalar-evolution.c (scev_const_prop): Ditto.
+ * gcc/tree-scalar-evolution.h (scev_const_prop): Ditto.
+ * gcc/final.c (compute_alignments): Ditto.
+ (rest_of_handle_final): Ditto.
+ (rest_of_handle_shorten_branches): Ditto.
+ (rest_of_clean_state): Ditto.
+ * gcc/omp-low.c (execute_expand_omp): Ditto.
+ (execute_lower_omp): Ditto.
+ * gcc/tree-ssa-dse.c (tree_ssa_dse): Ditto.
+ * gcc/ipa-reference.c (static_execute): Ditto.
+ * gcc/tree-ssa-uncprop.c (tree_ssa_uncprop): Ditto.
+ * gcc/reorg.c (rest_of_handle_delay_slots): Ditto.
+ (rest_of_handle_machine_reorg): Ditto.
+ * gcc/cgraphunit.c (rebuild_cgraph_edges): Ditto.
+ * gcc/flow.c (recompute_reg_usage): Ditto.
+ (rest_of_handle_remove_death_notes): Ditto.
+ (rest_of_handle_life): Ditto.
+ (rest_of_handle_flow2): Ditto.
+ * gcc/tree-ssa-copyrename.c (rename_ssa_copies): Ditto.
+ * gcc/tree-ssa-ccp.c (do_ssa_ccp): Ditto.
+ (do_ssa_store_ccp): Ditto.
+ (execute_fold_all_builtins): Ditto.
+ * gcc/mode-switching.c (rest_of_handle_mode_switching): Ditto.
+ * gcc/modulo-sched.c (rest_of_handle_sms): Ditto.
+ * gcc/ipa-pure-const.c (static_execute): Ditto.
+ * gcc/cse.c (rest_of_handle_cse): Ditto.
+ (rest_of_handle_cse2): Ditto.
+ * gcc/web.c (rest_of_handle_web): Ditto.
+ * gcc/tree-stdarg.c (execute_optimize_stdarg): Ditto.
+ * gcc/tree-ssa-math-opts.c (execute_cse_reciprocals): Ditto.
+ * gcc/tree-ssa-dom.c (tree_ssa_dominator_optimize): Ditto.
+ * gcc/tree-nrv.c (tree_nrv): Ditto.
+ (execute_return_slot_opt): Ditto.
+ * gcc/tree-ssa-alias.c (compute_may_aliases): Ditto.
+ (create_structure_vars): Ditto.
+ * gcc/loop-init.c (rtl_loop_init): Ditto.
+ (rtl_loop_done): Ditto.
+ (rtl_move_loop_invariants): Ditto.
+ (rtl_unswitch): Ditto.
+ (rtl_unroll_and_peel_loops): Ditto.
+ (rtl_doloop): Ditto.
+ * gcc/gimple-low.c (lower_function_body): Ditto.
+ (mark_used_blocks): Ditto.
+ * gcc/tree-ssa-sink.c (execute_sink_code): Ditto.
+ * gcc/ipa-inline.c (cgraph_decide_inlining): Ditto.
+ (cgraph_early_inlining): Ditto.
+ * gcc/global.c (rest_of_handle_global_alloc): Ditto.
+ * gcc/jump.c (cleanup_barriers): Ditto.
+ (purge_line_number_notes): Ditto.
+ * gcc/ifcvt.c (rest_of_handle_if_conversion): Ditto.
+ (rest_of_handle_if_after_reload): Ditto.
+ * gcc/tree-ssa-loop.c (tree_ssa_loop_init): Ditto.
+ (tree_ssa_loop_im): Ditto.
+ (tree_ssa_loop_unswitch): Ditto.
+ (tree_vectorize): Ditto.
+ (tree_linear_transform): Ditto.
+ (tree_ssa_loop_ivcanon): Ditto.
+ (tree_ssa_empty_loop): Ditto.
+ (tree_ssa_loop_bounds): Ditto.
+ (tree_complete_unroll): Ditto.
+ (tree_ssa_loop_prefetch): Ditto.
+ (tree_ssa_loop_ivopts): Ditto.
+ (tree_ssa_loop_done): Ditto.
+ * gcc/predict.c (tree_estimate_probability): Ditto.
+ * gcc/recog.c (split_all_insns_noflow): Ditto.
+ (rest_of_handle_peephole2): Ditto.
+ (rest_of_handle_split_all_insns): Ditto.
+ * gcc/tree-eh.c (lower_eh_constructs): Ditto.
+ * gcc/regmove.c (rest_of_handle_regmove): Ditto.
+ (rest_of_handle_stack_adjustments): Ditto.
+ * gcc/local-alloc.c (rest_of_handle_local_alloc): Ditto.
+ * gcc/function.c (instantiate_virtual_regs): Ditto.
+ (init_function_for_compilation): Ditto.
+ (rest_of_handle_check_leaf_regs): Ditto.
+ * gcc/gcse.c (rest_of_handle_jump_bypass): Ditto.
+ (rest_of_handle_gcse): Ditto.
+ * gcc/ipa-type-escape.c (type_escape_execute): Ditto.
+ * gcc/alias.c (rest_of_handle_cfg): Ditto.
+ * gcc/tree-if-conv.c (main_tree_if_conversion): Ditto.
+ * gcc/profile.c (rest_of_handle_branch_prob): Ditto.
+ * gcc/tree-ssa-phiopt.c (tree_ssa_phiopt): Ditto.
+ * gcc/rtl-factoring.c (rest_of_rtl_seqabstr): Ditto.
+ * gcc/bt-load.c (rest_of_handle_branch_target_load_optimize): Ditto.
+ * gcc/tree-dfa.c (find_referenced_vars): Ditto.
+ * gcc/except.c (set_nothrow_function_flags): Ditto.
+ (convert_to_eh_region_ranges): Ditto.
+ (rest_of_handle_eh): Ditto.
+ * gcc/emit-rtl.c (unshare_all_rtl): Ditto.
+ (remove_unnecessary_notes): Ditto.
+ * gcc/except.h (set_nothrow_function_flags): Ditto.
+ (convert_to_eh_region_ranges): Ditto.
+ * gcc/cfgexpand.c (tree_expand_cfg): Ditto.
+ * gcc/tree-cfgcleanup.c (merge_phi_nodes): Ditto.
+ * gcc/tree-ssa-pre.c (do_pre): Ditto.
+ (execute_fre): Ditto.
+ * gcc/cfgcleanup.c (rest_of_handle_jump): Ditto.
+ (rest_of_handle_jump2): Ditto.
+ * gcc/tree-sra.c (tree_sra): Ditto.
+ * gcc/tree-mudflap.c (execute_mudflap_function_ops): Ditto.
+ (execute_mudflap_function_decls): Ditto.
+ * gcc/tree-ssa-copy.c (do_copy_prop): Ditto.
+ (do_store_copy_prop): Ditto.
+ * gcc/ipa-prop.h (ipcp_driver): Ditto.
+ * gcc/cfglayout.c (insn_locators_initialize): Ditto.
+ * gcc/tree-ssa-forwprop.c
+ (tree_ssa_forward_propagate_single_use_vars): Ditto.
+ * gcc/cfglayout.h (insn_locators_initialize): Ditto.
+ * gcc/tree-ssa-dce.c (tree_ssa_dce): Ditto.
+ * gcc/tree-ssa.c (execute_early_warn_uninitialized): Ditto.
+ (execute_late_warn_uninitialized): Ditto.
+ * gcc/rtl.h (cleanup_barriers): Ditto.
+ (split_all_insns_noflow): Ditto.
+ (purge_line_number_notes): Ditto.
+ (unshare_all_rtl): Ditto.
+ (remove_unnecessary_notes): Ditto.
+ (recompute_reg_usage): Ditto.
+ (variable_tracking_main): Ditto.
+ * gcc/integrate.c (emit_initial_value_sets): Ditto.
+ * gcc/integrate.h (emit_initial_value_sets): Ditto.
+ * gcc/tree-optimize.c (execute_free_datastructures): Ditto.
+ (execute_free_cfg_annotations): Ditto.
+ (execute_fixup_cfg): Ditto.
+ (execute_cleanup_cfg_pre_ipa): Ditto.
+ (execute_cleanup_cfg_post_optimizing): Ditto.
+ (execute_init_datastructures): Ditto.
+ * gcc/tree-object-size.c (compute_object_sizes): Ditto.
+ * gcc/combine.c (rest_of_handle_combine): Ditto.
+ * gcc/tree-outof-ssa.c (rewrite_out_of_ssa): Ditto.
+ * gcc/bb-reorder.c (duplicate_computed_gotos): Ditto.
+ (rest_of_handle_reorder_blocks): Ditto.
+ (rest_of_handle_partition_blocks): Ditto.
+ * gcc/var-tracking.c (variable_tracking_main): Ditto.
+ * gcc/tree-profile.c (tree_profiling): Ditto.
+ * gcc/tree-vect-generic.c (expand_vector_operations): Ditto.
+ * gcc/reg-stack.c (rest_of_handle_stack_regs): Ditto.
+ * gcc/sched-rgn.c (rest_of_handle_sched): Ditto.
+ (rest_of_handle_sched2): Ditto.
+ * gcc/basic-block.h (free_bb_insn): Ditto.
+ * gcc/tree-ssa-structalias.c (ipa_pta_execute): Ditto.
+ * gcc/tree-cfg.c (execute_build_cfg): Ditto.
+ (remove_useless_stmts): Ditto.
+ (split_critical_edges): Ditto.
+ (execute_warn_function_return): Ditto.
+ (execute_warn_function_noreturn): Ditto.
+ * gcc/tree-ssa-reassoc.c (execute_reassoc): Ditto.
+ * gcc/cfgrtl.c (free_bb_for_insn): Ditto.
+ * gcc/passes.c (execute_one_pass): Run additional
+ todos returned by execute function.
+ * gcc/tree-pass.h (struct tree_opt_pass): Make execute
+ return a value.
+
2006-03-02 Richard Guenther <rguenther@suse.de>
* tree-ssa-alias.c (find_used_portions): Consider taking
\f
/* Do control and data flow analysis; write some of the results to the
dump file. */
-static void
+static unsigned int
rest_of_handle_cfg (void)
{
if (dump_file)
if (optimize)
cleanup_cfg (CLEANUP_EXPENSIVE
| (flag_thread_jumps ? CLEANUP_THREADING : 0));
+ return 0;
}
struct tree_opt_pass pass_cfg =
#define set_block_for_insn(INSN, BB) (BLOCK_FOR_INSN (INSN) = BB)
extern void compute_bb_for_insn (void);
-extern void free_bb_for_insn (void);
+extern unsigned int free_bb_for_insn (void);
extern void update_bb_for_insn (basic_block);
extern void free_basic_block_vars (void);
}
-static void
+static unsigned int
duplicate_computed_gotos (void)
{
basic_block bb, new_bb;
int max_size;
if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)
- return;
+ return 0;
if (targetm.cannot_modify_jumps_p ())
- return;
+ return 0;
cfg_layout_initialize (0);
cfg_layout_finalize ();
BITMAP_FREE (candidates);
+ return 0;
}
struct tree_opt_pass pass_duplicate_computed_gotos =
/* Reorder basic blocks. */
-static void
+static unsigned int
rest_of_handle_reorder_blocks (void)
{
bool changed;
/* Add NOTE_INSN_SWITCH_TEXT_SECTIONS notes. */
insert_section_boundary_note ();
+ return 0;
}
struct tree_opt_pass pass_reorder_blocks =
}
/* Partition hot and cold basic blocks. */
-static void
+static unsigned int
rest_of_handle_partition_blocks (void)
{
no_new_pseudos = 0;
update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
PROP_LOG_LINKS | PROP_REG_INFO | PROP_DEATH_NOTES);
no_new_pseudos = 1;
+ return 0;
}
struct tree_opt_pass pass_partition_blocks =
}
-static void
+static unsigned int
rest_of_handle_branch_target_load_optimize (void)
{
static int warned = 0;
}
branch_target_load_optimize (epilogue_completed);
+ return 0;
}
struct tree_opt_pass pass_branch_target_load_optimize =
return changed;
}
\f
-static void
+static unsigned int
rest_of_handle_jump (void)
{
delete_unreachable_blocks ();
if (cfun->tail_call_emit)
fixup_tail_calls ();
+ return 0;
}
struct tree_opt_pass pass_jump =
};
-static void
+static unsigned int
rest_of_handle_jump2 (void)
{
/* Turn NOTE_INSN_EXPECTED_VALUE into REG_BR_PROB. Do this
maximum instruction UID, so if we can reduce the maximum UID
we'll save big on memory. */
renumber_insns ();
+ return 0;
}
confuse the CFG hooks, so be careful to not manipulate CFG during
the expansion. */
-static void
+static unsigned int
tree_expand_cfg (void)
{
basic_block bb, init_block;
/* After expanding, the return labels are no longer needed. */
return_label = NULL;
naked_return_label = NULL;
+ return 0;
}
struct tree_opt_pass pass_expand =
represented via INSN_NOTEs. Replace them by representation using
INSN_LOCATORs. */
-void
+unsigned int
insn_locators_initialize (void)
{
tree block = NULL;
set_block_levels (DECL_INITIAL (cfun->decl), 0);
free_block_changes ();
+ return 0;
}
struct tree_opt_pass pass_insn_locators_initialize =
extern void cfg_layout_initialize (unsigned int);
extern void cfg_layout_finalize (void);
-extern void insn_locators_initialize (void);
+extern unsigned int insn_locators_initialize (void);
extern void reemit_insn_block_notes (void);
extern bool can_copy_bbs_p (basic_block *, unsigned);
extern void copy_bbs (basic_block *, unsigned, basic_block *,
/* Release the basic_block_for_insn array. */
-void
+unsigned int
free_bb_for_insn (void)
{
rtx insn;
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
if (!BARRIER_P (insn))
BLOCK_FOR_INSN (insn) = NULL;
+ return 0;
}
struct tree_opt_pass pass_free_cfg =
/* Rebuild call edges from current function after a passes not aware
of cgraph updating. */
-static void
+static unsigned int
rebuild_cgraph_edges (void)
{
basic_block bb;
}
initialize_inline_failed (node);
gcc_assert (!node->global.inlined_to);
+ return 0;
}
struct tree_opt_pass pass_rebuild_cgraph_edges =
}
/* Try combining insns through substitution. */
-static void
+static unsigned int
rest_of_handle_combine (void)
{
int rebuild_jump_labels_after_combine
delete_dead_jumptables ();
cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE);
}
+ return 0;
}
struct tree_opt_pass pass_combine =
return optimize > 0;
}
-static void
+static unsigned int
rest_of_handle_cse (void)
{
int tem;
if (tem || optimize > 1)
cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
+ return 0;
}
struct tree_opt_pass pass_cse =
}
/* Run second CSE pass after loop optimizations. */
-static void
+static unsigned int
rest_of_handle_cse2 (void)
{
int tem;
}
reg_scan (get_insns (), max_reg_num ());
cse_not_expected = 1;
+ return 0;
}
unshare_all_rtl_1 (cfun->decl, insn);
}
-void
+unsigned int
unshare_all_rtl (void)
{
unshare_all_rtl_1 (current_function_decl, get_insns ());
+ return 0;
}
struct tree_opt_pass pass_unshare_all_rtl =
/* Remove unnecessary notes from the instruction stream. */
-void
+unsigned int
remove_unnecessary_notes (void)
{
rtx eh_stack = NULL_RTX;
/* Too many EH_REGION_BEG notes. */
gcc_assert (!eh_stack);
+ return 0;
}
struct tree_opt_pass pass_remove_unnecessary_notes =
/* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls. */
-void
+unsigned int
set_nothrow_function_flags (void)
{
rtx insn;
cfun->all_throwers_are_sibcalls = 1;
if (! flag_exceptions)
- return;
+ return 0;
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
if (can_throw_external (insn))
if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
{
cfun->all_throwers_are_sibcalls = 0;
- return;
+ return 0;
}
}
if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
{
cfun->all_throwers_are_sibcalls = 0;
- return;
+ return 0;
}
}
+ return 0;
}
struct tree_opt_pass pass_set_nothrow_function_flags =
The new note numbers will not refer to region numbers, but
instead to call site entries. */
-void
+unsigned int
convert_to_eh_region_ranges (void)
{
rtx insn, iter, note;
int call_site = 0;
if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
- return;
+ return 0;
VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
}
htab_delete (ar_hash);
+ return 0;
}
struct tree_opt_pass pass_convert_to_eh_region_ranges =
}
/* Complete generation of exception handling code. */
-static void
+static unsigned int
rest_of_handle_eh (void)
{
cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
finish_eh_generation ();
cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
+ return 0;
}
struct tree_opt_pass pass_rtl_eh =
extern bool can_throw_external (rtx);
/* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls. */
-extern void set_nothrow_function_flags (void);
+extern unsigned int set_nothrow_function_flags (void);
/* After initial rtl generation, call back to finish generating
exception support code. */
extern void maybe_remove_eh_handler (rtx);
extern void convert_from_eh_region_ranges (void);
-extern void convert_to_eh_region_ranges (void);
+extern unsigned int convert_to_eh_region_ranges (void);
extern void find_exception_handler_labels (void);
extern bool current_function_has_exception_handlers (void);
extern void output_function_exception_table (void);
/* Compute branch alignments based on frequency information in the
CFG. */
-static void
+static unsigned int
compute_alignments (void)
{
int log, max_skip, max_log;
/* If not optimizing or optimizing for size, don't assign any alignments. */
if (! optimize || optimize_size)
- return;
+ return 0;
FOR_EACH_BB (bb)
{
LABEL_TO_ALIGNMENT (label) = max_log;
LABEL_TO_MAX_SKIP (label) = max_skip;
}
+ return 0;
}
struct tree_opt_pass pass_compute_alignments =
}
\f
/* Turn the RTL into assembly. */
-static void
+static unsigned int
rest_of_handle_final (void)
{
rtx x;
timevar_push (TV_SYMOUT);
(*debug_hooks->function_decl) (current_function_decl);
timevar_pop (TV_SYMOUT);
+ return 0;
}
struct tree_opt_pass pass_final =
};
-static void
+static unsigned int
rest_of_handle_shorten_branches (void)
{
/* Shorten branches. */
shorten_branches (get_insns ());
+ return 0;
}
struct tree_opt_pass pass_shorten_branches =
};
-static void
+static unsigned int
rest_of_clean_state (void)
{
rtx insn, next;
/* We're done with this function. Free up memory if we can. */
free_after_parsing (cfun);
free_after_compilation (cfun);
+ return 0;
}
struct tree_opt_pass pass_clean_state =
It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
possibly other information which is used by the register allocators. */
-void
+unsigned int
recompute_reg_usage (void)
{
allocate_reg_life_data ();
if (dump_file)
dump_flow_info (dump_file, dump_flags);
+ return 0;
}
struct tree_opt_pass pass_recompute_reg_usage =
return flag_profile_values;
}
-static void
+static unsigned int
rest_of_handle_remove_death_notes (void)
{
count_or_remove_death_notes (NULL, 1);
+ return 0;
}
struct tree_opt_pass pass_remove_death_notes =
};
/* Perform life analysis. */
-static void
+static unsigned int
rest_of_handle_life (void)
{
regclass_init ();
}
no_new_pseudos = 1;
+ return 0;
}
struct tree_opt_pass pass_life =
'f' /* letter */
};
-static void
+static unsigned int
rest_of_handle_flow2 (void)
{
/* If optimizing, then go ahead and split insns now. */
thread_prologue_and_epilogue_insns (get_insns ());
epilogue_completed = 1;
flow2_completed = 1;
+ return 0;
}
struct tree_opt_pass pass_flow2 =
/* Pass through the INSNS of function FNDECL and convert virtual register
references to hard register references. */
-static void
+static unsigned int
instantiate_virtual_regs (void)
{
rtx insn;
/* Indicate that, from now on, assign_stack_local should use
frame_pointer_rtx. */
virtuals_instantiated = 1;
+ return 0;
}
struct tree_opt_pass pass_instantiate_virtual_regs =
/* Make sure all values used by the optimization passes have sane
defaults. */
-void
+unsigned int
init_function_for_compilation (void)
{
reg_renumber = 0;
gcc_assert (VEC_length (int, prologue) == 0);
gcc_assert (VEC_length (int, epilogue) == 0);
gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
+ return 0;
}
struct tree_opt_pass pass_init_function =
}
\f
-static void
+static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
current_function_uses_only_leaf_regs
= optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
+ return 0;
}
struct tree_opt_pass pass_leaf_regs =
}
/* Perform jump bypassing and control flow optimizations. */
-static void
+static unsigned int
rest_of_handle_jump_bypass (void)
{
cleanup_cfg (CLEANUP_EXPENSIVE);
cleanup_cfg (CLEANUP_EXPENSIVE);
delete_trivially_dead_insns (get_insns (), max_reg_num ());
}
+ return 0;
}
struct tree_opt_pass pass_jump_bypass =
}
-static void
+static unsigned int
rest_of_handle_gcse (void)
{
int save_csb, save_cfj;
flag_cse_skip_blocks = save_csb;
flag_cse_follow_jumps = save_cfj;
+ return 0;
}
struct tree_opt_pass pass_gcse =
/* Lowers the body of current_function_decl. */
-static void
+static unsigned int
lower_function_body (void)
{
struct lower_data data;
= blocks_nreverse (BLOCK_SUBBLOCKS (data.block));
clear_block_marks (data.block);
+ return 0;
}
struct tree_opt_pass pass_lower_cf =
/* Mark the used attribute on blocks correctly. */
-static void
+static unsigned int
mark_used_blocks (void)
{
mark_blocks_with_used_vars (DECL_INITIAL (current_function_decl));
+ return 0;
}
}
/* Run old register allocator. Return TRUE if we must exit
rest_of_compilation upon return. */
-static void
+static unsigned int
rest_of_handle_global_alloc (void)
{
bool failure;
gcc_assert (reload_completed || failure);
reload_completed = !failure;
+ return 0;
}
struct tree_opt_pass pass_global_alloc =
}
/* If-conversion and CFG cleanup. */
-static void
+static unsigned int
rest_of_handle_if_conversion (void)
{
if (flag_if_conversion)
cleanup_cfg (CLEANUP_EXPENSIVE);
reg_scan (get_insns (), max_reg_num ());
timevar_pop (TV_JUMP);
+ return 0;
}
struct tree_opt_pass pass_rtl_ifcvt =
/* Rerun if-conversion, as combine may have simplified things enough
to now meet sequence length restrictions. */
-static void
+static unsigned int
rest_of_handle_if_after_combine (void)
{
no_new_pseudos = 0;
if_convert (1);
no_new_pseudos = 1;
+ return 0;
}
struct tree_opt_pass pass_if_after_combine =
return (optimize > 0);
}
-static void
+static unsigned int
rest_of_handle_if_after_reload (void)
{
/* Last attempt to optimize CFG, as scheduling, peepholing and insn
| (flag_crossjumping ? CLEANUP_CROSSJUMP : 0));
if (flag_if_conversion2)
if_convert (1);
+ return 0;
}
return NULL_RTX;
}
-void
+unsigned int
emit_initial_value_sets (void)
{
struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
rtx seq;
if (ivs == 0)
- return;
+ return 0;
start_sequence ();
for (i = 0; i < ivs->num_entries; i++)
end_sequence ();
emit_insn_after (seq, entry_of_function ());
+ return 0;
}
struct tree_opt_pass pass_initial_value_sets =
it, else return NULL_RTX. */
extern rtx get_hard_reg_initial_reg (struct function *, rtx);
/* Called from rest_of_compilation. */
-extern void emit_initial_value_sets (void);
+extern unsigned int emit_initial_value_sets (void);
extern void allocate_initial_values (rtx *);
/* Check whether there's any attribute in a function declaration that
}
/* The IPCP driver. */
-void
+unsigned int
ipcp_driver (void)
{
if (dump_file)
if (dump_file)
fprintf (dump_file, "\nIPA constant propagation end\n");
cgraph_remove_unreachable_nodes (true, NULL);
+ return 0;
}
/* Gate for IPCP optimization. */
/* Decide on the inlining. We do so in the topological order to avoid
expenses on updating data structures. */
-static void
+static unsigned int
cgraph_decide_inlining (void)
{
struct cgraph_node *node;
overall_insns);
free (order);
timevar_pop (TV_INLINE_HEURISTICS);
+ return 0;
}
/* Decide on the inlining. We do so in the topological order to avoid
/* Do inlining of small functions. Doing so early helps profiling and other
passes to be somewhat more effective and avoids some code duplication in
later real inlining pass for testcases with very many function calls. */
-static void
+static unsigned int
cgraph_early_inlining (void)
{
struct cgraph_node *node;
int i;
if (sorrycount || errorcount)
- return;
+ return 0;
#ifdef ENABLE_CHECKING
for (node = cgraph_nodes; node; node = node->next)
gcc_assert (!node->aux);
gcc_assert (!node->global.inlined_to);
#endif
free (order);
+ return 0;
}
/* When inlining shall be performed. */
void ipa_method_tree_print (FILE *);
void ipa_method_modify_print (FILE *);
-void ipcp_driver (void);
+unsigned int ipcp_driver (void);
#endif /* IPA_PROP_H */
on the local information that was produced by ipa_analyze_function
and ipa_analyze_variable. */
-static void
+static unsigned int
static_execute (void)
{
struct cgraph_node *node;
}
free (order);
+ return 0;
}
static bool
on the local information that was produced by ipa_analyze_function
and ipa_analyze_variable. */
-static void
+static unsigned int
static_execute (void)
{
struct cgraph_node *node;
&& (cgraph_function_body_availability (node) == AVAIL_OVERWRITABLE))
clean_function (node);
}
+ return 0;
}
\f
/* The main entry point for type escape analysis. */
-static void
+static unsigned int
type_escape_execute (void)
{
struct cgraph_node *node;
BITMAP_FREE (been_there_done_that);
BITMAP_FREE (bitmap_tmp);
BITMAP_FREE (results_of_malloc);
+ return 0;
}
static bool
This simple pass moves barriers and removes duplicates so that the
old code is happy.
*/
-void
+unsigned int
cleanup_barriers (void)
{
rtx insn, next, prev;
reorder_insns (insn, insn, prev);
}
}
+ return 0;
}
struct tree_opt_pass pass_cleanup_barriers =
0 /* letter */
};
-void
+unsigned int
purge_line_number_notes (void)
{
rtx last_note = 0;
last_note = insn;
}
}
+ return 0;
}
struct tree_opt_pass pass_purge_lineno_notes =
/* Run old register allocator. Return TRUE if we must exit
rest_of_compilation upon return. */
-static void
+static unsigned int
rest_of_handle_local_alloc (void)
{
int rebuild_notes;
dump_local_alloc (dump_file);
timevar_pop (TV_DUMP);
}
+ return 0;
}
struct tree_opt_pass pass_local_alloc =
\f
/* Initialization of the RTL loop passes. */
-static void
+static unsigned int
rtl_loop_init (void)
{
if (dump_file)
cfg_layout_initialize (0);
current_loops = loop_optimizer_init (LOOPS_NORMAL);
+ return 0;
}
struct tree_opt_pass pass_rtl_loop_init =
\f
/* Finalization of the RTL loop passes. */
-static void
+static unsigned int
rtl_loop_done (void)
{
basic_block bb;
dump_flow_info (dump_file, dump_flags);
current_loops = NULL;
+ return 0;
}
struct tree_opt_pass pass_rtl_loop_done =
return flag_move_loop_invariants;
}
-static void
+static unsigned int
rtl_move_loop_invariants (void)
{
if (current_loops)
move_loop_invariants (current_loops);
+ return 0;
}
struct tree_opt_pass pass_rtl_move_loop_invariants =
return flag_unswitch_loops;
}
-static void
+static unsigned int
rtl_unswitch (void)
{
if (current_loops)
unswitch_loops (current_loops);
+ return 0;
}
struct tree_opt_pass pass_rtl_unswitch =
return (flag_peel_loops || flag_unroll_loops || flag_unroll_all_loops);
}
-static void
+static unsigned int
rtl_unroll_and_peel_loops (void)
{
if (current_loops)
unroll_and_peel_loops (current_loops, flags);
}
+ return 0;
}
struct tree_opt_pass pass_rtl_unroll_and_peel_loops =
#endif
}
-static void
+static unsigned int
rtl_doloop (void)
{
#ifdef HAVE_doloop_end
if (current_loops)
doloop_optimize_loops (current_loops);
#endif
+ return 0;
}
struct tree_opt_pass pass_rtl_doloop =
#endif
}
-static void
+static unsigned int
rest_of_handle_mode_switching (void)
{
#ifdef OPTIMIZE_MODE_SWITCHING
optimize_mode_switching ();
no_new_pseudos = 1;
#endif /* OPTIMIZE_MODE_SWITCHING */
+ return 0;
}
/* Run instruction scheduler. */
/* Perform SMS module scheduling. */
-static void
+static unsigned int
rest_of_handle_sms (void)
{
#ifdef INSN_SCHEDULING
cfg_layout_finalize ();
free_dominance_info (CDI_DOMINATORS);
#endif /* INSN_SCHEDULING */
+ return 0;
}
struct tree_opt_pass pass_sms =
/* Main entry point for expanding OMP-GIMPLE into runtime calls. */
-static void
+static unsigned int
execute_expand_omp (void)
{
build_omp_regions ();
root_omp_region = NULL;
omp_regions = NULL;
}
+ return 0;
}
static bool
\f
/* Main entry point. */
-static void
+static unsigned int
execute_lower_omp (void)
{
all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
splay_tree_delete (all_contexts);
all_contexts = NULL;
}
+ return 0;
}
static bool
execute_one_pass (struct tree_opt_pass *pass)
{
bool initializing_dump;
+ unsigned int todo_after = 0;
/* See if we're supposed to run this pass. */
if (pass->gate && !pass->gate ())
/* Do it! */
if (pass->execute)
{
- pass->execute ();
+ todo_after = pass->execute ();
last_verified = 0;
}
}
/* Run post-pass cleanup and verification. */
- execute_todo (pass->todo_flags_finish);
+ execute_todo (todo_after | pass->todo_flags_finish);
/* Flush and close dump file. */
if (dump_file_name)
}
-static void
+static unsigned int
rest_of_handle_gcse2 (void)
{
gcse_after_reload_main (get_insns ());
rebuild_jump_labels (get_insns ());
delete_trivially_dead_insns (get_insns (), max_reg_num ());
+ return 0;
}
struct tree_opt_pass pass_gcse2 =
}
-static void
+static unsigned int
rest_of_handle_postreload (void)
{
/* Do a very simple CSE pass over just the hard registers. */
Remove any EH edges associated with them. */
if (flag_non_call_exceptions)
purge_all_dead_edges ();
+ return 0;
}
struct tree_opt_pass pass_postreload_cse =
}
/* Predict branch probabilities and estimate profile of the tree CFG. */
-static void
+static unsigned int
tree_estimate_probability (void)
{
basic_block bb;
dump_tree_cfg (dump_file, dump_flags);
if (profile_status == PROFILE_ABSENT)
profile_status = PROFILE_GUESSED;
+ return 0;
}
\f
/* __builtin_expect dropped tokens into the insn stream describing expected
\f
/* Do branch profiling and static profile estimation passes. */
-static void
+static unsigned int
rest_of_handle_branch_prob (void)
{
struct loops loops;
flow_loops_free (&loops);
free_dominance_info (CDI_DOMINATORS);
+ return 0;
}
struct tree_opt_pass pass_branch_prob =
/* Same as split_all_insns, but do not expect CFG to be available.
Used by machine dependent reorg passes. */
-void
+unsigned int
split_all_insns_noflow (void)
{
rtx next, insn;
split_insn (insn);
}
}
+ return 0;
}
\f
#ifdef HAVE_peephole2
return (optimize > 0 && flag_peephole2);
}
-static void
+static unsigned int
rest_of_handle_peephole2 (void)
{
#ifdef HAVE_peephole2
peephole2_optimize ();
#endif
+ return 0;
}
struct tree_opt_pass pass_peephole2 =
'z' /* letter */
};
-static void
+static unsigned int
rest_of_handle_split_all_insns (void)
{
split_all_insns (1);
+ return 0;
}
struct tree_opt_pass pass_split_all_insns =
/* Convert register usage from flat register file usage to a stack
register file. */
-static void
+static unsigned int
rest_of_handle_stack_regs (void)
{
#ifdef STACK_REGS
}
}
#endif
+ return 0;
}
struct tree_opt_pass pass_stack_regs =
/* Register allocation pre-pass, to reduce number of moves necessary
for two-address machines. */
-static void
+static unsigned int
rest_of_handle_regmove (void)
{
regmove_optimize (get_insns (), max_reg_num ());
cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE);
+ return 0;
}
struct tree_opt_pass pass_regmove =
return (optimize > 0);
}
-static void
+static unsigned int
rest_of_handle_stack_adjustments (void)
{
life_analysis (PROP_POSTRELOAD);
if (!ACCUMULATE_OUTGOING_ARGS)
#endif
combine_stack_adjustments ();
+ return 0;
}
struct tree_opt_pass pass_stack_adjustments =
/* Run the regrename and cprop passes. */
-static void
+static unsigned int
rest_of_handle_regrename (void)
{
if (flag_rename_registers)
regrename_optimize ();
if (flag_cprop_registers)
copyprop_hardreg_forward ();
+ return 0;
}
struct tree_opt_pass pass_regrename =
}
/* Run delay slot optimization. */
-static void
+static unsigned int
rest_of_handle_delay_slots (void)
{
#ifdef DELAY_SLOTS
dbr_schedule (get_insns ());
#endif
+ return 0;
}
struct tree_opt_pass pass_delay_slots =
}
-static void
+static unsigned int
rest_of_handle_machine_reorg (void)
{
targetm.machine_dependent_reorg ();
+ return 0;
}
struct tree_opt_pass pass_machine_reorg =
/* The entry point of the sequence abstraction algorithm. */
-static void
+static unsigned int
rest_of_rtl_seqabstr (void)
{
life_analysis (PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
/* Abstract out common insn sequences. */
rtl_seqabstr ();
+ return 0;
}
struct tree_opt_pass pass_rtl_seqabstr = {
extern enum rtx_code unsigned_condition (enum rtx_code);
extern enum rtx_code signed_condition (enum rtx_code);
extern void mark_jump_label (rtx, rtx, int);
-extern void cleanup_barriers (void);
+extern unsigned int cleanup_barriers (void);
/* In jump.c */
extern bool squeeze_notes (rtx *, rtx *);
extern enum reg_class reg_alternate_class (int);
extern void split_all_insns (int);
-extern void split_all_insns_noflow (void);
+extern unsigned int split_all_insns_noflow (void);
#define MAX_SAVED_CONST_INT 64
extern GTY(()) rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
rtx, rtx, rtx);
extern void delete_for_peephole (rtx, rtx);
extern int condjump_in_parallel_p (rtx);
-extern void purge_line_number_notes (void);
+extern unsigned int purge_line_number_notes (void);
/* In emit-rtl.c. */
extern int max_reg_num (void);
extern void push_topmost_sequence (void);
extern void pop_topmost_sequence (void);
extern void set_new_first_and_last_insn (rtx, rtx);
-extern void unshare_all_rtl (void);
+extern unsigned int unshare_all_rtl (void);
extern void unshare_all_rtl_again (rtx);
extern void unshare_all_rtl_in_chain (rtx);
extern void verify_rtl_sharing (void);
extern void emit_insn_after_with_line_notes (rtx, rtx, rtx);
extern rtx emit (rtx);
extern void renumber_insns (void);
-extern void remove_unnecessary_notes (void);
+extern unsigned int remove_unnecessary_notes (void);
extern rtx delete_insn (rtx);
extern rtx entry_of_function (void);
extern void delete_insn_chain (rtx, rtx);
unsigned int, int);
/* In flow.c */
-extern void recompute_reg_usage (void);
+extern unsigned int recompute_reg_usage (void);
extern void delete_dead_jumptables (void);
extern void print_rtl_with_bb (FILE *, rtx);
extern void dump_flow_info (FILE *, int);
extern void tracer (unsigned int);
/* In var-tracking.c */
-extern void variable_tracking_main (void);
+extern unsigned int variable_tracking_main (void);
/* In stor-layout.c. */
extern void get_mode_bounds (enum machine_mode, int, enum machine_mode,
}
/* Run instruction scheduler. */
-static void
+static unsigned int
rest_of_handle_sched (void)
{
#ifdef INSN_SCHEDULING
schedule_insns ();
#endif
+ return 0;
}
static bool
}
/* Run second scheduling pass after reload. */
-static void
+static unsigned int
rest_of_handle_sched2 (void)
{
#ifdef INSN_SCHEDULING
else
schedule_insns ();
#endif
+ return 0;
}
struct tree_opt_pass pass_sched =
}
/* Run tracer. */
-static void
+static unsigned int
rest_of_handle_tracer (void)
{
if (dump_file)
tracer (0);
cleanup_cfg (CLEANUP_EXPENSIVE);
reg_scan (get_insns (), max_reg_num ());
+ return 0;
}
struct tree_opt_pass pass_tracer =
static void make_goto_expr_edges (basic_block);
static edge tree_redirect_edge_and_branch (edge, basic_block);
static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
-static void split_critical_edges (void);
+static unsigned int split_critical_edges (void);
/* Various helpers. */
static inline bool stmt_starts_bb_p (tree, tree);
dump_tree_cfg (dump_file, dump_flags);
}
-static void
+static unsigned int
execute_build_cfg (void)
{
build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
+ return 0;
}
struct tree_opt_pass pass_build_cfg =
}
}
-static void
+static unsigned int
remove_useless_stmts (void)
{
struct rus_data data;
remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
}
while (data.repeat);
+ return 0;
}
/* Split all critical edges. */
-static void
+static unsigned int
split_critical_edges (void)
{
basic_block bb;
}
}
end_recording_case_labels ();
+ return 0;
}
struct tree_opt_pass pass_split_crit_edges =
\f
/* Emit return warnings. */
-static void
+static unsigned int
execute_warn_function_return (void)
{
#ifdef USE_MAPPED_LOCATION
}
}
}
+ return 0;
}
/* Emit noreturn warnings. */
-static void
+static unsigned int
execute_warn_function_noreturn (void)
{
if (warn_missing_noreturn
warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
"for attribute %<noreturn%>",
cfun->decl);
+ return 0;
}
struct tree_opt_pass pass_warn_function_noreturn =
<L10>:;
*/
-static void
+static unsigned int
merge_phi_nodes (void)
{
basic_block *worklist = XNEWVEC (basic_block, n_basic_blocks);
}
free (worklist);
+ return 0;
}
static bool
\f
/* Entry point for complex operation lowering during optimization. */
-static void
+static unsigned int
tree_lower_complex (void)
{
int old_last_basic_block;
basic_block bb;
if (!init_dont_simulate_again ())
- return;
+ return 0;
complex_lattice_values = VEC_alloc (complex_lattice_t, heap, num_ssa_names);
VEC_safe_grow (complex_lattice_t, heap,
htab_delete (complex_variable_components);
VEC_free (tree, heap, complex_ssa_name_components);
VEC_free (complex_lattice_t, heap, complex_lattice_values);
+ return 0;
}
struct tree_opt_pass pass_lower_complex =
\f
/* Entry point for complex operation lowering without optimization. */
-static void
+static unsigned int
tree_lower_complex_O0 (void)
{
int old_last_basic_block = last_basic_block;
for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
expand_complex_operations_1 (&bsi);
}
+ return 0;
}
static bool
various attributes for each variable used by alias analysis and the
optimizer. */
-static void
+static unsigned int
find_referenced_vars (void)
{
htab_t vars_found;
}
htab_delete (vars_found);
+ return 0;
}
struct tree_opt_pass pass_referenced_vars =
}
}
-static void
+static unsigned int
lower_eh_constructs (void)
{
struct leh_state null_state;
htab_delete (finally_tree);
collect_eh_region_array ();
+ return 0;
}
struct tree_opt_pass pass_lower_eh =
#include "target.h"
/* local function prototypes */
-static void main_tree_if_conversion (void);
+static unsigned int main_tree_if_conversion (void);
static tree tree_if_convert_stmt (struct loop *loop, tree, tree,
block_stmt_iterator *);
static void tree_if_convert_cond_expr (struct loop *, tree, tree,
/* Tree if-conversion pass management. */
-static void
+static unsigned int
main_tree_if_conversion (void)
{
unsigned i, loop_num;
struct loop *loop;
if (!current_loops)
- return;
+ return 0;
loop_num = current_loops->num;
for (i = 0; i < loop_num; i++)
tree_if_conversion (loop, true);
}
-
+ return 0;
}
static bool
Steps 3 and 4 are done using the dominator tree walker
(walk_dominator_tree). */
-static void
+static unsigned int
rewrite_into_ssa (void)
{
bitmap *dfs;
timevar_pop (TV_TREE_SSA_OTHER);
in_ssa_p = true;
+ return 0;
}
static void mf_decl_cache_locals (void);
static void mf_decl_clear_locals (void);
static void mf_xform_derefs (void);
-static void execute_mudflap_function_ops (void);
+static unsigned int execute_mudflap_function_ops (void);
/* Addressable variables instrumentation. */
static void mf_xform_decls (tree, tree);
static tree mx_xfn_xform_decls (tree *, int *, void *);
static void mx_register_decls (tree, tree *);
-static void execute_mudflap_function_decls (void);
+static unsigned int execute_mudflap_function_decls (void);
/* ------------------------------------------------------------------------ */
tree optimizations have been performed, but we have to preserve the CFG
for expansion from trees to RTL. */
-static void
+static unsigned int
execute_mudflap_function_ops (void)
{
/* Don't instrument functions such as the synthetic constructor
built during mudflap_finish_file. */
if (mf_marked_p (current_function_decl) ||
DECL_ARTIFICIAL (current_function_decl))
- return;
+ return 0;
push_gimplify_context ();
mf_decl_clear_locals ();
pop_gimplify_context (NULL);
+ return 0;
}
/* Create and initialize local shadow variables for the lookup cache
of their BIND_EXPR binding context, and we lose liveness information
for the declarations we wish to instrument. */
-static void
+static unsigned int
execute_mudflap_function_decls (void)
{
/* Don't instrument functions such as the synthetic constructor
built during mudflap_finish_file. */
if (mf_marked_p (current_function_decl) ||
DECL_ARTIFICIAL (current_function_decl))
- return;
+ return 0;
push_gimplify_context ();
DECL_ARGUMENTS (current_function_decl));
pop_gimplify_context (NULL);
+ return 0;
}
/* This struct is passed between mf_xform_decls to store state needed
then we could either have the languages register the optimization or
we could change the gating function to check the current language. */
-static void
+static unsigned int
tree_nrv (void)
{
tree result = DECL_RESULT (current_function_decl);
/* If this function does not return an aggregate type in memory, then
there is nothing to do. */
if (!aggregate_value_p (result, current_function_decl))
- return;
+ return 0;
/* Look through each block for assignments to the RESULT_DECL. */
FOR_EACH_BB (bb)
than previous return statements, then we can not perform
NRV optimizations. */
if (found != ret_expr)
- return;
+ return 0;
}
else
found = ret_expr;
|| DECL_ALIGN (found) > DECL_ALIGN (result)
|| !lang_hooks.types_compatible_p (TREE_TYPE (found),
result_type))
- return;
+ return 0;
}
}
}
if (!found)
- return;
+ return 0;
/* If dumping details, then note once and only the NRV replacement. */
if (dump_file && (dump_flags & TDF_DETAILS))
/* FOUND is no longer used. Ensure it gets removed. */
var_ann (found)->used = 0;
+ return 0;
}
struct tree_opt_pass pass_nrv =
escaped prior to the call. If it has, modifications to the local
variable will produce visible changes elsewhere, as in PR c++/19317. */
-static void
+static unsigned int
execute_return_slot_opt (void)
{
basic_block bb;
}
}
}
+ return 0;
}
struct tree_opt_pass pass_return_slot =
static bool merge_object_sizes (struct object_size_info *, tree, tree,
unsigned HOST_WIDE_INT);
static bool plus_expr_object_size (struct object_size_info *, tree, tree);
-static void compute_object_sizes (void);
+static unsigned int compute_object_sizes (void);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
/* Simple pass to optimize all __builtin_object_size () builtins. */
-static void
+static unsigned int
compute_object_sizes (void)
{
basic_block bb;
}
fini_object_sizes ();
+ return 0;
}
struct tree_opt_pass pass_object_sizes =
because after the tree optimizers have run such cleanups may
be necessary. */
-static void
+static unsigned int
execute_cleanup_cfg_pre_ipa (void)
{
cleanup_tree_cfg ();
+ return 0;
}
struct tree_opt_pass pass_cleanup_cfg =
because after the tree optimizers have run such cleanups may
be necessary. */
-static void
+static unsigned int
execute_cleanup_cfg_post_optimizing (void)
{
fold_cond_expr_cond ();
cleanup_tree_cfg ();
cleanup_dead_labels ();
group_case_labels ();
+ return 0;
}
struct tree_opt_pass pass_cleanup_cfg_post_optimizing =
/* Pass: do the actions required to finish with tree-ssa optimization
passes. */
-static void
+static unsigned int
execute_free_datastructures (void)
{
/* ??? This isn't the right place for this. Worse, it got computed
/* Remove the ssa structures. Do it here since this includes statement
annotations that need to be intact during disband_implicit_edges. */
delete_tree_ssa ();
+ return 0;
}
struct tree_opt_pass pass_free_datastructures =
};
/* Pass: free cfg annotations. */
-static void
+static unsigned int
execute_free_cfg_annotations (void)
{
basic_block bb;
the integrity of statements in the EH throw table. */
verify_eh_throw_table_statements ();
#endif
+ return 0;
}
struct tree_opt_pass pass_free_cfg_annotations =
changed some properties - such as marked functions nothrow. Remove now
redundant edges and basic blocks. */
-static void
+static unsigned int
execute_fixup_cfg (void)
{
basic_block bb;
}
cleanup_tree_cfg ();
+ return 0;
}
struct tree_opt_pass pass_fixup_cfg =
/* Do the actions required to initialize internal data structures used
in tree-ssa optimization passes. */
-static void
+static unsigned int
execute_init_datastructures (void)
{
/* Allocate hash tables, arrays and other structures. */
init_tree_ssa ();
+ return 0;
}
struct tree_opt_pass pass_init_datastructures =
R. Morgan, ``Building an Optimizing Compiler'',
Butterworth-Heinemann, Boston, MA, 1998. pp 176-186. */
-static void
+static unsigned int
rewrite_out_of_ssa (void)
{
var_map map;
delete_var_map (map);
in_ssa_p = false;
+ return 0;
}
bool (*gate) (void);
/* This is the code to run. If null, then there should be sub-passes
- otherwise this pass does nothing. */
- void (*execute) (void);
+ otherwise this pass does nothing. The return value contains
+ TODOs to execute in addition to those in TODO_flags_finish. */
+ unsigned int (*execute) (void);
/* A list of sub-passes to run, dependent on gate predicate. */
struct tree_opt_pass *sub;
return false;
}
-static void
+static unsigned int
tree_profiling (void)
{
branch_prob ();
easy to adjust it, if and when there is some. */
free_dominance_info (CDI_DOMINATORS);
free_dominance_info (CDI_POST_DOMINATORS);
+ return 0;
}
struct tree_opt_pass pass_tree_profile =
We only consider SSA names defined by phi nodes; rest is left to the
ordinary constant propagation pass. */
-void
+unsigned int
scev_const_prop (void)
{
basic_block bb;
unsigned i;
if (!current_loops)
- return;
+ return 0;
FOR_EACH_BB (bb)
{
update_stmt (ass);
}
}
+ return 0;
}
extern tree instantiate_parameters (struct loop *, tree);
extern void gather_stats_on_scev_database (void);
extern void scev_analysis (void);
-void scev_const_prop (void);
+unsigned int scev_const_prop (void);
extern bool simple_iv (struct loop *, tree, tree, affine_iv *, bool);
/* Main entry point. */
-static void
+static unsigned int
tree_sra (void)
{
/* Initialize local variables. */
BITMAP_FREE (sra_type_decomp_cache);
BITMAP_FREE (sra_type_inst_cache);
obstack_free (&sra_obstack, NULL);
+ return 0;
}
static bool
max-aliased-vops}), alias sets are grouped to avoid severe
compile-time slow downs and memory consumption. See group_aliases. */
-static void
+static unsigned int
compute_may_aliases (void)
{
struct alias_info *ai;
}
recalculate_used_alone ();
updating_used_alone = false;
+ return 0;
}
/* Create structure field variables for structures used in this function. */
-static void
+static unsigned int
create_structure_vars (void)
{
basic_block bb;
}
htab_delete (used_portions);
VEC_free (tree, heap, varvec);
-
+ return 0;
}
static bool
}
-static void
+static unsigned int
do_ssa_ccp (void)
{
execute_ssa_ccp (false);
+ return 0;
}
};
-static void
+static unsigned int
do_ssa_store_ccp (void)
{
/* If STORE-CCP is not enabled, we just run regular CCP. */
execute_ssa_ccp (flag_tree_store_ccp != 0);
+ return 0;
}
static bool
/* A simple pass that attempts to fold all builtin functions. This pass
is run after we've propagated as many constants as we can. */
-static void
+static unsigned int
execute_fold_all_builtins (void)
{
bool cfg_changed = false;
/* Delete unreachable blocks. */
if (cfg_changed)
cleanup_tree_cfg ();
+ return 0;
}
return flag_tree_copy_prop != 0;
}
-static void
+static unsigned int
do_copy_prop (void)
{
execute_copy_prop (false, false);
+ return 0;
}
struct tree_opt_pass pass_copy_prop =
};
-static void
+static unsigned int
do_phi_only_copy_prop (void)
{
execute_copy_prop (false, true);
+ return 0;
}
struct tree_opt_pass pass_phi_only_copy_prop =
return flag_tree_store_copy_prop != 0 || flag_tree_copy_prop != 0;
}
-static void
+static unsigned int
store_copy_prop (void)
{
/* If STORE-COPY-PROP is not enabled, we just run regular COPY-PROP. */
execute_copy_prop (flag_tree_store_copy_prop != 0, false);
+ return 0;
}
struct tree_opt_pass pass_store_copy_prop =
then cause the SSA->normal pass to attempt to coalesce them all to the same
variable. */
-static void
+static unsigned int
rename_ssa_copies (void)
{
var_map map;
}
delete_var_map (map);
+ return 0;
}
/* Return true if copy rename is to be performed. */
}
/* Pass entry points. */
-static void
+static unsigned int
tree_ssa_dce (void)
{
perform_tree_ssa_dce (/*aggressive=*/false);
+ return 0;
}
-static void
+static unsigned int
tree_ssa_dce_loop (void)
{
perform_tree_ssa_dce (/*aggressive=*/false);
free_numbers_of_iterations_estimates (current_loops);
scev_reset ();
+ return 0;
}
-static void
+static unsigned int
tree_ssa_cd_dce (void)
{
perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
+ return 0;
}
static bool
every new symbol exposed, its corresponding bit will be set in
VARS_TO_RENAME. */
-static void
+static unsigned int
tree_ssa_dominator_optimize (void)
{
struct dom_walk_data walk_data;
VEC_free (tree, heap, avail_exprs_stack);
VEC_free (tree, heap, const_and_copies_stack);
VEC_free (tree, heap, stmts_to_rescan);
+ return 0;
}
static bool
};
static bool gate_dse (void);
-static void tree_ssa_dse (void);
+static unsigned int tree_ssa_dse (void);
static void dse_initialize_block_local_data (struct dom_walk_data *,
basic_block,
bool);
}
}
-static void
+static unsigned int
tree_ssa_dse (void)
{
struct dom_walk_data walk_data;
/* For now, just wipe the post-dominator information. */
free_dominance_info (CDI_POST_DOMINATORS);
+ return 0;
}
static bool
/* Main entry point for the forward propagation optimizer. */
-static void
+static unsigned int
tree_ssa_forward_propagate_single_use_vars (void)
{
basic_block bb;
if (cfg_changed)
cleanup_tree_cfg ();
+ return 0;
}
of the loop. This is beneficial since it increases efficiency of
code motion optimizations. It also saves one jump on entry to the loop. */
-static void
+static unsigned int
copy_loop_headers (void)
{
struct loops *loops;
loops = loop_optimizer_init (LOOPS_HAVE_PREHEADERS
| LOOPS_HAVE_SIMPLE_LATCHES);
if (!loops)
- return;
+ return 0;
#ifdef ENABLE_CHECKING
verify_loop_structure (loops);
free (copied_bbs);
loop_optimizer_finalize (loops);
+ return 0;
}
static bool
/* Loop optimizer initialization. */
-static void
+static unsigned int
tree_ssa_loop_init (void)
{
current_loops = tree_loop_optimizer_init ();
if (!current_loops)
- return;
+ return 0;
scev_initialize (current_loops);
+ return 0;
}
struct tree_opt_pass pass_tree_loop_init =
/* Loop invariant motion pass. */
-static void
+static unsigned int
tree_ssa_loop_im (void)
{
if (!current_loops)
- return;
+ return 0;
tree_ssa_lim (current_loops);
+ return 0;
}
static bool
/* Loop unswitching pass. */
-static void
+static unsigned int
tree_ssa_loop_unswitch (void)
{
if (!current_loops)
- return;
+ return 0;
tree_ssa_unswitch_loops (current_loops);
+ return 0;
}
static bool
/* Loop autovectorization. */
-static void
+static unsigned int
tree_vectorize (void)
{
vectorize_loops (current_loops);
+ return 0;
}
static bool
/* Loop nest optimizations. */
-static void
+static unsigned int
tree_linear_transform (void)
{
if (!current_loops)
- return;
+ return 0;
linear_transform_loops (current_loops);
+ return 0;
}
static bool
/* Canonical induction variable creation pass. */
-static void
+static unsigned int
tree_ssa_loop_ivcanon (void)
{
if (!current_loops)
- return;
+ return 0;
canonicalize_induction_variables (current_loops);
+ return 0;
}
static bool
/* Remove empty loops. */
-static void
+static unsigned int
tree_ssa_empty_loop (void)
{
if (!current_loops)
- return;
+ return 0;
remove_empty_loops (current_loops);
+ return 0;
}
struct tree_opt_pass pass_empty_loop =
/* Record bounds on numbers of iterations of loops. */
-static void
+static unsigned int
tree_ssa_loop_bounds (void)
{
if (!current_loops)
- return;
+ return 0;
estimate_numbers_of_iterations (current_loops);
scev_reset ();
+ return 0;
}
struct tree_opt_pass pass_record_bounds =
/* Complete unrolling of loops. */
-static void
+static unsigned int
tree_complete_unroll (void)
{
if (!current_loops)
- return;
+ return 0;
tree_unroll_loops_completely (current_loops,
flag_unroll_loops
|| flag_peel_loops
|| optimize >= 3);
+ return 0;
}
static bool
/* Prefetching. */
-static void
+static unsigned int
tree_ssa_loop_prefetch (void)
{
if (!current_loops)
- return;
+ return 0;
tree_ssa_prefetch_arrays (current_loops);
+ return 0;
}
static bool
/* Induction variable optimizations. */
-static void
+static unsigned int
tree_ssa_loop_ivopts (void)
{
if (!current_loops)
- return;
+ return 0;
tree_ssa_iv_optimize (current_loops);
+ return 0;
}
static bool
/* Loop optimizer finalization. */
-static void
+static unsigned int
tree_ssa_loop_done (void)
{
if (!current_loops)
- return;
+ return 0;
free_numbers_of_iterations_estimates (current_loops);
scev_finalize ();
loop_optimizer_finalize (current_loops);
current_loops = NULL;
+ return 0;
}
struct tree_opt_pass pass_tree_loop_done =
/* Go through all the floating-point SSA_NAMEs, and call
execute_cse_reciprocals_1 on each of them. */
-static void
+static unsigned int
execute_cse_reciprocals (void)
{
basic_block bb;
free_dominance_info (CDI_DOMINATORS | CDI_POST_DOMINATORS);
free_alloc_pool (occ_pool);
+ return 0;
}
struct tree_opt_pass pass_cse_reciprocals =
#include "tree-dump.h"
#include "langhooks.h"
-static void tree_ssa_phiopt (void);
+static unsigned int tree_ssa_phiopt (void);
static bool conditional_replacement (basic_block, basic_block,
edge, edge, tree, tree, tree);
static bool value_replacement (basic_block, basic_block,
A similar transformation is done for MAX_EXPR. */
-static void
+static unsigned int
tree_ssa_phiopt (void)
{
basic_block bb;
}
free (bb_order);
+ return 0;
}
/* Returns the list of basic blocks in the function in an order that guarantees
/* Gate and execute functions for PRE. */
-static void
+static unsigned int
do_pre (void)
{
execute_pre (false);
+ return 0;
}
static bool
/* Gate and execute functions for FRE. */
-static void
+static unsigned int
execute_fre (void)
{
execute_pre (true);
+ return 0;
}
static bool
/* Gate and execute functions for Reassociation. */
-static void
+static unsigned int
execute_reassoc (void)
{
init_reassoc ();
repropagate_negates ();
fini_reassoc ();
+ return 0;
}
struct tree_opt_pass pass_reassoc =
/* Gate and execute functions for PRE. */
-static void
+static unsigned int
do_sink (void)
{
execute_sink_code ();
+ return 0;
}
static bool
}
/* Execute the driver for IPA PTA. */
-static void
+static unsigned int
ipa_pta_execute (void)
{
struct cgraph_node *node;
in_ipa_mode = 0;
delete_alias_heapvars ();
delete_points_to_sets ();
+ return 0;
}
struct tree_opt_pass pass_ipa_pta =
/* Main driver for un-cprop. */
-static void
+static unsigned int
tree_ssa_uncprop (void)
{
struct dom_walk_data walk_data;
}
}
}
-
+ return 0;
}
}
}
-static void
+static unsigned int
execute_early_warn_uninitialized (void)
{
block_stmt_iterator bsi;
walk_tree (bsi_stmt_ptr (bsi), warn_uninitialized_var,
context, NULL);
}
+ return 0;
}
-static void
+static unsigned int
execute_late_warn_uninitialized (void)
{
basic_block bb;
FOR_EACH_BB (bb)
for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
warn_uninitialized_phi (phi);
+ return 0;
}
static bool
/* Entry point to the stdarg optimization pass. */
-static void
+static unsigned int
execute_optimize_stdarg (void)
{
basic_block bb;
fprintf (dump_file, "%d", cfun->va_list_fpr_size);
fputs (" FPR units.\n", dump_file);
}
+ return 0;
}
add_virtual_phis ();
}
-static void
+static unsigned int
execute_tail_recursion (void)
{
tree_optimize_tail_calls_1 (false);
+ return 0;
}
static bool
return flag_optimize_sibling_calls != 0;
}
-static void
+static unsigned int
execute_tail_calls (void)
{
tree_optimize_tail_calls_1 (true);
+ return 0;
}
struct tree_opt_pass pass_tail_recursion =
return flag_tree_vectorize != 0;
}
-static void
+static unsigned int
expand_vector_operations (void)
{
block_stmt_iterator bsi;
update_stmt_if_modified (bsi_stmt (bsi));
}
}
+ return 0;
}
struct tree_opt_pass pass_lower_vector =
DON'T KNOW. In the future, it may be worthwhile to propagate
probabilities to aid branch prediction. */
-static void
+static unsigned int
execute_vrp (void)
{
insert_range_assertions ();
update_ssa (TODO_update_ssa);
finalize_jump_threads ();
-
+ return 0;
}
static bool
extern void expand_main_function (void);
extern void init_dummy_function_start (void);
extern void expand_dummy_function_end (void);
-extern void init_function_for_compilation (void);
+extern unsigned int init_function_for_compilation (void);
extern void allocate_struct_function (tree);
extern void init_function_start (tree);
extern bool use_register_for_decl (tree);
/* The entry point to variable tracking pass. */
-void
+unsigned int
variable_tracking_main (void)
{
if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20)
- return;
+ return 0;
mark_dfs_back_edges ();
vt_initialize ();
if (!vt_stack_adjustments ())
{
vt_finalize ();
- return;
+ return 0;
}
}
}
vt_finalize ();
+ return 0;
}
\f
static bool
return (optimize > 0 && flag_web);
}
-static void
+static unsigned int
rest_of_handle_web (void)
{
web_main ();
delete_trivially_dead_insns (get_insns (), max_reg_num ());
cleanup_cfg (CLEANUP_EXPENSIVE);
reg_scan (get_insns (), max_reg_num ());
+ return 0;
}
struct tree_opt_pass pass_web =