/* RTL dead code elimination.
- Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
+ Copyright (C) 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
-Software Foundation; either version 2, or (at your option) any later
+Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
-02110-1301, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
Core mark/delete routines
------------------------------------------------------------------------- */
-/* The data-flow information needed by this pass. */
+/* True if we are invoked while the df engine is running; in this case,
+ we don't want to reenter it. */
static bool df_in_progress = false;
-/* True if we deleted at least one instruction. */
-static bool something_changed;
-
/* Instructions that have been marked but whose dependencies have not
yet been processed. */
static VEC(rtx,heap) *worklist;
+/* Bitmap of instructions marked as needed indexed by INSN_UID. */
+static sbitmap marked;
+
+/* Bitmap obstacks used for block processing by the fast algorithm. */
static bitmap_obstack dce_blocks_bitmap_obstack;
static bitmap_obstack dce_tmp_bitmap_obstack;
-static sbitmap marked = NULL;
/* A subroutine for which BODY is part of the instruction being tested;
either the top-level pattern, or an element of a PARALLEL. The
return false;
default:
- if (volatile_insn_p (body))
+ if (volatile_refs_p (body))
return false;
if (flag_non_call_exceptions && may_trap_p (body))
}
}
+
/* Return true if INSN is a normal instruction that can be deleted by
the DCE pass. */
rtx body, x;
int i;
+ if (CALL_P (insn)
+ /* We cannot delete calls inside of the recursive dce because
+ this may cause basic blocks to be deleted and this messes up
+ the rest of the stack of optimization passes. */
+ && (!df_in_progress)
+ /* We cannot delete pure or const sibling calls because it is
+ hard to see the result. */
+ && (!SIBLING_CALL_P (insn))
+ /* We can delete dead const or pure calls as long as they do not
+ infinite loop. */
+ && (RTL_CONST_OR_PURE_CALL_P (insn)
+ && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)))
+ return true;
+
if (!NONJUMP_INSN_P (insn))
return false;
}
-/* Return true if INSN has not been marked as needed. */
+/* Return true if INSN has been marked as needed. */
static inline int
marked_insn_p (rtx insn)
{
- if (insn)
- return TEST_BIT (marked, INSN_UID (insn));
- else
- /* Artificial defs are always needed and they do not have an
- insn. */
- return true;
+ /* Artificial defs are always needed and they do not have an insn.
+ We should never see them here. */
+ gcc_assert (insn);
+ return TEST_BIT (marked, INSN_UID (insn));
}
instruction containing DEST. */
static void
-mark_nonreg_stores_1 (rtx dest, rtx pattern, void *data)
+mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data)
{
if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
mark_insn ((rtx) data, true);
instruction containing DEST. */
static void
-mark_nonreg_stores_2 (rtx dest, rtx pattern, void *data)
+mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data)
{
if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
mark_insn ((rtx) data, false);
}
-/* Initialize global variables for a new DCE pass. */
-
-static void
-init_dce (bool fast)
-{
- if (!df_in_progress)
- {
- if (!fast)
- df_chain_add_problem (DF_UD_CHAIN);
- df_analyze ();
- }
-
- if (dump_file)
- df_dump (dump_file);
-
- bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
- bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
- marked = sbitmap_alloc (get_max_uid () + 1);
- sbitmap_zero (marked);
-}
-
-
/* Delete all REG_EQUAL notes of the registers INSN writes, to prevent
bad dangling REG_EQUAL notes. */
}
-/* Delete every instruction that hasn't been marked. Clear the insn
- from DCE_DF if DF_DELETE is true. */
+/* Delete every instruction that hasn't been marked. */
static void
delete_unmarked_insns (void)
{
basic_block bb;
rtx insn, next;
+ bool must_clean = false;
- something_changed = false;
FOR_EACH_BB (bb)
FOR_BB_INSNS_SAFE (bb, insn, next)
if (INSN_P (insn))
{
+ /* Always delete no-op moves. */
if (noop_move_p (insn))
- {
- /* Note that this code does not handle the case where
- the last insn of libcall is deleted. As it turns out
- this case is excluded in the call to noop_move_p. */
- rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
- if (note && (XEXP (note, 0) != insn))
- {
- rtx new_libcall_insn = next_real_insn (insn);
- rtx retval_note = find_reg_note (XEXP (note, 0),
- REG_RETVAL, NULL_RTX);
- REG_NOTES (new_libcall_insn)
- = gen_rtx_INSN_LIST (REG_LIBCALL, XEXP (note, 0),
- REG_NOTES (new_libcall_insn));
- XEXP (retval_note, 0) = new_libcall_insn;
- }
- }
+ ;
+
+ /* Otherwise rely only on the DCE algorithm. */
else if (marked_insn_p (insn))
continue;
- /* WARNING, this debugging can itself cause problems if the
- edge of the counter causes part of a libcall to be
- deleted but not all of it. */
if (!dbg_cnt (dce))
continue;
if (dump_file)
fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn));
- /* Before we delete the insn, we have to delete
- REG_EQUAL of the destination regs of the deleted insn
- to prevent dangling REG_EQUAL. */
- delete_corresponding_reg_eq_notes (insn);
+ /* Before we delete the insn we have to delete REG_EQUAL notes
+ for the destination regs in order to avoid dangling notes. */
+ delete_corresponding_reg_eq_notes (insn);
+
+ /* If a pure or const call is deleted, this may make the cfg
+ have unreachable blocks. We remember this and call
+ delete_unreachable_blocks at the end. */
+ if (CALL_P (insn))
+ must_clean = true;
+ /* Now delete the insn. */
delete_insn_and_edges (insn);
- something_changed = true;
}
-}
-
-
-/* Mark all insns using DELETE_PARM in the libcall that contains
- START_INSN. */
-static void
-mark_libcall (rtx start_insn, bool delete_parm)
-{
- rtx note = find_reg_note (start_insn, REG_LIBCALL_ID, NULL_RTX);
- int id = INTVAL (XEXP (note, 0));
- rtx insn;
- mark_insn (start_insn, delete_parm);
- insn = NEXT_INSN (start_insn);
-
- /* There are tales, long ago and far away, of the mystical nested
- libcall. No one alive has actually seen one, but other parts of
- the compiler support them so we will here. */
- for (insn = NEXT_INSN (start_insn); insn; insn = NEXT_INSN (insn))
- {
- if (INSN_P (insn))
- {
- /* Stay in the loop as long as we are in any libcall. */
- if ((note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX)))
- {
- if (id == INTVAL (XEXP (note, 0)))
- {
- mark_insn (insn, delete_parm);
- if (dump_file)
- fprintf (dump_file, "matching forward libcall %d[%d]\n",
- INSN_UID (insn), id);
- }
- }
- else
- break;
- }
- }
-
- for (insn = PREV_INSN (start_insn); insn; insn = PREV_INSN (insn))
- {
- if (INSN_P (insn))
- {
- /* Stay in the loop as long as we are in any libcall. */
- if ((note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX)))
- {
- if (id == INTVAL (XEXP (note, 0)))
- {
- mark_insn (insn, delete_parm);
- if (dump_file)
- fprintf (dump_file, "matching backward libcall %d[%d]\n",
- INSN_UID (insn), id);
- }
- }
- else
- break;
- }
- }
+ /* Deleted a pure or const call. */
+ if (must_clean)
+ delete_unreachable_blocks ();
}
prescan_insns_for_dce (bool fast)
{
basic_block bb;
- rtx insn;
+ rtx insn, next;
if (dump_file)
fprintf (dump_file, "Finding needed instructions:\n");
FOR_EACH_BB (bb)
- FOR_BB_INSNS (bb, insn)
- if (INSN_P (insn))
- {
- rtx note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX);
- if (note)
- mark_libcall (insn, fast);
- else if (deletable_insn_p (insn, fast))
- mark_nonreg_stores (PATTERN (insn), insn, fast);
- else
- mark_insn (insn, fast);
- }
+ FOR_BB_INSNS_SAFE (bb, insn, next)
+ if (INSN_P (insn))
+ {
+ if (deletable_insn_p (insn, fast))
+ mark_nonreg_stores (PATTERN (insn), insn, fast);
+ else
+ mark_insn (insn, fast);
+ }
if (dump_file)
fprintf (dump_file, "Finished finding needed instructions:\n");
for (use_rec = df_get_artificial_uses (bb->index);
*use_rec; use_rec++)
for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
- mark_insn (DF_REF_INSN (defs->ref), false);
+ if (! DF_REF_IS_ARTIFICIAL (defs->ref))
+ mark_insn (DF_REF_INSN (defs->ref), false);
}
}
+
/* Mark every instruction that defines a register value that INSN uses. */
static void
struct df_link *defs;
struct df_ref **use_rec;
- /* If this is part of a libcall, mark the entire libcall. */
- if (find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX))
- mark_libcall (insn, false);
-
for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
{
struct df_ref *use = *use_rec;
fprintf (dump_file, " in insn %d:\n", INSN_UID (insn));
}
for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
- mark_insn (DF_REF_INSN (defs->ref), false);
+ if (! DF_REF_IS_ARTIFICIAL (defs->ref))
+ mark_insn (DF_REF_INSN (defs->ref), false);
+ }
+}
+
+
+/* Initialize global variables for a new DCE pass. */
+
+static void
+init_dce (bool fast)
+{
+ if (!df_in_progress)
+ {
+ if (!fast)
+ df_chain_add_problem (DF_UD_CHAIN);
+ df_analyze ();
}
+
+ if (dump_file)
+ df_dump (dump_file);
+
+ if (fast)
+ {
+ bitmap_obstack_initialize (&dce_blocks_bitmap_obstack);
+ bitmap_obstack_initialize (&dce_tmp_bitmap_obstack);
+ }
+
+ marked = sbitmap_alloc (get_max_uid () + 1);
+ sbitmap_zero (marked);
}
+/* Free the data allocated by init_dce. */
+
static void
-end_ud_dce (void)
+fini_dce (bool fast)
{
sbitmap_free (marked);
- gcc_assert (VEC_empty (rtx, worklist));
+
+ if (fast)
+ {
+ bitmap_obstack_release (&dce_blocks_bitmap_obstack);
+ bitmap_obstack_release (&dce_tmp_bitmap_obstack);
+ }
}
{
rtx insn;
- df_in_progress = false;
init_dce (false);
prescan_insns_for_dce (false);
insn = VEC_pop (rtx, worklist);
mark_reg_dependencies (insn);
}
+ VEC_free (rtx, heap, worklist);
+
/* Before any insns are deleted, we must remove the chains since
they are not bidirectional. */
df_remove_problem (df_chain);
delete_unmarked_insns ();
- end_ud_dce ();
+ fini_dce (false);
return 0;
}
static bool
gate_ud_dce (void)
{
- return optimize > 1 && flag_dce;
+ return optimize > 1 && flag_dce
+ && dbg_cnt (dce_ud);
}
-struct tree_opt_pass pass_ud_rtl_dce =
+struct rtl_opt_pass pass_ud_rtl_dce =
{
+ {
+ RTL_PASS,
"dce", /* name */
gate_ud_dce, /* gate */
rest_of_handle_ud_dce, /* execute */
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_dump_func |
- TODO_df_finish |
- TODO_ggc_collect, /* todo_flags_finish */
- 'w' /* letter */
+ TODO_df_finish | TODO_verify_rtl_sharing |
+ TODO_ggc_collect /* todo_flags_finish */
+ }
};
+
/* -------------------------------------------------------------------------
Fast DCE functions
------------------------------------------------------------------------- */
+/* Process basic block BB. Return true if the live_in set has
+ changed. REDO_OUT is true if the info at the bottom of the block
+ needs to be recalculated before starting. AU is the proper set of
+ artificial uses. */
-/* Free the data allocated by init_dce. */
-
-static void
-fini_dce (void)
+static bool
+byte_dce_process_block (basic_block bb, bool redo_out, bitmap au)
{
- sbitmap_free (marked);
- bitmap_obstack_release (&dce_blocks_bitmap_obstack);
- bitmap_obstack_release (&dce_tmp_bitmap_obstack);
- df_in_progress = false;
+ bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
+ rtx insn;
+ bool block_changed;
+ struct df_ref **def_rec;
+
+ if (redo_out)
+ {
+ /* Need to redo the live_out set of this block when one of
+ the successors of this block has had a change in its live_in
+ set. */
+ edge e;
+ edge_iterator ei;
+ df_confluence_function_n con_fun_n = df_byte_lr->problem->con_fun_n;
+ bitmap_clear (DF_BYTE_LR_OUT (bb));
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ (*con_fun_n) (e);
+ }
+
+ if (dump_file)
+ {
+ fprintf (dump_file, "processing block %d live out = ", bb->index);
+ df_print_byte_regset (dump_file, DF_BYTE_LR_OUT (bb));
+ }
+
+ bitmap_copy (local_live, DF_BYTE_LR_OUT (bb));
+
+ df_byte_lr_simulate_artificial_refs_at_end (bb, local_live);
+
+ FOR_BB_INSNS_REVERSE (bb, insn)
+ if (INSN_P (insn))
+ {
+ /* The insn is needed if there is someone who uses the output. */
+ for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
+ {
+ struct df_ref *def = *def_rec;
+ unsigned int last;
+ unsigned int dregno = DF_REF_REGNO (def);
+ unsigned int start = df_byte_lr_get_regno_start (dregno);
+ unsigned int len = df_byte_lr_get_regno_len (dregno);
+
+ unsigned int sb;
+ unsigned int lb;
+ /* This is one of the only places where DF_MM_MAY should
+ be used for defs. Need to make sure that we are
+ checking for all of the bits that may be used. */
+
+ if (!df_compute_accessed_bytes (def, DF_MM_MAY, &sb, &lb))
+ {
+ start += sb;
+ len = lb - sb;
+ }
+
+ if (bitmap_bit_p (au, dregno))
+ {
+ mark_insn (insn, true);
+ goto quickexit;
+ }
+
+ last = start + len;
+ while (start < last)
+ if (bitmap_bit_p (local_live, start++))
+ {
+ mark_insn (insn, true);
+ goto quickexit;
+ }
+ }
+
+ quickexit:
+
+ /* No matter if the instruction is needed or not, we remove
+ any regno in the defs from the live set. */
+ df_byte_lr_simulate_defs (insn, local_live);
+
+ /* On the other hand, we do not allow the dead uses to set
+ anything in local_live. */
+ if (marked_insn_p (insn))
+ df_byte_lr_simulate_uses (insn, local_live);
+
+ if (dump_file)
+ {
+ fprintf (dump_file, "finished processing insn %d live out = ",
+ INSN_UID (insn));
+ df_print_byte_regset (dump_file, local_live);
+ }
+ }
+
+ df_byte_lr_simulate_artificial_refs_at_top (bb, local_live);
+
+ block_changed = !bitmap_equal_p (local_live, DF_BYTE_LR_IN (bb));
+ if (block_changed)
+ bitmap_copy (DF_BYTE_LR_IN (bb), local_live);
+ BITMAP_FREE (local_live);
+ return block_changed;
}
/* Process basic block BB. Return true if the live_in set has
- changed. */
+ changed. REDO_OUT is true if the info at the bottom of the block
+ needs to be recalculated before starting. AU is the proper set of
+ artificial uses. */
static bool
-dce_process_block (basic_block bb, bool redo_out)
+dce_process_block (basic_block bb, bool redo_out, bitmap au)
{
bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
rtx insn;
bool block_changed;
- struct df_ref **def_rec, **use_rec;
- unsigned int bb_index = bb->index;
+ struct df_ref **def_rec;
if (redo_out)
{
bitmap_copy (local_live, DF_LR_OUT (bb));
- /* Process the artificial defs and uses at the bottom of the block. */
- for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
- {
- struct df_ref *def = *def_rec;
- if (((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
- && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))))
- bitmap_clear_bit (local_live, DF_REF_REGNO (def));
- }
-
- for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
- {
- struct df_ref *use = *use_rec;
- if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
- bitmap_set_bit (local_live, DF_REF_REGNO (use));
- }
+ df_simulate_artificial_refs_at_end (bb, local_live);
FOR_BB_INSNS_REVERSE (bb, insn)
if (INSN_P (insn))
{
- /* If this is a recursive call, the libcall will have already
- been marked. */
- if (!marked_insn_p (insn))
- {
- bool needed = false;
-
- /* The insn is needed if there is someone who uses the output. */
- for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
- if (bitmap_bit_p (local_live, DF_REF_REGNO (*def_rec)))
- {
- needed = true;
- break;
- }
+ bool needed = false;
+
+ /* The insn is needed if there is someone who uses the output. */
+ for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
+ if (bitmap_bit_p (local_live, DF_REF_REGNO (*def_rec))
+ || bitmap_bit_p (au, DF_REF_REGNO (*def_rec)))
+ {
+ needed = true;
+ break;
+ }
- if (needed)
- {
- rtx note = find_reg_note (insn, REG_LIBCALL_ID, NULL_RTX);
-
- /* If we need to mark an insn in the middle of a
- libcall, we need to back up to mark the entire
- libcall. Given that libcalls are rare, rescanning
- the block should be a reasonable solution to trying
- to figure out how to back up. */
- if (note)
- {
- if (dump_file)
- fprintf (dump_file, "needed libcall %d\n", INSN_UID (insn));
- mark_libcall (insn, true);
- BITMAP_FREE (local_live);
- return dce_process_block (bb, false);
- }
- else
- mark_insn (insn, true);
- }
- }
+ if (needed)
+ mark_insn (insn, true);
/* No matter if the instruction is needed or not, we remove
any regno in the defs from the live set. */
df_simulate_uses (insn, local_live);
}
- for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
- {
- struct df_ref *def = *def_rec;
- if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP)
- && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))))
- bitmap_clear_bit (local_live, DF_REF_REGNO (def));
- }
-#ifdef EH_USES
- /* Process the uses that are live into an exception handler. */
- for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
- {
- /* Add use to set of uses in this BB. */
- struct df_ref *use = *use_rec;
- if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
- bitmap_set_bit (local_live, DF_REF_REGNO (use));
- }
-#endif
+ df_simulate_artificial_refs_at_top (bb, local_live);
block_changed = !bitmap_equal_p (local_live, DF_LR_IN (bb));
if (block_changed)
return block_changed;
}
+
+/* Perform fast DCE once initialization is done. If BYTE_LEVEL is
+ true, use the byte level dce, otherwise do it at the pseudo
+ level. */
+
static void
-fast_dce (void)
+fast_dce (bool byte_level)
{
int *postorder = df_get_postorder (DF_BACKWARD);
int n_blocks = df_get_n_blocks (DF_BACKWARD);
- int i;
/* The set of blocks that have been seen on this iteration. */
bitmap processed = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
/* The set of blocks that need to have the out vectors reset because
bitmap all_blocks = BITMAP_ALLOC (&dce_blocks_bitmap_obstack);
bool global_changed = true;
- int loop_count = 0;
+ /* These regs are considered always live so if they end up dying
+ because of some def, we need to bring them back again. Calling
+ df_simulate_fixup_sets has the disadvantage of calling
+ bb_has_eh_pred once per insn, so we cache the information
+ here. */
+ bitmap au = df->regular_block_artificial_uses;
+ bitmap au_eh = df->eh_block_artificial_uses;
+ int i;
prescan_insns_for_dce (true);
while (global_changed)
{
global_changed = false;
+
for (i = 0; i < n_blocks; i++)
{
int index = postorder[i];
continue;
}
- local_changed
- = dce_process_block (bb, bitmap_bit_p (redo_out, index));
+ if (byte_level)
+ local_changed
+ = byte_dce_process_block (bb, bitmap_bit_p (redo_out, index),
+ bb_has_eh_pred (bb) ? au_eh : au);
+ else
+ local_changed
+ = dce_process_block (bb, bitmap_bit_p (redo_out, index),
+ bb_has_eh_pred (bb) ? au_eh : au);
bitmap_set_bit (processed, index);
if (local_changed)
to redo the dataflow equations for the blocks that had a
change at the top of the block. Then we need to redo the
iteration. */
- df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);
+ if (byte_level)
+ df_analyze_problem (df_byte_lr, all_blocks, postorder, n_blocks);
+ else
+ df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);
if (old_flag & DF_LR_RUN_DCE)
df_set_flags (DF_LR_RUN_DCE);
+
prescan_insns_for_dce (true);
}
- loop_count++;
}
delete_unmarked_insns ();
}
-/* Callback for running pass_rtl_dce. */
+/* Fast register level DCE. */
static unsigned int
rest_of_handle_fast_dce (void)
{
init_dce (true);
- fast_dce ();
- fini_dce ();
- df_in_progress = false;
+ fast_dce (false);
+ fini_dce (true);
+ return 0;
+}
+
+
+/* Fast byte level DCE. */
+
+static unsigned int
+rest_of_handle_fast_byte_dce (void)
+{
+ df_byte_lr_add_problem ();
+ init_dce (true);
+ fast_dce (true);
+ fini_dce (true);
return 0;
}
info, and then returns to allow the rest of the problems to be run.
This can be called by elsewhere but it will not update the bit
- vectors for any other problems than LR.
-*/
+ vectors for any other problems than LR. */
void
run_fast_df_dce (void)
df_in_progress = true;
rest_of_handle_fast_dce ();
+ df_in_progress = false;
+
df_set_flags (old_flags);
}
}
-static bool
-gate_fast_dce (void)
-{
- return optimize > 0 && flag_dce;
-}
-
-/* Run a fast DCE pass and return true if any instructions were
- deleted. */
+/* Run a fast DCE pass. */
-bool
+void
run_fast_dce (void)
{
- return gate_fast_dce () && (rest_of_handle_fast_dce (), something_changed);
+ if (flag_dce)
+ rest_of_handle_fast_dce ();
}
-struct tree_opt_pass pass_fast_rtl_dce =
+static bool
+gate_fast_dce (void)
{
+ return optimize > 0 && flag_dce
+ && dbg_cnt (dce_fast);
+}
+
+struct rtl_opt_pass pass_fast_rtl_dce =
+{
+ {
+ RTL_PASS,
"dce", /* name */
gate_fast_dce, /* gate */
rest_of_handle_fast_dce, /* execute */
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_dump_func |
- TODO_df_finish |
- TODO_ggc_collect, /* todo_flags_finish */
- 'w' /* letter */
+ TODO_df_finish | TODO_verify_rtl_sharing |
+ TODO_ggc_collect /* todo_flags_finish */
+ }
};
+struct rtl_opt_pass pass_fast_rtl_byte_dce =
+{
+ {
+ RTL_PASS,
+ "byte-dce", /* name */
+ gate_fast_dce, /* gate */
+ rest_of_handle_fast_byte_dce, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_DCE, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func |
+ TODO_df_finish | TODO_verify_rtl_sharing |
+ TODO_ggc_collect /* todo_flags_finish */
+ }
+};