/* Dead code elimination pass for the GNU compiler.
- Copyright (C) 2002, 2003, 2004 Free Software Foundation, Inc.
+ Copyright (C) 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
Contributed by Ben Elliston <bje@redhat.com>
and Andrew MacLeod <amacleod@redhat.com>
Adapted to use control dependence by Steven Bosscher, SUSE Labs.
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
+#include "obstack.h"
#include "basic-block.h"
#include "tree.h"
on the Ith edge. */
bitmap *control_dependence_map;
+/* Vector indicating that a basic block has already had all the edges
+ processed that it is control dependent on. */
+sbitmap visited_control_parents;
+
/* Execute CODE for each edge (given number EDGE_NUMBER within the CODE)
for which the block with index N is control dependent. */
#define EXECUTE_IF_CONTROL_DEPENDENT(N, EDGE_NUMBER, CODE) \
- EXECUTE_IF_SET_IN_BITMAP (control_dependence_map[N], 0, EDGE_NUMBER, CODE)
+ { \
+ bitmap_iterator bi; \
+ \
+ EXECUTE_IF_SET_IN_BITMAP (control_dependence_map[N], 0, EDGE_NUMBER, bi) \
+ { \
+ CODE; \
+ } \
+ }
/* Local function prototypes. */
static inline void set_control_dependence_map_bit (basic_block, int);
static inline basic_block find_pdom (basic_block);
static inline void mark_stmt_necessary (tree, bool);
-static inline void mark_operand_necessary (tree);
+static inline void mark_operand_necessary (tree, bool);
-static bool need_to_preserve_store (tree);
static void mark_stmt_if_obviously_necessary (tree, bool);
static void find_obviously_necessary_stmts (struct edge_list *);
{
if (bb == ENTRY_BLOCK_PTR)
return;
- if (bb == EXIT_BLOCK_PTR)
- abort ();
+ gcc_assert (bb != EXIT_BLOCK_PTR);
bitmap_set_bit (control_dependence_map[bb->index], edge_index);
}
basic_block current_block;
basic_block ending_block;
-#ifdef ENABLE_CHECKING
- if (INDEX_EDGE_PRED_BB (el, edge_index) == EXIT_BLOCK_PTR)
- abort ();
-#endif
+ gcc_assert (INDEX_EDGE_PRED_BB (el, edge_index) != EXIT_BLOCK_PTR);
if (INDEX_EDGE_PRED_BB (el, edge_index) == ENTRY_BLOCK_PTR)
ending_block = ENTRY_BLOCK_PTR->next_bb;
static inline basic_block
find_pdom (basic_block block)
{
- if (block == ENTRY_BLOCK_PTR)
- abort ();
- else if (block == EXIT_BLOCK_PTR)
+ gcc_assert (block != ENTRY_BLOCK_PTR);
+
+ if (block == EXIT_BLOCK_PTR)
return EXIT_BLOCK_PTR;
else
{
static inline void
mark_stmt_necessary (tree stmt, bool add_to_worklist)
{
-#ifdef ENABLE_CHECKING
- if (stmt == NULL
- || stmt == error_mark_node
- || (stmt && DECL_P (stmt)))
- abort ();
-#endif
+ gcc_assert (stmt);
+ gcc_assert (stmt != error_mark_node);
+ gcc_assert (!DECL_P (stmt));
if (NECESSARY (stmt))
return;
VARRAY_PUSH_TREE (worklist, stmt);
}
-/* Mark the statement defining operand OP as necessary. */
+/* Mark the statement defining operand OP as necessary. PHIONLY is true
+ if we should only mark it necessary if it is a phi node. */
static inline void
-mark_operand_necessary (tree op)
+mark_operand_necessary (tree op, bool phionly)
{
tree stmt;
int ver;
-#ifdef ENABLE_CHECKING
- if (op == NULL)
- abort ();
-#endif
+ gcc_assert (op);
ver = SSA_NAME_VERSION (op);
if (TEST_BIT (processed, ver))
SET_BIT (processed, ver);
stmt = SSA_NAME_DEF_STMT (op);
-#ifdef ENABLE_CHECKING
- if (stmt == NULL)
- abort ();
-#endif
+ gcc_assert (stmt);
if (NECESSARY (stmt)
- || IS_EMPTY_STMT (stmt))
+ || IS_EMPTY_STMT (stmt)
+ || (phionly && TREE_CODE (stmt) != PHI_NODE))
return;
NECESSARY (stmt) = 1;
VARRAY_PUSH_TREE (worklist, stmt);
}
\f
-/* Return true if a store to a variable needs to be preserved. */
-
-static inline bool
-need_to_preserve_store (tree ssa_name)
-{
- return (needs_to_live_in_memory (SSA_NAME_VAR (ssa_name)));
-}
-\f
-/* Mark STMT as necessary if it is obviously is. Add it to the worklist if
+/* Mark STMT as necessary if it obviously is. Add it to the worklist if
it can make other statements necessary.
If AGGRESSIVE is false, control statements are conservatively marked as
static void
mark_stmt_if_obviously_necessary (tree stmt, bool aggressive)
{
- def_optype defs;
v_may_def_optype v_may_defs;
v_must_def_optype v_must_defs;
stmt_ann_t ann;
- size_t i;
+ tree op, def;
+ ssa_op_iter iter;
/* Statements that are implicitly live. Most function calls, asm and return
statements are required. Labels and BIND_EXPR nodes are kept because
return;
case MODIFY_EXPR:
- if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR
- && TREE_SIDE_EFFECTS (TREE_OPERAND (stmt, 1)))
+ op = get_call_expr_in (stmt);
+ if (op && TREE_SIDE_EFFECTS (op))
{
mark_stmt_necessary (stmt, true);
return;
break;
case GOTO_EXPR:
- if (! simple_goto_p (stmt))
- mark_stmt_necessary (stmt, true);
+ gcc_assert (!simple_goto_p (stmt));
+ mark_stmt_necessary (stmt, true);
return;
case COND_EXPR:
- if (GOTO_DESTINATION (COND_EXPR_THEN (stmt))
- == GOTO_DESTINATION (COND_EXPR_ELSE (stmt)))
- {
- /* A COND_EXPR is obviously dead if the target labels are the same.
- We cannot kill the statement at this point, so to prevent the
- statement from being marked necessary, we replace the condition
- with a constant. The stmt is killed later on in cfg_cleanup. */
- COND_EXPR_COND (stmt) = integer_zero_node;
- modify_stmt (stmt);
- return;
- }
+ gcc_assert (EDGE_COUNT (bb_for_stmt (stmt)->succs) == 2);
/* Fall through. */
case SWITCH_EXPR:
}
ann = stmt_ann (stmt);
- /* If the statement has volatile operands, it needs to be preserved. Same
- for statements that can alter control flow in unpredictable ways. */
- if (ann->has_volatile_ops
- || is_ctrl_altering_stmt (stmt))
+
+ /* If the statement has volatile operands, it needs to be preserved.
+ Same for statements that can alter control flow in unpredictable
+ ways. */
+ if (ann->has_volatile_ops || is_ctrl_altering_stmt (stmt))
{
mark_stmt_necessary (stmt, true);
return;
get_stmt_operands (stmt);
- defs = DEF_OPS (ann);
- for (i = 0; i < NUM_DEFS (defs); i++)
+ FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
{
- tree def = DEF_OP (defs, i);
- if (need_to_preserve_store (def))
+ if (is_global_var (SSA_NAME_VAR (def)))
{
mark_stmt_necessary (stmt, true);
return;
}
}
+ /* Check virtual definitions. If we get here, the only virtual
+ definitions we should see are those generated by assignment
+ statements. */
v_may_defs = V_MAY_DEF_OPS (ann);
- for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
- {
- tree v_may_def = V_MAY_DEF_RESULT (v_may_defs, i);
- if (need_to_preserve_store (v_may_def))
- {
- mark_stmt_necessary (stmt, true);
- return;
- }
- }
-
v_must_defs = V_MUST_DEF_OPS (ann);
- for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
+ if (NUM_V_MAY_DEFS (v_may_defs) > 0 || NUM_V_MUST_DEFS (v_must_defs) > 0)
{
- tree v_must_def = V_MUST_DEF_OP (v_must_defs, i);
- if (need_to_preserve_store (v_must_def))
+ tree lhs;
+
+ gcc_assert (TREE_CODE (stmt) == MODIFY_EXPR);
+
+ /* Note that we must not check the individual virtual operands
+ here. In particular, if this is an aliased store, we could
+ end up with something like the following (SSA notation
+ redacted for brevity):
+
+ foo (int *p, int i)
+ {
+ int x;
+	   p_1 = (i_2 > 3) ? &x : p;
+
+ # x_4 = V_MAY_DEF <x_3>
+ *p_1 = 5;
+
+ return 2;
+ }
+
+	 Notice that the store to '*p_1' should be preserved; if we
+	 were to check the virtual definitions in that store, we would
+	 not mark it needed.  This is because 'x' is not a global
+	 variable.
+
+ Therefore, we check the base address of the LHS. If the
+ address is a pointer, we check if its name tag or type tag is
+ a global variable. Otherwise, we check if the base variable
+ is a global. */
+ lhs = TREE_OPERAND (stmt, 0);
+ if (REFERENCE_CLASS_P (lhs))
+ lhs = get_base_address (lhs);
+
+ if (lhs == NULL_TREE)
{
+ /* If LHS is NULL, it means that we couldn't get the base
+ address of the reference. In which case, we should not
+ remove this store. */
mark_stmt_necessary (stmt, true);
- return;
- }
+ }
+ else if (DECL_P (lhs))
+ {
+ /* If the store is to a global symbol, we need to keep it. */
+ if (is_global_var (lhs))
+ mark_stmt_necessary (stmt, true);
+ }
+ else if (INDIRECT_REF_P (lhs))
+ {
+ tree ptr = TREE_OPERAND (lhs, 0);
+ struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
+ tree nmt = (pi) ? pi->name_mem_tag : NULL_TREE;
+ tree tmt = var_ann (SSA_NAME_VAR (ptr))->type_mem_tag;
+
+ /* If either the name tag or the type tag for PTR is a
+ global variable, then the store is necessary. */
+ if ((nmt && is_global_var (nmt))
+ || (tmt && is_global_var (tmt)))
+ {
+ mark_stmt_necessary (stmt, true);
+ return;
+ }
+ }
+ else
+ gcc_unreachable ();
}
return;
tree phi;
/* Check any PHI nodes in the block. */
- for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
+ for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
{
NECESSARY (phi) = 0;
Thus, we only need to mark PHIs for real variables which
need their result preserved as being inherently necessary. */
if (is_gimple_reg (PHI_RESULT (phi))
- && need_to_preserve_store (PHI_RESULT (phi)))
+ && is_global_var (SSA_NAME_VAR (PHI_RESULT (phi))))
mark_stmt_necessary (phi, true);
}
NECESSARY (stmt) = 0;
mark_stmt_if_obviously_necessary (stmt, el != NULL);
}
-
- /* Mark this basic block as `not visited'. A block will be marked
- visited when the edges that it is control dependent on have been
- marked. */
- bb->flags &= ~BB_VISITED;
}
if (el)
and we currently do not have a means to recognize the finite ones. */
FOR_EACH_BB (bb)
{
- for (e = bb->succ; e; e = e->succ_next)
+ edge_iterator ei;
+ FOR_EACH_EDGE (e, ei, bb->succs)
if (e->flags & EDGE_DFS_BACK)
mark_control_dependent_edges_necessary (e->dest, el);
}
static void
mark_control_dependent_edges_necessary (basic_block bb, struct edge_list *el)
{
- int edge_number;
+ unsigned edge_number;
+
+ gcc_assert (bb != EXIT_BLOCK_PTR);
+
+ if (bb == ENTRY_BLOCK_PTR)
+ return;
EXECUTE_IF_CONTROL_DEPENDENT (bb->index, edge_number,
{
SET_BIT (last_stmt_necessary, cd_bb->index);
t = last_stmt (cd_bb);
- if (is_ctrl_stmt (t))
+ if (t && is_ctrl_stmt (t))
mark_stmt_necessary (t, true);
});
}
containing `i' is control dependent on, but only if we haven't
already done so. */
basic_block bb = bb_for_stmt (i);
- if (! (bb->flags & BB_VISITED))
+ if (bb != ENTRY_BLOCK_PTR
+ && ! TEST_BIT (visited_control_parents, bb->index))
{
- bb->flags |= BB_VISITED;
+ SET_BIT (visited_control_parents, bb->index);
mark_control_dependent_edges_necessary (bb, el);
}
}
{
tree arg = PHI_ARG_DEF (i, k);
if (TREE_CODE (arg) == SSA_NAME)
- mark_operand_necessary (arg);
+ mark_operand_necessary (arg, false);
}
if (aggressive)
for (k = 0; k < PHI_NUM_ARGS (i); k++)
{
basic_block arg_bb = PHI_ARG_EDGE (i, k)->src;
- if (! (arg_bb->flags & BB_VISITED))
+ if (arg_bb != ENTRY_BLOCK_PTR
+ && ! TEST_BIT (visited_control_parents, arg_bb->index))
{
- arg_bb->flags |= BB_VISITED;
+ SET_BIT (visited_control_parents, arg_bb->index);
mark_control_dependent_edges_necessary (arg_bb, el);
}
}
/* Propagate through the operands. Examine all the USE, VUSE and
V_MAY_DEF operands in this statement. Mark all the statements
which feed this statement's uses as necessary. */
- vuse_optype vuses;
- v_may_def_optype v_may_defs;
- use_optype uses;
- stmt_ann_t ann;
- size_t k;
+ ssa_op_iter iter;
+ tree use;
get_stmt_operands (i);
- ann = stmt_ann (i);
-
- uses = USE_OPS (ann);
- for (k = 0; k < NUM_USES (uses); k++)
- mark_operand_necessary (USE_OP (uses, k));
-
- vuses = VUSE_OPS (ann);
- for (k = 0; k < NUM_VUSES (vuses); k++)
- mark_operand_necessary (VUSE_OP (vuses, k));
/* The operands of V_MAY_DEF expressions are also needed as they
represent potential definitions that may reach this
statement (V_MAY_DEF operands allow us to follow def-def
links). */
- v_may_defs = V_MAY_DEF_OPS (ann);
- for (k = 0; k < NUM_V_MAY_DEFS (v_may_defs); k++)
- mark_operand_necessary (V_MAY_DEF_OP (v_may_defs, k));
+
+ FOR_EACH_SSA_TREE_OPERAND (use, i, iter, SSA_OP_ALL_USES)
+ mark_operand_necessary (use, false);
}
}
}
+
+
+/* Propagate necessity around virtual phi nodes used in kill operands.
+ The reason this isn't done during propagate_necessity is because we don't
+ want to keep phis around that are just there for must-defs, unless we
+ absolutely have to. After we've rewritten the reaching definitions to be
+ correct in the previous part of the fixup routine, we can simply propagate
+ around the information about which of these virtual phi nodes are really
+ used, and set the NECESSARY flag accordingly.
+   Note that we do the minimum here to ensure that we keep alive the phis that
+   are actually used in the corrected SSA form.  In particular, some of these
+   phis may end up with all of their arguments identical, and will be deleted
+   by some other pass.  */
+
+static void
+mark_really_necessary_kill_operand_phis (void)
+{
+  basic_block bb;
+  int i;
+
+  /* Seed the worklist with the new virtual phi arguments and virtual
+     uses.  */
+  FOR_EACH_BB (bb)
+    {
+      block_stmt_iterator bsi;
+      tree phi;
+
+      /* A necessary virtual PHI makes the defining statement of each of
+	 its arguments necessary too -- but only when that definition is
+	 itself a PHI node (PHIONLY argument is true).  */
+      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+	{
+	  if (!is_gimple_reg (PHI_RESULT (phi)) && NECESSARY (phi))
+	    {
+	      for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+		mark_operand_necessary (PHI_ARG_DEF (phi, i), true);
+	    }
+	}
+
+      /* Likewise, every necessary statement makes the PHI definitions
+	 feeding its virtual uses and kill operands necessary.  */
+      for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
+	{
+	  tree stmt = bsi_stmt (bsi);
+
+	  if (NECESSARY (stmt))
+	    {
+	      use_operand_p use_p;
+	      ssa_op_iter iter;
+	      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
+					SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
+		{
+		  tree use = USE_FROM_PTR (use_p);
+		  mark_operand_necessary (use, true);
+		}
+	    }
+	}
+    }
+
+  /* Mark all virtual phis still in use as necessary, and all of their
+     arguments that are phis as necessary.  Every entry on the worklist
+     is a PHI node here, because with PHIONLY true,
+     mark_operand_necessary only ever pushes PHI nodes.  */
+  while (VARRAY_ACTIVE_SIZE (worklist) > 0)
+    {
+      tree use = VARRAY_TOP_TREE (worklist);
+      VARRAY_POP (worklist);
+
+      for (i = 0; i < PHI_NUM_ARGS (use); i++)
+	mark_operand_necessary (PHI_ARG_DEF (use, i), true);
+    }
+}
+
+
\f
+
/* Eliminate unnecessary statements. Any instruction not marked as necessary
contributes nothing to the program, and can be deleted. */
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "\nEliminating unnecessary statements:\n");
-
+
clear_special_calls ();
FOR_EACH_BB (bb)
{
/* Remove dead statements. */
for (i = bsi_start (bb); ! bsi_end_p (i) ; )
{
- tree t = bsi_stmt (i);
-
- stats.total++;
-
- /* If `i' is not necessary then remove it. */
- if (! NECESSARY (t))
- remove_dead_stmt (&i, bb);
- else
- {
- if (TREE_CODE (t) == CALL_EXPR)
- notice_special_calls (t);
- else if (TREE_CODE (t) == MODIFY_EXPR
- && TREE_CODE (TREE_OPERAND (t, 1)) == CALL_EXPR)
- notice_special_calls (TREE_OPERAND (t, 1));
- bsi_next (&i);
- }
+ tree t = bsi_stmt (i);
+
+ stats.total++;
+
+ /* If `i' is not necessary then remove it. */
+ if (! NECESSARY (t))
+ remove_dead_stmt (&i, bb);
+ else
+ {
+ tree call = get_call_expr_in (t);
+ if (call)
+ notice_special_calls (call);
+ bsi_next (&i);
+ }
}
}
-}
+ }
\f
/* Remove dead PHI nodes from block BB. */
if (! NECESSARY (phi))
{
- tree next = TREE_CHAIN (phi);
+ tree next = PHI_CHAIN (phi);
if (dump_file && (dump_flags & TDF_DETAILS))
{
else
{
prev = phi;
- phi = TREE_CHAIN (phi);
+ phi = PHI_CHAIN (phi);
}
}
}
remove_dead_stmt (block_stmt_iterator *i, basic_block bb)
{
tree t = bsi_stmt (*i);
+ def_operand_p def_p;
+
+ ssa_op_iter iter;
if (dump_file && (dump_flags & TDF_DETAILS))
{
if (is_ctrl_stmt (t))
{
basic_block post_dom_bb;
- edge e;
-#ifdef ENABLE_CHECKING
/* The post dominance info has to be up-to-date. */
- if (dom_computed[CDI_POST_DOMINATORS] != DOM_OK)
- abort ();
-#endif
+ gcc_assert (dom_computed[CDI_POST_DOMINATORS] == DOM_OK);
/* Get the immediate post dominator of bb. */
post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);
/* Some blocks don't have an immediate post dominator. This can happen
}
/* Redirect the first edge out of BB to reach POST_DOM_BB. */
- redirect_edge_and_branch (bb->succ, post_dom_bb);
- PENDING_STMT (bb->succ) = NULL;
+ redirect_edge_and_branch (EDGE_SUCC (bb, 0), post_dom_bb);
+ PENDING_STMT (EDGE_SUCC (bb, 0)) = NULL;
+ EDGE_SUCC (bb, 0)->probability = REG_BR_PROB_BASE;
+ EDGE_SUCC (bb, 0)->count = bb->count;
/* The edge is no longer associated with a conditional, so it does
not have TRUE/FALSE flags. */
- bb->succ->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
+ EDGE_SUCC (bb, 0)->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
/* If the edge reaches any block other than the exit, then it is a
fallthru edge; if it reaches the exit, then it is not a fallthru
edge. */
if (post_dom_bb != EXIT_BLOCK_PTR)
- bb->succ->flags |= EDGE_FALLTHRU;
+ EDGE_SUCC (bb, 0)->flags |= EDGE_FALLTHRU;
else
- bb->succ->flags &= ~EDGE_FALLTHRU;
+ EDGE_SUCC (bb, 0)->flags &= ~EDGE_FALLTHRU;
/* Remove the remaining the outgoing edges. */
- for (e = bb->succ->succ_next; e != NULL;)
- {
- edge tmp = e;
- e = e->succ_next;
- remove_edge (tmp);
- }
+ while (EDGE_COUNT (bb->succs) != 1)
+ remove_edge (EDGE_SUCC (bb, 1));
}
-
- bsi_remove (i);
+
+ FOR_EACH_SSA_DEF_OPERAND (def_p, t, iter,
+ SSA_OP_VIRTUAL_DEFS | SSA_OP_VIRTUAL_KILLS)
+ {
+ tree def = DEF_FROM_PTR (def_p);
+ bitmap_set_bit (vars_to_rename,
+ var_ann (SSA_NAME_VAR (def))->uid);
+ }
+ bsi_remove (i);
+ release_defs (t);
}
\f
/* Print out removed statement statistics. */
control_dependence_map
= xmalloc (last_basic_block * sizeof (bitmap));
for (i = 0; i < last_basic_block; ++i)
- control_dependence_map[i] = BITMAP_XMALLOC ();
+ control_dependence_map[i] = BITMAP_ALLOC (NULL);
last_stmt_necessary = sbitmap_alloc (last_basic_block);
sbitmap_zero (last_stmt_necessary);
int i;
for (i = 0; i < last_basic_block; ++i)
- BITMAP_XFREE (control_dependence_map[i]);
+ BITMAP_FREE (control_dependence_map[i]);
free (control_dependence_map);
+ sbitmap_free (visited_control_parents);
sbitmap_free (last_stmt_necessary);
}
find_all_control_dependences (el);
timevar_pop (TV_CONTROL_DEPENDENCES);
+ visited_control_parents = sbitmap_alloc (last_basic_block);
+ sbitmap_zero (visited_control_parents);
+
mark_dfs_back_edges ();
}
propagate_necessity (el);
+ mark_really_necessary_kill_operand_phis ();
eliminate_unnecessary_stmts ();
if (aggressive)
free_dominance_info (CDI_POST_DOMINATORS);
- cleanup_tree_cfg ();
-
/* Debugging dumps. */
if (dump_file)
- {
- dump_function_to_file (current_function_decl, dump_file, dump_flags);
- print_stats ();
- }
+ print_stats ();
tree_dce_done (aggressive);
NULL, /* next */
0, /* static_pass_number */
TV_TREE_DCE, /* tv_id */
- PROP_cfg | PROP_ssa, /* properties_required */
+ PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
+ TODO_dump_func | TODO_fix_def_def_chains | TODO_cleanup_cfg | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
+ 0 /* letter */
};
struct tree_opt_pass pass_cd_dce =
NULL, /* next */
0, /* static_pass_number */
TV_TREE_CD_DCE, /* tv_id */
- PROP_cfg | PROP_ssa, /* properties_required */
+ PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_ggc_collect | TODO_verify_ssa | TODO_verify_flow
+ TODO_dump_func | TODO_fix_def_def_chains | TODO_cleanup_cfg | TODO_ggc_collect | TODO_verify_ssa | TODO_verify_flow,
/* todo_flags_finish */
+ 0 /* letter */
};