1 /* Dead code elimination pass for the GNU compiler.
2 Copyright (C) 2002, 2003, 2004 Free Software Foundation, Inc.
3 Contributed by Ben Elliston <bje@redhat.com>
4 and Andrew MacLeod <amacleod@redhat.com>
5 Adapted to use control dependence by Steven Bosscher, SUSE Labs.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 2, or (at your option) any
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 /* Dead code elimination.
28 Building an Optimizing Compiler,
29 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
31 Advanced Compiler Design and Implementation,
32 Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.
34 Dead-code elimination is the removal of statements which have no
35 impact on the program's output. "Dead statements" have no impact
36 on the program's output, while "necessary statements" may have
39 The algorithm consists of three phases:
40 1. Marking as necessary all statements known to be necessary,
41 e.g. most function calls, writing a value to memory, etc;
42 2. Propagating necessary statements, e.g., the statements
43 giving values to operands in necessary statements; and
44 3. Removing dead statements. */
48 #include "coretypes.h"
53 /* These RTL headers are needed for basic-block.h. */
56 #include "hard-reg-set.h"
58 #include "basic-block.h"
61 #include "diagnostic.h"
62 #include "tree-flow.h"
63 #include "tree-gimple.h"
64 #include "tree-dump.h"
65 #include "tree-pass.h"
/* NOTE(review): this dump embeds the original file's line numbers at the
   start of each content line and is missing interior lines (e.g. the body
   of stmt_stats and several blank/comment lines) — confirm against the
   full source before editing code.  */
69 static struct stmt_stats
/* Worklist of statements whose necessity has been established but whose
   operands have not yet been processed (drained by propagate_necessity).  */
77 static varray_type worklist;
79 /* Vector indicating an SSA name has already been processed and marked
   necessary (indexed by SSA_NAME_VERSION).  */
81 static sbitmap processed;
83 /* Vector indicating that last_stmt of a basic block has already been
84 marked as necessary. */
85 static sbitmap last_stmt_necessary;
87 /* Before we can determine whether a control branch is dead, we need to
88 compute which blocks are control dependent on which edges.
90 We expect each block to be control dependent on very few edges so we
91 use a bitmap for each block recording its edges. An array holds the
92 bitmap. The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge of the edge list.  */
94 bitmap *control_dependence_map;
96 /* Execute CODE for each edge (given number EDGE_NUMBER within the CODE)
97 for which the block with index N is control dependent. */
98 #define EXECUTE_IF_CONTROL_DEPENDENT(N, EDGE_NUMBER, CODE) \
100 bitmap_iterator bi; \
102 EXECUTE_IF_SET_IN_BITMAP (control_dependence_map[N], 0, EDGE_NUMBER, bi) \
108 /* Local function prototypes. */
109 static inline void set_control_dependence_map_bit (basic_block, int);
110 static inline void clear_control_dependence_bitmap (basic_block);
111 static void find_all_control_dependences (struct edge_list *);
112 static void find_control_dependence (struct edge_list *, int);
113 static inline basic_block find_pdom (basic_block);
115 static inline void mark_stmt_necessary (tree, bool);
116 static inline void mark_operand_necessary (tree, bool);
118 static void mark_stmt_if_obviously_necessary (tree, bool);
119 static void find_obviously_necessary_stmts (struct edge_list *);
121 static void mark_control_dependent_edges_necessary (basic_block, struct edge_list *);
122 static void propagate_necessity (struct edge_list *);
124 static void eliminate_unnecessary_stmts (void);
125 static void remove_dead_phis (basic_block);
126 static void remove_dead_stmt (block_stmt_iterator *, basic_block);
128 static void print_stats (void);
129 static void tree_dce_init (bool);
130 static void tree_dce_done (bool);
132 /* Indicate block BB is control dependent on an edge with index EDGE_INDEX. */
134 set_control_dependence_map_bit (basic_block bb, int edge_index)
/* Nothing is recorded for the entry block; the exit block must never be
   passed in.  NOTE(review): the `return;` for the ENTRY_BLOCK_PTR case
   appears to be on a line missing from this dump — confirm.  */
136 if (bb == ENTRY_BLOCK_PTR)
138 gcc_assert (bb != EXIT_BLOCK_PTR);
139 bitmap_set_bit (control_dependence_map[bb->index], edge_index);
142 /* Clear all control dependences for block BB. */
144 void clear_control_dependence_bitmap (basic_block bb)
/* Empties BB's per-block bitmap of controlling edge indices.  */
146 bitmap_clear (control_dependence_map[bb->index]);
149 /* Record all blocks' control dependences on all edges in the edge
150 list EL, ala Morgan, Section 3.6. */
153 find_all_control_dependences (struct edge_list *el)
/* Process every edge in EL in index order; each call fills in the
   control_dependence_map entries that this edge controls.  */
157 for (i = 0; i < NUM_EDGES (el); ++i)
158 find_control_dependence (el, i);
161 /* Determine all blocks' control dependences on the given edge with edge_list
162 EL index EDGE_INDEX, ala Morgan, Section 3.6. */
165 find_control_dependence (struct edge_list *el, int edge_index)
167 basic_block current_block;
168 basic_block ending_block;
170 gcc_assert (INDEX_EDGE_PRED_BB (el, edge_index) != EXIT_BLOCK_PTR);
/* The walk below stops at the postdominator of the edge's source; for an
   edge out of the entry block, stop at the first real block instead.  */
172 if (INDEX_EDGE_PRED_BB (el, edge_index) == ENTRY_BLOCK_PTR)
173 ending_block = ENTRY_BLOCK_PTR->next_bb;
175 ending_block = find_pdom (INDEX_EDGE_PRED_BB (el, edge_index));
/* Walk up the postdominator chain from the edge's destination; every
   block visited before reaching ENDING_BLOCK is control dependent on
   this edge.  */
177 for (current_block = INDEX_EDGE_SUCC_BB (el, edge_index);
178 current_block != ending_block && current_block != EXIT_BLOCK_PTR;
179 current_block = find_pdom (current_block))
181 edge e = INDEX_EDGE (el, edge_index);
183 /* For abnormal edges, we don't make current_block control
184 dependent because instructions that throw are always necessary
   anyway.  */
186 if (e->flags & EDGE_ABNORMAL)
189 set_control_dependence_map_bit (current_block, edge_index);
193 /* Find the immediate postdominator PDOM of the specified basic block BLOCK.
194 This function is necessary because some blocks have negative numbers. */
196 static inline basic_block
197 find_pdom (basic_block block)
199 gcc_assert (block != ENTRY_BLOCK_PTR);
201 if (block == EXIT_BLOCK_PTR)
202 return EXIT_BLOCK_PTR;
/* NOTE(review): lines are missing from this dump here — presumably a
   NULL check on the computed immediate postdominator (mapping "no pdom"
   to the exit block) and a final `return bb;` — confirm against the
   full source.  */
205 basic_block bb = get_immediate_dominator (CDI_POST_DOMINATORS, block);
207 return EXIT_BLOCK_PTR;
/* The necessity flag is stored in a field that is otherwise unused for
   GIMPLE statements (asm_written_flag), avoiding extra storage.  */
212 #define NECESSARY(stmt) stmt->common.asm_written_flag
214 /* If STMT is not already marked necessary, mark it, and add it to the
215 worklist if ADD_TO_WORKLIST is true. */
217 mark_stmt_necessary (tree stmt, bool add_to_worklist)
220 gcc_assert (stmt != error_mark_node);
221 gcc_assert (!DECL_P (stmt));
/* Already marked: nothing more to do (early return is on a line missing
   from this dump).  */
223 if (NECESSARY (stmt))
226 if (dump_file && (dump_flags & TDF_DETAILS))
228 fprintf (dump_file, "Marking useful stmt: ");
229 print_generic_stmt (dump_file, stmt, TDF_SLIM);
230 fprintf (dump_file, "\n");
233 NECESSARY (stmt) = 1;
235 VARRAY_PUSH_TREE (worklist, stmt);
238 /* Mark the statement defining operand OP as necessary. PHIONLY is true
239 if we should only mark it necessary if it is a phi node. */
242 mark_operand_necessary (tree op, bool phionly)
/* Each SSA name is handled at most once; `processed' is keyed by
   version number.  */
249 ver = SSA_NAME_VERSION (op);
250 if (TEST_BIT (processed, ver))
252 SET_BIT (processed, ver);
254 stmt = SSA_NAME_DEF_STMT (op);
/* Skip defs that are already necessary, empty, or (in phionly mode)
   not PHI nodes.  NOTE(review): the first arm of this condition is on
   a line missing from this dump.  */
258 || IS_EMPTY_STMT (stmt)
259 || (phionly && TREE_CODE (stmt) != PHI_NODE))
262 NECESSARY (stmt) = 1;
263 VARRAY_PUSH_TREE (worklist, stmt);
267 /* Mark STMT as necessary if it obviously is. Add it to the worklist if
268 it can make other statements necessary.
270 If AGGRESSIVE is false, control statements are conservatively marked as
   necessary. */
274 mark_stmt_if_obviously_necessary (tree stmt, bool aggressive)
276 v_may_def_optype v_may_defs;
277 v_must_def_optype v_must_defs;
282 /* Statements that are implicitly live. Most function calls, asm and return
283 statements are required. Labels and BIND_EXPR nodes are kept because
284 they are control flow, and we have no way of knowing whether they can be
285 removed. DCE can eliminate all the other statements in a block, and CFG
286 can then remove the block and labels. */
/* NOTE(review): several case labels and break statements of this switch
   are on lines missing from this dump; the groupings below are inferred
   from the surviving comments — confirm against the full source.  */
287 switch (TREE_CODE (stmt))
291 case CASE_LABEL_EXPR:
292 mark_stmt_necessary (stmt, false);
298 mark_stmt_necessary (stmt, true);
302 /* Most, but not all function calls are required. Function calls that
303 produce no result and have no side effects (i.e. const pure
304 functions) are unnecessary. */
305 if (TREE_SIDE_EFFECTS (stmt))
306 mark_stmt_necessary (stmt, true);
/* A MODIFY_EXPR may wrap a call on its RHS; keep it if that call has
   side effects.  */
310 op = get_call_expr_in (stmt);
311 if (op && TREE_SIDE_EFFECTS (op))
313 mark_stmt_necessary (stmt, true);
317 /* These values are mildly magic bits of the EH runtime. We can't
318 see the entire lifetime of these values until landing pads are
   generated, so keep them.  */
320 if (TREE_CODE (TREE_OPERAND (stmt, 0)) == EXC_PTR_EXPR
321 || TREE_CODE (TREE_OPERAND (stmt, 0)) == FILTER_EXPR)
323 mark_stmt_necessary (stmt, true);
/* GOTO_EXPR: only computed gotos reach here; simple gotos were lowered.  */
329 gcc_assert (!simple_goto_p (stmt));
330 mark_stmt_necessary (stmt, true);
/* COND_EXPR: conservatively necessary unless running aggressively with
   control-dependence information.  */
334 gcc_assert (EDGE_COUNT (bb_for_stmt (stmt)->succs) == 2);
339 mark_stmt_necessary (stmt, true);
346 ann = stmt_ann (stmt);
348 /* If the statement has volatile operands, it needs to be preserved.
349 Same for statements that can alter control flow in unpredictable
   ways.  */
351 if (ann->has_volatile_ops || is_ctrl_altering_stmt (stmt))
353 mark_stmt_necessary (stmt, true);
357 get_stmt_operands (stmt);
/* Definitions of global variables must be preserved.  */
359 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
361 if (is_global_var (SSA_NAME_VAR (def)))
363 mark_stmt_necessary (stmt, true);
368 /* Check virtual definitions. If we get here, the only virtual
369 definitions we should see are those generated by assignment
   statements.  */
371 v_may_defs = V_MAY_DEF_OPS (ann);
372 v_must_defs = V_MUST_DEF_OPS (ann);
373 if (NUM_V_MAY_DEFS (v_may_defs) > 0 || NUM_V_MUST_DEFS (v_must_defs) > 0)
377 gcc_assert (TREE_CODE (stmt) == MODIFY_EXPR);
379 /* Note that we must not check the individual virtual operands
380 here. In particular, if this is an aliased store, we could
381 end up with something like the following (SSA notation
382 redacted for brevity):
387 p_1 = (i_2 > 3) ? &x : p_1;
389 # x_4 = V_MAY_DEF <x_3>
395 Notice that the store to '*p_1' should be preserved, if we
396 were to check the virtual definitions in that store, we would
397 not mark it needed. This is because 'x' is not a global
   variable.
400 Therefore, we check the base address of the LHS. If the
401 address is a pointer, we check if its name tag or type tag is
402 a global variable. Otherwise, we check if the base variable
   is a global.  */
404 lhs = TREE_OPERAND (stmt, 0);
405 if (REFERENCE_CLASS_P (lhs))
406 lhs = get_base_address (lhs);
408 if (lhs == NULL_TREE)
410 /* If LHS is NULL, it means that we couldn't get the base
411 address of the reference. In which case, we should not
412 remove this store. */
413 mark_stmt_necessary (stmt, true);
415 else if (DECL_P (lhs))
417 /* If the store is to a global symbol, we need to keep it. */
418 if (is_global_var (lhs))
419 mark_stmt_necessary (stmt, true);
421 else if (INDIRECT_REF_P (lhs))
423 tree ptr = TREE_OPERAND (lhs, 0);
424 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
425 tree nmt = (pi) ? pi->name_mem_tag : NULL_TREE;
426 tree tmt = var_ann (SSA_NAME_VAR (ptr))->type_mem_tag;
428 /* If either the name tag or the type tag for PTR is a
429 global variable, then the store is necessary. */
430 if ((nmt && is_global_var (nmt))
431 || (tmt && is_global_var (tmt)))
433 mark_stmt_necessary (stmt, true);
444 /* Find obviously necessary statements. These are things like most function
445 calls, and stores to file level variables.
447 If EL is NULL, control statements are conservatively marked as
448 necessary. Otherwise it contains the list of edges used by control
449 dependence analysis. */
452 find_obviously_necessary_stmts (struct edge_list *el)
455 block_stmt_iterator i;
/* First pass over all blocks (FOR_EACH_BB loop header is on a line
   missing from this dump).  */
462 /* Check any PHI nodes in the block. */
463 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
467 /* PHIs for virtual variables do not directly affect code
468 generation and need not be considered inherently necessary
469 regardless of the bits set in their decl.
471 Thus, we only need to mark PHIs for real variables which
472 need their result preserved as being inherently necessary. */
473 if (is_gimple_reg (PHI_RESULT (phi))
474 && is_global_var (SSA_NAME_VAR (PHI_RESULT (phi))))
475 mark_stmt_necessary (phi, true);
478 /* Check all statements in the block. */
479 for (i = bsi_start (bb); ! bsi_end_p (i); bsi_next (&i))
481 tree stmt = bsi_stmt (i);
482 NECESSARY (stmt) = 0;
483 mark_stmt_if_obviously_necessary (stmt, el != NULL);
486 /* Mark this basic block as `not visited'. A block will be marked
487 visited when the edges that it is control dependent on have been
   marked.  */
489 bb->flags &= ~BB_VISITED;
494 /* Prevent the loops from being removed. We must keep the infinite loops,
495 and we currently do not have a means to recognize the finite ones. */
/* In aggressive mode, keep loops alive by making the destinations of
   DFS back edges necessary.  */
499 FOR_EACH_EDGE (e, ei, bb->succs)
500 if (e->flags & EDGE_DFS_BACK)
501 mark_control_dependent_edges_necessary (e->dest, el);
506 /* Make corresponding control dependent edges necessary. We only
507 have to do this once for each basic block, so we clear the bitmap
   after processing.  */
510 mark_control_dependent_edges_necessary (basic_block bb, struct edge_list *el)
512 unsigned edge_number;
514 gcc_assert (bb != EXIT_BLOCK_PTR);
/* The entry block is never control dependent on anything (early return
   is on a line missing from this dump).  */
516 if (bb == ENTRY_BLOCK_PTR)
519 EXECUTE_IF_CONTROL_DEPENDENT (bb->index, edge_number,
522 basic_block cd_bb = INDEX_EDGE_PRED_BB (el, edge_number);
/* Use last_stmt_necessary so each controlling block's terminator is
   marked at most once.  */
524 if (TEST_BIT (last_stmt_necessary, cd_bb->index))
526 SET_BIT (last_stmt_necessary, cd_bb->index);
528 t = last_stmt (cd_bb);
529 if (t && is_ctrl_stmt (t))
530 mark_stmt_necessary (t, true);
534 /* Propagate necessity using the operands of necessary statements. Process
535 the uses on each statement in the worklist, and add all feeding statements
536 which contribute to the calculation of this value to the worklist.
538 In conservative mode, EL is NULL. */
541 propagate_necessity (struct edge_list *el)
544 bool aggressive = (el ? true : false);
546 if (dump_file && (dump_flags & TDF_DETAILS))
547 fprintf (dump_file, "\nProcessing worklist:\n");
/* Drain the worklist seeded by find_obviously_necessary_stmts.  */
549 while (VARRAY_ACTIVE_SIZE (worklist) > 0)
551 /* Take `i' from worklist. */
552 i = VARRAY_TOP_TREE (worklist);
553 VARRAY_POP (worklist);
555 if (dump_file && (dump_flags & TDF_DETAILS))
557 fprintf (dump_file, "processing: ");
558 print_generic_stmt (dump_file, i, TDF_SLIM);
559 fprintf (dump_file, "\n");
/* Aggressive mode: also pull in control dependences of the block
   containing this statement.  */
564 /* Mark the last statements of the basic blocks that the block
565 containing `i' is control dependent on, but only if we haven't
   processed that block yet.  */
567 basic_block bb = bb_for_stmt (i);
568 if (! (bb->flags & BB_VISITED))
570 bb->flags |= BB_VISITED;
571 mark_control_dependent_edges_necessary (bb, el);
575 if (TREE_CODE (i) == PHI_NODE)
577 /* PHI nodes are somewhat special in that each PHI alternative has
578 data and control dependencies. All the statements feeding the
579 PHI node's arguments are always necessary. In aggressive mode,
580 we also consider the control dependent edges leading to the
581 predecessor block associated with each PHI alternative as
   necessary.  */
584 for (k = 0; k < PHI_NUM_ARGS (i); k++)
586 tree arg = PHI_ARG_DEF (i, k);
587 if (TREE_CODE (arg) == SSA_NAME)
588 mark_operand_necessary (arg, false);
/* Aggressive mode only: make the edges into each predecessor
   block necessary as well.  */
593 for (k = 0; k < PHI_NUM_ARGS (i); k++)
595 basic_block arg_bb = PHI_ARG_EDGE (i, k)->src;
596 if (! (arg_bb->flags & BB_VISITED))
598 arg_bb->flags |= BB_VISITED;
599 mark_control_dependent_edges_necessary (arg_bb, el);
606 /* Propagate through the operands. Examine all the USE, VUSE and
607 V_MAY_DEF operands in this statement. Mark all the statements
608 which feed this statement's uses as necessary. */
612 get_stmt_operands (i);
614 /* The operands of V_MAY_DEF expressions are also needed as they
615 represent potential definitions that may reach this
616 statement (V_MAY_DEF operands allow us to follow def-def
   links).  */
619 FOR_EACH_SSA_TREE_OPERAND (use, i, iter, SSA_OP_ALL_USES)
620 mark_operand_necessary (use, false);
626 /* Propagate necessity around virtual phi nodes used in kill operands.
627 The reason this isn't done during propagate_necessity is because we don't
628 want to keep phis around that are just there for must-defs, unless we
629 absolutely have to. After we've rewritten the reaching definitions to be
630 correct in the previous part of the fixup routine, we can simply propagate
631 around the information about which of these virtual phi nodes are really
632 used, and set the NECESSARY flag accordingly.
633 Note that we do the minimum here to ensure that we keep alive the phis that
634 are actually used in the corrected SSA form. In particular, some of these
635 phis may now have all of the same operand, and will be deleted by some
   other pass.  */
639 mark_really_necessary_kill_operand_phis (void)
644 /* Seed the worklist with the new virtual phi arguments and virtual
   uses/kills of necessary statements.  */
648 block_stmt_iterator bsi;
651 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
653 if (!is_gimple_reg (PHI_RESULT (phi)) && NECESSARY (phi))
655 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
656 mark_operand_necessary (PHI_ARG_DEF (phi, i), true);
/* Walk statements backwards; only necessary statements contribute.  */
660 for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
662 tree stmt = bsi_stmt (bsi);
664 if (NECESSARY (stmt))
668 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
669 SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
671 tree use = USE_FROM_PTR (use_p);
672 mark_operand_necessary (use, true);
678 /* Mark all virtual phis still in use as necessary, and all of their
679 arguments that are phis as necessary. */
680 while (VARRAY_ACTIVE_SIZE (worklist) > 0)
682 tree use = VARRAY_TOP_TREE (worklist);
683 VARRAY_POP (worklist);
685 for (i = 0; i < PHI_NUM_ARGS (use); i++)
686 mark_operand_necessary (PHI_ARG_DEF (use, i), true);
693 /* Eliminate unnecessary statements. Any instruction not marked as necessary
694 contributes nothing to the program, and can be deleted. */
697 eliminate_unnecessary_stmts (void)
700 block_stmt_iterator i;
702 if (dump_file && (dump_flags & TDF_DETAILS))
703 fprintf (dump_file, "\nEliminating unnecessary statements:\n");
705 clear_special_calls ();
/* Per-block sweep (FOR_EACH_BB loop header is on a line missing from
   this dump).  */
708 /* Remove dead PHI nodes. */
709 remove_dead_phis (bb);
711 /* Remove dead statements. */
712 for (i = bsi_start (bb); ! bsi_end_p (i) ; )
714 tree t = bsi_stmt (i);
718 /* If `i' is not necessary then remove it. */
720 remove_dead_stmt (&i, bb);
/* Kept statements: re-record any special calls for later passes.  */
723 tree call = get_call_expr_in (t);
725 notice_special_calls (call);
732 /* Remove dead PHI nodes from block BB. */
735 remove_dead_phis (basic_block bb)
/* Walk the singly-linked PHI chain, unlinking PHIs whose NECESSARY flag
   was never set.  */
740 phi = phi_nodes (bb);
745 if (! NECESSARY (phi))
747 tree next = PHI_CHAIN (phi);
749 if (dump_file && (dump_flags & TDF_DETAILS))
751 fprintf (dump_file, "Deleting : ");
752 print_generic_stmt (dump_file, phi, TDF_SLIM);
753 fprintf (dump_file, "\n");
756 remove_phi_node (phi, prev, bb);
757 stats.removed_phis++;
/* Necessary PHI: advance (the `prev = phi;' bookkeeping appears to be
   on a line missing from this dump).  */
763 phi = PHI_CHAIN (phi);
768 /* Remove dead statement pointed by iterator I. Receives the basic block BB
769 containing I so that we don't have to look it up. */
772 remove_dead_stmt (block_stmt_iterator *i, basic_block bb)
774 tree t = bsi_stmt (*i);
779 if (dump_file && (dump_flags & TDF_DETAILS))
781 fprintf (dump_file, "Deleting : ");
782 print_generic_stmt (dump_file, t, TDF_SLIM);
783 fprintf (dump_file, "\n");
788 /* If we have determined that a conditional branch statement contributes
789 nothing to the program, then we not only remove it, but we also change
790 the flow graph so that the current block will simply fall-thru to its
791 immediate post-dominator. The blocks we are circumventing will be
792 removed by cleanup_cfg if this change in the flow graph makes them
   unreachable.  */
794 if (is_ctrl_stmt (t))
796 basic_block post_dom_bb;
797 /* The post dominance info has to be up-to-date. */
798 gcc_assert (dom_computed[CDI_POST_DOMINATORS] == DOM_OK);
799 /* Get the immediate post dominator of bb. */
800 post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);
801 /* Some blocks don't have an immediate post dominator. This can happen
802 for example with infinite loops. Removing an infinite loop is an
803 inappropriate transformation anyway... */
810 /* Redirect the first edge out of BB to reach POST_DOM_BB. */
811 redirect_edge_and_branch (EDGE_SUCC (bb, 0), post_dom_bb);
812 PENDING_STMT (EDGE_SUCC (bb, 0)) = NULL;
813 EDGE_SUCC (bb, 0)->probability = REG_BR_PROB_BASE;
814 EDGE_SUCC (bb, 0)->count = bb->count;
816 /* The edge is no longer associated with a conditional, so it does
817 not have TRUE/FALSE flags. */
818 EDGE_SUCC (bb, 0)->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
820 /* If the edge reaches any block other than the exit, then it is a
821 fallthru edge; if it reaches the exit, then it is not a fallthru
   edge.  */
823 if (post_dom_bb != EXIT_BLOCK_PTR)
824 EDGE_SUCC (bb, 0)->flags |= EDGE_FALLTHRU;
826 EDGE_SUCC (bb, 0)->flags &= ~EDGE_FALLTHRU;
828 /* Remove the remaining outgoing edges. */
829 while (EDGE_COUNT (bb->succs) != 1)
830 remove_edge (EDGE_SUCC (bb, 1));
/* Deleting a virtual definition changes reaching defs; queue its base
   variable for SSA renaming.  */
833 FOR_EACH_SSA_DEF_OPERAND (def_p, t, iter,
834 SSA_OP_VIRTUAL_DEFS | SSA_OP_VIRTUAL_KILLS)
836 tree def = DEF_FROM_PTR (def_p);
837 bitmap_set_bit (vars_to_rename,
838 var_ann (SSA_NAME_VAR (def))->uid);
844 /* Print out removed statement statistics. */
849 if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
/* Percentages are computed in float and truncated to int for printing;
   the stats.total == 0 guard appears to be on a missing line.  */
853 percg = ((float) stats.removed / (float) stats.total) * 100;
854 fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
855 stats.removed, stats.total, (int) percg);
857 if (stats.total_phis == 0)
860 percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;
862 fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
863 stats.removed_phis, stats.total_phis, (int) percg);
867 /* Initialization for this pass. Set up the used data structures. */
870 tree_dce_init (bool aggressive)
872 memset ((void *) &stats, 0, sizeof (stats));
/* Aggressive mode allocates the control-dependence structures: one
   bitmap per basic block plus the last_stmt_necessary sbitmap.  */
878 control_dependence_map
879 = xmalloc (last_basic_block * sizeof (bitmap));
880 for (i = 0; i < last_basic_block; ++i)
881 control_dependence_map[i] = BITMAP_XMALLOC ();
883 last_stmt_necessary = sbitmap_alloc (last_basic_block);
884 sbitmap_zero (last_stmt_necessary);
/* Common to both modes: processed-names bitmap and worklist.  */
887 processed = sbitmap_alloc (num_ssa_names + 1);
888 sbitmap_zero (processed);
890 VARRAY_TREE_INIT (worklist, 64, "work list");
893 /* Cleanup after this pass. */
896 tree_dce_done (bool aggressive)
/* Release aggressive-mode control-dependence structures, then the data
   common to both modes (mirrors tree_dce_init).  */
902 for (i = 0; i < last_basic_block; ++i)
903 BITMAP_XFREE (control_dependence_map[i]);
904 free (control_dependence_map);
906 sbitmap_free (last_stmt_necessary);
909 sbitmap_free (processed);
912 /* Main routine to eliminate dead code.
914 AGGRESSIVE controls the aggressiveness of the algorithm.
915 In conservative mode, we ignore control dependence and simply declare
916 all but the most trivially dead branches necessary. This mode is fast.
917 In aggressive mode, control dependences are taken into account, which
918 results in more dead code elimination, but at the cost of some time.
920 FIXME: Aggressive mode before PRE doesn't work currently because
921 the dominance info is not invalidated after DCE1. This is
922 not an issue right now because we only run aggressive DCE
923 as the last tree SSA pass, but keep this in mind when you
924 start experimenting with pass ordering. */
927 perform_tree_ssa_dce (bool aggressive)
929 struct edge_list *el = NULL;
931 tree_dce_init (aggressive);
/* Aggressive mode: build post-dominators and the edge list for
   control-dependence analysis; EL stays NULL otherwise.  */
935 /* Compute control dependence. */
936 timevar_push (TV_CONTROL_DEPENDENCES);
937 calculate_dominance_info (CDI_POST_DOMINATORS);
938 el = create_edge_list ();
939 find_all_control_dependences (el);
940 timevar_pop (TV_CONTROL_DEPENDENCES);
942 mark_dfs_back_edges ();
/* Phase 1: seed; phase 2: propagate; phase 3: delete.  */
945 find_obviously_necessary_stmts (el);
947 propagate_necessity (el);
949 mark_really_necessary_kill_operand_phis ();
950 eliminate_unnecessary_stmts ();
953 free_dominance_info (CDI_POST_DOMINATORS);
955 /* Debugging dumps. */
959 tree_dce_done (aggressive);
964 /* Pass entry points. */
/* tree_ssa_dce: conservative DCE.  tree_ssa_cd_dce: control-dependence
   DCE at -O2 and above.  gate_dce: enabled iff -ftree-dce.  (The
   function headers are on lines missing from this dump.)  */
968 perform_tree_ssa_dce (/*aggressive=*/false);
972 tree_ssa_cd_dce (void)
974 perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
980 return flag_tree_dce != 0;
983 struct tree_opt_pass pass_dce =
/* Conservative DCE pass descriptor; requires CFG, SSA and alias info.  */
987 tree_ssa_dce, /* execute */
990 0, /* static_pass_number */
991 TV_TREE_DCE, /* tv_id */
992 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
993 0, /* properties_provided */
994 0, /* properties_destroyed */
995 0, /* todo_flags_start */
996 TODO_dump_func | TODO_fix_def_def_chains | TODO_cleanup_cfg | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
/* Aggressive (control-dependence) DCE pass descriptor; identical to
   pass_dce except for the gate, timevar and extra flow verification.  */
1000 struct tree_opt_pass pass_cd_dce =
1003 gate_dce, /* gate */
1004 tree_ssa_cd_dce, /* execute */
1007 0, /* static_pass_number */
1008 TV_TREE_CD_DCE, /* tv_id */
1009 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
1010 0, /* properties_provided */
1011 0, /* properties_destroyed */
1012 0, /* todo_flags_start */
1013 TODO_dump_func | TODO_fix_def_def_chains | TODO_cleanup_cfg | TODO_ggc_collect | TODO_verify_ssa | TODO_verify_flow,
1014 /* todo_flags_finish */