1 /* Dead-code elimination pass for the GNU compiler.
2 Copyright (C) 2000 Free Software Foundation, Inc.
3 Written by Jeffrey D. Oldham <oldham@codesourcery.com>.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 2, or (at your option) any
12 GNU CC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* Dead-code elimination is the removal of instructions which have no
23 impact on the program's output. "Dead instructions" have no impact
24 on the program's output, while "necessary instructions" may have
27 The algorithm consists of three phases:
28 1) marking as necessary all instructions known to be necessary,
29 e.g., writing a value to memory,
30 2) propagating necessary instructions, e.g., the instructions
31 giving values to operands in necessary instructions, and
32 3) removing dead instructions (except replacing dead conditionals
33 with unconditional jumps).
36 The last step can require adding labels, deleting insns, and
37 modifying basic block structures. Some conditional jumps may be
38 converted to unconditional jumps so the control-flow graph may be
41 Edges from some infinite loops to the exit block can be added to
42 the control-flow graph.
45 We decided to not simultaneously perform jump optimization and dead
46 loop removal during dead-code elimination. Thus, all jump
47 instructions originally present remain after dead-code elimination
48 but 1) unnecessary conditional jump instructions are changed to
49 unconditional jump instructions and 2) all unconditional jump
53 1) SSA has been performed.
54 2) The basic block and control-flow graph structures are accurate.
55 3) The flow graph permits constructing an edge_list.
56 4) note rtxes should be saved.
59 When replacing unnecessary conditional jumps with unconditional
60 jumps, the control-flow graph is not updated. It should be.
63 Building an Optimizing Compiler
65 Butterworth-Heinemann, 1998
73 #include "hard-reg-set.h"
74 #include "basic-block.h"
76 #include "insn-config.h"
80 /* We cannot use <assert.h> in GCC source, since that would include
81 GCC's assert.h, which may not be compatible with the host compiler. */
/* The do { ... } while (0) wrapper makes the macro a single statement,
   so it is safe in unbraced if/else bodies; a failed check calls
   abort (), matching GCC's internal-consistency convention.
   NOTE(review): the surrounding conditional preprocessor lines are
   elided from this numbered listing -- the leading "# " suggests this
   define sits inside an #ifdef/#ifndef.  */
86 # define assert(e) do { if (! (e)) abort (); } while (0)
89 /* A map from blocks to the edges on which they are control dependent. */
/* NOTE(review): the "typedef struct {" opener and the field
   declarations themselves (per usage below: a bitmap array `data` and
   an integer `length`) are elided from this numbered listing; only the
   field comments and the closing typedef names survive.  */
91 /* A dynamically allocated array. The Nth element corresponds to
92 the block with index N + 2. The Ith bit in the bitmap is set if
93 that block is dependent on the Ith edge. */
95 /* The number of elements in the array. */
97 } control_dependent_block_to_edge_map_s, *control_dependent_block_to_edge_map;
99 /* Local function prototypes. */
/* PARAMS wraps prototype argument lists so the file also compiles with
   pre-ISO (K&R) host compilers -- standard GCC portability idiom of
   this era.  */
100 static control_dependent_block_to_edge_map control_dependent_block_to_edge_map_create
101 PARAMS((size_t num_basic_blocks));
102 static void set_control_dependent_block_to_edge_map_bit
103 PARAMS ((control_dependent_block_to_edge_map c, basic_block bb,
105 static void control_dependent_block_to_edge_map_free
106 PARAMS ((control_dependent_block_to_edge_map c));
107 static void find_all_control_dependences
108 PARAMS ((struct edge_list *el, int *pdom,
109 control_dependent_block_to_edge_map cdbte));
110 static void find_control_dependence
111 PARAMS ((struct edge_list *el, int edge_index, int *pdom,
112 control_dependent_block_to_edge_map cdbte));
113 static basic_block find_pdom
114 PARAMS ((int *pdom, basic_block block));
115 static int inherently_necessary_register_1
116 PARAMS ((rtx *current_rtx, void *data));
117 static int inherently_necessary_register
118 PARAMS ((rtx current_rtx));
119 static int find_inherently_necessary
120 PARAMS ((rtx current_rtx));
121 static int propagate_necessity_through_operand
122 PARAMS ((rtx *current_rtx, void *data));
124 /* Unnecessary insns are indicated using insns' in_struct bit. */
/* Dead/necessary status is a single per-insn flag; these three macros
   are the only way the pass reads or writes it.  */
126 /* Indicate INSN is dead-code; returns nothing. */
127 #define KILL_INSN(INSN) INSN_DEAD_CODE_P(INSN) = 1
128 /* Indicate INSN is necessary, i.e., not dead-code; returns nothing. */
129 #define RESURRECT_INSN(INSN) INSN_DEAD_CODE_P(INSN) = 0
130 /* Return nonzero if INSN is unnecessary. */
131 #define UNNECESSARY_P(INSN) INSN_DEAD_CODE_P(INSN)
132 static void mark_all_insn_unnecessary
/* NOTE(review): mark_all_insn_unnecessary's PARAMS list is elided from
   this numbered listing.  */
134 /* Execute CODE with free variable INSN for all unnecessary insns in
135 an unspecified order, producing no output. */
136 #define EXECUTE_IF_UNNECESSARY(INSN, CODE) \
140 for (INSN = get_insns (); INSN != NULL_RTX; INSN = NEXT_INSN (INSN)) \
141 if (INSN_DEAD_CODE_P (INSN)) { \
/* NOTE(review): the tail of EXECUTE_IF_UNNECESSARY (the CODE expansion
   and its closing brace) is elided from this numbered listing.  */
145 /* Find the label beginning block BB. */
146 static rtx find_block_label
147 PARAMS ((basic_block bb));
148 /* Remove INSN, updating its basic block structure. */
149 static void delete_insn_bb
152 /* Recording which blocks are control dependent on which edges. We
153 expect each block to be control dependent on very few edges so we
154 use a bitmap for each block recording its edges. An array holds
155 the bitmap. Its position 0 entry holds the bitmap for block
156 INVALID_BLOCK+1 so that all blocks, including the entry and exit
157 blocks can participate in the data structure. */
159 /* Create a control_dependent_block_to_edge_map, given the number
160 NUM_BASIC_BLOCKS of non-entry, non-exit basic blocks, e.g.,
161 n_basic_blocks. This memory must be released using
162 control_dependent_block_to_edge_map_free (). */
164 static control_dependent_block_to_edge_map
165 control_dependent_block_to_edge_map_create (num_basic_blocks)
166 size_t num_basic_blocks;
/* Allocate the map itself, then LENGTH empty bitmaps -- one per block.
   LENGTH is biased by INVALID_BLOCK+1 so that the entry and exit
   blocks get slots too (see the data-structure comment above).
   NOTE(review): the opening brace, the declaration of I, and the
   "return c;" lines are elided from this numbered listing.  */
169 control_dependent_block_to_edge_map c
170 = xmalloc (sizeof (control_dependent_block_to_edge_map_s));
171 c->length = num_basic_blocks - (INVALID_BLOCK+1);
172 c->data = xmalloc ((size_t) c->length*sizeof (bitmap));
173 for (i = 0; i < c->length; ++i)
174 c->data[i] = BITMAP_XMALLOC ();
179 /* Indicate block BB is control dependent on an edge with index
180 EDGE_INDEX in the mapping C of blocks to edges on which they are
181 control-dependent. */
184 set_control_dependent_block_to_edge_map_bit (c, bb, edge_index)
185 control_dependent_block_to_edge_map c;
/* BB's bitmap lives at index bb->index - (INVALID_BLOCK+1); the bias
   lets the entry and exit blocks participate as well.  NOTE(review):
   the remaining parameter declarations, the braces, and the
   EDGE_INDEX argument of the bitmap_set_bit call are elided from this
   numbered listing.  */
189 assert(bb->index - (INVALID_BLOCK+1) < c->length);
190 bitmap_set_bit (c->data[bb->index - (INVALID_BLOCK+1)],
194 /* Execute CODE for each edge (given number EDGE_NUMBER within the
195 CODE) for which the block containing INSN is control dependent,
196 returning no output. CDBTE is the mapping of blocks to edges on
197 which they are control-dependent. */
/* Implemented with EXECUTE_IF_SET_IN_BITMAP over the bitmap of the
   block containing INSN; the same INVALID_BLOCK+1 index bias as the
   map's accessors applies.  NOTE(review): the macro's trailing
   continuation lines are elided from this numbered listing, so the
   expansion here is truncated.  */
199 #define EXECUTE_IF_CONTROL_DEPENDENT(CDBTE, INSN, EDGE_NUMBER, CODE) \
200 EXECUTE_IF_SET_IN_BITMAP \
201 (CDBTE->data[BLOCK_NUM (INSN) - (INVALID_BLOCK+1)], 0, \
204 /* Destroy a control_dependent_block_to_edge_map C. */
207 control_dependent_block_to_edge_map_free (c)
208 control_dependent_block_to_edge_map c;
/* Release each per-block bitmap.  NOTE(review): the braces, the
   declaration of I, and -- presumably -- the frees of c->data and of C
   itself are elided from this numbered listing; confirm against the
   full source.  */
211 for (i = 0; i < c->length; ++i)
212 BITMAP_XFREE (c->data[i]);
216 /* Record all blocks' control dependences on all edges in the edge
217 list EL, ala Morgan, Section 3.6. The mapping PDOM of blocks to
218 their postdominators are used, and results are stored in CDBTE,
219 which should be empty. */
222 find_all_control_dependences (el, pdom, cdbte)
223 struct edge_list *el;
225 control_dependent_block_to_edge_map cdbte;
/* One find_control_dependence call per edge covers every (block, edge)
   control dependence (Morgan, Section 3.6).  NOTE(review): the PDOM
   parameter declaration, the braces, and the declaration of I are
   elided from this numbered listing.  */
229 for (i = 0; i < NUM_EDGES (el); ++i)
230 find_control_dependence (el, i, pdom, cdbte);
233 /* Determine all blocks' control dependences on the given edge with
234 edge_list EL index EDGE_INDEX, ala Morgan, Section 3.6. The
235 mapping PDOM of blocks to their postdominators are used, and
236 results are stored in CDBTE, which is assumed to be initialized
237 with zeros in each (block b', edge) position. */
240 find_control_dependence (el, edge_index, pdom, cdbte)
241 struct edge_list *el;
244 control_dependent_block_to_edge_map cdbte;
246 basic_block current_block;
247 basic_block ending_block;
/* Morgan, Section 3.6: every block on the postdominator chain from the
   edge's successor up to -- but not including -- the postdominator of
   the edge's predecessor is control dependent on the edge.  An edge
   cannot originate at the exit block (asserted below).  NOTE(review):
   some parameter declarations and statement lines (including part of
   the ENDING_BLOCK initialization and the set-bit call's arguments)
   are elided from this numbered listing.  */
249 assert (INDEX_EDGE_PRED_BB (el, edge_index) != EXIT_BLOCK_PTR);
251 (INDEX_EDGE_PRED_BB (el, edge_index) == ENTRY_BLOCK_PTR)
253 : find_pdom (pdom, INDEX_EDGE_PRED_BB (el, edge_index));
255 for (current_block = INDEX_EDGE_SUCC_BB (el, edge_index);
256 current_block != ending_block && current_block != EXIT_BLOCK_PTR;
257 current_block = find_pdom (pdom, current_block))
259 set_control_dependent_block_to_edge_map_bit (cdbte,
265 /* Find the immediate postdominator PDOM of the specified basic block
266 BLOCK. This function is necessary because some blocks have
270 find_pdom (pdom, block)
/* Map BLOCK to its immediate postdominator via the PDOM array, with
   special cases for blocks that have no PDOM entry of their own: the
   entry block maps to the first real block, and the exit block (or any
   block whose recorded postdominator is EXIT_BLOCK) maps to
   EXIT_BLOCK_PTR.  NOTE(review): the parameter declarations and braces
   are elided from this numbered listing.  */
274 assert (block != NULL);
275 assert (block->index != INVALID_BLOCK);
276 if (block == ENTRY_BLOCK_PTR)
277 return BASIC_BLOCK (0);
278 else if (block == EXIT_BLOCK_PTR || pdom[block->index] == EXIT_BLOCK)
279 return EXIT_BLOCK_PTR;
281 return BASIC_BLOCK (pdom[block->index]);
284 /* Determine if the given CURRENT_RTX uses a hard register not
285 converted to SSA. Returns nonzero only if it uses such a hard
286 register. DATA is not used.
288 The program counter (PC) is not considered inherently necessary
289 since code should be position-independent and thus not depend on
290 particular PC values. */
293 inherently_necessary_register_1 (current_rtx, data)
295 void *data ATTRIBUTE_UNUSED;
297 rtx x = *current_rtx;
/* for_each_rtx () callback: the interesting case is a REG that was NOT
   converted to SSA (a hard register), per the CONVERT_REGISTER_TO_SSA_P
   test below; pc_rtx is explicitly excluded (see the comment above the
   function).  NOTE(review): most of the switch body -- the case
   labels, the CLOBBER handling, the return values, and the default --
   is elided from this numbered listing.  */
301 switch (GET_CODE (x))
304 /* Do not traverse the rest of the clobber. */
311 if (CONVERT_REGISTER_TO_SSA_P (REGNO (x)) || x == pc_rtx)
322 /* Return nonzero if the insn CURRENT_RTX is inherently necessary. */
325 inherently_necessary_register (current_rtx)
/* Scan all sub-rtxs of CURRENT_RTX with for_each_rtx (); the result is
   nonzero iff inherently_necessary_register_1 flags some sub-rtx,
   i.e. CURRENT_RTX uses a hard register not converted to SSA.
   for_each_rtx takes the address of its rtx argument, hence
   &current_rtx -- the '&' had been lost to an "&curren;" HTML-entity
   mangling ('&curren' + 't_rtx' collapsing to a currency sign).  */
328 return for_each_rtx (&current_rtx,
329 &inherently_necessary_register_1, NULL);
332 /* Mark X as inherently necessary if appropriate. For example,
333 function calls and storing values into memory are inherently
334 necessary. This function is to be used with for_each_rtx ().
335 Return nonzero iff inherently necessary. */
338 find_inherently_necessary (x)
/* Classify insn X as inherently necessary or not.  Visible criteria:
   use of a hard register not in SSA, jump-table data or computed
   jumps, SETs whose destination is memory, and asm statements.
   NOTE(review): many branch bodies, case labels, and return statements
   are elided from this numbered listing, so the full classification
   cannot be read off this excerpt.  */
344 else if (inherently_necessary_register (x))
347 switch (GET_CODE (x))
356 return JUMP_TABLE_DATA_P (x) || computed_jump_p (x) != 0;
359 pattern = PATTERN (x);
360 switch (GET_CODE (pattern))
367 return GET_CODE (SET_DEST (pattern)) == MEM;
375 /* We treat assembler instructions as inherently
376 necessary, and we hope that its operands do not need to
384 /* Found an impossible insn type. */
390 /* Propagate necessity through REG and SUBREG operands of CURRENT_RTX.
391 This function is called with for_each_rtx () on necessary
392 instructions. The DATA must be a varray of unprocessed
396 propagate_necessity_through_operand (current_rtx, data)
400 rtx x = *current_rtx;
401 varray_type *unprocessed_instructions = (varray_type *) data;
/* for_each_rtx () callback, run on each operand of a necessary insn:
   for a REG converted to SSA, the insn defining it
   (ssa_definition[REGNO]) becomes necessary in turn; a newly
   resurrected definition is pushed on the DATA worklist so its own
   operands get scanned.  NOTE(review): the parameter declarations,
   braces, and the non-REG switch cases are elided from this numbered
   listing.  */
405 switch ( GET_CODE (x))
408 if (CONVERT_REGISTER_TO_SSA_P (REGNO (x)))
410 rtx insn = VARRAY_RTX (ssa_definition, REGNO (x));
411 if (insn != NULL_RTX && UNNECESSARY_P (insn))
413 RESURRECT_INSN (insn);
414 VARRAY_PUSH_RTX (*unprocessed_instructions, insn);
424 /* Indicate all insns initially assumed to be unnecessary. */
427 mark_all_insn_unnecessary ()
/* Walk the whole insn chain.  NOTE(review): the loop body is elided
   from this numbered listing -- presumably it applies KILL_INSN to
   each insn, matching the "initially assumed to be unnecessary"
   comment above; confirm against the full source.  */
430 for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
434 /* Find the label beginning block BB, adding one if necessary. */
437 find_block_label (bb)
/* Emit a fresh label before the block's first insn and, when that insn
   was the block head, make the label the new head.  NOTE(review): the
   declarations, the check for an already-present CODE_LABEL, and the
   return statements are elided from this numbered listing.  */
445 rtx new_label = emit_label_before (gen_label_rtx (), insn);
446 if (insn == bb->head)
447 bb->head = new_label;
452 /* Remove INSN, updating its basic block structure. */
455 delete_insn_bb (insn)
459 assert (insn != NULL_RTX);
460 bb = BLOCK_FOR_INSN (insn);
/* Keep the basic block non-empty: if INSN is the block's only insn,
   neuter it in place by turning it into a NOTE_INSN_DELETED note
   rather than unlinking it; otherwise move the block's head/end past
   INSN before the real deletion.  NOTE(review): the declarations,
   braces, and the final delete call are elided from this numbered
   listing.  */
462 if (bb->head == bb->end)
464 /* Delete the insn by converting it to a note. */
465 PUT_CODE (insn, NOTE);
466 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
469 else if (insn == bb->head)
470 bb->head = NEXT_INSN (insn);
471 else if (insn == bb->end)
472 bb->end = PREV_INSN (insn);
476 /* Perform the dead-code elimination. */
479 eliminate_dead_code ()
/* Entry point of the pass: mark every insn dead, resurrect the
   inherently necessary ones, propagate necessity through SSA operands
   and control dependences with a worklist, then delete what remains
   dead -- rewriting dead conditional jumps as unconditional jumps to
   the immediate postdominator.  The only code change relative to the
   listing is restoring "&current_instruction" in the for_each_rtx
   call, whose '&' had been lost to an "&curren;" HTML-entity mangling.
   NOTE(review): various declaration and brace lines are elided from
   this numbered listing.  */
483 /* Necessary instructions with operands to explore. */
484 varray_type unprocessed_instructions;
485 /* Map element (b,e) is nonzero if the block is control dependent on
486 edge. "cdbte" abbreviates control dependent block to edge. */
487 control_dependent_block_to_edge_map cdbte;
488 sbitmap *postdominators;
489 /* Element I is the immediate postdominator of block I. */
491 struct edge_list *el;
493 int max_insn_uid = get_max_uid ();
495 /* Initialize the data structures. */
496 mark_all_insn_unnecessary ();
497 VARRAY_RTX_INIT (unprocessed_instructions, 64,
498 "unprocessed instructions");
499 cdbte = control_dependent_block_to_edge_map_create (n_basic_blocks);
501 /* Prepare for use of BLOCK_NUM (). */
502 connect_infinite_loops_to_exit ();
503 /* Be careful not to clear the added edges. */
504 compute_bb_for_insn (max_insn_uid);
506 /* Compute control dependence. */
507 postdominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
508 compute_flow_dominators (NULL, postdominators);
509 pdom = (int *) xmalloc (n_basic_blocks * sizeof (int));
510 for (i = 0; i < n_basic_blocks; ++i)
511 pdom[i] = INVALID_BLOCK;
512 compute_immediate_postdominators (pdom, postdominators);
513 /* Assume there is a path from each node to the exit block. */
514 for (i = 0; i < n_basic_blocks; ++i)
515 if (pdom[i] == INVALID_BLOCK)
516 pdom[i] = EXIT_BLOCK;
517 sbitmap_vector_free (postdominators);
518 el = create_edge_list();
519 find_all_control_dependences (el, pdom, cdbte);
521 /* Find inherently necessary instructions. */
522 for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
523 if (find_inherently_necessary (insn))
525 RESURRECT_INSN (insn);
526 VARRAY_PUSH_RTX (unprocessed_instructions, insn);
529 /* Propagate necessity using the operands of necessary instructions. */
530 while (VARRAY_ACTIVE_SIZE (unprocessed_instructions) > 0)
532 rtx current_instruction;
535 current_instruction = VARRAY_TOP_RTX (unprocessed_instructions);
536 VARRAY_POP (unprocessed_instructions);
538 /* Make corresponding control dependent edges necessary. */
539 /* Assume the only JUMP_INSN is the block's last insn. It appears
540 that the last instruction of the program need not be a
543 if (INSN_P (current_instruction)
544 && !JUMP_TABLE_DATA_P (current_instruction))
546 /* Notes and labels contain no interesting operands. */
547 EXECUTE_IF_CONTROL_DEPENDENT
548 (cdbte, current_instruction, edge_number,
550 rtx jump_insn = (INDEX_EDGE_PRED_BB (el, edge_number))->end;
551 if (GET_CODE (jump_insn) == JUMP_INSN &&
552 UNNECESSARY_P (jump_insn)) {
553 RESURRECT_INSN (jump_insn);
554 VARRAY_PUSH_RTX (unprocessed_instructions, jump_insn);
558 /* Propagate through the operands. */
559 for_each_rtx (&current_instruction,
560 &propagate_necessity_through_operand,
561 (PTR) &unprocessed_instructions);
566 /* Remove the unnecessary instructions. */
567 EXECUTE_IF_UNNECESSARY (insn,
569 if (any_condjump_p (insn))
571 /* Convert unnecessary conditional insn to an unconditional
572 jump to immediate postdominator block. */
573 rtx old_label = JUMP_LABEL (insn);
574 int pdom_block_number =
575 find_pdom (pdom, BLOCK_FOR_INSN (insn))->index;
577 /* Prevent the conditional jump's label from being deleted so
578 we do not have to modify the basic block structure. */
579 ++LABEL_NUSES (old_label);
581 if (pdom_block_number != EXIT_BLOCK
582 && pdom_block_number != INVALID_BLOCK)
584 rtx lbl = find_block_label (BASIC_BLOCK (pdom_block_number));
585 rtx new_jump = emit_jump_insn_before (gen_jump (lbl), insn);
587 /* Let jump know that label is in use. */
588 JUMP_LABEL (new_jump) = lbl;
591 delete_insn_bb (insn);
593 /* A conditional branch is unnecessary if and only if any
594 block control-dependent on it is unnecessary. Thus,
595 any phi nodes in these unnecessary blocks are also
596 removed and these nodes need not be updated. */
598 /* A barrier must follow any unconditional jump. Barriers
599 are not in basic blocks so this must occur after
600 deleting the conditional jump. */
601 emit_barrier_after (new_jump);
604 /* The block drops off the end of the function and the
605 ending conditional jump is not needed. */
606 delete_insn_bb (insn);
608 else if (!JUMP_P (insn))
609 delete_insn_bb (insn);
612 /* Release allocated memory. */
613 for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
614 RESURRECT_INSN (insn);
615 assert (VARRAY_ACTIVE_SIZE(unprocessed_instructions) == 0);
616 VARRAY_FREE (unprocessed_instructions);
617 control_dependent_block_to_edge_map_free (cdbte);