1 /* Dead-code elimination pass for the GNU compiler.
2 Copyright (C) 2000 Free Software Foundation, Inc.
3 Written by Jeffrey D. Oldham <oldham@codesourcery.com>.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 2, or (at your option) any
12 GNU CC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* Dead-code elimination is the removal of instructions which have no
23 impact on the program's output. "Dead instructions" have no impact
24 on the program's output, while "necessary instructions" may have
27 The algorithm consists of three phases:
28 1) marking as necessary all instructions known to be necessary,
29 e.g., writing a value to memory,
30 2) propagating necessary instructions, e.g., the instructions
31 giving values to operands in necessary instructions, and
32 3) removing dead instructions (except replacing dead conditionals
33 with unconditional jumps).
36 The last step can require adding labels, deleting insns, and
37 modifying basic block structures. Some conditional jumps may be
38 converted to unconditional jumps so the control-flow graph may be
41 Edges from some infinite loops to the exit block can be added to
42 the control-flow graph.
45 We decided to not simultaneously perform jump optimization and dead
46 loop removal during dead-code elimination. Thus, all jump
47 instructions originally present remain after dead-code elimination
48 but 1) unnecessary conditional jump instructions are changed to
49 unconditional jump instructions and 2) all unconditional jump
53 1) SSA has been performed.
54 2) The basic block and control-flow graph structures are accurate.
55 3) The flow graph permits constructing an edge_list.
56 4) note rtxes should be saved.
59 When replacing unnecessary conditional jumps with unconditional
60 jumps, the control-flow graph is not updated. It should be.
63 Building an Optimizing Compiler
65 Butterworth-Heinemann, 1998
73 #include "hard-reg-set.h"
74 #include "basic-block.h"
76 #include "insn-config.h"
81 /* A map from blocks to the edges on which they are control dependent. */
/* NOTE(review): this copy of the file is truncated -- the struct's opening
   line and its field declarations (presumably a "bitmap *data;" array and a
   length field, judging from the accessors below) are missing here.  */
83 /* A dynamically allocated array. The Nth element corresponds to
84 the block with index N + 2. The Ith bit in the bitmap is set if
85 that block is dependent on the Ith edge. */
87 /* The number of elements in the array. */
89 } control_dependent_block_to_edge_map_s, *control_dependent_block_to_edge_map;
91 /* Local function prototypes. */
/* NOTE(review): some prototype continuation lines are missing from this
   copy, e.g. the trailing parameter line (the edge index) of
   set_control_dependent_block_to_edge_map_bit.  */
92 static control_dependent_block_to_edge_map control_dependent_block_to_edge_map_create
93 PARAMS((size_t num_basic_blocks));
94 static void set_control_dependent_block_to_edge_map_bit
95 PARAMS ((control_dependent_block_to_edge_map c, basic_block bb,
97 static void control_dependent_block_to_edge_map_free
98 PARAMS ((control_dependent_block_to_edge_map c));
99 static void find_all_control_dependences
100 PARAMS ((struct edge_list *el, int *pdom,
101 control_dependent_block_to_edge_map cdbte));
102 static void find_control_dependence
103 PARAMS ((struct edge_list *el, int edge_index, int *pdom,
104 control_dependent_block_to_edge_map cdbte));
105 static basic_block find_pdom
106 PARAMS ((int *pdom, basic_block block));
/* Callback for for_each_rtx (); see its definition below.  */
107 static int inherently_necessary_register_1
108 PARAMS ((rtx *current_rtx, void *data));
109 static int inherently_necessary_register
110 PARAMS ((rtx current_rtx));
111 static int find_inherently_necessary
112 PARAMS ((rtx current_rtx));
/* Callback for for_each_rtx (); see its definition below.  */
113 static int propagate_necessity_through_operand
114 PARAMS ((rtx *current_rtx, void *data));
116 /* Unnecessary insns are indicated using insns' in_struct bit. */
/* KILL_INSN / RESURRECT_INSN / UNNECESSARY_P are thin wrappers over the
   INSN_DEAD_CODE_P flag so the marking scheme can be changed in one place.  */
118 /* Indicate INSN is dead-code; returns nothing. */
119 #define KILL_INSN(INSN) INSN_DEAD_CODE_P(INSN) = 1
120 /* Indicate INSN is necessary, i.e., not dead-code; returns nothing. */
121 #define RESURRECT_INSN(INSN) INSN_DEAD_CODE_P(INSN) = 0
122 /* Return nonzero if INSN is unnecessary. */
123 #define UNNECESSARY_P(INSN) INSN_DEAD_CODE_P(INSN)
124 static void mark_all_insn_unnecessary
/* NOTE(review): the PARAMS ((void)) line of the prototype above, and the
   interior of the macro below (the lines that splice CODE in and close the
   statement), are missing from this truncated copy.  */
126 /* Execute CODE with free variable INSN for all unnecessary insns in
127 an unspecified order, producing no output. */
128 #define EXECUTE_IF_UNNECESSARY(INSN, CODE) \
132 for (INSN = get_insns (); INSN != NULL_RTX; INSN = NEXT_INSN (INSN)) \
133 if (INSN_DEAD_CODE_P (INSN)) { \
137 /* Find the label beginning block BB. */
138 static rtx find_block_label
139 PARAMS ((basic_block bb));
140 /* Remove INSN, updating its basic block structure. */
141 static void delete_insn_bb
/* NOTE(review): the PARAMS ((rtx insn)) line of this prototype is missing
   from this truncated copy.  */
144 /* Recording which blocks are control dependent on which edges. We
145 expect each block to be control dependent on very few edges so we
146 use a bitmap for each block recording its edges. An array holds
147 the bitmap. Its position 0 entry holds the bitmap for block
148 INVALID_BLOCK+1 so that all blocks, including the entry and exit
149 blocks can participate in the data structure. */
151 /* Create a control_dependent_block_to_edge_map, given the number
152 NUM_BASIC_BLOCKS of non-entry, non-exit basic blocks, e.g.,
153 n_basic_blocks. This memory must be released using
154 control_dependent_block_to_edge_map_free (). */
156 static control_dependent_block_to_edge_map
157 control_dependent_block_to_edge_map_create (num_basic_blocks)
158 size_t num_basic_blocks;
/* NOTE(review): the opening brace and the declaration of loop index I are
   missing from this truncated copy, as is the final "return c;".  */
161 control_dependent_block_to_edge_map c
162 = xmalloc (sizeof (control_dependent_block_to_edge_map_s));
/* Length is offset by INVALID_BLOCK+1 so entry/exit blocks (which have
   negative indices) also get a slot -- matches the indexing in
   set_control_dependent_block_to_edge_map_bit below.  */
163 c->length = num_basic_blocks - (INVALID_BLOCK+1);
164 c->data = xmalloc ((size_t) c->length*sizeof (bitmap));
/* One empty bitmap of control-dependence edges per block.  */
165 for (i = 0; i < c->length; ++i)
166 c->data[i] = BITMAP_XMALLOC ();
171 /* Indicate block BB is control dependent on an edge with index
172 EDGE_INDEX in the mapping C of blocks to edges on which they are
173 control-dependent. */
176 set_control_dependent_block_to_edge_map_bit (c, bb, edge_index)
177 control_dependent_block_to_edge_map c;
/* NOTE(review): the "static void" line, the BB/EDGE_INDEX parameter
   declarations, the body of the bounds-check branch (presumably an abort),
   and the second argument of bitmap_set_bit (presumably edge_index) are
   missing from this truncated copy.  */
/* Bounds check: BB's slot index must fall inside the allocated array.  */
181 if (bb->index - (INVALID_BLOCK+1) >= c->length)
/* Same INVALID_BLOCK+1 offset used at creation time, so entry/exit
   blocks map onto valid array slots.  */
184 bitmap_set_bit (c->data[bb->index - (INVALID_BLOCK+1)],
188 /* Execute CODE for each edge (given number EDGE_NUMBER within the
189 CODE) for which the block containing INSN is control dependent,
190 returning no output. CDBTE is the mapping of blocks to edges on
191 which they are control-dependent. */
/* NOTE(review): the tail of this macro (the EDGE_NUMBER/CODE arguments to
   EXECUTE_IF_SET_IN_BITMAP) is missing from this truncated copy.  */
193 #define EXECUTE_IF_CONTROL_DEPENDENT(CDBTE, INSN, EDGE_NUMBER, CODE) \
194 EXECUTE_IF_SET_IN_BITMAP \
195 (CDBTE->data[BLOCK_NUM (INSN) - (INVALID_BLOCK+1)], 0, \
198 /* Destroy a control_dependent_block_to_edge_map C. */
/* NOTE(review): the "static void" line, opening brace, loop-index
   declaration, and the final free()s of c->data and c itself are missing
   from this truncated copy.  */
201 control_dependent_block_to_edge_map_free (c)
202 control_dependent_block_to_edge_map c;
/* Release each per-block bitmap allocated by ..._map_create.  */
205 for (i = 0; i < c->length; ++i)
206 BITMAP_XFREE (c->data[i]);
210 /* Record all blocks' control dependences on all edges in the edge
211 list EL, ala Morgan, Section 3.6. The mapping PDOM of blocks to
212 their postdominators are used, and results are stored in CDBTE,
213 which should be empty. */
/* NOTE(review): the "static void" line, the "int *pdom;" parameter
   declaration, the opening brace, and the loop-index declaration are
   missing from this truncated copy.  */
216 find_all_control_dependences (el, pdom, cdbte)
217 struct edge_list *el;
219 control_dependent_block_to_edge_map cdbte;
/* Accumulate, edge by edge, which blocks are control dependent on it.  */
223 for (i = 0; i < NUM_EDGES (el); ++i)
224 find_control_dependence (el, i, pdom, cdbte);
227 /* Determine all blocks' control dependences on the given edge with
228 edge_list EL index EDGE_INDEX, ala Morgan, Section 3.6. The
229 mapping PDOM of blocks to their postdominators are used, and
230 results are stored in CDBTE, which is assumed to be initialized
231 with zeros in each (block b', edge) position. */
/* NOTE(review): truncated copy -- missing are the "static void" line, the
   edge_index/pdom parameter declarations, the body of the EXIT_BLOCK_PTR
   guard (presumably an abort), the assignment of ending_block, and the
   second argument line of the set_..._bit call.  */
234 find_control_dependence (el, edge_index, pdom, cdbte)
235 struct edge_list *el;
238 control_dependent_block_to_edge_map cdbte;
240 basic_block current_block;
241 basic_block ending_block;
/* An edge leaving the exit block cannot induce control dependence.  */
243 if (INDEX_EDGE_PRED_BB (el, edge_index) == EXIT_BLOCK_PTR)
/* Ending block: the postdominator of the edge's source (special-cased
   for the entry block).  */
246 (INDEX_EDGE_PRED_BB (el, edge_index) == ENTRY_BLOCK_PTR)
248 : find_pdom (pdom, INDEX_EDGE_PRED_BB (el, edge_index));
/* Walk the postdominator chain from the edge's destination up to (but
   not including) the ending block; every block visited is control
   dependent on this edge (Morgan, Section 3.6).  */
250 for (current_block = INDEX_EDGE_SUCC_BB (el, edge_index);
251 current_block != ending_block && current_block != EXIT_BLOCK_PTR;
252 current_block = find_pdom (pdom, current_block))
254 set_control_dependent_block_to_edge_map_bit (cdbte,
260 /* Find the immediate postdominator PDOM of the specified basic block
261 BLOCK. This function is necessary because some blocks have
/* NOTE(review): truncated copy -- the rest of this comment, the
   "static basic_block" line, parameter declarations, braces, and the body
   of the INVALID_BLOCK guard (presumably an abort) are missing.  */
265 find_pdom (pdom, block)
/* Reject blocks with no valid index.  */
271 if (block->index == INVALID_BLOCK)
/* The entry block's postdominator is the first real basic block.  */
274 if (block == ENTRY_BLOCK_PTR)
275 return BASIC_BLOCK (0);
/* The exit block, and any block whose recorded postdominator is the
   exit block, postdominates to the exit block pointer.  */
276 else if (block == EXIT_BLOCK_PTR || pdom[block->index] == EXIT_BLOCK)
277 return EXIT_BLOCK_PTR;
/* Ordinary case: look the immediate postdominator up in the PDOM map.  */
279 return BASIC_BLOCK (pdom[block->index]);
282 /* Determine if the given CURRENT_RTX uses a hard register not
283 converted to SSA. Returns nonzero only if it uses such a hard
284 register. DATA is not used.
286 The program counter (PC) is not considered inherently necessary
287 since code should be position-independent and thus not depend on
288 particular PC values. */
/* NOTE(review): truncated copy -- the "static int" line, parameter
   declaration "rtx *current_rtx;", braces, the switch's case labels
   (presumably CLOBBER and REG), and the return statements are missing.  */
291 inherently_necessary_register_1 (current_rtx, data)
293 void *data ATTRIBUTE_UNUSED;
295 rtx x = *current_rtx;
/* Dispatch on the rtx code of the sub-expression being visited.  */
299 switch (GET_CODE (x))
302 /* Do not traverse the rest of the clobber. */
/* SSA-converted registers and the PC are not inherently necessary;
   anything else (a hard register kept out of SSA) is.  */
309 if (CONVERT_REGISTER_TO_SSA_P (REGNO (x)) || x == pc_rtx)
320 /* Return nonzero if the insn CURRENT_RTX is inherently necessary. */
323 inherently_necessary_register (current_rtx)
326 return for_each_rtx (¤t_rtx,
327 &inherently_necessary_register_1, NULL);
330 /* Mark X as inherently necessary if appropriate. For example,
331 function calls and storing values into memory are inherently
332 necessary. This function is to be used with for_each_rtx ().
333 Return nonzero iff inherently necessary. */
/* NOTE(review): truncated copy -- the "static int" line, parameter
   declaration, braces, the NULL_RTX guard, the case labels of both
   switches (presumably CALL_INSN / JUMP_INSN / INSN, and SET / ASM_*),
   several return statements, and the closing abort are missing.  */
336 find_inherently_necessary (x)
/* An insn touching a hard register kept out of SSA must be preserved.  */
342 else if (inherently_necessary_register (x))
/* Classify by insn code.  */
345 switch (GET_CODE (x))
/* Jump tables and computed jumps cannot be proven dead here.  */
354 return JUMP_TABLE_DATA_P (x) || computed_jump_p (x) != 0;
357 pattern = PATTERN (x);
/* Classify by the insn's pattern.  */
358 switch (GET_CODE (pattern))
/* Stores to memory are observable side effects, hence necessary.  */
365 return GET_CODE (SET_DEST (pattern)) == MEM;
373 /* We treat assembler instructions as inherently
374 necessary, and we hope that its operands do not need to
382 /* Found an impossible insn type. */
388 /* Propagate necessity through REG and SUBREG operands of CURRENT_RTX.
389 This function is called with for_each_rtx () on necessary
390 instructions. The DATA must be a varray of unprocessed
/* NOTE(review): truncated copy -- the rest of this comment, the
   "static int" line, parameter declarations, braces, the switch's case
   label (presumably REG), and the return statements are missing.  */
394 propagate_necessity_through_operand (current_rtx, data)
398 rtx x = *current_rtx;
/* DATA is the worklist of necessary insns still to be explored.  */
399 varray_type *unprocessed_instructions = (varray_type *) data;
403 switch ( GET_CODE (x))
/* Only SSA-converted registers carry a recorded defining insn.  */
406 if (CONVERT_REGISTER_TO_SSA_P (REGNO (x)))
/* Look up the insn that defines this SSA register.  */
408 rtx insn = VARRAY_RTX (ssa_definition, REGNO (x));
/* If the definition was still marked dead, revive it and queue it
   so its own operands get propagated in turn.  */
409 if (insn != NULL_RTX && UNNECESSARY_P (insn))
411 RESURRECT_INSN (insn);
412 VARRAY_PUSH_RTX (*unprocessed_instructions, insn);
422 /* Indicate all insns initially assumed to be unnecessary. */
/* NOTE(review): truncated copy -- the "static void" line, the local insn
   declaration, braces, and the loop body (presumably KILL_INSN (insn))
   are missing.  */
425 mark_all_insn_unnecessary ()
/* Walk the whole insn chain, flagging every insn dead to start with.  */
428 for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
432 /* Find the label beginning block BB, adding one if necessary. */
/* NOTE(review): truncated copy -- the "static rtx" line, parameter
   declaration, braces, the lookup of BB's head insn and the
   already-a-label early return, and the final return of new_label
   are missing.  */
435 find_block_label (bb)
/* No label at the block start: synthesize one just before its head.  */
443 rtx new_label = emit_label_before (gen_label_rtx (), insn);
/* Keep the basic-block bounds consistent with the inserted label.  */
444 if (insn == bb->head)
445 bb->head = new_label;
450 /* Remove INSN, updating its basic block structure. */
/* NOTE(review): truncated copy -- the "static void" line, parameter and
   local bb declarations, braces, and the final flow_delete_insn-style
   removal call for the multi-insn case are missing.  */
453 delete_insn_bb (insn)
459 bb = BLOCK_FOR_INSN (insn);
/* INSN is the block's only insn: a block cannot be left empty, ...  */
462 if (bb->head == bb->end)
464 /* ... so delete the insn by converting it to a note instead of
   unlinking it. */
465 PUT_CODE (insn, NOTE);
466 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
/* Otherwise shrink the block's boundaries past INSN before removal.  */
469 else if (insn == bb->head)
470 bb->head = NEXT_INSN (insn);
471 else if (insn == bb->end)
472 bb->end = PREV_INSN (insn);
476 /* Perform the dead-code elimination. */
/* NOTE(review): truncated copy -- missing throughout this function are the
   return-type line, braces, several local declarations (insn, i, pdom, the
   new_jump scope), the bodies of some conditionals, and the final cleanup
   lines (freeing pdom and the edge list, presumably).  Structure of the
   pass, as visible: (1) mark everything dead, (2) compute postdominators
   and control dependences, (3) seed the worklist with inherently necessary
   insns, (4) propagate necessity through operands and control-dependence
   edges, (5) delete what is still dead, rewriting dead conditional jumps
   as unconditional jumps to the immediate postdominator.  */
479 eliminate_dead_code ()
483 /* Necessary instructions with operands to explore. */
484 varray_type unprocessed_instructions;
485 /* Map element (b,e) is nonzero if the block is control dependent on
486 edge. "cdbte" abbreviates control dependent block to edge. */
487 control_dependent_block_to_edge_map cdbte;
488 sbitmap *postdominators;
489 /* Element I is the immediate postdominator of block I. */
491 struct edge_list *el;
493 int max_insn_uid = get_max_uid ();
495 /* Initialize the data structures. */
496 mark_all_insn_unnecessary ();
497 VARRAY_RTX_INIT (unprocessed_instructions, 64,
498 "unprocessed instructions");
499 cdbte = control_dependent_block_to_edge_map_create (n_basic_blocks);
501 /* Prepare for use of BLOCK_NUM (). */
502 connect_infinite_loops_to_exit ();
503 /* Be careful not to clear the added edges. */
504 compute_bb_for_insn (max_insn_uid);
506 /* Compute control dependence. */
507 postdominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
508 compute_flow_dominators (NULL, postdominators);
509 pdom = (int *) xmalloc (n_basic_blocks * sizeof (int));
510 for (i = 0; i < n_basic_blocks; ++i)
511 pdom[i] = INVALID_BLOCK;
512 compute_immediate_postdominators (pdom, postdominators);
513 /* Assume there is a path from each node to the exit block. */
514 for (i = 0; i < n_basic_blocks; ++i)
515 if (pdom[i] == INVALID_BLOCK)
516 pdom[i] = EXIT_BLOCK;
517 sbitmap_vector_free (postdominators);
518 el = create_edge_list();
519 find_all_control_dependences (el, pdom, cdbte);
521 /* Find inherently necessary instructions. */
522 for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
523 if (find_inherently_necessary (insn))
525 RESURRECT_INSN (insn);
526 VARRAY_PUSH_RTX (unprocessed_instructions, insn);
529 /* Propagate necessity using the operands of necessary instructions. */
530 while (VARRAY_ACTIVE_SIZE (unprocessed_instructions) > 0)
532 rtx current_instruction;
/* Pop the next necessary insn from the worklist.  */
535 current_instruction = VARRAY_TOP_RTX (unprocessed_instructions);
536 VARRAY_POP (unprocessed_instructions);
538 /* Make corresponding control dependent edges necessary. */
539 /* Assume the only JUMP_INSN is the block's last insn. It appears
540 that the last instruction of the program need not be a
543 if (INSN_P (current_instruction)
544 && !JUMP_TABLE_DATA_P (current_instruction))
546 /* Notes and labels contain no interesting operands. */
/* Every jump this insn is control dependent on must also be kept,
   and itself explored for further necessity.  */
547 EXECUTE_IF_CONTROL_DEPENDENT
548 (cdbte, current_instruction, edge_number,
550 rtx jump_insn = (INDEX_EDGE_PRED_BB (el, edge_number))->end;
551 if (GET_CODE (jump_insn) == JUMP_INSN &&
552 UNNECESSARY_P (jump_insn)) {
553 RESURRECT_INSN (jump_insn);
554 VARRAY_PUSH_RTX (unprocessed_instructions, jump_insn);
558 /* Propagate through the operands. */
/* NOTE(review): "&curren" of "&current_instruction" was decoded as the
   HTML currency-sign entity in this copy; the call should read
   for_each_rtx (&current_instruction, ...).  */
559 for_each_rtx (¤t_instruction,
560 &propagate_necessity_through_operand,
561 (PTR) &unprocessed_instructions);
566 /* Remove the unnecessary instructions. */
567 EXECUTE_IF_UNNECESSARY (insn,
569 if (any_condjump_p (insn))
571 /* Convert unnecessary conditional insn to an unconditional
572 jump to immediate postdominator block. */
573 rtx old_label = JUMP_LABEL (insn);
574 int pdom_block_number =
575 find_pdom (pdom, BLOCK_FOR_INSN (insn))->index;
577 /* Prevent the conditional jump's label from being deleted so
578 we do not have to modify the basic block structure. */
579 ++LABEL_NUSES (old_label);
581 if (pdom_block_number != EXIT_BLOCK
582 && pdom_block_number != INVALID_BLOCK)
/* Jump straight to the label of the immediate postdominator.  */
584 rtx lbl = find_block_label (BASIC_BLOCK (pdom_block_number));
585 rtx new_jump = emit_jump_insn_before (gen_jump (lbl), insn);
587 /* Let jump know that label is in use. */
588 JUMP_LABEL (new_jump) = lbl;
591 delete_insn_bb (insn);
593 /* A conditional branch is unnecessary if and only if any
594 block control-dependent on it is unnecessary. Thus,
595 any phi nodes in these unnecessary blocks are also
596 removed and these nodes need not be updated. */
598 /* A barrier must follow any unconditional jump. Barriers
599 are not in basic blocks so this must occur after
600 deleting the conditional jump. */
601 emit_barrier_after (new_jump);
604 /* The block drops off the end of the function and the
605 ending conditional jump is not needed. */
606 delete_insn_bb (insn);
608 else if (!JUMP_P (insn))
609 delete_insn_bb (insn);
612 /* Release allocated memory. */
/* Clear the in_struct dead-code marks so later passes see clean insns.  */
613 for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
614 RESURRECT_INSN (insn);
/* The worklist must have been fully drained by the propagation loop.  */
615 if (VARRAY_ACTIVE_SIZE (unprocessed_instructions) != 0)
617 VARRAY_FREE (unprocessed_instructions);
618 control_dependent_block_to_edge_map_free (cdbte);