/* Dead-code elimination pass for the GNU compiler.
- Copyright (C) 2000 Free Software Foundation, Inc.
+ Copyright (C) 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
Written by Jeffrey D. Oldham <oldham@codesourcery.com>.
-This file is part of GNU CC.
+This file is part of GCC.
-GNU CC is free software; you can redistribute it and/or modify it
-under the terms of the GNU General Public License as published by the
-Free Software Foundation; either version 2, or (at your option) any
-later version.
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 2, or (at your option) any later
+version.
-GNU CC is distributed in the hope that it will be useful, but WITHOUT
-ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
-along with GNU CC; see the file COPYING. If not, write to the Free
+along with GCC; see the file COPYING. If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA. */
The last step can require adding labels, deleting insns, and
modifying basic block structures. Some conditional jumps may be
converted to unconditional jumps so the control-flow graph may be
- out-of-date.
+ out-of-date.
Edges from some infinite loops to the exit block can be added to
the control-flow graph, but will be removed after this pass is
#include "config.h"
#include "system.h"
+#include "coretypes.h"
+#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
\f
/* A map from blocks to the edges on which they are control dependent. */
typedef struct {
- /* An dynamically allocated array. The Nth element corresponds to
+ /* A dynamically allocated array. The Nth element corresponds to
the block with index N + 2. The Ith bit in the bitmap is set if
that block is dependent on the Ith edge. */
bitmap *data;
/* Local function prototypes. */
static control_dependent_block_to_edge_map control_dependent_block_to_edge_map_create
- PARAMS((size_t num_basic_blocks));
+ (size_t num_basic_blocks);
static void set_control_dependent_block_to_edge_map_bit
- PARAMS ((control_dependent_block_to_edge_map c, basic_block bb,
- int edge_index));
+ (control_dependent_block_to_edge_map c, basic_block bb, int edge_index);
static void control_dependent_block_to_edge_map_free
- PARAMS ((control_dependent_block_to_edge_map c));
+ (control_dependent_block_to_edge_map c);
static void find_all_control_dependences
- PARAMS ((struct edge_list *el, int *pdom,
- control_dependent_block_to_edge_map cdbte));
+ (struct edge_list *el, dominance_info pdom,
+ control_dependent_block_to_edge_map cdbte);
static void find_control_dependence
- PARAMS ((struct edge_list *el, int edge_index, int *pdom,
- control_dependent_block_to_edge_map cdbte));
-static basic_block find_pdom
- PARAMS ((int *pdom, basic_block block));
-static int inherently_necessary_register_1
- PARAMS ((rtx *current_rtx, void *data));
-static int inherently_necessary_register
- PARAMS ((rtx current_rtx));
-static int find_inherently_necessary
- PARAMS ((rtx current_rtx));
-static int propagate_necessity_through_operand
- PARAMS ((rtx *current_rtx, void *data));
-static void note_inherently_necessary_set
- PARAMS ((rtx, rtx, void *));
+ (struct edge_list *el, int edge_index, dominance_info pdom,
+ control_dependent_block_to_edge_map cdbte);
+static basic_block find_pdom (dominance_info pdom, basic_block block);
+static int inherently_necessary_register_1 (rtx *current_rtx, void *data);
+static int inherently_necessary_register (rtx current_rtx);
+static int find_inherently_necessary (rtx current_rtx);
+static int propagate_necessity_through_operand (rtx *current_rtx, void *data);
+static void note_inherently_necessary_set (rtx, rtx, void *);
\f
/* Unnecessary insns are indicated using insns' in_struct bit. */
#define RESURRECT_INSN(INSN) INSN_DEAD_CODE_P(INSN) = 0
/* Return nonzero if INSN is unnecessary. */
#define UNNECESSARY_P(INSN) INSN_DEAD_CODE_P(INSN)
-static void mark_all_insn_unnecessary
- PARAMS ((void));
+static void mark_all_insn_unnecessary (void);
/* Execute CODE with free variable INSN for all unnecessary insns in
an unspecified order, producing no output. */
#define EXECUTE_IF_UNNECESSARY(INSN, CODE) \
rtx INSN; \
\
for (INSN = get_insns (); INSN != NULL_RTX; INSN = NEXT_INSN (INSN)) \
- if (INSN_DEAD_CODE_P (INSN)) { \
- CODE; \
- } \
+ if (INSN_P (insn) && INSN_DEAD_CODE_P (INSN)) \
+ { \
+ CODE; \
+ } \
}
+
/* Find the label beginning block BB. */
-static rtx find_block_label
- PARAMS ((basic_block bb));
+static rtx find_block_label (basic_block bb);
/* Remove INSN, updating its basic block structure. */
-static void delete_insn_bb
- PARAMS ((rtx insn));
+static void delete_insn_bb (rtx insn);
\f
/* Recording which blocks are control dependent on which edges. We
expect each block to be control dependent on very few edges so we
control_dependent_block_to_edge_map_free (). */
static control_dependent_block_to_edge_map
-control_dependent_block_to_edge_map_create (num_basic_blocks)
- size_t num_basic_blocks;
+control_dependent_block_to_edge_map_create (size_t num_basic_blocks)
{
int i;
control_dependent_block_to_edge_map c
control-dependent. */
static void
-set_control_dependent_block_to_edge_map_bit (c, bb, edge_index)
- control_dependent_block_to_edge_map c;
- basic_block bb;
- int edge_index;
+set_control_dependent_block_to_edge_map_bit (control_dependent_block_to_edge_map c,
+ basic_block bb, int edge_index)
{
if (bb->index - (INVALID_BLOCK+1) >= c->length)
abort ();
/* Destroy a control_dependent_block_to_edge_map C. */
static void
-control_dependent_block_to_edge_map_free (c)
- control_dependent_block_to_edge_map c;
+control_dependent_block_to_edge_map_free (control_dependent_block_to_edge_map c)
{
int i;
for (i = 0; i < c->length; ++i)
BITMAP_XFREE (c->data[i]);
- free ((PTR) c);
+ free (c);
}
/* Record all blocks' control dependences on all edges in the edge
which should be empty. */
static void
-find_all_control_dependences (el, pdom, cdbte)
- struct edge_list *el;
- int *pdom;
- control_dependent_block_to_edge_map cdbte;
+find_all_control_dependences (struct edge_list *el, dominance_info pdom,
+ control_dependent_block_to_edge_map cdbte)
{
int i;
with zeros in each (block b', edge) position. */
static void
-find_control_dependence (el, edge_index, pdom, cdbte)
- struct edge_list *el;
- int edge_index;
- int *pdom;
- control_dependent_block_to_edge_map cdbte;
+find_control_dependence (struct edge_list *el, int edge_index,
+ dominance_info pdom,
+ control_dependent_block_to_edge_map cdbte)
{
basic_block current_block;
basic_block ending_block;
if (INDEX_EDGE_PRED_BB (el, edge_index) == EXIT_BLOCK_PTR)
abort ();
- ending_block =
- (INDEX_EDGE_PRED_BB (el, edge_index) == ENTRY_BLOCK_PTR)
- ? BASIC_BLOCK (0)
+ ending_block =
+ (INDEX_EDGE_PRED_BB (el, edge_index) == ENTRY_BLOCK_PTR)
+ ? ENTRY_BLOCK_PTR->next_bb
: find_pdom (pdom, INDEX_EDGE_PRED_BB (el, edge_index));
for (current_block = INDEX_EDGE_SUCC_BB (el, edge_index);
negative numbers. */
static basic_block
-find_pdom (pdom, block)
- int *pdom;
- basic_block block;
+find_pdom (dominance_info pdom, basic_block block)
{
if (!block)
abort ();
abort ();
if (block == ENTRY_BLOCK_PTR)
- return BASIC_BLOCK (0);
- else if (block == EXIT_BLOCK_PTR || pdom[block->index] == EXIT_BLOCK)
+ return ENTRY_BLOCK_PTR->next_bb;
+ else if (block == EXIT_BLOCK_PTR)
return EXIT_BLOCK_PTR;
else
- return BASIC_BLOCK (pdom[block->index]);
+ {
+ basic_block bb = get_immediate_dominator (pdom, block);
+ if (!bb)
+ return EXIT_BLOCK_PTR;
+ return bb;
+ }
}
/* Determine if the given CURRENT_RTX uses a hard register not
particular PC values. */
static int
-inherently_necessary_register_1 (current_rtx, data)
- rtx *current_rtx;
- void *data ATTRIBUTE_UNUSED;
+inherently_necessary_register_1 (rtx *current_rtx,
+ void *data ATTRIBUTE_UNUSED)
{
rtx x = *current_rtx;
{
case CLOBBER:
/* Do not traverse the rest of the clobber. */
- return -1;
+ return -1;
break;
case PC:
return 0;
/* Return nonzero if the insn CURRENT_RTX is inherently necessary. */
static int
-inherently_necessary_register (current_rtx)
- rtx current_rtx;
+inherently_necessary_register (rtx current_rtx)
{
  return for_each_rtx (&current_rtx,
&inherently_necessary_register_1, NULL);
/* Called via note_stores for each store in an insn. Note whether
or not a particular store is inherently necessary. Store a
- nonzero value in inherently_necessary_p if such a storeis found. */
-
+ nonzero value in inherently_necessary_p if such a store is found. */
+
static void
-note_inherently_necessary_set (dest, set, data)
- rtx set;
- rtx dest;
- void *data;
+note_inherently_necessary_set (rtx dest, rtx set ATTRIBUTE_UNUSED, void *data)
{
- int *inherently_necessary_set_p = (int *)data;
+ int *inherently_necessary_set_p = (int *) data;
while (GET_CODE (dest) == SUBREG
|| GET_CODE (dest) == STRICT_LOW_PART
Return nonzero iff inherently necessary. */
static int
-find_inherently_necessary (x)
- rtx x;
+find_inherently_necessary (rtx x)
{
- rtx pattern;
if (x == NULL_RTX)
return 0;
else if (inherently_necessary_register (x))
return !0;
else
switch (GET_CODE (x))
- {
+ {
case CALL_INSN:
+ case BARRIER:
+ case PREFETCH:
return !0;
case CODE_LABEL:
case NOTE:
- case BARRIER:
return 0;
- break;
case JUMP_INSN:
return JUMP_TABLE_DATA_P (x) || computed_jump_p (x) != 0;
- break;
case INSN:
{
int inherently_necessary_set = 0;
}
default:
/* Found an impossible insn type. */
- abort();
+ abort ();
break;
}
}
instructions. */
static int
-propagate_necessity_through_operand (current_rtx, data)
- rtx *current_rtx;
- void *data;
+propagate_necessity_through_operand (rtx *current_rtx, void *data)
{
rtx x = *current_rtx;
varray_type *unprocessed_instructions = (varray_type *) data;
/* Indicate all insns initially assumed to be unnecessary. */
static void
-mark_all_insn_unnecessary ()
+mark_all_insn_unnecessary (void)
{
rtx insn;
- for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
- KILL_INSN (insn);
+ for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn)) {
+ if (INSN_P (insn))
+ KILL_INSN (insn);
+ }
+
}
/* Find the label beginning block BB, adding one if necessary. */
static rtx
-find_block_label (bb)
- basic_block bb;
+find_block_label (basic_block bb)
{
rtx insn = bb->head;
if (LABEL_P (insn))
/* Remove INSN, updating its basic block structure. */
static void
-delete_insn_bb (insn)
- rtx insn;
+delete_insn_bb (rtx insn)
{
- basic_block bb;
if (!insn)
abort ();
if (! INSN_P (insn))
return;
- bb = BLOCK_FOR_INSN (insn);
- if (!bb)
- abort ();
- if (bb->head == bb->end)
- {
- /* Delete the insn by converting it to a note. */
- PUT_CODE (insn, NOTE);
- NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
- return;
- }
- else if (insn == bb->head)
- bb->head = NEXT_INSN (insn);
- else if (insn == bb->end)
- bb->end = PREV_INSN (insn);
delete_insn (insn);
}
\f
/* Perform the dead-code elimination. */
void
-eliminate_dead_code ()
+ssa_eliminate_dead_code (void)
{
- int i;
rtx insn;
+ basic_block bb;
/* Necessary instructions with operands to explore. */
varray_type unprocessed_instructions;
/* Map element (b,e) is nonzero if the block is control dependent on
edge. "cdbte" abbreviates control dependent block to edge. */
control_dependent_block_to_edge_map cdbte;
/* Element I is the immediate postdominator of block I. */
- int *pdom;
+ dominance_info pdom;
struct edge_list *el;
- int max_insn_uid = get_max_uid ();
-
/* Initialize the data structures. */
mark_all_insn_unnecessary ();
VARRAY_RTX_INIT (unprocessed_instructions, 64,
"unprocessed instructions");
- cdbte = control_dependent_block_to_edge_map_create (n_basic_blocks);
+ cdbte = control_dependent_block_to_edge_map_create (last_basic_block);
/* Prepare for use of BLOCK_NUM (). */
connect_infinite_loops_to_exit ();
- /* Be careful not to clear the added edges. */
- compute_bb_for_insn (max_insn_uid);
/* Compute control dependence. */
- pdom = (int *) xmalloc (n_basic_blocks * sizeof (int));
- for (i = 0; i < n_basic_blocks; ++i)
- pdom[i] = INVALID_BLOCK;
- calculate_dominance_info (pdom, NULL, CDI_POST_DOMINATORS);
- /* Assume there is a path from each node to the exit block. */
- for (i = 0; i < n_basic_blocks; ++i)
- if (pdom[i] == INVALID_BLOCK)
- pdom[i] = EXIT_BLOCK;
- el = create_edge_list();
+ pdom = calculate_dominance_info (CDI_POST_DOMINATORS);
+ el = create_edge_list ();
find_all_control_dependences (el, pdom, cdbte);
/* Find inherently necessary instructions. */
for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
- if (find_inherently_necessary (insn))
+ if (find_inherently_necessary (insn) && INSN_P (insn))
{
RESURRECT_INSN (insn);
VARRAY_PUSH_RTX (unprocessed_instructions, insn);
/* Propagate through the operands. */
       for_each_rtx (&current_instruction,
&propagate_necessity_through_operand,
- (PTR) &unprocessed_instructions);
+ &unprocessed_instructions);
/* PHI nodes are somewhat special in that each PHI alternative
has data and control dependencies. The data dependencies
{
if (any_condjump_p (insn))
{
- /* Convert unnecessary conditional insn to an unconditional
- jump to immediate postdominator block. */
- rtx old_label = JUMP_LABEL (insn);
- int pdom_block_number =
- find_pdom (pdom, BLOCK_FOR_INSN (insn))->index;
-
- /* Prevent the conditional jump's label from being deleted so
- we do not have to modify the basic block structure. */
- ++LABEL_NUSES (old_label);
-
- if (pdom_block_number != EXIT_BLOCK
- && pdom_block_number != INVALID_BLOCK)
+ basic_block bb = BLOCK_FOR_INSN (insn);
+ basic_block pdom_bb = find_pdom (pdom, bb);
+ rtx lbl;
+ edge e;
+
+ /* Egad. The immediate post dominator is the exit block. We
+ would like to optimize this conditional jump to jump directly
+ to the exit block. That can be difficult as we may not have
+ a suitable CODE_LABEL that allows us to fall unmolested into
+ the exit block.
+
+ So, we just delete the conditional branch by turning it into
+ a deleted note. That is safe, but just not as optimal as
+ it could be. */
+ if (pdom_bb == EXIT_BLOCK_PTR)
{
- rtx lbl = find_block_label (BASIC_BLOCK (pdom_block_number));
- rtx new_jump = emit_jump_insn_before (gen_jump (lbl), insn);
-
- /* Let jump know that label is in use. */
- JUMP_LABEL (new_jump) = lbl;
- ++LABEL_NUSES (lbl);
-
- delete_insn_bb (insn);
-
- /* A conditional branch is unnecessary if and only if any
- block control-dependent on it is unnecessary. Thus,
- any phi nodes in these unnecessary blocks are also
- removed and these nodes need not be updated. */
-
- /* A barrier must follow any unconditional jump. Barriers
- are not in basic blocks so this must occur after
- deleting the conditional jump. */
- emit_barrier_after (new_jump);
+ /* Since we're going to just delete the branch, we need
+ look at all the edges and remove all those which are not
+ a fallthru edge. */
+ e = bb->succ;
+ while (e)
+ {
+ edge temp = e;
+
+ e = e->succ_next;
+ if ((temp->flags & EDGE_FALLTHRU) == 0)
+ {
+ /* We've found a non-fallthru edge, find any PHI nodes
+ at the target and clean them up. */
+ if (temp->dest != EXIT_BLOCK_PTR)
+ {
+ rtx insn
+ = first_insn_after_basic_block_note (temp->dest);
+
+ while (PHI_NODE_P (insn))
+ {
+ remove_phi_alternative (PATTERN (insn), temp->src);
+ insn = NEXT_INSN (insn);
+ }
+ }
+
+ remove_edge (temp);
+ }
+ }
+
+ /* Now "delete" the conditional jump. */
+ PUT_CODE (insn, NOTE);
+ NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
+ continue;
}
- else
- /* The block drops off the end of the function and the
- ending conditional jump is not needed. */
- delete_insn_bb (insn);
+
+ /* We've found a conditional branch that is unnecessary.
+
+ First, remove all outgoing edges from this block, updating
+ PHI nodes as appropriate. */
+ e = bb->succ;
+ while (e)
+ {
+ edge temp = e;
+
+ e = e->succ_next;
+
+ if (temp->flags & EDGE_ABNORMAL)
+ continue;
+
+ /* We found an edge that is not executable. First simplify
+ the PHI nodes in the target block. */
+ if (temp->dest != EXIT_BLOCK_PTR)
+ {
+ rtx insn = first_insn_after_basic_block_note (temp->dest);
+
+ while (PHI_NODE_P (insn))
+ {
+ remove_phi_alternative (PATTERN (insn), temp->src);
+ insn = NEXT_INSN (insn);
+ }
+ }
+
+ remove_edge (temp);
+ }
+
+ /* Create an edge from this block to the post dominator.
+ What about the PHI nodes at the target? */
+ make_edge (bb, pdom_bb, 0);
+
+ /* Third, transform this insn into an unconditional
+ jump to the label for the immediate postdominator. */
+ lbl = find_block_label (pdom_bb);
+ SET_SRC (PATTERN (insn)) = gen_rtx_LABEL_REF (VOIDmode, lbl);
+ INSN_CODE (insn) = -1;
+ JUMP_LABEL (insn) = lbl;
+ LABEL_NUSES (lbl)++;
+
+ /* A barrier must follow any unconditional jump. Barriers
+ are not in basic blocks so this must occur after
+ deleting the conditional jump. */
+ emit_barrier_after (insn);
}
else if (!JUMP_P (insn))
delete_insn_bb (insn);
});
-
+
/* Remove fake edges from the CFG. */
remove_fake_edges ();
+ /* Find any blocks with no successors and ensure they are followed
+ by a BARRIER. delete_insn has the nasty habit of deleting barriers
+ when deleting insns. */
+ FOR_EACH_BB (bb)
+ {
+ if (bb->succ == NULL)
+ {
+ rtx next = NEXT_INSN (bb->end);
+
+ if (!next || GET_CODE (next) != BARRIER)
+ emit_barrier_after (bb->end);
+ }
+ }
/* Release allocated memory. */
- for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
- RESURRECT_INSN (insn);
+ for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn)) {
+ if (INSN_P (insn))
+ RESURRECT_INSN (insn);
+ }
+
if (VARRAY_ACTIVE_SIZE (unprocessed_instructions) != 0)
abort ();
- VARRAY_FREE (unprocessed_instructions);
control_dependent_block_to_edge_map_free (cdbte);
- free ((PTR) pdom);
+ free (pdom);
free_edge_list (el);
}