/* Common subexpression elimination for GNU compiler.
Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
- 1999, 2000, 2001 Free Software Foundation, Inc.
+ 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
-This file is part of GNU CC.
+This file is part of GCC.
-GNU CC is free software; you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
-any later version.
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 2, or (at your option) any later
+version.
-GNU CC is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+for more details.
You should have received a copy of the GNU General Public License
-along with GNU CC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+along with GCC; see the file COPYING. If not, write to the Free
+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
+02111-1307, USA. */
#include "config.h"
/* stdio.h must precede rtl.h for FFS. */
#include "system.h"
-#include <setjmp.h>
+#include "coretypes.h"
+#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
+#include "timevar.h"
+#include "except.h"
+#include "target.h"
+#include "params.h"
/* The basic idea of common subexpression elimination is to go
through the code, keeping a record of expressions that would
rtx comparison_const;
int comparison_qty;
unsigned int first_reg, last_reg;
- enum machine_mode mode;
- enum rtx_code comparison_code;
+ /* The sizes of these fields should match the sizes of the
+ code and mode fields of struct rtx_def (see rtl.h). */
+ ENUM_BITFIELD(rtx_code) comparison_code : 16;
+ ENUM_BITFIELD(machine_mode) mode : 8;
};
/* The table of all qtys, indexed by qty number. */
static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
-#endif
/* Previous actual insn. 0 if at first insn of basic block. */
static rtx prev_insn;
+#endif
/* Insn being scanned. */
reg_tick value, such expressions existing in the hash table are
invalid. */
int reg_in_table;
+
+ /* The SUBREG that was set when REG_TICK was last incremented. Set
+ to -1 if the last store was to the whole register, not a subreg. */
+ unsigned int subreg_ticked;
};
/* A free list of cse_reg_info entries. */
struct table_elt *related_value;
int cost;
int regcost;
- enum machine_mode mode;
+ /* The size of this field should match the size
+ of the mode field of struct rtx_def (see rtl.h). */
+ ENUM_BITFIELD(machine_mode) mode : 8;
char in_memory;
char is_const;
char flag;
#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)
+/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
+ SUBREG). */
+
+#define SUBREG_TICKED(N) ((GET_CSE_REG_INFO (N))->subreg_ticked)
+
/* Get the quantity number for REG. */
#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)
static int constant_pool_entries_cost;
-/* Define maximum length of a branch path. */
-
-#define PATHLENGTH 10
-
/* This data describes a block that will be processed by cse_basic_block. */
struct cse_basic_block_data
except that it is used when the destination label is not preceded
by a BARRIER. */
enum taken {TAKEN, NOT_TAKEN, AROUND} status;
- } path[PATHLENGTH];
+ } *path;
};
-/* Nonzero if X has the form (PLUS frame-pointer integer). We check for
- virtual regs here because the simplify_*_operation routines are called
- by integrate.c, which is called before virtual register instantiation.
-
- ?!? FIXED_BASE_PLUS_P and NONZERO_BASE_PLUS_P need to move into
- a header file so that their definitions can be shared with the
- simplification routines in simplify-rtx.c. Until then, do not
- change these macros without also changing the copy in simplify-rtx.c. */
-
-#define FIXED_BASE_PLUS_P(X) \
- ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
- || ((X) == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])\
- || (X) == virtual_stack_vars_rtx \
- || (X) == virtual_incoming_args_rtx \
- || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
- && (XEXP (X, 0) == frame_pointer_rtx \
- || XEXP (X, 0) == hard_frame_pointer_rtx \
- || ((X) == arg_pointer_rtx \
- && fixed_regs[ARG_POINTER_REGNUM]) \
- || XEXP (X, 0) == virtual_stack_vars_rtx \
- || XEXP (X, 0) == virtual_incoming_args_rtx)) \
- || GET_CODE (X) == ADDRESSOF)
-
-/* Similar, but also allows reference to the stack pointer.
-
- This used to include FIXED_BASE_PLUS_P, however, we can't assume that
- arg_pointer_rtx by itself is nonzero, because on at least one machine,
- the i960, the arg pointer is zero when it is unused. */
-
-#define NONZERO_BASE_PLUS_P(X) \
- ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
- || (X) == virtual_stack_vars_rtx \
- || (X) == virtual_incoming_args_rtx \
- || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
- && (XEXP (X, 0) == frame_pointer_rtx \
- || XEXP (X, 0) == hard_frame_pointer_rtx \
- || ((X) == arg_pointer_rtx \
- && fixed_regs[ARG_POINTER_REGNUM]) \
- || XEXP (X, 0) == virtual_stack_vars_rtx \
- || XEXP (X, 0) == virtual_incoming_args_rtx)) \
- || (X) == stack_pointer_rtx \
- || (X) == virtual_stack_dynamic_rtx \
- || (X) == virtual_outgoing_args_rtx \
- || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
- && (XEXP (X, 0) == stack_pointer_rtx \
- || XEXP (X, 0) == virtual_stack_dynamic_rtx \
- || XEXP (X, 0) == virtual_outgoing_args_rtx)) \
- || GET_CODE (X) == ADDRESSOF)
-
+static bool fixed_base_plus_p PARAMS ((rtx x));
static int notreg_cost PARAMS ((rtx, enum rtx_code));
static int approx_reg_cost_1 PARAMS ((rtx *, void *));
static int approx_reg_cost PARAMS ((rtx));
static void flush_hash_table PARAMS ((void));
static bool insn_live_p PARAMS ((rtx, int *));
static bool set_live_p PARAMS ((rtx, rtx, int *));
-static bool dead_libcall_p PARAMS ((rtx));
+static bool dead_libcall_p PARAMS ((rtx, int *));
\f
+/* Nonzero if X has the form (PLUS frame-pointer integer). We check for
+ virtual regs here because the simplify_*_operation routines are called
+ by integrate.c, which is called before virtual register instantiation. */
+
+static bool
+fixed_base_plus_p (x)
+ rtx x;
+{
+ switch (GET_CODE (x))
+ {
+ case REG:
+ /* The frame pointer, hard frame pointer, a fixed arg pointer,
+ and every virtual register all count as fixed bases. */
+ if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
+ return true;
+ if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
+ return true;
+ if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
+ && REGNO (x) <= LAST_VIRTUAL_REGISTER)
+ return true;
+ return false;
+
+ case PLUS:
+ /* Only (PLUS base const_int) qualifies; recurse on the base term. */
+ if (GET_CODE (XEXP (x, 1)) != CONST_INT)
+ return false;
+ return fixed_base_plus_p (XEXP (x, 0));
+
+ case ADDRESSOF:
+ /* An ADDRESSOF node is unconditionally treated as a fixed base. */
+ return true;
+
+ default:
+ return false;
+ }
+}
+
/* Dump the expressions in the equivalence class indicated by CLASSP.
This function is used only for debugging. */
void
void *data;
{
rtx x = *xp;
- regset set = (regset) data;
+ int *cost_p = data;
if (x && GET_CODE (x) == REG)
- SET_REGNO_REG_SET (set, REGNO (x));
+ {
+ unsigned int regno = REGNO (x);
+
+ if (! CHEAP_REGNO (regno))
+ {
+ if (regno < FIRST_PSEUDO_REGISTER)
+ {
+ if (SMALL_REGISTER_CLASSES)
+ return 1;
+ *cost_p += 2;
+ }
+ else
+ *cost_p += 1;
+ }
+ }
+
return 0;
}
/* Return an estimate of the cost of the registers used in an rtx.
This is mostly the number of different REG expressions in the rtx;
- however for some excecptions like fixed registers we use a cost of
+ however for some exceptions like fixed registers we use a cost of
0. If any other hard register reference occurs, return MAX_COST. */
static int
approx_reg_cost (x)
rtx x;
{
- regset_head set;
- int i;
int cost = 0;
- int hardregs = 0;
-
- INIT_REG_SET (&set);
- for_each_rtx (&x, approx_reg_cost_1, (void *)&set);
-
- EXECUTE_IF_SET_IN_REG_SET
- (&set, 0, i,
- {
- if (! CHEAP_REGNO (i))
- {
- if (i < FIRST_PSEUDO_REGISTER)
- hardregs++;
- cost += i < FIRST_PSEUDO_REGISTER ? 2 : 1;
- }
- });
+ if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
+ return MAX_COST;
- CLEAR_REG_SET (&set);
- return hardregs && SMALL_REGISTER_CLASSES ? MAX_COST : cost;
+ return cost;
}
/* Return a negative value if an rtx A, whose costs are given by COST_A
preferrable (cost_a, regcost_a, cost_b, regcost_b)
int cost_a, regcost_a, cost_b, regcost_b;
{
- /* First, get rid of a cases involving expressions that are entirely
+ /* First, get rid of cases involving expressions that are entirely
unwanted. */
if (cost_a != cost_b)
{
rtx x;
enum rtx_code outer_code ATTRIBUTE_UNUSED;
{
- register int i, j;
- register enum rtx_code code;
- register const char *fmt;
- register int total;
+ int i, j;
+ enum rtx_code code;
+ const char *fmt;
+ int total;
if (x == 0)
return 0;
/* Compute the default costs of certain things.
- Note that RTX_COSTS can override the defaults. */
+ Note that targetm.rtx_costs can override the defaults. */
code = GET_CODE (x);
switch (code)
{
case MULT:
- /* Count multiplication by 2**n as a shift,
- because if we are considering it, we would output it as a shift. */
- if (GET_CODE (XEXP (x, 1)) == CONST_INT
- && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
- total = 2;
- else
- total = COSTS_N_INSNS (5);
+ total = COSTS_N_INSNS (5);
break;
case DIV:
case UDIV:
+ GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
break;
-#ifdef RTX_COSTS
- RTX_COSTS (x, code, outer_code);
-#endif
-#ifdef CONST_COSTS
- CONST_COSTS (x, code, outer_code);
-#endif
-
default:
-#ifdef DEFAULT_RTX_COSTS
- DEFAULT_RTX_COSTS (x, code, outer_code);
-#endif
+ if ((*targetm.rtx_costs) (x, code, outer_code, &total))
+ return total;
break;
}
}
\f
/* Return cost of address expression X.
- Expect that X is propertly formed address reference. */
+ Expect that X is properly formed address reference. */
int
address_cost (x, mode)
rtx x;
enum machine_mode mode;
{
- /* The ADDRESS_COST macro does not deal with ADDRESSOF nodes. But,
+ /* The address_cost target hook does not deal with ADDRESSOF nodes. But,
during CSE, such nodes are present. Using an ADDRESSOF node which
refers to the address of a REG is a good thing because we can then
turn (MEM (ADDRESSSOF (REG))) into just plain REG. */
/* We may be asked for cost of various unusual addresses, such as operands
of push instruction. It is not worthwhile to complicate writing
- of ADDRESS_COST macro by such cases. */
+ of the target hook by such cases. */
if (!memory_address_p (mode, x))
return 1000;
-#ifdef ADDRESS_COST
- return ADDRESS_COST (x);
-#else
- return rtx_cost (x, MEM);
-#endif
+
+ return (*targetm.address_cost) (x);
}
+/* If the target doesn't override, compute the cost as with arithmetic.
+ X is the address expression; it is costed as an ordinary rtx appearing
+ in a MEM context. This is the fallback for the address_cost target hook. */
+
+int
+default_address_cost (x)
+ rtx x;
+{
+ return rtx_cost (x, MEM);
+}
\f
static struct cse_reg_info *
get_cse_reg_info (regno)
/* Initialize it. */
p->reg_tick = 1;
p->reg_in_table = -1;
+ p->subreg_ticked = -1;
p->reg_qty = regno;
p->regno = regno;
p->next = cse_reg_info_used_list;
static void
new_basic_block ()
{
- register int i;
+ int i;
next_qty = max_reg;
}
}
- prev_insn = 0;
-
#ifdef HAVE_cc0
+ prev_insn = 0;
prev_insn_cc0 = 0;
#endif
}
unsigned int reg;
enum machine_mode mode;
{
- register int q;
- register struct qty_table_elem *ent;
- register struct reg_eqv_elem *eqv;
+ int q;
+ struct qty_table_elem *ent;
+ struct reg_eqv_elem *eqv;
if (next_qty >= max_qty)
abort ();
delete_reg_equiv (reg)
unsigned int reg;
{
- register struct qty_table_elem *ent;
- register int q = REG_QTY (reg);
- register int p, n;
+ struct qty_table_elem *ent;
+ int q = REG_QTY (reg);
+ int p, n;
/* If invalid, do nothing. */
if (q == (int) reg)
mention_regs (x)
rtx x;
{
- register enum rtx_code code;
- register int i, j;
- register const char *fmt;
- register int changed = 0;
+ enum rtx_code code;
+ int i, j;
+ const char *fmt;
+ int changed = 0;
if (x == 0)
return 0;
remove_invalid_refs (i);
REG_IN_TABLE (i) = REG_TICK (i);
+ SUBREG_TICKED (i) = -1;
}
return 0;
if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
{
- /* If reg_tick has been incremented more than once since
- reg_in_table was last set, that means that the entire
- register has been set before, so discard anything memorized
- for the entire register, including all SUBREG expressions. */
- if (REG_IN_TABLE (i) != REG_TICK (i) - 1)
+ /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
+ the last store to this register really stored into this
+ subreg, then remove the memory of this subreg.
+ Otherwise, remove any memory of the entire register and
+ all its subregs from the table. */
+ if (REG_TICK (i) - REG_IN_TABLE (i) > 1
+ || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
remove_invalid_refs (i);
else
remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
}
REG_IN_TABLE (i) = REG_TICK (i);
+ SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
return 0;
}
static void
remove_from_table (elt, hash)
- register struct table_elt *elt;
+ struct table_elt *elt;
unsigned hash;
{
if (elt == 0)
/* Remove the table element from its equivalence class. */
{
- register struct table_elt *prev = elt->prev_same_value;
- register struct table_elt *next = elt->next_same_value;
+ struct table_elt *prev = elt->prev_same_value;
+ struct table_elt *next = elt->next_same_value;
if (next)
next->prev_same_value = prev;
prev->next_same_value = next;
else
{
- register struct table_elt *newfirst = next;
+ struct table_elt *newfirst = next;
while (next)
{
next->first_same_value = newfirst;
/* Remove the table element from its hash bucket. */
{
- register struct table_elt *prev = elt->prev_same_hash;
- register struct table_elt *next = elt->next_same_hash;
+ struct table_elt *prev = elt->prev_same_hash;
+ struct table_elt *next = elt->next_same_hash;
if (next)
next->prev_same_hash = prev;
if (elt->related_value != 0 && elt->related_value != elt)
{
- register struct table_elt *p = elt->related_value;
+ struct table_elt *p = elt->related_value;
while (p->related_value != elt)
p = p->related_value;
unsigned hash;
enum machine_mode mode;
{
- register struct table_elt *p;
+ struct table_elt *p;
for (p = table[hash]; p; p = p->next_same_hash)
if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
unsigned hash;
enum machine_mode mode;
{
- register struct table_elt *p;
+ struct table_elt *p;
if (GET_CODE (x) == REG)
{
rtx x;
enum rtx_code code;
{
- register struct table_elt *p
+ struct table_elt *p
= lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));
/* If we are looking for a CONST_INT, the mode doesn't really matter, as
static struct table_elt *
insert (x, classp, hash, mode)
- register rtx x;
- register struct table_elt *classp;
+ rtx x;
+ struct table_elt *classp;
unsigned hash;
enum machine_mode mode;
{
- register struct table_elt *elt;
+ struct table_elt *elt;
/* If X is a register and we haven't made a quantity for it,
something is wrong. */
elt->is_const = (CONSTANT_P (x)
/* GNU C++ takes advantage of this for `this'
(and other const values). */
- || (RTX_UNCHANGING_P (x)
- && GET_CODE (x) == REG
+ || (GET_CODE (x) == REG
+ && RTX_UNCHANGING_P (x)
&& REGNO (x) >= FIRST_PSEUDO_REGISTER)
- || FIXED_BASE_PLUS_P (x));
+ || fixed_base_plus_p (x));
if (table[hash])
table[hash]->prev_same_hash = elt;
if (CHEAPER (elt, classp))
/* Insert at the head of the class */
{
- register struct table_elt *p;
+ struct table_elt *p;
elt->next_same_value = classp;
classp->prev_same_value = elt;
elt->first_same_value = elt;
{
/* Insert not at head of the class. */
/* Put it after the last element cheaper than X. */
- register struct table_elt *p, *next;
+ struct table_elt *p, *next;
for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
p = next);
&& ! qty_table[REG_QTY (REGNO (x))].const_rtx
&& ! elt->is_const)
{
- register struct table_elt *p;
+ struct table_elt *p;
for (p = classp; p != 0; p = p->next_same_value)
{
rtx x;
enum machine_mode full_mode;
{
- register int i;
- register struct table_elt *p;
+ int i;
+ struct table_elt *p;
switch (GET_CODE (x))
{
delete_reg_equiv (regno);
REG_TICK (regno)++;
+ SUBREG_TICKED (regno) = -1;
if (regno >= FIRST_PSEUDO_REGISTER)
{
unsigned int endregno
= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
unsigned int tregno, tendregno, rn;
- register struct table_elt *p, *next;
+ struct table_elt *p, *next;
CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
delete_reg_equiv (rn);
REG_TICK (rn)++;
+ SUBREG_TICKED (rn) = -1;
}
if (in_table)
for (i = 0; i < HASH_SIZE; i++)
{
- register struct table_elt *next;
+ struct table_elt *next;
for (p = table[i]; p; p = next)
{
{
next = p->next_same_hash;
if (GET_CODE (p->exp) != REG
- && refers_to_regno_p (regno, regno + 1, p->exp, (rtx*)0))
+ && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
remove_from_table (p, i);
}
}
|| (((SUBREG_BYTE (exp)
+ (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
&& SUBREG_BYTE (exp) <= end))
- && refers_to_regno_p (regno, regno + 1, p->exp, (rtx*)0))
+ && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
remove_from_table (p, i);
}
}
{
delete_reg_equiv (regno);
if (REG_TICK (regno) >= 0)
- REG_TICK (regno)++;
+ {
+ REG_TICK (regno)++;
+ SUBREG_TICKED (regno) = -1;
+ }
in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
}
rtx x;
struct table_elt *elt;
{
- register struct table_elt *relt = 0;
- register struct table_elt *p, *q;
+ struct table_elt *relt = 0;
+ struct table_elt *p, *q;
HOST_WIDE_INT offset;
/* First, is there anything related known?
const char *ps;
{
unsigned hash = 0;
- const unsigned char *p = (const unsigned char *)ps;
-
+ const unsigned char *p = (const unsigned char *) ps;
+
if (p)
while (*p)
hash += *p++;
rtx x;
enum machine_mode mode;
{
- register int i, j;
- register unsigned hash = 0;
- register enum rtx_code code;
- register const char *fmt;
+ int i, j;
+ unsigned hash = 0;
+ enum rtx_code code;
+ const char *fmt;
/* repeat is used to turn tail-recursion into iteration. */
repeat:
case REG:
{
unsigned int regno = REGNO (x);
+ bool record;
/* On some machines, we can't record any non-fixed hard register,
because extending its life will cause reload problems. We
- consider ap, fp, and sp to be fixed for this purpose.
+ consider ap, fp, sp, gp to be fixed for this purpose.
We also consider CCmode registers to be fixed for this purpose;
failure to do so leads to failure to simplify 0<100 type of
conditionals.
- On all machines, we can't record any global registers.
+ On all machines, we can't record any global registers.
Nor should we record any register that is in a small
class, as defined by CLASS_LIKELY_SPILLED_P. */
- if (regno < FIRST_PSEUDO_REGISTER
- && (global_regs[regno]
- || CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno))
- || (SMALL_REGISTER_CLASSES
- && ! fixed_regs[regno]
- && regno != FRAME_POINTER_REGNUM
- && regno != HARD_FRAME_POINTER_REGNUM
- && regno != ARG_POINTER_REGNUM
- && regno != STACK_POINTER_REGNUM
- && GET_MODE_CLASS (GET_MODE (x)) != MODE_CC)))
+ if (regno >= FIRST_PSEUDO_REGISTER)
+ record = true;
+ else if (x == frame_pointer_rtx
+ || x == hard_frame_pointer_rtx
+ || x == arg_pointer_rtx
+ || x == stack_pointer_rtx
+ || x == pic_offset_table_rtx)
+ record = true;
+ else if (global_regs[regno])
+ record = false;
+ else if (fixed_regs[regno])
+ record = true;
+ else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
+ record = true;
+ else if (SMALL_REGISTER_CLASSES)
+ record = false;
+ else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
+ record = false;
+ else
+ record = true;
+
+ if (!record)
{
do_not_record = 1;
return 0;
the integers representing the constant. */
hash += (unsigned) code + (unsigned) GET_MODE (x);
if (GET_MODE (x) != VOIDmode)
- for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
- {
- unsigned HOST_WIDE_INT tem = XWINT (x, i);
- hash += tem;
- }
+ hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
else
hash += ((unsigned) CONST_DOUBLE_LOW (x)
+ (unsigned) CONST_DOUBLE_HIGH (x));
return hash;
+ case CONST_VECTOR:
+ {
+ int units;
+ rtx elt;
+
+ units = CONST_VECTOR_NUNITS (x);
+
+ for (i = 0; i < units; ++i)
+ {
+ elt = CONST_VECTOR_ELT (x, i);
+ hash += canon_hash (elt, GET_MODE (elt));
+ }
+
+ return hash;
+ }
+
/* Assume there is only one rtx object for any given label. */
case LABEL_REF:
hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
do_not_record = 1;
return 0;
}
- if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
- {
- hash_arg_in_memory = 1;
- }
+ if (! RTX_UNCHANGING_P (x) || fixed_base_plus_p (XEXP (x, 0)))
+ hash_arg_in_memory = 1;
+
/* Now that we have already found this special case,
might as well speed it up as much as possible. */
hash += (unsigned) MEM;
if (GET_CODE (XEXP (x, 0)) == MEM
&& ! MEM_VOLATILE_P (XEXP (x, 0)))
{
- hash += (unsigned)USE;
+ hash += (unsigned) USE;
x = XEXP (x, 0);
- if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
+ if (! RTX_UNCHANGING_P (x) || fixed_base_plus_p (XEXP (x, 0)))
hash_arg_in_memory = 1;
/* Now that we have already found this special case,
hash += canon_hash_string (XSTR (x, i));
else if (fmt[i] == 'i')
{
- register unsigned tem = XINT (x, i);
+ unsigned tem = XINT (x, i);
hash += tem;
}
else if (fmt[i] == '0' || fmt[i] == 't')
int validate;
int equal_values;
{
- register int i, j;
- register enum rtx_code code;
- register const char *fmt;
+ int i, j;
+ enum rtx_code code;
+ const char *fmt;
/* Note: it is incorrect to assume an expression is equivalent to itself
if VALIDATE is nonzero. */
static int
cse_rtx_varies_p (x, from_alias)
- register rtx x;
+ rtx x;
int from_alias;
{
/* We need not check for X and the equivalence class being of the same
replace each register reference inside it
with the "oldest" equivalent register.
- If INSN is non-zero and we are replacing a pseudo with a hard register
+ If INSN is nonzero and we are replacing a pseudo with a hard register
or vice versa, validate_change is used to ensure that INSN remains valid
- after we make our substitution. The calls are made with IN_GROUP non-zero
+ after we make our substitution. The calls are made with IN_GROUP nonzero
so apply_change_group must be called upon the outermost return from this
function (unless INSN is zero). The result of apply_change_group can
generally be discarded since the changes we are making are optional. */
rtx x;
rtx insn;
{
- register int i;
- register enum rtx_code code;
- register const char *fmt;
+ int i;
+ enum rtx_code code;
+ const char *fmt;
if (x == 0)
return x;
case CONST:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_VECTOR:
case SYMBOL_REF:
case LABEL_REF:
case ADDR_VEC:
case REG:
{
- register int first;
- register int q;
- register struct qty_table_elem *ent;
+ int first;
+ int q;
+ struct qty_table_elem *ent;
/* Never replace a hard reg, because hard regs can appear
in more than one machine mode, and we must preserve the mode
fmt = GET_RTX_FORMAT (code);
for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
{
- register int j;
+ int j;
if (fmt[i] == 'e')
{
is a good approximation for that cost. However, most RISC machines have
only a few (usually only one) memory reference formats. If an address is
valid at all, it is often just as cheap as any other address. Hence, for
- RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
- costs of various addresses. For two addresses of equal cost, choose the one
- with the highest `rtx_cost' value as that has the potential of eliminating
- the most insns. For equal costs, we choose the first in the equivalence
- class. Note that we ignore the fact that pseudo registers are cheaper
- than hard registers here because we would also prefer the pseudo registers.
- */
+ RISC machines, we use `address_cost' to compare the costs of various
+ addresses. For two addresses of equal cost, choose the one with the
+ highest `rtx_cost' value as that has the potential of eliminating the
+ most insns. For equal costs, we choose the first in the equivalence
+ class. Note that we ignore the fact that pseudo registers are cheaper than
+ hard registers here because we would also prefer the pseudo registers. */
static void
find_best_addr (insn, loc, mode)
{
struct table_elt *elt;
rtx addr = *loc;
-#ifdef ADDRESS_COST
struct table_elt *p;
int found_better = 1;
-#endif
int save_do_not_record = do_not_record;
int save_hash_arg_in_memory = hash_arg_in_memory;
int addr_volatile;
elt = lookup (addr, hash, Pmode);
-#ifndef ADDRESS_COST
- if (elt)
- {
- int our_cost = elt->cost;
-
- /* Find the lowest cost below ours that works. */
- for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
- if (elt->cost < our_cost
- && (GET_CODE (elt->exp) == REG
- || exp_equiv_p (elt->exp, elt->exp, 1, 0))
- && validate_change (insn, loc,
- canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
- return;
- }
-#else
-
if (elt)
{
/* We need to find the best (under the criteria documented above) entry
if (flag_expensive_optimizations
&& (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
|| GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
- && GET_CODE (XEXP (*loc, 0)) == REG
- && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
+ && GET_CODE (XEXP (*loc, 0)) == REG)
{
- rtx c = XEXP (*loc, 1);
+ rtx op1 = XEXP (*loc, 1);
do_not_record = 0;
hash = HASH (XEXP (*loc, 0), Pmode);
|| exp_equiv_p (p->exp, p->exp, 1, 0)))
{
rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
- p->exp, c);
+ p->exp, op1);
int new_cost;
new_cost = address_cost (new, mode);
}
}
}
-#endif
}
\f
/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
while (arg2 == CONST0_RTX (GET_MODE (arg1)))
{
- /* Set non-zero when we find something of interest. */
+ /* Set nonzero when we find something of interest. */
rtx x = 0;
int reverse_code = 0;
struct table_elt *p = 0;
else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
{
+#ifdef FLOAT_STORE_FLAG_VALUE
+ REAL_VALUE_TYPE fsfv;
+#endif
+
if (code == NE
|| (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
&& code == LT && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
|| (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
- && (REAL_VALUE_NEGATIVE
- (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
+ && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
+ REAL_VALUE_NEGATIVE (fsfv)))
#endif
)
x = arg1;
&& code == GE && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
|| (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
- && (REAL_VALUE_NEGATIVE
- (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
+ && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
+ REAL_VALUE_NEGATIVE (fsfv)))
#endif
)
x = arg1, reverse_code = 1;
for (; p; p = p->next_same_value)
{
enum machine_mode inner_mode = GET_MODE (p->exp);
+#ifdef FLOAT_STORE_FLAG_VALUE
+ REAL_VALUE_TYPE fsfv;
+#endif
/* If the entry isn't valid, skip it. */
if (! exp_equiv_p (p->exp, p->exp, 1, 0))
#ifdef FLOAT_STORE_FLAG_VALUE
|| (code == LT
&& GET_MODE_CLASS (inner_mode) == MODE_FLOAT
- && (REAL_VALUE_NEGATIVE
- (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
+ && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
+ REAL_VALUE_NEGATIVE (fsfv)))
#endif
)
&& GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
#ifdef FLOAT_STORE_FLAG_VALUE
|| (code == GE
&& GET_MODE_CLASS (inner_mode) == MODE_FLOAT
- && (REAL_VALUE_NEGATIVE
- (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
+ && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
+ REAL_VALUE_NEGATIVE (fsfv)))
#endif
)
&& GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
break;
}
- /* If this is fp + constant, the equivalent is a better operand since
- it may let us predict the value of the comparison. */
- else if (NONZERO_BASE_PLUS_P (p->exp))
+ /* If this non-trapping address, e.g. fp + constant, the
+ equivalent is a better operand since it may let us predict
+ the value of the comparison. */
+ else if (!rtx_addr_can_trap_p (p->exp))
{
arg1 = p->exp;
continue;
enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
if (reversed == UNKNOWN)
break;
- else code = reversed;
+ else
+ code = reversed;
}
else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
code = GET_CODE (x);
rtx x;
rtx insn;
{
- register enum rtx_code code;
- register enum machine_mode mode;
- register const char *fmt;
- register int i;
+ enum rtx_code code;
+ enum machine_mode mode;
+ const char *fmt;
+ int i;
rtx new = 0;
int copied = 0;
int must_swap = 0;
case CONST:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_VECTOR:
case SYMBOL_REF:
case LABEL_REF:
case REG:
case PC:
/* If the next insn is a CODE_LABEL followed by a jump table,
PC's value is a LABEL_REF pointing to that label. That
- lets us fold switch statements on the Vax. */
- if (insn && GET_CODE (insn) == JUMP_INSN)
- {
- rtx next = next_nonnote_insn (insn);
-
- if (next && GET_CODE (next) == CODE_LABEL
- && NEXT_INSN (next) != 0
- && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
- && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
- || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
- return gen_rtx_LABEL_REF (Pmode, next);
- }
+ lets us fold switch statements on the VAX. */
+ {
+ rtx next;
+ if (insn && tablejump_p (insn, &next, NULL))
+ return gen_rtx_LABEL_REF (Pmode, next);
+ }
break;
case SUBREG:
&& GET_CODE (elt->exp) != SIGN_EXTEND
&& GET_CODE (elt->exp) != ZERO_EXTEND
&& GET_CODE (XEXP (elt->exp, 0)) == SUBREG
- && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
+ && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
+ && (GET_MODE_CLASS (mode)
+ == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
{
rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
rtx cheap_arg, expensive_arg;
rtx replacements[2];
int j;
+ int old_cost = COST_IN (XEXP (x, i), code);
/* Most arguments are cheap, so handle them specially. */
switch (GET_CODE (arg))
case SYMBOL_REF:
case LABEL_REF:
case CONST_DOUBLE:
+ case CONST_VECTOR:
const_arg = arg;
break;
replacements[0] = cheap_arg;
replacements[1] = expensive_arg;
- for (j = 0; j < 2 && replacements[j]; j++)
+ for (j = 0; j < 2 && replacements[j]; j++)
{
- int old_cost = COST_IN (XEXP (x, i), code);
int new_cost = COST_IN (replacements[j], code);
/* Stop if what existed before was cheaper. Prefer constants
|| (GET_CODE (const_arg0) == CONST_INT
&& GET_CODE (const_arg1) != CONST_INT))))
{
- register rtx tem = XEXP (x, 0);
+ rtx tem = XEXP (x, 0);
if (insn == 0 && ! copied)
{
if (GET_MODE_CLASS (mode) == MODE_FLOAT)
{
true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
- (FLOAT_STORE_FLAG_VALUE (mode), mode));
+ (FLOAT_STORE_FLAG_VALUE (mode), mode));
false_rtx = CONST0_RTX (mode);
}
#endif
comparison. */
if (const_arg0 == 0 || const_arg1 == 0)
{
- /* Is FOLDED_ARG0 frame-pointer plus a constant? Or
- non-explicit constant? These aren't zero, but we
- don't know their sign. */
+ /* Some addresses are known to be nonzero. We don't know
+ their sign, but equality comparisons are known. */
if (const_arg1 == const0_rtx
- && (NONZERO_BASE_PLUS_P (folded_arg0)
-#if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
- come out as 0. */
- || GET_CODE (folded_arg0) == SYMBOL_REF
-#endif
- || GET_CODE (folded_arg0) == LABEL_REF
- || GET_CODE (folded_arg0) == CONST))
+ && nonzero_address_p (folded_arg0))
{
if (code == EQ)
return false_rtx;
& HASH_MASK), mode_arg0))
&& p0->first_same_value == p1->first_same_value))
{
- /* Sadly two equal NaNs are not equivalent. */
- if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
- || ! FLOAT_MODE_P (mode_arg0)
- || flag_unsafe_math_optimizations)
- return ((code == EQ || code == LE || code == GE
- || code == LEU || code == GEU || code == UNEQ
- || code == UNLE || code == UNGE || code == ORDERED)
- ? true_rtx : false_rtx);
- /* Take care for the FP compares we can resolve. */
- if (code == UNEQ || code == UNLE || code == UNGE)
- return true_rtx;
- if (code == LTGT || code == LT || code == GT)
- return false_rtx;
+ /* Sadly two equal NaNs are not equivalent. */
+ if (!HONOR_NANS (mode_arg0))
+ return ((code == EQ || code == LE || code == GE
+ || code == LEU || code == GEU || code == UNEQ
+ || code == UNLE || code == UNGE
+ || code == ORDERED)
+ ? true_rtx : false_rtx);
+ /* Take care for the FP compares we can resolve. */
+ if (code == UNEQ || code == UNLE || code == UNGE)
+ return true_rtx;
+ if (code == LTGT || code == LT || code == GT)
+ return false_rtx;
}
/* If FOLDED_ARG0 is a register, see if the comparison we are
CONST_INT, see if we can find a register equivalent to the
positive constant. Make a MINUS if so. Don't do this for
a non-negative constant since we might then alternate between
- chosing positive and negative constants. Having the positive
+ choosing positive and negative constants. Having the positive
constant previously-used is the more common case. Be sure
the resulting constant is non-negative; if const_arg1 were
the smallest negative number this would overflow: depending
from_plus:
case SMIN: case SMAX: case UMIN: case UMAX:
case IOR: case AND: case XOR:
- case MULT: case DIV: case UDIV:
+ case MULT:
case ASHIFT: case LSHIFTRT: case ASHIFTRT:
/* If we have (<op> <reg> <const_int>) for an associative OP and REG
is known to be of similar form, we may be able to replace the
break;
/* Compute the code used to compose the constants. For example,
- A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
+ A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
- associate_code
- = (code == MULT || code == DIV || code == UDIV ? MULT
- : is_shift || code == PLUS || code == MINUS ? PLUS : code);
+ associate_code = (is_shift || code == MINUS ? PLUS : code);
new_const = simplify_binary_operation (associate_code, mode,
const_arg1, inner_const);
}
break;
+ case DIV: case UDIV:
+ /* ??? The associative optimization performed immediately above is
+ also possible for DIV and UDIV using associate_code of MULT.
+ However, we would need extra code to verify that the
+ multiplication does not overflow, that is, there is no overflow
+ in the calculation of new_const. */
+ break;
+
default:
break;
}
break;
case 'x':
- /* Always eliminate CONSTANT_P_RTX at this stage. */
+ /* Eliminate CONSTANT_P_RTX if it's constant. */
if (code == CONSTANT_P_RTX)
- return (const_arg0 ? const1_rtx : const0_rtx);
+ {
+ if (const_arg0)
+ return const1_rtx;
+ if (optimize == 0 || !flag_gcse)
+ return const0_rtx;
+ }
break;
}
rtx
gen_lowpart_if_possible (mode, x)
enum machine_mode mode;
- register rtx x;
+ rtx x;
{
rtx result = gen_lowpart_common (mode, x);
else if (GET_CODE (x) == MEM)
{
/* This is the only other case we handle. */
- register int offset = 0;
+ int offset = 0;
rtx new;
if (WORDS_BIG_ENDIAN)
/* Nonzero if the SET_SRC contains something
whose value cannot be predicted and understood. */
char src_volatile;
- /* Original machine mode, in case it becomes a CONST_INT. */
- enum machine_mode mode;
+ /* Original machine mode, in case it becomes a CONST_INT.
+ The size of this field should match the size of the mode
+ field of struct rtx_def (see rtl.h). */
+ ENUM_BITFIELD(machine_mode) mode : 8;
/* A constant equivalent for SET_SRC, if any. */
rtx src_const;
/* Original SET_SRC value used for libcall notes. */
rtx insn;
rtx libcall_insn;
{
- register rtx x = PATTERN (insn);
- register int i;
+ rtx x = PATTERN (insn);
+ int i;
rtx tem;
- register int n_sets = 0;
+ int n_sets = 0;
#ifdef HAVE_cc0
/* Records what this insn does to set CC0. */
}
else if (GET_CODE (x) == PARALLEL)
{
- register int lim = XVECLEN (x, 0);
+ int lim = XVECLEN (x, 0);
sets = (struct set *) alloca (lim * sizeof (struct set));
anything in that case. */
for (i = 0; i < lim; i++)
{
- register rtx y = XVECEXP (x, 0, i);
+ rtx y = XVECEXP (x, 0, i);
if (GET_CODE (y) == CLOBBER)
{
rtx clobbered = XEXP (y, 0);
for (i = 0; i < lim; i++)
{
- register rtx y = XVECEXP (x, 0, i);
+ rtx y = XVECEXP (x, 0, i);
if (GET_CODE (y) == SET)
{
/* As above, we ignore unconditional jumps and call-insns and
&& (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
&& (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
|| GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
- src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
+ {
+ src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
+ XEXP (tem, 0) = src_eqv;
+ }
/* Canonicalize sources and addresses of destinations.
We do this in a separate pass to avoid problems when a MATCH_DUP is
for (i = 0; i < n_sets; i++)
{
- register rtx src, dest;
- register rtx src_folded;
- register struct table_elt *elt = 0, *p;
+ rtx src, dest;
+ rtx src_folded;
+ struct table_elt *elt = 0, *p;
enum machine_mode mode;
rtx src_eqv_here;
rtx src_const = 0;
int src_folded_regcost = MAX_COST;
int src_related_regcost = MAX_COST;
int src_elt_regcost = MAX_COST;
- /* Set non-zero if we need to call force_const_mem on with the
+ /* Set nonzero if we need to call force_const_mem on with the
contents of src_folded before using it. */
int src_folded_force_flag = 0;
eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
do_not_record = 0;
hash_arg_in_memory = 0;
- src_eqv = fold_rtx (src_eqv, insn);
src_eqv_hash = HASH (src_eqv, eqvmode);
/* Find the equivalence class for the equivalent expression. */
if (src == src_folded)
src_folded = 0;
- /* At this point, ELT, if non-zero, points to a class of expressions
+ /* At this point, ELT, if nonzero, points to a class of expressions
equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
- and SRC_RELATED, if non-zero, each contain additional equivalent
+ and SRC_RELATED, if nonzero, each contain additional equivalent
expressions. Prune these latter expressions by deleting expressions
already in the equivalence class.
continue;
}
- if (elt)
+ if (elt)
{
src_elt_cost = elt->cost;
src_elt_regcost = elt->regcost;
}
- /* Find cheapest and skip it for the next time. For items
+ /* Find cheapest and skip it for the next time. For items
of equal cost, use this order:
src_folded, src, src_eqv, src_related and hash table entry. */
if (src_folded
else if (src_related
&& preferrable (src_related_cost, src_related_regcost,
src_elt_cost, src_elt_regcost) <= 0)
- trial = copy_rtx (src_related), src_related_cost = MAX_COST;
+ trial = copy_rtx (src_related), src_related_cost = MAX_COST;
else
{
trial = copy_rtx (elt->exp);
/* Look for a substitution that makes a valid insn. */
else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
{
+ rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
+
/* If we just made a substitution inside a libcall, then we
need to make the same substitution in any notes attached
to the RETVAL insn. */
&& (GET_CODE (sets[i].orig_src) == REG
|| GET_CODE (sets[i].orig_src) == SUBREG
|| GET_CODE (sets[i].orig_src) == MEM))
- replace_rtx (REG_NOTES (libcall_insn), sets[i].orig_src,
- canon_reg (SET_SRC (sets[i].rtl), insn));
+ simplify_replace_rtx (REG_NOTES (libcall_insn),
+ sets[i].orig_src, copy_rtx (new));
/* The result of apply_change_group can be ignored; see
canon_reg. */
- validate_change (insn, &SET_SRC (sets[i].rtl),
- canon_reg (SET_SRC (sets[i].rtl), insn),
- 1);
+ validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
apply_change_group ();
break;
}
&& GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
&& GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
{
- tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
-
- /* Make sure that the rtx is not shared with any other insn. */
- src_const = copy_rtx (src_const);
+ /* We only want a REG_EQUAL note if src_const != src. */
+ if (! rtx_equal_p (src, src_const))
+ {
+ /* Make sure that the rtx is not shared. */
+ src_const = copy_rtx (src_const);
- /* Record the actual constant value in a REG_EQUAL note, making
- a new one if one does not already exist. */
- if (tem)
- XEXP (tem, 0) = src_const;
- else
- REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
- src_const, REG_NOTES (insn));
+ /* Record the actual constant value in a REG_EQUAL note,
+ making a new one if one does not already exist. */
+ set_unique_reg_note (insn, REG_EQUAL, src_const);
+ }
- /* If storing a constant value in a register that
+ /* If storing a constant value in a register that
previously held the constant value 0,
record this fact with a REG_WAS_0 note on this insn.
else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
{
/* One less use of the label this insn used to jump to. */
- if (JUMP_LABEL (insn) != 0)
- --LABEL_NUSES (JUMP_LABEL (insn));
- PUT_CODE (insn, NOTE);
- NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
- NOTE_SOURCE_FILE (insn) = 0;
+ delete_insn (insn);
cse_jumps_altered = 1;
/* No more processing for this set. */
sets[i].rtl = 0;
be a conditional or computed branch. */
else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
{
+ /* Now emit a BARRIER after the unconditional jump. */
+ if (NEXT_INSN (insn) == 0
+ || GET_CODE (NEXT_INSN (insn)) != BARRIER)
+ emit_barrier_after (insn);
+
/* We reemit the jump in as many cases as possible just in
case the form of an unconditional jump is significantly
different than a computed jump or conditional jump.
and hope for the best. */
if (n_sets == 1)
{
- rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
+ rtx new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
+
JUMP_LABEL (new) = XEXP (src, 0);
LABEL_NUSES (XEXP (src, 0))++;
+ delete_insn (insn);
insn = new;
+
+ /* Now emit a BARRIER after the unconditional jump. */
+ if (NEXT_INSN (insn) == 0
+ || GET_CODE (NEXT_INSN (insn)) != BARRIER)
+ emit_barrier_after (insn);
}
else
INSN_CODE (insn) = -1;
- never_reached_warning (insn);
+ never_reached_warning (insn, NULL);
- /* Now emit a BARRIER after the unconditional jump. Do not bother
- deleting any unreachable code, let jump/flow do that. */
- if (NEXT_INSN (insn) != 0
- && GET_CODE (NEXT_INSN (insn)) != BARRIER)
- emit_barrier_after (insn);
+ /* Do not bother deleting any unreachable code,
+ let jump/flow do that. */
cse_jumps_altered = 1;
sets[i].rtl = 0;
if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
&& ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
{
- register struct table_elt *elt;
- register struct table_elt *classp = sets[0].src_elt;
+ struct table_elt *elt;
+ struct table_elt *classp = sets[0].src_elt;
rtx dest = SET_DEST (sets[0].rtl);
enum machine_mode eqvmode = GET_MODE (dest);
{
/* Insert source and constant equivalent into hash table, if not
already present. */
- register struct table_elt *classp = src_eqv_elt;
- register rtx src = sets[i].src;
- register rtx dest = SET_DEST (sets[i].rtl);
+ struct table_elt *classp = src_eqv_elt;
+ rtx src = sets[i].src;
+ rtx dest = SET_DEST (sets[i].rtl);
enum machine_mode mode
= GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
/* Don't put a hard register source into the table if this is
the last insn of a libcall. In this case, we only need
to put src_eqv_elt in src_elt. */
- if (GET_CODE (src) != REG
- || REGNO (src) >= FIRST_PSEUDO_REGISTER
- || ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
+ if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
{
- register struct table_elt *elt;
+ struct table_elt *elt;
/* Note that these insert_regs calls cannot remove
any of the src_elt's, because they would have failed to
if (GET_CODE (insn) == CALL_INSN)
{
- if (! CONST_CALL_P (insn))
+ if (! CONST_OR_PURE_CALL_P (insn))
invalidate_memory ();
invalidate_for_call ();
}
{
/* We can't use the inner dest, because the mode associated with
a ZERO_EXTRACT is significant. */
- register rtx dest = SET_DEST (sets[i].rtl);
+ rtx dest = SET_DEST (sets[i].rtl);
/* Needed for registers to remove the register from its
previous quantity's chain.
/* If elt was removed, find current head of same class,
or 0 if nothing remains of that class. */
{
- register struct table_elt *elt = sets[i].src_elt;
+ struct table_elt *elt = sets[i].src_elt;
while (elt && elt->prev_same_value)
elt = elt->prev_same_value;
for (i = 0; i < n_sets; i++)
if (sets[i].rtl)
{
- register rtx dest = SET_DEST (sets[i].rtl);
+ rtx dest = SET_DEST (sets[i].rtl);
rtx inner_dest = sets[i].inner_dest;
- register struct table_elt *elt;
+ struct table_elt *elt;
/* Don't record value if we are not supposed to risk allocating
floating-point values in registers that might be wider than
elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
&& (! RTX_UNCHANGING_P (sets[i].inner_dest)
- || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
+ || fixed_base_plus_p (XEXP (sets[i].inner_dest,
0))));
/* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
rtx new_src = 0;
unsigned src_hash;
struct table_elt *src_elt;
+ int byte = 0;
/* Ignore invalid entries. */
if (GET_CODE (elt->exp) != REG
&& ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
continue;
- new_src = gen_lowpart_if_possible (new_mode, elt->exp);
- if (new_src == 0)
- new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
+ /* We may have already been playing subreg games. If the
+ mode is already correct for the destination, use it. */
+ if (GET_MODE (elt->exp) == new_mode)
+ new_src = elt->exp;
+ else
+ {
+ /* Calculate big endian correction for the SUBREG_BYTE.
+ We have already checked that M1 (GET_MODE (dest))
+ is not narrower than M2 (new_mode). */
+ if (BYTES_BIG_ENDIAN)
+ byte = (GET_MODE_SIZE (GET_MODE (dest))
+ - GET_MODE_SIZE (new_mode));
+
+ new_src = simplify_gen_subreg (new_mode, elt->exp,
+ GET_MODE (dest), byte);
+ }
+
+ /* The call to simplify_gen_subreg fails if the value
+ is VOIDmode, yet we can't do any simplification, e.g.
+ for EXPR_LISTs denoting function call results.
+ It is invalid to construct a SUBREG with a VOIDmode
+ SUBREG_REG, hence a zero new_src means we can't do
+ this substitution. */
+ if (! new_src)
+ continue;
src_hash = HASH (new_src, new_mode);
src_elt = lookup (new_src, src_hash, new_mode);
if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
&& ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
{
- rtx prev = prev_nonnote_insn (insn);
-
+ rtx prev = insn;
+ /* Scan for the previous nonnote insn, but stop at a basic
+ block boundary. */
+ do
+ {
+ prev = PREV_INSN (prev);
+ }
+ while (prev && GET_CODE (prev) == NOTE
+ && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
+
/* Do not swap the registers around if the previous instruction
attaches a REG_EQUIV note to REG1.
This section previously turned the REG_EQUIV into a REG_EQUAL
note. We cannot do that because REG_EQUIV may provide an
- uninitialised stack slot when REG_PARM_STACK_SPACE is used. */
+ uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
if (prev != 0 && GET_CODE (prev) == INSN
&& GET_CODE (PATTERN (prev)) == SET
&& (tem = single_set (prev_insn)) != 0
&& SET_DEST (tem) == cc0_rtx
&& ! reg_mentioned_p (cc0_rtx, x))
- {
- PUT_CODE (prev_insn, NOTE);
- NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
- NOTE_SOURCE_FILE (prev_insn) = 0;
- }
+ delete_insn (prev_insn);
prev_insn_cc0 = this_insn_cc0;
prev_insn_cc0_mode = this_insn_cc0_mode;
-#endif
-
prev_insn = insn;
+#endif
}
\f
/* Remove from the hash table all expressions that reference memory. */
static void
invalidate_memory ()
{
- register int i;
- register struct table_elt *p, *next;
+ int i;
+ struct table_elt *p, *next;
for (i = 0; i < HASH_SIZE; i++)
for (p = table[i]; p; p = next)
static int
addr_affects_sp_p (addr)
- register rtx addr;
+ rtx addr;
{
if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
&& GET_CODE (XEXP (addr, 0)) == REG
&& REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
{
if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
- REG_TICK (STACK_POINTER_REGNUM)++;
+ {
+ REG_TICK (STACK_POINTER_REGNUM)++;
+ /* Is it possible to use a subreg of SP? */
+ SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
+ }
/* This should be *very* rare. */
if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
}
else if (GET_CODE (x) == PARALLEL)
{
- register int i;
+ int i;
for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
{
- register rtx y = XVECEXP (x, 0, i);
+ rtx y = XVECEXP (x, 0, i);
if (GET_CODE (y) == CLOBBER)
{
rtx ref = XEXP (y, 0);
case SYMBOL_REF:
case LABEL_REF:
case CONST_DOUBLE:
+ case CONST_VECTOR:
case PC:
case CC0:
case LO_SUM:
return x;
case MEM:
- XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
+ validate_change (x, &XEXP (x, 0),
+ cse_process_notes (XEXP (x, 0), x), 0);
return x;
case EXPR_LIST:
}
if (GET_CODE (set) == CLOBBER
-#ifdef HAVE_cc0
- || dest == cc0_rtx
-#endif
+ || CC0_P (dest)
|| dest == pc_rtx)
return;
if (GET_CODE (insn) == CALL_INSN)
{
- if (! CONST_CALL_P (insn))
+ if (! CONST_OR_PURE_CALL_P (insn))
invalidate_memory ();
invalidate_for_call ();
}
SET_SRC, add an insn after P to copy its destination
to what we will be replacing SET_SRC with. */
if (cse_check_loop_start_value
+ && single_set (p)
+ && !can_throw_internal (insn)
&& validate_change (insn, &SET_SRC (x),
src_elt->exp, 0))
{
the total number of SETs in all the insns of the block, the last insn of the
block, and the branch path.
- The branch path indicates which branches should be followed. If a non-zero
+ The branch path indicates which branches should be followed. If a nonzero
path size is specified, the block should be rescanned and a different set
of branches will be taken. The branch path is only used if
- FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
+ FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
DATA is a pointer to a struct cse_basic_block_data, defined below, that is
used to describe the block. It is filled in with the information about
/* Update the previous branch path, if any. If the last branch was
previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
shorten the path by one and look at the previous branch. We know that
- at least one branch must have been taken if PATH_SIZE is non-zero. */
+ at least one branch must have been taken if PATH_SIZE is nonzero. */
while (path_size > 0)
{
if (data->path[path_size - 1].status != NOT_TAKEN)
&& NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
break;
- /* Don't cse over a call to setjmp; on some machines (eg vax)
+ /* Don't cse over a call to setjmp; on some machines (eg VAX)
the regs restored by the longjmp come from
a later time than the setjmp. */
- if (GET_CODE (p) == NOTE
- && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
+ if (PREV_INSN (p) && GET_CODE (PREV_INSN (p)) == CALL_INSN
+ && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
break;
/* A PARALLEL can have lots of SETs in it,
In this case invalidate_skipped_block will be called to invalidate any
registers set in the block when following the jump. */
- else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
+ else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
&& GET_CODE (p) == JUMP_INSN
&& GET_CODE (PATTERN (p)) == SET
&& GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
if ((GET_CODE (q) != NOTE
|| NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
- || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
+ || (PREV_INSN (q) && GET_CODE (PREV_INSN (q)) == CALL_INSN
+ && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
&& (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
break;
/* Detect a branch around a block of code. */
else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
{
- register rtx tmp;
+ rtx tmp;
if (next_real_insn (q) == next)
{
FILE *file;
{
struct cse_basic_block_data val;
- register rtx insn = f;
- register int i;
+ rtx insn = f;
+ int i;
+
+ val.path = xmalloc (sizeof (struct branch_path)
+ * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
cse_jumps_altered = 0;
recorded_label_ref = 0;
end_alias_analysis ();
free (uid_cuid);
free (reg_eqv_table);
+ free (val.path);
return cse_jumps_altered || recorded_label_ref;
}
block. NEXT_BRANCH points to the branch path when following jumps or
a null path when not following jumps.
- AROUND_LOOP is non-zero if we are to try to cse around to the start of a
+ AROUND_LOOP is nonzero if we are to try to cse around to the start of a
loop. This is true when we are being called for the last time on a
block and this CSE pass is before loop.c. */
static rtx
cse_basic_block (from, to, next_branch, around_loop)
- register rtx from, to;
+ rtx from, to;
struct branch_path *next_branch;
int around_loop;
{
- register rtx insn;
+ rtx insn;
int to_usage = 0;
rtx libcall_insn = NULL_RTX;
int num_insns = 0;
for (insn = from; insn != to; insn = NEXT_INSN (insn))
{
- register enum rtx_code code = GET_CODE (insn);
+ enum rtx_code code = GET_CODE (insn);
/* If we have processed 1,000 insns, flush the hash table to
avoid extreme quadratic behavior. We must not include NOTEs
Then follow this branch. */
#ifdef HAVE_cc0
prev_insn_cc0 = 0;
-#endif
prev_insn = insn;
+#endif
insn = JUMP_LABEL (insn);
continue;
}
following branches in this case. */
to_usage = 0;
val.path_size = 0;
+ val.path = xmalloc (sizeof (struct branch_path)
+ * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
cse_end_of_basic_block (insn, &val, 0, 0, 0);
+ free (val.path);
/* If the tables we allocated have enough space left
to handle all the SETs in the next basic block,
we can cse into the loop. Don't do this if we changed the jump
structure of a loop unless we aren't going to be following jumps. */
+ insn = prev_nonnote_insn (to);
if ((cse_jumps_altered == 0
|| (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
&& around_loop && to != 0
&& GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
- && GET_CODE (PREV_INSN (to)) == JUMP_INSN
- && JUMP_LABEL (PREV_INSN (to)) != 0
- && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
- cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
+ && GET_CODE (insn) == JUMP_INSN
+ && JUMP_LABEL (insn) != 0
+ && LABEL_NUSES (JUMP_LABEL (insn)) == 1)
+ cse_around_loop (JUMP_LABEL (insn));
free (qty_table + max_reg);
}
\f
/* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
- there isn't a REG_DEAD note. Return one if so. DATA is the insn. */
+ there isn't a REG_LABEL note. Return one if so. DATA is the insn. */
static int
check_for_label_ref (rtl, data)
/* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
we must rerun jump since it needs to place the note. If this is a
LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
- since no REG_LABEL will be added. */
+ since no REG_LABEL will be added. */
return (GET_CODE (*rtl) == LABEL_REF
+ && ! LABEL_REF_NONLOCAL_P (*rtl)
+ && LABEL_P (XEXP (*rtl, 0))
&& INSN_UID (XEXP (*rtl, 0)) != 0
&& ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
}
int incr;
{
enum rtx_code code;
+ rtx note;
const char *fmt;
int i, j;
case CONST:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_VECTOR:
case SYMBOL_REF:
case LABEL_REF:
return;
/* Unless we are setting a REG, count everything in SET_DEST. */
if (GET_CODE (SET_DEST (x)) != REG)
count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
-
- /* If SRC has side-effects, then we can't delete this insn, so the
- usage of SET_DEST inside SRC counts.
-
- ??? Strictly-speaking, we might be preserving this insn
- because some other SET has side-effects, but that's hard
- to do and can't happen now. */
count_reg_usage (SET_SRC (x), counts,
- side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
+ SET_DEST (x),
incr);
return;
/* Things used in a REG_EQUAL note aren't dead since loop may try to
use them. */
- count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
+ note = find_reg_equal_equiv_note (x);
+ if (note)
+ {
+ rtx eqv = XEXP (note, 0);
+
+ if (GET_CODE (eqv) == EXPR_LIST)
+ /* This REG_EQUAL note describes the result of a function call.
+ Process all the arguments. */
+ do
+ {
+ count_reg_usage (XEXP (eqv, 0), counts, NULL_RTX, incr);
+ eqv = XEXP (eqv, 1);
+ }
+ while (eqv && GET_CODE (eqv) == EXPR_LIST);
+ else
+ count_reg_usage (eqv, counts, NULL_RTX, incr);
+ }
return;
case EXPR_LIST:
- case INSN_LIST:
if (REG_NOTE_KIND (x) == REG_EQUAL
- || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE))
+ || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
+ /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
+ involving registers in the address. */
+ || GET_CODE (XEXP (x, 0)) == CLOBBER)
count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
+
count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
return;
+ case INSN_LIST:
+ abort ();
+
default:
break;
}
static bool
set_live_p (set, insn, counts)
rtx set;
- rtx insn;
+ rtx insn ATTRIBUTE_UNUSED; /* Only used with HAVE_cc0. */
int *counts;
{
#ifdef HAVE_cc0
int *counts;
{
int i;
- if (GET_CODE (PATTERN (insn)) == SET)
+ if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
+ return true;
+ else if (GET_CODE (PATTERN (insn)) == SET)
return set_live_p (PATTERN (insn), insn, counts);
else if (GET_CODE (PATTERN (insn)) == PARALLEL)
{
/* Return true if libcall is dead as a whole. */
static bool
-dead_libcall_p (insn)
+dead_libcall_p (insn, counts)
rtx insn;
+ int *counts;
{
- rtx note;
+ rtx note, set, new;
+
/* See if there's a REG_EQUAL note on this insn and try to
replace the source with the REG_EQUAL expression.
We assume that insns with REG_RETVALs can only be reg->reg
copies at this point. */
note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
- if (note)
- {
- rtx set = single_set (insn);
- rtx new = simplify_rtx (XEXP (note, 0));
+ if (!note)
+ return false;
+
+ set = single_set (insn);
+ if (!set)
+ return false;
- if (!new)
- new = XEXP (note, 0);
+ new = simplify_rtx (XEXP (note, 0));
+ if (!new)
+ new = XEXP (note, 0);
- if (set && validate_change (insn, &SET_SRC (set), new, 0))
+ /* While changing insn, we must update the counts accordingly. */
+ count_reg_usage (insn, counts, NULL_RTX, -1);
+
+ if (validate_change (insn, &SET_SRC (set), new, 0))
+ {
+ count_reg_usage (insn, counts, NULL_RTX, 1);
+ remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
+ remove_note (insn, note);
+ return true;
+ }
+
+ if (CONSTANT_P (new))
+ {
+ new = force_const_mem (GET_MODE (SET_DEST (set)), new);
+ if (new && validate_change (insn, &SET_SRC (set), new, 0))
{
+ count_reg_usage (insn, counts, NULL_RTX, 1);
remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
+ remove_note (insn, note);
return true;
}
}
+
+ count_reg_usage (insn, counts, NULL_RTX, 1);
return false;
}
move dead invariants out of loops or make givs for dead quantities. The
remaining passes of the compilation are also sped up. */
-void
-delete_trivially_dead_insns (insns, nreg, preserve_basic_blocks)
+int
+delete_trivially_dead_insns (insns, nreg)
rtx insns;
int nreg;
- int preserve_basic_blocks;
{
int *counts;
rtx insn, prev;
- int i;
int in_libcall = 0, dead_libcall = 0;
- basic_block bb;
+ int ndead = 0, nlastdead, niterations = 0;
+ timevar_push (TV_DELETE_TRIVIALLY_DEAD);
/* First count the number of times each register is used. */
counts = (int *) xcalloc (nreg, sizeof (int));
for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
count_reg_usage (insn, counts, NULL_RTX, 1);
- /* Go from the last insn to the first and delete insns that only set unused
- registers or copy a register to itself. As we delete an insn, remove
- usage counts for registers it uses.
-
- The first jump optimization pass may leave a real insn as the last
- insn in the function. We must not skip that insn or we may end
- up deleting code that is not really dead. */
- insn = get_last_insn ();
- if (! INSN_P (insn))
- insn = prev_real_insn (insn);
-
- if (!preserve_basic_blocks)
- for (; insn; insn = prev)
- {
- int live_insn = 0;
-
- prev = prev_real_insn (insn);
-
- /* Don't delete any insns that are part of a libcall block unless
- we can delete the whole libcall block.
-
- Flow or loop might get confused if we did that. Remember
- that we are scanning backwards. */
- if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
- {
- in_libcall = 1;
- live_insn = 1;
- dead_libcall = dead_libcall_p (insn);
- }
- else if (in_libcall)
- live_insn = ! dead_libcall;
- else
- live_insn = insn_live_p (insn, counts);
-
- /* If this is a dead insn, delete it and show registers in it aren't
- being used. */
-
- if (! live_insn)
- {
- count_reg_usage (insn, counts, NULL_RTX, -1);
- delete_insn (insn);
- }
+ do
+ {
+ nlastdead = ndead;
+ niterations++;
+ /* Go from the last insn to the first and delete insns that only set unused
+ registers or copy a register to itself. As we delete an insn, remove
+ usage counts for registers it uses.
+
+ The first jump optimization pass may leave a real insn as the last
+ insn in the function. We must not skip that insn or we may end
+ up deleting code that is not really dead. */
+ insn = get_last_insn ();
+ if (! INSN_P (insn))
+ insn = prev_real_insn (insn);
- if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
- {
- in_libcall = 0;
- dead_libcall = 0;
- }
- }
- else
- for (i = 0; i < n_basic_blocks; i++)
- for (bb = BASIC_BLOCK (i), insn = bb->end; insn != bb->head; insn = prev)
+ for (; insn; insn = prev)
{
int live_insn = 0;
- prev = PREV_INSN (insn);
- if (!INSN_P (insn))
- continue;
+ prev = prev_real_insn (insn);
/* Don't delete any insns that are part of a libcall block unless
we can delete the whole libcall block.
{
in_libcall = 1;
live_insn = 1;
- dead_libcall = dead_libcall_p (insn);
+ dead_libcall = dead_libcall_p (insn, counts);
}
else if (in_libcall)
live_insn = ! dead_libcall;
if (! live_insn)
{
count_reg_usage (insn, counts, NULL_RTX, -1);
- if (insn == bb->end)
- bb->end = PREV_INSN (insn);
- flow_delete_insn (insn);
+ delete_insn_and_edges (insn);
+ ndead++;
}
if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
dead_libcall = 0;
}
}
+ }
+ while (ndead != nlastdead);
+ if (rtl_dump_file && ndead)
+ fprintf (rtl_dump_file, "Deleted %i trivially dead insns; %i iterations\n",
+ ndead, niterations);
/* Clean up. */
free (counts);
+ timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
+ return ndead;
}