/* Common subexpression elimination for GNU compiler.
Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
- 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
+ 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
This file is part of GCC.
#include "system.h"
#include "coretypes.h"
#include "tm.h"
-
#include "rtl.h"
#include "tm_p.h"
-#include "regs.h"
#include "hard-reg-set.h"
+#include "regs.h"
#include "basic-block.h"
#include "flags.h"
#include "real.h"
so that it is possible to find out if there exists any
register equivalent to an expression related to a given expression. */
-/* One plus largest register number used in this function. */
-
-static int max_reg;
-
-/* One plus largest instruction UID used in this function at time of
- cse_main call. */
-
-static int max_insn_uid;
-
/* Length of qty_table vector. We know in advance we will not need
a quantity number this big. */
/* The table of all qtys, indexed by qty number. */
static struct qty_table_elem *qty_table;
+/* Structure used to pass arguments via for_each_rtx to function
+ cse_change_cc_mode. */
+struct change_cc_mode_args
+{
+  /* The insn in which the replacement takes place; passed to
+     validate_change so the change can be verified.  */
+  rtx insn;
+  /* The replacement CC register, carrying the new mode; occurrences of
+     the same register number in a different mode are replaced by it.  */
+  rtx newreg;
+};
+
#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
table since its use is guaranteed to be the insn immediately following
struct cse_reg_info
{
- /* Next in hash chain. */
- struct cse_reg_info *hash_next;
-
- /* The next cse_reg_info structure in the free or used list. */
- struct cse_reg_info *next;
-
- /* Search key */
- unsigned int regno;
+ /* The timestamp at which this register is initialized. */
+ unsigned int timestamp;
/* The quantity number of the register's current contents. */
int reg_qty;
unsigned int subreg_ticked;
};
-/* A free list of cse_reg_info entries. */
-static struct cse_reg_info *cse_reg_info_free_list;
-
-/* A used list of cse_reg_info entries. */
-static struct cse_reg_info *cse_reg_info_used_list;
-static struct cse_reg_info *cse_reg_info_used_list_end;
+/* A table of cse_reg_info indexed by register numbers. */
+struct cse_reg_info *cse_reg_info_table;
-/* A mapping from registers to cse_reg_info data structures. */
-#define REGHASH_SHIFT 7
-#define REGHASH_SIZE (1 << REGHASH_SHIFT)
-#define REGHASH_MASK (REGHASH_SIZE - 1)
-static struct cse_reg_info *reg_hash[REGHASH_SIZE];
+/* The size of the above table. */
+static unsigned int cse_reg_info_table_size;
-#define REGHASH_FN(REGNO) \
- (((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
+/* The index of the first entry that has not been initialized. */
+static unsigned int cse_reg_info_table_first_uninitialized;
-/* The last lookup we did into the cse_reg_info_tree. This allows us
- to cache repeated lookups. */
-static unsigned int cached_regno;
-static struct cse_reg_info *cached_cse_reg_info;
+/* The timestamp at the beginning of the current run of
+   cse_basic_block.  We increment this variable at the beginning of
+   the current run of cse_basic_block.  The timestamp field of a
+   cse_reg_info entry matches the value of this variable if and only
+   if the entry has been initialized during the current run of
+   cse_basic_block.  */
+static unsigned int cse_reg_info_timestamp;
/* A HARD_REG_SET containing all the hard registers for which there is
currently a REG expression in the hash table. Note the difference
of 0. Next come pseudos with a cost of one and other hard registers with
a cost of 2. Aside from these special cases, call `rtx_cost'. */
-#define CHEAP_REGNO(N) \
- ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
- || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
- || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
- || ((N) < FIRST_PSEUDO_REGISTER \
+#define CHEAP_REGNO(N) \
+ (REGNO_PTR_FRAME_P(N) \
+ || (HARD_REGISTER_NUM_P (N) \
&& FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
#define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
#define COST_IN(X,OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))
-/* Get the info associated with register N. */
-
-#define GET_CSE_REG_INFO(N) \
- (((N) == cached_regno && cached_cse_reg_info) \
- ? cached_cse_reg_info : get_cse_reg_info ((N)))
-
/* Get the number of times this register has been updated in this
basic block. */
-#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)
+#define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)
/* Get the point at which REG was recorded in the table. */
-#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)
+#define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)
/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
SUBREG). */
-#define SUBREG_TICKED(N) ((GET_CSE_REG_INFO (N))->subreg_ticked)
+#define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)
/* Get the quantity number for REG. */
-#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)
+#define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)
/* Determine if the quantity number for register X represents a valid index
into the qty_table. */
static struct table_elt *free_element_chain;
-/* Number of `struct table_elt' structures made so far for this function. */
-
-static int n_elements_made;
-
-/* Maximum value `n_elements_made' has had so far in this compilation
- for functions previously processed. */
-
-static int max_elements_made;
-
/* Set to the cost of a constant pool reference if one was found for a
symbolic constant. If this was found, it means we should try to
convert constants into constant pool entries if they don't fit in
static void count_reg_usage (rtx, int *, int);
static int check_for_label_ref (rtx *, void *);
extern void dump_class (struct table_elt*);
-static struct cse_reg_info * get_cse_reg_info (unsigned int);
+static void get_cse_reg_info_1 (unsigned int regno);
+static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
static int check_dependence (rtx *, void *);
static void flush_hash_table (void);
static bool set_live_p (rtx, rtx, int *);
static bool dead_libcall_p (rtx, int *);
static int cse_change_cc_mode (rtx *, void *);
+static void cse_change_cc_mode_insn (rtx, rtx);
static void cse_change_cc_mode_insns (rtx, rtx, rtx);
static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);
\f
return cost;
}
+/* Returns a canonical version of X for the address, from the point of view
+   that all multiplications are represented as MULT instead of the multiply
+   by a power of 2 being represented as ASHIFT.  */
+
+static rtx
+canon_for_address (rtx x)
+{
+  enum rtx_code code;
+  enum machine_mode mode;
+  rtx new = 0;
+  int i;
+  const char *fmt;
+
+  /* NULL is passed through unchanged.  */
+  if (!x)
+    return x;
+
+  code = GET_CODE (x);
+  mode = GET_MODE (x);
+
+  switch (code)
+    {
+    case ASHIFT:
+      /* An ASHIFT by a constant, in-range, non-negative amount is
+	 rewritten as the equivalent MULT by a power of 2; the shifted
+	 operand is canonicalized first.  */
+      if (GET_CODE (XEXP (x, 1)) == CONST_INT
+	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode)
+	  && INTVAL (XEXP (x, 1)) >= 0)
+	{
+	  new = canon_for_address (XEXP (x, 0));
+	  new = gen_rtx_MULT (mode, new,
+			      gen_int_mode ((HOST_WIDE_INT) 1
+					    << INTVAL (XEXP (x, 1)),
+					    mode));
+	}
+      break;
+    default:
+      break;
+
+    }
+  if (new)
+    return new;
+
+  /* Now recursively process each operand of this operation.  Note
+     that X is modified in place here.  */
+  fmt = GET_RTX_FORMAT (code);
+  for (i = 0; i < GET_RTX_LENGTH (code); i++)
+    if (fmt[i] == 'e')
+      {
+	new = canon_for_address (XEXP (x, i));
+	XEXP (x, i) = new;
+      }
+  return x;
+}
+
/* Return a negative value if an rtx A, whose costs are given by COST_A
and REGCOST_A, is more desirable than an rtx B.
Return a positive value if A is less desirable, or 0 if the two are
}
\f
-static struct cse_reg_info *
-get_cse_reg_info (unsigned int regno)
-{
- struct cse_reg_info **hash_head = ®_hash[REGHASH_FN (regno)];
- struct cse_reg_info *p;
-
- for (p = *hash_head; p != NULL; p = p->hash_next)
- if (p->regno == regno)
- break;
+/* Initialize CSE_REG_INFO_TABLE. */
- if (p == NULL)
+static void
+init_cse_reg_info (unsigned int nregs)
+{
+ /* Do we need to grow the table? */
+ if (nregs > cse_reg_info_table_size)
{
- /* Get a new cse_reg_info structure. */
- if (cse_reg_info_free_list)
+ unsigned int new_size;
+
+ if (cse_reg_info_table_size < 2048)
{
- p = cse_reg_info_free_list;
- cse_reg_info_free_list = p->next;
+	  /* Compute a new size that is a power of 2 and no smaller
+	     than the larger of NREGS and 64.  */
+ new_size = (cse_reg_info_table_size
+ ? cse_reg_info_table_size : 64);
+
+ while (new_size < nregs)
+ new_size *= 2;
}
else
- p = xmalloc (sizeof (struct cse_reg_info));
-
- /* Insert into hash table. */
- p->hash_next = *hash_head;
- *hash_head = p;
-
- /* Initialize it. */
- p->reg_tick = 1;
- p->reg_in_table = -1;
- p->subreg_ticked = -1;
- p->reg_qty = -regno - 1;
- p->regno = regno;
- p->next = cse_reg_info_used_list;
- cse_reg_info_used_list = p;
- if (!cse_reg_info_used_list_end)
- cse_reg_info_used_list_end = p;
+ {
+ /* If we need a big table, allocate just enough to hold
+ NREGS registers. */
+ new_size = nregs;
+ }
+
+ /* Reallocate the table with NEW_SIZE entries. */
+ cse_reg_info_table = xrealloc (cse_reg_info_table,
+ (sizeof (struct cse_reg_info)
+ * new_size));
+ cse_reg_info_table_size = new_size;
+ }
+
+ /* Do we have all of the first NREGS entries initialized? */
+ if (cse_reg_info_table_first_uninitialized < nregs)
+ {
+ unsigned int old_timestamp = cse_reg_info_timestamp - 1;
+ unsigned int i;
+
+ /* Put the old timestamp on newly allocated entries so that they
+ will all be considered out of date. We do not touch those
+ entries beyond the first NREGS entries to be nice to the
+ virtual memory. */
+ for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
+ cse_reg_info_table[i].timestamp = old_timestamp;
+
+ cse_reg_info_table_first_uninitialized = nregs;
}
+}
- /* Cache this lookup; we tend to be looking up information about the
- same register several times in a row. */
- cached_regno = regno;
- cached_cse_reg_info = p;
+/* Given REGNO, ensure that a cse_reg_info entry exists for REGNO by
+ growing the cse_reg_info_table and/or initializing the entry for
+ REGNO. */
+
+static void
+get_cse_reg_info_1 (unsigned int regno)
+{
+ /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
+ entry will be considered to have been initialized. */
+ cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;
+
+ /* Initialize the rest of the entry. */
+ cse_reg_info_table[regno].reg_tick = 1;
+ cse_reg_info_table[regno].reg_in_table = -1;
+ cse_reg_info_table[regno].subreg_ticked = -1;
+ cse_reg_info_table[regno].reg_qty = -regno - 1;
+}
+
+/* Find a cse_reg_info entry for REGNO. */
+
+static inline struct cse_reg_info *
+get_cse_reg_info (unsigned int regno)
+{
+ struct cse_reg_info *p = &cse_reg_info_table[regno];
+
+ /* If this entry has not been initialized, go ahead and initialize
+ it. */
+ if (p->timestamp != cse_reg_info_timestamp)
+ get_cse_reg_info_1 (regno);
return p;
}
next_qty = 0;
- /* Clear out hash table state for this pass. */
-
- memset (reg_hash, 0, sizeof reg_hash);
-
- if (cse_reg_info_used_list)
- {
- cse_reg_info_used_list_end->next = cse_reg_info_free_list;
- cse_reg_info_free_list = cse_reg_info_used_list;
- cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
- }
- cached_cse_reg_info = 0;
+ /* Invalidate cse_reg_info_table and its cache. */
+ cse_reg_info_timestamp++;
+ /* Clear out hash table state for this pass. */
CLEAR_HARD_REG_SET (hard_regs_in_table);
/* The per-quantity values used to be initialized here, but it is
if (elt)
free_element_chain = elt->next_same_hash;
else
- {
- n_elements_made++;
- elt = xmalloc (sizeof (struct table_elt));
- }
+ elt = xmalloc (sizeof (struct table_elt));
elt->exp = x;
elt->canon_exp = NULL_RTX;
be valid and produce better code. */
if (!REG_P (addr))
{
- rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
- int addr_folded_cost = address_cost (folded, mode);
- int addr_cost = address_cost (addr, mode);
-
- if ((addr_folded_cost < addr_cost
- || (addr_folded_cost == addr_cost
- /* ??? The rtx_cost comparison is left over from an older
- version of this code. It is probably no longer helpful. */
- && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
- || approx_reg_cost (folded) < approx_reg_cost (addr))))
- && validate_change (insn, loc, folded, 0))
- addr = folded;
+ rtx folded = fold_rtx (addr, NULL_RTX);
+ if (folded != addr)
+ {
+ int addr_folded_cost = address_cost (folded, mode);
+ int addr_cost = address_cost (addr, mode);
+
+ if ((addr_folded_cost < addr_cost
+ || (addr_folded_cost == addr_cost
+		   /* ??? The rtx_cost comparison is left over from an older
+		      version of this code.  It is probably no longer helpful.  */
+ && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
+ || approx_reg_cost (folded) < approx_reg_cost (addr))))
+ && validate_change (insn, loc, folded, 0))
+ addr = folded;
+ }
}
/* If this address is not in the hash table, we can't look for equivalences
rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
p->exp, op1);
int new_cost;
+
+ /* Get the canonical version of the address so we can accept
+ more. */
+ new = canon_for_address (new);
+
new_cost = address_cost (new, mode);
if (new_cost < best_addr_cost
case SYMBOL_REF:
case LABEL_REF:
case REG:
+ case PC:
/* No use simplifying an EXPR_LIST
since they are used only for lists of args
in a function call's REG_EQUAL note. */
return prev_insn_cc0;
#endif
- case PC:
- /* If the next insn is a CODE_LABEL followed by a jump table,
- PC's value is a LABEL_REF pointing to that label. That
- lets us fold switch statements on the VAX. */
- {
- rtx next;
- if (insn && tablejump_p (insn, &next, NULL))
- return gen_rtx_LABEL_REF (Pmode, next);
- }
- break;
-
case SUBREG:
/* See if we previously assigned a constant value to this SUBREG. */
if ((new = lookup_as_function (x, CONST_INT)) != 0
#endif
case ASM_OPERANDS:
- for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
- validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
- fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
+ if (insn)
+ {
+ for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
+ validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
+ fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
+ }
break;
default:
constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
do anything if both operands are already known to be constant. */
+ /* ??? Vector mode comparisons are not supported yet. */
+ if (VECTOR_MODE_P (mode))
+ break;
+
if (const_arg0 == 0 || const_arg1 == 0)
{
struct table_elt *p0, *p1;
code = find_comparison_args (code, &folded_arg0, &folded_arg1,
&mode_arg0, &mode_arg1);
- const_arg0 = equiv_constant (folded_arg0);
- const_arg1 = equiv_constant (folded_arg1);
/* If the mode is VOIDmode or a MODE_CC mode, we don't know
what kinds of things are being compared, so we can't do
if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
break;
+ const_arg0 = equiv_constant (folded_arg0);
+ const_arg1 = equiv_constant (folded_arg1);
+
/* If we do not now have two constants being compared, see
if we can nevertheless deduce some things about the
comparison. */
record_jump_cond (code, mode, op0, op1, reversed_nonequality);
}
+/* Yet another form of subreg creation. In this case, we want something in
+ MODE, and we should assume OP has MODE iff it is naturally modeless. */
+
+static rtx
+record_jump_cond_subreg (enum machine_mode mode, rtx op)
+{
+  enum machine_mode op_mode = GET_MODE (op);
+  /* Modeless (VOIDmode) operands are assumed to already be in MODE.  */
+  if (op_mode == mode || op_mode == VOIDmode)
+    return op;
+  /* May return NULL_RTX when no lowpart subreg exists; callers must
+     check for that before recording the condition.  */
+  return lowpart_subreg (mode, op, op_mode);
+}
+
/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
Make any useful entries we can with that information. Called from
> GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
{
enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
- rtx tem = gen_lowpart (inner_mode, op1);
-
- record_jump_cond (code, mode, SUBREG_REG (op0),
- tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
- reversed_nonequality);
+ rtx tem = record_jump_cond_subreg (inner_mode, op1);
+ if (tem)
+ record_jump_cond (code, mode, SUBREG_REG (op0), tem,
+ reversed_nonequality);
}
if (code == EQ && GET_CODE (op1) == SUBREG
> GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
{
enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
- rtx tem = gen_lowpart (inner_mode, op0);
-
- record_jump_cond (code, mode, SUBREG_REG (op1),
- tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
- reversed_nonequality);
+ rtx tem = record_jump_cond_subreg (inner_mode, op0);
+ if (tem)
+ record_jump_cond (code, mode, SUBREG_REG (op1), tem,
+ reversed_nonequality);
}
/* Similarly, if this is an NE comparison, and either is a SUBREG
< GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
{
enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
- rtx tem = gen_lowpart (inner_mode, op1);
-
- record_jump_cond (code, mode, SUBREG_REG (op0),
- tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
- reversed_nonequality);
+ rtx tem = record_jump_cond_subreg (inner_mode, op1);
+ if (tem)
+ record_jump_cond (code, mode, SUBREG_REG (op0), tem,
+ reversed_nonequality);
}
if (code == NE && GET_CODE (op1) == SUBREG
< GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
{
enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
- rtx tem = gen_lowpart (inner_mode, op0);
-
- record_jump_cond (code, mode, SUBREG_REG (op1),
- tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
- reversed_nonequality);
+ rtx tem = record_jump_cond_subreg (inner_mode, op0);
+ if (tem)
+ record_jump_cond (code, mode, SUBREG_REG (op1), tem,
+ reversed_nonequality);
}
/* Hash both operands. */
else
SET_SRC (sets[i].rtl) = new;
- if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
+ if (GET_CODE (dest) == ZERO_EXTRACT)
{
validate_change (insn, &XEXP (dest, 1),
canon_reg (XEXP (dest, 1), insn), 1);
canon_reg (XEXP (dest, 2), insn), 1);
}
- while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
+ while (GET_CODE (dest) == SUBREG
|| GET_CODE (dest) == ZERO_EXTRACT
- || GET_CODE (dest) == SIGN_EXTRACT)
+ || GET_CODE (dest) == STRICT_LOW_PART)
dest = XEXP (dest, 0);
if (MEM_P (dest))
causes later instructions to be mis-optimized. */
/* If storing a constant in a bitfield, pre-truncate the constant
so we will be able to record it later. */
- if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
- || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
+ if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
{
rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
/* Now deal with the destination. */
do_not_record = 0;
- /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
- to the MEM or REG within it. */
- while (GET_CODE (dest) == SIGN_EXTRACT
+ /* Look within any ZERO_EXTRACT to the MEM or REG within it. */
+ while (GET_CODE (dest) == SUBREG
|| GET_CODE (dest) == ZERO_EXTRACT
- || GET_CODE (dest) == SUBREG
|| GET_CODE (dest) == STRICT_LOW_PART)
dest = XEXP (dest, 0);
because the value in it after the store
may not equal what was stored, due to truncation. */
- if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
- || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
+ if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
{
rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
if (REG_P (dest) || GET_CODE (dest) == SUBREG)
invalidate (dest, VOIDmode);
else if (MEM_P (dest))
- {
- /* Outgoing arguments for a libcall don't
- affect any recorded expressions. */
- if (! libcall_insn || insn == libcall_insn)
- invalidate (dest, VOIDmode);
- }
+ invalidate (dest, VOIDmode);
else if (GET_CODE (dest) == STRICT_LOW_PART
|| GET_CODE (dest) == ZERO_EXTRACT)
invalidate (XEXP (dest, 0), GET_MODE (dest));
if (REG_P (dest) || GET_CODE (dest) == SUBREG)
invalidate (dest, VOIDmode);
else if (MEM_P (dest))
- {
- /* Outgoing arguments for a libcall don't
- affect any recorded expressions. */
- if (! libcall_insn || insn == libcall_insn)
- invalidate (dest, VOIDmode);
- }
+ invalidate (dest, VOIDmode);
else if (GET_CODE (dest) == STRICT_LOW_PART
|| GET_CODE (dest) == ZERO_EXTRACT)
invalidate (XEXP (dest, 0), GET_MODE (dest));
rtx insn = f;
int i;
+ init_cse_reg_info (nregs);
+
val.path = xmalloc (sizeof (struct branch_path)
* PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
init_recog ();
init_alias_analysis ();
- max_reg = nregs;
-
- max_insn_uid = get_max_uid ();
-
reg_eqv_table = xmalloc (nregs * sizeof (struct reg_eqv_elem));
- /* Reset the counter indicating how many elements have been made
- thus far. */
- n_elements_made = 0;
-
/* Find the largest uid. */
max_uid = get_max_uid ();
#endif
}
- if (max_elements_made < n_elements_made)
- max_elements_made = n_elements_made;
-
/* Clean up. */
end_alias_analysis ();
free (uid_cuid);
/* Process a single basic block. FROM and TO and the limits of the basic
block. NEXT_BRANCH points to the branch path when following jumps or
- a null path when not following jumps.
-
- AROUND_LOOP is nonzero if we are to try to cse around to the start of a
- loop. This is true when we are being called for the last time on a
- block and this CSE pass is before loop.c. */
+ a null path when not following jumps. */
static rtx
cse_basic_block (rtx from, rtx to, struct branch_path *next_branch)
int *counts;
rtx insn, prev;
int in_libcall = 0, dead_libcall = 0;
- int ndead = 0, nlastdead, niterations = 0;
+ int ndead = 0;
timevar_push (TV_DELETE_TRIVIALLY_DEAD);
/* First count the number of times each register is used. */
for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
count_reg_usage (insn, counts, 1);
- do
- {
- nlastdead = ndead;
- niterations++;
- /* Go from the last insn to the first and delete insns that only set unused
- registers or copy a register to itself. As we delete an insn, remove
- usage counts for registers it uses.
-
- The first jump optimization pass may leave a real insn as the last
- insn in the function. We must not skip that insn or we may end
- up deleting code that is not really dead. */
- insn = get_last_insn ();
- if (! INSN_P (insn))
- insn = prev_real_insn (insn);
+ /* Go from the last insn to the first and delete insns that only set unused
+ registers or copy a register to itself. As we delete an insn, remove
+ usage counts for registers it uses.
- for (; insn; insn = prev)
- {
- int live_insn = 0;
+ The first jump optimization pass may leave a real insn as the last
+ insn in the function. We must not skip that insn or we may end
+ up deleting code that is not really dead. */
+ insn = get_last_insn ();
+ if (! INSN_P (insn))
+ insn = prev_real_insn (insn);
- prev = prev_real_insn (insn);
+ for (; insn; insn = prev)
+ {
+ int live_insn = 0;
- /* Don't delete any insns that are part of a libcall block unless
- we can delete the whole libcall block.
+ prev = prev_real_insn (insn);
- Flow or loop might get confused if we did that. Remember
- that we are scanning backwards. */
- if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
- {
- in_libcall = 1;
- live_insn = 1;
- dead_libcall = dead_libcall_p (insn, counts);
- }
- else if (in_libcall)
- live_insn = ! dead_libcall;
- else
- live_insn = insn_live_p (insn, counts);
+ /* Don't delete any insns that are part of a libcall block unless
+ we can delete the whole libcall block.
- /* If this is a dead insn, delete it and show registers in it aren't
- being used. */
+ Flow or loop might get confused if we did that. Remember
+ that we are scanning backwards. */
+ if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
+ {
+ in_libcall = 1;
+ live_insn = 1;
+ dead_libcall = dead_libcall_p (insn, counts);
+ }
+ else if (in_libcall)
+ live_insn = ! dead_libcall;
+ else
+ live_insn = insn_live_p (insn, counts);
- if (! live_insn)
- {
- count_reg_usage (insn, counts, -1);
- delete_insn_and_edges (insn);
- ndead++;
- }
+ /* If this is a dead insn, delete it and show registers in it aren't
+ being used. */
- if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
- {
- in_libcall = 0;
- dead_libcall = 0;
- }
+ if (! live_insn)
+ {
+ count_reg_usage (insn, counts, -1);
+ delete_insn_and_edges (insn);
+ ndead++;
+ }
+
+ if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
+ {
+ in_libcall = 0;
+ dead_libcall = 0;
}
}
- while (ndead != nlastdead);
if (dump_file && ndead)
- fprintf (dump_file, "Deleted %i trivially dead insns; %i iterations\n",
- ndead, niterations);
+ fprintf (dump_file, "Deleted %i trivially dead insns\n",
+ ndead);
/* Clean up. */
free (counts);
timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
static int
cse_change_cc_mode (rtx *loc, void *data)
{
- rtx newreg = (rtx) data;
+ struct change_cc_mode_args* args = (struct change_cc_mode_args*)data;
if (*loc
&& REG_P (*loc)
- && REGNO (*loc) == REGNO (newreg)
- && GET_MODE (*loc) != GET_MODE (newreg))
+ && REGNO (*loc) == REGNO (args->newreg)
+ && GET_MODE (*loc) != GET_MODE (args->newreg))
{
- *loc = newreg;
+ validate_change (args->insn, loc, args->newreg, 1);
+
return -1;
}
return 0;
}
/* Change the mode of any reference to the register REGNO (NEWREG) to
+ GET_MODE (NEWREG) in INSN. */
+
+static void
+cse_change_cc_mode_insn (rtx insn, rtx newreg)
+{
+  struct change_cc_mode_args args;
+  int success;
+
+  /* Only real insns can contain references to the CC register.  */
+  if (!INSN_P (insn))
+    return;
+
+  args.insn = insn;
+  args.newreg = newreg;
+
+  /* Queue replacements in both the pattern and the notes; they are
+     validated together as one group below.  */
+  for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
+  for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, &args);
+
+  /* If the following assertion was triggered, there is most probably
+     something wrong with the cc_modes_compatible back end function.
+     CC modes only can be considered compatible if the insn - with the mode
+     replaced by any of the compatible modes - can still be recognized.  */
+  success = apply_change_group ();
+  gcc_assert (success);
+}
+
+/* Change the mode of any reference to the register REGNO (NEWREG) to
GET_MODE (NEWREG), starting at START. Stop before END. Stop at
any instruction which modifies NEWREG. */
if (reg_set_p (newreg, insn))
return;
- for_each_rtx (&PATTERN (insn), cse_change_cc_mode, newreg);
- for_each_rtx (®_NOTES (insn), cse_change_cc_mode, newreg);
+ cse_change_cc_mode_insn (insn, newreg);
}
}
{
gcc_assert (can_change_mode);
mode = comp_mode;
+
+ /* The modified insn will be re-recognized later. */
PUT_MODE (cc_src, mode);
}
}
{
rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
- /* Change the mode of CC_REG in CC_SRC_INSN to
- GET_MODE (NEWREG). */
- for_each_rtx (&PATTERN (cc_src_insn), cse_change_cc_mode,
- newreg);
- for_each_rtx (®_NOTES (cc_src_insn), cse_change_cc_mode,
- newreg);
+ cse_change_cc_mode_insn (cc_src_insn, newreg);
/* Do the same in the following insns that use the
current value of CC_REG within BB. */