/* Common subexpression elimination for GNU compiler.
- Copyright (C) 1987, 88, 89, 92-7, 1998, 1999 Free Software Foundation, Inc.
+ Copyright (C) 1987, 88, 89, 92-99, 2000 Free Software Foundation, Inc.
This file is part of GNU CC.
#include "expr.h"
#include "toplev.h"
#include "output.h"
-#include "hashtab.h"
#include "ggc.h"
/* The basic idea of common subexpression elimination is to go
struct cse_reg_info
{
- /* The number of times the register has been altered in the current
- basic block. */
- int reg_tick;
+ /* Next in hash chain. */
+ struct cse_reg_info *hash_next;
/* The next cse_reg_info structure in the free or used list. */
struct cse_reg_info *next;
+ /* Search key */
+ int regno;
+
+ /* The quantity number of the register's current contents. */
+ int reg_qty;
+
+ /* The number of times the register has been altered in the current
+ basic block. */
+ int reg_tick;
+
/* The REG_TICK value at which rtx's containing this register are
valid in the hash table. If this does not equal the current
reg_tick value, such expressions existing in the hash table are
invalid. */
int reg_in_table;
-
- /* The quantity number of the register's current contents. */
- int reg_qty;
-
- /* Search key */
- int regno;
};
/* A free list of cse_reg_info entries. */
static struct cse_reg_info *cse_reg_info_used_list_end;
/* A mapping from registers to cse_reg_info data structures. */
-static hash_table_t cse_reg_info_tree;
+#define REGHASH_SHIFT 7
+#define REGHASH_SIZE (1 << REGHASH_SHIFT)
+#define REGHASH_MASK (REGHASH_SIZE - 1)
+static struct cse_reg_info *reg_hash[REGHASH_SIZE];
+
+#define REGHASH_FN(REGNO) \
+ (((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
/* The last lookup we did into the cse_reg_info_tree. This allows us
to cache repeated lookups. */
/* We don't want a lot of buckets, because we rarely have very many
things stored in the hash table, and a lot of buckets slows
down a lot of loops that happen frequently. */
-#define NBUCKETS 31
+#define HASH_SHIFT 5
+#define HASH_SIZE (1 << HASH_SHIFT)
+#define HASH_MASK (HASH_SIZE - 1)
/* Compute hash code of X in mode M. Special-case case where X is a pseudo
register (hard registers may require `do_not_record' to be set). */
#define HASH(X, M) \
- (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
- ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) % NBUCKETS \
- : canon_hash (X, M) % NBUCKETS)
+ ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
+ ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
+ : canon_hash (X, M)) & HASH_MASK)
/* Determine whether register number N is considered a fixed register for CSE.
It is desirable to replace other regs with fixed regs, to reduce need for
? -1 : ADDRESS_COST(RTX))
#endif
-static struct table_elt *table[NBUCKETS];
+static struct table_elt *table[HASH_SIZE];
/* Chain of `struct table_elt's made so far for this function
but currently removed from the table. */
|| XEXP (X, 0) == virtual_outgoing_args_rtx)) \
|| GET_CODE (X) == ADDRESSOF)
-static int notreg_cost PROTO((rtx));
-static void new_basic_block PROTO((void));
-static void make_new_qty PROTO((int, enum machine_mode));
-static void make_regs_eqv PROTO((int, int));
-static void delete_reg_equiv PROTO((int));
-static int mention_regs PROTO((rtx));
-static int insert_regs PROTO((rtx, struct table_elt *, int));
-static void free_element PROTO((struct table_elt *));
-static void remove_from_table PROTO((struct table_elt *, unsigned));
-static struct table_elt *get_element PROTO((void));
-static struct table_elt *lookup PROTO((rtx, unsigned, enum machine_mode)),
- *lookup_for_remove PROTO((rtx, unsigned, enum machine_mode));
-static rtx lookup_as_function PROTO((rtx, enum rtx_code));
-static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
- enum machine_mode));
-static void merge_equiv_classes PROTO((struct table_elt *,
- struct table_elt *));
-static void invalidate PROTO((rtx, enum machine_mode));
-static int cse_rtx_varies_p PROTO((rtx));
-static void remove_invalid_refs PROTO((int));
-static void remove_invalid_subreg_refs PROTO((int, int, enum machine_mode));
-static void rehash_using_reg PROTO((rtx));
-static void invalidate_memory PROTO((void));
-static void invalidate_for_call PROTO((void));
-static rtx use_related_value PROTO((rtx, struct table_elt *));
-static unsigned canon_hash PROTO((rtx, enum machine_mode));
-static unsigned safe_hash PROTO((rtx, enum machine_mode));
-static int exp_equiv_p PROTO((rtx, rtx, int, int));
-static rtx canon_reg PROTO((rtx, rtx));
-static void find_best_addr PROTO((rtx, rtx *));
-static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
- enum machine_mode *,
- enum machine_mode *));
-static rtx fold_rtx PROTO((rtx, rtx));
-static rtx equiv_constant PROTO((rtx));
-static void record_jump_equiv PROTO((rtx, int));
-static void record_jump_cond PROTO((enum rtx_code, enum machine_mode,
- rtx, rtx, int));
-static void cse_insn PROTO((rtx, rtx));
-static int addr_affects_sp_p PROTO((rtx));
-static void invalidate_from_clobbers PROTO((rtx));
-static rtx cse_process_notes PROTO((rtx, rtx));
-static void cse_around_loop PROTO((rtx));
-static void invalidate_skipped_set PROTO((rtx, rtx, void *));
-static void invalidate_skipped_block PROTO((rtx));
-static void cse_check_loop_start PROTO((rtx, rtx, void *));
-static void cse_set_around_loop PROTO((rtx, rtx, rtx));
-static rtx cse_basic_block PROTO((rtx, rtx, struct branch_path *, int));
-static void count_reg_usage PROTO((rtx, int *, rtx, int));
-extern void dump_class PROTO((struct table_elt*));
-static struct cse_reg_info* get_cse_reg_info PROTO((int));
-static unsigned int hash_cse_reg_info PROTO((hash_table_entry_t));
-static int cse_reg_info_equal_p PROTO((hash_table_entry_t,
- hash_table_entry_t));
-
-static void flush_hash_table PROTO((void));
+static int notreg_cost PARAMS ((rtx));
+static void new_basic_block PARAMS ((void));
+static void make_new_qty PARAMS ((int, enum machine_mode));
+static void make_regs_eqv PARAMS ((int, int));
+static void delete_reg_equiv PARAMS ((int));
+static int mention_regs PARAMS ((rtx));
+static int insert_regs PARAMS ((rtx, struct table_elt *, int));
+static void remove_from_table PARAMS ((struct table_elt *, unsigned));
+static struct table_elt *lookup PARAMS ((rtx, unsigned, enum machine_mode)),
+ *lookup_for_remove PARAMS ((rtx, unsigned, enum machine_mode));
+static rtx lookup_as_function PARAMS ((rtx, enum rtx_code));
+static struct table_elt *insert PARAMS ((rtx, struct table_elt *, unsigned,
+ enum machine_mode));
+static void merge_equiv_classes PARAMS ((struct table_elt *,
+ struct table_elt *));
+static void invalidate PARAMS ((rtx, enum machine_mode));
+static int cse_rtx_varies_p PARAMS ((rtx));
+static void remove_invalid_refs PARAMS ((int));
+static void remove_invalid_subreg_refs PARAMS ((int, int, enum machine_mode));
+static void rehash_using_reg PARAMS ((rtx));
+static void invalidate_memory PARAMS ((void));
+static void invalidate_for_call PARAMS ((void));
+static rtx use_related_value PARAMS ((rtx, struct table_elt *));
+static unsigned canon_hash PARAMS ((rtx, enum machine_mode));
+static unsigned safe_hash PARAMS ((rtx, enum machine_mode));
+static int exp_equiv_p PARAMS ((rtx, rtx, int, int));
+static rtx canon_reg PARAMS ((rtx, rtx));
+static void find_best_addr PARAMS ((rtx, rtx *));
+static enum rtx_code find_comparison_args PARAMS ((enum rtx_code, rtx *, rtx *,
+ enum machine_mode *,
+ enum machine_mode *));
+static rtx fold_rtx PARAMS ((rtx, rtx));
+static rtx equiv_constant PARAMS ((rtx));
+static void record_jump_equiv PARAMS ((rtx, int));
+static void record_jump_cond PARAMS ((enum rtx_code, enum machine_mode,
+ rtx, rtx, int));
+static void cse_insn PARAMS ((rtx, rtx));
+static int addr_affects_sp_p PARAMS ((rtx));
+static void invalidate_from_clobbers PARAMS ((rtx));
+static rtx cse_process_notes PARAMS ((rtx, rtx));
+static void cse_around_loop PARAMS ((rtx));
+static void invalidate_skipped_set PARAMS ((rtx, rtx, void *));
+static void invalidate_skipped_block PARAMS ((rtx));
+static void cse_check_loop_start PARAMS ((rtx, rtx, void *));
+static void cse_set_around_loop PARAMS ((rtx, rtx, rtx));
+static rtx cse_basic_block PARAMS ((rtx, rtx, struct branch_path *, int));
+static void count_reg_usage PARAMS ((rtx, int *, rtx, int));
+extern void dump_class PARAMS ((struct table_elt*));
+static struct cse_reg_info* get_cse_reg_info PARAMS ((int));
+
+static void flush_hash_table PARAMS ((void));
\f
/* Dump the expressions in the equivalence class indicated by CLASSP.
This function is used only for debugging. */
}
}
-/* Return an estimate of the cost of computing rtx X.
- One use is in cse, to decide which expression to keep in the hash table.
- Another is in rtl generation, to pick the cheapest way to multiply.
- Other uses like the latter are expected in the future. */
-
/* Internal function, to compute cost when X is not a register; called
from COST macro to keep it simple. */
#define COSTS_N_INSNS(N) ((N) * 4 - 2)
+/* Return an estimate of the cost of computing rtx X.
+ One use is in cse, to decide which expression to keep in the hash table.
+ Another is in rtl generation, to pick the cheapest way to multiply.
+ Other uses like the latter are expected in the future. */
+
int
rtx_cost (x, outer_code)
rtx x;
get_cse_reg_info (regno)
int regno;
{
- struct cse_reg_info *cri;
- struct cse_reg_info **entry;
- struct cse_reg_info temp;
-
- /* See if we already have this entry. */
- temp.regno = regno;
- entry = (struct cse_reg_info **) find_hash_table_entry (cse_reg_info_tree,
- &temp, TRUE);
-
- if (*entry)
- cri = *entry;
- else
+ struct cse_reg_info **hash_head = ®_hash[REGHASH_FN (regno)];
+ struct cse_reg_info *p;
+
+ for (p = *hash_head ; p != NULL; p = p->hash_next)
+ if (p->regno == regno)
+ break;
+
+ if (p == NULL)
{
/* Get a new cse_reg_info structure. */
- if (cse_reg_info_free_list)
+ if (cse_reg_info_free_list)
{
- cri = cse_reg_info_free_list;
- cse_reg_info_free_list = cri->next;
+ p = cse_reg_info_free_list;
+ cse_reg_info_free_list = p->next;
}
else
- cri = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));
+ p = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));
+
+ /* Insert into hash table. */
+ p->hash_next = *hash_head;
+ *hash_head = p;
/* Initialize it. */
- cri->reg_tick = 0;
- cri->reg_in_table = -1;
- cri->reg_qty = regno;
- cri->regno = regno;
- cri->next = cse_reg_info_used_list;
- cse_reg_info_used_list = cri;
+ p->reg_tick = 1;
+ p->reg_in_table = -1;
+ p->reg_qty = regno;
+ p->regno = regno;
+ p->next = cse_reg_info_used_list;
+ cse_reg_info_used_list = p;
if (!cse_reg_info_used_list_end)
- cse_reg_info_used_list_end = cri;
-
- *entry = cri;
+ cse_reg_info_used_list_end = p;
}
/* Cache this lookup; we tend to be looking up information about the
same register several times in a row. */
cached_regno = regno;
- cached_cse_reg_info = cri;
+ cached_cse_reg_info = p;
- return cri;
-}
-
-static unsigned int
-hash_cse_reg_info (el_ptr)
- hash_table_entry_t el_ptr;
-{
- return ((const struct cse_reg_info *) el_ptr)->regno;
-}
-
-static int
-cse_reg_info_equal_p (el_ptr1, el_ptr2)
- hash_table_entry_t el_ptr1;
- hash_table_entry_t el_ptr2;
-{
- return (((const struct cse_reg_info *) el_ptr1)->regno
- == ((const struct cse_reg_info *) el_ptr2)->regno);
+ return p;
}
/* Clear the hash table and initialize each register with its own quantity,
next_qty = max_reg;
- if (cse_reg_info_tree)
+ /* Clear out hash table state for this pass. */
+
+ bzero ((char *) reg_hash, sizeof reg_hash);
+
+ if (cse_reg_info_used_list)
{
- delete_hash_table (cse_reg_info_tree);
- if (cse_reg_info_used_list)
- {
- cse_reg_info_used_list_end->next = cse_reg_info_free_list;
- cse_reg_info_free_list = cse_reg_info_used_list;
- cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
- }
- cached_cse_reg_info = 0;
+ cse_reg_info_used_list_end->next = cse_reg_info_free_list;
+ cse_reg_info_free_list = cse_reg_info_used_list;
+ cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
}
-
- cse_reg_info_tree = create_hash_table (0, hash_cse_reg_info,
- cse_reg_info_equal_p);
+ cached_cse_reg_info = 0;
CLEAR_HARD_REG_SET (hard_regs_in_table);
/* The per-quantity values used to be initialized here, but it is
much faster to initialize each as it is made in `make_new_qty'. */
- for (i = 0; i < NBUCKETS; i++)
+ for (i = 0; i < HASH_SIZE; i++)
{
- register struct table_elt *this, *next;
- for (this = table[i]; this; this = next)
+ struct table_elt *first;
+
+ first = table[i];
+ if (first != NULL)
{
- next = this->next_same_hash;
- free_element (this);
+ struct table_elt *last = first;
+
+ table[i] = NULL;
+
+ while (last->next_same_hash != NULL)
+ last = last->next_same_hash;
+
+	  /* Now relink this entire hash chain into
+	     the free element list.  */
+
+ last->next_same_hash = free_element_chain;
+ free_element_chain = first;
}
}
- bzero ((char *) table, sizeof table);
-
prev_insn = 0;
#ifdef HAVE_cc0
\f
/* Look in or update the hash table. */
-/* Put the element ELT on the list of free elements. */
-
-static void
-free_element (elt)
- struct table_elt *elt;
-{
- elt->next_same_hash = free_element_chain;
- free_element_chain = elt;
-}
-
-/* Return an element that is free for use. */
-
-static struct table_elt *
-get_element ()
-{
- struct table_elt *elt = free_element_chain;
- if (elt)
- {
- free_element_chain = elt->next_same_hash;
- return elt;
- }
- n_elements_made++;
- return (struct table_elt *) oballoc (sizeof (struct table_elt));
-}
-
/* Remove table element ELT from use in the table.
HASH is its hash code, made using the HASH macro.
It's an argument because often that is known in advance
when two classes were merged by `merge_equiv_classes'. Search
for the hash bucket that it heads. This happens only very
rarely, so the cost is acceptable. */
- for (hash = 0; hash < NBUCKETS; hash++)
+ for (hash = 0; hash < HASH_SIZE; hash++)
if (table[hash] == elt)
table[hash] = next;
}
p->related_value = 0;
}
- free_element (elt);
+ /* Now add it to the free element chain. */
+ elt->next_same_hash = free_element_chain;
+ free_element_chain = elt;
}
/* Look up X in the hash table and return its table element,
rtx x;
enum rtx_code code;
{
- register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
+ register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK,
GET_MODE (x));
/* If we are looking for a CONST_INT, the mode doesn't really matter, as
long as we are narrowing. So if we looked in vain for a mode narrower
{
x = copy_rtx (x);
PUT_MODE (x, word_mode);
- p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS, word_mode);
+ p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
}
if (p == 0)
/* Put an element for X into the right hash bucket. */
- elt = get_element ();
+ elt = free_element_chain;
+ if (elt)
+ {
+ free_element_chain = elt->next_same_hash;
+ }
+ else
+ {
+ n_elements_made++;
+ elt = (struct table_elt *) oballoc (sizeof (struct table_elt));
+ }
+
elt->exp = x;
elt->cost = COST (x);
elt->next_same_value = 0;
if (subexp != 0)
{
/* Get the integer-free subexpression in the hash table. */
- subhash = safe_hash (subexp, mode) % NBUCKETS;
+ subhash = safe_hash (subexp, mode) & HASH_MASK;
subelt = lookup (subexp, subhash, mode);
if (subelt == 0)
subelt = insert (subexp, NULL_PTR, subhash, mode);
int i;
struct table_elt *p;
- for (i = 0; i < NBUCKETS; i++)
+ for (i = 0; i < HASH_SIZE; i++)
for (p = table[i]; p; p = table[i])
{
/* Note that invalidate can remove elements
}
if (in_table)
- for (hash = 0; hash < NBUCKETS; hash++)
+ for (hash = 0; hash < HASH_SIZE; hash++)
for (p = table[hash]; p; p = next)
{
next = p->next_same_hash;
if (full_mode == VOIDmode)
full_mode = GET_MODE (x);
- for (i = 0; i < NBUCKETS; i++)
+ for (i = 0; i < HASH_SIZE; i++)
{
register struct table_elt *next;
register int i;
register struct table_elt *p, *next;
- for (i = 0; i < NBUCKETS; i++)
+ for (i = 0; i < HASH_SIZE; i++)
for (p = table[i]; p; p = next)
{
next = p->next_same_hash;
register struct table_elt *p, *next;
int end = word + (GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD;
- for (i = 0; i < NBUCKETS; i++)
+ for (i = 0; i < HASH_SIZE; i++)
for (p = table[i]; p; p = next)
{
rtx exp;
If we find one and it is in the wrong hash chain, move it. We can skip
objects that are registers, since they are handled specially. */
- for (i = 0; i < NBUCKETS; i++)
+ for (i = 0; i < HASH_SIZE; i++)
for (p = table[i]; p; p = next)
{
next = p->next_same_hash;
if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
&& exp_equiv_p (p->exp, p->exp, 1, 0)
- && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
+ && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
{
if (p->next_same_hash)
p->next_same_hash->prev_same_hash = p->prev_same_hash;
entry that overlaps a call-clobbered register. */
if (in_table)
- for (hash = 0; hash < NBUCKETS; hash++)
+ for (hash = 0; hash < HASH_SIZE; hash++)
for (p = table[hash]; p; p = next)
{
next = p->next_same_hash;
rtx subexp = get_related_value (x);
if (subexp != 0)
relt = lookup (subexp,
- safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
+ safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
GET_MODE (subexp));
}
&& code == LT && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
|| (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
- && FLOAT_STORE_FLAG_VALUE < 0)
+ && (REAL_VALUE_NEGATIVE
+ (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
#endif
)
x = arg1;
&& code == GE && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
|| (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
- && FLOAT_STORE_FLAG_VALUE < 0)
+ && (REAL_VALUE_NEGATIVE
+ (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
#endif
)
x = arg1, reverse_code = 1;
if (x == 0)
/* Look up ARG1 in the hash table and see if it has an equivalence
that lets us see what is being compared. */
- p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
+ p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
GET_MODE (arg1));
if (p) p = p->first_same_value;
#ifdef FLOAT_STORE_FLAG_VALUE
|| (code == LT
&& GET_MODE_CLASS (inner_mode) == MODE_FLOAT
- && FLOAT_STORE_FLAG_VALUE < 0)
+ && (REAL_VALUE_NEGATIVE
+ (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
#endif
)
&& GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
#ifdef FLOAT_STORE_FLAG_VALUE
|| (code == GE
&& GET_MODE_CLASS (inner_mode) == MODE_FLOAT
- && FLOAT_STORE_FLAG_VALUE < 0)
+ && (REAL_VALUE_NEGATIVE
+ (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
#endif
)
&& GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
#ifdef FLOAT_STORE_FLAG_VALUE
if (GET_MODE_CLASS (mode) == MODE_FLOAT)
{
- true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
- mode);
+ true = (CONST_DOUBLE_FROM_REAL_VALUE
+ (FLOAT_STORE_FLAG_VALUE (mode), mode));
false = CONST0_RTX (mode);
}
#endif
== REG_QTY (REGNO (folded_arg1))))
|| ((p0 = lookup (folded_arg0,
(safe_hash (folded_arg0, mode_arg0)
- % NBUCKETS), mode_arg0))
+ & HASH_MASK), mode_arg0))
&& (p1 = lookup (folded_arg1,
(safe_hash (folded_arg1, mode_arg0)
- % NBUCKETS), mode_arg0))
+ & HASH_MASK), mode_arg0))
&& p0->first_same_value == p1->first_same_value)))
return ((code == EQ || code == LE || code == GE
|| code == LEU || code == GEU)
struct qty_table_elem *ent = &qty_table[qty];
if ((comparison_dominates_p (ent->comparison_code, code)
- || (comparison_dominates_p (ent->comparison_code,
- reverse_condition (code))
- && ! FLOAT_MODE_P (mode_arg0)))
+ || (! FLOAT_MODE_P (mode_arg0)
+ && comparison_dominates_p (ent->comparison_code,
+ reverse_condition (code))))
&& (rtx_equal_p (ent->comparison_const, folded_arg1)
|| (const_arg1
&& rtx_equal_p (ent->comparison_const,
#ifdef FLOAT_STORE_FLAG_VALUE
if (GET_MODE_CLASS (mode) == MODE_FLOAT)
{
- true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
- mode);
+ true = (CONST_DOUBLE_FROM_REAL_VALUE
+ (FLOAT_STORE_FLAG_VALUE (mode), mode));
false = CONST0_RTX (mode);
}
#endif
const_arg1 ? const_arg1 : folded_arg1);
#ifdef FLOAT_STORE_FLAG_VALUE
if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
- new = ((new == const0_rtx) ? CONST0_RTX (mode)
- : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE, mode));
+ {
+ if (new == const0_rtx)
+ new = CONST0_RTX (mode);
+ else
+ new = (CONST_DOUBLE_FROM_REAL_VALUE
+ (FLOAT_STORE_FLAG_VALUE (mode), mode));
+ }
#endif
break;
{
rtx new_const = GEN_INT (- INTVAL (const_arg1));
struct table_elt *p
- = lookup (new_const, safe_hash (new_const, mode) % NBUCKETS,
+ = lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
mode);
if (p)
if (CONSTANT_P (x))
return x;
- elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
+ elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
if (elt == 0)
return 0;
{
reversed_nonequality = (code != EQ && code != NE);
code = reverse_condition (code);
+
+ /* Don't remember if we can't find the inverse. */
+ if (code == UNKNOWN)
+ return;
}
/* The mode is the mode of the non-constant. */
int src_eqv_in_memory = 0;
unsigned src_eqv_hash = 0;
- struct set *sets = NULL_PTR;
+ struct set *sets = (struct set *) NULL_PTR;
this_insn = insn;
trial = gen_rtx_LABEL_REF (Pmode, get_label_after (trial));
- SET_SRC (sets[i].rtl) = trial;
- cse_jumps_altered = 1;
+ if (trial == pc_rtx)
+ {
+ SET_SRC (sets[i].rtl) = trial;
+ cse_jumps_altered = 1;
+ break;
+ }
+
+ /* We must actually validate the change. Consider a target
+ where unconditional jumps are more complex than
+ (set (pc) (label_ref)) such as the fr30. */
+ if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
+ cse_jumps_altered = 1;
break;
}
/* We used to rely on all references to a register becoming
inaccessible when a register changes to a new quantity,
since that changes the hash code. However, that is not
- safe, since after NBUCKETS new quantities we get a
+ safe, since after HASH_SIZE new quantities we get a
hash 'collision' of a register with its own invalid
entries. And since SUBREGs have been changed not to
change their hash code with the hash code of the register,
register int i;
register struct table_elt *p, *next;
- for (i = 0; i < NBUCKETS; i++)
+ for (i = 0; i < HASH_SIZE; i++)
for (p = table[i]; p; p = next)
{
next = p->next_same_hash;
max_insn_uid = get_max_uid ();
reg_eqv_table = (struct reg_eqv_elem *)
- xmalloc(nregs * sizeof(struct reg_eqv_elem));
+ xmalloc (nregs * sizeof (struct reg_eqv_elem));
#ifdef LOAD_EXTEND_OP
/* This array is undefined before max_reg, so only allocate
the space actually needed and adjust the start. */
- qty_table = (struct qty_table_elem *) xmalloc ((max_qty - max_reg)
- * sizeof(struct qty_table_elem));
+ qty_table
+ = (struct qty_table_elem *) xmalloc ((max_qty - max_reg)
+ * sizeof (struct qty_table_elem));
qty_table -= max_reg;
new_basic_block ();
if (simplejump_p (insn))
{
if (to == 0)
- return 0;
+ {
+ free (qty_table + max_reg);
+ return 0;
+ }
if (JUMP_LABEL (insn) == to)
to_usage = 1;
/* If TO was the last insn in the function, we are done. */
if (insn == 0)
- return 0;
+ {
+ free (qty_table + max_reg);
+ return 0;
+ }
/* If TO was preceded by a BARRIER we are done with this block
because it has no continuation. */
prev = prev_nonnote_insn (to);
if (prev && GET_CODE (prev) == BARRIER)
- return insn;
+ {
+ free (qty_table + max_reg);
+ return insn;
+ }
/* Find the end of the following block. Note that we won't be
following branches in this case. */