/* Common subexpression elimination library for GNU compiler.
Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009
+ 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
Free Software Foundation, Inc.
This file is part of GCC.
#include "target.h"
static bool cselib_record_memory;
+static bool cselib_preserve_constants;
static int entry_and_rtx_equal_p (const void *, const void *);
static hashval_t get_value_hash (const void *);
static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
bitmap regs_active;
cselib_expand_callback callback;
void *callback_arg;
+ bool dummy;
};
static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);
It is used in new_elt_loc_list to set SETTING_INSN. */
static rtx cselib_current_insn;
-/* Every new unknown value gets a unique number. */
-static unsigned int next_unknown_value;
+/* The unique id that the next create value will take. */
+static unsigned int next_uid;
/* The number of registers we had when the varrays were last resized. */
static unsigned int cselib_nregs;
-/* Count values without known locations. Whenever this grows too big, we
- remove these useless values from the table. */
+/* Count values without known locations, or with only locations that
+ wouldn't have been known except for debug insns. Whenever this
+ grows too big, we remove these useless values from the table.
+
+ Counting values with only debug values is a bit tricky. We don't
+ want to increment n_useless_values when we create a value for a
+ debug insn, for this would get n_useless_values out of sync, but we
+ want to increment it if all locs in the list that were ever referenced
+ in nondebug insns are removed from the list.
+
+ In the general case, once we do that, we'd have to stop accepting
+ nondebug expressions in the loc list, to avoid having two values
+ equivalent that, without debug insns, would have been made into
+ separate values. However, because debug insns never introduce
+ equivalences themselves (no assignments), the only means for
+ growing loc lists is through nondebug assignments. If the locs
+ also happen to be referenced in debug insns, it will work just fine.
+
+ A consequence of this is that there's at most one debug-only loc in
+ each loc list. If we keep it in the first entry, testing whether
+ we have a debug-only loc list takes O(1).
+
+ Furthermore, since any additional entry in a loc list containing a
+ debug loc would have to come from an assignment (nondebug) that
+ references both the initial debug loc and the newly-equivalent loc,
+ the initial debug loc would be promoted to a nondebug loc, and the
+ loc list would not contain debug locs any more.
+
+ So the only case we have to be careful with in order to keep
+ n_useless_values in sync between debug and nondebug compilations is
+ to avoid incrementing n_useless_values when removing the single loc
+ from a value that turns out to not appear outside debug values. We
+ increment n_useless_debug_values instead, and leave such values
+ alone until, for other reasons, we garbage-collect useless
+ values. */
static int n_useless_values;
+static int n_useless_debug_values;
+
+/* Count values whose locs have been taken exclusively from debug
+ insns for the entire life of the value. */
+static int n_debug_values;
/* Number of useless values before we remove them from the hash table. */
#define MAX_USELESS_VALUES 32
presence in the list by checking the next pointer. */
static cselib_val dummy_val;
+/* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
+ that is constant through the whole function and should never be
+ eliminated. */
+static cselib_val *cfa_base_preserved_val;
+
/* Used to list all values that contain memory reference.
May or may not contain the useless values - the list is compacted
each time memory is invalidated. */
#define PRESERVED_VALUE_P(RTX) \
(RTL_FLAG_CHECK1("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)
-#define LONG_TERM_PRESERVED_VALUE_P(RTX) \
- (RTL_FLAG_CHECK1("LONG_TERM_PRESERVED_VALUE_P", (RTX), VALUE)->in_struct)
\f
el->next = next;
el->loc = loc;
el->setting_insn = cselib_current_insn;
+ gcc_assert (!next || !next->setting_insn
+ || !DEBUG_INSN_P (next->setting_insn));
+
+ /* If we're creating the first loc in a debug insn context, we've
+ just created a debug value. Count it. */
+ if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
+ n_debug_values++;
+
return el;
}
+/* Promote loc L to a nondebug cselib_current_insn if L is marked as
+ originating from a debug insn, maintaining the debug values
+ count. */
+
+static inline void
+promote_debug_loc (struct elt_loc_list *l)
+{
+ if (l->setting_insn && DEBUG_INSN_P (l->setting_insn)
+ && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
+ {
+ /* L was known only through debug insns but is now referenced in
+ a nondebug context; keep the debug-only value count in sync
+ (see the comment above n_useless_values). */
+ n_debug_values--;
+ l->setting_insn = cselib_current_insn;
+ /* A debug-only loc is always the sole entry in its list. */
+ gcc_assert (!l->next);
+ }
+}
+
/* The elt_list at *PL is no longer needed. Unchain it and free its
storage. */
void
cselib_clear_table (void)
{
+ /* Uids start at 1; new_cselib_val asserts next_uid is nonzero. */
- cselib_reset_table_with_next_value (0);
+ cselib_reset_table (1);
+}
+
+/* Remove from hash table all VALUEs except constants. Callback for
+ htab_traverse; always returns 1 so the traversal continues. */
+
+static int
+preserve_only_constants (void **x, void *info ATTRIBUTE_UNUSED)
+{
+ cselib_val *v = (cselib_val *)*x;
+
+ if (v->locs != NULL
+ && v->locs->next == NULL)
+ {
+ /* Keep plain constants, but not a CONST that itself references
+ other VALUEs -- presumably those VALUEs need not survive the
+ reset, leaving the CONST dangling. */
+ if (CONSTANT_P (v->locs->loc)
+ && (GET_CODE (v->locs->loc) != CONST
+ || !references_value_p (v->locs->loc, 0)))
+ return 1;
+ /* Also keep the CFA base value itself and any value that is a
+ constant offset from it. */
+ if (cfa_base_preserved_val)
+ {
+ if (v == cfa_base_preserved_val)
+ return 1;
+ if (GET_CODE (v->locs->loc) == PLUS
+ && CONST_INT_P (XEXP (v->locs->loc, 1))
+ && XEXP (v->locs->loc, 0) == cfa_base_preserved_val->val_rtx)
+ return 1;
+ }
+ }
+
+ /* Not a retained constant; remove it from the table. */
+ htab_clear_slot (cselib_hash_table, x);
+ return 1;
}
/* Remove all entries from the hash table, arranging for the next
value to be numbered NUM. */
void
-cselib_reset_table_with_next_value (unsigned int num)
+cselib_reset_table (unsigned int num)
{
unsigned int i;
- for (i = 0; i < n_used_regs; i++)
- REG_VALUES (used_regs[i]) = 0;
-
max_value_regs = 0;
- n_used_regs = 0;
+ if (cfa_base_preserved_val)
+ {
+ /* Clear every register binding except the CFA base register's,
+ which must survive the reset; compact used_regs down to it. */
+ unsigned int regno = REGNO (cfa_base_preserved_val->locs->loc);
+ unsigned int new_used_regs = 0;
+ for (i = 0; i < n_used_regs; i++)
+ if (used_regs[i] == regno)
+ {
+ new_used_regs = 1;
+ continue;
+ }
+ else
+ REG_VALUES (used_regs[i]) = 0;
+ /* The CFA base register must have been recorded as in use. */
+ gcc_assert (new_used_regs == 1);
+ n_used_regs = new_used_regs;
+ used_regs[0] = regno;
+ max_value_regs
+ = hard_regno_nregs[regno][GET_MODE (cfa_base_preserved_val->locs->loc)];
+ }
+ else
+ {
+ for (i = 0; i < n_used_regs; i++)
+ REG_VALUES (used_regs[i]) = 0;
+ n_used_regs = 0;
+ }
- /* ??? Preserve constants? */
- htab_empty (cselib_hash_table);
+ /* Either drop every entry, or retain just the constants (and the
+ CFA base value) via the preserve_only_constants callback. */
+ if (cselib_preserve_constants)
+ htab_traverse (cselib_hash_table, preserve_only_constants, NULL);
+ else
+ htab_empty (cselib_hash_table);
n_useless_values = 0;
+ n_useless_debug_values = 0;
+ n_debug_values = 0;
- next_unknown_value = num;
+ next_uid = num;
first_containing_mem = &dummy_val;
}
/* Return the number of the next value that will be generated. */
unsigned int
-cselib_get_next_unknown_value (void)
+cselib_get_next_uid (void)
{
+ /* The uid new_cselib_val will assign next; a uid, not a hash. */
- return next_unknown_value;
+ return next_uid;
}
/* The equality test for our hash table. The first argument ENTRY is a table
gcc_assert (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
&& (mode != VOIDmode || GET_CODE (x) != CONST_DOUBLE));
-
+
if (mode != GET_MODE (v->val_rtx))
return 0;
so we need to do a comparison. */
for (l = v->locs; l; l = l->next)
if (rtx_equal_for_cselib_p (l->loc, x))
- return 1;
+ {
+ promote_debug_loc (l);
+ return 1;
+ }
return 0;
}
get_value_hash (const void *entry)
{
const cselib_val *const v = (const cselib_val *) entry;
- return v->value;
+ return v->hash;
}
/* Return true if X contains a VALUE rtx. If ONLY_USELESS is set, we
{
cselib_val *v = (cselib_val *)*x;
struct elt_loc_list **p = &v->locs;
- int had_locs = v->locs != 0;
+ bool had_locs = v->locs != NULL;
+ rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;
while (*p)
{
if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
{
- n_useless_values++;
+ if (setting_insn && DEBUG_INSN_P (setting_insn))
+ n_useless_debug_values++;
+ else
+ n_useless_values++;
values_became_useless = 1;
}
return 1;
remove_useless_values (void)
{
cselib_val **p, *v;
+
/* First pass: eliminate locations that reference the value. That in
turn can make more values useless. */
do
}
*p = &dummy_val;
+ n_useless_values += n_useless_debug_values;
+ n_debug_values -= n_useless_debug_values;
+ n_useless_debug_values = 0;
+
htab_traverse (cselib_hash_table, discard_useless_values, 0);
gcc_assert (!n_useless_values);
return PRESERVED_VALUE_P (v->val_rtx);
}
-/* Mark preserved values as preserved for the long term. */
+/* Arrange for a REG value to be assumed constant through the whole function,
+ never invalidated and preserved across cselib_reset_table calls. */
-static int
-cselib_preserve_definitely (void **slot, void *info ATTRIBUTE_UNUSED)
+void
+cselib_preserve_cfa_base_value (cselib_val *v)
{
- cselib_val *v = (cselib_val *)*slot;
-
- if (PRESERVED_VALUE_P (v->val_rtx)
- && !LONG_TERM_PRESERVED_VALUE_P (v->val_rtx))
- LONG_TERM_PRESERVED_VALUE_P (v->val_rtx) = true;
-
- return 1;
-}
-
-/* Clear the preserve marks for values not preserved for the long
- term. */
-
-static int
-cselib_clear_preserve (void **slot, void *info ATTRIBUTE_UNUSED)
-{
- cselib_val *v = (cselib_val *)*slot;
-
- if (PRESERVED_VALUE_P (v->val_rtx)
- && !LONG_TERM_PRESERVED_VALUE_P (v->val_rtx))
- {
- PRESERVED_VALUE_P (v->val_rtx) = false;
- if (!v->locs)
- n_useless_values++;
- }
-
- return 1;
+ /* Record V only when its first loc is the REG itself;
+ cselib_reset_table reads REGNO (cfa_base_preserved_val->locs->loc). */
+ if (cselib_preserve_constants
+ && v->locs
+ && REG_P (v->locs->loc))
+ cfa_base_preserved_val = v;
}
/* Clean all non-constant expressions in the hash table, but retain
their values. */
void
-cselib_preserve_only_values (bool retain)
+cselib_preserve_only_values (void)
{
int i;
- htab_traverse (cselib_hash_table,
- retain ? cselib_preserve_definitely : cselib_clear_preserve,
- NULL);
-
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
cselib_invalidate_regno (i, reg_raw_mode[i]);
{
case CONST_DOUBLE:
case CONST_FIXED:
+ case DEBUG_EXPR:
return 0;
case LABEL_REF:
return 1;
}
+/* We need to pass down the mode of constants through the hash table
+ functions. For that purpose, wrap them in a CONST of the appropriate
+ mode. */
+static rtx
+wrap_constant (enum machine_mode mode, rtx x)
+{
+ /* Only CONST_INT, CONST_FIXED and VOIDmode CONST_DOUBLE carry no
+ mode of their own; anything else is returned unwrapped. */
+ if (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
+ && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
+ return x;
+ gcc_assert (mode != VOIDmode);
+ return gen_rtx_CONST (mode, x);
+}
+
/* Hash an rtx. Return 0 if we couldn't hash the rtx.
For registers and memory locations, we look up their cselib_val structure
and return its VALUE element.
if (! e)
return 0;
- return e->value;
+ return e->hash;
+
+ case DEBUG_EXPR:
+ hash += ((unsigned) DEBUG_EXPR << 7)
+ + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
+ return hash ? hash : (unsigned int) DEBUG_EXPR;
case CONST_INT:
hash += ((unsigned) CONST_INT << 7) + INTVAL (x);
{
rtx tem = XEXP (x, i);
unsigned int tem_hash = cselib_hash_rtx (tem, create);
-
+
if (tem_hash == 0)
return 0;
-
+
hash += tem_hash;
}
break;
{
unsigned int tem_hash
= cselib_hash_rtx (XVECEXP (x, i, j), create);
-
+
if (tem_hash == 0)
return 0;
-
+
hash += tem_hash;
}
break;
case 's':
{
const unsigned char *p = (const unsigned char *) XSTR (x, i);
-
+
if (p)
while (*p)
hash += *p++;
break;
}
-
+
case 'i':
hash += XINT (x, i);
break;
case 't':
/* unused */
break;
-
+
default:
gcc_unreachable ();
}
value is MODE. */
static inline cselib_val *
-new_cselib_val (unsigned int value, enum machine_mode mode, rtx x)
+new_cselib_val (unsigned int hash, enum machine_mode mode, rtx x)
{
cselib_val *e = (cselib_val *) pool_alloc (cselib_val_pool);
- gcc_assert (value);
+ gcc_assert (hash);
+ gcc_assert (next_uid);
- e->value = value;
+ e->hash = hash;
+ e->uid = next_uid++;
/* We use an alloc pool to allocate this RTL construct because it
accounts for about 8% of the overall memory usage. We know
precisely when we can have VALUE RTXen (when cselib is active)
if (dump_file && (dump_flags & TDF_DETAILS))
{
- fprintf (dump_file, "cselib value %u ", value);
+ fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
if (flag_dump_noaddr || flag_dump_unnumbered)
fputs ("# ", dump_file);
else
for (l = mem_elt->locs; l; l = l->next)
if (MEM_P (l->loc)
&& CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
- return;
+ {
+ promote_debug_loc (l);
+ return;
+ }
addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
mem_elt->locs
/* Find a value that describes a value of our mode at that address. */
for (l = addr->addr_list; l; l = l->next)
if (GET_MODE (l->elt->val_rtx) == mode)
- return l->elt;
+ {
+ promote_debug_loc (l->elt->locs);
+ return l->elt;
+ }
if (! create)
return 0;
- mem_elt = new_cselib_val (++next_unknown_value, mode, x);
+ mem_elt = new_cselib_val (next_uid, mode, x);
add_mem_for_addr (addr, mem_elt, x);
slot = htab_find_slot_with_hash (cselib_hash_table, wrap_constant (mode, x),
- mem_elt->value, INSERT);
+ mem_elt->hash, INSERT);
*slot = mem_elt;
return mem_elt;
}
non-reg results, we just take the first one because they will all
expand to the same place. */
-static rtx
+static rtx
expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
int max_depth)
{
{
/* Avoid infinite recursion trying to expand a reg into a
the same reg. */
- if ((REG_P (p->loc))
- && (REGNO (p->loc) < regno)
+ if ((REG_P (p->loc))
+ && (REGNO (p->loc) < regno)
&& !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
{
reg_result = p->loc;
}
/* Avoid infinite recursion and do not try to expand the
value. */
- else if (GET_CODE (p->loc) == VALUE
+ else if (GET_CODE (p->loc) == VALUE
&& CSELIB_VAL_PTR (p->loc)->locs == p_in)
continue;
else if (!REG_P (p->loc))
if (result)
return result;
}
-
+
}
-
+
if (regno != UINT_MAX)
{
rtx result;
print_inline_rtx (dump_file, reg_result, 0);
fprintf (dump_file, "\n");
}
- else
+ else
fprintf (dump_file, "NULL\n");
}
return reg_result;
This is the opposite of common subexpression. Because local value
numbering is such a weak optimization, the expanded expression is
pretty much unique (not from a pointer equals point of view but
- from a tree shape point of view.
+ from a tree shape point of view.
This function returns NULL if the expansion fails. The expansion
will fail if there is no value number for one of the operands or if
evd.regs_active = regs_active;
evd.callback = NULL;
evd.callback_arg = NULL;
+ evd.dummy = false;
return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}
/* Same as cselib_expand_value_rtx, but using a callback to try to
- resolve VALUEs that expand to nothing. */
+ resolve some expressions. The CB function should return ORIG if it
+ can't or does not want to deal with a certain RTX. Any other
+ return value, including NULL, will be used as the expansion for
+ VALUE, without any further changes. */
rtx
cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
evd.regs_active = regs_active;
evd.callback = cb;
evd.callback_arg = data;
+ evd.dummy = false;
return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}
+/* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
+ or simplified. Useful to find out whether cselib_expand_value_rtx_cb
+ would return NULL or non-NULL, without allocating new rtx. */
+
+bool
+cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
+ cselib_expand_callback cb, void *data)
+{
+ struct expand_value_data evd;
+
+ evd.regs_active = regs_active;
+ evd.callback = cb;
+ evd.callback_arg = data;
+ /* dummy makes cselib_expand_value_rtx_1 skip shallow_copy_rtx and
+ return ORIG instead of a fresh copy on success. */
+ evd.dummy = true;
+
+ return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
+}
+
+/* Internal implementation of cselib_expand_value_rtx and
+ cselib_expand_value_rtx_cb. */
+
static rtx
cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
int max_depth)
{
rtx result;
int regno = REGNO (orig);
-
+
/* The only thing that we are not willing to do (this
is requirement of dse and if others potential uses
need this function we should add a parm to control
if (result)
return result;
- else
+ else
return orig;
}
}
-
+
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
case SUBREG:
{
- rtx subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
- max_depth - 1);
+ rtx subreg;
+
+ if (evd->callback)
+ {
+ subreg = evd->callback (orig, evd->regs_active, max_depth,
+ evd->callback_arg);
+ if (subreg != orig)
+ return subreg;
+ }
+
+ subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
+ max_depth - 1);
if (!subreg)
return NULL;
scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
if (scopy == NULL
|| (GET_CODE (scopy) == SUBREG
&& !REG_P (SUBREG_REG (scopy))
- && !MEM_P (SUBREG_REG (scopy))
- && (REG_P (SUBREG_REG (orig))
- || MEM_P (SUBREG_REG (orig)))))
- return shallow_copy_rtx (orig);
+ && !MEM_P (SUBREG_REG (scopy))))
+ return NULL;
+
return scopy;
}
case VALUE:
{
rtx result;
+
if (dump_file && (dump_flags & TDF_DETAILS))
{
fputs ("\nexpanding ", dump_file);
fputs (" into...", dump_file);
}
- if (!evd->callback)
- result = NULL;
- else
+ if (evd->callback)
{
result = evd->callback (orig, evd->regs_active, max_depth,
evd->callback_arg);
- if (result == orig)
- result = NULL;
- else if (result)
- result = cselib_expand_value_rtx_1 (result, evd, max_depth);
+
+ if (result != orig)
+ return result;
}
- if (!result)
- result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
+ result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
return result;
}
+
+ case DEBUG_EXPR:
+ if (evd->callback)
+ return evd->callback (orig, evd->regs_active, max_depth,
+ evd->callback_arg);
+ return orig;
+
default:
break;
}
that all fields need copying, and then clear the fields that should
not be copied. That is the sensible default behavior, and forces
us to explicitly document why we are *not* copying a flag. */
- copy = shallow_copy_rtx (orig);
+ if (evd->dummy)
+ copy = NULL;
+ else
+ copy = shallow_copy_rtx (orig);
format_ptr = GET_RTX_FORMAT (code);
max_depth - 1);
if (!result)
return NULL;
- XEXP (copy, i) = result;
+ if (copy)
+ XEXP (copy, i) = result;
}
break;
case 'V':
if (XVEC (orig, i) != NULL)
{
- XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
- for (j = 0; j < XVECLEN (copy, i); j++)
+ if (copy)
+ XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
+ for (j = 0; j < XVECLEN (orig, i); j++)
{
rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
evd, max_depth - 1);
if (!result)
return NULL;
- XVECEXP (copy, i, j) = result;
+ if (copy)
+ XVECEXP (copy, i, j) = result;
}
}
break;
gcc_unreachable ();
}
+ if (evd->dummy)
+ return orig;
+
mode = GET_MODE (copy);
/* If an operand has been simplified into CONST_INT, which doesn't
have a mode and the mode isn't derivable from whole rtx's mode,
default:
break;
}
- if (scopy == NULL_RTX)
- {
- XEXP (copy, 0)
- = gen_rtx_CONST (GET_MODE (XEXP (orig, 0)), XEXP (copy, 0));
- if (dump_file && (dump_flags & TDF_DETAILS))
- fprintf (dump_file, " wrapping const_int result in const to preserve mode %s\n",
- GET_MODE_NAME (GET_MODE (XEXP (copy, 0))));
- }
scopy = simplify_rtx (copy);
if (scopy)
- {
- if (GET_MODE (copy) != GET_MODE (scopy))
- scopy = wrap_constant (GET_MODE (copy), scopy);
- return scopy;
- }
+ return scopy;
return copy;
}
{
/* This happens for autoincrements. Assign a value that doesn't
match any other. */
- e = new_cselib_val (++next_unknown_value, GET_MODE (x), x);
+ e = new_cselib_val (next_uid, GET_MODE (x), x);
}
return e->val_rtx;
case PRE_DEC:
case POST_MODIFY:
case PRE_MODIFY:
- e = new_cselib_val (++next_unknown_value, GET_MODE (x), x);
+ e = new_cselib_val (next_uid, GET_MODE (x), x);
return e->val_rtx;
default:
{
rtx t = cselib_subst_to_values (XEXP (x, i));
- if (t != XEXP (x, i) && x == copy)
- copy = shallow_copy_rtx (x);
-
- XEXP (copy, i) = t;
+ if (t != XEXP (x, i))
+ {
+ if (x == copy)
+ copy = shallow_copy_rtx (x);
+ XEXP (copy, i) = t;
+ }
}
else if (fmt[i] == 'E')
{
- int j, k;
+ int j;
for (j = 0; j < XVECLEN (x, i); j++)
{
rtx t = cselib_subst_to_values (XVECEXP (x, i, j));
- if (t != XVECEXP (x, i, j) && XVEC (x, i) == XVEC (copy, i))
+ if (t != XVECEXP (x, i, j))
{
- if (x == copy)
- copy = shallow_copy_rtx (x);
-
- XVEC (copy, i) = rtvec_alloc (XVECLEN (x, i));
- for (k = 0; k < j; k++)
- XVECEXP (copy, i, k) = XVECEXP (x, i, k);
+ if (XVEC (x, i) == XVEC (copy, i))
+ {
+ if (x == copy)
+ copy = shallow_copy_rtx (x);
+ XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
+ }
+ XVECEXP (copy, i, j) = t;
}
-
- XVECEXP (copy, i, j) = t;
}
}
}
return copy;
}
-/* Log a lookup of X to the cselib table along with the result RET. */
-
-static cselib_val *
-cselib_log_lookup (rtx x, cselib_val *ret)
-{
- if (dump_file && (dump_flags & TDF_DETAILS))
- {
- fputs ("cselib lookup ", dump_file);
- print_inline_rtx (dump_file, x, 2);
- fprintf (dump_file, " => %u\n", ret ? ret->value : 0);
- }
-
- return ret;
-}
-
/* Look up the rtl expression X in our tables and return the value it has.
If CREATE is zero, we return NULL if we don't know the value. Otherwise,
we create a new one if possible, using mode MODE if X doesn't have a mode
(i.e. because it's a constant). */
-cselib_val *
-cselib_lookup (rtx x, enum machine_mode mode, int create)
+static cselib_val *
+cselib_lookup_1 (rtx x, enum machine_mode mode, int create)
{
void **slot;
cselib_val *e;
l = l->next;
for (; l; l = l->next)
if (mode == GET_MODE (l->elt->val_rtx))
- return cselib_log_lookup (x, l->elt);
+ {
+ promote_debug_loc (l->elt->locs);
+ return l->elt;
+ }
if (! create)
- return cselib_log_lookup (x, 0);
+ return 0;
if (i < FIRST_PSEUDO_REGISTER)
{
max_value_regs = n;
}
- e = new_cselib_val (++next_unknown_value, GET_MODE (x), x);
+ e = new_cselib_val (next_uid, GET_MODE (x), x);
e->locs = new_elt_loc_list (e->locs, x);
if (REG_VALUES (i) == 0)
{
REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
}
REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
- slot = htab_find_slot_with_hash (cselib_hash_table, x, e->value, INSERT);
+ slot = htab_find_slot_with_hash (cselib_hash_table, x, e->hash, INSERT);
*slot = e;
- return cselib_log_lookup (x, e);
+ return e;
}
if (MEM_P (x))
- return cselib_log_lookup (x, cselib_lookup_mem (x, create));
+ return cselib_lookup_mem (x, create);
hashval = cselib_hash_rtx (x, create);
/* Can't even create if hashing is not possible. */
if (! hashval)
- return cselib_log_lookup (x, 0);
+ return 0;
slot = htab_find_slot_with_hash (cselib_hash_table, wrap_constant (mode, x),
hashval, create ? INSERT : NO_INSERT);
if (slot == 0)
- return cselib_log_lookup (x, 0);
+ return 0;
e = (cselib_val *) *slot;
if (e)
- return cselib_log_lookup (x, e);
+ return e;
e = new_cselib_val (hashval, mode, x);
cselib_subst_to_values will need to do lookups. */
*slot = (void *) e;
e->locs = new_elt_loc_list (e->locs, cselib_subst_to_values (x));
- return cselib_log_lookup (x, e);
+ return e;
+}
+
+/* Wrapper for cselib_lookup, that indicates X is in INSN. */
+
+cselib_val *
+cselib_lookup_from_insn (rtx x, enum machine_mode mode,
+ int create, rtx insn)
+{
+ cselib_val *ret;
+
+ /* Not meant to be called while cselib is already processing an
+ insn; cselib_current_insn is temporarily set to INSN and restored
+ to NULL before returning. */
+ gcc_assert (!cselib_current_insn);
+ cselib_current_insn = insn;
+
+ ret = cselib_lookup (x, mode, create);
+
+ cselib_current_insn = NULL;
+
+ return ret;
+}
+
+/* Wrapper for cselib_lookup_1, that logs the lookup result and
+ maintains invariants related with debug insns. */
+
+cselib_val *
+cselib_lookup (rtx x, enum machine_mode mode, int create)
+{
+ cselib_val *ret = cselib_lookup_1 (x, mode, create);
+
+ /* ??? Should we return NULL if we're not to create an entry, the
+ found loc is a debug loc and cselib_current_insn is not DEBUG?
+ If so, we should also avoid converting val to non-DEBUG; probably
+ easiest setting cselib_current_insn to NULL before the call
+ above. */
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ /* "uid:hash" here matches the dump format in new_cselib_val. */
+ fputs ("cselib lookup ", dump_file);
+ print_inline_rtx (dump_file, x, 2);
+ fprintf (dump_file, " => %u:%u\n",
+ ret ? ret->uid : 0,
+ ret ? ret->hash : 0);
+ }
+
+ return ret;
+}
/* Invalidate any entries in reg_values that overlap REGNO. This is called
while (*l)
{
cselib_val *v = (*l)->elt;
+ bool had_locs;
+ rtx setting_insn;
struct elt_loc_list **p;
unsigned int this_last = i;
if (i < FIRST_PSEUDO_REGISTER && v != NULL)
this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;
- if (this_last < regno || v == NULL)
+ if (this_last < regno || v == NULL || v == cfa_base_preserved_val)
{
l = &(*l)->next;
continue;
else
unchain_one_elt_list (l);
+ had_locs = v->locs != NULL;
+ setting_insn = v->locs ? v->locs->setting_insn : NULL;
+
/* Now, we clear the mapping from value to reg. It must exist, so
this code will crash intentionally if it doesn't. */
for (p = &v->locs; ; p = &(*p)->next)
break;
}
}
- if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
- n_useless_values++;
+
+ if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
+ {
+ if (setting_insn && DEBUG_INSN_P (setting_insn))
+ n_useless_debug_values++;
+ else
+ n_useless_values++;
+ }
}
}
}
{
bool has_mem = false;
struct elt_loc_list **p = &v->locs;
- int had_locs = v->locs != 0;
+ bool had_locs = v->locs != NULL;
+ rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;
while (*p)
{
}
if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
- n_useless_values++;
+ {
+ if (setting_insn && DEBUG_INSN_P (setting_insn))
+ n_useless_debug_values++;
+ else
+ n_useless_values++;
+ }
next = v->next_containing_mem;
if (has_mem)
src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1);
if (MEM_P (dest))
- sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0), Pmode, 1);
+ {
+ enum machine_mode address_mode
+ = targetm.addr_space.address_mode (MEM_ADDR_SPACE (dest));
+
+ sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
+ address_mode, 1);
+ }
else
sets[i].dest_addr_elt = 0;
}
&& GET_CODE (PATTERN (insn)) == ASM_OPERANDS
&& MEM_VOLATILE_P (PATTERN (insn))))
{
- cselib_reset_table_with_next_value (next_unknown_value);
+ cselib_reset_table (next_uid);
+ cselib_current_insn = NULL_RTX;
return;
}
if (! INSN_P (insn))
{
- cselib_current_insn = 0;
+ cselib_current_insn = NULL_RTX;
return;
}
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (call_used_regs[i]
|| (REG_VALUES (i) && REG_VALUES (i)->elt
- && HARD_REGNO_CALL_PART_CLOBBERED (i,
+ && HARD_REGNO_CALL_PART_CLOBBERED (i,
GET_MODE (REG_VALUES (i)->elt->val_rtx))))
cselib_invalidate_regno (i, reg_raw_mode[i]);
if (GET_CODE (XEXP (x, 0)) == CLOBBER)
cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
- cselib_current_insn = 0;
+ cselib_current_insn = NULL_RTX;
if (n_useless_values > MAX_USELESS_VALUES
/* remove_useless_values is linear in the hash table size. Avoid
quadratic behavior for very large hashtables with very few
useless elements. */
- && (unsigned int)n_useless_values > cselib_hash_table->n_elements / 4)
+ && ((unsigned int)n_useless_values
+ > (cselib_hash_table->n_elements
+ - cselib_hash_table->n_deleted
+ - n_debug_values) / 4))
remove_useless_values ();
}
init_alias_analysis. */
void
-cselib_init (bool record_memory)
+cselib_init (int record_what)
{
- elt_list_pool = create_alloc_pool ("elt_list",
+ elt_list_pool = create_alloc_pool ("elt_list",
sizeof (struct elt_list), 10);
- elt_loc_list_pool = create_alloc_pool ("elt_loc_list",
+ elt_loc_list_pool = create_alloc_pool ("elt_loc_list",
sizeof (struct elt_loc_list), 10);
- cselib_val_pool = create_alloc_pool ("cselib_val_list",
+ cselib_val_pool = create_alloc_pool ("cselib_val_list",
sizeof (cselib_val), 10);
value_pool = create_alloc_pool ("value", RTX_CODE_SIZE (VALUE), 100);
- cselib_record_memory = record_memory;
+ cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
+ cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
/* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
see canon_true_dependence. This is only created once. */
n_used_regs = 0;
cselib_hash_table = htab_create (31, get_value_hash,
entry_and_rtx_equal_p, NULL);
+ next_uid = 1;
}
/* Called when the current user is done with cselib. */
cselib_finish (void)
{
cselib_discard_hook = NULL;
+ cselib_preserve_constants = false;
+ cfa_base_preserved_val = NULL;
free_alloc_pool (elt_list_pool);
free_alloc_pool (elt_loc_list_pool);
free_alloc_pool (cselib_val_pool);
used_regs = 0;
cselib_hash_table = 0;
n_useless_values = 0;
- next_unknown_value = 0;
+ n_useless_debug_values = 0;
+ n_debug_values = 0;
+ next_uid = 0;
}
/* Dump the cselib_val *X to FILE *info. */
print_inline_rtx (out, first_containing_mem->val_rtx, 2);
fputc ('\n', out);
}
- fprintf (out, "last unknown value %i\n", next_unknown_value);
+ fprintf (out, "next uid %i\n", next_uid);
}
#include "gt-cselib.h"