/* Common subexpression elimination library for GNU compiler.
Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009
+ 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
Free Software Foundation, Inc.
This file is part of GCC.
#include "target.h"
static bool cselib_record_memory;
+static bool cselib_preserve_constants;
static int entry_and_rtx_equal_p (const void *, const void *);
static hashval_t get_value_hash (const void *);
static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
bitmap regs_active;
cselib_expand_callback callback;
void *callback_arg;
+ bool dummy;
};
static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);
It is used in new_elt_loc_list to set SETTING_INSN. */
static rtx cselib_current_insn;
-/* Every new unknown value gets a unique number. */
-static unsigned int next_unknown_value;
+/* The unique id that the next created value will take. */
+static unsigned int next_uid;
/* The number of registers we had when the varrays were last resized. */
static unsigned int cselib_nregs;
presence in the list by checking the next pointer. */
static cselib_val dummy_val;
+/* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
+ that is constant through the whole function and should never be
+ eliminated. */
+static cselib_val *cfa_base_preserved_val;
+
/* Used to list all values that contain memory reference.
May or may not contain the useless values - the list is compacted
each time memory is invalidated. */
#define PRESERVED_VALUE_P(RTX) \
(RTL_FLAG_CHECK1("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)
-#define LONG_TERM_PRESERVED_VALUE_P(RTX) \
- (RTL_FLAG_CHECK1("LONG_TERM_PRESERVED_VALUE_P", (RTX), VALUE)->in_struct)
\f
void
cselib_clear_table (void)
{
- cselib_reset_table_with_next_value (0);
+ cselib_reset_table (1);
+}
+
+/* htab_traverse callback: remove from the hash table every VALUE except the
+ ones that must survive a cselib_reset_table call, namely single-location
+ constants (and, when cfa_base_preserved_val is set, the CFA base value and
+ PLUS expressions offsetting it by a constant). Always returns 1 so that
+ the traversal visits every slot. */
+
+static int
+preserve_only_constants (void **x, void *info ATTRIBUTE_UNUSED)
+{
+ cselib_val *v = (cselib_val *)*x;
+
+ /* Only values with exactly one location are candidates for retention. */
+ if (v->locs != NULL
+ && v->locs->next == NULL)
+ {
+ /* Keep plain constants, but not CONSTs that still reference other
+ VALUEs — those would dangle once the table is cleared. */
+ if (CONSTANT_P (v->locs->loc)
+ && (GET_CODE (v->locs->loc) != CONST
+ || !references_value_p (v->locs->loc, 0)))
+ return 1;
+ if (cfa_base_preserved_val)
+ {
+ /* Keep the preserved CFA base value itself, and any
+ (plus cfa_base const_int) expression built on top of it. */
+ if (v == cfa_base_preserved_val)
+ return 1;
+ if (GET_CODE (v->locs->loc) == PLUS
+ && CONST_INT_P (XEXP (v->locs->loc, 1))
+ && XEXP (v->locs->loc, 0) == cfa_base_preserved_val->val_rtx)
+ return 1;
+ }
+ }
+
+ /* Anything else is dropped from the table. */
+ htab_clear_slot (cselib_hash_table, x);
+ return 1;
}
/* Remove all entries from the hash table, arranging for the next
value to be numbered NUM. */
void
-cselib_reset_table_with_next_value (unsigned int num)
+cselib_reset_table (unsigned int num)
{
unsigned int i;
- for (i = 0; i < n_used_regs; i++)
- REG_VALUES (used_regs[i]) = 0;
-
max_value_regs = 0;
- n_used_regs = 0;
+ if (cfa_base_preserved_val)
+ {
+ /* A CFA base value is being preserved: clear every register's value
+ chain except the base register's, keep only that register in
+ used_regs, and recompute max_value_regs for it. The assert checks
+ that the base register really was recorded in used_regs. */
+ unsigned int regno = REGNO (cfa_base_preserved_val->locs->loc);
+ unsigned int new_used_regs = 0;
+ for (i = 0; i < n_used_regs; i++)
+ if (used_regs[i] == regno)
+ {
+ new_used_regs = 1;
+ continue;
+ }
+ else
+ REG_VALUES (used_regs[i]) = 0;
+ gcc_assert (new_used_regs == 1);
+ n_used_regs = new_used_regs;
+ used_regs[0] = regno;
+ max_value_regs
+ = hard_regno_nregs[regno][GET_MODE (cfa_base_preserved_val->locs->loc)];
+ }
+ else
+ {
+ for (i = 0; i < n_used_regs; i++)
+ REG_VALUES (used_regs[i]) = 0;
+ n_used_regs = 0;
+ }
- /* ??? Preserve constants? */
- htab_empty (cselib_hash_table);
+ /* In preserve-constants mode scrub only the non-constant entries
+ instead of emptying the whole table. */
+ if (cselib_preserve_constants)
+ htab_traverse (cselib_hash_table, preserve_only_constants, NULL);
+ else
+ htab_empty (cselib_hash_table);
n_useless_values = 0;
- next_unknown_value = num;
+ next_uid = num;
first_containing_mem = &dummy_val;
}
/* Return the number of the next value that will be generated. */
unsigned int
-cselib_get_next_unknown_value (void)
+cselib_get_next_uid (void)
{
+ /* next_uid is the uid that the next created VALUE will be given. */
- return next_unknown_value;
+ return next_uid;
}
/* The equality test for our hash table. The first argument ENTRY is a table
get_value_hash (const void *entry)
{
const cselib_val *const v = (const cselib_val *) entry;
- return v->value;
+ return v->hash;
}
/* Return true if X contains a VALUE rtx. If ONLY_USELESS is set, we
return PRESERVED_VALUE_P (v->val_rtx);
}
-/* Mark preserved values as preserved for the long term. */
-
-static int
-cselib_preserve_definitely (void **slot, void *info ATTRIBUTE_UNUSED)
-{
- cselib_val *v = (cselib_val *)*slot;
-
- if (PRESERVED_VALUE_P (v->val_rtx)
- && !LONG_TERM_PRESERVED_VALUE_P (v->val_rtx))
- LONG_TERM_PRESERVED_VALUE_P (v->val_rtx) = true;
-
- return 1;
-}
-
-/* Clear the preserve marks for values not preserved for the long
- term. */
+/* Arrange for a REG value to be assumed constant through the whole function,
+ never invalidated and preserved across cselib_reset_table calls. */
-static int
-cselib_clear_preserve (void **slot, void *info ATTRIBUTE_UNUSED)
+void
+cselib_preserve_cfa_base_value (cselib_val *v)
{
- cselib_val *v = (cselib_val *)*slot;
-
- if (PRESERVED_VALUE_P (v->val_rtx)
- && !LONG_TERM_PRESERVED_VALUE_P (v->val_rtx))
- {
- PRESERVED_VALUE_P (v->val_rtx) = false;
- if (!v->locs)
- n_useless_values++;
- }
-
- return 1;
+ /* Record V only when constant preservation is enabled and V's first
+ location really is a register; otherwise the request is ignored. */
+ if (cselib_preserve_constants
+ && v->locs
+ && REG_P (v->locs->loc))
+ cfa_base_preserved_val = v;
}
/* Clean all non-constant expressions in the hash table, but retain
their values. */
void
-cselib_preserve_only_values (bool retain)
+cselib_preserve_only_values (void)
{
int i;
- htab_traverse (cselib_hash_table,
- retain ? cselib_preserve_definitely : cselib_clear_preserve,
- NULL);
-
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
cselib_invalidate_regno (i, reg_raw_mode[i]);
if (! e)
return 0;
- return e->value;
+ return e->hash;
case DEBUG_EXPR:
hash += ((unsigned) DEBUG_EXPR << 7)
value is MODE. */
static inline cselib_val *
-new_cselib_val (unsigned int value, enum machine_mode mode, rtx x)
+new_cselib_val (unsigned int hash, enum machine_mode mode, rtx x)
{
cselib_val *e = (cselib_val *) pool_alloc (cselib_val_pool);
- gcc_assert (value);
+ gcc_assert (hash);
+ gcc_assert (next_uid);
- e->value = value;
+ e->hash = hash;
+ e->uid = next_uid++;
/* We use an alloc pool to allocate this RTL construct because it
accounts for about 8% of the overall memory usage. We know
precisely when we can have VALUE RTXen (when cselib is active)
if (dump_file && (dump_flags & TDF_DETAILS))
{
- fprintf (dump_file, "cselib value %u ", value);
+ fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
if (flag_dump_noaddr || flag_dump_unnumbered)
fputs ("# ", dump_file);
else
if (! create)
return 0;
- mem_elt = new_cselib_val (++next_unknown_value, mode, x);
+ mem_elt = new_cselib_val (next_uid, mode, x);
add_mem_for_addr (addr, mem_elt, x);
slot = htab_find_slot_with_hash (cselib_hash_table, wrap_constant (mode, x),
- mem_elt->value, INSERT);
+ mem_elt->hash, INSERT);
*slot = mem_elt;
return mem_elt;
}
evd.regs_active = regs_active;
evd.callback = NULL;
evd.callback_arg = NULL;
+ evd.dummy = false;
return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}
evd.regs_active = regs_active;
evd.callback = cb;
evd.callback_arg = data;
+ evd.dummy = false;
return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}
+/* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
+ or simplified. Useful to find out whether cselib_expand_value_rtx_cb
+ would return NULL or non-NULL, without allocating a new rtx. */
+
+bool
+cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
+ cselib_expand_callback cb, void *data)
+{
+ struct expand_value_data evd;
+
+ evd.regs_active = regs_active;
+ evd.callback = cb;
+ evd.callback_arg = data;
+ /* dummy = true makes cselib_expand_value_rtx_1 skip shallow_copy_rtx
+ and rtvec allocation and just report expandability. */
+ evd.dummy = true;
+
+ return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
+}
+
/* Internal implementation of cselib_expand_value_rtx and
cselib_expand_value_rtx_cb. */
that all fields need copying, and then clear the fields that should
not be copied. That is the sensible default behavior, and forces
us to explicitly document why we are *not* copying a flag. */
- copy = shallow_copy_rtx (orig);
+ if (evd->dummy)
+ copy = NULL;
+ else
+ copy = shallow_copy_rtx (orig);
format_ptr = GET_RTX_FORMAT (code);
max_depth - 1);
if (!result)
return NULL;
- XEXP (copy, i) = result;
+ if (copy)
+ XEXP (copy, i) = result;
}
break;
case 'V':
if (XVEC (orig, i) != NULL)
{
- XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
- for (j = 0; j < XVECLEN (copy, i); j++)
+ if (copy)
+ XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
+ for (j = 0; j < XVECLEN (orig, i); j++)
{
rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
evd, max_depth - 1);
if (!result)
return NULL;
- XVECEXP (copy, i, j) = result;
+ if (copy)
+ XVECEXP (copy, i, j) = result;
}
}
break;
gcc_unreachable ();
}
+ if (evd->dummy)
+ return orig;
+
mode = GET_MODE (copy);
/* If an operand has been simplified into CONST_INT, which doesn't
have a mode and the mode isn't derivable from whole rtx's mode,
{
/* This happens for autoincrements. Assign a value that doesn't
match any other. */
- e = new_cselib_val (++next_unknown_value, GET_MODE (x), x);
+ e = new_cselib_val (next_uid, GET_MODE (x), x);
}
return e->val_rtx;
case PRE_DEC:
case POST_MODIFY:
case PRE_MODIFY:
- e = new_cselib_val (++next_unknown_value, GET_MODE (x), x);
+ e = new_cselib_val (next_uid, GET_MODE (x), x);
return e->val_rtx;
default:
{
fputs ("cselib lookup ", dump_file);
print_inline_rtx (dump_file, x, 2);
- fprintf (dump_file, " => %u\n", ret ? ret->value : 0);
+ fprintf (dump_file, " => %u:%u\n",
+ ret ? ret->uid : 0,
+ ret ? ret->hash : 0);
}
return ret;
max_value_regs = n;
}
- e = new_cselib_val (++next_unknown_value, GET_MODE (x), x);
+ e = new_cselib_val (next_uid, GET_MODE (x), x);
e->locs = new_elt_loc_list (e->locs, x);
if (REG_VALUES (i) == 0)
{
REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
}
REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
- slot = htab_find_slot_with_hash (cselib_hash_table, x, e->value, INSERT);
+ slot = htab_find_slot_with_hash (cselib_hash_table, x, e->hash, INSERT);
*slot = e;
return cselib_log_lookup (x, e);
}
if (i < FIRST_PSEUDO_REGISTER && v != NULL)
this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;
- if (this_last < regno || v == NULL)
+ if (this_last < regno || v == NULL || v == cfa_base_preserved_val)
{
l = &(*l)->next;
continue;
&& GET_CODE (PATTERN (insn)) == ASM_OPERANDS
&& MEM_VOLATILE_P (PATTERN (insn))))
{
- cselib_reset_table_with_next_value (next_unknown_value);
+ cselib_reset_table (next_uid);
return;
}
init_alias_analysis. */
void
-cselib_init (bool record_memory)
+cselib_init (int record_what)
{
elt_list_pool = create_alloc_pool ("elt_list",
sizeof (struct elt_list), 10);
cselib_val_pool = create_alloc_pool ("cselib_val_list",
sizeof (cselib_val), 10);
value_pool = create_alloc_pool ("value", RTX_CODE_SIZE (VALUE), 100);
+ /* RECORD_WHAT is a bitmask of CSELIB_RECORD_MEMORY and
+ CSELIB_PRESERVE_CONSTANTS. */
- cselib_record_memory = record_memory;
+ cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
+ cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
/* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
see canon_true_dependence. This is only created once. */
n_used_regs = 0;
cselib_hash_table = htab_create (31, get_value_hash,
entry_and_rtx_equal_p, NULL);
+ /* Uid 0 is reserved; new_cselib_val asserts next_uid is non-zero. */
+ next_uid = 1;
}
/* Called when the current user is done with cselib. */
cselib_finish (void)
{
cselib_discard_hook = NULL;
+ /* Reset the preserve-constants state so a later cselib_init starts
+ clean. */
+ cselib_preserve_constants = false;
+ cfa_base_preserved_val = NULL;
free_alloc_pool (elt_list_pool);
free_alloc_pool (elt_loc_list_pool);
free_alloc_pool (cselib_val_pool);
used_regs = 0;
cselib_hash_table = 0;
n_useless_values = 0;
- next_unknown_value = 0;
+ next_uid = 0;
}
/* Dump the cselib_val *X to FILE *info. */
print_inline_rtx (out, first_containing_mem->val_rtx, 2);
fputc ('\n', out);
}
- fprintf (out, "last unknown value %i\n", next_unknown_value);
+ fprintf (out, "next uid %i\n", next_uid);
}
#include "gt-cselib.h"