/* This canonized mem. */
rtx mem;
- /* The result of get_addr on mem. */
+ /* Canonized MEM address for use by canon_true_dependence. */
rtx mem_addr;
/* If this is non-zero, it is the alias set of a spill location. */
{
/* A bitmap with one bit per byte. Cleared bit means the position
is needed. Used if IS_LARGE is false. */
- bitmap bitmap;
+ bitmap bmap;
/* Number of set bits (i.e. unneeded bytes) in BITMAP. If it is
equal to END - BEGIN, the whole store is unused. */
do read dependency. */
rtx base_mem;
- /* Canonized version of base_mem, most likely the same thing. */
- rtx canon_base_mem;
+ /* Canonized version of base_mem's address. */
+ rtx canon_base_addr;
/* These two sets of two bitmaps are used to keep track of how many
stores are actually referencing that position from this base. We
gi->rtx_base = base;
gi->id = rtx_group_next_id++;
gi->base_mem = gen_rtx_MEM (QImode, base);
- gi->canon_base_mem = canon_rtx (gi->base_mem);
+ gi->canon_base_addr = canon_rtx (base);
gi->store1_n = BITMAP_ALLOC (NULL);
gi->store1_p = BITMAP_ALLOC (NULL);
gi->store2_n = BITMAP_ALLOC (NULL);
{
store_info_t next = store_info->next;
if (store_info->is_large)
- BITMAP_FREE (store_info->positions_needed.large.bitmap);
+ BITMAP_FREE (store_info->positions_needed.large.bmap);
if (store_info->cse_base)
pool_free (cse_store_info_pool, store_info);
else
case POST_INC:
{
rtx r1 = XEXP (x, 0);
- rtx c = gen_int_mode (Pmode, data->size);
+ rtx c = gen_int_mode (data->size, Pmode);
emit_insn_before (gen_rtx_SET (Pmode, r1,
gen_rtx_PLUS (Pmode, r1, c)),
data->insn);
case POST_DEC:
{
rtx r1 = XEXP (x, 0);
- rtx c = gen_int_mode (Pmode, -data->size);
+ rtx c = gen_int_mode (-data->size, Pmode);
emit_insn_before (gen_rtx_SET (Pmode, r1,
gen_rtx_PLUS (Pmode, r1, c)),
data->insn);
if (GET_CODE (address) == CONST)
address = XEXP (address, 0);
- if (GET_CODE (address) == PLUS && GET_CODE (XEXP (address, 1)) == CONST_INT)
+ if (GET_CODE (address) == PLUS && CONST_INT_P (XEXP (address, 1)))
{
*offset = INTVAL (XEXP (address, 1));
address = XEXP (address, 0);
{
if (__builtin_expect (s_info->is_large, false))
{
- if (!bitmap_bit_p (s_info->positions_needed.large.bitmap, pos))
+ if (!bitmap_bit_p (s_info->positions_needed.large.bmap, pos))
{
s_info->positions_needed.large.count++;
- bitmap_set_bit (s_info->positions_needed.large.bitmap, pos);
+ bitmap_set_bit (s_info->positions_needed.large.bmap, pos);
}
}
else
{
int pos, end = s_info->end - s_info->begin;
for (pos = 0; pos < end; pos++)
- bitmap_set_bit (s_info->positions_needed.large.bitmap, pos);
+ bitmap_set_bit (s_info->positions_needed.large.bmap, pos);
s_info->positions_needed.large.count = end;
}
else
{
int end = start + width;
while (start < end)
- if (bitmap_bit_p (s_info->positions_needed.large.bitmap, start++))
+ if (bitmap_bit_p (s_info->positions_needed.large.bmap, start++))
return false;
return true;
}
static int
record_store (rtx body, bb_info_t bb_info)
{
- rtx mem, rhs, const_rhs;
+ rtx mem, rhs, const_rhs, mem_addr;
HOST_WIDE_INT offset = 0;
HOST_WIDE_INT width = 0;
alias_set_type spill_alias_set;
ptr = active_local_stores;
last = NULL;
redundant_reason = NULL;
+ mem = canon_rtx (mem);
+ /* For alias_set != 0, canon_true_dependence should never be called. */
+ if (spill_alias_set)
+ mem_addr = NULL_RTX;
+ else
+ {
+ if (group_id < 0)
+ mem_addr = base->val_rtx;
+ else
+ {
+ group_info_t group
+ = VEC_index (group_info_t, rtx_group_vec, group_id);
+ mem_addr = group->canon_base_addr;
+ }
+ if (offset)
+ mem_addr = plus_constant (mem_addr, offset);
+ }
while (ptr)
{
if (canon_true_dependence (s_info->mem,
GET_MODE (s_info->mem),
s_info->mem_addr,
- mem, rtx_varies_p))
+ mem, mem_addr, rtx_varies_p))
{
s_info->rhs = NULL;
s_info->const_rhs = NULL;
}
}
-
+
/* An insn can be deleted if every position of every one of
its s_infos is zero. */
if (any_positions_needed_p (s_info)
/* Finish filling in the store_info. */
store_info->next = insn_info->store_rec;
insn_info->store_rec = store_info;
- store_info->mem = canon_rtx (mem);
+ store_info->mem = mem;
store_info->alias_set = spill_alias_set;
- store_info->mem_addr = get_addr (XEXP (mem, 0));
+ store_info->mem_addr = mem_addr;
store_info->cse_base = base;
if (width > HOST_BITS_PER_WIDE_INT)
{
store_info->is_large = true;
store_info->positions_needed.large.count = 0;
- store_info->positions_needed.large.bitmap = BITMAP_ALLOC (NULL);
+ store_info->positions_needed.large.bmap = BITMAP_ALLOC (NULL);
}
else
{
static int
check_mem_read_rtx (rtx *loc, void *data)
{
- rtx mem = *loc;
+ rtx mem = *loc, mem_addr;
bb_info_t bb_info;
insn_info_t insn_info;
HOST_WIDE_INT offset = 0;
read_info->end = offset + width;
read_info->next = insn_info->read_rec;
insn_info->read_rec = read_info;
+ /* For alias_set != 0, canon_true_dependence should never be called. */
+ if (spill_alias_set)
+ mem_addr = NULL_RTX;
+ else
+ {
+ if (group_id < 0)
+ mem_addr = base->val_rtx;
+ else
+ {
+ group_info_t group
+ = VEC_index (group_info_t, rtx_group_vec, group_id);
+ mem_addr = group->canon_base_addr;
+ }
+ if (offset)
+ mem_addr = plus_constant (mem_addr, offset);
+ }
/* We ignore the clobbers in store_info. The is mildly aggressive,
but there really should not be a clobber followed by a read. */
= canon_true_dependence (store_info->mem,
GET_MODE (store_info->mem),
store_info->mem_addr,
- mem, rtx_varies_p);
+ mem, mem_addr, rtx_varies_p);
else if (group_id == store_info->group_id)
{
= canon_true_dependence (store_info->mem,
GET_MODE (store_info->mem),
store_info->mem_addr,
- mem, rtx_varies_p);
+ mem, mem_addr, rtx_varies_p);
/* If this read is just reading back something that we just
stored, rewrite the read. */
if (store_info->rhs
&& store_info->group_id == -1
&& store_info->cse_base == base
+ && width != -1
&& offset >= store_info->begin
&& offset + width <= store_info->end
&& all_positions_needed_p (store_info,
remove = canon_true_dependence (store_info->mem,
GET_MODE (store_info->mem),
store_info->mem_addr,
- mem, rtx_varies_p);
+ mem, mem_addr, rtx_varies_p);
if (remove)
{
for (s_info = ptr->store_rec; s_info; s_info = s_info->next)
if (s_info->is_large)
{
- BITMAP_FREE (s_info->positions_needed.large.bitmap);
+ BITMAP_FREE (s_info->positions_needed.large.bmap);
s_info->is_large = false;
}
}
if ((read_info->group_id < 0)
&& canon_true_dependence (group->base_mem,
QImode,
- group->canon_base_mem,
- read_info->mem, rtx_varies_p))
+ group->canon_base_addr,
+ read_info->mem, NULL_RTX,
+ rtx_varies_p))
{
if (kill)
bitmap_ior_into (kill, group->group_kill);