/* We record floating-point CONST_DOUBLEs in each floating-point mode for
the values of 0, 1, and 2. For the integer entries and VOIDmode, we
- record a copy of const[012]_rtx. */
+ record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
+ is set only for MODE_INT and MODE_VECTOR_INT modes. */
-rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
+rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
rtx const_true_rtx;
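The extra row exists to back a CONSTM1_RTX lookup alongside the existing
CONST0/1/2_RTX macros. A sketch of the rtl.h accessors this table serves
(paraphrased, not part of this hunk):

    #define CONST0_RTX(MODE)  (const_tiny_rtx[0][(int) (MODE)])
    #define CONST1_RTX(MODE)  (const_tiny_rtx[1][(int) (MODE)])
    #define CONST2_RTX(MODE)  (const_tiny_rtx[2][(int) (MODE)])
    #define CONSTM1_RTX(MODE) (const_tiny_rtx[3][(int) (MODE)])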
return (p->alias ^ (p->align * 1000)
^ (p->addrspace * 4000)
- ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
- ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
+ ^ ((p->offset_known_p ? p->offset : 0) * 50000)
+ ^ ((p->size_known_p ? p->size : 0) * 2500000)
^ (size_t) iterative_hash_expr (p->expr, 0));
}
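The hash now folds in offset and size only when the matching known_p flag is
set, so every "offset unknown" mem_attrs hashes identically no matter what
stale value sits in the field. This presumes the struct has moved from rtx
offset/size members to plain integers plus validity flags, roughly:

    /* Assumed shape of the updated struct; field order is illustrative.  */
    struct mem_attrs
    {
      tree expr;            /* the MEM_EXPR, or NULL_TREE */
      HOST_WIDE_INT offset; /* meaningful only if offset_known_p */
      HOST_WIDE_INT size;   /* meaningful only if size_known_p */
      alias_set_type alias;
      unsigned int align;
      unsigned char addrspace;
      bool offset_known_p;
      bool size_known_p;
    };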
static bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
- return (p->alias == q->alias && p->offset == q->offset
- && p->size == q->size && p->align == q->align
+ return (p->alias == q->alias
+ && p->offset_known_p == q->offset_known_p
+ && (!p->offset_known_p || p->offset == q->offset)
+ && p->size_known_p == q->size_known_p
+ && (!p->size_known_p || p->size == q->size)
+ && p->align == q->align
&& p->addrspace == q->addrspace
&& (p->expr == q->expr
|| (p->expr != NULL_TREE && q->expr != NULL_TREE
offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
if (MEM_P (x))
{
- if (MEM_OFFSET (x) && CONST_INT_P (MEM_OFFSET (x)))
- REG_ATTRS (reg)
- = get_reg_attrs (MEM_EXPR (x), INTVAL (MEM_OFFSET (x)) + offset);
+ if (MEM_OFFSET_KNOWN_P (x))
+ REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
+ MEM_OFFSET (x) + offset);
if (MEM_POINTER (x))
mark_reg_pointer (reg, 0);
}
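The MEM_OFFSET_KNOWN_P test replaces the old double encoding, where an
unknown offset was either NULL_RTX or a non-CONST_INT expression. Presumably
the accessors are now simple field reads along these lines (a sketch, not
this patch's hunk):

    #define MEM_OFFSET_KNOWN_P(RTX) (get_mem_attrs (RTX)->offset_known_p)
    #define MEM_OFFSET(RTX)         (get_mem_attrs (RTX)->offset)
    #define MEM_SIZE_KNOWN_P(RTX)   (get_mem_attrs (RTX)->size_known_p)
    #define MEM_SIZE(RTX)           (get_mem_attrs (RTX)->size)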
unsigned HOST_WIDE_INT offset;
/* This function can't use
- if (!MEM_EXPR (mem) || !MEM_OFFSET (mem)
- || !CONST_INT_P (MEM_OFFSET (mem))
+ if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
|| (MAX (MEM_ALIGN (mem),
- get_object_alignment (MEM_EXPR (mem), align))
+ MAX (align, get_object_alignment (MEM_EXPR (mem))))
< align))
return -1;
else
- return (- INTVAL (MEM_OFFSET (mem))) & (align / BITS_PER_UNIT - 1);
+ return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
for two reasons:
- COMPONENT_REFs in MEM_EXPR can have NULL first operand,
for <variable>. get_inner_reference doesn't handle it and
isn't sufficiently aligned, the object it is in might be. */
gcc_assert (MEM_P (mem));
expr = MEM_EXPR (mem);
- if (expr == NULL_TREE
- || MEM_OFFSET (mem) == NULL_RTX
- || !CONST_INT_P (MEM_OFFSET (mem)))
+ if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
return -1;
- offset = INTVAL (MEM_OFFSET (mem));
+ offset = MEM_OFFSET (mem);
if (DECL_P (expr))
{
if (DECL_ALIGN (expr) < align)
/* ??? Can this ever happen? Calling this routine on a MEM that
already carries memory attributes should probably be invalid. */
attrs.expr = refattrs->expr;
+ attrs.offset_known_p = refattrs->offset_known_p;
attrs.offset = refattrs->offset;
+ attrs.size_known_p = refattrs->size_known_p;
attrs.size = refattrs->size;
attrs.align = refattrs->align;
}
{
defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
gcc_assert (!defattrs->expr);
- gcc_assert (!defattrs->offset);
+ gcc_assert (!defattrs->offset_known_p);
/* Respect mode size. */
+ attrs.size_known_p = defattrs->size_known_p;
attrs.size = defattrs->size;
/* ??? Is this really necessary? We probably should always get
the size from the type below. */
/* If the size is known, we can set that. */
if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
- attrs.size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
+ {
+ attrs.size_known_p = true;
+ attrs.size = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
+ }
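As a concrete instance: for a fixed-size type such as int[4] with 4-byte
int, TYPE_SIZE_UNIT is the INTEGER_CST 16, host_integerp succeeds, and the
attributes record size_known_p = true, size = 16. For a variable-length
array the test fails and size_known_p keeps its previous value, just as the
old code left attrs.size untouched.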
/* If T is not a type, we may be able to deduce some more information about
the expression. */
&& !TREE_THIS_VOLATILE (base))
MEM_READONLY_P (ref) = 1;
+ /* Mark static const strings readonly as well. */
+ if (base && TREE_CODE (base) == STRING_CST
+ && TREE_READONLY (base)
+ && TREE_STATIC (base))
+ MEM_READONLY_P (ref) = 1;
+
/* If this expression uses its parent's alias set, mark it such
that we won't change it. */
if (component_uses_parent_alias_set (t))
if (DECL_P (t))
{
attrs.expr = t;
- attrs.offset = const0_rtx;
+ attrs.offset_known_p = true;
+ attrs.offset = 0;
apply_bitpos = bitpos;
- attrs.size = (DECL_SIZE_UNIT (t)
- && host_integerp (DECL_SIZE_UNIT (t), 1)
- ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
+ if (DECL_SIZE_UNIT (t) && host_integerp (DECL_SIZE_UNIT (t), 1))
+ {
+ attrs.size_known_p = true;
+ attrs.size = tree_low_cst (DECL_SIZE_UNIT (t), 1);
+ }
+ else
+ attrs.size_known_p = false;
attrs.align = DECL_ALIGN (t);
align_computed = true;
}
&& ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
{
attrs.expr = t;
- attrs.offset = const0_rtx;
+ attrs.offset_known_p = true;
+ attrs.offset = 0;
apply_bitpos = bitpos;
/* ??? Any reason the field size would be different than
the size we got from the type? */
if (DECL_P (t2))
{
attrs.expr = t2;
- attrs.offset = NULL;
+ attrs.offset_known_p = false;
if (host_integerp (off_tree, 1))
{
HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
if (aoff && (unsigned HOST_WIDE_INT) aoff < attrs.align)
attrs.align = aoff;
align_computed = true;
- attrs.offset = GEN_INT (ioff);
+ attrs.offset_known_p = true;
+ attrs.offset = ioff;
apply_bitpos = bitpos;
}
}
else if (TREE_CODE (t2) == COMPONENT_REF)
{
attrs.expr = t2;
- attrs.offset = NULL;
+ attrs.offset_known_p = false;
if (host_integerp (off_tree, 1))
{
- attrs.offset = GEN_INT (tree_low_cst (off_tree, 1));
+ attrs.offset_known_p = true;
+ attrs.offset = tree_low_cst (off_tree, 1);
apply_bitpos = bitpos;
}
/* ??? Any reason the field size would be different than
else if (TREE_CODE (t) == MEM_REF)
{
attrs.expr = t;
- attrs.offset = const0_rtx;
+ attrs.offset_known_p = true;
+ attrs.offset = 0;
apply_bitpos = bitpos;
}
}
|| TREE_CODE (t) == TARGET_MEM_REF)
{
attrs.expr = t;
- attrs.offset = const0_rtx;
+ attrs.offset_known_p = true;
+ attrs.offset = 0;
apply_bitpos = bitpos;
}
- if (!align_computed && !INDIRECT_REF_P (t))
+ if (!align_computed)
{
- unsigned int obj_align = get_object_alignment (t, BIGGEST_ALIGNMENT);
+ unsigned int obj_align = get_object_alignment (t);
attrs.align = MAX (attrs.align, obj_align);
}
}
object to contain the negative offset. */
if (apply_bitpos)
{
- attrs.offset = plus_constant (attrs.offset,
- -(apply_bitpos / BITS_PER_UNIT));
- if (attrs.size)
- attrs.size = plus_constant (attrs.size, apply_bitpos / BITS_PER_UNIT);
+ gcc_assert (attrs.offset_known_p);
+ attrs.offset -= apply_bitpos / BITS_PER_UNIT;
+ if (attrs.size_known_p)
+ attrs.size += apply_bitpos / BITS_PER_UNIT;
}
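A worked instance of the adjustment: with apply_bitpos = 32 on attributes
that previously said offset = 0, size = 16, the stored offset becomes -4 and
the stored size becomes 20, so the expr-relative offset plus size still
covers the same underlying bytes even though the MEM now starts after the
point the attributes were computed for.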
/* Now set the attributes we computed above. */
+ attrs.addrspace = TYPE_ADDR_SPACE (type);
set_mem_attrs (ref, &attrs);
/* If this is already known to be a scalar or aggregate, we are done. */
/* Set the offset of MEM to OFFSET. */
void
-set_mem_offset (rtx mem, rtx offset)
+set_mem_offset (rtx mem, HOST_WIDE_INT offset)
{
struct mem_attrs attrs;
attrs = *get_mem_attrs (mem);
+ attrs.offset_known_p = true;
attrs.offset = offset;
set_mem_attrs (mem, &attrs);
}
+/* Clear the offset of MEM. */
+
+void
+clear_mem_offset (rtx mem)
+{
+ struct mem_attrs attrs;
+
+ attrs = *get_mem_attrs (mem);
+ attrs.offset_known_p = false;
+ set_mem_attrs (mem, &attrs);
+}
+
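A hypothetical caller that used to pass NULL_RTX to set_mem_offset to mean
"unknown" now has to pick one of the two entry points explicitly; delta and
known_delta_p below are made-up names for illustration:

    if (MEM_OFFSET_KNOWN_P (mem) && known_delta_p)
      set_mem_offset (mem, MEM_OFFSET (mem) + delta);  /* still tracked */
    else
      clear_mem_offset (mem);                          /* give up on it */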
/* Set the size of MEM to SIZE. */
void
struct mem_attrs attrs;
attrs = *get_mem_attrs (mem);
- attrs.size = GEN_INT (size);
+ attrs.size_known_p = true;
+ attrs.size = size;
set_mem_attrs (mem, &attrs);
}
struct mem_attrs attrs;
attrs = *get_mem_attrs (mem);
- attrs.size = NULL_RTX;
+ attrs.size_known_p = false;
set_mem_attrs (mem, &attrs);
}
\f
attrs = *get_mem_attrs (memref);
defattrs = mode_mem_attrs[(int) mmode];
- attrs.expr = defattrs->expr;
- attrs.offset = defattrs->offset;
+ attrs.expr = NULL_TREE;
+ attrs.offset_known_p = false;
+ attrs.size_known_p = defattrs->size_known_p;
attrs.size = defattrs->size;
attrs.align = defattrs->align;
/* Compute the new values of the memory attributes due to this adjustment.
We add the offsets and update the alignment. */
- if (attrs.offset)
- attrs.offset = GEN_INT (offset + INTVAL (attrs.offset));
+ if (attrs.offset_known_p)
+ attrs.offset += offset;
/* Compute the new alignment by taking the MIN of the alignment and the
lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
/* We can compute the size in a number of ways. */
defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
- if (defattrs->size)
- attrs.size = defattrs->size;
- else if (attrs.size)
- attrs.size = plus_constant (attrs.size, -offset);
+ if (defattrs->size_known_p)
+ {
+ attrs.size_known_p = true;
+ attrs.size = defattrs->size;
+ }
+ else if (attrs.size_known_p)
+ attrs.size -= offset;
set_mem_attrs (new_rtx, &attrs);
{
rtx new_rtx, addr = XEXP (memref, 0);
enum machine_mode address_mode;
- struct mem_attrs attrs;
+ struct mem_attrs attrs, *defattrs;
attrs = *get_mem_attrs (memref);
address_mode = targetm.addr_space.address_mode (attrs.addrspace);
/* Update the alignment to reflect the offset. Reset the offset, which
we don't know. */
- attrs.offset = 0;
- attrs.size = mode_mem_attrs[(int) GET_MODE (new_rtx)]->size;
+ defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
+ attrs.offset_known_p = false;
+ attrs.size_known_p = defattrs->size_known_p;
+ attrs.size = defattrs->size;
attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
set_mem_attrs (new_rtx, &attrs);
return new_rtx;
/* If we don't know what offset we were at within the expression, then
we can't know if we've overstepped the bounds. */
- if (! attrs.offset)
+ if (! attrs.offset_known_p)
attrs.expr = NULL_TREE;
while (attrs.expr)
otherwise strip back to the containing structure. */
if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
&& compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
- && INTVAL (attrs.offset) >= 0)
+ && attrs.offset >= 0)
break;
if (! host_integerp (offset, 1))
}
attrs.expr = TREE_OPERAND (attrs.expr, 0);
- attrs.offset
- = (GEN_INT (INTVAL (attrs.offset)
- + tree_low_cst (offset, 1)
- + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
- / BITS_PER_UNIT)));
+ attrs.offset += tree_low_cst (offset, 1);
+ attrs.offset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
+ / BITS_PER_UNIT);
}
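For example, widening past a field f whose DECL_FIELD_OFFSET is 4 and whose
DECL_FIELD_BIT_OFFSET is 0, with attributes currently (expr = s.f,
offset = 2), steps out to (expr = s, offset = 6): the byte offset of the
field within its parent is folded into the running offset.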
/* Similarly for the decl. */
else if (DECL_P (attrs.expr)
&& DECL_SIZE_UNIT (attrs.expr)
&& TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
&& compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
- && (! attrs.offset || INTVAL (attrs.offset) >= 0))
+ && (! attrs.offset_known_p || attrs.offset >= 0))
break;
else
{
}
if (! attrs.expr)
- attrs.offset = NULL_RTX;
+ attrs.offset_known_p = false;
/* The widened memory may alias other stuff, so zap the alias set. */
/* ??? Maybe use get_alias_set on any remaining expression. */
attrs.alias = 0;
- attrs.size = GEN_INT (size);
+ attrs.size_known_p = true;
+ attrs.size = size;
set_mem_attrs (new_rtx, &attrs);
return new_rtx;
}
(mem:MODE (plus (reg sfp) (const_int offset)))
with perhaps the plus missing for offset = 0. */
addr = XEXP (mem, 0);
- attrs.offset = const0_rtx;
+ attrs.offset_known_p = true;
+ attrs.offset = 0;
if (GET_CODE (addr) == PLUS
&& CONST_INT_P (XEXP (addr, 1)))
- attrs.offset = XEXP (addr, 1);
+ attrs.offset = INTVAL (XEXP (addr, 1));
set_mem_attrs (mem, &attrs);
MEM_NOTRAP_P (mem) = 1;
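So a frame spill slot such as

    (mem:SI (plus (reg sfp) (const_int -16)))

gets offset_known_p = true, offset = -16, while a slot at the frame base,
whose address is the bare (reg sfp), keeps the offset 0 assigned just above.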
{
reset_used_flags (PATTERN (p));
reset_used_flags (REG_NOTES (p));
+ if (CALL_P (p))
+ reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
}
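CALL_INSN_FUNCTION_USAGE hangs a third rtx tree off every call insn,
something of the shape

    (expr_list (use (reg:SI 5 di))
       (expr_list (use (reg:SI 4 si))
          (nil)))

so the sharing walks must visit it exactly as they do PATTERN and REG_NOTES;
the same three-line addition therefore recurs at each reset, verify, and
copy site below.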
/* Make sure that virtual stack slots are not shared. */
case PC:
case CC0:
case RETURN:
+ case SIMPLE_RETURN:
case SCRATCH:
return;
/* SCRATCH must be shared because each one represents a distinct value. */
{
reset_used_flags (PATTERN (p));
reset_used_flags (REG_NOTES (p));
+ if (CALL_P (p))
+ reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
if (GET_CODE (PATTERN (p)) == SEQUENCE)
{
int i;
gcc_assert (INSN_P (q));
reset_used_flags (PATTERN (q));
reset_used_flags (REG_NOTES (q));
+ if (CALL_P (q))
+ reset_used_flags (CALL_INSN_FUNCTION_USAGE (q));
}
}
}
{
verify_rtx_sharing (PATTERN (p), p);
verify_rtx_sharing (REG_NOTES (p), p);
+ if (CALL_P (p))
+ verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (p), p);
}
timevar_pop (TV_VERIFY_RTL_SHARING);
{
PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
+ if (CALL_P (insn))
+ CALL_INSN_FUNCTION_USAGE (insn)
+ = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
}
}
case CODE_LABEL:
case PC:
case CC0:
+ case RETURN:
+ case SIMPLE_RETURN:
case SCRATCH:
/* SCRATCH must be shared because each one represents a distinct value. */
return;
case CODE_LABEL:
case PC:
case CC0:
+ case RETURN:
+ case SIMPLE_RETURN:
return;
case DEBUG_INSN:
return insn;
}
-/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
-
-rtx
-prev_label (rtx insn)
-{
- while (insn)
- {
- insn = PREV_INSN (insn);
- if (insn == 0 || LABEL_P (insn))
- break;
- }
-
- return insn;
-}
-
-/* Return the last label to mark the same position as LABEL. Return null
- if LABEL itself is null. */
+/* Return the last label to mark the same position as LABEL. Return LABEL
+ itself if it is null or any return rtx. */
rtx
skip_consecutive_labels (rtx label)
{
rtx insn;
+ if (label && ANY_RETURN_P (label))
+ return label;
+
for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
if (LABEL_P (insn))
label = insn;
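The early-out relies on the new ANY_RETURN_P predicate, which presumably
reads

    #define ANY_RETURN_P(X) \
      (GET_CODE (X) == RETURN || GET_CODE (X) == SIMPLE_RETURN)

and is needed here because a jump's JUMP_LABEL may now be ret_rtx or
simple_return_rtx rather than a CODE_LABEL, and those have no insn chain to
walk forward through.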
case REG_NORETURN:
case REG_SETJMP:
+ case REG_TM:
for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
{
if (CALL_P (insn))
break;
#endif
+ case REG_ARGS_SIZE:
+ fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
+ break;
+
default:
break;
}
return REG_NOTES (insn);
}
+
+/* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
+
+rtx
+set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
+{
+ rtx set = single_set (insn);
+
+ if (set && SET_DEST (set) == dst)
+ return set_unique_reg_note (insn, kind, datum);
+ return NULL_RTX;
+}
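A hypothetical use site, e.g. at the tail of an expander that may have
computed into a scratch rather than TARGET (the names below are
illustrative):

    rtx last = get_last_insn ();
    /* Attach a REG_EQUAL note only if LAST really sets TARGET.  */
    set_dst_reg_note (last, REG_EQUAL,
                      gen_rtx_MULT (mode, copy_rtx (op0), copy_rtx (op1)),
                      target);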
\f
/* Return an indication of which type of insn should have X as a body.
The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
return CODE_LABEL;
if (GET_CODE (x) == CALL)
return CALL_INSN;
- if (GET_CODE (x) == RETURN)
+ if (ANY_RETURN_P (x))
return JUMP_INSN;
if (GET_CODE (x) == SET)
{
switch (code)
{
case REG:
+ case DEBUG_EXPR:
case CONST_INT:
case CONST_DOUBLE:
case CONST_FIXED:
case CODE_LABEL:
case PC:
case CC0:
+ case RETURN:
+ case SIMPLE_RETURN:
return orig;
case CLOBBER:
if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
return CONST0_RTX (mode);
else if (x == CONST1_RTX (inner))
return CONST1_RTX (mode);
+ else if (x == CONSTM1_RTX (inner))
+ return CONSTM1_RTX (mode);
}
return gen_rtx_raw_CONST_VECTOR (mode, v);
/* Assign register numbers to the globally defined register rtx. */
pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
+ simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
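Like pc_rtx and ret_rtx, simple_return_rtx is a unique shared object, which
is why the sharing code above lists SIMPLE_RETURN among the codes that must
never be copied, and why pointer comparison suffices. In sketch form, a
target epilogue that needs no frame teardown could emit it directly:

    emit_jump_insn (simple_return_rtx);  /* classified as a JUMP_INSN via
                                            the ANY_RETURN_P check in
                                            classify_insn */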
attrs->addrspace = ADDR_SPACE_GENERIC;
if (mode != BLKmode)
{
- attrs->size = GEN_INT (GET_MODE_SIZE (mode));
+ attrs->size_known_p = true;
+ attrs->size = GET_MODE_SIZE (mode);
if (STRICT_ALIGNMENT)
attrs->align = GET_MODE_ALIGNMENT (mode);
}
dconsthalf = dconst1;
SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
- for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
+ for (i = 0; i < 3; i++)
+ for (i = 0; i < 3; i++)
{
const REAL_VALUE_TYPE *const r =
(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
const_tiny_rtx[i][(int) mode] = GEN_INT (i);
}
+ const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
+
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
+ mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ const_tiny_rtx[3][(int) mode] = constm1_rtx;
+
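With the table populated, CONSTM1_RTX hands back an all-ones constant in any
integer or integer-vector mode, e.g. as a mask for bitwise complement
(sketch):

    rtx ones = CONSTM1_RTX (mode);  /* (const_int -1) for SImode; a
                                       const_vector of -1s for V4SImode */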
for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
{
const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
+ const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
}
for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);