/* Declarations of static functions. */
static void alpha_set_memflags_1
- PROTO((rtx, int, int, int, int));
+ PROTO((rtx, int, int, int));
static rtx alpha_emit_set_const_1
PROTO((rtx, enum machine_mode, HOST_WIDE_INT, int));
static void alpha_expand_unaligned_load_words
PROTO((rtx *, rtx, HOST_WIDE_INT, HOST_WIDE_INT));
if (!alpha_mlat_string)
alpha_mlat_string = "L1";
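+ /* The argument is either a raw cycle count or "Ln", naming a cache
+ level whose latency is looked up in the table below. */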
- if (isdigit (alpha_mlat_string[0])
+ if (ISDIGIT ((unsigned char)alpha_mlat_string[0])
&& (lat = strtol (alpha_mlat_string, &end, 10), *end == '\0'))
;
else if ((alpha_mlat_string[0] == 'L' || alpha_mlat_string[0] == 'l')
- && isdigit (alpha_mlat_string[1])
+ && ISDIGIT ((unsigned char)alpha_mlat_string[1])
&& alpha_mlat_string[2] == '\0')
{
static int const cache_latency[][4] =
{
return ((GET_CODE (op) == CONST_INT
&& (unsigned HOST_WIDE_INT) INTVAL (op) < 64)
- || GET_CODE (op) == CONSTANT_P_RTX
|| register_operand (op, mode));
}
{
return ((GET_CODE (op) == CONST_INT
&& (unsigned HOST_WIDE_INT) INTVAL (op) < 0x100)
- || GET_CODE (op) == CONSTANT_P_RTX
|| register_operand (op, mode));
}
enum machine_mode mode ATTRIBUTE_UNUSED;
{
return ((GET_CODE (op) == CONST_INT
- && (unsigned HOST_WIDE_INT) INTVAL (op) < 0x100)
- || GET_CODE (op) == CONSTANT_P_RTX);
+ && (unsigned HOST_WIDE_INT) INTVAL (op) < 0x100));
}
/* Return 1 if the operand is a valid second operand to an add insn. */
enum machine_mode mode;
{
if (GET_CODE (op) == CONST_INT)
+ /* Constraints I, J, O and P are covered by K. */
return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'K')
- || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L')
- || CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'));
- else if (GET_CODE (op) == CONSTANT_P_RTX)
- return 1;
+ || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
return register_operand (op, mode);
}
enum machine_mode mode;
{
if (GET_CODE (op) == CONST_INT)
- return ((unsigned HOST_WIDE_INT) INTVAL (op) < 255
- || (unsigned HOST_WIDE_INT) (- INTVAL (op)) < 255);
- else if (GET_CODE (op) == CONSTANT_P_RTX)
- return 1;
+ return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
+ || CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'));
return register_operand (op, mode);
}
return ((unsigned HOST_WIDE_INT) INTVAL (op) < 0x100
|| (unsigned HOST_WIDE_INT) ~ INTVAL (op) < 0x100
|| zap_mask (INTVAL (op)));
- else if (GET_CODE (op) == CONSTANT_P_RTX)
- return 1;
return register_operand (op, mode);
}
if (GET_CODE (op) == CONST_INT)
return ((unsigned HOST_WIDE_INT) INTVAL (op) < 0x100
|| (unsigned HOST_WIDE_INT) ~ INTVAL (op) < 0x100);
- else if (GET_CODE (op) == CONSTANT_P_RTX)
- return 1;
return register_operand (op, mode);
}
enum machine_mode mode;
{
return (GET_CODE (op) == CONST_INT
- || GET_CODE (op) == CONSTANT_P_RTX
|| register_operand (op, mode));
}
switch (GET_CODE (op))
{
case REG: case MEM: case CONST_DOUBLE: case CONST_INT: case LABEL_REF:
- case SYMBOL_REF: case CONST: case CONSTANT_P_RTX:
+ case SYMBOL_REF: case CONST:
return 1;
case SUBREG:
return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
case CONST_INT:
- case CONSTANT_P_RTX:
return mode == QImode || mode == HImode || add_operand (op, mode);
default:
&& REGNO (SUBREG_REG (op)) >= FIRST_PSEUDO_REGISTER));
}
+/* Returns 1 if OP is not an eliminable register.
+
+ This exists to cure a pathological abort in the s8addq (et al) patterns,
+
+ long foo () { long t; bar(); return (long) &t * 26107; }
+
+ which run afoul of a hack in reload to cure a (presumably) similar
+ problem with lea-type instructions on other targets. But there is
+ one of us and many of them, so work around the problem by selectively
+ preventing combine from making the optimization. */
+
+int
+reg_not_elim_operand (op, mode)
+ register rtx op;
+ enum machine_mode mode;
+{
+ rtx inner = op;
+ if (GET_CODE (op) == SUBREG)
+ inner = SUBREG_REG (op);
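+ /* The frame and argument pointers are the registers that reload
+ may eliminate; rejecting them is what keeps combine away. */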
+ if (inner == frame_pointer_rtx || inner == arg_pointer_rtx)
+ return 0;
+
+ return register_operand (op, mode);
+}
+\f
/* Return 1 if this function can directly return via $26. */
int
if (GET_CODE (base) == PLUS)
offset += INTVAL (XEXP (base, 1)), base = XEXP (base, 0);
- *paligned_mem = change_address (ref, SImode,
- plus_constant (base, offset & ~3));
+ *paligned_mem = gen_rtx_MEM (SImode, plus_constant (base, offset & ~3));
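+ /* gen_rtx_MEM leaves all the memory attributes clear, so copy them
+ over from the original reference by hand. */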
+ MEM_IN_STRUCT_P (*paligned_mem) = MEM_IN_STRUCT_P (ref);
+ MEM_VOLATILE_P (*paligned_mem) = MEM_VOLATILE_P (ref);
+ RTX_UNCHANGING_P (*paligned_mem) = RTX_UNCHANGING_P (ref);
+
+ /* Sadly, we cannot use alias sets here because we may overlap other
+ data in a different alias set. */
+ /* MEM_ALIAS_SET (*paligned_mem) = MEM_ALIAS_SET (ref); */
*pbitnum = GEN_INT ((offset & 3) * 8);
}
/* Subfunction of the following function.  Update the flags of any MEM
   found in part of X. */
static void
-alpha_set_memflags_1 (x, in_struct_p, volatile_p, unchanging_p, alias_set)
+alpha_set_memflags_1 (x, in_struct_p, volatile_p, unchanging_p)
rtx x;
- int in_struct_p, volatile_p, unchanging_p, alias_set;
+ int in_struct_p, volatile_p, unchanging_p;
{
int i;
case PARALLEL:
for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
alpha_set_memflags_1 (XVECEXP (x, 0, i), in_struct_p, volatile_p,
- unchanging_p, alias_set);
+ unchanging_p);
break;
case INSN:
alpha_set_memflags_1 (PATTERN (x), in_struct_p, volatile_p,
- unchanging_p, alias_set);
+ unchanging_p);
break;
case SET:
alpha_set_memflags_1 (SET_DEST (x), in_struct_p, volatile_p,
- unchanging_p, alias_set);
+ unchanging_p);
alpha_set_memflags_1 (SET_SRC (x), in_struct_p, volatile_p,
- unchanging_p, alias_set);
+ unchanging_p);
break;
case MEM:
MEM_IN_STRUCT_P (x) = in_struct_p;
MEM_VOLATILE_P (x) = volatile_p;
RTX_UNCHANGING_P (x) = unchanging_p;
- MEM_ALIAS_SET (x) = alias_set;
+ /* Sadly, we cannot use alias sets because the extra aliasing
+ produced by the AND interferes. Given that two-byte quantities
+ are the only thing we would be able to differentiate anyway,
+ there does not seem to be any point in convoluting the early
+ out of the alias check. */
+ /* MEM_ALIAS_SET (x) = alias_set; */
break;
default:
rtx insn;
rtx ref;
{
- int in_struct_p, volatile_p, unchanging_p, alias_set;
+ int in_struct_p, volatile_p, unchanging_p;
if (GET_CODE (ref) != MEM)
return;
in_struct_p = MEM_IN_STRUCT_P (ref);
volatile_p = MEM_VOLATILE_P (ref);
unchanging_p = RTX_UNCHANGING_P (ref);
- alias_set = MEM_ALIAS_SET (ref);
/* This is only called from alpha.md, after having had something
generated from one of the insn patterns. So if everything is
zero, the pattern is already up-to-date. */
- if (! in_struct_p && ! volatile_p && ! unchanging_p && ! alias_set)
+ if (! in_struct_p && ! volatile_p && ! unchanging_p)
return;
- alpha_set_memflags_1 (insn, in_struct_p, volatile_p, unchanging_p,
- alias_set);
+ alpha_set_memflags_1 (insn, in_struct_p, volatile_p, unchanging_p);
}
\f
/* Try to output insns to set TARGET equal to the constant C if it can be
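+ /* Try the shifted constant sign-extended first and zero-extended
+ second; the two differ only when the sign bit of C is set. */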
for (; bits > 0; bits--)
if ((temp = (alpha_emit_set_const
(subtarget, mode,
- (unsigned HOST_WIDE_INT) c >> bits, i))) != 0
+ (unsigned HOST_WIDE_INT) (c >> bits), i))) != 0
|| ((temp = (alpha_emit_set_const
(subtarget, mode,
((unsigned HOST_WIDE_INT) c) >> bits, i)))
{
rtx bytes_rtx = operands[2];
rtx align_rtx = operands[3];
- HOST_WIDE_INT bytes = INTVAL (bytes_rtx);
+ HOST_WIDE_INT orig_bytes = INTVAL (bytes_rtx);
+ HOST_WIDE_INT bytes = orig_bytes;
HOST_WIDE_INT src_align = INTVAL (align_rtx);
HOST_WIDE_INT dst_align = src_align;
rtx orig_src = operands[1];
enum machine_mode mode;
tmp = XEXP (XEXP (orig_src, 0), 0);
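+ /* mode_for_size takes a size in bits, not bytes. */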
- mode = mode_for_size (bytes, MODE_INT, 1);
+ mode = mode_for_size (bytes * BITS_PER_UNIT, MODE_INT, 1);
if (mode != BLKmode
&& GET_MODE_SIZE (GET_MODE (tmp)) <= bytes)
{
enum machine_mode mode;
tmp = XEXP (XEXP (orig_dst, 0), 0);
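+ /* BYTES has been consumed by the copy loop above; the size wanted
+ here is the original one, again in bits. */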
- mode = mode_for_size (bytes, MODE_INT, 1);
+ mode = mode_for_size (orig_bytes * BITS_PER_UNIT, MODE_INT, 1);
if (GET_MODE (tmp) == mode && nregs == 1)
{
emit_move_insn (tmp, data_regs[0]);
/* ??? If nregs > 1, consider reconstructing the word in regs. */
/* ??? Optimize mode < dst_mode with strict_low_part. */
- /* No appropriate mode; fall back on memory. */
+
+ /* No appropriate mode; fall back on memory. We can speed things
+ up by recognizing extra alignment information. */
orig_dst = change_address (orig_dst, GET_MODE (orig_dst),
copy_addr_to_reg (XEXP (orig_dst, 0)));
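+ /* The stack slot that will replace TMP is presumably aligned to
+ the full size of TMP's mode. */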
+ dst_align = GET_MODE_SIZE (GET_MODE (tmp));
}
/* Write out the data in whatever chunks reading the source allowed. */
dest = change_address (block, ptr_mode, XEXP (block, 0));
emit_move_insn (dest, addr);
- if (flag_check_memory_usage)
+ if (current_function_check_memory_usage)
emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
dest, ptr_mode,
GEN_INT (GET_MODE_SIZE (ptr_mode)),
POINTER_SIZE/BITS_PER_UNIT));
emit_move_insn (dest, argsize);
- if (flag_check_memory_usage)
+ if (current_function_check_memory_usage)
emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
dest, ptr_mode,
GEN_INT (GET_MODE_SIZE
/* Let shorten branches care for assigning alignments to code labels. */
shorten_branches (insns);
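+ /* Compute the alignment unit up front; the offset bookkeeping
+ below is done modulo this unit. */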
+ align = (FUNCTION_BOUNDARY/BITS_PER_UNIT < max_align
+ ? FUNCTION_BOUNDARY/BITS_PER_UNIT : max_align);
+
/* Account for the initial GP load, which happens before the scheduled
prologue we emitted as RTL. */
ofs = prev_in_use = 0;
if (alpha_does_function_need_gp ())
{
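+ /* The GP load is 8 bytes; record its offset modulo the alignment
+ unit rather than the raw 8. */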
- ofs = 8;
+ ofs = 8 & (align - 1);
prev_in_use = gp_in_use;
}
- align = (FUNCTION_BOUNDARY/BITS_PER_UNIT < max_align
- ? FUNCTION_BOUNDARY/BITS_PER_UNIT : max_align);
-
i = insns;
if (GET_CODE (i) == NOTE)
i = next_nonnote_insn (i);