/* Xstormy16 target functions.
- Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004
+ Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
Free Software Foundation, Inc.
Contributed by Red Hat, Inc.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+the Free Software Foundation, 51 Franklin Street, Fifth Floor,
+Boston, MA 02110-1301, USA. */
#include "config.h"
#include "system.h"
struct rtx_def * xstormy16_compare_op0;
struct rtx_def * xstormy16_compare_op1;
-/* Return 1 if this is a LT, GE, LTU, or GEU operator. */
-
-int
-xstormy16_ineqsi_operator (register rtx op, enum machine_mode mode)
-{
- enum rtx_code code = GET_CODE (op);
-
- return ((mode == VOIDmode || GET_MODE (op) == mode)
- && (code == LT || code == GE || code == LTU || code == GEU));
-}
-
-/* Return 1 if this is an EQ or NE operator. */
-
-int
-equality_operator (register rtx op, enum machine_mode mode)
-{
- return ((mode == VOIDmode || GET_MODE (op) == mode)
- && (GET_CODE (op) == EQ || GET_CODE (op) == NE));
-}
-
-/* Return 1 if this is a comparison operator but not an EQ or NE operator. */
-
-int
-inequality_operator (register rtx op, enum machine_mode mode)
-{
- return comparison_operator (op, mode) && ! equality_operator (op, mode);
-}
-
/* Compute a (partial) cost for rtx X. Return true if the complete
cost has been computed, and false if subexpressions should be
scanned. In either case, *TOTAL contains the cost result. */
enum machine_mode mode;
mode = GET_MODE (op0);
- if (mode != HImode && mode != SImode)
- abort ();
+ gcc_assert (mode == HImode || mode == SImode);
if (mode == SImode
&& (code == GT || code == LE || code == GTU || code == LEU))
seq = get_insns ();
end_sequence ();
- if (! INSN_P (seq))
- abort ();
+ gcc_assert (INSN_P (seq));
last_insn = seq;
while (NEXT_INSN (last_insn) != NULL_RTX)
case LEU: ccode = "ls"; break;
default:
- abort ();
+ gcc_unreachable ();
}
if (need_longbranch)
/* The missing codes above should never be generated. */
default:
- abort ();
+ gcc_unreachable ();
}
switch (code)
{
int regnum;
- if (GET_CODE (XEXP (op, 0)) != REG)
- abort ();
+ gcc_assert (GET_CODE (XEXP (op, 0)) == REG);
regnum = REGNO (XEXP (op, 0));
sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
break;
default:
- abort ();
+ gcc_unreachable ();
}
if (need_longbranch)
{
if ((GET_CODE (x) == CONST_INT)
&& ((INTVAL (x) >= 32768) || (INTVAL (x) < -32768)))
- error ("Constant halfword load operand out of range.");
+ error ("constant halfword load operand out of range");
return general_operand (x, mode);
}
{
if ((GET_CODE (x) == CONST_INT)
&& ((INTVAL (x) >= 32768) || (INTVAL (x) < -32768)))
- error ("Constant arithmetic operand out of range.");
+ error ("constant arithmetic operand out of range");
return nonmemory_operand (x, mode);
}
if (GET_CODE (x) == PLUS
&& GET_CODE (XEXP (x, 1)) == CONST_INT)
x = XEXP (x, 0);
+
if (GET_CODE (x) == SYMBOL_REF)
- {
- const char *n = XSTR (x, 0);
- if (n[0] == '@' && n[1] == 'b' && n[2] == '.')
- return 1;
- }
+ return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
+
if (GET_CODE (x) == CONST_INT)
{
HOST_WIDE_INT i = INTVAL (x);
return 0;
}
-/* Predicate for MEMs that can use special 8-bit addressing. */
-int
-xstormy16_below100_operand (rtx x, enum machine_mode mode)
-{
- if (GET_MODE (x) != mode)
- return 0;
- if (GET_CODE (x) == MEM)
- x = XEXP (x, 0);
- else if (GET_CODE (x) == SUBREG
- && GET_CODE (XEXP (x, 0)) == MEM
- && !MEM_VOLATILE_P (XEXP (x, 0)))
- x = XEXP (XEXP (x, 0), 0);
- else
- return 0;
- if (GET_CODE (x) == CONST_INT)
- {
- HOST_WIDE_INT i = INTVAL (x);
- return (i >= 0x7f00 && i < 0x7fff);
- }
- return xstormy16_below100_symbol (x, HImode);
-}
-
/* Likewise, but only for non-volatile MEMs, for patterns where the
MEM will get split into smaller sized accesses. */
int
return xstormy16_below100_operand (x, mode);
}
-int
-xstormy16_below100_or_register (rtx x, enum machine_mode mode)
-{
- return (xstormy16_below100_operand (x, mode)
- || register_operand (x, mode));
-}
-
-int
-xstormy16_splittable_below100_or_register (rtx x, enum machine_mode mode)
-{
- if (GET_CODE (x) == MEM && MEM_VOLATILE_P (x))
- return 0;
- return (xstormy16_below100_operand (x, mode)
- || register_operand (x, mode));
-}
-
-/* Predicate for constants with exactly one bit set. */
-int
-xstormy16_onebit_set_operand (rtx x, enum machine_mode mode)
-{
- HOST_WIDE_INT i;
- if (GET_CODE (x) != CONST_INT)
- return 0;
- i = INTVAL (x);
- if (mode == QImode)
- i &= 0xff;
- if (mode == HImode)
- i &= 0xffff;
- return exact_log2 (i) != -1;
-}
-
-/* Predicate for constants with exactly one bit not set. */
-int
-xstormy16_onebit_clr_operand (rtx x, enum machine_mode mode)
-{
- HOST_WIDE_INT i;
- if (GET_CODE (x) != CONST_INT)
- return 0;
- i = ~ INTVAL (x);
- if (mode == QImode)
- i &= 0xff;
- if (mode == HImode)
- i &= 0xffff;
- return exact_log2 (i) != -1;
-}
-
/* Expand an 8-bit IOR. This either detects the one case we can
actually do, or uses a 16-bit IOR. */
void
return (GET_CODE (XEXP (x, 0)) != PLUS);
}
-int
-nonimmediate_nonstack_operand (rtx op, enum machine_mode mode)
-{
- /* 'Q' is for pushes, 'R' for pops. */
- return (nonimmediate_operand (op, mode)
- && ! xstormy16_extra_constraint_p (op, 'Q')
- && ! xstormy16_extra_constraint_p (op, 'R'));
-}
-
/* Splitter for the 'move' patterns, for modes not directly implemented
by hardware. Emit insns to copy a value of mode MODE from SRC to
DEST.
rtx auto_inc_reg_rtx = NULL_RTX;
/* Check initial conditions. */
- if (! reload_completed
- || mode == QImode || mode == HImode
- || ! nonimmediate_operand (dest, mode)
- || ! general_operand (src, mode))
- abort ();
+ gcc_assert (reload_completed
+ && mode != QImode && mode != HImode
+ && nonimmediate_operand (dest, mode)
+ && general_operand (src, mode));
/* This case is not supported below, and shouldn't be generated. */
- if (GET_CODE (dest) == MEM
- && GET_CODE (src) == MEM)
- abort ();
+ gcc_assert (GET_CODE (dest) != MEM || GET_CODE (src) != MEM);
/* This case is very very bad after reload, so trap it now. */
- if (GET_CODE (dest) == SUBREG
- || GET_CODE (src) == SUBREG)
- abort ();
+ gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);
/* The general idea is to copy by words, offsetting the source and
destination. Normally the least-significant word will be copied
&& reg_overlap_mentioned_p (dest, src))
{
int regno;
- if (GET_CODE (dest) != REG)
- abort ();
+
+ gcc_assert (GET_CODE (dest) == REG);
regno = REGNO (dest);
- if (! refers_to_regno_p (regno, regno + num_words, mem_operand, 0))
- abort ();
+ gcc_assert (refers_to_regno_p (regno, regno + num_words,
+ mem_operand, 0));
if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
direction = -1;
(set (reg:DI r0) (mem:DI (reg:HI r1)))
which we'd need to support by doing the set of the second word
last. */
- abort ();
+ gcc_unreachable ();
}
end = direction < 0 ? -1 : num_words;
MEM_VOLATILE_P (w_dest) = 1;
/* The simplify_subreg calls must always be able to simplify. */
- if (GET_CODE (w_src) == SUBREG
- || GET_CODE (w_dest) == SUBREG)
- abort ();
+ gcc_assert (GET_CODE (w_src) != SUBREG
+ && GET_CODE (w_dest) != SUBREG);
insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
if (auto_inc_reg_rtx)
else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
result = -(layout.sp_minus_fp + layout.fp_minus_ap);
else
- abort ();
+ gcc_unreachable ();
return result;
}
layout = xstormy16_compute_stack_layout ();
if (layout.locals_size >= 32768)
- error ("Local variable memory requirements exceed capacity.");
+ error ("local variable memory requirements exceed capacity");
/* Save the argument registers if necessary. */
if (layout.stdarg_save_size)
f_base = TYPE_FIELDS (va_list_type_node);
f_count = TREE_CHAIN (f_base);
- base = build (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
- count = build (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
- NULL_TREE);
+ base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
+ count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
+ NULL_TREE);
t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
- t = build (PLUS_EXPR, TREE_TYPE (base), t,
- build_int_cst (NULL_TREE, INCOMING_FRAME_SP_OFFSET));
- t = build (MODIFY_EXPR, TREE_TYPE (base), base, t);
+ t = build2 (PLUS_EXPR, TREE_TYPE (base), t,
+ build_int_cst (NULL_TREE, INCOMING_FRAME_SP_OFFSET));
+ t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
- t = build (MODIFY_EXPR, TREE_TYPE (count), count,
- build_int_cst (NULL_TREE,
- current_function_args_info * UNITS_PER_WORD));
+ t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
+ build_int_cst (NULL_TREE,
+ current_function_args_info * UNITS_PER_WORD));
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
f_base = TYPE_FIELDS (va_list_type_node);
f_count = TREE_CHAIN (f_base);
- base = build (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
- count = build (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
- NULL_TREE);
+ base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
+ count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
+ NULL_TREE);
must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
tree r;
t = fold_convert (TREE_TYPE (count), size_tree);
- t = build (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
+ t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
- t = build (GT_EXPR, boolean_type_node, t, r);
- t = build (COND_EXPR, void_type_node, t,
- build (GOTO_EXPR, void_type_node, lab_fromstack),
- NULL);
+ t = build2 (GT_EXPR, boolean_type_node, t, r);
+ t = build3 (COND_EXPR, void_type_node, t,
+ build1 (GOTO_EXPR, void_type_node, lab_fromstack),
+ NULL_TREE);
gimplify_and_add (t, pre_p);
t = fold_convert (ptr_type_node, count_tmp);
- t = build (PLUS_EXPR, ptr_type_node, base, t);
- t = build (MODIFY_EXPR, void_type_node, addr, t);
+ t = build2 (PLUS_EXPR, ptr_type_node, base, t);
+ t = build2 (MODIFY_EXPR, void_type_node, addr, t);
gimplify_and_add (t, pre_p);
- t = build (GOTO_EXPR, void_type_node, lab_gotaddr);
+ t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
gimplify_and_add (t, pre_p);
- t = build (LABEL_EXPR, void_type_node, lab_fromstack);
+ t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
gimplify_and_add (t, pre_p);
}
tree r, u;
r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
- u = build (MODIFY_EXPR, void_type_node, count_tmp, r);
+ u = build2 (MODIFY_EXPR, void_type_node, count_tmp, r);
t = fold_convert (TREE_TYPE (count), r);
- t = build (GE_EXPR, boolean_type_node, count_tmp, t);
- t = build (COND_EXPR, void_type_node, t, NULL, u);
+ t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
+ t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
gimplify_and_add (t, pre_p);
}
t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
- INCOMING_FRAME_SP_OFFSET);
t = fold_convert (TREE_TYPE (count), t);
- t = build (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
- t = build (PLUS_EXPR, TREE_TYPE (count), t,
- fold_convert (TREE_TYPE (count), size_tree));
+ t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
+ t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
+ fold_convert (TREE_TYPE (count), size_tree));
t = fold_convert (TREE_TYPE (base), fold (t));
- t = build (MINUS_EXPR, TREE_TYPE (base), base, t);
- t = build (MODIFY_EXPR, void_type_node, addr, t);
+ t = build2 (MINUS_EXPR, TREE_TYPE (base), base, t);
+ t = build2 (MODIFY_EXPR, void_type_node, addr, t);
gimplify_and_add (t, pre_p);
- t = build (LABEL_EXPR, void_type_node, lab_gotaddr);
+ t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
gimplify_and_add (t, pre_p);
t = fold_convert (TREE_TYPE (count), size_tree);
- t = build (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
- t = build (MODIFY_EXPR, TREE_TYPE (count), count, t);
+ t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
+ t = build2 (MODIFY_EXPR, TREE_TYPE (count), count, t);
gimplify_and_add (t, pre_p);
addr = fold_convert (build_pointer_type (type), addr);
- return build_fold_indirect_ref (addr);
+ return build_va_arg_indirect_ref (addr);
}
/* Initialize the variable parts of a trampoline. ADDR is an RTX for
than uninitialized. */
void
xstormy16_asm_output_aligned_common (FILE *stream,
- tree decl ATTRIBUTE_UNUSED,
+ tree decl,
const char *name,
int size,
int align,
int global)
{
- if (name[0] == '@' && name[2] == '.')
+ rtx mem = DECL_RTL (decl);
+ rtx symbol;
+
+ if (mem != NULL_RTX
+ && GET_CODE (mem) == MEM
+ && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
+ && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
{
- const char *op = 0;
- switch (name[1])
+ const char *name2;
+ int p2align = 0;
+
+ bss100_section ();
+
+ while (align > 8)
{
- case 'b':
- bss100_section();
- op = "space";
- break;
+ align /= 2;
+ p2align ++;
}
- if (op)
- {
- const char *name2;
- int p2align = 0;
- while (align > 8)
- {
- align /= 2;
- p2align ++;
- }
- name2 = xstormy16_strip_name_encoding (name);
- if (global)
- fprintf (stream, "\t.globl\t%s\n", name2);
- if (p2align)
- fprintf (stream, "\t.p2align %d\n", p2align);
- fprintf (stream, "\t.type\t%s, @object\n", name2);
- fprintf (stream, "\t.size\t%s, %d\n", name2, size);
- fprintf (stream, "%s:\n\t.%s\t%d\n", name2, op, size);
- return;
- }
+ name2 = default_strip_name_encoding (name);
+ if (global)
+ fprintf (stream, "\t.globl\t%s\n", name2);
+ if (p2align)
+ fprintf (stream, "\t.p2align %d\n", p2align);
+ fprintf (stream, "\t.type\t%s, @object\n", name2);
+ fprintf (stream, "\t.size\t%s, %d\n", name2, size);
+ fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
+ return;
}
if (!global)
special addressing modes for them. */
static void
-xstormy16_encode_section_info (tree decl,
- rtx r,
- int first ATTRIBUTE_UNUSED)
+xstormy16_encode_section_info (tree decl, rtx r, int first)
{
- if (TREE_CODE (decl) == VAR_DECL
+ default_encode_section_info (decl, r, first);
+
+ if (TREE_CODE (decl) == VAR_DECL
&& (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
|| lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
{
- const char *newsection = 0;
- char *newname;
- tree idp;
- rtx rtlname, rtl;
- const char *oldname;
-
- rtl = r;
- rtlname = XEXP (rtl, 0);
- if (GET_CODE (rtlname) == SYMBOL_REF)
- oldname = XSTR (rtlname, 0);
- else if (GET_CODE (rtlname) == MEM
- && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
- oldname = XSTR (XEXP (rtlname, 0), 0);
- else
- abort ();
-
- if (DECL_INITIAL (decl))
- {
- newsection = ".data_below100";
- DECL_SECTION_NAME (decl) = build_string (strlen (newsection), newsection);
- }
-
- newname = alloca (strlen (oldname) + 4);
- sprintf (newname, "@b.%s", oldname);
- idp = get_identifier (newname);
- XEXP (rtl, 0) =
- gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
- }
-}
-
-const char *
-xstormy16_strip_name_encoding (const char *name)
-{
- while (1)
- {
- if (name[0] == '@' && name[2] == '.')
- name += 3;
- else if (name[0] == '*')
- name ++;
- else
- return name;
+ rtx symbol = XEXP (r, 0);
+
+ gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
+ SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
}
}
if (GET_CODE (address) == PLUS)
{
- if (GET_CODE (XEXP (address, 1)) != CONST_INT)
- abort ();
+ gcc_assert (GET_CODE (XEXP (address, 1)) == CONST_INT);
offset = INTVAL (XEXP (address, 1));
address = XEXP (address, 0);
}
if (pre_dec || post_inc)
address = XEXP (address, 0);
- if (GET_CODE (address) != REG)
- abort ();
+ gcc_assert (GET_CODE (address) == REG);
fputc ('(', file);
if (pre_dec)
{
int vlen, idx;
- function_section (current_function_decl);
+ current_function_section (current_function_decl);
vlen = XVECLEN (table, 0);
for (idx = 0; idx < vlen; idx++)
rtx call, temp;
enum machine_mode mode;
- if (GET_CODE (dest) != MEM)
- abort ();
+ gcc_assert (GET_CODE (dest) == MEM);
dest = XEXP (dest, 0);
if (! CONSTANT_P (dest)
break;
default:
- abort ();
+ gcc_unreachable ();
}
firstloop = 0;
emit (gen_nop ());
}
-/* Return 1 if OP is a shift operator. */
-
-int
-shift_operator (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
-{
- enum rtx_code code = GET_CODE (op);
-
- return (code == ASHIFT
- || code == ASHIFTRT
- || code == LSHIFTRT);
-}
-
/* The shift operations are split at output time for constant values;
variable-width shifts get handed off to a library routine.
const char *r0, *r1, *rt;
static char r[64];
- if (GET_CODE (size_r) != CONST_INT
- || GET_CODE (x) != REG
- || mode != SImode)
- abort ();
+ gcc_assert (GET_CODE (size_r) == CONST_INT
+ && GET_CODE (x) == REG && mode == SImode);
size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
if (size == 0)
sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
break;
default:
- abort ();
+ gcc_unreachable ();
}
return r;
}
sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
break;
default:
- abort ();
+ gcc_unreachable ();
}
return r;
}
r0, r1, r1, r0, (int) size - 16);
break;
default:
- abort ();
+ gcc_unreachable ();
}
return r;
}
r0, rt);
break;
default:
- abort ();
+ gcc_unreachable ();
}
return r;
}
{
if (TREE_CODE (*node) != FUNCTION_TYPE)
{
- warning ("%qs attribute only applies to functions",
+ warning (OPT_Wattributes, "%qs attribute only applies to functions",
IDENTIFIER_POINTER (name));
*no_add_attrs = true;
}
&& TREE_CODE (*node) != POINTER_TYPE
&& TREE_CODE (*node) != TYPE_DECL)
{
- warning ("%<__BELOW100__%> attribute only applies to variables");
+ warning (OPT_Wattributes,
+ "%<__BELOW100__%> attribute only applies to variables");
*no_add_attrs = true;
}
else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
{
if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
{
- warning ("__BELOW100__ attribute not allowed with auto storage class.");
+ warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
+ "with auto storage class");
*no_add_attrs = true;
}
}
case 'S': arg = short_unsigned_type_node; break;
case 'l': arg = long_integer_type_node; break;
case 'L': arg = long_unsigned_type_node; break;
- default: abort();
+ default: gcc_unreachable ();
}
if (a == 0)
ret_type = arg;
static void
combine_bnp (rtx insn)
{
- int insn_code, regno, need_extend, mask;
+ int insn_code, regno, need_extend;
+ unsigned int mask;
rtx cond, reg, and, load, qireg, mem;
enum machine_mode load_mode = QImode;
+ enum machine_mode and_mode = QImode;
+ rtx shift = NULL_RTX;
insn_code = recog_memoized (insn);
if (insn_code != CODE_FOR_cbranchhi
if (need_extend)
{
- /* LT and GE conditionals should have an sign extend before
+ /* LT and GE conditionals should have a sign extend before
them. */
for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
{
int and_code = recog_memoized (and);
+
if (and_code == CODE_FOR_extendqihi2
- && rtx_equal_p (XEXP (PATTERN (and), 0), reg)
- && rtx_equal_p (XEXP (XEXP (PATTERN (and), 1), 0), qireg))
- {
- break;
- }
+ && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
+ && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), qireg))
+ break;
if (and_code == CODE_FOR_movhi_internal
- && rtx_equal_p (XEXP (PATTERN (and), 0), reg))
+ && rtx_equal_p (SET_DEST (PATTERN (and)), reg))
{
/* This is for testing bit 15. */
and = insn;
if (reg_mentioned_p (reg, and))
return;
+
if (GET_CODE (and) != NOTE
&& GET_CODE (and) != INSN)
return;
for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
{
if (recog_memoized (and) == CODE_FOR_andhi3
- && rtx_equal_p (XEXP (PATTERN (and), 0), reg)
- && rtx_equal_p (XEXP (XEXP (PATTERN (and), 1), 0), reg))
- {
- break;
- }
+ && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
+ && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), reg))
+ break;
if (reg_mentioned_p (reg, and))
return;
+
if (GET_CODE (and) != NOTE
&& GET_CODE (and) != INSN)
return;
}
+
+ if (and)
+ {
+ /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
+ followed by an AND like this:
+
+ (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
+ (clobber (reg:BI carry))])
+
+ (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))
+
+ Attempt to detect this here. */
+ for (shift = prev_real_insn (and); shift; shift = prev_real_insn (shift))
+ {
+ if (recog_memoized (shift) == CODE_FOR_lshrhi3
+ && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
+ && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
+ break;
+
+ if (reg_mentioned_p (reg, shift)
+ || (GET_CODE (shift) != NOTE
+ && GET_CODE (shift) != INSN))
+ {
+ shift = NULL_RTX;
+ break;
+ }
+ }
+ }
}
if (!and)
return;
- for (load = prev_real_insn (and); load; load = prev_real_insn (load))
+ for (load = shift ? prev_real_insn (shift) : prev_real_insn (and);
+ load;
+ load = prev_real_insn (load))
{
int load_code = recog_memoized (load);
+
if (load_code == CODE_FOR_movhi_internal
- && rtx_equal_p (XEXP (PATTERN (load), 0), reg)
- && xstormy16_below100_operand (XEXP (PATTERN (load), 1), HImode)
- && ! MEM_VOLATILE_P (XEXP (PATTERN (load), 1)))
+ && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
+ && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
+ && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
{
load_mode = HImode;
break;
}
if (load_code == CODE_FOR_movqi_internal
- && rtx_equal_p (XEXP (PATTERN (load), 0), qireg)
- && xstormy16_below100_operand (XEXP (PATTERN (load), 1), QImode))
+ && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
+ && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
{
load_mode = QImode;
break;
}
-
+
+ if (load_code == CODE_FOR_zero_extendqihi2
+ && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
+ && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
+ {
+ load_mode = QImode;
+ and_mode = HImode;
+ break;
+ }
+
if (reg_mentioned_p (reg, load))
return;
+
if (GET_CODE (load) != NOTE
&& GET_CODE (load) != INSN)
return;
if (!load)
return;
- if (!need_extend)
+ mem = SET_SRC (PATTERN (load));
+
+ if (need_extend)
{
- if (!xstormy16_onebit_set_operand (XEXP (XEXP (PATTERN (and), 1), 1), load_mode))
- return;
- mask = (int) INTVAL (XEXP (XEXP (PATTERN (and), 1), 1));
+ mask = (load_mode == HImode) ? 0x8000 : 0x80;
+
+ /* If the mem includes a zero-extend operation and we are
+ going to generate a sign-extend operation then move the
+ mem inside the zero-extend. */
+ if (GET_CODE (mem) == ZERO_EXTEND)
+ mem = XEXP (mem, 0);
}
else
- mask = (load_mode == HImode) ? 0x8000 : 0x80;
+ {
+ if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and)), 1), load_mode))
+ return;
+
+ mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and)), 1));
+
+ if (shift)
+ mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
+ }
- mem = XEXP (PATTERN (load), 1);
if (load_mode == HImode)
{
rtx addr = XEXP (mem, 0);
+
if (! (mask & 0xff))
{
addr = plus_constant (addr, 1);
if (need_extend)
XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
else
- XEXP (cond, 0) = gen_rtx_AND (QImode, mem, GEN_INT (mask));
+ XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));
+
INSN_CODE (insn) = -1;
delete_insn (load);
+
if (and != insn)
delete_insn (and);
+
+ if (shift != NULL_RTX)
+ delete_insn (shift);
}
static void
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info
-#undef TARGET_STRIP_NAME_ENCODING
-#define TARGET_STRIP_NAME_ENCODING xstormy16_strip_name_encoding
#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk