/* Xstormy16 target functions.
- Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004
- Free Software Foundation, Inc.
+ Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
+ 2006, 2007, 2008 Free Software Foundation, Inc.
Contributed by Red Hat, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
+the Free Software Foundation; either version 3, or (at your option)
any later version.
GCC is distributed in the hope that it will be useful,
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
#include "target-def.h"
#include "tm_p.h"
#include "langhooks.h"
+#include "tree-gimple.h"
+#include "df.h"
+#include "ggc.h"
static rtx emit_addhi3_postreload (rtx, rtx, rtx);
static void xstormy16_asm_out_constructor (rtx, int);
static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static bool xstormy16_rtx_costs (rtx, int, int, int *);
static int xstormy16_address_cost (rtx);
-static bool xstormy16_return_in_memory (tree, tree);
+static bool xstormy16_return_in_memory (const_tree, const_tree);
/* Define the information needed to generate branch and scc insns. This is
stored from the compare operation. */
struct rtx_def * xstormy16_compare_op0;
struct rtx_def * xstormy16_compare_op1;
-/* Return 1 if this is a LT, GE, LTU, or GEU operator. */
-
-int
-xstormy16_ineqsi_operator (register rtx op, enum machine_mode mode)
-{
- enum rtx_code code = GET_CODE (op);
-
- return ((mode == VOIDmode || GET_MODE (op) == mode)
- && (code == LT || code == GE || code == LTU || code == GEU));
-}
-
-/* Return 1 if this is an EQ or NE operator. */
-
-int
-equality_operator (register rtx op, enum machine_mode mode)
-{
- return ((mode == VOIDmode || GET_MODE (op) == mode)
- && (GET_CODE (op) == EQ || GET_CODE (op) == NE));
-}
-
-/* Return 1 if this is a comparison operator but not an EQ or NE operator. */
-
-int
-inequality_operator (register rtx op, enum machine_mode mode)
-{
- return comparison_operator (op, mode) && ! equality_operator (op, mode);
-}
+static GTY(()) section *bss100_section;
/* Compute a (partial) cost for rtx X. Return true if the complete
cost has been computed, and false if subexpressions should be
enum machine_mode mode;
mode = GET_MODE (op0);
- if (mode != HImode && mode != SImode)
- abort ();
+ gcc_assert (mode == HImode || mode == SImode);
if (mode == SImode
&& (code == GT || code == LE || code == GTU || code == LEU))
seq = get_insns ();
end_sequence ();
- if (! INSN_P (seq))
- abort ();
+ gcc_assert (INSN_P (seq));
last_insn = seq;
while (NEXT_INSN (last_insn) != NULL_RTX)
case LEU: ccode = "ls"; break;
default:
- abort ();
+ gcc_unreachable ();
}
if (need_longbranch)
/* The missing codes above should never be generated. */
default:
- abort ();
+ gcc_unreachable ();
}
switch (code)
{
int regnum;
- if (GET_CODE (XEXP (op, 0)) != REG)
- abort ();
+ gcc_assert (GET_CODE (XEXP (op, 0)) == REG);
regnum = REGNO (XEXP (op, 0));
sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
break;
default:
- abort ();
+ gcc_unreachable ();
}
if (need_longbranch)
return NO_REGS;
}
-/* Recognize a PLUS that needs the carry register. */
-int
-xstormy16_carry_plus_operand (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
+enum reg_class
+xstormy16_preferred_reload_class (rtx x, enum reg_class class)
{
- return (GET_CODE (x) == PLUS
- && GET_CODE (XEXP (x, 1)) == CONST_INT
- && (INTVAL (XEXP (x, 1)) < -4 || INTVAL (XEXP (x, 1)) > 4));
+ if (class == GENERAL_REGS
+ && GET_CODE (x) == MEM)
+ return EIGHT_REGS;
+
+ return class;
}
-/* Detect and error out on out-of-range constants for movhi. */
+/* Predicate for symbols and addresses that reflect special 8-bit
+ addressing. */
int
-xs_hi_general_operand (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
+xstormy16_below100_symbol (rtx x,
+ enum machine_mode mode ATTRIBUTE_UNUSED)
{
- if ((GET_CODE (x) == CONST_INT)
- && ((INTVAL (x) >= 32768) || (INTVAL (x) < -32768)))
- error ("Constant halfword load operand out of range.");
- return general_operand (x, mode);
+ if (GET_CODE (x) == CONST)
+ x = XEXP (x, 0);
+ if (GET_CODE (x) == PLUS
+ && GET_CODE (XEXP (x, 1)) == CONST_INT)
+ x = XEXP (x, 0);
+
+ if (GET_CODE (x) == SYMBOL_REF)
+ return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
+
+ if (GET_CODE (x) == CONST_INT)
+ {
+ HOST_WIDE_INT i = INTVAL (x);
+ if ((i >= 0x0000 && i <= 0x00ff)
+ || (i >= 0x7f00 && i <= 0x7fff))
+ return 1;
+ }
+ return 0;
}
-/* Detect and error out on out-of-range constants for addhi and subhi. */
+/* Likewise, but only for non-volatile MEMs, for patterns where the
+ MEM will get split into smaller sized accesses. */
int
-xs_hi_nonmemory_operand (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
+xstormy16_splittable_below100_operand (rtx x, enum machine_mode mode)
{
- if ((GET_CODE (x) == CONST_INT)
- && ((INTVAL (x) >= 32768) || (INTVAL (x) < -32768)))
- error ("Constant arithmetic operand out of range.");
- return nonmemory_operand (x, mode);
+ if (GET_CODE (x) == MEM && MEM_VOLATILE_P (x))
+ return 0;
+ return xstormy16_below100_operand (x, mode);
}
-enum reg_class
-xstormy16_preferred_reload_class (rtx x, enum reg_class class)
+/* Expand an 8-bit IOR. This either detects the one case we can
+ actually do, or uses a 16-bit IOR. */
+void
+xstormy16_expand_iorqi3 (rtx *operands)
{
- if (class == GENERAL_REGS
- && GET_CODE (x) == MEM)
- return EIGHT_REGS;
+ rtx in, out, outsub, val;
- return class;
+ out = operands[0];
+ in = operands[1];
+ val = operands[2];
+
+ if (xstormy16_onebit_set_operand (val, QImode))
+ {
+ if (!xstormy16_below100_or_register (in, QImode))
+ in = copy_to_mode_reg (QImode, in);
+ if (!xstormy16_below100_or_register (out, QImode))
+ out = gen_reg_rtx (QImode);
+ emit_insn (gen_iorqi3_internal (out, in, val));
+ if (out != operands[0])
+ emit_move_insn (operands[0], out);
+ return;
+ }
+
+ if (GET_CODE (in) != REG)
+ in = copy_to_mode_reg (QImode, in);
+ if (GET_CODE (val) != REG
+ && GET_CODE (val) != CONST_INT)
+ val = copy_to_mode_reg (QImode, val);
+ if (GET_CODE (out) != REG)
+ out = gen_reg_rtx (QImode);
+
+ in = simplify_gen_subreg (HImode, in, QImode, 0);
+ outsub = simplify_gen_subreg (HImode, out, QImode, 0);
+ if (GET_CODE (val) != CONST_INT)
+ val = simplify_gen_subreg (HImode, val, QImode, 0);
+
+ emit_insn (gen_iorhi3 (outsub, in, val));
+
+ if (out != operands[0])
+ emit_move_insn (operands[0], out);
+}
+
+/* Likewise, for AND. */
+void
+xstormy16_expand_andqi3 (rtx *operands)
+{
+ rtx in, out, outsub, val;
+
+ out = operands[0];
+ in = operands[1];
+ val = operands[2];
+
+ if (xstormy16_onebit_clr_operand (val, QImode))
+ {
+ if (!xstormy16_below100_or_register (in, QImode))
+ in = copy_to_mode_reg (QImode, in);
+ if (!xstormy16_below100_or_register (out, QImode))
+ out = gen_reg_rtx (QImode);
+ emit_insn (gen_andqi3_internal (out, in, val));
+ if (out != operands[0])
+ emit_move_insn (operands[0], out);
+ return;
+ }
+
+ if (GET_CODE (in) != REG)
+ in = copy_to_mode_reg (QImode, in);
+ if (GET_CODE (val) != REG
+ && GET_CODE (val) != CONST_INT)
+ val = copy_to_mode_reg (QImode, val);
+ if (GET_CODE (out) != REG)
+ out = gen_reg_rtx (QImode);
+
+ in = simplify_gen_subreg (HImode, in, QImode, 0);
+ outsub = simplify_gen_subreg (HImode, out, QImode, 0);
+ if (GET_CODE (val) != CONST_INT)
+ val = simplify_gen_subreg (HImode, val, QImode, 0);
+
+ emit_insn (gen_andhi3 (outsub, in, val));
+
+ if (out != operands[0])
+ emit_move_insn (operands[0], out);
}
#define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET) \
if (GET_CODE (x) == PLUS
&& LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
- x = XEXP (x, 0);
+ {
+ x = XEXP (x, 0);
+ /* PR 31232: Do not allow INT+INT as an address. */
+ if (GET_CODE (x) == CONST_INT)
+ return 0;
+ }
if ((GET_CODE (x) == PRE_MODIFY
&& GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
if (GET_CODE (x) == REG && REGNO_OK_FOR_BASE_P (REGNO (x))
&& (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
return 1;
+
+ if (xstormy16_below100_symbol (x, mode))
+ return 1;
return 0;
}
if (GET_CODE (x) == PLUS)
x = XEXP (x, 0);
- if (GET_CODE (x) == POST_INC
- || GET_CODE (x) == PRE_DEC)
- return 1;
+ /* Auto-increment addresses are now treated generically in recog.c. */
return 0;
}
return (GET_CODE (x) == CONST_INT
&& (INTVAL (x) == 0));
+ case 'W':
+ return xstormy16_below100_operand (x, GET_MODE (x));
+
default:
return 0;
}
return (GET_CODE (XEXP (x, 0)) != PLUS);
}
-int
-nonimmediate_nonstack_operand (rtx op, enum machine_mode mode)
-{
- /* 'Q' is for pushes, 'R' for pops. */
- return (nonimmediate_operand (op, mode)
- && ! xstormy16_extra_constraint_p (op, 'Q')
- && ! xstormy16_extra_constraint_p (op, 'R'));
-}
-
/* Splitter for the 'move' patterns, for modes not directly implemented
by hardware. Emit insns to copy a value of mode MODE from SRC to
DEST.
rtx auto_inc_reg_rtx = NULL_RTX;
/* Check initial conditions. */
- if (! reload_completed
- || mode == QImode || mode == HImode
- || ! nonimmediate_operand (dest, mode)
- || ! general_operand (src, mode))
- abort ();
+ gcc_assert (reload_completed
+ && mode != QImode && mode != HImode
+ && nonimmediate_operand (dest, mode)
+ && general_operand (src, mode));
/* This case is not supported below, and shouldn't be generated. */
- if (GET_CODE (dest) == MEM
- && GET_CODE (src) == MEM)
- abort ();
+ gcc_assert (GET_CODE (dest) != MEM || GET_CODE (src) != MEM);
/* This case is very very bad after reload, so trap it now. */
- if (GET_CODE (dest) == SUBREG
- || GET_CODE (src) == SUBREG)
- abort ();
+ gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);
/* The general idea is to copy by words, offsetting the source and
destination. Normally the least-significant word will be copied
&& reg_overlap_mentioned_p (dest, src))
{
int regno;
- if (GET_CODE (dest) != REG)
- abort ();
+
+ gcc_assert (GET_CODE (dest) == REG);
regno = REGNO (dest);
- if (! refers_to_regno_p (regno, regno + num_words, mem_operand, 0))
- abort ();
+ gcc_assert (refers_to_regno_p (regno, regno + num_words,
+ mem_operand, 0));
if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
direction = -1;
(set (reg:DI r0) (mem:DI (reg:HI r1)))
which we'd need to support by doing the set of the second word
last. */
- abort ();
+ gcc_unreachable ();
}
end = direction < 0 ? -1 : num_words;
MEM_VOLATILE_P (w_dest) = 1;
/* The simplify_subreg calls must always be able to simplify. */
- if (GET_CODE (w_src) == SUBREG
- || GET_CODE (w_dest) == SUBREG)
- abort ();
+ gcc_assert (GET_CODE (w_src) != SUBREG
+ && GET_CODE (w_dest) != SUBREG);
insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
if (auto_inc_reg_rtx)
&& GET_CODE (dest) == MEM
&& (GET_CODE (XEXP (dest, 0)) != CONST_INT
|| ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
+ && ! xstormy16_below100_operand (dest, mode)
&& GET_CODE (src) != REG
&& GET_CODE (src) != SUBREG)
src = copy_to_mode_reg (mode, src);
/* Does REGNO need to be saved? */
#define REG_NEEDS_SAVE(REGNUM, IFUN) \
- ((regs_ever_live[REGNUM] && ! call_used_regs[REGNUM]) \
+ ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM]) \
|| (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM] \
&& (REGNO_REG_CLASS (REGNUM) != CARRY_REGS) \
- && (regs_ever_live[REGNUM] || ! current_function_is_leaf)))
+ && (df_regs_ever_live_p (REGNUM) || ! current_function_is_leaf)))
/* Compute the stack layout. */
struct xstormy16_stack_layout
else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
result = -(layout.sp_minus_fp + layout.fp_minus_ap);
else
- abort ();
+ gcc_unreachable ();
return result;
}
layout = xstormy16_compute_stack_layout ();
if (layout.locals_size >= 32768)
- error ("Local variable memory requirements exceed capacity.");
+ error ("local variable memory requirements exceed capacity");
/* Save the argument registers if necessary. */
if (layout.stdarg_save_size)
{
if (mode == VOIDmode)
return const0_rtx;
- if (MUST_PASS_IN_STACK (mode, type)
+ if (targetm.calls.must_pass_in_stack (mode, type)
|| cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
return 0;
return gen_rtx_REG (mode, cum + 2);
is stdarg.h instead of varargs.h. VALIST is the tree of the va_list
variable to initialize. NEXTARG is the machine independent notion of the
'next' argument after the variable arguments. */
-void
+static void
xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
tree f_base, f_count;
tree base, count;
- tree t;
+ tree t,u;
if (xstormy16_interrupt_function_p ())
error ("cannot use va_start in interrupt function");
f_base = TYPE_FIELDS (va_list_type_node);
f_count = TREE_CHAIN (f_base);
- base = build (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base);
- count = build (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count);
+ base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
+ count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
+ NULL_TREE);
t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
- t = build (PLUS_EXPR, TREE_TYPE (base), t,
- build_int_2 (INCOMING_FRAME_SP_OFFSET, 0));
- t = build (MODIFY_EXPR, TREE_TYPE (base), base, t);
+ u = build_int_cst (NULL_TREE, INCOMING_FRAME_SP_OFFSET);
+ u = fold_convert (TREE_TYPE (count), u);
+ t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), t, u);
+ t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (base), base, t);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
- t = build (MODIFY_EXPR, TREE_TYPE (count), count,
- build_int_2 (current_function_args_info * UNITS_PER_WORD, 0));
+ t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (count), count,
+ build_int_cst (NULL_TREE,
+ current_function_args_info * UNITS_PER_WORD));
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
of type va_list as a tree, TYPE is the type passed to va_arg.
Note: This algorithm is documented in stormy-abi. */
-rtx
-xstormy16_expand_builtin_va_arg (tree valist, tree type)
+static tree
+xstormy16_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
+ tree *post_p ATTRIBUTE_UNUSED)
{
tree f_base, f_count;
tree base, count;
- rtx count_rtx, addr_rtx, r;
- rtx lab_gotaddr, lab_fromstack;
- tree t;
+ tree count_tmp, addr, t;
+ tree lab_gotaddr, lab_fromstack;
int size, size_of_reg_args, must_stack;
- tree size_tree, count_plus_size;
- rtx count_plus_size_rtx;
-
+ tree size_tree;
+
f_base = TYPE_FIELDS (va_list_type_node);
f_count = TREE_CHAIN (f_base);
- base = build (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base);
- count = build (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count);
+ base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
+ count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
+ NULL_TREE);
- must_stack = MUST_PASS_IN_STACK (TYPE_MODE (type), type);
+ must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
+ gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);
size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
- count_rtx = expand_expr (count, NULL_RTX, HImode, EXPAND_NORMAL);
- lab_gotaddr = gen_label_rtx ();
- lab_fromstack = gen_label_rtx ();
- addr_rtx = gen_reg_rtx (Pmode);
+ count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
+ lab_gotaddr = create_artificial_label ();
+ lab_fromstack = create_artificial_label ();
+ addr = create_tmp_var (ptr_type_node, NULL);
if (!must_stack)
{
- count_plus_size = build (PLUS_EXPR, TREE_TYPE (count), count, size_tree);
- count_plus_size_rtx = expand_expr (count_plus_size, NULL_RTX, HImode, EXPAND_NORMAL);
- emit_cmp_and_jump_insns (count_plus_size_rtx, GEN_INT (size_of_reg_args),
- GTU, const1_rtx, HImode, 1, lab_fromstack);
-
- t = build (PLUS_EXPR, ptr_type_node, base, count);
- r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
- if (r != addr_rtx)
- emit_move_insn (addr_rtx, r);
-
- emit_jump_insn (gen_jump (lab_gotaddr));
- emit_barrier ();
- emit_label (lab_fromstack);
+ tree r;
+
+ t = fold_convert (TREE_TYPE (count), size_tree);
+ t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
+ r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
+ t = build2 (GT_EXPR, boolean_type_node, t, r);
+ t = build3 (COND_EXPR, void_type_node, t,
+ build1 (GOTO_EXPR, void_type_node, lab_fromstack),
+ NULL_TREE);
+ gimplify_and_add (t, pre_p);
+
+ t = build2 (POINTER_PLUS_EXPR, ptr_type_node, base, count_tmp);
+ t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
+ gimplify_and_add (t, pre_p);
+
+ t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
+ gimplify_and_add (t, pre_p);
+
+ t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
+ gimplify_and_add (t, pre_p);
}
/* Arguments larger than a word might need to skip over some
size = PUSH_ROUNDING (int_size_in_bytes (type));
if (size > 2 || size < 0 || must_stack)
{
- rtx lab_notransition = gen_label_rtx ();
- emit_cmp_and_jump_insns (count_rtx, GEN_INT (NUM_ARGUMENT_REGISTERS
- * UNITS_PER_WORD),
- GEU, const1_rtx, HImode, 1, lab_notransition);
-
- t = build (MODIFY_EXPR, TREE_TYPE (count), count,
- build_int_2 (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD, 0));
- TREE_SIDE_EFFECTS (t) = 1;
- expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
-
- emit_label (lab_notransition);
- }
+ tree r, u;
- t = build (PLUS_EXPR, sizetype, size_tree,
- build_int_2 ((- NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
- + INCOMING_FRAME_SP_OFFSET),
- -1));
- t = build (PLUS_EXPR, TREE_TYPE (count), count, fold (t));
- t = build (MINUS_EXPR, TREE_TYPE (base), base, t);
- r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
- if (r != addr_rtx)
- emit_move_insn (addr_rtx, r);
-
- emit_label (lab_gotaddr);
-
- count_plus_size = build (PLUS_EXPR, TREE_TYPE (count), count, size_tree);
- t = build (MODIFY_EXPR, TREE_TYPE (count), count, count_plus_size);
- TREE_SIDE_EFFECTS (t) = 1;
- expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
+ r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
+ u = build2 (GIMPLE_MODIFY_STMT, void_type_node, count_tmp, r);
- return addr_rtx;
+ t = fold_convert (TREE_TYPE (count), r);
+ t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
+ t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
+ gimplify_and_add (t, pre_p);
+ }
+
+ t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
+ - INCOMING_FRAME_SP_OFFSET);
+ t = fold_convert (TREE_TYPE (count), t);
+ t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
+ t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
+ fold_convert (TREE_TYPE (count), size_tree));
+ t = fold_convert (TREE_TYPE (t), fold (t));
+ t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
+ t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), base, t);
+ t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
+ gimplify_and_add (t, pre_p);
+
+ t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
+ gimplify_and_add (t, pre_p);
+
+ t = fold_convert (TREE_TYPE (count), size_tree);
+ t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
+ t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (count), count, t);
+ gimplify_and_add (t, pre_p);
+
+ addr = fold_convert (build_pointer_type (type), addr);
+ return build_va_arg_indirect_ref (addr);
}
/* Initialize the variable parts of a trampoline. ADDR is an RTX for
/* Worker function for FUNCTION_VALUE. */
rtx
-xstormy16_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
+xstormy16_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
{
enum machine_mode mode;
mode = TYPE_MODE (valtype);
putc ('\n', file);
}
+/* The purpose of this function is to override the default behavior of
+ BSS objects. Normally, they go into .bss or .sbss via ".common"
+ directives, but we need to override that and put them in
+ .bss_below100. We can't just use a section override (like we do
+ for .data_below100), because that makes them initialized rather
+ than uninitialized. */
+void
+xstormy16_asm_output_aligned_common (FILE *stream,
+ tree decl,
+ const char *name,
+ int size,
+ int align,
+ int global)
+{
+ rtx mem = DECL_RTL (decl);
+ rtx symbol;
+
+ if (mem != NULL_RTX
+ && GET_CODE (mem) == MEM
+ && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
+ && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
+ {
+ const char *name2;
+ int p2align = 0;
+
+ switch_to_section (bss100_section);
+
+ while (align > 8)
+ {
+ align /= 2;
+ p2align ++;
+ }
+
+ name2 = default_strip_name_encoding (name);
+ if (global)
+ fprintf (stream, "\t.globl\t%s\n", name2);
+ if (p2align)
+ fprintf (stream, "\t.p2align %d\n", p2align);
+ fprintf (stream, "\t.type\t%s, @object\n", name2);
+ fprintf (stream, "\t.size\t%s, %d\n", name2, size);
+ fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
+ return;
+ }
+
+ if (!global)
+ {
+ fprintf (stream, "\t.local\t");
+ assemble_name (stream, name);
+ fprintf (stream, "\n");
+ }
+ fprintf (stream, "\t.comm\t");
+ assemble_name (stream, name);
+ fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
+}
+
+/* Implement TARGET_ASM_INIT_SECTIONS. */
+
+static void
+xstormy16_asm_init_sections (void)
+{
+ bss100_section
+ = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
+ output_section_asm_op,
+ "\t.section \".bss_below100\",\"aw\",@nobits");
+}
+
+/* Mark symbols with the "below100" attribute so that we can use the
+ special addressing modes for them. */
+
+static void
+xstormy16_encode_section_info (tree decl, rtx r, int first)
+{
+ default_encode_section_info (decl, r, first);
+
+ if (TREE_CODE (decl) == VAR_DECL
+ && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
+ || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
+ {
+ rtx symbol = XEXP (r, 0);
+
+ gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
+ SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
+ }
+}
+
/* Output constructors and destructors. Just like
default_named_section_asm_out_* but don't set the sections writable. */
#undef TARGET_ASM_CONSTRUCTOR
const char *section = ".dtors";
char buf[16];
- /* ??? This only works reliably with the GNU linker. */
+ /* ??? This only works reliably with the GNU linker. */
if (priority != DEFAULT_INIT_PRIORITY)
{
sprintf (buf, ".dtors.%.5u",
section = buf;
}
- named_section_flags (section, 0);
+ switch_to_section (get_section (section, 0, NULL));
assemble_align (POINTER_SIZE);
assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}
const char *section = ".ctors";
char buf[16];
- /* ??? This only works reliably with the GNU linker. */
+ /* ??? This only works reliably with the GNU linker. */
if (priority != DEFAULT_INIT_PRIORITY)
{
sprintf (buf, ".ctors.%.5u",
section = buf;
}
- named_section_flags (section, 0);
+ switch_to_section (get_section (section, 0, NULL));
assemble_align (POINTER_SIZE);
assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}
if (GET_CODE (address) == PLUS)
{
- if (GET_CODE (XEXP (address, 1)) != CONST_INT)
- abort ();
+ gcc_assert (GET_CODE (XEXP (address, 1)) == CONST_INT);
offset = INTVAL (XEXP (address, 1));
address = XEXP (address, 0);
}
if (pre_dec || post_inc)
address = XEXP (address, 0);
- if (GET_CODE (address) != REG)
- abort ();
+ gcc_assert (GET_CODE (address) == REG);
fputc ('(', file);
if (pre_dec)
/* There is either one bit set, or one bit clear, in X.
Print it preceded by '#'. */
{
+ static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
HOST_WIDE_INT xx = 1;
HOST_WIDE_INT l;
if (GET_CODE (x) == CONST_INT)
xx = INTVAL (x);
else
- output_operand_lossage ("`B' operand is not constant");
+ output_operand_lossage ("'B' operand is not constant");
- l = exact_log2 (xx);
- if (l == -1)
- l = exact_log2 (~xx);
+ /* GCC sign-extends masks with the MSB set, so we have to
+ detect all the cases that differ only in sign extension
+ beyond the bits we care about. Normally, the predicates
+ and constraints ensure that we have the right values. This
+ works correctly for valid masks. */
+ if (bits_set[xx & 7] <= 1)
+ {
+ /* Remove sign extension bits. */
+ if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
+ xx &= 0xff;
+ else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
+ xx &= 0xffff;
+ l = exact_log2 (xx);
+ }
+ else
+ {
+ /* Add sign extension bits. */
+ if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
+ xx |= ~(HOST_WIDE_INT)0xff;
+ else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
+ xx |= ~(HOST_WIDE_INT)0xffff;
+ l = exact_log2 (~xx);
+ }
+
if (l == -1)
- output_operand_lossage ("`B' operand has multiple bits set");
+ output_operand_lossage ("'B' operand has multiple bits set");
fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
return;
if (GET_CODE (x) == CONST_INT)
xx = INTVAL (x);
else
- output_operand_lossage ("`o' operand is not constant");
+ output_operand_lossage ("'o' operand is not constant");
if (code == 'O')
xx = -xx;
return;
}
+ case 'b':
+ /* Print the shift mask for bp/bn. */
+ {
+ HOST_WIDE_INT xx = 1;
+ HOST_WIDE_INT l;
+
+ if (GET_CODE (x) == CONST_INT)
+ xx = INTVAL (x);
+ else
+ output_operand_lossage ("'B' operand is not constant");
+
+ l = 7 - xx;
+
+ fputs (IMMEDIATE_PREFIX, file);
+ fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
+ return;
+ }
+
case 0:
/* Handled below. */
break;
{
int vlen, idx;
- function_section (current_function_decl);
+ switch_to_section (current_function_section ());
vlen = XVECLEN (table, 0);
for (idx = 0; idx < vlen; idx++)
rtx call, temp;
enum machine_mode mode;
- if (GET_CODE (dest) != MEM)
- abort ();
+ gcc_assert (GET_CODE (dest) == MEM);
dest = XEXP (dest, 0);
if (! CONSTANT_P (dest)
break;
default:
- abort ();
+ gcc_unreachable ();
}
firstloop = 0;
emit (gen_nop ());
}
-/* Return 1 if OP is a shift operator. */
-
-int
-shift_operator (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
-{
- enum rtx_code code = GET_CODE (op);
-
- return (code == ASHIFT
- || code == ASHIFTRT
- || code == LSHIFTRT);
-}
-
/* The shift operations are split at output time for constant values;
variable-width shifts get handed off to a library routine.
const char *r0, *r1, *rt;
static char r[64];
- if (GET_CODE (size_r) != CONST_INT
- || GET_CODE (x) != REG
- || mode != SImode)
- abort ();
+ gcc_assert (GET_CODE (size_r) == CONST_INT
+ && GET_CODE (x) == REG && mode == SImode);
size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
if (size == 0)
sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
break;
default:
- abort ();
+ gcc_unreachable ();
}
return r;
}
sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
break;
default:
- abort ();
+ gcc_unreachable ();
}
return r;
}
r0, r1, r1, r0, (int) size - 16);
break;
default:
- abort ();
+ gcc_unreachable ();
}
return r;
}
r0, rt);
break;
default:
- abort ();
+ gcc_unreachable ();
}
return r;
}
#define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
static tree xstormy16_handle_interrupt_attribute
(tree *, tree, tree, int, bool *);
+static tree xstormy16_handle_below100_attribute
+ (tree *, tree, tree, int, bool *);
static const struct attribute_spec xstormy16_attribute_table[] =
{
/* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
{ "interrupt", 0, 0, false, true, true, xstormy16_handle_interrupt_attribute },
+ { "BELOW100", 0, 0, false, false, false, xstormy16_handle_below100_attribute },
+ { "below100", 0, 0, false, false, false, xstormy16_handle_below100_attribute },
{ NULL, 0, 0, false, false, false, NULL }
};
{
if (TREE_CODE (*node) != FUNCTION_TYPE)
{
- warning ("`%s' attribute only applies to functions",
+ warning (OPT_Wattributes, "%qs attribute only applies to functions",
IDENTIFIER_POINTER (name));
*no_add_attrs = true;
}
return NULL_TREE;
}
+
+/* Handle a "below100" attribute;
+ arguments as in struct attribute_spec.handler. */
+static tree
+xstormy16_handle_below100_attribute (tree *node,
+ tree name ATTRIBUTE_UNUSED,
+ tree args ATTRIBUTE_UNUSED,
+ int flags ATTRIBUTE_UNUSED,
+ bool *no_add_attrs)
+{
+ if (TREE_CODE (*node) != VAR_DECL
+ && TREE_CODE (*node) != POINTER_TYPE
+ && TREE_CODE (*node) != TYPE_DECL)
+ {
+ warning (OPT_Wattributes,
+ "%<__BELOW100__%> attribute only applies to variables");
+ *no_add_attrs = true;
+ }
+ else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
+ {
+ if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
+ {
+ warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
+ "with auto storage class");
+ *no_add_attrs = true;
+ }
+ }
+
+ return NULL_TREE;
+}
\f
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS xstormy16_init_builtins
case 'S': arg = short_unsigned_type_node; break;
case 'l': arg = long_integer_type_node; break;
case 'L': arg = long_unsigned_type_node; break;
- default: abort();
+ default: gcc_unreachable ();
}
if (a == 0)
ret_type = arg;
else
args = tree_cons (NULL_TREE, arg, args);
}
- builtin_function (s16builtins[i].name,
- build_function_type (ret_type, args),
- i, BUILT_IN_MD, NULL, NULL);
+ add_builtin_function (s16builtins[i].name,
+ build_function_type (ret_type, args),
+ i, BUILT_IN_MD, NULL, NULL);
}
}
static rtx
-xstormy16_expand_builtin(tree exp, rtx target,
- rtx subtarget ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
- int ignore ATTRIBUTE_UNUSED)
+xstormy16_expand_builtin (tree exp, rtx target,
+ rtx subtarget ATTRIBUTE_UNUSED,
+ enum machine_mode mode ATTRIBUTE_UNUSED,
+ int ignore ATTRIBUTE_UNUSED)
{
rtx op[10], args[10], pat, copyto[10], retval = 0;
tree fndecl, argtree;
return retval;
}
\f
+
+/* Look for combinations of insns that can be converted to BN or BP
+ opcodes. This is, unfortunately, too complex to do with MD
+ patterns.  INSN is the conditional branch under consideration; we
+ walk backwards from it looking for the AND / sign-extend and the
+ below-100 load that feed the tested register, then rewrite the
+ branch to test the memory bit directly and delete the feeders. */
+static void
+combine_bnp (rtx insn)
+{
+ int insn_code, regno, need_extend;
+ unsigned int mask;
+ rtx cond, reg, and, load, qireg, mem;
+ enum machine_mode load_mode = QImode;
+ enum machine_mode and_mode = QImode;
+ rtx shift = NULL_RTX;
+
+ /* Only branches produced by the cbranchhi patterns are candidates.  */
+ insn_code = recog_memoized (insn);
+ if (insn_code != CODE_FOR_cbranchhi
+ && insn_code != CODE_FOR_cbranchhi_neg)
+ return;
+
+ /* Dig the comparison out of the branch pattern.  */
+ cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
+ cond = XEXP (cond, 1); /* if_then_else */
+ cond = XEXP (cond, 0); /* comparison */
+ /* EQ/NE tests come from an AND isolating one bit; LT/GE test the
+    sign bit via a preceding sign-extension.  Anything else cannot
+    become a single-bit branch.  */
+ switch (GET_CODE (cond))
+ {
+ case NE:
+ case EQ:
+ need_extend = 0;
+ break;
+ case LT:
+ case GE:
+ need_extend = 1;
+ break;
+ default:
+ return;
+ }
+
+ /* The comparison must be REG against zero, and REG must die in this
+    insn so the feeding insns can safely be deleted afterwards.  */
+ reg = XEXP (cond, 0);
+ if (GET_CODE (reg) != REG)
+ return;
+ regno = REGNO (reg);
+ if (XEXP (cond, 1) != const0_rtx)
+ return;
+ if (! find_regno_note (insn, REG_DEAD, regno))
+ return;
+ qireg = gen_rtx_REG (QImode, regno);
+
+ if (need_extend)
+ {
+ /* LT and GE conditionals should have a sign extend before
+ them. */
+ for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
+ {
+ int and_code = recog_memoized (and);
+
+ if (and_code == CODE_FOR_extendqihi2
+ && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
+ && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), qireg))
+ break;
+
+ if (and_code == CODE_FOR_movhi_internal
+ && rtx_equal_p (SET_DEST (PATTERN (and)), reg))
+ {
+ /* This is for testing bit 15. */
+ and = insn;
+ break;
+ }
+
+ /* Any other use of REG before the extend means the rewrite
+    would change behavior; give up.  */
+ if (reg_mentioned_p (reg, and))
+ return;
+
+ /* Only scan backwards over notes and ordinary insns.  */
+ if (GET_CODE (and) != NOTE
+ && GET_CODE (and) != INSN)
+ return;
+ }
+ }
+ else
+ {
+ /* EQ and NE conditionals have an AND before them. */
+ for (and = prev_real_insn (insn); and; and = prev_real_insn (and))
+ {
+ if (recog_memoized (and) == CODE_FOR_andhi3
+ && rtx_equal_p (SET_DEST (PATTERN (and)), reg)
+ && rtx_equal_p (XEXP (SET_SRC (PATTERN (and)), 0), reg))
+ break;
+
+ if (reg_mentioned_p (reg, and))
+ return;
+
+ if (GET_CODE (and) != NOTE
+ && GET_CODE (and) != INSN)
+ return;
+ }
+
+ if (and)
+ {
+ /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
+ followed by an AND like this:
+
+ (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
+ (clobber (reg:BI carry))]
+
+ (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))
+
+ Attempt to detect this here. */
+ for (shift = prev_real_insn (and); shift; shift = prev_real_insn (shift))
+ {
+ if (recog_memoized (shift) == CODE_FOR_lshrhi3
+ && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
+ && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
+ break;
+
+ /* Not the shift we were looking for; a SHIFT of NULL_RTX
+    below simply means "no shift found", not failure.  */
+ if (reg_mentioned_p (reg, shift)
+ || (GET_CODE (shift) != NOTE
+ && GET_CODE (shift) != INSN))
+ {
+ shift = NULL_RTX;
+ break;
+ }
+ }
+ }
+ }
+ if (!and)
+ return;
+
+ /* Now find the load that sets REG.  It must read a below-100
+    address (and must not be volatile in the HImode case, since we
+    may narrow the access) so the branch can reference it directly.  */
+ for (load = shift ? prev_real_insn (shift) : prev_real_insn (and);
+ load;
+ load = prev_real_insn (load))
+ {
+ int load_code = recog_memoized (load);
+
+ if (load_code == CODE_FOR_movhi_internal
+ && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
+ && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
+ && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
+ {
+ load_mode = HImode;
+ break;
+ }
+
+ if (load_code == CODE_FOR_movqi_internal
+ && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
+ && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
+ {
+ load_mode = QImode;
+ break;
+ }
+
+ if (load_code == CODE_FOR_zero_extendqihi2
+ && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
+ && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
+ {
+ load_mode = QImode;
+ and_mode = HImode;
+ break;
+ }
+
+ if (reg_mentioned_p (reg, load))
+ return;
+
+ if (GET_CODE (load) != NOTE
+ && GET_CODE (load) != INSN)
+ return;
+ }
+ if (!load)
+ return;
+
+ mem = SET_SRC (PATTERN (load));
+
+ if (need_extend)
+ {
+ /* A sign test examines the top bit of the loaded value.  */
+ mask = (load_mode == HImode) ? 0x8000 : 0x80;
+
+ /* If the mem includes a zero-extend operation and we are
+ going to generate a sign-extend operation then move the
+ mem inside the zero-extend. */
+ if (GET_CODE (mem) == ZERO_EXTEND)
+ mem = XEXP (mem, 0);
+ }
+ else
+ {
+ /* The AND constant must have exactly one bit set, since BN/BP
+    test a single bit.  */
+ if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and)), 1), load_mode))
+ return;
+
+ mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and)), 1));
+
+ /* If the value was shifted right before the AND, the tested bit
+    of the original memory operand is MASK shifted back left.  */
+ if (shift)
+ mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
+ }
+
+ /* The rewritten test is a QImode access: for an HImode load pick
+    the byte that actually contains the bit and adjust MASK.  */
+ if (load_mode == HImode)
+ {
+ rtx addr = XEXP (mem, 0);
+
+ if (! (mask & 0xff))
+ {
+ addr = plus_constant (addr, 1);
+ mask >>= 8;
+ }
+ mem = gen_rtx_MEM (QImode, addr);
+ }
+
+ /* Rewrite the comparison operand to test the memory bit directly
+    and force the branch insn to be re-recognized.  */
+ if (need_extend)
+ XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
+ else
+ XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));
+
+ INSN_CODE (insn) = -1;
+
+ /* The feeding insns are now dead: REG dies in INSN (checked above).  */
+ delete_insn (load);
+
+ if (and != insn)
+ delete_insn (and);
+
+ if (shift != NULL_RTX)
+ delete_insn (shift);
+}
+
+/* Worker function for TARGET_MACHINE_DEPENDENT_REORG.  Walk the whole
+   insn chain and try to convert each conditional jump's bit-test
+   sequence into a single BN/BP branch (see combine_bnp).  */
+static void
+xstormy16_reorg (void)
+{
+ rtx insn;
+
+ for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
+ {
+ if (! JUMP_P (insn))
+ continue;
+ combine_bnp (insn);
+ }
+}
+
+\f
+/* Worker function for TARGET_RETURN_IN_MEMORY.  Return true if a value
+   of TYPE must be returned in memory: int_size_in_bytes returns -1 for
+   variable-sized or incomplete types, which cannot go in registers,
+   and anything larger than the argument registers spills as well.  */
+
static bool
-xstormy16_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
+xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
- return int_size_in_bytes (type) > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS;
+ const HOST_WIDE_INT size = int_size_in_bytes (type);
+ return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
}
\f
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
+#undef TARGET_ENCODE_SECTION_INFO
+#define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info
+
+/* select_section doesn't handle .bss_below100. */
+#undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
+#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
+#undef TARGET_EXPAND_BUILTIN_VA_START
+#define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
+#undef TARGET_GIMPLIFY_VA_ARG_EXPR
+#define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr
#undef TARGET_PROMOTE_FUNCTION_ARGS
-#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
+#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
-#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
+#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
#undef TARGET_PROMOTE_PROTOTYPES
-#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
+#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
-#undef TARGET_STRUCT_VALUE_RTX
-#define TARGET_STRUCT_VALUE_RTX hook_rtx_tree_int_null
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
+#undef TARGET_MACHINE_DEPENDENT_REORG
+#define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg
+
+/* Instantiate the target hook vector from the overrides above.  */
struct gcc_target targetm = TARGET_INITIALIZER;
+
+/* Roots for the garbage collector; presumably generated by gengtype
+   from the GTY markers in this file -- confirm in the build tree.  */
+#include "gt-stormy16.h"