/* Output routines for GCC for Renesas / SuperH SH.
Copyright (C) 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
- 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
+ 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
+ Free Software Foundation, Inc.
Contributed by Steve Chamberlain (sac@cygnus.com).
Improved by Jim Wilson (wilson@cygnus.com).
#include "flags.h"
#include "expr.h"
#include "optabs.h"
+#include "reload.h"
#include "function.h"
#include "regs.h"
#include "hard-reg-set.h"
and returned from sh_reorder2. */
static short cached_can_issue_more;
+/* Unique number for UNSPEC_BBR pattern. */
+static unsigned int unspec_bbr_uid = 1;
+
/* Provides the class number of the smallest class containing
reg number. */
static int noncall_uses_reg (rtx, rtx, rtx *);
static rtx gen_block_redirect (rtx, int, int);
static void sh_reorg (void);
-static void output_stack_adjust (int, rtx, int, HARD_REG_SET *);
+static void output_stack_adjust (int, rtx, int, HARD_REG_SET *, bool);
static rtx frame_insn (rtx);
static rtx push (int);
static void pop (int);
static void push_regs (HARD_REG_SET *, int);
static int calc_live_regs (HARD_REG_SET *);
static HOST_WIDE_INT rounded_frame_size (int);
+static bool sh_frame_pointer_required (void);
static rtx mark_constant_pool_use (rtx);
static tree sh_handle_interrupt_handler_attribute (tree *, tree, tree, int, bool *);
static tree sh_handle_resbank_handler_attribute (tree *, tree,
static bool sh_ms_bitfield_layout_p (const_tree);
static void sh_init_builtins (void);
+static tree sh_builtin_decl (unsigned, bool);
static void sh_media_init_builtins (void);
+static tree sh_media_builtin_decl (unsigned, bool);
static rtx sh_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void sh_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT, tree);
static void sh_file_start (void);
struct save_schedule_s *, int);
static rtx sh_struct_value_rtx (tree, int);
+static rtx sh_function_value (const_tree, const_tree, bool);
+static rtx sh_libcall_value (enum machine_mode, const_rtx);
static bool sh_return_in_memory (const_tree, const_tree);
static rtx sh_builtin_saveregs (void);
static void sh_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode, tree, int *, int);
static tree sh_build_builtin_va_list (void);
static void sh_va_start (tree, rtx);
static tree sh_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
+static bool sh_promote_prototypes (const_tree);
static enum machine_mode sh_promote_function_mode (const_tree type,
enum machine_mode,
int *punsignedp,
static int sh_dwarf_calling_convention (const_tree);
static void sh_encode_section_info (tree, rtx, int);
static int sh2a_function_vector_p (tree);
+static void sh_trampoline_init (rtx, tree, rtx);
+static rtx sh_trampoline_adjust_address (rtx);
\f
static const struct attribute_spec sh_attribute_table[] =
{
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS sh_init_builtins
+#undef TARGET_BUILTIN_DECL
+#define TARGET_BUILTIN_DECL sh_builtin_decl
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN sh_expand_builtin
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE sh_promote_function_mode
+#undef TARGET_FUNCTION_VALUE
+#define TARGET_FUNCTION_VALUE sh_function_value
+#undef TARGET_LIBCALL_VALUE
+#define TARGET_LIBCALL_VALUE sh_libcall_value
#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX sh_struct_value_rtx
#undef TARGET_RETURN_IN_MEMORY
#undef TARGET_DWARF_CALLING_CONVENTION
#define TARGET_DWARF_CALLING_CONVENTION sh_dwarf_calling_convention
+#undef TARGET_FRAME_POINTER_REQUIRED
+#define TARGET_FRAME_POINTER_REQUIRED sh_frame_pointer_required
+
/* Return regmode weight for insn. */
#define INSN_REGMODE_WEIGHT(INSN, MODE) regmode_weight[((MODE) == SImode) ? 0 : 1][INSN_UID (INSN)]
#undef TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING sh_symbian_strip_name_encoding
#undef TARGET_CXX_IMPORT_EXPORT_CLASS
-#define TARGET_CXX_IMPORT_EXPORT_CLASS symbian_import_export_class
+#define TARGET_CXX_IMPORT_EXPORT_CLASS sh_symbian_import_export_class
#endif /* SYMBIAN */
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P sh_legitimate_address_p
+#undef TARGET_TRAMPOLINE_INIT
+#define TARGET_TRAMPOLINE_INIT sh_trampoline_init
+#undef TARGET_TRAMPOLINE_ADJUST_ADDRESS
+#define TARGET_TRAMPOLINE_ADJUST_ADDRESS sh_trampoline_adjust_address
+
/* Machine-specific symbol_ref flags. */
#define SYMBOL_FLAG_FUNCVEC_FUNCTION (SYMBOL_FLAG_MACH_DEP << 0)
{
if (level)
{
- flag_omit_frame_pointer = 2;
if (!size)
sh_div_str = "inv:minlat";
}
if (! VALID_REGISTER_P (ADDREGNAMES_REGNO (regno)))
sh_additional_register_names[regno][0] = '\0';
- if (flag_omit_frame_pointer == 2)
- {
- /* The debugging information is sufficient,
- but gdb doesn't implement this yet */
- if (0)
- flag_omit_frame_pointer
- = (PREFERRED_DEBUGGING_TYPE == DWARF2_DEBUG);
- else
- flag_omit_frame_pointer = 0;
- }
+ flag_omit_frame_pointer = (PREFERRED_DEBUGGING_TYPE == DWARF2_DEBUG);
if ((flag_pic && ! TARGET_PREFERGOT)
|| (TARGET_SHMEDIA && !TARGET_PT_FIXED))
flag_no_function_cse = 1;
- if (SMALL_REGISTER_CLASSES)
+ if (targetm.small_register_classes_for_mode_p (VOIDmode)) \
{
/* Never run scheduling before reload, since that can
break global alloc, and generates slower code anyway due
flag_schedule_insns = 0;
}
+ if ((target_flags_explicit & MASK_ACCUMULATE_OUTGOING_ARGS) == 0)
+ target_flags |= MASK_ACCUMULATE_OUTGOING_ARGS;
+
+ /* Unwind info is not correct around the CFG unless either a frame
+ pointer is present or M_A_O_A is set. Fixing this requires rewriting
+ unwind info generation to be aware of the CFG and propagating states
+ around edges. */
+ if ((flag_unwind_tables || flag_asynchronous_unwind_tables
+ || flag_exceptions || flag_non_call_exceptions)
+ && flag_omit_frame_pointer
+ && !(target_flags & MASK_ACCUMULATE_OUTGOING_ARGS))
+ {
+ if (target_flags_explicit & MASK_ACCUMULATE_OUTGOING_ARGS)
+ warning (0, "unwind tables currently require either a frame pointer "
+ "or -maccumulate-outgoing-args for correctness");
+ target_flags |= MASK_ACCUMULATE_OUTGOING_ARGS;
+ }
+
/* Unwinding with -freorder-blocks-and-partition does not work on this
architecture, because it requires far jumps to label crossing between
hot/cold sections which are rejected on this architecture. */
int si_limit;
int hi_limit;
rtx orig = from;
+ rtx last_got = NULL_RTX;
+ rtx last_symoff = NULL_RTX;
/* For HImode: range is 510, add 4 because pc counts from address of
second instruction after this one, subtract 2 for the jump instruction
dst = SET_DEST (pat);
mode = GET_MODE (dst);
+	  /* A GOT pc-relative setting comes as a pair of
+	       mova   .L8,r0
+	       mov.l  .L8,r12
+	     instructions (plus an add r0,r12).
+	     Remember if we see one without the other.  */
+ if (GET_CODE (src) == UNSPEC && PIC_ADDR_P (XVECEXP (src, 0, 0)))
+ last_got = last_got ? NULL_RTX : from;
+ else if (PIC_ADDR_P (src))
+ last_got = last_got ? NULL_RTX : from;
+
/* We must explicitly check the mode, because sometimes the
front end will generate code to load unsigned constants into
HImode targets without properly sign extending them. */
{
switch (untangle_mova (&num_mova, &mova, from))
{
+ case 1:
+ if (flag_pic)
+ {
+ rtx src = SET_SRC (PATTERN (from));
+ if (GET_CODE (src) == CONST
+ && GET_CODE (XEXP (src, 0)) == UNSPEC
+ && XINT (XEXP (src, 0), 1) == UNSPEC_SYMOFF)
+ last_symoff = from;
+ }
+ break;
case 0: return find_barrier (0, 0, mova);
case 2:
{
&& ! TARGET_SMALLCODE)
new_align = 4;
+ /* There is a possibility that a bf is transformed into a bf/s by the
+ delay slot scheduler. */
+ if (JUMP_P (from) && !JUMP_TABLE_DATA_P (from)
+ && get_attr_type (from) == TYPE_CBRANCH
+ && GET_CODE (PATTERN (NEXT_INSN (PREV_INSN (from)))) != SEQUENCE)
+ inc += 2;
+
if (found_si)
{
count_si += inc;
so we'll make one. */
rtx label = gen_label_rtx ();
+ /* Don't emit a constant table in the middle of insns for
+ casesi_worker_2. This is a bit overkill but is enough
+ because casesi_worker_2 wouldn't appear so frequently. */
+ if (last_symoff)
+ from = last_symoff;
+
/* If we exceeded the range, then we must back up over the last
instruction we looked at. Otherwise, we just need to undo the
NEXT_INSN at the end of the loop. */
else
from = PREV_INSN (from);
+      /* Don't emit a constant table in the middle of global pointer setting,
+	 since that would move the addressing base GOT into another table.
+	 We need the first mov instruction before the _GLOBAL_OFFSET_TABLE_
+	 in the pool anyway, so just move up the whole constant pool.  */
+ if (last_got)
+ from = PREV_INSN (last_got);
+
+ /* Don't insert the constant pool table at the position which
+ may be the landing pad. */
+ if (flag_exceptions
+ && CALL_P (from)
+ && find_reg_note (from, REG_EH_REGION, NULL_RTX))
+ from = PREV_INSN (from);
+
/* Walk back to be just before any jump or label.
Putting it before a label reduces the number of times the branch
around the constant pool table will be hit. Putting it before
branch; simplejump_p fails for indirect jumps even if they have
a JUMP_LABEL. */
rtx insn = emit_insn_before (gen_indirect_jump_scratch
- (reg, GEN_INT (INSN_UID (JUMP_LABEL (jump))))
- , jump);
+ (reg, GEN_INT (unspec_bbr_uid++)),
+ jump);
/* ??? We would like this to have the scope of the jump, but that
scope will change when a delay slot insn of an inner scope is added.
Hence, after delay slot scheduling, we'll have to expect
/* We can't use JUMP_LABEL here because it might be undefined
when not optimizing. */
return emit_insn_before (gen_block_branch_redirect
- (GEN_INT (INSN_UID (XEXP (SET_SRC (PATTERN (jump)), 0))))
- , jump);
+ (GEN_INT (unspec_bbr_uid++)),
+ jump);
return prev;
}
if (bp->far_label)
(emit_insn_after
(gen_stuff_delay_slot
- (GEN_INT (INSN_UID (XEXP (SET_SRC (PATTERN (jump)), 0))),
+ (GEN_INT (unspec_bbr_uid++),
GEN_INT (recog_memoized (insn) == CODE_FOR_branch_false)),
insn));
/* Prevent reorg from undoing our splits. */
next = next_active_insn (insn);
- if ((JUMP_P (next)
- || ((next = next_active_insn (next))
- && JUMP_P (next)))
+ if (next
+ && (JUMP_P (next)
+ || ((next = next_active_insn (next))
+ && JUMP_P (next)))
&& GET_CODE (PATTERN (next)) == SET
&& recog_memoized (next) == CODE_FOR_jump_compact
&& ((INSN_ADDRESSES
static void
output_stack_adjust (int size, rtx reg, int epilogue_p,
- HARD_REG_SET *live_regs_mask)
+ HARD_REG_SET *live_regs_mask, bool frame_p)
{
- rtx (*emit_fn) (rtx) = epilogue_p ? &emit_insn : &frame_insn;
+ rtx (*emit_fn) (rtx) = frame_p ? &frame_insn : &emit_insn;
if (size)
{
HOST_WIDE_INT align = STACK_BOUNDARY / BITS_PER_UNIT;
HOST_WIDE_INT size = get_frame_size ();
HOST_WIDE_INT align = STACK_BOUNDARY / BITS_PER_UNIT;
+ if (ACCUMULATE_OUTGOING_ARGS)
+ size += crtl->outgoing_args_size;
+
return ((size + pushed + align - 1) & -align) - pushed;
}
&& (NPARM_REGS(SImode)
> crtl->args.info.arg_count[(int) SH_ARG_INT]))
pretend_args = 0;
+ /* Dwarf2 module doesn't expect frame related insns here. */
output_stack_adjust (-pretend_args
- crtl->args.info.stack_regs * 8,
- stack_pointer_rtx, 0, NULL);
+ stack_pointer_rtx, 0, NULL, false);
if (TARGET_SHCOMPACT && flag_pic && crtl->args.info.call_cookie)
/* We're going to use the PIC register to load the address of the
offset_base = d + d_rounding;
output_stack_adjust (-(save_size + d_rounding), stack_pointer_rtx,
- 0, NULL);
+ 0, NULL, true);
sh5_schedule_saves (&live_regs_mask, &schedule, offset_base);
tmp_pnt = schedule.temps;
target_flags = save_flags;
output_stack_adjust (-rounded_frame_size (d) + d_rounding,
- stack_pointer_rtx, 0, NULL);
+ stack_pointer_rtx, 0, NULL, true);
if (frame_pointer_needed)
frame_insn (GEN_MOV (hard_frame_pointer_rtx, stack_pointer_rtx));
See PR/18032 and PR/40313. */
emit_insn (gen_blockage ());
output_stack_adjust (frame_size, hard_frame_pointer_rtx, e,
- &live_regs_mask);
+ &live_regs_mask, false);
/* We must avoid moving the stack pointer adjustment past code
which reads from the local frame, else an interrupt could
occur after the SP adjustment and clobber data in the local
frame. */
emit_insn (gen_blockage ());
- output_stack_adjust (frame_size, stack_pointer_rtx, e, &live_regs_mask);
+ output_stack_adjust (frame_size, stack_pointer_rtx, e,
+ &live_regs_mask, false);
}
if (SHMEDIA_REGS_STACK_ADJUST ())
pop (PR_REG);
}
- /* Banked registers are poped first to avoid being scheduled in the
+ /* Banked registers are popped first to avoid being scheduled in the
delay slot. RTE switches banks before the ds instruction. */
if (current_function_interrupt)
{
- for (i = FIRST_BANKED_REG; i <= LAST_BANKED_REG; i++)
- if (TEST_HARD_REG_BIT (live_regs_mask, i))
- pop (LAST_BANKED_REG - i);
+ for (i = LAST_BANKED_REG; i >= FIRST_BANKED_REG; i--)
+ if (TEST_HARD_REG_BIT (live_regs_mask, i))
+ pop (i);
last_reg = FIRST_PSEUDO_REGISTER - LAST_BANKED_REG - 1;
}
output_stack_adjust (crtl->args.pretend_args_size
+ save_size + d_rounding
+ crtl->args.info.stack_regs * 8,
- stack_pointer_rtx, e, NULL);
+ stack_pointer_rtx, e, NULL, false);
if (crtl->calls_eh_return)
emit_insn (GEN_ADD3 (stack_pointer_rtx, stack_pointer_rtx,
pr_offset = rounded_frame_size (d);
emit_insn (GEN_MOV (tmp, GEN_INT (pr_offset)));
- emit_insn (GEN_ADD3 (tmp, tmp, hard_frame_pointer_rtx));
+
+ if (frame_pointer_needed)
+ emit_insn (GEN_ADD3 (tmp, tmp, hard_frame_pointer_rtx));
+ else
+ emit_insn (GEN_ADD3 (tmp, tmp, stack_pointer_rtx));
tmp = gen_frame_mem (Pmode, tmp);
emit_insn (GEN_MOV (tmp, ra));
return mode;
}
-bool
+static bool
sh_promote_prototypes (const_tree type)
{
if (TARGET_HITACHI)
return gen_rtx_REG (Pmode, 2);
}
+/* Worker function for TARGET_FUNCTION_VALUE.
+
+   For the SH, this is like LIBCALL_VALUE, except that we must change the
+   mode like PROMOTE_MODE does.
+   ??? PROMOTE_MODE is ignored for non-scalar types.  The set of types
+   tested here has to be kept in sync with the one in explow.c:promote_mode.
+*/
+
+static rtx
+sh_function_value (const_tree valtype,
+		   const_tree fn_decl_or_type,
+		   bool outgoing ATTRIBUTE_UNUSED)
+{
+  /* A bare FUNCTION_TYPE carries no prototype information; normalize it
+     to NULL so sh_promote_prototypes below only sees a real DECL.  */
+  if (fn_decl_or_type
+      && !DECL_P (fn_decl_or_type))
+    fn_decl_or_type = NULL;
+
+  /* Sub-word integral scalars are widened (to DImode on SHmedia64, else
+     SImode) when prototypes are promoted; everything else is returned in
+     its own mode.  The register is chosen by BASE_RETURN_VALUE_REG.  */
+  return gen_rtx_REG (
+	   ((GET_MODE_CLASS (TYPE_MODE (valtype)) == MODE_INT
+	     && GET_MODE_SIZE (TYPE_MODE (valtype)) < 4
+	     && (TREE_CODE (valtype) == INTEGER_TYPE
+		 || TREE_CODE (valtype) == ENUMERAL_TYPE
+		 || TREE_CODE (valtype) == BOOLEAN_TYPE
+		 || TREE_CODE (valtype) == REAL_TYPE
+		 || TREE_CODE (valtype) == OFFSET_TYPE))
+	    && sh_promote_prototypes (fn_decl_or_type)
+	    ? (TARGET_SHMEDIA64 ? DImode : SImode) : TYPE_MODE (valtype)),
+	   BASE_RETURN_VALUE_REG (TYPE_MODE (valtype)));
+}
+
+/* Worker function for TARGET_LIBCALL_VALUE.
+
+   Unlike sh_function_value, no mode promotion is applied: a libcall
+   result is returned in exactly MODE, in the register picked by
+   BASE_RETURN_VALUE_REG.  FUN is unused.  */
+
+static rtx
+sh_libcall_value (enum machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
+{
+  return gen_rtx_REG (mode, BASE_RETURN_VALUE_REG (mode));
+}
+
+/* Worker function for FUNCTION_VALUE_REGNO_P.
+
+   REGNO can hold a function return value if it is the general return
+   register, or the FP return register on targets with an FPU
+   (SH2E or SHmedia FPU).  */
+
+bool
+sh_function_value_regno_p (const unsigned int regno)
+{
+  return ((regno) == FIRST_RET_REG
+	  || (TARGET_SH2E && (regno) == FIRST_FP_RET_REG)
+	  || (TARGET_SHMEDIA_FPU && (regno) == FIRST_FP_RET_REG));
+}
+
/* Worker function for TARGET_RETURN_IN_MEMORY. */
static bool
if (! TEST_HARD_REG_BIT (regs_live, 1))
return gen_rtx_REG (Pmode, 1);
- /* Hard reg 1 is live; since this is a SMALL_REGISTER_CLASSES target,
+ /* Hard reg 1 is live; since this is a small register classes target,
there shouldn't be anything but a jump before the function end. */
gcc_assert (!TEST_HARD_REG_BIT (regs_live, 7));
return gen_rtx_REG (Pmode, 7);
&& GET_CODE (PATTERN (insn)) != USE
&& GET_CODE (PATTERN (insn)) != CLOBBER)
|| CALL_P (insn)
- || (JUMP_P (insn)
- && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
- && GET_CODE (PATTERN (insn)) != ADDR_VEC))
+ || (JUMP_P (insn) && !JUMP_TABLE_DATA_P (insn)))
&& GET_CODE (PATTERN (NEXT_INSN (PREV_INSN (insn)))) != SEQUENCE
&& get_attr_needs_delay_slot (insn) == NEEDS_DELAY_SLOT_YES)
return 2;
/* SH2e has a bug that prevents the use of annulled branches, so if
the delay slot is not filled, we'll have to put a NOP in it. */
if (sh_cpu_attr == CPU_SH2E
- && JUMP_P (insn)
- && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
- && GET_CODE (PATTERN (insn)) != ADDR_VEC
+ && JUMP_P (insn) && !JUMP_TABLE_DATA_P (insn)
&& get_attr_type (insn) == TYPE_CBRANCH
&& GET_CODE (PATTERN (NEXT_INSN (PREV_INSN (insn)))) != SEQUENCE)
return 2;
|| XINT (x, 1) == UNSPEC_GOTPLT
|| XINT (x, 1) == UNSPEC_GOTTPOFF
|| XINT (x, 1) == UNSPEC_DTPOFF
+ || XINT (x, 1) == UNSPEC_TPOFF
|| XINT (x, 1) == UNSPEC_PLT
|| XINT (x, 1) == UNSPEC_SYMOFF
|| XINT (x, 1) == UNSPEC_PCREL_SYMOFF))
return x;
}
+/* Attempt to replace *P, which is an address that needs reloading, with
+   a valid memory address for an operand of mode MODE.
+   Like for sh_legitimize_address, for the SH we try to get a normal form
+   of the address.  That will allow inheritance of the address reloads.
+
+   OPNUM and ITYPE (an enum reload_type passed as int) are forwarded to
+   push_reload.  Returns true if a reload was pushed for the address.  */
+
+bool
+sh_legitimize_reload_address (rtx *p, enum machine_mode mode, int opnum,
+			      int itype)
+{
+  enum reload_type type = (enum reload_type) itype;
+
+  /* Case 1: (plus base-reg const-offset) with a 4- or 8-byte access.
+     Split the offset so the displacement fits the addressing mode,
+     reloading (base + offset_base) into a register.  */
+  if (GET_CODE (*p) == PLUS
+      && (GET_MODE_SIZE (mode) == 4 || GET_MODE_SIZE (mode) == 8)
+      && CONST_INT_P (XEXP (*p, 1))
+      && MAYBE_BASE_REGISTER_RTX_P (XEXP (*p, 0), true)
+      && ! TARGET_SHMEDIA
+      && ! (TARGET_SH4 && mode == DFmode)
+      && ! (mode == PSImode && type == RELOAD_FOR_INPUT_ADDRESS)
+      && (ALLOW_INDEXED_ADDRESS
+	  || XEXP (*p, 0) == stack_pointer_rtx
+	  || XEXP (*p, 0) == hard_frame_pointer_rtx))
+    {
+      rtx index_rtx = XEXP (*p, 1);
+      HOST_WIDE_INT offset = INTVAL (index_rtx), offset_base;
+      rtx sum;
+
+      /* SH2A DFmode with an offset that is not a multiple of 8:
+	 reload the whole address into a base register.  */
+      if (TARGET_SH2A && mode == DFmode && (offset & 0x7))
+	{
+	  push_reload (*p, NULL_RTX, p, NULL,
+		       BASE_REG_CLASS, Pmode, VOIDmode, 0, 0, opnum, type);
+	  goto win;
+	}
+      if (TARGET_SH2E && mode == SFmode)
+	{
+	  *p = copy_rtx (*p);
+	  push_reload (*p, NULL_RTX, p, NULL,
+		       BASE_REG_CLASS, Pmode, VOIDmode, 0, 0, opnum, type);
+	  goto win;
+	}
+      /* Instead of offset_base 128..131 use 124..127, so that
+	 simple add suffices.  */
+      if (offset > 127)
+	offset_base = ((offset + 4) & ~60) - 4;
+      else
+	offset_base = offset & ~60;
+      /* Sometimes the normal form does not suit DImode.  We could avoid
+	 that by using smaller ranges, but that would give less optimized
+	 code when SImode is prevalent.  */
+      if (GET_MODE_SIZE (mode) + offset - offset_base <= 64)
+	{
+	  sum = gen_rtx_PLUS (Pmode, XEXP (*p, 0), GEN_INT (offset_base));
+	  *p = gen_rtx_PLUS (Pmode, sum, GEN_INT (offset - offset_base));
+	  push_reload (sum, NULL_RTX, &XEXP (*p, 0), NULL,
+		       BASE_REG_CLASS, Pmode, VOIDmode, 0, 0, opnum, type);
+	  goto win;
+	}
+    }
+  /* Case 2: (plus (plus base-reg const) const) — the normal form built
+     above on an earlier pass.  We must re-recognize what we created
+     before and reload its inner sum.  */
+  else if (GET_CODE (*p) == PLUS
+	   && (GET_MODE_SIZE (mode) == 4 || GET_MODE_SIZE (mode) == 8)
+	   && GET_CODE (XEXP (*p, 0)) == PLUS
+	   && CONST_INT_P (XEXP (XEXP (*p, 0), 1))
+	   && MAYBE_BASE_REGISTER_RTX_P (XEXP (XEXP (*p, 0), 0), true)
+	   && CONST_INT_P (XEXP (*p, 1))
+	   && ! TARGET_SHMEDIA
+	   && ! (TARGET_SH2E && mode == SFmode))
+    {
+      /* Because this address is so complex, we know it must have
+	 been created by LEGITIMIZE_RELOAD_ADDRESS before; thus,
+	 it is already unshared, and needs no further unsharing.  */
+      push_reload (XEXP (*p, 0), NULL_RTX, &XEXP (*p, 0), NULL,
+		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0, opnum, type);
+      goto win;
+    }
+
+  return false;
+
+ win:
+  return true;
+}
+
/* Mark the use of a constant in the literal table. If the constant
has multiple labels, make it unique. */
static rtx
FNADDR is an RTX for the address of the function's pure code.
CXT is an RTX for the static chain value for the function. */
-void
-sh_initialize_trampoline (rtx tramp, rtx fnaddr, rtx cxt)
+static void
+sh_trampoline_init (rtx tramp_mem, tree fndecl, rtx cxt)
{
- rtx tramp_mem = gen_frame_mem (BLKmode, tramp);
+ rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
+ rtx tramp = force_reg (Pmode, XEXP (tramp_mem, 0));
if (TARGET_SHMEDIA64)
{
rtx ptabs = force_reg (DImode, GEN_INT (0x6bf10600));
rtx blink = force_reg (DImode, GEN_INT (0x4401fff0));
- tramp = force_reg (Pmode, tramp);
fnaddr = force_reg (SImode, fnaddr);
cxt = force_reg (SImode, cxt);
emit_insn (gen_mshflo_w_x (gen_rtx_SUBREG (V4HImode, quad0, 0),
}
}
+/* Worker for TARGET_TRAMPOLINE_ADJUST_ADDRESS.
+   On SH5, trampolines are SHmedia code, so add 1 to the address
+   (the low bit of a code address selects the SHmedia instruction set).
+   TRAMP is returned unchanged on non-SHmedia targets.  */
+
+static rtx
+sh_trampoline_adjust_address (rtx tramp)
+{
+  if (TARGET_SHMEDIA)
+    tramp = expand_simple_binop (Pmode, PLUS, tramp, const1_rtx,
+				 gen_reg_rtx (Pmode), 0, OPTAB_LIB_WIDEN);
+  return tramp;
+}
+
/* FIXME: This is overly conservative. A SHcompact function that
receives arguments ``by reference'' will have them stored in its
own stack frame, so it must not pass pointers or references to
const enum insn_code icode;
const char *const name;
int signature;
+ tree fndecl;
};
/* describe number and signedness of arguments; arg[0] == result
/* mshalds, mshard, mshards, mshlld, mshlrd: shift count is unsigned int. */
/* mshards_q: returns signed short. */
/* nsb: takes long long arg, returns unsigned char. */
-static const struct builtin_description bdesc[] =
-{
- { CODE_FOR_absv2si2, "__builtin_absv2si2", SH_BLTIN_V2SI2 },
- { CODE_FOR_absv4hi2, "__builtin_absv4hi2", SH_BLTIN_V4HI2 },
- { CODE_FOR_addv2si3, "__builtin_addv2si3", SH_BLTIN_V2SI3 },
- { CODE_FOR_addv4hi3, "__builtin_addv4hi3", SH_BLTIN_V4HI3 },
- { CODE_FOR_ssaddv2si3,"__builtin_ssaddv2si3", SH_BLTIN_V2SI3 },
- { CODE_FOR_usaddv8qi3,"__builtin_usaddv8qi3", SH_BLTIN_V8QI3 },
- { CODE_FOR_ssaddv4hi3,"__builtin_ssaddv4hi3", SH_BLTIN_V4HI3 },
- { CODE_FOR_alloco_i, "__builtin_sh_media_ALLOCO", SH_BLTIN_PV },
- { CODE_FOR_negcmpeqv8qi,"__builtin_sh_media_MCMPEQ_B", SH_BLTIN_V8QI3 },
- { CODE_FOR_negcmpeqv2si,"__builtin_sh_media_MCMPEQ_L", SH_BLTIN_V2SI3 },
- { CODE_FOR_negcmpeqv4hi,"__builtin_sh_media_MCMPEQ_W", SH_BLTIN_V4HI3 },
- { CODE_FOR_negcmpgtuv8qi,"__builtin_sh_media_MCMPGT_UB", SH_BLTIN_V8QI3 },
- { CODE_FOR_negcmpgtv2si,"__builtin_sh_media_MCMPGT_L", SH_BLTIN_V2SI3 },
- { CODE_FOR_negcmpgtv4hi,"__builtin_sh_media_MCMPGT_W", SH_BLTIN_V4HI3 },
- { CODE_FOR_mcmv, "__builtin_sh_media_MCMV", SH_BLTIN_UUUU },
- { CODE_FOR_mcnvs_lw, "__builtin_sh_media_MCNVS_LW", SH_BLTIN_3 },
- { CODE_FOR_mcnvs_wb, "__builtin_sh_media_MCNVS_WB", SH_BLTIN_V4HI2V8QI },
- { CODE_FOR_mcnvs_wub, "__builtin_sh_media_MCNVS_WUB", SH_BLTIN_V4HI2V8QI },
- { CODE_FOR_mextr1, "__builtin_sh_media_MEXTR1", SH_BLTIN_V8QI3 },
- { CODE_FOR_mextr2, "__builtin_sh_media_MEXTR2", SH_BLTIN_V8QI3 },
- { CODE_FOR_mextr3, "__builtin_sh_media_MEXTR3", SH_BLTIN_V8QI3 },
- { CODE_FOR_mextr4, "__builtin_sh_media_MEXTR4", SH_BLTIN_V8QI3 },
- { CODE_FOR_mextr5, "__builtin_sh_media_MEXTR5", SH_BLTIN_V8QI3 },
- { CODE_FOR_mextr6, "__builtin_sh_media_MEXTR6", SH_BLTIN_V8QI3 },
- { CODE_FOR_mextr7, "__builtin_sh_media_MEXTR7", SH_BLTIN_V8QI3 },
- { CODE_FOR_mmacfx_wl, "__builtin_sh_media_MMACFX_WL", SH_BLTIN_MAC_HISI },
- { CODE_FOR_mmacnfx_wl,"__builtin_sh_media_MMACNFX_WL", SH_BLTIN_MAC_HISI },
- { CODE_FOR_mulv2si3, "__builtin_mulv2si3", SH_BLTIN_V2SI3, },
- { CODE_FOR_mulv4hi3, "__builtin_mulv4hi3", SH_BLTIN_V4HI3 },
- { CODE_FOR_mmulfx_l, "__builtin_sh_media_MMULFX_L", SH_BLTIN_V2SI3 },
- { CODE_FOR_mmulfx_w, "__builtin_sh_media_MMULFX_W", SH_BLTIN_V4HI3 },
- { CODE_FOR_mmulfxrp_w,"__builtin_sh_media_MMULFXRP_W", SH_BLTIN_V4HI3 },
- { CODE_FOR_mmulhi_wl, "__builtin_sh_media_MMULHI_WL", SH_BLTIN_V4HI2V2SI },
- { CODE_FOR_mmullo_wl, "__builtin_sh_media_MMULLO_WL", SH_BLTIN_V4HI2V2SI },
- { CODE_FOR_mmulsum_wq,"__builtin_sh_media_MMULSUM_WQ", SH_BLTIN_XXUU },
- { CODE_FOR_mperm_w, "__builtin_sh_media_MPERM_W", SH_BLTIN_SH_HI },
- { CODE_FOR_msad_ubq, "__builtin_sh_media_MSAD_UBQ", SH_BLTIN_XXUU },
- { CODE_FOR_mshalds_l, "__builtin_sh_media_MSHALDS_L", SH_BLTIN_SH_SI },
- { CODE_FOR_mshalds_w, "__builtin_sh_media_MSHALDS_W", SH_BLTIN_SH_HI },
- { CODE_FOR_ashrv2si3, "__builtin_ashrv2si3", SH_BLTIN_SH_SI },
- { CODE_FOR_ashrv4hi3, "__builtin_ashrv4hi3", SH_BLTIN_SH_HI },
- { CODE_FOR_mshards_q, "__builtin_sh_media_MSHARDS_Q", SH_BLTIN_SUS },
- { CODE_FOR_mshfhi_b, "__builtin_sh_media_MSHFHI_B", SH_BLTIN_V8QI3 },
- { CODE_FOR_mshfhi_l, "__builtin_sh_media_MSHFHI_L", SH_BLTIN_V2SI3 },
- { CODE_FOR_mshfhi_w, "__builtin_sh_media_MSHFHI_W", SH_BLTIN_V4HI3 },
- { CODE_FOR_mshflo_b, "__builtin_sh_media_MSHFLO_B", SH_BLTIN_V8QI3 },
- { CODE_FOR_mshflo_l, "__builtin_sh_media_MSHFLO_L", SH_BLTIN_V2SI3 },
- { CODE_FOR_mshflo_w, "__builtin_sh_media_MSHFLO_W", SH_BLTIN_V4HI3 },
- { CODE_FOR_ashlv2si3, "__builtin_ashlv2si3", SH_BLTIN_SH_SI },
- { CODE_FOR_ashlv4hi3, "__builtin_ashlv4hi3", SH_BLTIN_SH_HI },
- { CODE_FOR_lshrv2si3, "__builtin_lshrv2si3", SH_BLTIN_SH_SI },
- { CODE_FOR_lshrv4hi3, "__builtin_lshrv4hi3", SH_BLTIN_SH_HI },
- { CODE_FOR_subv2si3, "__builtin_subv2si3", SH_BLTIN_V2SI3 },
- { CODE_FOR_subv4hi3, "__builtin_subv4hi3", SH_BLTIN_V4HI3 },
- { CODE_FOR_sssubv2si3,"__builtin_sssubv2si3", SH_BLTIN_V2SI3 },
- { CODE_FOR_ussubv8qi3,"__builtin_ussubv8qi3", SH_BLTIN_V8QI3 },
- { CODE_FOR_sssubv4hi3,"__builtin_sssubv4hi3", SH_BLTIN_V4HI3 },
- { CODE_FOR_fcosa_s, "__builtin_sh_media_FCOSA_S", SH_BLTIN_SISF },
- { CODE_FOR_fsina_s, "__builtin_sh_media_FSINA_S", SH_BLTIN_SISF },
- { CODE_FOR_fipr, "__builtin_sh_media_FIPR_S", SH_BLTIN_3 },
- { CODE_FOR_ftrv, "__builtin_sh_media_FTRV_S", SH_BLTIN_3 },
- { CODE_FOR_mac_media, "__builtin_sh_media_FMAC_S", SH_BLTIN_3 },
- { CODE_FOR_sqrtdf2, "__builtin_sh_media_FSQRT_D", SH_BLTIN_2 },
- { CODE_FOR_sqrtsf2, "__builtin_sh_media_FSQRT_S", SH_BLTIN_2 },
- { CODE_FOR_fsrra_s, "__builtin_sh_media_FSRRA_S", SH_BLTIN_2 },
- { CODE_FOR_ldhi_l, "__builtin_sh_media_LDHI_L", SH_BLTIN_LDUA_L },
- { CODE_FOR_ldhi_q, "__builtin_sh_media_LDHI_Q", SH_BLTIN_LDUA_Q },
- { CODE_FOR_ldlo_l, "__builtin_sh_media_LDLO_L", SH_BLTIN_LDUA_L },
- { CODE_FOR_ldlo_q, "__builtin_sh_media_LDLO_Q", SH_BLTIN_LDUA_Q },
- { CODE_FOR_sthi_l, "__builtin_sh_media_STHI_L", SH_BLTIN_STUA_L },
- { CODE_FOR_sthi_q, "__builtin_sh_media_STHI_Q", SH_BLTIN_STUA_Q },
- { CODE_FOR_stlo_l, "__builtin_sh_media_STLO_L", SH_BLTIN_STUA_L },
- { CODE_FOR_stlo_q, "__builtin_sh_media_STLO_Q", SH_BLTIN_STUA_Q },
- { CODE_FOR_ldhi_l64, "__builtin_sh_media_LDHI_L", SH_BLTIN_LDUA_L64 },
- { CODE_FOR_ldhi_q64, "__builtin_sh_media_LDHI_Q", SH_BLTIN_LDUA_Q64 },
- { CODE_FOR_ldlo_l64, "__builtin_sh_media_LDLO_L", SH_BLTIN_LDUA_L64 },
- { CODE_FOR_ldlo_q64, "__builtin_sh_media_LDLO_Q", SH_BLTIN_LDUA_Q64 },
- { CODE_FOR_sthi_l64, "__builtin_sh_media_STHI_L", SH_BLTIN_STUA_L64 },
- { CODE_FOR_sthi_q64, "__builtin_sh_media_STHI_Q", SH_BLTIN_STUA_Q64 },
- { CODE_FOR_stlo_l64, "__builtin_sh_media_STLO_L", SH_BLTIN_STUA_L64 },
- { CODE_FOR_stlo_q64, "__builtin_sh_media_STLO_Q", SH_BLTIN_STUA_Q64 },
- { CODE_FOR_nsb, "__builtin_sh_media_NSB", SH_BLTIN_SU },
- { CODE_FOR_byterev, "__builtin_sh_media_BYTEREV", SH_BLTIN_2 },
- { CODE_FOR_prefetch, "__builtin_sh_media_PREFO", SH_BLTIN_PSSV },
+static struct builtin_description bdesc[] =
+{
+ { CODE_FOR_absv2si2, "__builtin_absv2si2", SH_BLTIN_V2SI2, 0 },
+ { CODE_FOR_absv4hi2, "__builtin_absv4hi2", SH_BLTIN_V4HI2, 0 },
+ { CODE_FOR_addv2si3, "__builtin_addv2si3", SH_BLTIN_V2SI3, 0 },
+ { CODE_FOR_addv4hi3, "__builtin_addv4hi3", SH_BLTIN_V4HI3, 0 },
+ { CODE_FOR_ssaddv2si3,"__builtin_ssaddv2si3", SH_BLTIN_V2SI3, 0 },
+ { CODE_FOR_usaddv8qi3,"__builtin_usaddv8qi3", SH_BLTIN_V8QI3, 0 },
+ { CODE_FOR_ssaddv4hi3,"__builtin_ssaddv4hi3", SH_BLTIN_V4HI3, 0 },
+ { CODE_FOR_alloco_i, "__builtin_sh_media_ALLOCO", SH_BLTIN_PV, 0 },
+ { CODE_FOR_negcmpeqv8qi,"__builtin_sh_media_MCMPEQ_B", SH_BLTIN_V8QI3, 0 },
+ { CODE_FOR_negcmpeqv2si,"__builtin_sh_media_MCMPEQ_L", SH_BLTIN_V2SI3, 0 },
+ { CODE_FOR_negcmpeqv4hi,"__builtin_sh_media_MCMPEQ_W", SH_BLTIN_V4HI3, 0 },
+ { CODE_FOR_negcmpgtuv8qi,"__builtin_sh_media_MCMPGT_UB", SH_BLTIN_V8QI3, 0 },
+ { CODE_FOR_negcmpgtv2si,"__builtin_sh_media_MCMPGT_L", SH_BLTIN_V2SI3, 0 },
+ { CODE_FOR_negcmpgtv4hi,"__builtin_sh_media_MCMPGT_W", SH_BLTIN_V4HI3, 0 },
+ { CODE_FOR_mcmv, "__builtin_sh_media_MCMV", SH_BLTIN_UUUU, 0 },
+ { CODE_FOR_mcnvs_lw, "__builtin_sh_media_MCNVS_LW", SH_BLTIN_3, 0 },
+ { CODE_FOR_mcnvs_wb, "__builtin_sh_media_MCNVS_WB", SH_BLTIN_V4HI2V8QI, 0 },
+ { CODE_FOR_mcnvs_wub, "__builtin_sh_media_MCNVS_WUB", SH_BLTIN_V4HI2V8QI, 0 },
+ { CODE_FOR_mextr1, "__builtin_sh_media_MEXTR1", SH_BLTIN_V8QI3, 0 },
+ { CODE_FOR_mextr2, "__builtin_sh_media_MEXTR2", SH_BLTIN_V8QI3, 0 },
+ { CODE_FOR_mextr3, "__builtin_sh_media_MEXTR3", SH_BLTIN_V8QI3, 0 },
+ { CODE_FOR_mextr4, "__builtin_sh_media_MEXTR4", SH_BLTIN_V8QI3, 0 },
+ { CODE_FOR_mextr5, "__builtin_sh_media_MEXTR5", SH_BLTIN_V8QI3, 0 },
+ { CODE_FOR_mextr6, "__builtin_sh_media_MEXTR6", SH_BLTIN_V8QI3, 0 },
+ { CODE_FOR_mextr7, "__builtin_sh_media_MEXTR7", SH_BLTIN_V8QI3, 0 },
+ { CODE_FOR_mmacfx_wl, "__builtin_sh_media_MMACFX_WL", SH_BLTIN_MAC_HISI, 0 },
+ { CODE_FOR_mmacnfx_wl,"__builtin_sh_media_MMACNFX_WL", SH_BLTIN_MAC_HISI, 0 },
+ { CODE_FOR_mulv2si3, "__builtin_mulv2si3", SH_BLTIN_V2SI3, 0 },
+ { CODE_FOR_mulv4hi3, "__builtin_mulv4hi3", SH_BLTIN_V4HI3, 0 },
+ { CODE_FOR_mmulfx_l, "__builtin_sh_media_MMULFX_L", SH_BLTIN_V2SI3, 0 },
+ { CODE_FOR_mmulfx_w, "__builtin_sh_media_MMULFX_W", SH_BLTIN_V4HI3, 0 },
+ { CODE_FOR_mmulfxrp_w,"__builtin_sh_media_MMULFXRP_W", SH_BLTIN_V4HI3, 0 },
+ { CODE_FOR_mmulhi_wl, "__builtin_sh_media_MMULHI_WL", SH_BLTIN_V4HI2V2SI, 0 },
+ { CODE_FOR_mmullo_wl, "__builtin_sh_media_MMULLO_WL", SH_BLTIN_V4HI2V2SI, 0 },
+ { CODE_FOR_mmulsum_wq,"__builtin_sh_media_MMULSUM_WQ", SH_BLTIN_XXUU, 0 },
+ { CODE_FOR_mperm_w, "__builtin_sh_media_MPERM_W", SH_BLTIN_SH_HI, 0 },
+ { CODE_FOR_msad_ubq, "__builtin_sh_media_MSAD_UBQ", SH_BLTIN_XXUU, 0 },
+ { CODE_FOR_mshalds_l, "__builtin_sh_media_MSHALDS_L", SH_BLTIN_SH_SI, 0 },
+ { CODE_FOR_mshalds_w, "__builtin_sh_media_MSHALDS_W", SH_BLTIN_SH_HI, 0 },
+ { CODE_FOR_ashrv2si3, "__builtin_ashrv2si3", SH_BLTIN_SH_SI, 0 },
+ { CODE_FOR_ashrv4hi3, "__builtin_ashrv4hi3", SH_BLTIN_SH_HI, 0 },
+ { CODE_FOR_mshards_q, "__builtin_sh_media_MSHARDS_Q", SH_BLTIN_SUS, 0 },
+ { CODE_FOR_mshfhi_b, "__builtin_sh_media_MSHFHI_B", SH_BLTIN_V8QI3, 0 },
+ { CODE_FOR_mshfhi_l, "__builtin_sh_media_MSHFHI_L", SH_BLTIN_V2SI3, 0 },
+ { CODE_FOR_mshfhi_w, "__builtin_sh_media_MSHFHI_W", SH_BLTIN_V4HI3, 0 },
+ { CODE_FOR_mshflo_b, "__builtin_sh_media_MSHFLO_B", SH_BLTIN_V8QI3, 0 },
+ { CODE_FOR_mshflo_l, "__builtin_sh_media_MSHFLO_L", SH_BLTIN_V2SI3, 0 },
+ { CODE_FOR_mshflo_w, "__builtin_sh_media_MSHFLO_W", SH_BLTIN_V4HI3, 0 },
+ { CODE_FOR_ashlv2si3, "__builtin_ashlv2si3", SH_BLTIN_SH_SI, 0 },
+ { CODE_FOR_ashlv4hi3, "__builtin_ashlv4hi3", SH_BLTIN_SH_HI, 0 },
+ { CODE_FOR_lshrv2si3, "__builtin_lshrv2si3", SH_BLTIN_SH_SI, 0 },
+ { CODE_FOR_lshrv4hi3, "__builtin_lshrv4hi3", SH_BLTIN_SH_HI, 0 },
+ { CODE_FOR_subv2si3, "__builtin_subv2si3", SH_BLTIN_V2SI3, 0 },
+ { CODE_FOR_subv4hi3, "__builtin_subv4hi3", SH_BLTIN_V4HI3, 0 },
+ { CODE_FOR_sssubv2si3,"__builtin_sssubv2si3", SH_BLTIN_V2SI3, 0 },
+ { CODE_FOR_ussubv8qi3,"__builtin_ussubv8qi3", SH_BLTIN_V8QI3, 0 },
+ { CODE_FOR_sssubv4hi3,"__builtin_sssubv4hi3", SH_BLTIN_V4HI3, 0 },
+ { CODE_FOR_fcosa_s, "__builtin_sh_media_FCOSA_S", SH_BLTIN_SISF, 0 },
+ { CODE_FOR_fsina_s, "__builtin_sh_media_FSINA_S", SH_BLTIN_SISF, 0 },
+ { CODE_FOR_fipr, "__builtin_sh_media_FIPR_S", SH_BLTIN_3, 0 },
+ { CODE_FOR_ftrv, "__builtin_sh_media_FTRV_S", SH_BLTIN_3, 0 },
+ { CODE_FOR_mac_media, "__builtin_sh_media_FMAC_S", SH_BLTIN_3, 0 },
+ { CODE_FOR_sqrtdf2, "__builtin_sh_media_FSQRT_D", SH_BLTIN_2, 0 },
+ { CODE_FOR_sqrtsf2, "__builtin_sh_media_FSQRT_S", SH_BLTIN_2, 0 },
+ { CODE_FOR_fsrra_s, "__builtin_sh_media_FSRRA_S", SH_BLTIN_2, 0 },
+ { CODE_FOR_ldhi_l, "__builtin_sh_media_LDHI_L", SH_BLTIN_LDUA_L, 0 },
+ { CODE_FOR_ldhi_q, "__builtin_sh_media_LDHI_Q", SH_BLTIN_LDUA_Q, 0 },
+ { CODE_FOR_ldlo_l, "__builtin_sh_media_LDLO_L", SH_BLTIN_LDUA_L, 0 },
+ { CODE_FOR_ldlo_q, "__builtin_sh_media_LDLO_Q", SH_BLTIN_LDUA_Q, 0 },
+ { CODE_FOR_sthi_l, "__builtin_sh_media_STHI_L", SH_BLTIN_STUA_L, 0 },
+ { CODE_FOR_sthi_q, "__builtin_sh_media_STHI_Q", SH_BLTIN_STUA_Q, 0 },
+ { CODE_FOR_stlo_l, "__builtin_sh_media_STLO_L", SH_BLTIN_STUA_L, 0 },
+ { CODE_FOR_stlo_q, "__builtin_sh_media_STLO_Q", SH_BLTIN_STUA_Q, 0 },
+ { CODE_FOR_ldhi_l64, "__builtin_sh_media_LDHI_L", SH_BLTIN_LDUA_L64, 0 },
+ { CODE_FOR_ldhi_q64, "__builtin_sh_media_LDHI_Q", SH_BLTIN_LDUA_Q64, 0 },
+ { CODE_FOR_ldlo_l64, "__builtin_sh_media_LDLO_L", SH_BLTIN_LDUA_L64, 0 },
+ { CODE_FOR_ldlo_q64, "__builtin_sh_media_LDLO_Q", SH_BLTIN_LDUA_Q64, 0 },
+ { CODE_FOR_sthi_l64, "__builtin_sh_media_STHI_L", SH_BLTIN_STUA_L64, 0 },
+ { CODE_FOR_sthi_q64, "__builtin_sh_media_STHI_Q", SH_BLTIN_STUA_Q64, 0 },
+ { CODE_FOR_stlo_l64, "__builtin_sh_media_STLO_L", SH_BLTIN_STUA_L64, 0 },
+ { CODE_FOR_stlo_q64, "__builtin_sh_media_STLO_Q", SH_BLTIN_STUA_Q64, 0 },
+ { CODE_FOR_nsb, "__builtin_sh_media_NSB", SH_BLTIN_SU, 0 },
+ { CODE_FOR_byterev, "__builtin_sh_media_BYTEREV", SH_BLTIN_2, 0 },
+ { CODE_FOR_prefetch, "__builtin_sh_media_PREFO", SH_BLTIN_PSSV, 0 },
};
static void
sh_media_init_builtins (void)
{
tree shared[SH_BLTIN_NUM_SHARED_SIGNATURES];
- const struct builtin_description *d;
+ struct builtin_description *d;
memset (shared, 0, sizeof shared);
for (d = bdesc; d - bdesc < (int) ARRAY_SIZE (bdesc); d++)
if (signature < SH_BLTIN_NUM_SHARED_SIGNATURES)
shared[signature] = type;
}
- add_builtin_function (d->name, type, d - bdesc, BUILT_IN_MD,
- NULL, NULL_TREE);
+ d->fndecl =
+ add_builtin_function (d->name, type, d - bdesc, BUILT_IN_MD,
+ NULL, NULL_TREE);
}
}
+/* Returns the shmedia builtin decl for CODE.
+   CODE indexes bdesc[], whose fndecl fields are filled in by
+   sh_media_init_builtins; out-of-range codes yield error_mark_node.  */
+
+static tree
+sh_media_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
+{
+  if (code >= ARRAY_SIZE (bdesc))
+    return error_mark_node;
+
+  return bdesc[code].fndecl;
+}
+
/* Implements target hook vector_mode_supported_p. */
bool
sh_vector_mode_supported_p (enum machine_mode mode)
return false;
}
+/* Worker for TARGET_FRAME_POINTER_REQUIRED.  Return true if the current
+   function must set up a frame pointer.  */
+
+static bool
+sh_frame_pointer_required (void)
+{
+  /* If needed override this in other tm.h files to cope with various OS
+     lossage requiring a frame pointer.  */
+  if (SUBTARGET_FRAME_POINTER_REQUIRED)
+    return true;
+
+  /* NOTE(review): profiling is assumed to need the frame pointer --
+     confirm against the target's mcount conventions.  */
+  if (crtl->profile)
+    return true;
+
+  return false;
+}
+
/* Implements target hook dwarf_calling_convention. Return an enum
of dwarf_calling_convention. */
int
sh_media_init_builtins ();
}
+/* Returns the sh builtin decl for CODE.
+   Worker for TARGET_BUILTIN_DECL: only SHmedia defines target builtins,
+   so everything else maps to error_mark_node.  */
+
+static tree
+sh_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
+{
+  if (TARGET_SHMEDIA)
+    return sh_media_builtin_decl (code, initialize_p);
+
+  return error_mark_node;
+}
+
/* Expand an expression EXP that calls a built-in function,
with result going to TARGET if that's convenient
(and in mode MODE if that's convenient).
return 0;
}
+/* Return true if registers in machine mode MODE will likely be
+   allocated to registers in small register classes.
+   Worker for TARGET_SMALL_REGISTER_CLASSES_FOR_MODE_P; MODE is ignored —
+   the answer depends only on whether the target is SHmedia.  */
+
+static bool
+sh_small_register_classes_for_mode_p (enum machine_mode mode ATTRIBUTE_UNUSED)
+{
+  return (! TARGET_SHMEDIA);
+}
/* If ADDRESS refers to a CODE_LABEL, add NUSES to the number of times
that label is used. */