+/* Target machine subroutines for Altera Nios II.
+ Copyright (C) 2012 Free Software Foundation, Inc.
+ Contributed by Jonah Graham (jgraham@altera.com),
+ Will Reece (wreece@altera.com), and Jeff DaSilva (jdasilva@altera.com).
+ Contributed by Mentor Graphics, Inc.
+
+ This file is part of GCC.
+
+ GCC is free software; you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published
+ by the Free Software Foundation; either version 3, or (at your
+ option) any later version.
+
+ GCC is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with GCC; see the file COPYING3. If not see
+ <http://www.gnu.org/licenses/>. */
+
+#include <stdio.h>
+#include "config.h"
+#include "system.h"
+#include "coretypes.h"
+#include "tm.h"
+#include "rtl.h"
+#include "tree.h"
+#include "regs.h"
+#include "hard-reg-set.h"
+#include "insn-config.h"
+#include "conditions.h"
+#include "output.h"
+#include "insn-attr.h"
+#include "flags.h"
+#include "recog.h"
+#include "expr.h"
+#include "optabs.h"
+#include "function.h"
+#include "ggc.h"
+#include "basic-block.h"
+#include "diagnostic-core.h"
+#include "toplev.h"
+#include "target.h"
+#include "target-def.h"
+#include "tm_p.h"
+#include "langhooks.h"
+#include "df.h"
+#include "debug.h"
+#include "real.h"
+#include "integrate.h"
+#include "reload.h"
+
+/* Local prototypes. */
+static bool nios2_rtx_costs (rtx, int, int, int, int *, bool);
+static reg_class_t nios2_preferred_reload_class (rtx, reg_class_t);
+static void nios2_print_operand (FILE *, rtx, int);
+static void nios2_print_operand_address (FILE *, rtx);
+static void nios2_asm_function_prologue (FILE *, HOST_WIDE_INT);
+static int nios2_issue_rate (void);
+static struct machine_function *nios2_init_machine_status (void);
+static bool nios2_in_small_data_p (const_tree);
+static void dump_frame_size (FILE *);
+static HOST_WIDE_INT compute_frame_size (void);
+static void save_reg (int, unsigned);
+static void restore_reg (int, unsigned);
+static unsigned int nios2_section_type_flags (tree, const char *, int);
+static bool nios2_can_eliminate (const int, const int);
+static void nios2_load_pic_register (void);
+static bool nios2_cannot_force_const_mem (enum machine_mode, rtx);
+static rtx nios2_legitimize_pic_address (rtx orig, enum machine_mode mode,
+ rtx reg);
+static bool nios2_legitimate_constant_p (enum machine_mode, rtx);
+static rtx nios2_legitimize_address (rtx x, rtx orig_x, enum machine_mode mode);
+static bool nios2_legitimate_address_p (enum machine_mode mode, rtx, bool);
+static void nios2_init_builtins (void);
+static rtx nios2_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
+static void nios2_init_libfuncs (void);
+static rtx nios2_function_arg (cumulative_args_t, enum machine_mode,
+ const_tree, bool);
+static void nios2_function_arg_advance (cumulative_args_t, enum machine_mode,
+ const_tree, bool);
+static void nios2_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
+ tree, int *, int);
+static int nios2_arg_partial_bytes (cumulative_args_t,
+ enum machine_mode, tree, bool);
+static void nios2_trampoline_init (rtx, tree, rtx);
+static rtx nios2_function_value (const_tree, const_tree, bool);
+static rtx nios2_libcall_value (enum machine_mode, const_rtx);
+static bool nios2_function_value_regno_p (const unsigned int);
+static bool nios2_return_in_memory (const_tree, const_tree);
+static void nios2_encode_section_info (tree, rtx, int);
+static void nios2_output_dwarf_dtprel (FILE *file, int size, rtx x);
+static void nios2_option_override (void);
+static void nios2_option_save (struct cl_target_option*);
+static void nios2_option_restore (struct cl_target_option*);
+static void nios2_set_current_function (tree);
+static bool nios2_valid_target_attribute_p (tree, tree, tree, int);
+static bool nios2_pragma_target_parse (tree, tree);
+static tree nios2_merge_decl_attributes (tree, tree);
+static void nios2_custom_check_insns (void);
+static void nios2_handle_custom_fpu_cfg (const char*, bool);
+static void nios2_handle_custom_fpu_insn_option (int);
+static void nios2_register_custom_code (unsigned int, enum nios2_ccs_code, int);
+static void nios2_deregister_custom_code (unsigned int);
+
+/* Initialize the GCC target structure. */
+#undef TARGET_ASM_FUNCTION_PROLOGUE
+#define TARGET_ASM_FUNCTION_PROLOGUE nios2_asm_function_prologue
+
+/* Scheduling and section-placement hooks. */
+#undef TARGET_SCHED_ISSUE_RATE
+#define TARGET_SCHED_ISSUE_RATE nios2_issue_rate
+#undef TARGET_IN_SMALL_DATA_P
+#define TARGET_IN_SMALL_DATA_P nios2_in_small_data_p
+#undef TARGET_ENCODE_SECTION_INFO
+#define TARGET_ENCODE_SECTION_INFO nios2_encode_section_info
+#undef TARGET_SECTION_TYPE_FLAGS
+#define TARGET_SECTION_TYPE_FLAGS nios2_section_type_flags
+
+/* Builtin and library-function initialization hooks. */
+#undef TARGET_INIT_BUILTINS
+#define TARGET_INIT_BUILTINS nios2_init_builtins
+#undef TARGET_EXPAND_BUILTIN
+#define TARGET_EXPAND_BUILTIN nios2_expand_builtin
+
+#undef TARGET_INIT_LIBFUNCS
+#define TARGET_INIT_LIBFUNCS nios2_init_libfuncs
+
+/* Calling-convention and argument-passing hooks. */
+#undef TARGET_FUNCTION_OK_FOR_SIBCALL
+#define TARGET_FUNCTION_OK_FOR_SIBCALL hook_bool_tree_tree_true
+
+#undef TARGET_CAN_ELIMINATE
+#define TARGET_CAN_ELIMINATE nios2_can_eliminate
+
+#undef TARGET_FUNCTION_ARG
+#define TARGET_FUNCTION_ARG nios2_function_arg
+
+#undef TARGET_FUNCTION_ARG_ADVANCE
+#define TARGET_FUNCTION_ARG_ADVANCE nios2_function_arg_advance
+
+#undef TARGET_ARG_PARTIAL_BYTES
+#define TARGET_ARG_PARTIAL_BYTES nios2_arg_partial_bytes
+
+#undef TARGET_TRAMPOLINE_INIT
+#define TARGET_TRAMPOLINE_INIT nios2_trampoline_init
+
+#undef TARGET_FUNCTION_VALUE
+#define TARGET_FUNCTION_VALUE nios2_function_value
+
+#undef TARGET_LIBCALL_VALUE
+#define TARGET_LIBCALL_VALUE nios2_libcall_value
+
+#undef TARGET_FUNCTION_VALUE_REGNO_P
+#define TARGET_FUNCTION_VALUE_REGNO_P nios2_function_value_regno_p
+
+#undef TARGET_RETURN_IN_MEMORY
+#define TARGET_RETURN_IN_MEMORY nios2_return_in_memory
+
+#undef TARGET_PROMOTE_PROTOTYPES
+#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
+
+#undef TARGET_SETUP_INCOMING_VARARGS
+#define TARGET_SETUP_INCOMING_VARARGS nios2_setup_incoming_varargs
+
+#undef TARGET_MUST_PASS_IN_STACK
+#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
+
+/* Address, constant, and TLS-related hooks. */
+#undef TARGET_LEGITIMATE_CONSTANT_P
+#define TARGET_LEGITIMATE_CONSTANT_P nios2_legitimate_constant_p
+
+#undef TARGET_LEGITIMIZE_ADDRESS
+#define TARGET_LEGITIMIZE_ADDRESS nios2_legitimize_address
+
+#undef TARGET_LEGITIMATE_ADDRESS_P
+#define TARGET_LEGITIMATE_ADDRESS_P nios2_legitimate_address_p
+
+#undef TARGET_PREFERRED_RELOAD_CLASS
+#define TARGET_PREFERRED_RELOAD_CLASS nios2_preferred_reload_class
+
+#undef TARGET_RTX_COSTS
+#define TARGET_RTX_COSTS nios2_rtx_costs
+
+#undef TARGET_HAVE_TLS
+#define TARGET_HAVE_TLS TARGET_LINUX_ABI
+
+#undef TARGET_CANNOT_FORCE_CONST_MEM
+#define TARGET_CANNOT_FORCE_CONST_MEM nios2_cannot_force_const_mem
+
+#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
+#define TARGET_ASM_OUTPUT_DWARF_DTPREL nios2_output_dwarf_dtprel
+
+/* Assembly output hooks. */
+#undef TARGET_PRINT_OPERAND
+#define TARGET_PRINT_OPERAND nios2_print_operand
+
+#undef TARGET_PRINT_OPERAND_ADDRESS
+#define TARGET_PRINT_OPERAND_ADDRESS nios2_print_operand_address
+
+/* Option-handling and attribute hooks. */
+#undef TARGET_OPTION_OVERRIDE
+#define TARGET_OPTION_OVERRIDE nios2_option_override
+
+#undef TARGET_OPTION_SAVE
+#define TARGET_OPTION_SAVE nios2_option_save
+
+#undef TARGET_OPTION_RESTORE
+#define TARGET_OPTION_RESTORE nios2_option_restore
+
+#undef TARGET_SET_CURRENT_FUNCTION
+#define TARGET_SET_CURRENT_FUNCTION nios2_set_current_function
+
+#undef TARGET_OPTION_VALID_ATTRIBUTE_P
+#define TARGET_OPTION_VALID_ATTRIBUTE_P nios2_valid_target_attribute_p
+
+#undef TARGET_OPTION_PRAGMA_PARSE
+#define TARGET_OPTION_PRAGMA_PARSE nios2_pragma_target_parse
+
+#undef TARGET_MERGE_DECL_ATTRIBUTES
+#define TARGET_MERGE_DECL_ATTRIBUTES nios2_merge_decl_attributes
+
+/* ??? Might want to redefine TARGET_RETURN_IN_MSB here to handle
+ big-endian case; depends on what ABI we choose. */
+
+struct gcc_target targetm = TARGET_INITIALIZER;
+\f
+
+/* Threshold for data being put into the small data/bss area, instead
+ of the normal data area (references to the small data/bss area take
+ 1 instruction, and use the global pointer, references to the normal
+ data area takes 2 instructions). */
+/* Overridden from the -G command-line value in nios2_option_override. */
+unsigned HOST_WIDE_INT nios2_section_threshold = NIOS2_DEFAULT_GVALUE;
+
+struct GTY (()) machine_function
+{
+ /* Current frame information, to be filled in by compute_frame_size
+ with register save masks, and offsets for the current function. */
+
+ unsigned HOST_WIDE_INT save_mask; /* Bit N set when register N must be
+ saved; see compute_frame_size. */
+ long total_size; /* # bytes that the entire frame takes up. */
+ long var_size; /* # bytes that variables take up. */
+ long args_size; /* # bytes that outgoing arguments take up. */
+ int save_reg_size; /* # bytes needed to store gp regs. */
+ long save_regs_offset; /* Offset from new sp to store gp registers. */
+ int initialized; /* != 0 if frame size already calculated. */
+};
+
+/* State to track the assignment of custom codes to FPU/custom builtins. */
+/* Both arrays are indexed by the custom instruction code, 0..255. */
+static enum nios2_ccs_code custom_code_status[256];
+static int custom_code_index[256];
+/* Set to true if any conflicts (re-use of a code between 0-255) are found. */
+static bool custom_code_conflict = false;
+
+\f
+
+/* Definition of builtin function types for nios2. */
+
+/* X-macro list of builtin signatures. Each N2_FTYPE(N, ARGS) entry
+ gives N types in ARGS: the return type followed by the argument
+ types. The list is expanded below into the nios2_ftcode enum and
+ again in nios2_ftype to build the tree types. */
+#define N2_FTYPES \
+ N2_FTYPE(1, (SF)) \
+ N2_FTYPE(1, (VOID)) \
+ N2_FTYPE(2, (DF, DF)) \
+ N2_FTYPE(3, (DF, DF, DF)) \
+ N2_FTYPE(2, (DF, SF)) \
+ N2_FTYPE(2, (DF, SI)) \
+ N2_FTYPE(2, (DF, UI)) \
+ N2_FTYPE(2, (SF, DF)) \
+ N2_FTYPE(2, (SF, SF)) \
+ N2_FTYPE(3, (SF, SF, SF)) \
+ N2_FTYPE(2, (SF, SI)) \
+ N2_FTYPE(2, (SF, UI)) \
+ N2_FTYPE(2, (SI, CVPTR)) \
+ N2_FTYPE(2, (SI, DF)) \
+ N2_FTYPE(3, (SI, DF, DF)) \
+ N2_FTYPE(2, (SI, SF)) \
+ N2_FTYPE(3, (SI, SF, SF)) \
+ N2_FTYPE(2, (SI, SI)) \
+ N2_FTYPE(2, (UI, CVPTR)) \
+ N2_FTYPE(2, (UI, DF)) \
+ N2_FTYPE(2, (UI, SF)) \
+ N2_FTYPE(2, (VOID, DF)) \
+ N2_FTYPE(2, (VOID, SF)) \
+ N2_FTYPE(3, (VOID, SI, SI)) \
+ N2_FTYPE(3, (VOID, VPTR, SI))
+
+/* Paste enumerator names such as N2_FTYPE_SF_VOID or N2_FTYPE_SI_SF_SF
+ from the return/argument type lists above. */
+#define N2_FTYPE_OP1(R) N2_FTYPE_ ## R ## _VOID
+#define N2_FTYPE_OP2(R, A1) N2_FTYPE_ ## R ## _ ## A1
+#define N2_FTYPE_OP3(R, A1, A2) N2_FTYPE_ ## R ## _ ## A1 ## _ ## A2
+
+/* Expand ftcode enumeration. */
+enum nios2_ftcode {
+#define N2_FTYPE(N,ARGS) N2_FTYPE_OP ## N ARGS,
+N2_FTYPES
+#undef N2_FTYPE
+N2_FTYPE_MAX
+};
+
+/* Return the tree function type, based on the ftcode. */
+static tree
+nios2_ftype (enum nios2_ftcode ftcode)
+{
+ static tree types[(int) N2_FTYPE_MAX];
+
+ tree N2_TYPE_SF = float_type_node;
+ tree N2_TYPE_DF = double_type_node;
+ tree N2_TYPE_SI = integer_type_node;
+ tree N2_TYPE_UI = unsigned_type_node;
+ tree N2_TYPE_VOID = void_type_node;
+
+ static const_tree N2_TYPE_CVPTR, N2_TYPE_VPTR;
+ if (!N2_TYPE_CVPTR)
+ {
+ /* const volatile void * */
+ N2_TYPE_CVPTR
+ = build_pointer_type (build_qualified_type (void_type_node,
+ (TYPE_QUAL_CONST
+ | TYPE_QUAL_VOLATILE)));
+ /* volatile void * */
+ N2_TYPE_VPTR
+ = build_pointer_type (build_qualified_type (void_type_node,
+ TYPE_QUAL_VOLATILE));
+ }
+ if (types[(int) ftcode] == NULL_TREE)
+ switch (ftcode)
+ {
+#define N2_FTYPE_ARGS1(R) N2_TYPE_ ## R
+#define N2_FTYPE_ARGS2(R,A1) N2_TYPE_ ## R, N2_TYPE_ ## A1
+#define N2_FTYPE_ARGS3(R,A1,A2) N2_TYPE_ ## R, N2_TYPE_ ## A1, N2_TYPE_ ## A2
+#define N2_FTYPE(N,ARGS) \
+ case N2_FTYPE_OP ## N ARGS: \
+ types[(int) ftcode] \
+ = build_function_type_list (N2_FTYPE_ARGS ## N ARGS, NULL_TREE); \
+ break;
+ N2_FTYPES
+#undef N2_FTYPE
+ default: gcc_unreachable ();
+ }
+ return types[(int) ftcode];
+}
+
+
+\f
+/* Definition of FPU instruction descriptions. */
+
+struct nios2_fpu_insn_info
+{
+ /* Base name, used to build -mcustom-<name> diagnostics. */
+ const char *name;
+ /* Operand count, and pointer to the option variable holding the
+ assigned custom code (-1 when the insn is not enabled). */
+ int num_operands, *optvar;
+ /* Option enums for the -mcustom-<name>= and -mno-custom-<name>
+ command-line switches. */
+ int opt, no_opt;
+/* Flag bits for the FLAGS field below. */
+/* Insn operates on double precision. */
+#define N2F_DF 0x1
+/* Insn must be enabled whenever any double-precision insn is enabled
+ (enforced in nios2_custom_check_insns). */
+#define N2F_DFREQ 0x2
+/* Only used when -funsafe-math-optimizations is given. */
+#define N2F_UNSAFE 0x4
+/* Only used when -ffinite-math-only is given. */
+#define N2F_FINITE 0x8
+ unsigned int flags;
+ enum insn_code icode;
+ enum nios2_ftcode ftcode;
+};
+
+/* Base macro for defining FPU instructions. */
+#define N2FPU_INSN_DEF_BASE(insn, nop, flags, icode, args) \
+ { #insn, nop, &nios2_custom_ ## insn, OPT_mcustom_##insn##_, \
+ OPT_mno_custom_##insn, flags, CODE_FOR_ ## icode, \
+ N2_FTYPE_OP ## nop args }
+
+/* Arithmetic and math functions; 2 or 3 operand FP operations. */
+#define N2FPU_OP2(mode) (mode, mode)
+#define N2FPU_OP3(mode) (mode, mode, mode)
+#define N2FPU_INSN_DEF(code, icode, nop, flags, m, M) \
+ N2FPU_INSN_DEF_BASE (f ## code ## m, nop, flags, \
+ icode ## m ## f ## nop, N2FPU_OP ## nop (M ## F))
+#define N2FPU_INSN_SF(code, nop, flags) \
+ N2FPU_INSN_DEF (code, code, nop, flags, s, S)
+#define N2FPU_INSN_DF(code, nop, flags) \
+ N2FPU_INSN_DEF (code, code, nop, flags | N2F_DF, d, D)
+
+/* Compare instructions, 3 operand FP operation with a SI result. */
+#define N2FPU_CMP_DEF(code, flags, m, M) \
+ N2FPU_INSN_DEF_BASE (fcmp ## code ## m, 3, flags, \
+ nios2_s ## code ## m ## f, (SI, M ## F, M ## F))
+#define N2FPU_CMP_SF(code) N2FPU_CMP_DEF (code, 0, s, S)
+#define N2FPU_CMP_DF(code) N2FPU_CMP_DEF (code, N2F_DF, d, D)
+
+/* The order of definition needs to be maintained consistent with
+ enum n2fpu_code in nios2-opts.h, since that enum is used to index
+ this table (see the N2FPU accessor macros below). Each entry is
+ constructed by the N2FPU_* macros defined above. */
+struct nios2_fpu_insn_info nios2_fpu_insn[] =
+ {
+ /* Single precision instructions. */
+ N2FPU_INSN_SF (add, 3, 0),
+ N2FPU_INSN_SF (sub, 3, 0),
+ N2FPU_INSN_SF (mul, 3, 0),
+ N2FPU_INSN_SF (div, 3, 0),
+ /* Due to textual difference between min/max and smin/smax. */
+ N2FPU_INSN_DEF (min, smin, 3, N2F_FINITE, s, S),
+ N2FPU_INSN_DEF (max, smax, 3, N2F_FINITE, s, S),
+ N2FPU_INSN_SF (neg, 2, 0),
+ N2FPU_INSN_SF (abs, 2, 0),
+ N2FPU_INSN_SF (sqrt, 2, 0),
+ N2FPU_INSN_SF (sin, 2, N2F_UNSAFE),
+ N2FPU_INSN_SF (cos, 2, N2F_UNSAFE),
+ N2FPU_INSN_SF (tan, 2, N2F_UNSAFE),
+ N2FPU_INSN_SF (atan, 2, N2F_UNSAFE),
+ N2FPU_INSN_SF (exp, 2, N2F_UNSAFE),
+ N2FPU_INSN_SF (log, 2, N2F_UNSAFE),
+ /* Single precision compares. */
+ N2FPU_CMP_SF (eq), N2FPU_CMP_SF (ne),
+ N2FPU_CMP_SF (lt), N2FPU_CMP_SF (le),
+ N2FPU_CMP_SF (gt), N2FPU_CMP_SF (ge),
+
+ /* Double precision instructions. */
+ N2FPU_INSN_DF (add, 3, 0),
+ N2FPU_INSN_DF (sub, 3, 0),
+ N2FPU_INSN_DF (mul, 3, 0),
+ N2FPU_INSN_DF (div, 3, 0),
+ /* Due to textual difference between min/max and smin/smax. */
+ N2FPU_INSN_DEF (min, smin, 3, N2F_FINITE, d, D),
+ N2FPU_INSN_DEF (max, smax, 3, N2F_FINITE, d, D),
+ N2FPU_INSN_DF (neg, 2, 0),
+ N2FPU_INSN_DF (abs, 2, 0),
+ N2FPU_INSN_DF (sqrt, 2, 0),
+ N2FPU_INSN_DF (sin, 2, N2F_UNSAFE),
+ N2FPU_INSN_DF (cos, 2, N2F_UNSAFE),
+ N2FPU_INSN_DF (tan, 2, N2F_UNSAFE),
+ N2FPU_INSN_DF (atan, 2, N2F_UNSAFE),
+ N2FPU_INSN_DF (exp, 2, N2F_UNSAFE),
+ N2FPU_INSN_DF (log, 2, N2F_UNSAFE),
+ /* Double precision compares. */
+ N2FPU_CMP_DF (eq), N2FPU_CMP_DF (ne),
+ N2FPU_CMP_DF (lt), N2FPU_CMP_DF (le),
+ N2FPU_CMP_DF (gt), N2FPU_CMP_DF (ge),
+
+ /* Conversion instructions. */
+ N2FPU_INSN_DEF_BASE (floatis, 2, 0, floatsisf2, (SF, SI)),
+ N2FPU_INSN_DEF_BASE (floatus, 2, 0, floatunssisf2, (SF, UI)),
+ N2FPU_INSN_DEF_BASE (floatid, 2, 0, floatsidf2, (DF, SI)),
+ N2FPU_INSN_DEF_BASE (floatud, 2, 0, floatunssidf2, (DF, UI)),
+ N2FPU_INSN_DEF_BASE (fixsi, 2, 0, fix_truncsfsi2, (SI, SF)),
+ N2FPU_INSN_DEF_BASE (fixsu, 2, 0, fixuns_truncsfsi2, (UI, SF)),
+ N2FPU_INSN_DEF_BASE (fixdi, 2, 0, fix_truncdfsi2, (SI, DF)),
+ N2FPU_INSN_DEF_BASE (fixdu, 2, 0, fixuns_truncdfsi2, (UI, DF)),
+ N2FPU_INSN_DEF_BASE (fextsd, 2, 0, extendsfdf2, (DF, SF)),
+ N2FPU_INSN_DEF_BASE (ftruncds, 2, 0, truncdfsf2, (SF, DF)),
+
+ /* X, Y access instructions. */
+ N2FPU_INSN_DEF_BASE (fwrx, 2, N2F_DFREQ, nios2_fwrx, (VOID, DF)),
+ N2FPU_INSN_DEF_BASE (fwry, 2, N2F_DFREQ, nios2_fwry, (VOID, SF)),
+ N2FPU_INSN_DEF_BASE (frdxlo, 1, N2F_DFREQ, nios2_frdxlo, (SF)),
+ N2FPU_INSN_DEF_BASE (frdxhi, 1, N2F_DFREQ, nios2_frdxhi, (SF)),
+ N2FPU_INSN_DEF_BASE (frdy, 1, N2F_DFREQ, nios2_frdy, (SF))
+ };
+
+/* Some macros for ease of access. */
+#define N2FPU(code) nios2_fpu_insn[(int) code]
+/* An insn is enabled when its option variable holds a custom code >= 0. */
+#define N2FPU_ENABLED_P(code) (N2FPU_N(code) >= 0)
+/* Value of the -mcustom-<insn>= option; -1 when the insn is disabled. */
+#define N2FPU_N(code) (*N2FPU(code).optvar)
+#define N2FPU_NAME(code) (N2FPU(code).name)
+#define N2FPU_ICODE(code) (N2FPU(code).icode)
+#define N2FPU_FTCODE(code) (N2FPU(code).ftcode)
+/* Flag-bit predicates; see the N2F_* definitions above. */
+#define N2FPU_FINITE_P(code) (N2FPU(code).flags & N2F_FINITE)
+#define N2FPU_UNSAFE_P(code) (N2FPU(code).flags & N2F_UNSAFE)
+#define N2FPU_DOUBLE_P(code) (N2FPU(code).flags & N2F_DF)
+#define N2FPU_DOUBLE_REQUIRED_P(code) (N2FPU(code).flags & N2F_DFREQ)
+
+/* Same as above, but for cases where using only the op part is shorter. */
+#define N2FPU_OP(op) N2FPU(n2fpu_ ## op)
+#define N2FPU_OP_NAME(op) N2FPU_NAME(n2fpu_ ## op)
+#define N2FPU_OP_ENABLED_P(op) N2FPU_ENABLED_P(n2fpu_ ## op)
+
+/* Export the FPU insn enabled predicate to nios2.md.  An FPU insn is
+   enabled exactly when a custom code (>= 0) has been assigned to it.  */
+bool
+nios2_fpu_insn_enabled (enum n2fpu_code code)
+{
+  return N2FPU_N (code) >= 0;
+}
+
+/* Return true if the FPU compare for condition COND in mode MODE is
+   available as a custom instruction.  Only SFmode and DFmode compares
+   exist; any other mode answers false.  */
+static bool
+nios2_fpu_compare_enabled (enum rtx_code cond, enum machine_mode mode)
+{
+  bool single_p;
+
+  if (mode == SFmode)
+    single_p = true;
+  else if (mode == DFmode)
+    single_p = false;
+  else
+    return false;
+
+  switch (cond)
+    {
+    case EQ:
+      return single_p ? N2FPU_OP_ENABLED_P (fcmpeqs) : N2FPU_OP_ENABLED_P (fcmpeqd);
+    case NE:
+      return single_p ? N2FPU_OP_ENABLED_P (fcmpnes) : N2FPU_OP_ENABLED_P (fcmpned);
+    case GT:
+      return single_p ? N2FPU_OP_ENABLED_P (fcmpgts) : N2FPU_OP_ENABLED_P (fcmpgtd);
+    case GE:
+      return single_p ? N2FPU_OP_ENABLED_P (fcmpges) : N2FPU_OP_ENABLED_P (fcmpged);
+    case LT:
+      return single_p ? N2FPU_OP_ENABLED_P (fcmplts) : N2FPU_OP_ENABLED_P (fcmpltd);
+    case LE:
+      return single_p ? N2FPU_OP_ENABLED_P (fcmples) : N2FPU_OP_ENABLED_P (fcmpled);
+    default:
+      return false;
+    }
+}
+
+/* True if unspec code X is one of the TLS-related unspecs. */
+#define IS_UNSPEC_TLS(x) ((x) >= UNSPEC_TLS && (x) <= UNSPEC_ADD_TLS_LDO)
+
+
+/* Stack Layout and Calling Conventions */
+
+/* True if offset X cannot be encoded directly; 32767 is presumably the
+ largest signed 16-bit load/store displacement -- confirm against ISA. */
+#define TOO_BIG_OFFSET(X) ((X) > ((1 << 15) - 1))
+/* Register used as a scratch for large stack adjustments in the
+ prologue/epilogue code below. */
+#define TEMP_REG_NUM 8
+
+/* Emit an insn storing register REGNO at stack-pointer-relative byte
+ OFFSET, and mark it frame-related so DWARF CFI is generated. */
+static void
+save_reg (int regno, unsigned offset)
+{
+ rtx reg = gen_rtx_REG (SImode, regno);
+ rtx addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
+ gen_int_mode (offset, Pmode));
+
+ /* NOTE(review): the frame mem is created in Pmode while the register
+ is SImode -- presumably Pmode == SImode on this target; confirm. */
+ rtx pattern = gen_rtx_SET (SImode, gen_frame_mem (Pmode, addr), reg);
+ rtx insn = emit_insn (pattern);
+ RTX_FRAME_RELATED_P (insn) = 1;
+}
+
+/* Emit an insn loading register REGNO from stack-pointer-relative byte
+ OFFSET. Unlike save_reg, restores are not frame-related. */
+static void
+restore_reg (int regno, unsigned offset)
+{
+ rtx reg = gen_rtx_REG (SImode, regno);
+ rtx addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
+ gen_int_mode (offset, Pmode));
+
+ rtx pattern = gen_rtx_SET (SImode, reg, gen_frame_mem (Pmode, addr));
+ emit_insn (pattern);
+}
+
+/* Expand the function prologue: allocate the stack frame, save the
+   callee-saved registers recorded in cfun->machine->save_mask, and set
+   up the frame pointer and PIC register when they are needed.  */
+void
+expand_prologue (void)
+{
+  int ix;
+  /* compute_frame_size caches its results in cfun->machine, so this
+     single call both returns the frame size and fills in the save
+     mask/offsets used below.  (A second, redundant call previously
+     followed; it recomputed identical values and has been removed.)  */
+  HOST_WIDE_INT total_frame_size = compute_frame_size ();
+  int sp_offset;   /* offset from base_reg to final stack value.  */
+  int fp_offset;   /* offset from base_reg to final fp value.  */
+  int save_offset; /* offset from base_reg to top of register save area.  */
+  rtx insn;
+  unsigned HOST_WIDE_INT save_mask;
+
+  if (flag_stack_usage_info)
+    current_function_static_stack_size = total_frame_size;
+
+  /* Decrement the stack pointer.  */
+  if (TOO_BIG_OFFSET (total_frame_size))
+    {
+      /* We need an intermediary point, this will point at the spill
+         block.  */
+      insn = emit_insn
+        (gen_add3_insn (stack_pointer_rtx,
+                        stack_pointer_rtx,
+                        gen_int_mode (cfun->machine->save_regs_offset
+                                      - total_frame_size, Pmode)));
+      RTX_FRAME_RELATED_P (insn) = 1;
+
+      fp_offset = 0;
+      sp_offset = -cfun->machine->save_regs_offset;
+    }
+  else if (total_frame_size)
+    {
+      insn = emit_insn (gen_add3_insn (stack_pointer_rtx,
+                                       stack_pointer_rtx,
+                                       gen_int_mode (-total_frame_size,
+                                                     Pmode)));
+      RTX_FRAME_RELATED_P (insn) = 1;
+      fp_offset = cfun->machine->save_regs_offset;
+      sp_offset = 0;
+    }
+  else
+    fp_offset = sp_offset = 0;
+
+  if (crtl->limit_stack)
+    emit_insn (gen_stack_overflow_detect_and_trap ());
+
+  /* Save registers highest-numbered first, at descending offsets below
+     the top of the save area.  */
+  save_offset = fp_offset + cfun->machine->save_reg_size;
+  save_mask = cfun->machine->save_mask;
+
+  for (ix = 32; ix--;)
+    if (save_mask & ((unsigned HOST_WIDE_INT)1 << ix))
+      {
+        save_offset -= 4;
+        save_reg (ix, save_offset);
+      }
+
+  if (frame_pointer_needed)
+    {
+      insn = emit_insn (gen_add3_insn (hard_frame_pointer_rtx,
+                                       stack_pointer_rtx,
+                                       gen_int_mode (fp_offset, Pmode)));
+      RTX_FRAME_RELATED_P (insn) = 1;
+    }
+
+  /* Perform the second stack adjustment when the frame needed the
+     two-step decrement above.  */
+  if (sp_offset)
+    {
+      rtx sp_adjust
+        = gen_rtx_SET (Pmode, stack_pointer_rtx,
+                       gen_rtx_PLUS (Pmode, stack_pointer_rtx,
+                                     gen_int_mode (sp_offset, Pmode)));
+      if (SMALL_INT (sp_offset))
+        insn = emit_insn (sp_adjust);
+      else
+        {
+          /* The offset does not fit an immediate; build it in a
+             scratch register first.  */
+          rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);
+          emit_insn (gen_rtx_SET (Pmode, tmp, gen_int_mode (sp_offset, Pmode)));
+          insn = emit_insn (gen_add3_insn (stack_pointer_rtx, stack_pointer_rtx,
+                                           tmp));
+          /* Attach the sp_adjust as a note indicating what happened.  */
+          REG_NOTES (insn) = alloc_EXPR_LIST (REG_FRAME_RELATED_EXPR,
+                                              sp_adjust, REG_NOTES (insn));
+        }
+      RTX_FRAME_RELATED_P (insn) = 1;
+
+      if (crtl->limit_stack)
+        emit_insn (gen_stack_overflow_detect_and_trap ());
+    }
+
+  /* Load the PIC register if needed.  */
+  if (crtl->uses_pic_offset_table)
+    nios2_load_pic_register ();
+
+  /* If we are profiling, make sure no instructions are scheduled before
+     the call to mcount.  */
+  if (crtl->profile)
+    emit_insn (gen_blockage ());
+}
+
+/* Expand the function epilogue: restore saved registers, tear down the
+ frame, and (unless SIBCALL_P) emit the return jump. */
+void
+expand_epilogue (bool sibcall_p)
+{
+ int ix;
+ HOST_WIDE_INT total_frame_size = compute_frame_size ();
+ unsigned HOST_WIDE_INT save_mask;
+ int sp_adjust;
+ int save_offset;
+
+ /* A bare return suffices when no frame cleanup is required and this
+ is not a sibling call. */
+ if (!sibcall_p && nios2_can_use_return_insn ())
+ {
+ emit_jump_insn (gen_return ());
+ return;
+ }
+
+ /* Keep the scheduler from moving the restores above this point. */
+ emit_insn (gen_blockage ());
+
+ if (frame_pointer_needed)
+ {
+ /* Recover the stack pointer. */
+ emit_insn (gen_rtx_SET (Pmode, stack_pointer_rtx,
+ hard_frame_pointer_rtx));
+ save_offset = 0;
+ sp_adjust = total_frame_size - cfun->machine->save_regs_offset;
+ }
+ else if (TOO_BIG_OFFSET (total_frame_size))
+ {
+ /* The save area is too far away for direct offsets; move sp up to
+ it first, using a scratch register for the large constant. */
+ rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);
+
+ emit_insn
+ (gen_rtx_SET (Pmode, tmp,
+ gen_int_mode (cfun->machine->save_regs_offset,
+ Pmode)));
+ emit_insn (gen_add3_insn (stack_pointer_rtx, stack_pointer_rtx, tmp));
+ save_offset = 0;
+ sp_adjust = total_frame_size - cfun->machine->save_regs_offset;
+ }
+ else
+ {
+ save_offset = cfun->machine->save_regs_offset;
+ sp_adjust = total_frame_size;
+ }
+
+ /* Restore in the mirror order of the prologue save loop. */
+ save_mask = cfun->machine->save_mask;
+ save_offset += cfun->machine->save_reg_size;
+
+ for (ix = 32; ix--;)
+ if (save_mask & ((unsigned HOST_WIDE_INT)1 << ix))
+ {
+ save_offset -= 4;
+ restore_reg (ix, save_offset);
+ }
+
+ if (sp_adjust)
+ emit_insn (gen_add3_insn (stack_pointer_rtx, stack_pointer_rtx,
+ gen_int_mode (sp_adjust, Pmode)))
+
+ /* Add in the __builtin_eh_return stack adjustment. */
+ if (crtl->calls_eh_return)
+ emit_insn (gen_add3_insn (stack_pointer_rtx,
+ stack_pointer_rtx,
+ EH_RETURN_STACKADJ_RTX));
+
+ if (!sibcall_p)
+ emit_jump_insn (gen_return_from_epilogue (gen_rtx_REG (Pmode, RA_REGNO)));
+}
+
+/* Implement RETURN_ADDR_RTX.  Note, we do not support moving
+   back to a previous frame; only COUNT == 0 (the current frame's
+   return address, kept in ra) is supported.  */
+rtx
+nios2_get_return_address (int count)
+{
+  return (count == 0
+          ? get_hard_reg_initial_val (Pmode, RA_REGNO)
+          : const0_rtx);
+}
+
+/* Emit code to change the current function's return address to
+ ADDRESS. SCRATCH is available as a scratch register, if needed.
+ ADDRESS and SCRATCH are both word-mode GPRs. */
+void
+nios2_set_return_address (rtx address, rtx scratch)
+{
+ /* Make sure the frame layout (save_mask etc.) is available. */
+ compute_frame_size ();
+ if ((cfun->machine->save_mask >> RA_REGNO) & 1)
+ {
+ /* ra was spilled: store ADDRESS into its stack slot. The slot is
+ assumed to be at the top of the save area (save_reg_size - 4),
+ matching the descending save loop in expand_prologue when
+ RA_REGNO is the highest saved register -- confirm. */
+ unsigned offset = cfun->machine->save_reg_size - 4;
+ rtx base;
+
+ if (frame_pointer_needed)
+ base = hard_frame_pointer_rtx;
+ else
+ {
+ base = stack_pointer_rtx;
+ offset += cfun->machine->save_regs_offset;
+
+ /* Offset too large to encode: form the address in SCRATCH. */
+ if (TOO_BIG_OFFSET (offset))
+ {
+ emit_insn (gen_rtx_SET (Pmode, scratch,
+ gen_int_mode (offset, Pmode)));
+ emit_insn (gen_add3_insn (scratch, scratch, base));
+ base = scratch;
+ offset = 0;
+ }
+ }
+ if (offset)
+ base = gen_rtx_PLUS (Pmode, base, gen_int_mode (offset, Pmode));
+ emit_insn (gen_rtx_SET (Pmode, gen_rtx_MEM (Pmode, base), address));
+ }
+ else
+ /* ra still lives in its register; just overwrite it. */
+ emit_insn (gen_rtx_SET (Pmode, gen_rtx_REG (Pmode, RA_REGNO), address));
+}
+
+\f
+/* Profiling. */
+
+/* Output the profiling call sequence to FILE (FUNCTION_PROFILER).
+ ra is preserved in r8 across the _mcount call; for PIC the _mcount
+ address is looked up through the GOT. */
+void
+nios2_function_profiler (FILE *file, int labelno ATTRIBUTE_UNUSED)
+{
+ fprintf (file, "\tmov\tr8, ra\n");
+ if (flag_pic)
+ {
+ /* Compute the GOT base from the current pc, then load and call
+ _mcount through it. */
+ fprintf (file, "\tnextpc\tr2\n");
+ fprintf (file, "\t1: movhi\tr3, %%hiadj(_GLOBAL_OFFSET_TABLE_ - 1b)\n");
+ fprintf (file, "\taddi\tr3, r3, %%lo(_GLOBAL_OFFSET_TABLE_ - 1b)\n");
+ fprintf (file, "\tadd\tr2, r2, r3\n");
+ fprintf (file, "\tldw\tr2, %%call(_mcount)(r2)\n");
+ fprintf (file, "\tcallr\tr2\n");
+ }
+ else
+ fprintf (file, "\tcall\t_mcount\n");
+ fprintf (file, "\tmov\tra, r8\n");
+}
+
+/* Stack Layout. */
+/* Dump the computed frame layout (cfun->machine) to FILE as assembler
+ comments, for debugging. */
+static void
+dump_frame_size (FILE *file)
+{
+ fprintf (file, "\t%s Current Frame Info\n", ASM_COMMENT_START);
+
+ fprintf (file, "\t%s total_size = %ld\n", ASM_COMMENT_START,
+ cfun->machine->total_size);
+ fprintf (file, "\t%s var_size = %ld\n", ASM_COMMENT_START,
+ cfun->machine->var_size);
+ fprintf (file, "\t%s args_size = %ld\n", ASM_COMMENT_START,
+ cfun->machine->args_size);
+ fprintf (file, "\t%s save_reg_size = %d\n", ASM_COMMENT_START,
+ cfun->machine->save_reg_size);
+ fprintf (file, "\t%s initialized = %d\n", ASM_COMMENT_START,
+ cfun->machine->initialized);
+ fprintf (file, "\t%s save_regs_offset = %ld\n", ASM_COMMENT_START,
+ cfun->machine->save_regs_offset);
+ fprintf (file, "\t%s current_function_is_leaf = %d\n", ASM_COMMENT_START,
+ current_function_is_leaf);
+ fprintf (file, "\t%s frame_pointer_needed = %d\n", ASM_COMMENT_START,
+ frame_pointer_needed);
+ fprintf (file, "\t%s pretend_args_size = %d\n", ASM_COMMENT_START,
+ crtl->args.pretend_args_size);
+
+}
+
+/* Return true if REGNO should be saved in a prologue: a call-saved
+   register that is live, or the frame pointer / PIC register / return
+   address register when the function needs them.  */
+static bool
+save_reg_p (unsigned regno)
+{
+  gcc_assert (GP_REGNO_P (regno));
+
+  return ((df_regs_ever_live_p (regno) && !call_used_regs[regno])
+          || (regno == HARD_FRAME_POINTER_REGNUM && frame_pointer_needed)
+          || (regno == PIC_OFFSET_TABLE_REGNUM
+              && crtl->uses_pic_offset_table)
+          || (regno == RA_REGNO && df_regs_ever_live_p (RA_REGNO)));
+}
+
+/* Return the bytes needed to compute the frame pointer from the current
+   stack pointer.  Also fills in the frame layout (save mask, sizes and
+   offsets) in cfun->machine; once reload has completed the cached
+   values are returned directly.  */
+static HOST_WIDE_INT
+compute_frame_size (void)
+{
+  unsigned int regno;
+  HOST_WIDE_INT var_size;	/* # of var. bytes allocated.  */
+  HOST_WIDE_INT total_size;	/* # bytes that the entire frame takes up.  */
+  HOST_WIDE_INT save_reg_size;	/* # bytes needed to store callee save regs.  */
+  HOST_WIDE_INT out_args_size;	/* # bytes needed for outgoing args.  */
+  unsigned HOST_WIDE_INT save_mask = 0;
+
+  if (cfun->machine->initialized)
+    return cfun->machine->total_size;
+
+  save_reg_size = 0;
+  var_size = STACK_ALIGN (get_frame_size ());
+  out_args_size = STACK_ALIGN (crtl->outgoing_args_size);
+
+  total_size = var_size + out_args_size;
+
+  /* Calculate space needed for gp registers.  */
+  for (regno = 0; GP_REGNO_P (regno); regno++)
+    if (save_reg_p (regno))
+      {
+        save_mask |= (unsigned HOST_WIDE_INT)1 << regno;
+        save_reg_size += 4;
+      }
+
+  /* If we call eh_return, we need to save the EH data registers.
+     Use the same unsigned HOST_WIDE_INT shift as the loop above; the
+     previous plain "1 << r" was an int-width shift, inconsistent with
+     the save_mask type.  */
+  if (crtl->calls_eh_return)
+    {
+      unsigned i;
+      unsigned r;
+
+      for (i = 0; (r = EH_RETURN_DATA_REGNO (i)) != INVALID_REGNUM; i++)
+        if (!(save_mask & ((unsigned HOST_WIDE_INT)1 << r)))
+          {
+            save_mask |= (unsigned HOST_WIDE_INT)1 << r;
+            save_reg_size += 4;
+          }
+    }
+
+  save_reg_size = STACK_ALIGN (save_reg_size);
+  total_size += save_reg_size;
+
+  total_size += STACK_ALIGN (crtl->args.pretend_args_size);
+
+  /* Save other computed information.  */
+  cfun->machine->save_mask = save_mask;
+  cfun->machine->total_size = total_size;
+  cfun->machine->var_size = var_size;
+  cfun->machine->args_size = out_args_size;
+  cfun->machine->save_reg_size = save_reg_size;
+  cfun->machine->initialized = reload_completed;
+  cfun->machine->save_regs_offset = out_args_size + var_size;
+
+  return total_size;
+}
+
+/* Implement TARGET_CAN_ELIMINATE.  Eliminating to the stack pointer is
+   only valid when no frame pointer is required; eliminating to any
+   other register is always allowed.  */
+static bool
+nios2_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
+{
+  return to != STACK_POINTER_REGNUM || !frame_pointer_needed;
+}
+
+/* Implement INITIAL_ELIMINATION_OFFSET: return the byte offset between
+ eliminable register FROM and its replacement TO, based on the frame
+ layout computed by compute_frame_size. */
+int
+nios2_initial_elimination_offset (int from, int to)
+{
+ int offset;
+
+ /* Ensure cfun->machine holds the current frame layout. */
+ compute_frame_size ();
+
+ /* Set OFFSET to the offset from the stack pointer. */
+ switch (from)
+ {
+ case FRAME_POINTER_REGNUM:
+ offset = cfun->machine->args_size;
+ break;
+
+ case ARG_POINTER_REGNUM:
+ offset = cfun->machine->total_size;
+ offset -= crtl->args.pretend_args_size;
+ break;
+
+ default:
+ gcc_unreachable ();
+ }
+
+ /* If we are asked for the frame pointer offset, then adjust OFFSET
+ by the offset from the frame pointer to the stack pointer. */
+ if (to == HARD_FRAME_POINTER_REGNUM)
+ offset -= cfun->machine->save_regs_offset;
+
+ return offset;
+}
+
+/* Return nonzero if this function is known to have a null epilogue.
+   This allows the optimizer to omit jumps to jumps if no stack
+   was created.  */
+int
+nios2_can_use_return_insn (void)
+{
+  /* Before reload, or when ra is live or profiling is on, a bare
+     return insn is never safe.  */
+  if (!reload_completed
+      || df_regs_ever_live_p (RA_REGNO)
+      || crtl->profile)
+    return 0;
+
+  /* Otherwise a bare return works exactly when the frame is empty;
+     use the cached size when it is already valid.  */
+  return (cfun->machine->initialized
+          ? cfun->machine->total_size == 0
+          : compute_frame_size () == 0);
+}
+
+
+\f
+/* Check and signal some warnings/errors on FPU insn options. */
+static void
+nios2_custom_check_insns (void)
+{
+ unsigned int i, j;
+ bool errors = false;
+
+ /* If any double-precision insn is enabled, every insn flagged
+ N2F_DFREQ must be enabled too. One enabled double insn triggers
+ the full check, hence the break after the inner loop. */
+ for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
+ if (N2FPU_ENABLED_P (i) && N2FPU_DOUBLE_P (i))
+ {
+ for (j = 0; j < ARRAY_SIZE (nios2_fpu_insn); j++)
+ if (N2FPU_DOUBLE_REQUIRED_P (j) && ! N2FPU_ENABLED_P (j))
+ {
+ error ("switch `-mcustom-%s' is required for double precision "
+ "floating point", N2FPU_NAME (j));
+ errors = true;
+ }
+ break;
+ }
+
+ /* Warn if the user has certain exotic operations that won't get used
+ without -funsafe-math-optimizations. See expand_builtin () in
+ builtins.c. */
+ if (!flag_unsafe_math_optimizations)
+ for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
+ if (N2FPU_ENABLED_P (i) && N2FPU_UNSAFE_P (i))
+ warning (0, "switch `-mcustom-%s' has no effect unless "
+ "-funsafe-math-optimizations is specified", N2FPU_NAME (i));
+
+ /* Warn if the user is trying to use -mcustom-fmins et. al, that won't
+ get used without -ffinite-math-only. See fold_builtin_fmin_fmax ()
+ in builtins.c. */
+ if (!flag_finite_math_only)
+ for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
+ if (N2FPU_ENABLED_P (i) && N2FPU_FINITE_P (i))
+ warning (0, "switch `-mcustom-%s' has no effect unless "
+ "-ffinite-math-only is specified", N2FPU_NAME (i));
+
+ if (errors || custom_code_conflict)
+ fatal_error ("conflicting use of -mcustom switches, target attributes, "
+ "and/or __builtin_custom_ functions");
+}
+
+/* Assign custom code N to FPU insn CODE and register the assignment.
+   An existing assignment is only replaced when OVERRIDE_P.  */
+static void
+nios2_set_fpu_custom_code (enum n2fpu_code code, int N, bool override_p)
+{
+  bool unassigned = (N2FPU_N (code) == -1);
+
+  if (unassigned || override_p)
+    N2FPU_N (code) = N;
+  nios2_register_custom_code (N, CCS_FPU, (int) code);
+}
+
+/* Apply one of the predefined -mcustom-fpu-cfg= configurations CFG
+ ("60-1", "60-2", or "72-3"), assigning fixed custom codes to the
+ corresponding single-precision insns. OVERRIDE_P is passed through
+ to nios2_set_fpu_custom_code to control whether explicit per-insn
+ settings are replaced. */
+static void
+nios2_handle_custom_fpu_cfg (const char *cfg, bool override_p)
+{
+ if (!strncasecmp (cfg, "60-1", 4))
+ {
+ nios2_set_fpu_custom_code (n2fpu_fmuls, 252, override_p);
+ nios2_set_fpu_custom_code (n2fpu_fadds, 253, override_p);
+ nios2_set_fpu_custom_code (n2fpu_fsubs, 254, override_p);
+ /* All predefined configs are single-precision only, so prefer
+ single-precision constants. */
+ flag_single_precision_constant = 1;
+ }
+ else if (!strncasecmp (cfg, "60-2", 4))
+ {
+ /* Same as 60-1 plus hardware divide. */
+ nios2_set_fpu_custom_code (n2fpu_fmuls, 252, override_p);
+ nios2_set_fpu_custom_code (n2fpu_fadds, 253, override_p);
+ nios2_set_fpu_custom_code (n2fpu_fsubs, 254, override_p);
+ nios2_set_fpu_custom_code (n2fpu_fdivs, 255, override_p);
+ flag_single_precision_constant = 1;
+ }
+ else if (!strncasecmp (cfg, "72-3", 4))
+ {
+ /* Full single-precision set: conversions, compares and arithmetic. */
+ nios2_set_fpu_custom_code (n2fpu_floatus, 243, override_p);
+ nios2_set_fpu_custom_code (n2fpu_fixsi, 244, override_p);
+ nios2_set_fpu_custom_code (n2fpu_floatis, 245, override_p);
+ nios2_set_fpu_custom_code (n2fpu_fcmpgts, 246, override_p);
+ nios2_set_fpu_custom_code (n2fpu_fcmples, 249, override_p);
+ nios2_set_fpu_custom_code (n2fpu_fcmpeqs, 250, override_p);
+ nios2_set_fpu_custom_code (n2fpu_fcmpnes, 251, override_p);
+ nios2_set_fpu_custom_code (n2fpu_fmuls, 252, override_p);
+ nios2_set_fpu_custom_code (n2fpu_fadds, 253, override_p);
+ nios2_set_fpu_custom_code (n2fpu_fsubs, 254, override_p);
+ nios2_set_fpu_custom_code (n2fpu_fdivs, 255, override_p);
+ flag_single_precision_constant = 1;
+ }
+ else
+ warning (0, "ignoring unrecognized switch `-mcustom-fpu-cfg' value `%s'",
+ cfg);
+
+ /* Guard against errors in the standard configurations. */
+ nios2_custom_check_insns ();
+}
+
+/* Validate an individual -mcustom-<insn>= option and register its
+   custom instruction number for conflict checking.  */
+static void
+nios2_handle_custom_fpu_insn_option (int fpu_insn_index)
+{
+  int n = N2FPU_N (fpu_insn_index);
+
+  /* -1 is the "unset" value left by -mno-custom-<opt>; it is valid
+     but registers nothing.  */
+  if (n == -1)
+    return;
+
+  if (n >= 0 && n <= 255)
+    nios2_register_custom_code (n, CCS_FPU, fpu_insn_index);
+  else
+    error ("switch `-mcustom-%s' value %d must be between 0 and 255",
+	   N2FPU_NAME (fpu_insn_index), n);
+}
+
+/* Implement TARGET_OPTION_OVERRIDE.  */
+static void
+nios2_option_override (void)
+{
+  unsigned int i;
+
+#ifdef SUBTARGET_OVERRIDE_OPTIONS
+  SUBTARGET_OVERRIDE_OPTIONS;
+#endif
+
+  /* Check for unsupported options.  */
+  if (flag_pic && !TARGET_LINUX_ABI)
+    error ("position-independent code requires the Linux ABI");
+
+  /* Function to allocate machine-dependent function status.  */
+  init_machine_status = &nios2_init_machine_status;
+
+  /* Small-data threshold in bytes; an explicit -G on the command
+     line overrides the built-in default.  */
+  nios2_section_threshold
+    = (global_options_set.x_g_switch_value
+       ? g_switch_value : NIOS2_DEFAULT_GVALUE);
+
+  /* If we don't have mul, we don't have mulx either!  */
+  if (!TARGET_HAS_MUL && TARGET_HAS_MULX)
+    target_flags &= ~MASK_HAS_MULX;
+
+  /* Set up default handling for floating point custom instructions.
+
+     Putting things in this order means that the -mcustom-fpu-cfg=
+     switch will always be overridden by individual -mcustom-fadds=
+     switches, regardless of the order in which they were specified
+     on the command line.
+
+     This behavior of prioritization of individual -mcustom-<insn>=
+     options before the -mcustom-fpu-cfg= switch is maintained for
+     compatibility.  */
+  if (nios2_custom_fpu_cfg_string && *nios2_custom_fpu_cfg_string)
+    nios2_handle_custom_fpu_cfg (nios2_custom_fpu_cfg_string, false);
+
+  /* Handle options for individual FPU insns.  */
+  for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
+    nios2_handle_custom_fpu_insn_option (i);
+
+  /* Diagnose any conflicting custom instruction numbers.  */
+  nios2_custom_check_insns ();
+
+  /* Save the initial options in case the user does function specific
+     options.  */
+  target_option_default_node = target_option_current_node
+    = build_target_option_node ();
+}
+
+/* Allocate and zero a per-function machine_function record; installed
+   as init_machine_status in nios2_option_override.  */
+static struct machine_function *
+nios2_init_machine_status (void)
+{
+  struct machine_function *mf = ggc_alloc_cleared_machine_function ();
+  return mf;
+}
+
+\f
+/* Return true if CST is a constant within range of movi/movui/movhi,
+   i.e. loadable with a single move instruction.  */
+static bool
+nios2_simple_const_p (const_rtx cst)
+{
+  HOST_WIDE_INT v = INTVAL (cst);
+
+  if (SMALL_INT (v))
+    return true;
+  if (SMALL_INT_UNSIGNED (v))
+    return true;
+  return UPPER16_INT (v);
+}
+
+/* Compute a (partial) cost for rtx X.  Return true if the complete
+   cost has been computed, and false if subexpressions should be
+   scanned.  In either case, *TOTAL contains the cost result.  */
+static bool
+nios2_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
+		 int opno ATTRIBUTE_UNUSED,
+		 int *total, bool speed ATTRIBUTE_UNUSED)
+{
+  switch (code)
+    {
+    case CONST_INT:
+      /* Zero is free; small constants take one movi-class insn; the
+	 rest need a two-insn sequence.  */
+      if (INTVAL (x) == 0)
+	*total = COSTS_N_INSNS (0);
+      else if (nios2_simple_const_p (x))
+	*total = COSTS_N_INSNS (2);
+      else
+	*total = COSTS_N_INSNS (4);
+      return true;
+
+    case LABEL_REF:
+    case SYMBOL_REF:
+      /* ??? gp relative stuff will fit in here.  */
+      /* fall through */
+    case CONST:
+    case CONST_DOUBLE:
+      *total = COSTS_N_INSNS (4);
+      return true;
+
+    case AND:
+      /* Recognize 'nor' insn pattern.  */
+      if (GET_CODE (XEXP (x, 0)) == NOT
+	  && GET_CODE (XEXP (x, 1)) == NOT)
+	{
+	  *total = COSTS_N_INSNS (1);
+	  return true;
+	}
+      return false;
+
+    case MULT:
+      *total = COSTS_N_INSNS (1);
+      return false;
+
+    case SIGN_EXTEND:
+      *total = COSTS_N_INSNS (3);
+      return false;
+
+    case ZERO_EXTEND:
+      *total = COSTS_N_INSNS (1);
+      return false;
+
+    default:
+      return false;
+    }
+}
+
+/* Implement TARGET_PREFERRED_RELOAD_CLASS: never return NO_REGS;
+   fall back to GENERAL_REGS instead.  */
+static reg_class_t
+nios2_preferred_reload_class (rtx x ATTRIBUTE_UNUSED, reg_class_t regclass)
+{
+  if (regclass == NO_REGS)
+    return GENERAL_REGS;
+  return regclass;
+}
+
+/* Instruction support:
+ These functions are used within the machine description to
+ handle common or complicated output and expansions from
+ instructions. */
+
+/* Return TRUE if X references a SYMBOL_REF anywhere in its
+   sub-expressions.  */
+static int
+symbol_mentioned_p (rtx x)
+{
+  const char *fmt;
+  int i, j;
+
+  if (GET_CODE (x) == SYMBOL_REF)
+    return 1;
+
+  /* UNSPEC_TLS entries for a symbol include the SYMBOL_REF, but they
+     are constant offsets, not symbols.  */
+  if (GET_CODE (x) == UNSPEC && IS_UNSPEC_TLS (XINT (x, 1)))
+    return 0;
+
+  /* Recurse over every operand of X.  */
+  fmt = GET_RTX_FORMAT (GET_CODE (x));
+  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
+    {
+      if (fmt[i] == 'e')
+	{
+	  if (symbol_mentioned_p (XEXP (x, i)))
+	    return 1;
+	}
+      else if (fmt[i] == 'E')
+	{
+	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
+	    if (symbol_mentioned_p (XVECEXP (x, i, j)))
+	      return 1;
+	}
+    }
+
+  return 0;
+}
+
+/* Return TRUE if X references a LABEL_REF anywhere in its
+   sub-expressions.  */
+static int
+label_mentioned_p (rtx x)
+{
+  const char *fmt;
+  int i, j;
+
+  if (GET_CODE (x) == LABEL_REF)
+    return 1;
+
+  /* UNSPEC_TLS entries for a symbol include a LABEL_REF for the
+     referencing instruction, but they are constant offsets, not
+     symbols.  */
+  if (GET_CODE (x) == UNSPEC && IS_UNSPEC_TLS (XINT (x, 1)))
+    return 0;
+
+  /* Recurse over every operand of X.  */
+  fmt = GET_RTX_FORMAT (GET_CODE (x));
+  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
+    {
+      if (fmt[i] == 'e')
+	{
+	  if (label_mentioned_p (XEXP (x, i)))
+	    return 1;
+	}
+      else if (fmt[i] == 'E')
+	{
+	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
+	    if (label_mentioned_p (XVECEXP (x, i, j)))
+	      return 1;
+	}
+    }
+
+  return 0;
+}
+
+/* Return 1 if X is a TLS UNSPEC, possibly wrapped in CONST; 0
+   otherwise.  The original relied on an undocumented fallthrough
+   from the UNSPEC case into default; make each case return
+   explicitly instead.  */
+static int
+tls_mentioned_p (rtx x)
+{
+  switch (GET_CODE (x))
+    {
+    case CONST:
+      /* Look through the CONST wrapper.  */
+      return tls_mentioned_p (XEXP (x, 0));
+
+    case UNSPEC:
+      return IS_UNSPEC_TLS (XINT (x, 1)) ? 1 : 0;
+
+    default:
+      return 0;
+    }
+}
+
+/* Helper for nios2_tls_referenced_p, called via for_each_rtx.
+   Returns nonzero for a SYMBOL_REF with a TLS model, -1 to tell the
+   walker to skip an UNSPEC_TLS subtree, and 0 to keep walking.  */
+static int
+nios2_tls_operand_p_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
+{
+  rtx t = *x;
+
+  if (GET_CODE (t) == SYMBOL_REF)
+    return SYMBOL_REF_TLS_MODEL (t) != 0;
+
+  /* Don't recurse into UNSPEC_TLS looking for TLS symbols; these are
+     TLS offsets, not real symbol references.  */
+  if (GET_CODE (t) == UNSPEC && IS_UNSPEC_TLS (XINT (t, 1)))
+    return -1;
+
+  return 0;
+}
+
+/* Return TRUE if X contains any TLS symbol references; always false
+   when the target has no TLS support.  */
+static bool
+nios2_tls_referenced_p (rtx x)
+{
+  if (!TARGET_HAVE_TLS)
+    return false;
+  return for_each_rtx (&x, nios2_tls_operand_p_1, NULL);
+}
+
+/* Return true if X must not be forced into a constant pool entry;
+   TLS references need their machine-specific access sequence.  */
+static bool
+nios2_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+{
+  return nios2_tls_referenced_p (x);
+}
+
+/* Emit a call to __tls_get_addr. TI is the argument to this function. RET is
+ an RTX for the return value location. The entire insn sequence is
+ returned. */
+static GTY(()) rtx nios2_tls_symbol;
+
+/* Emit a call to __tls_get_addr with TI as argument; return the
+   hard register holding the call's result.  */
+static rtx
+nios2_call_tls_get_addr (rtx ti)
+{
+  rtx arg = gen_rtx_REG (Pmode, FIRST_ARG_REGNO);
+  rtx ret = gen_rtx_REG (Pmode, FIRST_RETVAL_REGNO);
+  rtx fn, insn;
+
+  /* Lazily create the __tls_get_addr symbol on first use.  */
+  if (!nios2_tls_symbol)
+    nios2_tls_symbol = init_one_libfunc ("__tls_get_addr");
+
+  /* Load the first argument register, then emit the call.  */
+  emit_insn (gen_rtx_SET (Pmode, arg, ti));
+  fn = gen_rtx_MEM (QImode, nios2_tls_symbol);
+  insn = emit_call_insn (gen_call_value (ret, fn, const0_rtx));
+  /* Mark the call const so duplicate address computations can be
+     CSEd, and record the registers it uses.  */
+  RTL_CONST_CALL_P (insn) = 1;
+  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), ret);
+  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), arg);
+
+  return ret;
+}
+
+/* Generate the code to access LOC, a thread local SYMBOL_REF.  The
+   return value will be a valid address and move_operand (either a REG
+   or a LO_SUM).  */
+static rtx
+nios2_legitimize_tls_address (rtx loc)
+{
+  rtx dest = gen_reg_rtx (Pmode);
+  rtx ret, tmp1;
+  enum tls_model model = SYMBOL_REF_TLS_MODEL (loc);
+
+  switch (model)
+    {
+    case TLS_MODEL_GLOBAL_DYNAMIC:
+      /* Compute the GOT argument and call __tls_get_addr for the
+         symbol's address.  */
+      tmp1 = gen_reg_rtx (Pmode);
+      emit_insn (gen_add_tls_gd (tmp1, pic_offset_table_rtx, loc));
+      crtl->uses_pic_offset_table = 1;
+      ret = nios2_call_tls_get_addr (tmp1);
+      emit_insn (gen_rtx_SET (Pmode, dest, ret));
+      break;
+
+    case TLS_MODEL_LOCAL_DYNAMIC:
+      /* Call __tls_get_addr once for the module's TLS block, then
+         add LOC's offset within that block.  */
+      tmp1 = gen_reg_rtx (Pmode);
+      emit_insn (gen_add_tls_ldm (tmp1, pic_offset_table_rtx, loc));
+      crtl->uses_pic_offset_table = 1;
+      ret = nios2_call_tls_get_addr (tmp1);
+
+      emit_insn (gen_add_tls_ldo (dest, ret, loc));
+
+      break;
+
+    case TLS_MODEL_INITIAL_EXEC:
+      /* Load the thread-pointer offset from the GOT and add the
+         thread pointer.  */
+      tmp1 = gen_reg_rtx (Pmode);
+      emit_insn (gen_load_tls_ie (tmp1, pic_offset_table_rtx, loc));
+      crtl->uses_pic_offset_table = 1;
+      emit_insn (gen_add3_insn (dest,
+                                gen_rtx_REG (Pmode, THREAD_POINTER_REGNUM),
+                                tmp1));
+      break;
+
+    case TLS_MODEL_LOCAL_EXEC:
+      /* The offset is a link-time constant; add it to the thread
+         pointer directly.  */
+      emit_insn (gen_add_tls_le (dest,
+                                 gen_rtx_REG (Pmode, THREAD_POINTER_REGNUM),
+                                 loc));
+      break;
+
+    default:
+      gcc_unreachable ();
+    }
+
+  return dest;
+}
+
+/* Prepare the operands of a move in MODE, legitimizing TLS and PIC
+   source references along the way.  Always returns 0; the caller
+   still emits the final move with the updated OPERANDS.  */
+int
+nios2_emit_move_sequence (rtx *operands, enum machine_mode mode)
+{
+  rtx to = operands[0];
+  rtx from = operands[1];
+
+  /* A non-register destination needs the source in a register (or
+     zero) first.  */
+  if (!register_operand (to, mode) && !reg_or_0_operand (from, mode))
+    {
+      gcc_assert (can_create_pseudo_p ());
+      from = copy_to_mode_reg (mode, from);
+    }
+
+  /* Recognize the case where from is a reference to thread-local
+     data and load its address to a register.  */
+  if (nios2_tls_referenced_p (from))
+    {
+      rtx tmp = from;
+      rtx addend = NULL;
+
+      /* Strip a (const (plus SYM ADDEND)) wrapper, keeping ADDEND
+         to re-apply after the TLS address is computed.  */
+      if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
+        {
+          addend = XEXP (XEXP (tmp, 0), 1);
+          tmp = XEXP (XEXP (tmp, 0), 0);
+        }
+
+      gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
+      gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
+
+      tmp = nios2_legitimize_tls_address (tmp);
+      if (addend)
+        {
+          tmp = gen_rtx_PLUS (SImode, tmp, addend);
+          tmp = force_operand (tmp, to);
+        }
+      from = tmp;
+    }
+  else if (flag_pic && (CONSTANT_P (from) || symbol_mentioned_p (from) ||
+                        label_mentioned_p (from)))
+    /* Symbolic constants must go through the GOT under PIC.  */
+    from = nios2_legitimize_pic_address (from, SImode,
+                                         (can_create_pseudo_p () ? 0 : to));
+
+  operands[0] = to;
+  operands[1] = from;
+  return 0;
+}
+
+/* Divide Support */
+
+/*
+ If -O3 is used, we want to output a table lookup for
+ divides between small numbers (both num and den >= 0
+ and < 0x10).  The overhead of this method in the worst
+ case is 40 bytes in the text section (10 insns) and
+ 256 bytes in the data section. Additional divides do
+ not incur additional penalties in the data section.
+
+ Code speed is improved for small divides by about 5x
+ when using this method in the worst case (~9 cycles
+ vs ~45).  And in the worst case, divides not within the
+ table are penalized by about 10% (~5 cycles vs ~45).
+ However in the typical case the penalty is not as bad
+ because doing the long divide in only 45 cycles is
+ quite optimistic.
+
+ ??? It would be nice to have some benchmarks other
+ than Dhrystone to back this up.
+
+ This bit of expansion is to create this instruction
+ sequence as rtl.
+ or $8, $4, $5
+ slli $9, $4, 4
+ cmpgeui $3, $8, 16
+ beq $3, $0, .L3
+ or $10, $9, $5
+ add $12, $11, divide_table
+ ldbu $2, 0($12)
+ br .L1
+.L3:
+ call slow_div
+.L1:
+# continue here with result in $2
+
+ ??? Ideally I would like the emit libcall block to contain
+ all of this code, but I don't know how to do that. What it
+ means is that if the divide can be eliminated, it may not
+ completely disappear.
+
+ ??? The __divsi3_table label should ideally be moved out
+ of this block and into a global. If it is placed into the
+ sdata section we can save even more cycles by doing things
+ gp relative.
+*/
+/* Expand the table-lookup + libcall division sequence described in
+   the comment above.  OPERANDS[0] receives the quotient of
+   OPERANDS[1] / OPERANDS[2].  Always returns 1 to signal that the
+   expansion was emitted.  */
+int
+nios2_emit_expensive_div (rtx *operands, enum machine_mode mode)
+{
+  rtx or_result, shift_left_result;
+  rtx lookup_value;
+  rtx lab1, lab3;
+  rtx insns;
+  rtx libfunc;
+  rtx final_result;
+  rtx tmp;
+  rtx table;
+
+  /* It may look a little generic, but only SImode
+     is supported for now.  */
+  gcc_assert (mode == SImode);
+  libfunc = optab_libfunc (sdiv_optab, SImode);
+
+  lab1 = gen_label_rtx ();
+  lab3 = gen_label_rtx ();
+
+  /* If (num | den) >= 16, one of the operands is outside the 16x16
+     lookup table; branch to the slow path at lab3.  */
+  or_result = expand_simple_binop (SImode, IOR,
+                                   operands[1], operands[2],
+                                   0, 0, OPTAB_LIB_WIDEN);
+
+  emit_cmp_and_jump_insns (or_result, GEN_INT (15), GTU, 0,
+                           GET_MODE (or_result), 0, lab3);
+  JUMP_LABEL (get_last_insn ()) = lab3;
+
+  /* Fast path: load __divsi3_table[(num << 4) | den].  */
+  shift_left_result = expand_simple_binop (SImode, ASHIFT,
+                                           operands[1], GEN_INT (4),
+                                           0, 0, OPTAB_LIB_WIDEN);
+
+  lookup_value = expand_simple_binop (SImode, IOR,
+                                      shift_left_result, operands[2],
+                                      0, 0, OPTAB_LIB_WIDEN);
+  table = gen_rtx_PLUS (SImode, lookup_value,
+                        gen_rtx_SYMBOL_REF (SImode, "__divsi3_table"));
+  convert_move (operands[0], gen_rtx_MEM (QImode, table), 1);
+
+  tmp = emit_jump_insn (gen_jump (lab1));
+  JUMP_LABEL (tmp) = lab1;
+  emit_barrier ();
+
+  /* Slow path: the ordinary division libcall, wrapped in a libcall
+     block so it can be deleted when the result is unused.  */
+  emit_label (lab3);
+  LABEL_NUSES (lab3) = 1;
+
+  start_sequence ();
+  final_result = emit_library_call_value (libfunc, NULL_RTX,
+                                          LCT_CONST, SImode, 2,
+                                          operands[1], SImode,
+                                          operands[2], SImode);
+
+  insns = get_insns ();
+  end_sequence ();
+  emit_libcall_block (insns, operands[0], final_result,
+                      gen_rtx_DIV (SImode, operands[1], operands[2]));
+
+  emit_label (lab1);
+  LABEL_NUSES (lab1) = 1;
+  return 1;
+}
+
+/* The function with address *ADDR is being called.  If the address
+   needs to be loaded from the GOT, emit the instruction to do so and
+   update *ADDR to point to the rtx for the loaded value.  */
+void
+nios2_adjust_call_address (rtx *addr)
+{
+  rtx sym;
+
+  if (!flag_pic)
+    return;
+  if (GET_CODE (*addr) != SYMBOL_REF && GET_CODE (*addr) != LABEL_REF)
+    return;
+
+  crtl->uses_pic_offset_table = 1;
+  sym = *addr;
+  *addr = gen_reg_rtx (GET_MODE (sym));
+  emit_insn (gen_pic_load_call_addr (*addr, pic_offset_table_rtx, sym));
+}
+
+
+\f
+/* Branches/Compares. */
+
+/* Return in *ALT_CODE and *ALT_OP an alternate, equivalent constant
+   comparison, e.g. >= 1 becomes > 0.  */
+static void
+nios2_alternate_compare_const (enum rtx_code code, rtx op,
+			       enum rtx_code *alt_code, rtx *alt_op,
+			       enum machine_mode mode)
+{
+  HOST_WIDE_INT opval = INTVAL (op);
+  enum rtx_code scode = signed_condition (code);
+
+  if (code == EQ || code == NE)
+    *alt_code = code;
+  else
+    /* The required conversion between [>,>=] and [<,<=] is captured
+       by a reverse + swap of condition codes.  */
+    *alt_code = reverse_condition (swap_condition (code));
+
+  /* Shift the constant by one to compensate for the strict/non-strict
+     change in the comparison.  */
+  if (scode == LT || scode == GE)
+    *alt_op = gen_int_mode (opval - 1, mode);
+  else if (scode == LE || scode == GT)
+    *alt_op = gen_int_mode (opval + 1, mode);
+  else
+    *alt_op = gen_int_mode (opval, mode);
+}
+
+/* Return true if comparing against the constant OP with CODE is
+   directly supported by a nios2 compare instruction.  */
+static bool
+nios2_valid_compare_const_p (enum rtx_code code, rtx op)
+{
+  HOST_WIDE_INT v = INTVAL (op);
+
+  switch (code)
+    {
+    case EQ: case NE: case GE: case LT:
+      /* Signed 16-bit immediate forms.  */
+      return SMALL_INT (v);
+    case GEU: case LTU:
+      /* Unsigned 16-bit immediate forms.  */
+      return SMALL_INT_UNSIGNED (v);
+    default:
+      return false;
+    }
+}
+
+/* Return true if compares in MODE are supported; only the
+   floating-point modes are conditional, on the enabled FPU compare
+   instructions.  */
+bool
+nios2_supported_compare_p (enum machine_mode mode)
+{
+  switch (mode)
+    {
+    case SFmode:
+      /* Need eq and ne, plus one of each ordered pair.  */
+      if (!N2FPU_OP_ENABLED_P (fcmpeqs) || !N2FPU_OP_ENABLED_P (fcmpnes))
+	return false;
+      if (!N2FPU_OP_ENABLED_P (fcmplts) && !N2FPU_OP_ENABLED_P (fcmpgts))
+	return false;
+      return N2FPU_OP_ENABLED_P (fcmpges) || N2FPU_OP_ENABLED_P (fcmples);
+
+    case DFmode:
+      if (!N2FPU_OP_ENABLED_P (fcmpeqd) || !N2FPU_OP_ENABLED_P (fcmpned))
+	return false;
+      if (!N2FPU_OP_ENABLED_P (fcmpltd) && !N2FPU_OP_ENABLED_P (fcmpgtd))
+	return false;
+      return N2FPU_OP_ENABLED_P (fcmpged) || N2FPU_OP_ENABLED_P (fcmpled);
+
+    default:
+      return true;
+    }
+}
+
+/* Checks and modifies the comparison in *CMP, *OP1, and *OP2 into valid
+   nios2 supported form.  Returns true if success.  */
+bool
+nios2_validate_compare (enum machine_mode mode, rtx *cmp, rtx *op1, rtx *op2)
+{
+  enum rtx_code code = GET_CODE (*cmp);
+  enum rtx_code alt_code;
+  rtx alt_op2;
+
+  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
+    {
+      /* FP compares must use whichever direction the enabled FPU
+         compare instructions provide: first try the comparison as
+         written, then with operands swapped.  */
+      if (nios2_fpu_compare_enabled (code, mode))
+        {
+          *op1 = force_reg (mode, *op1);
+          *op2 = force_reg (mode, *op2);
+          goto rebuild_cmp;
+        }
+      else
+        {
+          enum rtx_code rev_code = swap_condition (code);
+          if (nios2_fpu_compare_enabled (rev_code, mode))
+            {
+              rtx tmp = *op1;
+              *op1 = force_reg (mode, *op2);
+              *op2 = force_reg (mode, tmp);
+              code = rev_code;
+              goto rebuild_cmp;
+            }
+          else
+            return false;
+        }
+    }
+
+  if (!reg_or_0_operand (*op2, mode))
+    {
+      /* Create alternate constant compare.  */
+      nios2_alternate_compare_const (code, *op2, &alt_code, &alt_op2, mode);
+
+      /* If alternate op2 is zero (0), we can use it directly, possibly
+         swapping the compare code.  */
+      if (alt_op2 == const0_rtx)
+        {
+          code = alt_code;
+          *op2 = alt_op2;
+          goto check_rebuild_cmp;
+        }
+
+      /* Check if either constant compare can be used.  */
+      if (nios2_valid_compare_const_p (code, *op2))
+        return true;
+      else if (nios2_valid_compare_const_p (alt_code, alt_op2))
+        {
+          code = alt_code;
+          *op2 = alt_op2;
+          goto rebuild_cmp;
+        }
+
+      /* We have to force op2 into a register now.  Try to pick one
+         with a lower cost.  */
+      if (! nios2_simple_const_p (*op2)
+          && nios2_simple_const_p (alt_op2))
+        {
+          code = alt_code;
+          *op2 = alt_op2;
+        }
+      *op2 = force_reg (SImode, *op2);
+    }
+ check_rebuild_cmp:
+  /* gt/le flavors are realized by swapping the operands of the
+     corresponding lt/ge compare.  */
+  if (code == GT || code == GTU || code == LE || code == LEU)
+    {
+      rtx t = *op1; *op1 = *op2; *op2 = t;
+      code = swap_condition (code);
+    }
+ rebuild_cmp:
+  *cmp = gen_rtx_fmt_ee (code, mode, *op1, *op2);
+  return true;
+}
+
+
+/* Addressing Modes. */
+
+/* Implement TARGET_LEGITIMATE_CONSTANT_P: reject TLS symbols (and
+   sums involving them), which need a special access sequence.  */
+static bool
+nios2_legitimate_constant_p (enum machine_mode mode, rtx x)
+{
+  rtx inner;
+
+  switch (GET_CODE (x))
+    {
+    case SYMBOL_REF:
+      return !SYMBOL_REF_TLS_MODEL (x);
+
+    case CONST:
+      /* Only (const (plus A B)) with both halves legitimate.  */
+      inner = XEXP (x, 0);
+      if (GET_CODE (inner) != PLUS)
+	return false;
+      return (nios2_legitimate_constant_p (mode, XEXP (inner, 0))
+	      && nios2_legitimate_constant_p (mode, XEXP (inner, 1)));
+
+    default:
+      return true;
+    }
+}
+
+/* Implement TARGET_LEGITIMATE_ADDRESS_P.  Valid addresses are
+   gp-addressable small-data symbols, plain base registers, and
+   base + 16-bit-signed displacement.  Rewritten to return bool
+   directly instead of funneling an int flag through case
+   fallthroughs (the function's declared type is bool).  */
+static bool
+nios2_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
+			    rtx operand, bool strict)
+{
+  switch (GET_CODE (operand))
+    {
+    /* Direct.  */
+    case SYMBOL_REF:
+      /* TLS symbols need a machine-specific access sequence.  */
+      if (SYMBOL_REF_TLS_MODEL (operand))
+	return false;
+      /* Small-data symbols are addressable gp-relative.  */
+      return SYMBOL_REF_IN_NIOS2_SMALL_DATA_P (operand) != 0;
+
+    case LABEL_REF:
+    case CONST_INT:
+    case CONST:
+    case CONST_DOUBLE:
+      /* ??? In here I need to add gp addressing.  */
+      return false;
+
+    /* Register indirect.  */
+    case REG:
+      return REG_OK_FOR_BASE_P2 (operand, strict);
+
+    /* Register indirect with displacement.  */
+    case PLUS:
+      {
+	rtx op0 = XEXP (operand, 0);
+	rtx op1 = XEXP (operand, 1);
+
+	if (REG_P (op0) && GET_CODE (op1) == CONST_INT)
+	  return (REG_OK_FOR_BASE_P2 (op0, strict)
+		  && SMALL_INT (INTVAL (op1)));
+	if (REG_P (op1) && GET_CODE (op0) == CONST_INT)
+	  return (REG_OK_FOR_BASE_P2 (op1, strict)
+		  && SMALL_INT (INTVAL (op0)));
+	/* reg+reg and other forms are not addressable.  */
+	return false;
+      }
+
+    default:
+      return false;
+    }
+}
+
+/* Return true if EXP should be placed in the small data section.  */
+static bool
+nios2_in_small_data_p (const_tree exp)
+{
+  /* We want to merge strings, so we never consider them small data.  */
+  if (TREE_CODE (exp) == STRING_CST)
+    return false;
+
+  if (TREE_CODE (exp) != VAR_DECL)
+    return false;
+
+  if (DECL_SECTION_NAME (exp))
+    {
+      const char *secname = TREE_STRING_POINTER (DECL_SECTION_NAME (exp));
+
+      /* ??? these string names need moving into an array in some
+	 header file.  */
+      return (nios2_section_threshold > 0
+	      && (strcmp (secname, ".sbss") == 0
+		  || strncmp (secname, ".sbss.", 6) == 0
+		  || strcmp (secname, ".sdata") == 0
+		  || strncmp (secname, ".sdata.", 7) == 0));
+    }
+  else
+    {
+      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
+
+      /* If this is an incomplete type with size 0, then we can't put it
+	 in sdata because it might be too big when completed.  */
+      return (size > 0
+	      && (unsigned HOST_WIDE_INT) size <= nios2_section_threshold);
+    }
+}
+
+/* Encode nios2-specific flags on symbol RTL: mark weak declarations
+   so they are never addressed gp-relative.  */
+static void
+nios2_encode_section_info (tree decl, rtx rtl, int first)
+{
+  rtx symbol;
+
+  default_encode_section_info (decl, rtl, first);
+
+  /* Careful not to prod global register variables.  */
+  if (GET_CODE (rtl) != MEM)
+    return;
+
+  symbol = XEXP (rtl, 0);
+  if (GET_CODE (symbol) != SYMBOL_REF)
+    return;
+
+  /* We don't want weak variables to be addressed with gp in case they
+     end up with value 0 which is not within 2^15 of $gp.  */
+  if (DECL_P (decl) && DECL_WEAK (decl))
+    SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_WEAK_DECL;
+}
+
+/* Compute section flags for section NAME, tagging the .sbss/.sdata
+   families with SECTION_SMALL on top of the defaults.  */
+static unsigned int
+nios2_section_type_flags (tree decl, const char *name, int reloc)
+{
+  unsigned int flags = default_section_type_flags (decl, name, reloc);
+
+  if (strcmp (name, ".sbss") == 0
+      || strncmp (name, ".sbss.", 6) == 0
+      || strcmp (name, ".sdata") == 0
+      || strncmp (name, ".sdata.", 7) == 0)
+    flags |= SECTION_SMALL;
+
+  return flags;
+}
+
+
+/* Position Independent Code related. */
+
+/* Emit code to load the PIC register.  */
+static void
+nios2_load_pic_register (void)
+{
+  rtx tmp = gen_rtx_REG (Pmode, TEMP_REG_NUM);
+
+  /* Set the GOT pointer by adding TEMP_REG to the value produced by
+     the load_got_register pattern.  NOTE(review): the exact split of
+     work between the two insns is defined by the .md patterns —
+     confirm there before changing this sequence.  */
+  emit_insn (gen_load_got_register (pic_offset_table_rtx, tmp));
+  emit_insn (gen_add3_insn (pic_offset_table_rtx, pic_offset_table_rtx, tmp));
+}
+
+/* Nonzero if the constant value X is a legitimate general operand
+   when generating PIC code.  It is given that flag_pic is on and
+   that X satisfies CONSTANT_P or is a CONST_DOUBLE.  */
+bool
+nios2_legitimate_pic_operand_p (rtx x)
+{
+  /* UNSPEC_TLS is always PIC.  */
+  if (tls_mentioned_p (x))
+    return true;
+
+  switch (GET_CODE (x))
+    {
+    case SYMBOL_REF:
+    case LABEL_REF:
+      /* Bare symbols and labels must go through the GOT.  */
+      return false;
+
+    case CONST:
+      {
+	/* A symbol plus an offset is no better than a bare symbol.  */
+	rtx inner = XEXP (x, 0);
+	if (GET_CODE (inner) == PLUS
+	    && GET_CODE (XEXP (inner, 0)) == SYMBOL_REF)
+	  return false;
+	return true;
+      }
+
+    default:
+      return true;
+    }
+}
+
+/* Legitimize the PIC address ORIG, returning a replacement that is a
+   valid operand.  REG is a scratch register to load into, or 0 to
+   allocate a fresh pseudo (only allowed while pseudos can still be
+   created).  */
+rtx
+nios2_legitimize_pic_address (rtx orig,
+                              enum machine_mode mode ATTRIBUTE_UNUSED, rtx reg)
+{
+  if (GET_CODE (orig) == SYMBOL_REF
+      || GET_CODE (orig) == LABEL_REF)
+    {
+      /* Load the symbol's address out of the GOT.  */
+      if (reg == 0)
+        {
+          gcc_assert (can_create_pseudo_p ());
+          reg = gen_reg_rtx (Pmode);
+        }
+
+      emit_insn (gen_pic_load_addr (reg, pic_offset_table_rtx, orig));
+
+      crtl->uses_pic_offset_table = 1;
+
+      return reg;
+    }
+  else if (GET_CODE (orig) == CONST)
+    {
+      rtx base, offset;
+
+      /* Already expressed relative to the GOT pointer; leave it.  */
+      if (GET_CODE (XEXP (orig, 0)) == PLUS
+          && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
+        return orig;
+
+      /* TLS offsets are constants, not symbol references.  */
+      if (GET_CODE (XEXP (orig, 0)) == UNSPEC
+          && IS_UNSPEC_TLS (XINT (XEXP (orig, 0), 1)))
+        return orig;
+
+      if (reg == 0)
+        {
+          gcc_assert (can_create_pseudo_p ());
+          reg = gen_reg_rtx (Pmode);
+        }
+
+      gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
+
+      /* Legitimize both halves of (const (plus BASE OFFSET)) and
+         recombine them, folding a small constant offset back in.  */
+      base = nios2_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode,
+                                           reg);
+      offset = nios2_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
+                                             base == reg ? 0 : reg);
+
+      if (CONST_INT_P (offset))
+        {
+          if (SMALL_INT (INTVAL (offset)))
+            return plus_constant (base, INTVAL (offset));
+          else
+            offset = force_reg (Pmode, offset);
+        }
+
+      return gen_rtx_PLUS (Pmode, base, offset);
+    }
+
+  return orig;
+}
+
+/* Test for various thread-local symbols. */
+
+/* Return TRUE if X is a thread-local symbol.  */
+static bool
+nios2_tls_symbol_p (rtx x)
+{
+  if (!TARGET_HAVE_TLS)
+    return false;
+  if (GET_CODE (x) != SYMBOL_REF)
+    return false;
+  return SYMBOL_REF_TLS_MODEL (x) != 0;
+}
+
+/* Implement TARGET_LEGITIMIZE_ADDRESS.  */
+static rtx
+nios2_legitimize_address (rtx x, rtx orig_x, enum machine_mode mode)
+{
+  rtx result = x;
+
+  if (nios2_tls_symbol_p (x))
+    result = nios2_legitimize_tls_address (x);
+  else if (flag_pic)
+    {
+      /* We need to find and carefully transform any SYMBOL and LABEL
+	 references; so go back to the original address expression.  */
+      rtx new_x = nios2_legitimize_pic_address (orig_x, mode, NULL_RTX);
+
+      if (new_x != orig_x)
+	result = new_x;
+    }
+
+  return result;
+}
+
+
+/* Output assembly language related definitions. */
+
+/* Print the operand OP to file stream
+   FILE modified by LETTER.  LETTER
+   can be one of:
+     i: print "i" if OP is an immediate, except 0
+     o: print "io" if OP is volatile
+
+     z: for const0_rtx print $0 instead of 0
+     H: for %hiadj
+     L: for %lo
+     U: for upper half of 32 bit value
+     D: for the upper 32-bits of a 64-bit double value
+     R: prints reverse condition.  */
+static void
+nios2_print_operand (FILE *file, rtx op, int letter)
+{
+
+  switch (letter)
+    {
+    case 'i':
+      /* Selects the "i" (immediate) suffix of an instruction.  */
+      if (CONSTANT_P (op) && (op != const0_rtx))
+        fprintf (file, "i");
+      return;
+
+    case 'o':
+      /* Selects the "io" (cache-bypass) form for volatile accesses,
+         or always when all accesses bypass the cache.  */
+      if (GET_CODE (op) == MEM
+          && ((MEM_VOLATILE_P (op) && TARGET_BYPASS_CACHE_VOLATILE)
+              || TARGET_BYPASS_CACHE))
+        fprintf (file, "io");
+      return;
+
+    default:
+      break;
+    }
+
+  /* Condition codes: plain prints the condition name, %R prints the
+     reversed condition.  */
+  if (comparison_operator (op, VOIDmode))
+    {
+      enum rtx_code cond = GET_CODE (op);
+      if (letter == 0)
+        {
+          fprintf (file, "%s", GET_RTX_NAME (cond));
+          return;
+        }
+      if (letter == 'R')
+        {
+          fprintf (file, "%s", GET_RTX_NAME (reverse_condition (cond)));
+          return;
+        }
+    }
+
+  switch (GET_CODE (op))
+    {
+    case REG:
+      if (letter == 0 || letter == 'z')
+        {
+          fprintf (file, "%s", reg_names[REGNO (op)]);
+          return;
+        }
+      else if (letter == 'D')
+        {
+          /* High word of a register pair.  */
+          fprintf (file, "%s", reg_names[REGNO (op)+1]);
+          return;
+        }
+      break;
+
+    case CONST_INT:
+      if (INTVAL (op) == 0 && letter == 'z')
+        {
+          fprintf (file, "zero");
+          return;
+        }
+      else if (letter == 'U')
+        {
+          /* Upper 16 bits of the constant.  NOTE(review): for
+             negative VAL, `/ 65536' truncates toward zero rather
+             than shifting arithmetically — confirm this matches the
+             intended %U semantics.  */
+          HOST_WIDE_INT val = INTVAL (op);
+          rtx new_op;
+          val = (val / 65536) & 0xFFFF;
+          new_op = gen_int_mode (val, SImode);
+          output_addr_const (file, new_op);
+          return;
+        }
+      /* Else, fall through.  */
+
+    case CONST:
+    case LABEL_REF:
+    case SYMBOL_REF:
+    case CONST_DOUBLE:
+      if (letter == 0 || letter == 'z')
+        {
+          output_addr_const (file, op);
+          return;
+        }
+      else if (letter == 'H')
+        {
+          fprintf (file, "%%hiadj(");
+          output_addr_const (file, op);
+          fprintf (file, ")");
+          return;
+        }
+      else if (letter == 'L')
+        {
+          fprintf (file, "%%lo(");
+          output_addr_const (file, op);
+          fprintf (file, ")");
+          return;
+        }
+      break;
+
+
+    case SUBREG:
+    case MEM:
+      if (letter == 0)
+        {
+          output_address (op);
+          return;
+        }
+      break;
+
+    case CODE_LABEL:
+      if (letter == 0)
+        {
+          output_addr_const (file, op);
+          return;
+        }
+      break;
+
+    default:
+      break;
+    }
+
+  /* Anything reaching here is an operand/letter combination we do
+     not know how to print.  */
+  fprintf (stderr, "Missing way to print (%c) ", letter);
+  debug_rtx (op);
+  gcc_unreachable ();
+}
+
+/* Return nonzero if OP is a symbolic constant that can be addressed
+   gp-relative, i.e. it refers to small-data (possibly with a
+   constant offset folded in).  */
+static int
+gprel_constant (rtx op)
+{
+  if (GET_CODE (op) == SYMBOL_REF)
+    return SYMBOL_REF_IN_NIOS2_SMALL_DATA_P (op) ? 1 : 0;
+
+  /* Look through a (const (plus SYM OFFSET)) wrapper.  */
+  if (GET_CODE (op) == CONST && GET_CODE (XEXP (op, 0)) == PLUS)
+    return gprel_constant (XEXP (XEXP (op, 0), 0));
+
+  return 0;
+}
+
+/* Print the memory address OP to FILE in assembler syntax:
+   "offset(base)" for register-indirect forms, "%gprel(sym)($gp)" for
+   small-data symbols.  */
+static void
+nios2_print_operand_address (FILE *file, rtx op)
+{
+  switch (GET_CODE (op))
+    {
+    case CONST:
+    case CONST_INT:
+    case LABEL_REF:
+    case CONST_DOUBLE:
+    case SYMBOL_REF:
+      /* Only small-data symbols are directly addressable, via $gp.  */
+      if (gprel_constant (op))
+        {
+          fprintf (file, "%%gprel(");
+          output_addr_const (file, op);
+          fprintf (file, ")(%s)", reg_names[GP_REGNO]);
+          return;
+        }
+
+      break;
+
+    case PLUS:
+      /* Base register plus constant displacement, in either order.  */
+      {
+        rtx op0 = XEXP (op, 0);
+        rtx op1 = XEXP (op, 1);
+
+        if (REG_P (op0) && CONSTANT_P (op1))
+          {
+            output_addr_const (file, op1);
+            fprintf (file, "(%s)", reg_names[REGNO (op0)]);
+            return;
+          }
+        else if (REG_P (op1) && CONSTANT_P (op0))
+          {
+            output_addr_const (file, op0);
+            fprintf (file, "(%s)", reg_names[REGNO (op1)]);
+            return;
+          }
+      }
+      break;
+
+    case REG:
+      /* Plain register indirect: zero displacement.  */
+      fprintf (file, "0(%s)", reg_names[REGNO (op)]);
+      return;
+
+    case MEM:
+      /* Print the address inside the MEM.  */
+      {
+        rtx base = XEXP (op, 0);
+        nios2_print_operand_address (file, base);
+        return;
+      }
+    default:
+      break;
+    }
+
+  fprintf (stderr, "Missing way to print address\n");
+  debug_rtx (op);
+  gcc_unreachable ();
+}
+
+/* Output a SIZE-byte DTP-relative reference to X for debug info,
+   using the %tls_ldo relocation.  Only 32-bit references are
+   supported.  */
+static void
+nios2_output_dwarf_dtprel (FILE *file, int size, rtx x)
+{
+  gcc_assert (size == 4);
+
+  fputs ("\t.4byte\t%tls_ldo(", file);
+  output_addr_const (file, x);
+  fputs (")", file);
+}
+
+/* Implement TARGET_ASM_FUNCTION_PROLOGUE: under -fverbose-asm or
+   -dA, emit a comment describing the computed frame layout.  */
+static void
+nios2_asm_function_prologue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
+{
+  if (flag_verbose_asm || flag_debug_asm)
+    {
+      compute_frame_size ();
+      dump_frame_size (file);
+    }
+}
+
+/* Emit assembly of custom FPU instructions.  Returns a pointer to a
+   static buffer holding the full template for CODE, including any
+   fwrx prologue / frdy epilogue needed for DFmode operands.  */
+const char*
+nios2_fpu_insn_asm (enum n2fpu_code code)
+{
+  static char buf[256];
+  const char *op1, *op2, *op3;
+  /* Derive the limit from the buffer itself instead of repeating the
+     magic constant, so BUF can be resized in one place.  */
+  int ln = sizeof (buf), n = 0;
+
+  int N = N2FPU_N (code);
+  int num_operands = N2FPU (code).num_operands;
+  const char* insn_name = N2FPU_NAME (code);
+  tree ftype = nios2_ftype (N2FPU_FTCODE (code));
+  enum machine_mode dst_mode = TYPE_MODE (TREE_TYPE (ftype));
+  enum machine_mode src_mode = TYPE_MODE (TREE_VALUE (TYPE_ARG_TYPES (ftype)));
+
+  /* Prepare X register for DF input operands.  */
+  if (GET_MODE_SIZE (src_mode) == 8 && num_operands == 3)
+    n = snprintf (buf, ln, "custom\t%d, zero, %%1, %%D1 # fwrx %%1\n\t",
+                  N2FPU_N (n2fpu_fwrx));
+
+  if (src_mode == SFmode)
+    {
+      if (dst_mode == VOIDmode)
+        {
+          /* The fwry case.  */
+          op1 = op3 = "zero";
+          op2 = "%0";
+          num_operands -= 1;
+        }
+      else
+        {
+          op1 = "%0"; op2 = "%1";
+          op3 = (num_operands == 2 ? "zero" : "%2");
+        }
+    }
+  else if (src_mode == DFmode)
+    {
+      if (dst_mode == VOIDmode)
+        {
+          /* The fwrx case.  */
+          op1 = "zero";
+          op2 = "%0";
+          op3 = "%D0";
+          num_operands -= 1;
+        }
+      else
+        {
+          op1 = (dst_mode == DFmode ? "%D0" : "%0");
+          op2 = (num_operands == 2 ? "%1" : "%2");
+          op3 = (num_operands == 2 ? "%D1" : "%D2");
+        }
+    }
+  else if (src_mode == VOIDmode)
+    {
+      /* frdxlo, frdxhi, frdy cases.  */
+      gcc_assert (dst_mode == SFmode);
+      op1 = "%0";
+      op2 = op3 = "zero";
+    }
+  else if (src_mode == SImode)
+    {
+      /* Conversion operators.  */
+      gcc_assert (num_operands == 2);
+      op1 = (dst_mode == DFmode ? "%D0" : "%0");
+      op2 = "%1";
+      op3 = "zero";
+    }
+  else
+    gcc_unreachable ();
+
+  /* Main instruction string.  */
+  n += snprintf (buf + n, ln - n, "custom\t%d, %s, %s, %s # %s %%0%s%s",
+                 N, op1, op2, op3, insn_name,
+                 (num_operands >= 2 ? ", %1" : ""),
+                 (num_operands == 3 ? ", %2" : ""));
+
+  /* Extraction of Y register for DF results.  */
+  if (dst_mode == DFmode)
+    snprintf (buf + n, ln - n, "\n\tcustom\t%d, %%0, zero, zero # frdy %%0",
+              N2FPU_N (n2fpu_frdy));
+  return buf;
+}
+
+
+\f
+/* Instruction scheduler related. */
+
+/* Implement TARGET_SCHED_ISSUE_RATE: use the DFA's maximum issue
+   rate when the scheduling description provides one, else 1.  */
+static int
+nios2_issue_rate (void)
+{
+#ifdef MAX_DFA_ISSUE_RATE
+  return MAX_DFA_ISSUE_RATE;
+#else
+  return 1;
+#endif
+}
+
+\f
+
+/* Function argument related. */
+
+/* Initialize CUM for a call; nios2 only tracks how many argument
+   register words have been consumed, so everything else is unused.  */
+void
+nios2_init_cumulative_args (CUMULATIVE_ARGS *cum,
+                            tree fntype ATTRIBUTE_UNUSED,
+                            rtx libname ATTRIBUTE_UNUSED,
+                            tree fndecl ATTRIBUTE_UNUSED,
+                            int n_named_args ATTRIBUTE_UNUSED)
+{
+  cum->regs_used = 0;
+}
+
+/* Define where to put the arguments to a function. Value is zero to
+ push the argument on the stack, or a hard register in which to
+ store the argument.
+
+ MODE is the argument's machine mode.
+ TYPE is the data type of the argument (as a tree).
+ This is null for libcalls where that information may
+ not be available.
+ CUM is a variable of type CUMULATIVE_ARGS which gives info about
+ the preceding args and about the function being called.
+ NAMED is nonzero if this argument is a named parameter
+ (otherwise it is an extra parameter matching an ellipsis). */
+
+static rtx
+nios2_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
+		    const_tree type ATTRIBUTE_UNUSED,
+		    bool named ATTRIBUTE_UNUSED)
+{
+  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
+  /* Once the argument registers are exhausted, arguments go on the
+     stack (signalled by NULL_RTX).  */
+  if (cum->regs_used >= NUM_ARG_REGS)
+    return NULL_RTX;
+
+  return gen_rtx_REG (mode, FIRST_ARG_REGNO + cum->regs_used);
+}
+
+/* Return number of bytes, at the beginning of the argument, that must be
+   put in registers.  0 if the argument is entirely in registers or entirely
+   in memory.  */
+
+static int
+nios2_arg_partial_bytes (cumulative_args_t cum_v,
+			 enum machine_mode mode, tree type,
+			 bool named ATTRIBUTE_UNUSED)
+{
+  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+  HOST_WIDE_INT arg_size;
+
+  /* Size of this argument in bytes.  */
+  if (mode == BLKmode)
+    {
+      arg_size = int_size_in_bytes (type);
+      if (arg_size < 0)
+	internal_error
+	  ("Do not know how to handle large structs or variable length types");
+    }
+  else
+    arg_size = GET_MODE_SIZE (mode);
+
+  /* Convert to words (round up).  */
+  arg_size = (arg_size + 3) / 4;
+
+  /* An argument that straddles the register/stack boundary passes its
+     leading words in the remaining registers.  */
+  if (cum->regs_used < NUM_ARG_REGS
+      && cum->regs_used + arg_size > NUM_ARG_REGS)
+    return (NUM_ARG_REGS - cum->regs_used) * UNITS_PER_WORD;
+
+  return 0;
+}
+
+/* Update the data in CUM to advance over an argument
+ of mode MODE and data type TYPE.
+ (TYPE is null for libcalls where that information may not be available.) */
+
+static void
+nios2_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
+ const_tree type ATTRIBUTE_UNUSED,
+ bool named ATTRIBUTE_UNUSED)
+{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+ HOST_WIDE_INT param_size;
+
+ if (mode == BLKmode)
+ {
+ param_size = int_size_in_bytes (type);
+ if (param_size < 0)
+ internal_error
+ ("Do not know how to handle large structs or variable length types");
+ }
+ else
+ param_size = GET_MODE_SIZE (mode);
+
+ /* Convert to words (round up). */
+ param_size = (3 + param_size) / 4;
+
+ if (cum->regs_used + param_size > NUM_ARG_REGS)
+ cum->regs_used = NUM_ARG_REGS;
+ else
+ cum->regs_used += param_size;
+
+ return;
+}
+
/* Implement FUNCTION_ARG_PADDING: decide at which end of its stack
   slot an argument smaller than the slot is placed.  */
enum direction
nios2_function_arg_padding (enum machine_mode mode, const_tree type)
{
  /* On little-endian targets, the first byte of every stack argument
     is passed in the first byte of the stack slot.  */
  if (!BYTES_BIG_ENDIAN)
    return upward;

  /* Otherwise, integral types are padded downward: the last byte of a
     stack argument is passed in the last byte of the stack slot.  */
  if (type != 0
      ? INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
      : GET_MODE_CLASS (mode) == MODE_INT)
    return downward;

  /* Remaining cases: pad upward when the argument fills at least a
     whole slot (PARM_BOUNDARY bits), downward otherwise.  BLKmode
     arguments must be measured via their type's size in bytes.  */
  if (mode != BLKmode)
    return (GET_MODE_BITSIZE (mode) >= PARM_BOUNDARY) ? upward : downward;
  else
    return ((int_size_in_bytes (type) >= (PARM_BOUNDARY / BITS_PER_UNIT))
            ? upward : downward);
}
+
/* Implement BLOCK_REG_PADDING: a BLKmode argument's partial registers
   are padded the same way as its stack image.  */
enum direction
nios2_block_reg_padding (enum machine_mode mode, tree type,
                         int first ATTRIBUTE_UNUSED)
{
  /* ??? Do we need to treat floating point specially, ala MIPS? */
  return nios2_function_arg_padding (mode, type);
}
+
+
/* Emit RTL insns to initialize the variable parts of a trampoline.
   M_TRAMP is the memory block for the trampoline, FNDECL the nested
   function whose address goes in it, and CXT is an RTX for the static
   chain value for the function.  On Nios II, we handle this by a
   library call.  */
static void
nios2_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
{
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
  rtx ctx_reg = force_reg (Pmode, cxt);
  rtx addr = force_reg (Pmode, XEXP (m_tramp, 0));

  /* __trampoline_setup (addr, fnaddr, ctx_reg) writes the trampoline
     contents at run time.  */
  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
                     LCT_NORMAL, VOIDmode, 3,
                     addr, Pmode,
                     fnaddr, Pmode,
                     ctx_reg, Pmode);
}
+
/* Implement TARGET_FUNCTION_VALUE: return values live in the first
   return-value register, in the mode of RET_TYPE.  */
static rtx
nios2_function_value (const_tree ret_type, const_tree fn ATTRIBUTE_UNUSED,
                      bool outgoing ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (TYPE_MODE (ret_type), FIRST_RETVAL_REGNO);
}
+
/* Implement TARGET_LIBCALL_VALUE: libcall results use the same
   register as ordinary function return values.  */
static rtx
nios2_libcall_value (enum machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (mode, FIRST_RETVAL_REGNO);
}
+
/* Implement TARGET_FUNCTION_VALUE_REGNO_P: only FIRST_RETVAL_REGNO
   ever holds a function return value.  */
static bool
nios2_function_value_regno_p (const unsigned int regno)
{
  return (regno == FIRST_RETVAL_REGNO);
}
+
+static bool
+nios2_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
+{
+ return (int_size_in_bytes (type) > (2 * UNITS_PER_WORD)
+ || int_size_in_bytes (type) == -1);
+}
+
/* Implement TARGET_SETUP_INCOMING_VARARGS.  For a varargs function,
   spill the anonymous argument registers to the stack area just below
   the incoming arguments, so va_arg can walk everything as memory;
   *PRETEND_SIZE reports the number of bytes so saved.  On the second
   pass (SECOND_TIME nonzero) only the size is recomputed, no RTL is
   emitted.
   ??? It may be possible to eliminate the copyback and implement
   my own va_arg type, but that is more work for now.  */
static void
nios2_setup_incoming_varargs (cumulative_args_t cum_v,
                              enum machine_mode mode, tree type,
                              int *pretend_size, int second_time)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  CUMULATIVE_ARGS local_cum;
  cumulative_args_t local_cum_v = pack_cumulative_args (&local_cum);
  int regs_to_push;
  int pret_size;

  /* Advance a copy of CUM past the last named argument; what remains
     of the argument registers is the anonymous part.  */
  local_cum = *cum;
  nios2_function_arg_advance (local_cum_v, mode, type, 1);

  regs_to_push = NUM_ARG_REGS - local_cum.regs_used;

  if (!second_time && regs_to_push > 0)
    {
      rtx ptr = virtual_incoming_args_rtx;
      rtx mem = gen_rtx_MEM (BLKmode, ptr);
      /* Blockages on both sides keep other memory accesses from being
         moved across the register dump.  */
      emit_insn (gen_blockage ());
      move_block_from_reg (local_cum.regs_used + FIRST_ARG_REGNO, mem,
                           regs_to_push);
      emit_insn (gen_blockage ());
    }

  pret_size = regs_to_push * UNITS_PER_WORD;

  if (pret_size)
    *pretend_size = pret_size;
}
+
+
+\f
/* Init FPU builtins: register one "__builtin_custom_<name>" function
   per entry of nios2_fpu_insn[], with builtin codes numbered
   consecutively from START_CODE.  */
static void
nios2_init_fpu_builtins (int start_code)
{
  char builtin_name[64] = "__builtin_custom_";
  unsigned int i, n = strlen ("__builtin_custom_");

  for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
    {
      /* Append the insn name to the fixed prefix.  */
      snprintf (builtin_name + n, sizeof (builtin_name) - n,
                "%s", N2FPU_NAME (i));
      add_builtin_function (builtin_name, nios2_ftype (N2FPU_FTCODE (i)),
                            start_code + i, BUILT_IN_MD, NULL, NULL_TREE);
    }
}
+
/* Expand the FPU builtin with index CODE (relative to
   nios2_fpu_builtin_base) into RTL.  EXP is the CALL_EXPR; the result,
   if the builtin returns one, goes in TARGET.  It is a fatal error to
   call an FPU builtin whose -mcustom-<insn> switch was not given,
   since the insn then has no custom-instruction number.  */
static rtx
nios2_expand_fpu_builtin (tree exp, unsigned int code, rtx target)
{
  struct expand_operand ops[MAX_RECOG_OPERANDS];
  enum insn_code icode = N2FPU_ICODE (code);
  int nargs, argno, opno = 0;
  int num_operands = N2FPU (code).num_operands;
  enum machine_mode dst_mode = TYPE_MODE (TREE_TYPE (exp));
  bool has_target_p = (dst_mode != VOIDmode);

  if (N2FPU_N (code) < 0)
    fatal_error ("Cannot call `__builtin_custom_%s' without specifying switch"
                 " `-mcustom-%s'", N2FPU_NAME (code), N2FPU_NAME (code));
  if (has_target_p)
    create_output_operand (&ops[opno++], target, dst_mode);
  else
    /* Subtract away the count of the VOID return, mainly for fwrx/fwry.  */
    num_operands -= 1;
  nargs = call_expr_nargs (exp);
  for (argno = 0; argno < nargs; argno++)
    {
      tree arg = CALL_EXPR_ARG (exp, argno);
      create_input_operand (&ops[opno++], expand_normal (arg),
                            TYPE_MODE (TREE_TYPE (arg)));
    }
  if (!maybe_expand_insn (icode, num_operands, ops))
    {
      error ("invalid argument to built-in function");
      /* Return a dummy of the right mode so expansion can continue.  */
      return has_target_p ? gen_reg_rtx (ops[0].mode) : const0_rtx;
    }
  return has_target_p ? ops[0].value : const0_rtx;
}
+
+
/* Nios II has custom instruction built-in functions of the forms:
   __builtin_custom_n
   __builtin_custom_nX
   __builtin_custom_nXX
   __builtin_custom_Xn
   __builtin_custom_XnX
   __builtin_custom_XnXX

   where each X could be either 'i' (int), 'f' (float), or 'p' (void*).
   Therefore with 0-1 return values, and 0-2 arguments, we have a
   total of (3 + 1) * (1 + 3 + 9) == 52 custom builtin functions.
*/
#define NUM_CUSTOM_BUILTINS ((3 + 1) * (1 + 3 + 9))
/* Name suffix ("n", "fnp", "inii", ...) of each custom builtin,
   indexed by builtin code: at most 4 characters plus the NUL.  Used
   for diagnostics.  */
static char custom_builtin_name[NUM_CUSTOM_BUILTINS][5];
+
/* Register all NUM_CUSTOM_BUILTINS custom-instruction builtins, with
   builtin codes numbered consecutively from START_CODE.  The three
   nested loops enumerate return type and up to two argument types,
   each drawn from { none, float, int, pointer }.  */
static void
nios2_init_custom_builtins (int start_code)
{
  tree builtin_ftype, ret_type;
  char builtin_name[32] = "__builtin_custom_";
  int n = strlen ("__builtin_custom_");
  int builtin_code = 0;
  int lhs, rhs1, rhs2;

  /* Table mapping loop index to name fragment and tree type.  */
  struct { tree type; const char* c; } op[4];
  /* z */ op[0].c = "";  op[0].type = NULL_TREE;
  /* f */ op[1].c = "f"; op[1].type = float_type_node;
  /* i */ op[2].c = "i"; op[2].type = integer_type_node;
  /* p */ op[3].c = "p"; op[3].type = ptr_type_node;

  /* This way of constructing the function tree types will slightly
     overlap with the N2_FTYPES list used by other builtins.  */

  for (lhs = 0; lhs < 4; lhs++)
    for (rhs1 = 0; rhs1 < 4; rhs1++)
      for (rhs2 = 0; rhs2 < 4; rhs2++)
        {
          /* A second argument is only valid if there is a first.  */
          if (rhs1 == 0 && rhs2 != 0)
            continue;
          ret_type = (op[lhs].type ? op[lhs].type : void_type_node);
          /* Every custom builtin takes the opcode as its leading int
             argument, followed by the declared operands.  */
          builtin_ftype
            = build_function_type_list (ret_type, integer_type_node,
                                        op[rhs1].type, op[rhs2].type,
                                        NULL_TREE);
          snprintf (builtin_name + n, 32 - n, "%sn%s%s",
                    op[lhs].c, op[rhs1].c, op[rhs2].c);
          /* Save copy of parameter string into custom_builtin_name[];
             the suffix is at most 4 chars, so 5 bytes always fit.  */
          strncpy (custom_builtin_name[builtin_code], builtin_name + n, 5);
          add_builtin_function (builtin_name, builtin_ftype,
                                start_code + builtin_code,
                                BUILT_IN_MD, NULL, NULL_TREE);
          builtin_code += 1;
        }
}
+
/* Expand custom-instruction builtin number INDEX (relative to
   nios2_custom_builtin_base).  EXP is the CALL_EXPR; TARGET is a
   suggested place for the result, if any.  The first call argument is
   the custom opcode and must be a compile-time constant in 0-255;
   later arguments become operands of the emitted unspec.  */
static rtx
nios2_expand_custom_builtin (tree exp, unsigned int index, rtx target)
{
  bool has_target_p = (TREE_TYPE (exp) != void_type_node);
  enum machine_mode tmode = VOIDmode;
  int nargs, argno;
  rtx value, insn, unspec_args[3];
  tree arg;

  /* XnXX form.  */
  if (has_target_p)
    {
      tmode = TYPE_MODE (TREE_TYPE (exp));
      /* TARGET must be a register of the result mode.  */
      if (!target || GET_MODE (target) != tmode
          || !REG_P (target))
        target = gen_reg_rtx (tmode);
    }

  nargs = call_expr_nargs (exp);
  for (argno = 0; argno < nargs; argno++)
    {
      arg = CALL_EXPR_ARG (exp, argno);
      value = expand_normal (arg);
      unspec_args[argno] = value;
      if (argno == 0)
        {
          /* Argument 0 is the opcode; it must satisfy
             custom_insn_opcode (a constant in range).  */
          if (!custom_insn_opcode (value, VOIDmode))
            error ("Custom instruction opcode must be compile time "
                   "constant in the range 0-255 for __builtin_custom_%s",
                   custom_builtin_name[index]);
        }
      else
        /* For other arguments, force into a register.  */
        unspec_args[argno] = force_reg (TYPE_MODE (TREE_TYPE (arg)),
                                       unspec_args[argno]);
    }
  /* Fill remaining unspec operands with zero.  */
  for (; argno < 3; argno++)
    unspec_args[argno] = const0_rtx;

  /* Emit either a SET of an UNSPEC_VOLATILE (value-producing form) or
     a bare UNSPEC_VOLATILE (void form).  */
  insn = (has_target_p
          ? gen_rtx_SET (VOIDmode, target,
                         gen_rtx_UNSPEC_VOLATILE (tmode,
                                                  gen_rtvec_v (3, unspec_args),
                                                  UNSPECV_CUSTOM_XNXX))
          : gen_rtx_UNSPEC_VOLATILE (VOIDmode, gen_rtvec_v (3, unspec_args),
                                     UNSPECV_CUSTOM_NXX));
  emit_insn (insn);
  return has_target_p ? target : const0_rtx;
}
+
+
+\f
+
/* Main definition of built-in functions.  Nios II has a small number of fixed
   builtins, plus a large number of FPU insn builtins, and builtins for
   generating custom instructions.  */

/* Descriptor of one fixed builtin: the insn to expand, its function
   type, and the user-visible name.  */
struct nios2_builtin_desc
{
  enum insn_code icode;
  enum nios2_ftcode ftype;
  const char* name;
};

/* X-macro list of the fixed builtins; expanded twice below, once for
   the enum of codes and once for the descriptor table, keeping the
   two in sync.  */
#define N2_BUILTINS \
  N2_BUILTIN_DEF (sync, N2_FTYPE_VOID_VOID) \
  N2_BUILTIN_DEF (ldbio, N2_FTYPE_SI_CVPTR) \
  N2_BUILTIN_DEF (ldbuio, N2_FTYPE_UI_CVPTR) \
  N2_BUILTIN_DEF (ldhio, N2_FTYPE_SI_CVPTR) \
  N2_BUILTIN_DEF (ldhuio, N2_FTYPE_UI_CVPTR) \
  N2_BUILTIN_DEF (ldwio, N2_FTYPE_SI_CVPTR) \
  N2_BUILTIN_DEF (stbio, N2_FTYPE_VOID_VPTR_SI) \
  N2_BUILTIN_DEF (sthio, N2_FTYPE_VOID_VPTR_SI) \
  N2_BUILTIN_DEF (stwio, N2_FTYPE_VOID_VPTR_SI) \
  N2_BUILTIN_DEF (rdctl, N2_FTYPE_SI_SI) \
  N2_BUILTIN_DEF (wrctl, N2_FTYPE_VOID_SI_SI)

enum nios2_builtin_code {
#define N2_BUILTIN_DEF(name, ftype) NIOS2_BUILTIN_ ## name,
  N2_BUILTINS
#undef N2_BUILTIN_DEF
  NUM_FIXED_NIOS2_BUILTINS
};

static const struct nios2_builtin_desc nios2_builtins[] = {
#define N2_BUILTIN_DEF(name, ftype) \
  { CODE_FOR_ ## name, ftype, "__builtin_" #name },
  N2_BUILTINS
#undef N2_BUILTIN_DEF
};

/* Start/ends of FPU/custom insn builtin index ranges.  */
static unsigned int nios2_fpu_builtin_base;
static unsigned int nios2_custom_builtin_base;
static unsigned int nios2_custom_builtin_end;
+
+static void
+nios2_init_builtins (void)
+{
+ unsigned int i;
+
+ /* Initialize fixed builtins. */
+ for (i = 0; i < ARRAY_SIZE (nios2_builtins); i++)
+ {
+ const struct nios2_builtin_desc *d = &nios2_builtins[i];
+ add_builtin_function (d->name, nios2_ftype (d->ftype), i,
+ BUILT_IN_MD, NULL, NULL);
+ }
+
+ /* Initialize FPU builtins. */
+ nios2_fpu_builtin_base = ARRAY_SIZE (nios2_builtins);
+ nios2_init_fpu_builtins (nios2_fpu_builtin_base);
+
+ /* Initialize custom insn builtins. */
+ nios2_custom_builtin_base
+ = nios2_fpu_builtin_base + ARRAY_SIZE (nios2_fpu_insn);
+ nios2_custom_builtin_end
+ = nios2_custom_builtin_base + NUM_CUSTOM_BUILTINS;
+ nios2_init_custom_builtins (nios2_custom_builtin_base);
+}
+
+static rtx
+nios2_expand_builtin_insn (const struct nios2_builtin_desc *d, int n,
+ struct expand_operand* ops, bool has_target_p)
+{
+ if (maybe_expand_insn (d->icode, n, ops))
+ return has_target_p ? ops[0].value : const0_rtx;
+ else
+ {
+ error ("invalid argument to built-in function %s", d->name);
+ return has_target_p ? gen_reg_rtx (ops[0].mode) : const0_rtx;
+ }
+}
+
/* Expand one of the ldXio/stXio builtins described by D.  Operand 0
   of the insn tells us which direction: if it allows memory the
   pattern is a store (stXio), otherwise a load (ldXio).  */
static rtx
nios2_expand_ldstio_builtin (tree exp, rtx target,
                             const struct nios2_builtin_desc *d)
{
  bool has_target_p;
  rtx addr, mem, val;
  struct expand_operand ops[MAX_RECOG_OPERANDS];
  enum machine_mode mode = insn_data[d->icode].operand[0].mode;

  /* First call argument is the address accessed.  */
  addr = expand_normal (CALL_EXPR_ARG (exp, 0));
  mem = gen_rtx_MEM (mode, addr);

  if (insn_data[d->icode].operand[0].allows_mem)
    {
      /* stxio: second argument is the value stored, narrowed to the
         insn's mode via a subreg.  */
      val = force_reg (mode, expand_normal (CALL_EXPR_ARG (exp, 1)));
      val = simplify_gen_subreg (mode, val, GET_MODE (val), 0);
      create_output_operand (&ops[0], mem, mode);
      create_input_operand (&ops[1], val, mode);
      has_target_p = false;
    }
  else
    {
      /* ldxio: the loaded value is the result.  */
      create_output_operand (&ops[0], target, mode);
      create_input_operand (&ops[1], mem, mode);
      has_target_p = true;
    }
  return nios2_expand_builtin_insn (d, 2, ops, has_target_p);
}
+
/* Expand the rdctl/wrctl builtins described by D.  rdctl's operand 0
   is a register output (so it has a target); wrctl's is not.  The
   control register number must satisfy rdwrctl_operand, i.e. be a
   constant in 0-31.  */
static rtx
nios2_expand_rdwrctl_builtin (tree exp, rtx target,
                              const struct nios2_builtin_desc *d)
{
  bool has_target_p = (insn_data[d->icode].operand[0].predicate
                       == register_operand);
  rtx ctlcode = expand_normal (CALL_EXPR_ARG (exp, 0));
  struct expand_operand ops[MAX_RECOG_OPERANDS];
  if (!rdwrctl_operand (ctlcode, VOIDmode))
    {
      error ("Control register number must be in range 0-31 for %s",
             d->name);
      /* Dummy result so expansion can continue after the error.  */
      return has_target_p ? gen_reg_rtx (SImode) : const0_rtx;
    }
  if (has_target_p)
    {
      /* rdctl: target = ctl[ctlcode].  */
      create_output_operand (&ops[0], target, SImode);
      create_integer_operand (&ops[1], INTVAL (ctlcode));
    }
  else
    {
      /* wrctl: ctl[ctlcode] = second argument.  */
      rtx val = expand_normal (CALL_EXPR_ARG (exp, 1));
      create_integer_operand (&ops[0], INTVAL (ctlcode));
      create_input_operand (&ops[1], val, SImode);
    }
  return nios2_expand_builtin_insn (d, 2, ops, has_target_p);
}
+
/* Implement TARGET_EXPAND_BUILTIN.  Expand an expression EXP that
   calls a built-in function, with result going to TARGET if that's
   convenient (and in mode MODE if that's convenient).  SUBTARGET may
   be used as the target for computing one of EXP's operands.  IGNORE
   is nonzero if the value is to be ignored.  Dispatches on the
   builtin-code ranges recorded by nios2_init_builtins.  */

static rtx
nios2_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
                      enum machine_mode mode ATTRIBUTE_UNUSED,
                      int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  if (fcode < nios2_fpu_builtin_base)
    {
      /* Fixed builtin range.  */
      const struct nios2_builtin_desc *d = &nios2_builtins[fcode];

      switch (fcode)
        {
        case NIOS2_BUILTIN_sync:
          emit_insn (gen_sync ());
          return const0_rtx;

        case NIOS2_BUILTIN_ldbio:
        case NIOS2_BUILTIN_ldbuio:
        case NIOS2_BUILTIN_ldhio:
        case NIOS2_BUILTIN_ldhuio:
        case NIOS2_BUILTIN_ldwio:
        case NIOS2_BUILTIN_stbio:
        case NIOS2_BUILTIN_sthio:
        case NIOS2_BUILTIN_stwio:
          return nios2_expand_ldstio_builtin (exp, target, d);

        case NIOS2_BUILTIN_rdctl:
        case NIOS2_BUILTIN_wrctl:
          return nios2_expand_rdwrctl_builtin (exp, target, d);

        default:
          gcc_unreachable ();
        }
    }
  else if (fcode < nios2_custom_builtin_base)
    /* FPU builtin range.  */
    return nios2_expand_fpu_builtin (exp, fcode - nios2_fpu_builtin_base,
                                     target);
  else if (fcode < nios2_custom_builtin_end)
    /* Custom insn builtin range.  */
    return nios2_expand_custom_builtin (exp, fcode - nios2_custom_builtin_base,
                                        target);
  else
    gcc_unreachable ();
}
+
/* Implement TARGET_INIT_LIBFUNCS.  */
static void
nios2_init_libfuncs (void)
{
  /* For Linux, we have access to kernel support for atomic operations.  */
  if (TARGET_LINUX_ABI)
    init_sync_libfuncs (UNITS_PER_WORD);
}
+
+\f
+
+
/* Register a custom code use, and signal error if a conflict was found.
   N is the custom instruction number (0-255), STATUS says whether the
   use comes from an FPU -mcustom-<insn> switch (CCS_FPU) or from a
   __builtin_custom_* call (CCS_BUILTIN_CALL), and INDEX identifies
   which FPU insn or which custom builtin is claiming N.  The new use
   is recorded unconditionally, even after reporting a conflict.  */
static void
nios2_register_custom_code (unsigned int N, enum nios2_ccs_code status,
                            int index)
{
  gcc_assert (N <= 255);

  if (status == CCS_FPU)
    {
      /* Conflict if N already belongs to a different FPU insn...  */
      if (custom_code_status[N] == CCS_FPU && index != custom_code_index[N])
        {
          custom_code_conflict = true;
          error ("switch `-mcustom-%s' conflicts with switch `-mcustom-%s'",
                 N2FPU_NAME (custom_code_index[N]), N2FPU_NAME (index));
        }
      /* ...or to a custom builtin call.  */
      else if (custom_code_status[N] == CCS_BUILTIN_CALL)
        {
          custom_code_conflict = true;
          error ("call to `__builtin_custom_%s' conflicts with switch "
                 "`-mcustom-%s'", custom_builtin_name[custom_code_index[N]],
                 N2FPU_NAME (index));
        }
    }
  else if (status == CCS_BUILTIN_CALL)
    {
      /* A builtin call conflicts only with an FPU use of N.  */
      if (custom_code_status[N] == CCS_FPU)
        {
          custom_code_conflict = true;
          error ("call to `__builtin_custom_%s' conflicts with switch "
                 "`-mcustom-%s'", custom_builtin_name[index],
                 N2FPU_NAME (custom_code_index[N]));
        }
      /* Code conflicts between different __builtin_custom_xnxx calls
         do not seem to be checked.  ??? */
    }
  else
    gcc_unreachable ();

  custom_code_status[N] = status;
  custom_code_index[N] = index;
}
+
+/* Mark a custom code as not in use. */
+static void
+nios2_deregister_custom_code (unsigned int N)
+{
+ if (N <= 255)
+ {
+ custom_code_status[N] = CCS_UNUSED;
+ custom_code_index[N] = 0;
+ }
+}
+
/* Target attributes can affect per-function option state, so we need to
   save/restore the custom code tracking info using the
   TARGET_OPTION_SAVE/TARGET_OPTION_RESTORE hooks.  */

/* Implement TARGET_OPTION_SAVE: snapshot the per-insn custom codes
   and the custom-code tracking arrays into PTR.  */
static void
nios2_option_save (struct cl_target_option *ptr)
{
  unsigned int i;
  for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
    ptr->saved_fpu_custom_code[i] = N2FPU_N (i);
  memcpy (ptr->saved_custom_code_status, custom_code_status,
          sizeof (custom_code_status));
  memcpy (ptr->saved_custom_code_index, custom_code_index,
          sizeof (custom_code_index));
}
+
/* Implement TARGET_OPTION_RESTORE: the inverse of nios2_option_save,
   copying the saved custom-code state out of PTR.  */
static void
nios2_option_restore (struct cl_target_option *ptr)
{
  unsigned int i;
  for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
    N2FPU_N (i) = ptr->saved_fpu_custom_code[i];
  memcpy (custom_code_status, ptr->saved_custom_code_status,
          sizeof (custom_code_status));
  memcpy (custom_code_index, ptr->saved_custom_code_index,
          sizeof (custom_code_index));
}
+
/* Inner function to process the attribute((target(...))), take an argument and
   set the current options from the argument.  If we have a list, recursively go
   over the list.  Returns false (after issuing an error) on malformed
   input.  */

static bool
nios2_valid_target_attribute_rec (tree args)
{
  if (TREE_CODE (args) == TREE_LIST)
    {
      /* Recurse over each non-null list element; report failure if
         any element fails, but keep processing the rest.  */
      bool ret = true;
      for (; args; args = TREE_CHAIN (args))
        if (TREE_VALUE (args)
            && !nios2_valid_target_attribute_rec (TREE_VALUE (args)))
          ret = false;
      return ret;
    }
  else if (TREE_CODE (args) == STRING_CST)
    {
      /* Parse a comma-separated list of options of the form
         [no-]name[=value], destructively splitting a stack-allocated
         copy of the string.  */
      char *argstr = ASTRDUP (TREE_STRING_POINTER (args));
      while (argstr && *argstr != '\0')
        {
          bool no_opt = false, end_p = false;
          char *eq = NULL, *p;
          /* Skip leading whitespace.  */
          while (ISSPACE (*argstr))
            argstr++;
          p = argstr;
          /* Find the end of this option, remembering the first '='.  */
          while (*p != '\0' && *p != ',')
            {
              if (!eq && *p == '=')
                eq = p;
              ++p;
            }
          /* NUL-terminate the option name (and value, if any).  */
          if (*p == '\0')
            end_p = true;
          else
            *p = '\0';
          if (eq) *eq = '\0';

          /* Strip a "no-" prefix.  */
          if (!strncmp (argstr, "no-", 3))
            {
              no_opt = true;
              argstr += 3;
            }
          if (!strncmp (argstr, "custom-fpu-cfg", 14))
            {
              /* custom-fpu-cfg=<name>: cannot be negated and needs an
                 argument.  */
              if (no_opt)
                {
                  error ("custom-fpu-cfg option does not support `no-'");
                  return false;
                }
              if (!eq)
                {
                  error ("custom-fpu-cfg option requires configuration"
                         " argument");
                  return false;
                }
              /* Increment and skip whitespace.  */
              while (ISSPACE (*(++eq))) ;
              nios2_handle_custom_fpu_cfg (eq, true);
            }
          else if (!strncmp (argstr, "custom-", 7))
            {
              /* custom-<insn>[=N]: match the insn name against the
                 FPU insn table.  */
              int code = -1;
              unsigned int i;
              for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
                if (!strncmp (argstr + 7, N2FPU_NAME (i),
                              strlen (N2FPU_NAME (i))))
                  {
                    /* Found insn.  */
                    code = i;
                    break;
                  }
              if (code >= 0)
                {
                  if (no_opt)
                    {
                      /* no-custom-<insn> takes no '=' argument.  */
                      if (eq)
                        {
                          error ("`no-custom-%s' does not accept arguments",
                                 N2FPU_NAME (code));
                          return false;
                        }
                      /* Disable option by setting to -1.  */
                      nios2_deregister_custom_code (N2FPU_N (code));
                      N2FPU_N (code) = -1;
                    }
                  else
                    {
                      /* custom-<insn>=N: require a wholly numeric
                         argument.  */
                      char *t;
                      if (eq)
                        while (ISSPACE (*(++eq))) ;
                      if (!eq || eq == p)
                        {
                          error ("`custom-%s=' requires argument",
                                 N2FPU_NAME (code));
                          return false;
                        }
                      for (t = eq; t != p; ++t)
                        {
                          if (ISSPACE (*t))
                            continue;
                          if (!ISDIGIT (*t))
                            {
                              error ("`custom-%s=' argument requires "
                                     "numeric digits", N2FPU_NAME (code));
                              return false;
                            }
                        }
                      /* Set option to argument.  */
                      N2FPU_N (code) = atoi (eq);
                      nios2_handle_custom_fpu_insn_option (code);
                    }
                }
              else
                {
                  error ("`custom-%s=' is not recognised as FPU instruction",
                         argstr + 7);
                  return false;
                }
            }
          else
            {
              error ("`%s' is unknown", argstr);
              return false;
            }

          /* Advance to the option after the comma, if any.  */
          if (end_p)
            break;
          else
            argstr = p + 1;
        }
      return true;
    }
  else
    gcc_unreachable ();
}
+
/* Return a TARGET_OPTION_NODE tree of the target options listed or NULL.
   On success, also re-checks the resulting custom insn assignments for
   conflicts before capturing the option state.  */

static tree
nios2_valid_target_attribute_tree (tree args)
{
  if (!nios2_valid_target_attribute_rec (args))
    return NULL_TREE;
  nios2_custom_check_insns ();
  return build_target_option_node ();
}
+
/* Hook to validate attribute((target("string"))).  Parses ARGS against
   the current global options, records the result on FNDECL, and then
   restores the global option state it found on entry.  */

static bool
nios2_valid_target_attribute_p (tree fndecl,
                                tree ARG_UNUSED (name),
                                tree args,
                                int ARG_UNUSED (flags))
{
  struct cl_target_option cur_target;
  bool ret = true;
  tree old_optimize = build_optimization_node ();
  tree new_target, new_optimize;
  tree func_optimize = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl);

  /* If the function changed the optimization levels as well as setting target
     options, start with the optimizations specified.  */
  if (func_optimize && func_optimize != old_optimize)
    cl_optimization_restore (&global_options,
                             TREE_OPTIMIZATION (func_optimize));

  /* The target attributes may also change some optimization flags, so update
     the optimization options if necessary.  */
  cl_target_option_save (&cur_target, &global_options);
  new_target = nios2_valid_target_attribute_tree (args);
  new_optimize = build_optimization_node ();

  if (!new_target)
    ret = false;

  else if (fndecl)
    {
      /* Attach the validated option state to the function.  */
      DECL_FUNCTION_SPECIFIC_TARGET (fndecl) = new_target;

      if (old_optimize != new_optimize)
        DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl) = new_optimize;
    }

  /* Undo the temporary changes to the global option state.  */
  cl_target_option_restore (&global_options, &cur_target);

  if (old_optimize != new_optimize)
    cl_optimization_restore (&global_options,
                             TREE_OPTIMIZATION (old_optimize));
  return ret;
}
+
+/* Remember the last target of nios2_set_current_function. */
+static GTY(()) tree nios2_previous_fndecl;
+
+/* Establish appropriate back-end context for processing the function
+ FNDECL. The argument might be NULL to indicate processing at top
+ level, outside of any function scope. */
+static void
+nios2_set_current_function (tree fndecl)
+{
+ tree old_tree = (nios2_previous_fndecl
+ ? DECL_FUNCTION_SPECIFIC_TARGET (nios2_previous_fndecl)
+ : NULL_TREE);
+
+ tree new_tree = (fndecl
+ ? DECL_FUNCTION_SPECIFIC_TARGET (fndecl)
+ : NULL_TREE);
+
+ if (fndecl && fndecl != nios2_previous_fndecl)
+ {
+ nios2_previous_fndecl = fndecl;
+ if (old_tree == new_tree)
+ ;
+
+ else if (new_tree)
+ {
+ cl_target_option_restore (&global_options,
+ TREE_TARGET_OPTION (new_tree));
+ target_reinit ();
+ }
+
+ else if (old_tree)
+ {
+ struct cl_target_option *def
+ = TREE_TARGET_OPTION (target_option_current_node);
+
+ cl_target_option_restore (&global_options, def);
+ target_reinit ();
+ }
+ }
+}
+
+/* Hook to validate the current #pragma GCC target and set the FPU custom
+ code option state. If ARGS is NULL, then POP_TARGET is used to reset
+ the options. */
+
+static bool
+nios2_pragma_target_parse (tree args, tree pop_target)
+{
+ tree cur_tree;
+ if (! args)
+ {
+ cur_tree = ((pop_target)
+ ? pop_target
+ : target_option_default_node);
+ cl_target_option_restore (&global_options,
+ TREE_TARGET_OPTION (cur_tree));
+ }
+ else
+ {
+ cur_tree = nios2_valid_target_attribute_tree (args);
+ if (!cur_tree)
+ return false;
+ }
+
+ target_option_current_node = cur_tree;
+ return true;
+}
+
/* Implement TARGET_MERGE_DECL_ATTRIBUTES.
   We are just using this hook to add some additional error checking to
   the default behavior.  GCC does not provide a target hook for merging
   the target options, and only correctly handles merging empty vs non-empty
   option data; see merge_decls() in c-decl.c.
   So here we require either that at least one of the decls has empty
   target options, or that the target options/data be identical.  */
static tree
nios2_merge_decl_attributes (tree olddecl, tree newdecl)
{
  tree oldopts = lookup_attribute ("target", DECL_ATTRIBUTES (olddecl));
  tree newopts = lookup_attribute ("target", DECL_ATTRIBUTES (newdecl));
  if (newopts && oldopts && newopts != oldopts)
    {
      tree oldtree = DECL_FUNCTION_SPECIFIC_TARGET (olddecl);
      tree newtree = DECL_FUNCTION_SPECIFIC_TARGET (newdecl);
      if (oldtree && newtree && oldtree != newtree)
        {
          /* Both decls carry option data: they must match bit for
             bit, or the redeclaration is an error.  */
          struct cl_target_option *olddata = TREE_TARGET_OPTION (oldtree);
          struct cl_target_option *newdata = TREE_TARGET_OPTION (newtree);
          if (olddata != newdata
              && memcmp (olddata, newdata, sizeof (struct cl_target_option)))
            error ("%qE redeclared with conflicting %qs attributes",
                   DECL_NAME (newdecl), "target");
        }
    }
  /* Defer the actual merging to the generic routine.  */
  return merge_attributes (DECL_ATTRIBUTES (olddecl),
                           DECL_ATTRIBUTES (newdecl));
}
+
+#include "gt-nios2.h"