static bool mips_classify_address (struct mips_address_info *, rtx,
enum machine_mode, int);
static bool mips_cannot_force_const_mem (rtx);
+static bool mips_use_blocks_for_constant_p (enum machine_mode, rtx);
static int mips_symbol_insns (enum mips_symbol_type);
static bool mips16_unextended_reference_p (enum machine_mode mode, rtx, rtx);
static rtx mips_force_temporary (rtx, rtx);
-static rtx mips_split_symbol (rtx, rtx);
static rtx mips_unspec_offset_high (rtx, rtx, rtx, enum mips_symbol_type);
static rtx mips_add_offset (rtx, rtx, HOST_WIDE_INT);
static unsigned int mips_build_shift (struct mips_integer_op *, HOST_WIDE_INT);
unsigned HOST_WIDE_INT);
static unsigned int mips_build_integer (struct mips_integer_op *,
unsigned HOST_WIDE_INT);
-static void mips_move_integer (rtx, unsigned HOST_WIDE_INT);
static void mips_legitimize_const_move (enum machine_mode, rtx, rtx);
static int m16_check_op (rtx, int, int, int);
static bool mips_rtx_costs (rtx, int, int, int *);
unsigned HOST_WIDE_INT);
static section *mips_function_rodata_section (tree);
static bool mips_in_small_data_p (tree);
+static bool mips_use_anchors_for_symbol_p (rtx);
static int mips_fpr_return_fields (tree, tree *);
static bool mips_return_in_msb (tree);
static rtx mips_return_fpr_pair (enum machine_mode mode,
static rtx mips_expand_builtin_bposge (enum mips_builtin_type, rtx);
static void mips_encode_section_info (tree, rtx, int);
static void mips_extra_live_on_entry (bitmap);
+static int mips_mode_rep_extended (enum machine_mode, enum machine_mode);
/* Structure to be filled in by compute_frame_size with register
save masks, and offsets for the current function. */
/* mips_split_p[X] is true if symbols of type X can be split by
mips_split_symbol(). */
-static bool mips_split_p[NUM_SYMBOL_TYPES];
+bool mips_split_p[NUM_SYMBOL_TYPES];
/* mips_lo_relocs[X] is the relocation to use when a symbol of type X
appears in a LO_SUM. It can be null if such LO_SUMs aren't valid or
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES mips_arg_partial_bytes
+#undef TARGET_MODE_REP_EXTENDED
+#define TARGET_MODE_REP_EXTENDED mips_mode_rep_extended
+
#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P mips_vector_mode_supported_p
#define TARGET_ATTRIBUTE_TABLE mips_attribute_table
#undef TARGET_EXTRA_LIVE_ON_ENTRY
-/* With -mabicalls (which is the default on GNU/Linux),
- PIC_FUNCTION_ADDR_REGNUM is live on function entry and is to
- initialize $28, which is PIC_OFFSET_TABLE_REGNUM. */
#define TARGET_EXTRA_LIVE_ON_ENTRY mips_extra_live_on_entry
+#undef TARGET_MIN_ANCHOR_OFFSET
+#define TARGET_MIN_ANCHOR_OFFSET -32768
+#undef TARGET_MAX_ANCHOR_OFFSET
+#define TARGET_MAX_ANCHOR_OFFSET 32767
+#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
+#define TARGET_USE_BLOCKS_FOR_CONSTANT_P mips_use_blocks_for_constant_p
+#undef TARGET_USE_ANCHORS_FOR_SYMBOL_P
+#define TARGET_USE_ANCHORS_FOR_SYMBOL_P mips_use_anchors_for_symbol_p
+
struct gcc_target targetm = TARGET_INITIALIZER;
\f
/* Classify symbol X, which must be a SYMBOL_REF or a LABEL_REF. */
{
if (TARGET_MIPS16)
return SYMBOL_CONSTANT_POOL;
- if (TARGET_ABICALLS)
+ if (TARGET_ABICALLS && !TARGET_ABSOLUTE_ABICALLS)
return SYMBOL_GOT_LOCAL;
return SYMBOL_GENERAL;
}
if (TARGET_MIPS16)
return SYMBOL_CONSTANT_POOL;
- if (TARGET_ABICALLS)
- return SYMBOL_GOT_LOCAL;
-
if (GET_MODE_SIZE (get_pool_mode (x)) <= mips_section_threshold)
return SYMBOL_SMALL_DATA;
-
- return SYMBOL_GENERAL;
}
if (SYMBOL_REF_SMALL_P (x))
if (TARGET_ABICALLS)
{
if (SYMBOL_REF_DECL (x) == 0)
- return SYMBOL_REF_LOCAL_P (x) ? SYMBOL_GOT_LOCAL : SYMBOL_GOT_GLOBAL;
-
- /* There are three cases to consider:
-
- - o32 PIC (either with or without explicit relocs)
- - n32/n64 PIC without explicit relocs
- - n32/n64 PIC with explicit relocs
-
- In the first case, both local and global accesses will use an
- R_MIPS_GOT16 relocation. We must correctly predict which of
- the two semantics (local or global) the assembler and linker
- will apply. The choice doesn't depend on the symbol's
- visibility, so we deliberately ignore decl_visibility and
- binds_local_p here.
-
- In the second case, the assembler will not use R_MIPS_GOT16
- relocations, but it chooses between local and global accesses
- in the same way as for o32 PIC.
-
- In the third case we have more freedom since both forms of
- access will work for any kind of symbol. However, there seems
- little point in doing things differently. */
- if (DECL_P (SYMBOL_REF_DECL (x)) && TREE_PUBLIC (SYMBOL_REF_DECL (x)))
- return SYMBOL_GOT_GLOBAL;
+ {
+ if (!SYMBOL_REF_LOCAL_P (x))
+ return SYMBOL_GOT_GLOBAL;
+ }
+ else
+ {
+ /* Don't use GOT accesses for locally-binding symbols if
+ TARGET_ABSOLUTE_ABICALLS. Otherwise, there are three
+ cases to consider:
+
+ - o32 PIC (either with or without explicit relocs)
+ - n32/n64 PIC without explicit relocs
+ - n32/n64 PIC with explicit relocs
+
+ In the first case, both local and global accesses will use an
+ R_MIPS_GOT16 relocation. We must correctly predict which of
+ the two semantics (local or global) the assembler and linker
+ will apply. The choice doesn't depend on the symbol's
+ visibility, so we deliberately ignore decl_visibility and
+ binds_local_p here.
+
+ In the second case, the assembler will not use R_MIPS_GOT16
+ relocations, but it chooses between local and global accesses
+ in the same way as for o32 PIC.
+
+ In the third case we have more freedom since both forms of
+ access will work for any kind of symbol. However, there seems
+ little point in doing things differently. */
+ if (DECL_P (SYMBOL_REF_DECL (x))
+ && TREE_PUBLIC (SYMBOL_REF_DECL (x))
+ && !(TARGET_ABSOLUTE_ABICALLS
+ && targetm.binds_local_p (SYMBOL_REF_DECL (x))))
+ return SYMBOL_GOT_GLOBAL;
+ }
- return SYMBOL_GOT_LOCAL;
+ if (!TARGET_ABSOLUTE_ABICALLS)
+ return SYMBOL_GOT_LOCAL;
}
return SYMBOL_GENERAL;
/* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
- to the same object as SYMBOL. */
+ to the same object as SYMBOL, or to the same object_block. */
static bool
mips_offset_within_object_p (rtx symbol, HOST_WIDE_INT offset)
&& offset < int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (symbol))))
return true;
+ if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
+ && SYMBOL_REF_BLOCK (symbol)
+ && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
+ && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
+ < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
+ return true;
+
return false;
}
}
-/* Return true if X is a symbolic constant whose value is not split
- into separate relocations. */
-
-bool
-mips_atomic_symbolic_constant_p (rtx x)
-{
- enum mips_symbol_type type;
- return mips_symbolic_constant_p (x, &type) && !mips_split_p[type];
-}
-
-
/* This function is used to implement REG_MODE_OK_FOR_BASE_P. */
int
/* The address will have to be loaded from the GOT first. */
return false;
- case SYMBOL_TLSGD:
- case SYMBOL_TLSLDM:
- case SYMBOL_DTPREL:
- case SYMBOL_TPREL:
- case SYMBOL_GOTTPREL:
- case SYMBOL_TLS:
- return false;
-
case SYMBOL_GOTOFF_PAGE:
case SYMBOL_GOTOFF_GLOBAL:
case SYMBOL_GOTOFF_CALL:
case SYMBOL_GOTOFF_LOADGP:
+ case SYMBOL_TLS:
+ case SYMBOL_TLSGD:
+ case SYMBOL_TLSLDM:
+ case SYMBOL_DTPREL:
+ case SYMBOL_GOTTPREL:
+ case SYMBOL_TPREL:
case SYMBOL_64_HIGH:
case SYMBOL_64_MID:
case SYMBOL_64_LOW:
static bool
mips_cannot_force_const_mem (rtx x)
{
- if (! TARGET_HAVE_TLS)
- return false;
+ rtx base;
+ HOST_WIDE_INT offset;
+
+ if (!TARGET_MIPS16)
+ {
+ /* As an optimization, reject constants that mips_legitimize_move
+ can expand inline.
+
+ Suppose we have a multi-instruction sequence that loads constant C
+ into register R. If R does not get allocated a hard register, and
+ R is used in an operand that allows both registers and memory
+ references, reload will consider forcing C into memory and using
+ one of the instruction's memory alternatives. Returning false
+ here will force it to use an input reload instead. */
+ if (GET_CODE (x) == CONST_INT)
+ return true;
+
+ mips_split_const (x, &base, &offset);
+ if (symbolic_operand (base, VOIDmode) && SMALL_OPERAND (offset))
+ return true;
+ }
+
+ /* Reject anything containing a TLS symbol; its value is presumably
+ computed at run time and cannot live in the constant pool --
+ NOTE(review): confirm against the TLS expansion code. */
+ if (TARGET_HAVE_TLS && for_each_rtx (&x, &mips_tls_symbol_ref_1, 0))
+ return true;
- return for_each_rtx (&x, &mips_tls_symbol_ref_1, 0);
+ return false;
+}
+
+/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P.  MIPS16 uses per-function
+ constant pools, but normal-mode code doesn't need to. */
+
+static bool
+mips_use_blocks_for_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED,
+ rtx x ATTRIBUTE_UNUSED)
+{
+ /* The decision depends only on whether we are generating mips16
+ code, so both arguments are intentionally unused. */
+ return !TARGET_MIPS16;
}
\f
/* Return the number of instructions needed to load a symbol of the
/* Return a LO_SUM expression for ADDR. TEMP is as for mips_force_temporary
and is used to load the high part into a register. */
-static rtx
+rtx
mips_split_symbol (rtx temp, rtx addr)
{
rtx high;
}
-/* Move VALUE into register DEST. */
+/* Load VALUE into DEST, using TEMP as a temporary register if need be. */
-static void
-mips_move_integer (rtx dest, unsigned HOST_WIDE_INT value)
+void
+mips_move_integer (rtx dest, rtx temp, unsigned HOST_WIDE_INT value)
{
struct mips_integer_op codes[MIPS_MAX_INTEGER_OPS];
enum machine_mode mode;
for (i = 1; i < cost; i++)
{
if (no_new_pseudos)
- emit_move_insn (dest, x), x = dest;
+ {
+ emit_insn (gen_rtx_SET (VOIDmode, temp, x));
+ x = temp;
+ }
else
x = force_reg (mode, x);
x = gen_rtx_fmt_ee (codes[i].code, mode, x, GEN_INT (codes[i].value));
{
rtx base;
HOST_WIDE_INT offset;
- enum mips_symbol_type symbol_type;
- /* Split moves of big integers into smaller pieces. In mips16 code,
- it's better to force the constant into memory instead. */
- if (GET_CODE (src) == CONST_INT && !TARGET_MIPS16)
+ /* Split moves of big integers into smaller pieces. */
+ if (splittable_const_int_operand (src, mode))
{
- mips_move_integer (dest, INTVAL (src));
+ mips_move_integer (dest, dest, INTVAL (src));
return;
}
- if (mips_tls_operand_p (src))
+ /* Split moves of symbolic constants into high/low pairs. */
+ if (splittable_symbolic_operand (src, mode))
{
- emit_move_insn (dest, mips_legitimize_tls_address (src));
+ emit_insn (gen_rtx_SET (VOIDmode, dest, mips_split_symbol (dest, src)));
return;
}
- /* See if the symbol can be split. For mips16, this is often worse than
- forcing it in the constant pool since it needs the single-register form
- of addiu or daddiu. */
- if (!TARGET_MIPS16
- && mips_symbolic_constant_p (src, &symbol_type)
- && mips_split_p[symbol_type])
+ if (mips_tls_operand_p (src))
{
- emit_move_insn (dest, mips_split_symbol (dest, src));
+ emit_move_insn (dest, mips_legitimize_tls_address (src));
return;
}
cmp0, cmp1, 0, 0, OPTAB_DIRECT);
}
+/* Convert *CODE into a code that can be used in a floating-point
+ scc instruction (c.<cond>.<fmt>).  Return true if the values of
+ the condition code registers will be inverted, with 0 indicating
+ that the condition holds. */
+
+static bool
+mips_reverse_fp_cond_p (enum rtx_code *code)
+{
+ switch (*code)
+ {
+ /* These codes presumably have no direct c.<cond>.<fmt> encoding;
+ test the (maybe-unordered) reverse condition instead and tell
+ the caller that the result is inverted -- NOTE(review): confirm
+ against the ISA's list of FP compare conditions. */
+ case NE:
+ case LTGT:
+ case ORDERED:
+ *code = reverse_condition_maybe_unordered (*code);
+ return true;
+
+ default:
+ return false;
+ }
+}
+
/* Convert a comparison into something that can be used in a branch or
conditional move. cmp_operands[0] and cmp_operands[1] are the values
being compared and *CODE is the code used to compare them.
Set CMP_CODE to the code of the comparison instruction and
*CODE to the code that the branch or move should use. */
- switch (*code)
- {
- case NE:
- case LTGT:
- case ORDERED:
- cmp_code = reverse_condition_maybe_unordered (*code);
- *code = EQ;
- break;
-
- default:
- cmp_code = *code;
- *code = NE;
- break;
- }
+ cmp_code = *code;
+ *code = mips_reverse_fp_cond_p (&cmp_code) ? EQ : NE;
*op0 = (ISA_HAS_8CC
? gen_reg_rtx (CCmode)
: gen_rtx_REG (CCmode, FPSW_REGNUM));
emit_jump_insn (gen_condjump (condition, operands[0]));
}
+/* Implement:
+
+ (set temp (COND:CCV2 CMP_OP0 CMP_OP1))
+ (set DEST (unspec [TRUE_SRC FALSE_SRC temp] UNSPEC_MOVE_TF_PS)) */
+
+void
+mips_expand_vcondv2sf (rtx dest, rtx true_src, rtx false_src,
+ enum rtx_code cond, rtx cmp_op0, rtx cmp_op1)
+{
+ rtx cmp_result;
+ bool reversed_p;
+
+ reversed_p = mips_reverse_fp_cond_p (&cond);
+ cmp_result = gen_reg_rtx (CCV2mode);
+ emit_insn (gen_scc_ps (cmp_result,
+ gen_rtx_fmt_ee (cond, VOIDmode, cmp_op0, cmp_op1)));
+ /* If the comparison had to be reversed, the condition registers hold
+ the inverse result, so swap the true and false sources of the
+ conditional move to compensate. */
+ if (reversed_p)
+ emit_insn (gen_mips_cond_move_tf_ps (dest, false_src, true_src,
+ cmp_result));
+ else
+ emit_insn (gen_mips_cond_move_tf_ps (dest, true_src, false_src,
+ cmp_result));
+}
+
/* Emit the common code for conditional moves. OPERANDS is the array
of operands passed to the conditional move define_expand. */
target_flags &= ~MASK_ABICALLS;
}
- /* -fpic (-KPIC) is the default when TARGET_ABICALLS is defined. We need
- to set flag_pic so that the LEGITIMATE_PIC_OPERAND_P macro will work. */
- /* ??? -non_shared turns off pic code generation, but this is not
- implemented. */
if (TARGET_ABICALLS)
{
+ /* We need to set flag_pic for executables as well as DSOs
+ because we may reference symbols that are not defined in
+ the final executable. (MIPS does not use things like
+ copy relocs, for example.)
+
+ Also, there is a body of code that uses __PIC__ to distinguish
+ between -mabicalls and -mno-abicalls code. */
flag_pic = 1;
if (mips_section_threshold > 0)
- warning (0, "-G is incompatible with PIC code which is the default");
+ warning (0, "%<-G%> is incompatible with %<-mabicalls%>");
}
/* mips_split_addresses is a half-way house between explicit
/* There is no ELF header flag to distinguish long32 forms of the
EABI from long64 forms. Emit a special section to help tools
- such as GDB. */
- if (mips_abi == ABI_EABI)
+ such as GDB. Do the same for o64, which is sometimes used with
+ -mlong64. */
+ if (mips_abi == ABI_EABI || mips_abi == ABI_O64)
fprintf (asm_out_file, "\t.section .gcc_compiled_long%d\n",
TARGET_LONG64 ? 64 : 32);
/* Generate the pseudo ops that System V.4 wants. */
if (TARGET_ABICALLS)
- /* ??? but do not want this (or want pic0) if -non-shared? */
fprintf (asm_out_file, "\t.abicalls\n");
if (TARGET_MIPS16)
output_asm_insn (".cplocal %+", 0);
}
+/* Return the style of GP load sequence that is being used for the
+ current function. */
+
+enum mips_loadgp_style
+mips_current_loadgp_style (void)
+{
+ /* No sequence is needed when not generating abicalls code or when
+ the current function has no global pointer register assigned. */
+ if (!TARGET_ABICALLS || cfun->machine->global_pointer == 0)
+ return LOADGP_NONE;
+
+ if (TARGET_ABSOLUTE_ABICALLS)
+ return LOADGP_ABSOLUTE;
+
+ return TARGET_NEWABI ? LOADGP_NEWABI : LOADGP_OLDABI;
+}
+
+/* The __gnu_local_gp symbol. */
+
+static GTY(()) rtx mips_gnu_local_gp;
+
/* If we're generating n32 or n64 abicalls, emit instructions
to set up the global pointer. */
static void
mips_emit_loadgp (void)
{
- if (TARGET_ABICALLS && TARGET_NEWABI && cfun->machine->global_pointer > 0)
+ rtx addr, offset, incoming_address;
+
+ switch (mips_current_loadgp_style ())
{
- rtx addr, offset, incoming_address;
+ case LOADGP_ABSOLUTE:
+ /* Load $gp from __gnu_local_gp, created lazily and marked local;
+ presumably a linker-provided symbol -- NOTE(review): confirm. */
+ if (mips_gnu_local_gp == NULL)
+ {
+ mips_gnu_local_gp = gen_rtx_SYMBOL_REF (Pmode, "__gnu_local_gp");
+ SYMBOL_REF_FLAGS (mips_gnu_local_gp) |= SYMBOL_FLAG_LOCAL;
+ }
+ emit_insn (gen_loadgp_noshared (mips_gnu_local_gp));
+ break;

+ case LOADGP_NEWABI:
addr = XEXP (DECL_RTL (current_function_decl), 0);
offset = mips_unspec_address (addr, SYMBOL_GOTOFF_LOADGP);
incoming_address = gen_rtx_REG (Pmode, PIC_FUNCTION_ADDR_REGNUM);
emit_insn (gen_loadgp (offset, incoming_address));
if (!TARGET_EXPLICIT_RELOCS)
emit_insn (gen_loadgp_blockage ());
+ break;
+
+ /* LOADGP_OLDABI is handled by separate SVR4 PIC prologue code and
+ LOADGP_NONE requires nothing. */
+ default:
+ break;
}
}
HIGHEST_GP_SAVED == *FRAMEREG + FRAMESIZE + GPOFFSET => can find saved regs. */
}
- if (TARGET_ABICALLS && !TARGET_NEWABI && cfun->machine->global_pointer > 0)
+ if (mips_current_loadgp_style () == LOADGP_OLDABI)
{
/* Handle the initialization of $gp for SVR4 PIC. */
if (!cfun->machine->all_noreorder_p)
stack_pointer_rtx)) = 1;
}
+ mips_emit_loadgp ();
+
/* If generating o32/o64 abicalls, save $gp on the stack. */
if (TARGET_ABICALLS && !TARGET_NEWABI && !current_function_is_leaf)
emit_insn (gen_cprestore (GEN_INT (current_function_outgoing_args_size)));
- mips_emit_loadgp ();
-
/* If we are profiling, make sure no instructions are scheduled before
the call to mcount. */
size = int_size_in_bytes (TREE_TYPE (decl));
return (size > 0 && size <= mips_section_threshold);
}
+
+/* Implement TARGET_USE_ANCHORS_FOR_SYMBOL_P.  We don't want to use
+ anchors for small data: the GP register acts as an anchor in that
+ case.  We also don't want to use them for PC-relative accesses,
+ where the PC acts as an anchor. */
+
+static bool
+mips_use_anchors_for_symbol_p (rtx symbol)
+{
+ switch (mips_classify_symbol (symbol))
+ {
+ case SYMBOL_CONSTANT_POOL: /* PC-relative (mips16 constant pools). */
+ case SYMBOL_SMALL_DATA: /* Accessed relative to the GP register. */
+ return false;
+
+ default:
+ return true;
+ }
+}
\f
/* See whether VALTYPE is a record whose fields should be returned in
floating-point registers. If so, return the number of fields and
}
}
-/* Implement TARGET_EXTRA_LIVE_ON_ENTRY.  TARGET_ABICALLS makes
- PIC_FUNCTION_ADDR_REGNUM live on entry to a function. */
+/* Implement TARGET_EXTRA_LIVE_ON_ENTRY.  PIC_FUNCTION_ADDR_REGNUM is live
+ on entry to a function when generating -mshared abicalls code. */
static void
mips_extra_live_on_entry (bitmap regs)
{
- if (!TARGET_ABICALLS)
+ /* Absolute abicalls code loads $gp from __gnu_local_gp (see
+ mips_emit_loadgp) rather than deriving it from the incoming
+ function address, so $25 need not be live on entry there. */
+ if (TARGET_ABICALLS && !TARGET_ABSOLUTE_ABICALLS)
bitmap_set_bit (regs, PIC_FUNCTION_ADDR_REGNUM);
}
+/* Implement TARGET_MODE_REP_EXTENDED.  On 64-bit targets, SImode
+ values are represented as sign-extended to DImode. */
+
+static int
+mips_mode_rep_extended (enum machine_mode mode, enum machine_mode mode_rep)
+{
+ /* Only the SImode-in-DImode pairing carries a representation
+ guarantee; every other combination is unknown. */
+ if (TARGET_64BIT && mode == SImode && mode_rep == DImode)
+ return SIGN_EXTEND;
+
+ return UNKNOWN;
+}
\f
#include "gt-mips.h"