#include "insn-attr.h"
#include "flags.h"
#include "function.h"
+#include "except.h"
#include "expr.h"
#include "optabs.h"
#include "recog.h"
-#include "toplev.h"
+#include "diagnostic-core.h"
#include "ggc.h"
#include "tm_p.h"
#include "debug.h"
#include "cfglayout.h"
#include "gimple.h"
#include "langhooks.h"
+#include "reload.h"
#include "params.h"
#include "df.h"
#include "dwarf2out.h"
int sparc_indent_opcode = 0;
static bool sparc_handle_option (size_t, const char *, int);
+static void sparc_option_override (void);
static void sparc_init_modes (void);
-static void scan_record_type (tree, int *, int *, int *);
+static void scan_record_type (const_tree, int *, int *, int *);
static int function_arg_slotno (const CUMULATIVE_ARGS *, enum machine_mode,
- tree, int, int, int *, int *);
+ const_tree, bool, bool, int *, int *);
static int supersparc_adjust_cost (rtx, rtx, rtx, int);
static int hypersparc_adjust_cost (rtx, rtx, rtx, int);
+static void sparc_emit_set_const32 (rtx, rtx);
+static void sparc_emit_set_const64 (rtx, rtx);
static void sparc_output_addr_vec (rtx);
static void sparc_output_addr_diff_vec (rtx);
static void sparc_output_deferred_case_vectors (void);
static const char *get_some_local_dynamic_name (void);
static int get_some_local_dynamic_name_1 (rtx *, void *);
static bool sparc_rtx_costs (rtx, int, int, int *, bool);
-static bool sparc_promote_prototypes (const_tree);
+static rtx sparc_function_value (const_tree, const_tree, bool);
+static rtx sparc_libcall_value (enum machine_mode, const_rtx);
+static bool sparc_function_value_regno_p (const unsigned int);
static rtx sparc_struct_value_rtx (tree, int);
static enum machine_mode sparc_promote_function_mode (const_tree, enum machine_mode,
int *, const_tree, int);
static tree sparc_gimplify_va_arg (tree, tree, gimple_seq *, gimple_seq *);
static bool sparc_vector_mode_supported_p (enum machine_mode);
static bool sparc_tls_referenced_p (rtx);
-static rtx legitimize_tls_address (rtx);
-static rtx legitimize_pic_address (rtx, rtx);
+static rtx sparc_legitimize_tls_address (rtx);
+static rtx sparc_legitimize_pic_address (rtx, rtx);
static rtx sparc_legitimize_address (rtx, rtx, enum machine_mode);
+static rtx sparc_delegitimize_address (rtx);
static bool sparc_mode_dependent_address_p (const_rtx);
static bool sparc_pass_by_reference (CUMULATIVE_ARGS *,
enum machine_mode, const_tree, bool);
+static void sparc_function_arg_advance (CUMULATIVE_ARGS *,
+ enum machine_mode, const_tree, bool);
+static rtx sparc_function_arg_1 (const CUMULATIVE_ARGS *,
+ enum machine_mode, const_tree, bool, bool);
+static rtx sparc_function_arg (CUMULATIVE_ARGS *,
+ enum machine_mode, const_tree, bool);
+static rtx sparc_function_incoming_arg (CUMULATIVE_ARGS *,
+ enum machine_mode, const_tree, bool);
+static unsigned int sparc_function_arg_boundary (enum machine_mode,
+ const_tree);
static int sparc_arg_partial_bytes (CUMULATIVE_ARGS *,
enum machine_mode, tree, bool);
static void sparc_dwarf_handle_frame_unspec (const char *, rtx, int);
static void sparc_file_end (void);
static bool sparc_frame_pointer_required (void);
static bool sparc_can_eliminate (const int, const int);
+static void sparc_conditional_register_usage (void);
#ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
static const char *sparc_mangle_type (const_tree);
#endif
static void sparc_trampoline_init (rtx, tree, rtx);
+static enum machine_mode sparc_preferred_simd_mode (enum machine_mode);
\f
#ifdef SUBTARGET_ATTRIBUTE_TABLE
/* Table of valid machine attributes. */
/* Whether an FPU option was specified. */
static bool fpu_option_set = false;
+/* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
+/* Turn on -fomit-frame-pointer from -O1 upward; the zero-filled
+   OPT_LEVELS_NONE entry is the table terminator.  */
+static const struct default_options sparc_option_optimization_table[] =
+ {
+ { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
+ { OPT_LEVELS_NONE, 0, NULL, 0 }
+ };
+
/* Initialize the GCC target structure. */
/* The default is to use .half rather than .short for aligned HI objects. */
#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS sparc_legitimize_address
+#undef TARGET_DELEGITIMIZE_ADDRESS
+#define TARGET_DELEGITIMIZE_ADDRESS sparc_delegitimize_address
#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P sparc_mode_dependent_address_p
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE sparc_promote_function_mode
-#undef TARGET_PROMOTE_PROTOTYPES
-#define TARGET_PROMOTE_PROTOTYPES sparc_promote_prototypes
+#undef TARGET_FUNCTION_VALUE
+#define TARGET_FUNCTION_VALUE sparc_function_value
+#undef TARGET_LIBCALL_VALUE
+#define TARGET_LIBCALL_VALUE sparc_libcall_value
+#undef TARGET_FUNCTION_VALUE_REGNO_P
+#define TARGET_FUNCTION_VALUE_REGNO_P sparc_function_value_regno_p
#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX sparc_struct_value_rtx
#define TARGET_PASS_BY_REFERENCE sparc_pass_by_reference
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES sparc_arg_partial_bytes
+#undef TARGET_FUNCTION_ARG_ADVANCE
+#define TARGET_FUNCTION_ARG_ADVANCE sparc_function_arg_advance
+#undef TARGET_FUNCTION_ARG
+#define TARGET_FUNCTION_ARG sparc_function_arg
+#undef TARGET_FUNCTION_INCOMING_ARG
+#define TARGET_FUNCTION_INCOMING_ARG sparc_function_incoming_arg
+#undef TARGET_FUNCTION_ARG_BOUNDARY
+#define TARGET_FUNCTION_ARG_BOUNDARY sparc_function_arg_boundary
#undef TARGET_EXPAND_BUILTIN_SAVEREGS
#define TARGET_EXPAND_BUILTIN_SAVEREGS sparc_builtin_saveregs
#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P sparc_vector_mode_supported_p
+#undef TARGET_VECTORIZE_PREFERRED_SIMD_MODE
+#define TARGET_VECTORIZE_PREFERRED_SIMD_MODE sparc_preferred_simd_mode
+
#undef TARGET_DWARF_HANDLE_FRAME_UNSPEC
#define TARGET_DWARF_HANDLE_FRAME_UNSPEC sparc_dwarf_handle_frame_unspec
#define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION sparc_handle_option
+#undef TARGET_OPTION_OVERRIDE
+#define TARGET_OPTION_OVERRIDE sparc_option_override
+#undef TARGET_OPTION_OPTIMIZATION_TABLE
+#define TARGET_OPTION_OPTIMIZATION_TABLE sparc_option_optimization_table
#if TARGET_GNU_TLS && defined(HAVE_AS_SPARC_UA_PCREL)
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE sparc_can_eliminate
+#undef TARGET_CONDITIONAL_REGISTER_USAGE
+#define TARGET_CONDITIONAL_REGISTER_USAGE sparc_conditional_register_usage
+
#ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
#undef TARGET_MANGLE_TYPE
#define TARGET_MANGLE_TYPE sparc_mangle_type
/* Validate and override various options, and do some machine dependent
initialization. */
-void
-sparc_override_options (void)
+static void
+sparc_option_override (void)
{
static struct code_model {
const char *const name;
const struct sparc_cpu_select *sel;
int fpu;
+#ifdef SUBTARGET_OVERRIDE_OPTIONS
+ SUBTARGET_OVERRIDE_OPTIONS;
+#endif
+
#ifndef SPARC_BI_ARCH
/* Check for unsupported architecture size. */
if (! TARGET_64BIT != DEFAULT_ARCH32_P)
target_flags |= MASK_LONG_DOUBLE_128;
#endif
- if (!PARAM_SET_P (PARAM_SIMULTANEOUS_PREFETCHES))
- set_param_value ("simultaneous-prefetches",
- ((sparc_cpu == PROCESSOR_ULTRASPARC
- || sparc_cpu == PROCESSOR_NIAGARA
- || sparc_cpu == PROCESSOR_NIAGARA2)
- ? 2
- : (sparc_cpu == PROCESSOR_ULTRASPARC3
- ? 8 : 3)));
- if (!PARAM_SET_P (PARAM_L1_CACHE_LINE_SIZE))
- set_param_value ("l1-cache-line-size",
- ((sparc_cpu == PROCESSOR_ULTRASPARC
- || sparc_cpu == PROCESSOR_ULTRASPARC3
- || sparc_cpu == PROCESSOR_NIAGARA
- || sparc_cpu == PROCESSOR_NIAGARA2)
- ? 64 : 32));
+ maybe_set_param_value (PARAM_SIMULTANEOUS_PREFETCHES,
+ ((sparc_cpu == PROCESSOR_ULTRASPARC
+ || sparc_cpu == PROCESSOR_NIAGARA
+ || sparc_cpu == PROCESSOR_NIAGARA2)
+ ? 2
+ : (sparc_cpu == PROCESSOR_ULTRASPARC3
+ ? 8 : 3)),
+ global_options.x_param_values,
+ global_options_set.x_param_values);
+ maybe_set_param_value (PARAM_L1_CACHE_LINE_SIZE,
+ ((sparc_cpu == PROCESSOR_ULTRASPARC
+ || sparc_cpu == PROCESSOR_ULTRASPARC3
+ || sparc_cpu == PROCESSOR_NIAGARA
+ || sparc_cpu == PROCESSOR_NIAGARA2)
+ ? 64 : 32),
+ global_options.x_param_values,
+ global_options_set.x_param_values);
}
\f
/* Miscellaneous utilities. */
return 0;
}
+/* Return true if the address of LABEL can be loaded by means of the
+ mov{si,di}_pic_label_ref patterns in PIC mode. */
+
+static bool
+can_use_mov_pic_label_ref (rtx label)
+{
+ /* LABEL is a LABEL_REF rtx (see the LABEL_REF_NONLOCAL_P test below;
+    the caller checks GET_CODE (...) == LABEL_REF before calling us).  */
+
+ /* VxWorks does not impose a fixed gap between segments; the run-time
+ gap can be different from the object-file gap. We therefore can't
+ assume X - _GLOBAL_OFFSET_TABLE_ is a link-time constant unless we
+ are absolutely sure that X is in the same segment as the GOT.
+ Unfortunately, the flexibility of linker scripts means that we
+ can't be sure of that in general, so assume that GOT-relative
+ accesses are never valid on VxWorks. */
+ if (TARGET_VXWORKS_RTP)
+ return false;
+
+ /* Similarly, if the label is non-local, it might end up being placed
+ in a different section than the current one; now mov_pic_label_ref
+ requires the label and the code to be in the same section. */
+ if (LABEL_REF_NONLOCAL_P (label))
+ return false;
+
+ /* Finally, if we are reordering basic blocks and partition into hot
+ and cold sections, this might happen for any label. */
+ if (flag_reorder_blocks_and_partition)
+ return false;
+
+ return true;
+}
+
/* Expand a move instruction. Return true if all work is done. */
bool
&& CONSTANT_P (operands[1])
&& sparc_tls_referenced_p (operands [1]))
{
- operands[1] = legitimize_tls_address (operands[1]);
+ operands[1] = sparc_legitimize_tls_address (operands[1]);
return false;
}
if (flag_pic && CONSTANT_P (operands[1]))
{
if (pic_address_needs_scratch (operands[1]))
- operands[1] = legitimize_pic_address (operands[1], NULL_RTX);
+ operands[1] = sparc_legitimize_pic_address (operands[1], NULL_RTX);
- /* VxWorks does not impose a fixed gap between segments; the run-time
- gap can be different from the object-file gap. We therefore can't
- assume X - _GLOBAL_OFFSET_TABLE_ is a link-time constant unless we
- are absolutely sure that X is in the same segment as the GOT.
- Unfortunately, the flexibility of linker scripts means that we
- can't be sure of that in general, so assume that _G_O_T_-relative
- accesses are never valid on VxWorks. */
- if (GET_CODE (operands[1]) == LABEL_REF && !TARGET_VXWORKS_RTP)
+ /* We cannot use the mov{si,di}_pic_label_ref patterns in all cases. */
+ if (GET_CODE (operands[1]) == LABEL_REF
+ && can_use_mov_pic_label_ref (operands[1]))
{
if (mode == SImode)
{
if (symbolic_operand (operands[1], mode))
{
- operands[1] = legitimize_pic_address (operands[1],
- reload_in_progress
- ? operands[0] : NULL_RTX);
+ operands[1]
+ = sparc_legitimize_pic_address (operands[1],
+ reload_in_progress
+ ? operands[0] : NULL_RTX);
return false;
}
}
We know it can't be done in one insn when we get
here, the move expander guarantees this. */
-void
+static void
sparc_emit_set_const32 (rtx op0, rtx op1)
{
enum machine_mode mode = GET_MODE (op0);
}
#if HOST_BITS_PER_WIDE_INT == 32
-void
+static void
sparc_emit_set_const64 (rtx op0 ATTRIBUTE_UNUSED, rtx op1 ATTRIBUTE_UNUSED)
{
gcc_unreachable ();
being loaded into a register. Emit the most efficient
insn sequence possible. Detection of all the 1-insn cases
has been done already. */
-void
+static void
sparc_emit_set_const64 (rtx op0, rtx op1)
{
unsigned HOST_WIDE_INT high_bits, low_bits;
}
/* The easiest way when all else fails, is full decomposition. */
-#if 0
- printf ("sparc_emit_set_const64: Hard constant [%08lx%08lx] neg[%08lx%08lx]\n",
- high_bits, low_bits, ~high_bits, ~low_bits);
-#endif
sparc_emit_set_const64_longway (op0, temp, high_bits, low_bits);
}
#endif /* HOST_BITS_PER_WIDE_INT == 32 */
if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x))
return true;
- /* That's all we handle in legitimize_tls_address for now. */
+ /* That's all we handle in sparc_legitimize_tls_address for now. */
return false;
}
this (thread-local) address. */
static rtx
-legitimize_tls_address (rtx addr)
+sparc_legitimize_tls_address (rtx addr)
{
rtx temp1, temp2, temp3, ret, o0, got, insn;
gcc_assert (GET_CODE (XEXP (addr, 0)) == PLUS);
- base = legitimize_tls_address (XEXP (XEXP (addr, 0), 0));
+ base = sparc_legitimize_tls_address (XEXP (XEXP (addr, 0), 0));
offset = XEXP (XEXP (addr, 0), 1);
base = force_operand (base, NULL_RTX);
necessary. */
static rtx
-legitimize_pic_address (rtx orig, rtx reg)
+sparc_legitimize_pic_address (rtx orig, rtx reg)
{
bool gotdata_op = false;
if (GET_CODE (orig) == SYMBOL_REF
/* See the comment in sparc_expand_move. */
- || (TARGET_VXWORKS_RTP && GET_CODE (orig) == LABEL_REF))
+ || (GET_CODE (orig) == LABEL_REF && !can_use_mov_pic_label_ref (orig)))
{
rtx pic_ref, address;
rtx insn;
if (gotdata_op)
{
if (TARGET_ARCH64)
- insn = emit_insn (gen_movdi_pic_gotdata_op (reg, pic_offset_table_rtx,
+ insn = emit_insn (gen_movdi_pic_gotdata_op (reg,
+ pic_offset_table_rtx,
address, orig));
else
- insn = emit_insn (gen_movsi_pic_gotdata_op (reg, pic_offset_table_rtx,
+ insn = emit_insn (gen_movsi_pic_gotdata_op (reg,
+ pic_offset_table_rtx,
address, orig));
}
else
{
- pic_ref = gen_const_mem (Pmode,
- gen_rtx_PLUS (Pmode,
- pic_offset_table_rtx, address));
+ pic_ref
+ = gen_const_mem (Pmode,
+ gen_rtx_PLUS (Pmode,
+ pic_offset_table_rtx, address));
insn = emit_move_insn (reg, pic_ref);
}
+
/* Put a REG_EQUAL note on this insn, so that it can be optimized
by loop. */
set_unique_reg_note (insn, REG_EQUAL, orig);
}
gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
- base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
- offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
- base == reg ? NULL_RTX : reg);
+ base = sparc_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
+ offset = sparc_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
+ base == reg ? NULL_RTX : reg);
if (GET_CODE (offset) == CONST_INT)
{
return gen_rtx_PLUS (Pmode, base, offset);
}
else if (GET_CODE (orig) == LABEL_REF)
- /* ??? Why do we do this? */
- /* Now movsi_pic_label_ref uses it, but we ought to be checking that
- the register is live instead, in case it is eliminated. */
+ /* ??? We ought to be checking that the register is live instead, in case
+ it is eliminated. */
crtl->uses_pic_offset_table = 1;
return orig;
return x;
if (sparc_tls_referenced_p (x))
- x = legitimize_tls_address (x);
+ x = sparc_legitimize_tls_address (x);
else if (flag_pic)
- x = legitimize_pic_address (x, NULL_RTX);
+ x = sparc_legitimize_pic_address (x, NULL_RTX);
else if (GET_CODE (x) == PLUS && CONSTANT_ADDRESS_P (XEXP (x, 1)))
x = gen_rtx_PLUS (Pmode, XEXP (x, 0),
copy_to_mode_reg (Pmode, XEXP (x, 1)));
return x;
}
+/* Delegitimize an address that was legitimized by the above function. */
+
+static rtx
+sparc_delegitimize_address (rtx x)
+{
+ /* First undo any MEM-attribute based legitimization.  */
+ x = delegitimize_mem_from_attrs (x);
+
+ /* Peel the UNSPEC_TLSLE wrapper off a LO_SUM TLS local-exec address
+    to recover the underlying SYMBOL_REF.  */
+ if (GET_CODE (x) == LO_SUM
+ && GET_CODE (XEXP (x, 1)) == UNSPEC
+ && XINT (XEXP (x, 1), 1) == UNSPEC_TLSLE)
+ {
+ x = XVECEXP (XEXP (x, 1), 0, 0);
+ gcc_assert (GET_CODE (x) == SYMBOL_REF);
+ }
+
+ return x;
+}
+
+/* SPARC implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
+ replace the input X, or the original X if no replacement is called for.
+ The output parameter *WIN is 1 if the calling macro should goto WIN,
+ 0 if it should not.
+
+ For SPARC, we wish to handle addresses by splitting them into
+ HIGH+LO_SUM pairs, retaining the LO_SUM in the memory reference.
+ This cuts the number of extra insns by one.
+
+ Do nothing when generating PIC code and the address is a symbolic
+ operand or requires a scratch register. */
+
+rtx
+sparc_legitimize_reload_address (rtx x, enum machine_mode mode,
+ int opnum, int type,
+ int ind_levels ATTRIBUTE_UNUSED, int *win)
+{
+ /* Decompose SImode constants into HIGH+LO_SUM. */
+ if (CONSTANT_P (x)
+ && (mode != TFmode || TARGET_ARCH64)
+ && GET_MODE (x) == SImode
+ && GET_CODE (x) != LO_SUM
+ && GET_CODE (x) != HIGH
+ && sparc_cmodel <= CM_MEDLOW
+ && !(flag_pic
+ && (symbolic_operand (x, Pmode) || pic_address_needs_scratch (x))))
+ {
+ /* Rewrite X as LO_SUM (HIGH (X), X) and reload the HIGH part
+    into a base register.  */
+ x = gen_rtx_LO_SUM (GET_MODE (x), gen_rtx_HIGH (GET_MODE (x), x), x);
+ push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
+ BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
+ opnum, (enum reload_type)type);
+ *win = 1;
+ return x;
+ }
+
+ /* We have to recognize what we have already generated above. */
+ if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == HIGH)
+ {
+ push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
+ BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
+ opnum, (enum reload_type)type);
+ *win = 1;
+ return x;
+ }
+
+ /* No replacement is called for; tell the caller not to goto WIN.  */
+ *win = 0;
+ return x;
+}
+
/* Return true if ADDR (a legitimate address expression)
has an effect that depends on the machine mode it is used for.
#endif
}
+#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)
+
+#if PROBE_INTERVAL > 4096
+#error Cannot use indexed addressing mode for stack probing
+#endif
+
+/* Emit code to probe a range of stack addresses from FIRST to FIRST+SIZE,
+ inclusive. These are offsets from the current stack pointer.
+
+ Note that we don't use the REG+REG addressing mode for the probes because
+ of the stack bias in 64-bit mode. And it doesn't really buy us anything
+ so the advantages of having a single code win here. */
+
+static void
+sparc_emit_probe_stack_range (HOST_WIDE_INT first, HOST_WIDE_INT size)
+{
+ /* Scratch address register: hard register 1 (%g1 on SPARC).  */
+ rtx g1 = gen_rtx_REG (Pmode, 1);
+
+ /* See if we have a constant small number of probes to generate. If so,
+ that's the easy case. */
+ if (size <= PROBE_INTERVAL)
+ {
+ emit_move_insn (g1, GEN_INT (first));
+ emit_insn (gen_rtx_SET (VOIDmode, g1,
+ gen_rtx_MINUS (Pmode, stack_pointer_rtx, g1)));
+ emit_stack_probe (plus_constant (g1, -size));
+ }
+
+ /* The run-time loop is made up of 10 insns in the generic case while the
+ compile-time loop is made up of 4+2*(n-2) insns for n # of intervals. */
+ else if (size <= 5 * PROBE_INTERVAL)
+ {
+ HOST_WIDE_INT i;
+
+ emit_move_insn (g1, GEN_INT (first + PROBE_INTERVAL));
+ emit_insn (gen_rtx_SET (VOIDmode, g1,
+ gen_rtx_MINUS (Pmode, stack_pointer_rtx, g1)));
+ emit_stack_probe (g1);
+
+ /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 2 until
+ it exceeds SIZE. If only two probes are needed, this will not
+ generate any code. Then probe at FIRST + SIZE. */
+ for (i = 2 * PROBE_INTERVAL; i < size; i += PROBE_INTERVAL)
+ {
+ emit_insn (gen_rtx_SET (VOIDmode, g1,
+ plus_constant (g1, -PROBE_INTERVAL)));
+ emit_stack_probe (g1);
+ }
+
+ emit_stack_probe (plus_constant (g1, (i - PROBE_INTERVAL) - size));
+ }
+
+ /* Otherwise, do the same as above, but in a loop. Note that we must be
+ extra careful with variables wrapping around because we might be at
+ the very top (or the very bottom) of the address space and we have
+ to be able to handle this case properly; in particular, we use an
+ equality test for the loop condition. */
+ else
+ {
+ HOST_WIDE_INT rounded_size;
+ /* Second scratch register: hard register 4 (%g4 on SPARC).  */
+ rtx g4 = gen_rtx_REG (Pmode, 4);
+
+ emit_move_insn (g1, GEN_INT (first));
+
+
+ /* Step 1: round SIZE to the previous multiple of the interval. */
+
+ rounded_size = size & -PROBE_INTERVAL;
+ emit_move_insn (g4, GEN_INT (rounded_size));
+
+
+ /* Step 2: compute initial and final value of the loop counter. */
+
+ /* TEST_ADDR = SP + FIRST. */
+ emit_insn (gen_rtx_SET (VOIDmode, g1,
+ gen_rtx_MINUS (Pmode, stack_pointer_rtx, g1)));
+
+ /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE. */
+ emit_insn (gen_rtx_SET (VOIDmode, g4, gen_rtx_MINUS (Pmode, g1, g4)));
+
+
+ /* Step 3: the loop
+
+ while (TEST_ADDR != LAST_ADDR)
+ {
+ TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
+ probe at TEST_ADDR
+ }
+
+ probes at FIRST + N * PROBE_INTERVAL for values of N from 1
+ until it is equal to ROUNDED_SIZE. */
+
+ if (TARGET_64BIT)
+ emit_insn (gen_probe_stack_rangedi (g1, g1, g4));
+ else
+ emit_insn (gen_probe_stack_rangesi (g1, g1, g4));
+
+
+ /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
+ that SIZE is equal to ROUNDED_SIZE. */
+
+ if (size != rounded_size)
+ emit_stack_probe (plus_constant (g4, rounded_size - size));
+ }
+
+ /* Make sure nothing is scheduled before we are done. */
+ emit_insn (gen_blockage ());
+}
+
+/* Probe a range of stack addresses from REG1 to REG2 inclusive. These are
+ absolute addresses. */
+
+const char *
+output_probe_stack_range (rtx reg1, rtx reg2)
+{
+ /* Per-call label counter; labels must be unique across all expansions.  */
+ static int labelno = 0;
+ char loop_lab[32], end_lab[32];
+ rtx xops[2];
+
+ ASM_GENERATE_INTERNAL_LABEL (loop_lab, "LPSRL", labelno);
+ ASM_GENERATE_INTERNAL_LABEL (end_lab, "LPSRE", labelno++);
+
+ ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, loop_lab);
+
+ /* Jump to END_LAB if TEST_ADDR == LAST_ADDR. */
+ xops[0] = reg1;
+ xops[1] = reg2;
+ output_asm_insn ("cmp\t%0, %1", xops);
+ if (TARGET_ARCH64)
+ fputs ("\tbe,pn\t%xcc,", asm_out_file);
+ else
+ fputs ("\tbe\t", asm_out_file);
+ assemble_name_raw (asm_out_file, end_lab);
+ fputc ('\n', asm_out_file);
+
+ /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL. */
+ /* NOTE: the leading blank in the template means this add is emitted
+    right after the be above, i.e. in its delay slot.  */
+ xops[1] = GEN_INT (-PROBE_INTERVAL);
+ output_asm_insn (" add\t%0, %1, %0", xops);
+
+ /* Probe at TEST_ADDR and branch. */
+ if (TARGET_ARCH64)
+ fputs ("\tba,pt\t%xcc,", asm_out_file);
+ else
+ fputs ("\tba\t", asm_out_file);
+ assemble_name_raw (asm_out_file, loop_lab);
+ fputc ('\n', asm_out_file);
+ /* The probing store sits in the delay slot of the ba above and uses
+    the stack bias as displacement (0 in 32-bit mode).  */
+ xops[1] = GEN_INT (SPARC_STACK_BIAS);
+ output_asm_insn (" st\t%%g0, [%0+%1]", xops);
+
+ ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, end_lab);
+
+ return "";
+}
+
/* Save/restore call-saved registers from LOW to HIGH at BASE+OFFSET
as needed. LOW should be double-word aligned for 32-bit registers.
Return the new OFFSET. */
/* Advertise that the data calculated just above are now valid. */
sparc_prologue_data_valid_p = true;
+ if (flag_stack_usage)
+ current_function_static_stack_size = actual_fsize;
+
+ if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK && actual_fsize)
+ sparc_emit_probe_stack_range (STACK_CHECK_PROTECT, actual_fsize);
+
if (sparc_leaf_function_p)
{
frame_base_reg = stack_pointer_rtx;
static void
sparc_asm_function_epilogue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
- /* If code does not drop into the epilogue, we have to still output
- a dummy nop for the sake of sane backtraces. Otherwise, if the
- last two instructions of a function were "call foo; dslot;" this
- can make the return PC of foo (i.e. address of call instruction
- plus 8) point to the first instruction in the next function. */
+ /* If the last two instructions of a function are "call foo; dslot;"
+ the return address might point to the first instruction in the next
+ function and we have to output a dummy nop for the sake of sane
+ backtraces in such cases. This is pointless for sibling calls since
+ the return address is explicitly adjusted. */
rtx insn, last_real_insn;
&& GET_CODE (PATTERN (last_real_insn)) == SEQUENCE)
last_real_insn = XVECEXP (PATTERN (last_real_insn), 0, 0);
- if (last_real_insn && GET_CODE (last_real_insn) == CALL_INSN)
+ if (last_real_insn
+ && CALL_P (last_real_insn)
+ && !SIBLING_CALL_P (last_real_insn))
fputs("\tnop\n", file);
sparc_output_deferred_case_vectors ();
cum->libcall_p = fntype == 0;
}
-/* Handle the TARGET_PROMOTE_PROTOTYPES target hook.
- When a prototype says `char' or `short', really pass an `int'. */
-
-static bool
-sparc_promote_prototypes (const_tree fntype ATTRIBUTE_UNUSED)
-{
- return TARGET_ARCH32 ? true : false;
-}
-
/* Handle promotion of pointer and integer arguments. */
static enum machine_mode
return Pmode;
}
- /* For TARGET_ARCH64 we need this, as we don't have instructions
- for arithmetic operations which do zero/sign extension at the same time,
- so without this we end up with a srl/sra after every assignment to an
- user variable, which means very very bad code. */
- if (TARGET_ARCH64
- && GET_MODE_CLASS (mode) == MODE_INT
+ /* Integral arguments are passed as full words, as per the ABI. */
+ if (GET_MODE_CLASS (mode) == MODE_INT
&& GET_MODE_SIZE (mode) < UNITS_PER_WORD)
return word_mode;
Sub-fields are not taken into account for the PACKED_P predicate. */
static void
-scan_record_type (tree type, int *intregs_p, int *fpregs_p, int *packed_p)
+scan_record_type (const_tree type, int *intregs_p, int *fpregs_p,
+ int *packed_p)
{
tree field;
- for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
+ for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
{
if (TREE_CODE (field) == FIELD_DECL)
{
static int
function_arg_slotno (const struct sparc_args *cum, enum machine_mode mode,
- tree type, int named, int incoming_p,
+ const_tree type, bool named, bool incoming_p,
int *pregno, int *ppadding)
{
int regbase = (incoming_p
}
/* Compute how many registers we need. */
- for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
+ for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
{
if (TREE_CODE (field) == FIELD_DECL)
{
tree field;
if (! packed_p)
- for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
+ for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
{
if (TREE_CODE (field) == FIELD_DECL && DECL_PACKED (field))
{
}
}
- for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
+ for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
{
if (TREE_CODE (field) == FIELD_DECL)
{
}
}
-/* Used by function_arg and function_value to implement the complex
+/* Used by function_arg and sparc_function_value_1 to implement the complex
conventions of the 64-bit ABI for passing and returning structures.
- Return an expression valid as a return value for the two macros
- FUNCTION_ARG and FUNCTION_VALUE.
+ Return an expression valid as a return value for the FUNCTION_ARG
+ and TARGET_FUNCTION_VALUE.
TYPE is the data type of the argument (as a tree).
This is null for libcalls where that information may
return parms.ret;
}
-/* Used by function_arg and function_value to implement the conventions
+/* Used by function_arg and sparc_function_value_1 to implement the conventions
of the 64-bit ABI for passing and returning unions.
- Return an expression valid as a return value for the two macros
- FUNCTION_ARG and FUNCTION_VALUE.
+ Return an expression valid as a return value for the FUNCTION_ARG
+ and TARGET_FUNCTION_VALUE.
SIZE is the size in bytes of the union.
MODE is the argument's machine mode.
return regs;
}
-/* Used by function_arg and function_value to implement the conventions
+/* Used by function_arg and sparc_function_value_1 to implement the conventions
for passing and returning large (BLKmode) vectors.
- Return an expression valid as a return value for the two macros
- FUNCTION_ARG and FUNCTION_VALUE.
+ Return an expression valid as a return value for the FUNCTION_ARG
+ and TARGET_FUNCTION_VALUE.
SIZE is the size in bytes of the vector (at least 8 bytes).
REGNO is the FP hard register the vector will be passed in. */
return regs;
}
-/* Handle the FUNCTION_ARG macro.
- Determine where to put an argument to a function.
+/* Determine where to put an argument to a function.
Value is zero to push the argument on the stack,
or a hard register in which to store the argument.
TYPE is the data type of the argument (as a tree).
This is null for libcalls where that information may
not be available.
- NAMED is nonzero if this argument is a named parameter
+ NAMED is true if this argument is a named parameter
(otherwise it is an extra parameter matching an ellipsis).
- INCOMING_P is zero for FUNCTION_ARG, nonzero for FUNCTION_INCOMING_ARG. */
+ INCOMING_P is false for TARGET_FUNCTION_ARG, true for
+ TARGET_FUNCTION_INCOMING_ARG. */
-rtx
-function_arg (const struct sparc_args *cum, enum machine_mode mode,
- tree type, int named, int incoming_p)
+static rtx
+sparc_function_arg_1 (const CUMULATIVE_ARGS *cum, enum machine_mode mode,
+ const_tree type, bool named, bool incoming_p)
{
int regbase = (incoming_p
? SPARC_INCOMING_INT_ARG_FIRST
return gen_rtx_REG (mode, regno);
}
+/* Handle the TARGET_FUNCTION_ARG target hook. */
+
+static rtx
+sparc_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+ const_tree type, bool named)
+{
+ /* incoming_p is false: caller-side (outgoing) view of the argument.  */
+ return sparc_function_arg_1 (cum, mode, type, named, false);
+}
+
+/* Handle the TARGET_FUNCTION_INCOMING_ARG target hook. */
+
+static rtx
+sparc_function_incoming_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+ const_tree type, bool named)
+{
+ /* incoming_p is true: callee-side (incoming) view of the argument.  */
+ return sparc_function_arg_1 (cum, mode, type, named, true);
+}
+
+/* For sparc64, objects requiring 16 byte alignment are passed that way. */
+
+static unsigned int
+sparc_function_arg_boundary (enum machine_mode mode, const_tree type)
+{
+ /* The 128-bit slot boundary only applies in 64-bit mode; everything
+    else gets the default PARM_BOUNDARY.  */
+ return ((TARGET_ARCH64
+ && (GET_MODE_ALIGNMENT (mode) == 128
+ || (type && TYPE_ALIGN (type) == 128)))
+ ? 128
+ : PARM_BOUNDARY);
+}
+
/* For an arg passed partly in registers and partly in memory,
this is the number of bytes of registers used.
For args passed entirely in registers or entirely in memory, zero.
{
int slotno, regno, padding;
- /* We pass 0 for incoming_p here, it doesn't matter. */
- slotno = function_arg_slotno (cum, mode, type, named, 0, ®no, &padding);
+ /* We pass false for incoming_p here, it doesn't matter. */
+ slotno = function_arg_slotno (cum, mode, type, named, false,
+ ®no, &padding);
if (slotno == -1)
return 0;
|| GET_MODE_SIZE (mode) > 16);
}
-/* Handle the FUNCTION_ARG_ADVANCE macro.
+/* Handle the TARGET_FUNCTION_ARG_ADVANCE hook.
Update the data in CUM to advance over an argument
of mode MODE and data type TYPE.
TYPE is null for libcalls where that information may not be available. */
-void
-function_arg_advance (struct sparc_args *cum, enum machine_mode mode,
- tree type, int named)
+static void
+sparc_function_arg_advance (struct sparc_args *cum, enum machine_mode mode,
+ const_tree type, bool named)
{
- int slotno, regno, padding;
+ int regno, padding;
- /* We pass 0 for incoming_p here, it doesn't matter. */
- slotno = function_arg_slotno (cum, mode, type, named, 0, ®no, &padding);
+ /* We pass false for incoming_p here, it doesn't matter. */
+ function_arg_slotno (cum, mode, type, named, false, ®no, &padding);
- /* If register required leading padding, add it. */
- if (slotno != -1)
- cum->words += padding;
+ /* If argument requires leading padding, add it. */
+ cum->words += padding;
if (TARGET_ARCH32)
{
integers are returned like floats of the same size, that is in
registers. Return all vector floats like structure and unions;
note that they always have BLKmode like the latter. */
- return ((TYPE_MODE (type) == BLKmode
- && (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 32));
+ return (TYPE_MODE (type) == BLKmode
+ && (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 32);
}
/* Handle the TARGET_STRUCT_VALUE target hook.
tree size = TYPE_SIZE_UNIT (TREE_TYPE (fndecl));
rtx size_rtx = GEN_INT (TREE_INT_CST_LOW (size) & 0xfff);
/* Construct a temporary return value */
- rtx temp_val = assign_stack_local (Pmode, TREE_INT_CST_LOW (size), 0);
+ rtx temp_val
+ = assign_stack_local (Pmode, TREE_INT_CST_LOW (size), 0);
- /* Implement SPARC 32-bit psABI callee returns struck checking
- requirements:
+ /* Implement SPARC 32-bit psABI callee return struct checking:
- Fetch the instruction where we will return to and see if
+ Fetch the instruction where we will return to and see if
it's an unimp instruction (the most significant 10 bits
will be zero). */
emit_move_insn (scratch, gen_rtx_MEM (SImode,
plus_constant (ret_rtx, 8)));
/* Assume the size is valid and pre-adjust */
emit_insn (gen_add3_insn (ret_rtx, ret_rtx, GEN_INT (4)));
- emit_cmp_and_jump_insns (scratch, size_rtx, EQ, const0_rtx, SImode, 0, endlab);
+ emit_cmp_and_jump_insns (scratch, size_rtx, EQ, const0_rtx, SImode,
+ 0, endlab);
emit_insn (gen_sub3_insn (ret_rtx, ret_rtx, GEN_INT (4)));
- /* Assign stack temp:
- Write the address of the memory pointed to by temp_val into
+ /* Write the address of the memory pointed to by temp_val into
the memory pointed to by mem */
emit_move_insn (mem, XEXP (temp_val, 0));
emit_label (endlab);
}
}
-/* Handle FUNCTION_VALUE, FUNCTION_OUTGOING_VALUE, and LIBCALL_VALUE macros.
+/* Handle TARGET_FUNCTION_VALUE, and TARGET_LIBCALL_VALUE target hook.
For v9, function return values are subject to the same rules as arguments,
except that up to 32 bytes may be returned in registers. */
-rtx
-function_value (const_tree type, enum machine_mode mode, int incoming_p)
+static rtx
+sparc_function_value_1 (const_tree type, enum machine_mode mode,
+ bool outgoing)
{
/* Beware that the two values are swapped here wrt function_arg. */
- int regbase = (incoming_p
- ? SPARC_OUTGOING_INT_ARG_FIRST
- : SPARC_INCOMING_INT_ARG_FIRST);
+ int regbase = (outgoing
+ ? SPARC_INCOMING_INT_ARG_FIRST
+ : SPARC_OUTGOING_INT_ARG_FIRST);
enum mode_class mclass = GET_MODE_CLASS (mode);
int regno;
mclass = MODE_INT;
}
- /* This must match sparc_promote_function_mode.
- ??? Maybe 32-bit pointers should actually remain in Pmode? */
+ /* We should only have pointer and integer types at this point. This
+ must match sparc_promote_function_mode. */
else if (mclass == MODE_INT && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
mode = word_mode;
}
+ /* We should only have pointer and integer types at this point. This must
+ match sparc_promote_function_mode. */
+ else if (TARGET_ARCH32
+ && mclass == MODE_INT
+ && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
+ mode = word_mode;
+
if ((mclass == MODE_FLOAT || mclass == MODE_COMPLEX_FLOAT) && TARGET_FPU)
regno = SPARC_FP_ARG_FIRST;
else
return gen_rtx_REG (mode, regno);
}
+/* Handle TARGET_FUNCTION_VALUE.
+
+   On the SPARC, the value is found in the first "output" register, but the
+   called function leaves it in the first "input" register.
+
+   VALTYPE is the tree type of the returned value.  FN_DECL_OR_TYPE is not
+   needed here.  OUTGOING is true for the value as seen inside the returning
+   function, false for the caller's view of it.  */
+
+static rtx
+sparc_function_value (const_tree valtype,
+                      const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
+                      bool outgoing)
+{
+  return sparc_function_value_1 (valtype, TYPE_MODE (valtype), outgoing);
+}
+
+/* Handle TARGET_LIBCALL_VALUE.
+
+   A libcall has no tree type, so pass NULL_TREE and let MODE alone select
+   the return register.  FUN (the libcall expression) is unused.  */
+
+static rtx
+sparc_libcall_value (enum machine_mode mode,
+                     const_rtx fun ATTRIBUTE_UNUSED)
+{
+  /* A libcall result is always viewed from the caller's side.  */
+  return sparc_function_value_1 (NULL_TREE, mode, false);
+}
+
+/* Handle TARGET_FUNCTION_VALUE_REGNO_P.
+
+   On the SPARC, the first "output" reg is used for integer values, and the
+   first floating point register is used for floating point values.
+   Hard register 8 is %o0 and hard register 32 is %f0.  */
+
+static bool
+sparc_function_value_regno_p (const unsigned int regno)
+{
+  return (regno == 8 || regno == 32);
+}
+
/* Do what is necessary for `va_start'. We look at the current function
to determine if stdarg or varargs is used and return the address of
the first unnamed parameter. */
return TARGET_VIS && VECTOR_MODE_P (mode) ? true : false;
}
\f
+/* Implement the TARGET_VECTORIZE_PREFERRED_SIMD_MODE target hook.
+
+   With VIS, vectorize using the 64-bit partitioned vector modes:
+   V2SI, V4HI or V8QI, chosen by the scalar element MODE.  For any
+   other mode, or without VIS, return word_mode, which tells the
+   vectorizer that no SIMD mode is preferred.  */
+
+static enum machine_mode
+sparc_preferred_simd_mode (enum machine_mode mode)
+{
+  if (TARGET_VIS)
+    switch (mode)
+      {
+      case SImode:
+	return V2SImode;
+      case HImode:
+	return V4HImode;
+      case QImode:
+	return V8QImode;
+
+      default:;
+      }
+
+  return word_mode;
+}
+\f
/* Return the string to output an unconditional branch to LABEL, which is
the operand number of the label.
case VECTOR_TYPE:
case BOOLEAN_TYPE: /* Boolean truth value type. */
- case LANG_TYPE: /* ? */
+ case LANG_TYPE:
+ case NULLPTR_TYPE:
return qualifiers;
default:
}
\f
/* libfunc renaming. */
-#include "config/gofast.h"
static void
sparc_init_libfuncs (void)
set_conv_libfunc (ufix_optab, DImode, DFmode, "__dtoul");
}
}
-
- gofast_maybe_init_libfuncs ();
}
\f
#define def_builtin(NAME, CODE, TYPE) \
if (ignore
&& icode != CODE_FOR_alignaddrsi_vis
&& icode != CODE_FOR_alignaddrdi_vis)
- return fold_convert (rtype, integer_zero_node);
+ return build_zero_cst (rtype);
switch (icode)
{
/* Delay emitting the PIC helper function because it needs to
change the section and we are emitting assembly code. */
load_pic_register (); /* clobbers %o7 */
- scratch = legitimize_pic_address (funexp, scratch);
+ scratch = sparc_legitimize_pic_address (funexp, scratch);
seq = get_insns ();
end_sequence ();
emit_and_preserve (seq, spill_reg, spill_reg2);
static struct machine_function *
sparc_init_machine_status (void)
{
-  return GGC_CNEW (struct machine_function);
+  /* Allocate the per-function machine state in zeroed GC storage.  */
+  return ggc_alloc_cleared_machine_function ();
}
/* Locate some local-dynamic symbol still in use by this function
DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
DECL_VISIBILITY_SPECIFIED (decl) = 1;
allocate_struct_function (decl, true);
+ cfun->is_thunk = 1;
current_function_decl = decl;
init_varasm_status ();
assemble_start_function (decl, name);
|| !targetm.frame_pointer_required ());
}
+/* Implement TARGET_CONDITIONAL_REGISTER_USAGE.
+
+   If !TARGET_FPU, then make the fp registers and fp cc regs fixed so that
+   they won't be allocated.  Also reserve the PIC register when one is in
+   use, honor the user's -f{fixed,call-{used,saved}}-gN options for the
+   global registers, and make the V9-only registers fixed on pre-V9
+   targets.  */
+
+static void
+sparc_conditional_register_usage (void)
+{
+  if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
+    {
+      fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
+      call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
+    }
+  /* If the user has passed -f{fixed,call-{used,saved}}-g5,
+     then honor it.  */
+  if (TARGET_ARCH32 && fixed_regs[5])
+    fixed_regs[5] = 1;
+  else if (TARGET_ARCH64 && fixed_regs[5] == 2)
+    /* NOTE(review): the value 2 presumably marks "no explicit user
+       request" -- confirm against the fixed_regs initialization.  */
+    fixed_regs[5] = 0;
+  if (! TARGET_V9)
+    {
+      int regno;
+      /* The V9-only FP registers do not exist before V9; keep them
+	 out of the allocator's hands.  */
+      for (regno = SPARC_FIRST_V9_FP_REG;
+	   regno <= SPARC_LAST_V9_FP_REG;
+	   regno++)
+	fixed_regs[regno] = 1;
+      /* %fcc0 is used by v8 and v9, so only the higher fcc regs
+	 are made fixed here.  */
+      for (regno = SPARC_FIRST_V9_FCC_REG + 1;
+	   regno <= SPARC_LAST_V9_FCC_REG;
+	   regno++)
+	fixed_regs[regno] = 1;
+    }
+  if (! TARGET_FPU)
+    {
+      int regno;
+      /* No FPU: fix every FP register and FP condition-code register
+	 so nothing is ever allocated to them.  */
+      for (regno = 32; regno < SPARC_LAST_V9_FCC_REG; regno++)
+	fixed_regs[regno] = 1;
+    }
+  /* If the user has passed -f{fixed,call-{used,saved}}-g2,
+     then honor it.  Likewise with g3 and g4.  */
+  if (fixed_regs[2] == 2)
+    fixed_regs[2] = ! TARGET_APP_REGS;
+  if (fixed_regs[3] == 2)
+    fixed_regs[3] = ! TARGET_APP_REGS;
+  if (TARGET_ARCH32 && fixed_regs[4] == 2)
+    fixed_regs[4] = ! TARGET_APP_REGS;
+  else if (TARGET_CM_EMBMEDANY)
+    /* NOTE(review): the embmedany code model apparently reserves %g4
+       unconditionally -- confirm against the code model docs.  */
+    fixed_regs[4] = 1;
+  else if (fixed_regs[4] == 2)
+    fixed_regs[4] = 0;
+}
+
#include "gt-sparc.h"