#include "tm_p.h"
#include "target.h"
#include "target-def.h"
+#include "common/common-target.h"
#include "debug.h"
#include "langhooks.h"
#include "splay-tree.h"
static int alpha_function_needs_gp;
-/* The alias set for prologue/epilogue register save/restore. */
-
-static GTY(()) alias_set_type alpha_sr_alias_set;
-
/* The assembler name of the current function. */
static const char *alpha_fnname;
static rtx alpha_emit_xfloating_compare (enum rtx_code *, rtx, rtx);
#if TARGET_ABI_OPEN_VMS
-static void alpha_write_linkage (FILE *, const char *, tree);
+static void alpha_write_linkage (FILE *, const char *);
static bool vms_valid_pointer_mode (enum machine_mode);
+#else
+#define vms_patch_builtins() gcc_unreachable()
#endif
\f
-/* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
-static const struct default_options alpha_option_optimization_table[] =
- {
- { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
- { OPT_LEVELS_NONE, 0, NULL, 0 }
- };
-
-/* Implement TARGET_HANDLE_OPTION. */
-
-static bool
-alpha_handle_option (struct gcc_options *opts,
- struct gcc_options *opts_set ATTRIBUTE_UNUSED,
- const struct cl_decoded_option *decoded,
- location_t loc)
-{
- size_t code = decoded->opt_index;
- const char *arg = decoded->arg;
- int value = decoded->value;
-
- switch (code)
- {
- case OPT_mfp_regs:
- if (value == 0)
- opts->x_target_flags |= MASK_SOFT_FP;
- break;
-
- case OPT_mieee:
- case OPT_mieee_with_inexact:
- opts->x_target_flags |= MASK_IEEE_CONFORMANT;
- break;
-
- case OPT_mtls_size_:
- if (value != 16 && value != 32 && value != 64)
- error_at (loc, "bad value %qs for -mtls-size switch", arg);
- break;
- }
-
- return true;
-}
-
#ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
/* Implement TARGET_MANGLE_TYPE. */
SUBTARGET_OVERRIDE_OPTIONS;
#endif
+ /* Default to full IEEE compliance mode for Go language. */
+ if (strcmp (lang_hooks.name, "GNU Go") == 0
+ && !(target_flags_explicit & MASK_IEEE))
+ target_flags |= MASK_IEEE;
+
alpha_fprm = ALPHA_FPRM_NORM;
alpha_tp = ALPHA_TP_PROG;
alpha_fptm = ALPHA_FPTM_N;
if (align_functions <= 0)
align_functions = 16;
- /* Acquire a unique set number for our register saves and restores. */
- alpha_sr_alias_set = new_alias_set ();
-
/* Register variables and functions with the garbage collector. */
/* Set up function hooks. */
if (REG_P (tmp)
&& REGNO (tmp) >= FIRST_PSEUDO_REGISTER)
{
- op = reg_equiv_memory_loc[REGNO (tmp)];
+ op = reg_equiv_memory_loc (REGNO (tmp));
if (op == 0)
return 0;
}
int
direct_return (void)
{
- return (!TARGET_ABI_OPEN_VMS
+ return (TARGET_ABI_OSF
&& reload_completed
&& alpha_sa_size () == 0
&& get_frame_size () == 0
&& crtl->args.pretend_args_size == 0);
}
-/* Return the ADDR_VEC associated with a tablejump insn. */
-
-rtx
-alpha_tablejump_addr_vec (rtx insn)
-{
- rtx tmp;
-
- tmp = JUMP_LABEL (insn);
- if (!tmp)
- return NULL_RTX;
- tmp = NEXT_INSN (tmp);
- if (!tmp)
- return NULL_RTX;
- if (JUMP_P (tmp)
- && GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC)
- return PATTERN (tmp);
- return NULL_RTX;
-}
-
-/* Return the label of the predicted edge, or CONST0_RTX if we don't know. */
-
-rtx
-alpha_tablejump_best_label (rtx insn)
-{
- rtx jump_table = alpha_tablejump_addr_vec (insn);
- rtx best_label = NULL_RTX;
-
- /* ??? Once the CFG doesn't keep getting completely rebuilt, look
- there for edge frequency counts from profile data. */
-
- if (jump_table)
- {
- int n_labels = XVECLEN (jump_table, 1);
- int best_count = -1;
- int i, j;
-
- for (i = 0; i < n_labels; i++)
- {
- int count = 1;
-
- for (j = i + 1; j < n_labels; j++)
- if (XEXP (XVECEXP (jump_table, 1, i), 0)
- == XEXP (XVECEXP (jump_table, 1, j), 0))
- count++;
-
- if (count > best_count)
- best_count = count, best_label = XVECEXP (jump_table, 1, i);
- }
- }
-
- return best_label ? best_label : const0_rtx;
-}
-
/* Return the TLS model to use for SYMBOL. */
static enum tls_model
should never be spilling symbolic operands to the constant pool, ever. */
static bool
-alpha_cannot_force_const_mem (rtx x)
+alpha_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
enum rtx_code code = GET_CODE (x);
return code == SYMBOL_REF || code == LABEL_REF || code == CONST;
scanned. In either case, *TOTAL contains the cost result. */
static bool
-alpha_rtx_costs (rtx x, int code, int outer_code, int *total,
+alpha_rtx_costs (rtx x, int code, int outer_code, int opno, int *total,
bool speed)
{
enum machine_mode mode = GET_MODE (x);
&& const48_operand (XEXP (XEXP (x, 0), 1), VOIDmode))
{
*total = (rtx_cost (XEXP (XEXP (x, 0), 0),
- (enum rtx_code) outer_code, speed)
+ (enum rtx_code) outer_code, opno, speed)
+ rtx_cost (XEXP (x, 1),
- (enum rtx_code) outer_code, speed)
+ (enum rtx_code) outer_code, opno, speed)
+ COSTS_N_INSNS (1));
return true;
}
*paligned_mem = widen_memory_access (ref, SImode, -offset);
/* Convert the byte offset within the word to a bit offset. */
- if (WORDS_BIG_ENDIAN)
- offset = 32 - (GET_MODE_BITSIZE (GET_MODE (ref)) + offset * 8);
- else
- offset *= 8;
+ offset *= BITS_PER_UNIT;
*pbitnum = GEN_INT (offset);
}
return 0;
MEM_VOLATILE_P (x) = MEM_VOLATILE_P (orig);
- MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (orig);
- MEM_SCALAR_P (x) = MEM_SCALAR_P (orig);
MEM_NOTRAP_P (x) = MEM_NOTRAP_P (orig);
MEM_READONLY_P (x) = MEM_READONLY_P (orig);
generated from one of the insn patterns. So if everything is
zero, the pattern is already up-to-date. */
if (!MEM_VOLATILE_P (ref)
- && !MEM_IN_STRUCT_P (ref)
- && !MEM_SCALAR_P (ref)
&& !MEM_NOTRAP_P (ref)
&& !MEM_READONLY_P (ref))
return;
*p1 = i1;
}
-/* Implement LEGITIMATE_CONSTANT_P. This is all constants for which we
- are willing to load the value into a register via a move pattern.
+/* Implement TARGET_LEGITIMATE_CONSTANT_P. This is all constants for which
+ we are willing to load the value into a register via a move pattern.
Normally this is all symbolic constants, integral constants that
take three or fewer instructions, and floating-point zero. */
bool
-alpha_legitimate_constant_p (rtx x)
+alpha_legitimate_constant_p (enum machine_mode mode, rtx x)
{
- enum machine_mode mode = GET_MODE (x);
HOST_WIDE_INT i0, i1;
switch (GET_CODE (x))
}
emit_move_insn (reg, operands[i]);
- usage = alloc_EXPR_LIST (0, gen_rtx_USE (VOIDmode, reg), usage);
+ use_reg (&usage, reg);
}
switch (GET_MODE (target))
{
meml = adjust_address (mem, QImode, ofs);
memh = adjust_address (mem, QImode, ofs+1);
- if (BYTES_BIG_ENDIAN)
- tmp = meml, meml = memh, memh = tmp;
extl = gen_reg_rtx (DImode);
exth = gen_reg_rtx (DImode);
emit_insn (gen_zero_extendqidi2 (extl, meml));
set_mem_alias_set (tmp, 0);
emit_move_insn (memh, tmp);
- if (WORDS_BIG_ENDIAN && sign && (size == 2 || size == 4))
- {
- emit_move_insn (addr, plus_constant (mema, -1));
-
- emit_insn (gen_extqh_be (extl, meml, addr));
- emit_insn (gen_extxl_be (exth, memh, GEN_INT (64), addr));
-
- addr = expand_binop (DImode, ior_optab, extl, exth, tgt, 1, OPTAB_WIDEN);
- addr = expand_binop (DImode, ashr_optab, addr, GEN_INT (64 - size*8),
- addr, 1, OPTAB_WIDEN);
- }
- else if (sign && size == 2)
+ if (sign && size == 2)
{
emit_move_insn (addr, plus_constant (mema, ofs+2));
- emit_insn (gen_extxl_le (extl, meml, GEN_INT (64), addr));
- emit_insn (gen_extqh_le (exth, memh, addr));
+ emit_insn (gen_extql (extl, meml, addr));
+ emit_insn (gen_extqh (exth, memh, addr));
/* We must use tgt here for the target. Alpha-vms port fails if we use
addr for the target, because addr is marked as a pointer and combine
}
else
{
- if (WORDS_BIG_ENDIAN)
- {
- emit_move_insn (addr, plus_constant (mema, ofs+size-1));
- switch ((int) size)
- {
- case 2:
- emit_insn (gen_extwh_be (extl, meml, addr));
- mode = HImode;
- break;
-
- case 4:
- emit_insn (gen_extlh_be (extl, meml, addr));
- mode = SImode;
- break;
-
- case 8:
- emit_insn (gen_extqh_be (extl, meml, addr));
- mode = DImode;
- break;
-
- default:
- gcc_unreachable ();
- }
- emit_insn (gen_extxl_be (exth, memh, GEN_INT (size*8), addr));
- }
- else
+ emit_move_insn (addr, plus_constant (mema, ofs));
+ emit_insn (gen_extxl (extl, meml, GEN_INT (size*8), addr));
+ switch ((int) size)
{
- emit_move_insn (addr, plus_constant (mema, ofs));
- emit_insn (gen_extxl_le (extl, meml, GEN_INT (size*8), addr));
- switch ((int) size)
- {
- case 2:
- emit_insn (gen_extwh_le (exth, memh, addr));
- mode = HImode;
- break;
-
- case 4:
- emit_insn (gen_extlh_le (exth, memh, addr));
- mode = SImode;
- break;
-
- case 8:
- emit_insn (gen_extqh_le (exth, memh, addr));
- mode = DImode;
- break;
-
- default:
- gcc_unreachable ();
- }
+ case 2:
+ emit_insn (gen_extwh (exth, memh, addr));
+ mode = HImode;
+ break;
+ case 4:
+ emit_insn (gen_extlh (exth, memh, addr));
+ mode = SImode;
+ break;
+ case 8:
+ emit_insn (gen_extqh (exth, memh, addr));
+ mode = DImode;
+ break;
+ default:
+ gcc_unreachable ();
}
addr = expand_binop (mode, ior_optab, gen_lowpart (mode, extl),
meml = adjust_address (dst, QImode, ofs);
memh = adjust_address (dst, QImode, ofs+1);
- if (BYTES_BIG_ENDIAN)
- addr = meml, meml = memh, memh = addr;
emit_move_insn (meml, dstl);
emit_move_insn (memh, dsth);
emit_move_insn (dsth, memh);
emit_move_insn (dstl, meml);
- if (WORDS_BIG_ENDIAN)
- {
- addr = copy_addr_to_reg (plus_constant (dsta, ofs+size-1));
- if (src != const0_rtx)
- {
- switch ((int) size)
- {
- case 2:
- emit_insn (gen_inswl_be (insh, gen_lowpart (HImode,src), addr));
- break;
- case 4:
- emit_insn (gen_insll_be (insh, gen_lowpart (SImode,src), addr));
- break;
- case 8:
- emit_insn (gen_insql_be (insh, gen_lowpart (DImode,src), addr));
- break;
- }
- emit_insn (gen_insxh (insl, gen_lowpart (DImode, src),
- GEN_INT (size*8), addr));
- }
+ addr = copy_addr_to_reg (plus_constant (dsta, ofs));
+
+ if (src != CONST0_RTX (GET_MODE (src)))
+ {
+ emit_insn (gen_insxh (insh, gen_lowpart (DImode, src),
+ GEN_INT (size*8), addr));
switch ((int) size)
{
case 2:
- emit_insn (gen_mskxl_be (dsth, dsth, GEN_INT (0xffff), addr));
+ emit_insn (gen_inswl (insl, gen_lowpart (HImode, src), addr));
break;
case 4:
- {
- rtx msk = immed_double_const (0xffffffff, 0, DImode);
- emit_insn (gen_mskxl_be (dsth, dsth, msk, addr));
- break;
- }
+ emit_insn (gen_insll (insl, gen_lowpart (SImode, src), addr));
+ break;
case 8:
- emit_insn (gen_mskxl_be (dsth, dsth, constm1_rtx, addr));
+ emit_insn (gen_insql (insl, gen_lowpart (DImode, src), addr));
break;
+ default:
+ gcc_unreachable ();
}
-
- emit_insn (gen_mskxh (dstl, dstl, GEN_INT (size*8), addr));
}
- else
- {
- addr = copy_addr_to_reg (plus_constant (dsta, ofs));
-
- if (src != CONST0_RTX (GET_MODE (src)))
- {
- emit_insn (gen_insxh (insh, gen_lowpart (DImode, src),
- GEN_INT (size*8), addr));
- switch ((int) size)
- {
- case 2:
- emit_insn (gen_inswl_le (insl, gen_lowpart (HImode, src), addr));
- break;
- case 4:
- emit_insn (gen_insll_le (insl, gen_lowpart (SImode, src), addr));
- break;
- case 8:
- emit_insn (gen_insql_le (insl, gen_lowpart (DImode, src), addr));
- break;
- }
- }
+ emit_insn (gen_mskxh (dsth, dsth, GEN_INT (size*8), addr));
- emit_insn (gen_mskxh (dsth, dsth, GEN_INT (size*8), addr));
-
- switch ((int) size)
- {
- case 2:
- emit_insn (gen_mskxl_le (dstl, dstl, GEN_INT (0xffff), addr));
- break;
- case 4:
- {
- rtx msk = immed_double_const (0xffffffff, 0, DImode);
- emit_insn (gen_mskxl_le (dstl, dstl, msk, addr));
- break;
- }
- case 8:
- emit_insn (gen_mskxl_le (dstl, dstl, constm1_rtx, addr));
- break;
- }
+ switch ((int) size)
+ {
+ case 2:
+ emit_insn (gen_mskwl (dstl, dstl, addr));
+ break;
+ case 4:
+ emit_insn (gen_mskll (dstl, dstl, addr));
+ break;
+ case 8:
+ emit_insn (gen_mskql (dstl, dstl, addr));
+ break;
+ default:
+ gcc_unreachable ();
}
if (src != CONST0_RTX (GET_MODE (src)))
dstl = expand_binop (DImode, ior_optab, insl, dstl, dstl, 0, OPTAB_WIDEN);
}
- if (WORDS_BIG_ENDIAN)
- {
- emit_move_insn (meml, dstl);
- emit_move_insn (memh, dsth);
- }
- else
- {
- /* Must store high before low for degenerate case of aligned. */
- emit_move_insn (memh, dsth);
- emit_move_insn (meml, dstl);
- }
+ /* Must store high before low for degenerate case of aligned. */
+ emit_move_insn (memh, dsth);
+ emit_move_insn (meml, dstl);
}
/* The block move code tries to maximize speed by separating loads and
HOST_WIDE_INT words, HOST_WIDE_INT ofs)
{
rtx const im8 = GEN_INT (-8);
- rtx const i64 = GEN_INT (64);
rtx ext_tmps[MAX_MOVE_WORDS], data_regs[MAX_MOVE_WORDS+1];
rtx sreg, areg, tmp, smema;
HOST_WIDE_INT i;
sreg = copy_addr_to_reg (smema);
areg = expand_binop (DImode, and_optab, sreg, GEN_INT (7), NULL,
1, OPTAB_WIDEN);
- if (WORDS_BIG_ENDIAN)
- emit_move_insn (sreg, plus_constant (sreg, 7));
for (i = 0; i < words; ++i)
{
- if (WORDS_BIG_ENDIAN)
- {
- emit_insn (gen_extqh_be (data_regs[i], data_regs[i], sreg));
- emit_insn (gen_extxl_be (ext_tmps[i], data_regs[i+1], i64, sreg));
- }
- else
- {
- emit_insn (gen_extxl_le (data_regs[i], data_regs[i], i64, sreg));
- emit_insn (gen_extqh_le (ext_tmps[i], data_regs[i+1], sreg));
- }
+ emit_insn (gen_extql (data_regs[i], data_regs[i], sreg));
+ emit_insn (gen_extqh (ext_tmps[i], data_regs[i+1], sreg));
emit_insn (gen_rtx_SET (VOIDmode, ext_tmps[i],
gen_rtx_IF_THEN_ELSE (DImode,
gen_rtx_EQ (DImode, areg,
HOST_WIDE_INT words, HOST_WIDE_INT ofs)
{
rtx const im8 = GEN_INT (-8);
- rtx const i64 = GEN_INT (64);
rtx ins_tmps[MAX_MOVE_WORDS];
rtx st_tmp_1, st_tmp_2, dreg;
rtx st_addr_1, st_addr_2, dmema;
/* Shift the input data into place. */
dreg = copy_addr_to_reg (dmema);
- if (WORDS_BIG_ENDIAN)
- emit_move_insn (dreg, plus_constant (dreg, 7));
if (data_regs != NULL)
{
for (i = words-1; i >= 0; --i)
{
- if (WORDS_BIG_ENDIAN)
- {
- emit_insn (gen_insql_be (ins_tmps[i], data_regs[i], dreg));
- emit_insn (gen_insxh (data_regs[i], data_regs[i], i64, dreg));
- }
- else
- {
- emit_insn (gen_insxh (ins_tmps[i], data_regs[i], i64, dreg));
- emit_insn (gen_insql_le (data_regs[i], data_regs[i], dreg));
- }
+ emit_insn (gen_insqh (ins_tmps[i], data_regs[i], dreg));
+ emit_insn (gen_insql (data_regs[i], data_regs[i], dreg));
}
for (i = words-1; i > 0; --i)
{
}
/* Split and merge the ends with the destination data. */
- if (WORDS_BIG_ENDIAN)
- {
- emit_insn (gen_mskxl_be (st_tmp_2, st_tmp_2, constm1_rtx, dreg));
- emit_insn (gen_mskxh (st_tmp_1, st_tmp_1, i64, dreg));
- }
- else
- {
- emit_insn (gen_mskxh (st_tmp_2, st_tmp_2, i64, dreg));
- emit_insn (gen_mskxl_le (st_tmp_1, st_tmp_1, constm1_rtx, dreg));
- }
+ emit_insn (gen_mskqh (st_tmp_2, st_tmp_2, dreg));
+ emit_insn (gen_mskql (st_tmp_1, st_tmp_1, dreg));
if (data_regs != NULL)
{
}
/* Store it all. */
- if (WORDS_BIG_ENDIAN)
- emit_move_insn (st_addr_1, st_tmp_1);
- else
- emit_move_insn (st_addr_2, st_tmp_2);
+ emit_move_insn (st_addr_2, st_tmp_2);
for (i = words-1; i > 0; --i)
{
rtx tmp = change_address (dmem, DImode,
gen_rtx_AND (DImode,
- plus_constant(dmema,
- WORDS_BIG_ENDIAN ? i*8-1 : i*8),
+ plus_constant (dmema, i*8),
im8));
set_mem_alias_set (tmp, 0);
emit_move_insn (tmp, data_regs ? ins_tmps[i-1] : const0_rtx);
}
- if (WORDS_BIG_ENDIAN)
- emit_move_insn (st_addr_2, st_tmp_2);
- else
- emit_move_insn (st_addr_1, st_tmp_1);
+ emit_move_insn (st_addr_1, st_tmp_1);
}
emit_insn (fn (res, mem, val));
}
+/* Subroutines of the atomic operation splitters.  Emit barriers
+   as needed for the memory MODEL.  */
+
+/* Emit a memory barrier before the atomic access when MODEL carries
+   release semantics (release, acq_rel, seq_cst); the weaker models
+   (relaxed, consume, acquire) need no barrier on this side.  */
+
+static void
+alpha_pre_atomic_barrier (enum memmodel model)
+{
+  switch (model)
+    {
+    case MEMMODEL_RELAXED:
+    case MEMMODEL_CONSUME:
+    case MEMMODEL_ACQUIRE:
+      break;
+    case MEMMODEL_RELEASE:
+    case MEMMODEL_ACQ_REL:
+    case MEMMODEL_SEQ_CST:
+      emit_insn (gen_memory_barrier ());
+      break;
+    default:
+      gcc_unreachable ();
+    }
+}
+
+/* Counterpart to alpha_pre_atomic_barrier: emit a memory barrier after
+   the atomic access when MODEL carries acquire semantics (acquire,
+   acq_rel, seq_cst); relaxed, consume and release need no trailing
+   barrier.  */
+
+static void
+alpha_post_atomic_barrier (enum memmodel model)
+{
+  switch (model)
+    {
+    case MEMMODEL_RELAXED:
+    case MEMMODEL_CONSUME:
+    case MEMMODEL_RELEASE:
+      break;
+    case MEMMODEL_ACQUIRE:
+    case MEMMODEL_ACQ_REL:
+    case MEMMODEL_SEQ_CST:
+      emit_insn (gen_memory_barrier ());
+      break;
+    default:
+      gcc_unreachable ();
+    }
+}
+
/* A subroutine of the atomic operation splitters. Emit an insxl
instruction in MODE. */
rtx ret = gen_reg_rtx (DImode);
rtx (*fn) (rtx, rtx, rtx);
- if (WORDS_BIG_ENDIAN)
- {
- if (mode == QImode)
- fn = gen_insbl_be;
- else
- fn = gen_inswl_be;
- }
- else
+ switch (mode)
{
- if (mode == QImode)
- fn = gen_insbl_le;
- else
- fn = gen_inswl_le;
+ case QImode:
+ fn = gen_insbl;
+ break;
+ case HImode:
+ fn = gen_inswl;
+ break;
+ case SImode:
+ fn = gen_insll;
+ break;
+ case DImode:
+ fn = gen_insql;
+ break;
+ default:
+ gcc_unreachable ();
}
- /* The insbl and inswl patterns require a register operand. */
+
op1 = force_reg (mode, op1);
emit_insn (fn (ret, op1, op2));
a scratch register. */
void
-alpha_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
- rtx before, rtx after, rtx scratch)
+alpha_split_atomic_op (enum rtx_code code, rtx mem, rtx val, rtx before,
+ rtx after, rtx scratch, enum memmodel model)
{
enum machine_mode mode = GET_MODE (mem);
rtx label, x, cond = gen_rtx_REG (DImode, REGNO (scratch));
- emit_insn (gen_memory_barrier ());
+ alpha_pre_atomic_barrier (model);
label = gen_label_rtx ();
emit_label (label);
x = gen_rtx_EQ (DImode, cond, const0_rtx);
emit_unlikely_jump (x, label);
- emit_insn (gen_memory_barrier ());
+ alpha_post_atomic_barrier (model);
}
/* Expand a compare and swap operation. */
void
-alpha_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
- rtx scratch)
+alpha_split_compare_and_swap (rtx operands[])
{
- enum machine_mode mode = GET_MODE (mem);
- rtx label1, label2, x, cond = gen_lowpart (DImode, scratch);
+ rtx cond, retval, mem, oldval, newval;
+ bool is_weak;
+ enum memmodel mod_s, mod_f;
+ enum machine_mode mode;
+ rtx label1, label2, x;
+
+ cond = operands[0];
+ retval = operands[1];
+ mem = operands[2];
+ oldval = operands[3];
+ newval = operands[4];
+ is_weak = (operands[5] != const0_rtx);
+ mod_s = (enum memmodel) INTVAL (operands[6]);
+ mod_f = (enum memmodel) INTVAL (operands[7]);
+ mode = GET_MODE (mem);
- emit_insn (gen_memory_barrier ());
+ alpha_pre_atomic_barrier (mod_s);
- label1 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
+ label1 = NULL_RTX;
+ if (!is_weak)
+ {
+ label1 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
+ emit_label (XEXP (label1, 0));
+ }
label2 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
- emit_label (XEXP (label1, 0));
emit_load_locked (mode, retval, mem);
x = gen_lowpart (DImode, retval);
if (oldval == const0_rtx)
- x = gen_rtx_NE (DImode, x, const0_rtx);
+ {
+ emit_move_insn (cond, const0_rtx);
+ x = gen_rtx_NE (DImode, x, const0_rtx);
+ }
else
{
x = gen_rtx_EQ (DImode, x, oldval);
}
emit_unlikely_jump (x, label2);
- emit_move_insn (scratch, newval);
- emit_store_conditional (mode, cond, mem, scratch);
+ emit_move_insn (cond, newval);
+ emit_store_conditional (mode, cond, mem, gen_lowpart (mode, cond));
- x = gen_rtx_EQ (DImode, cond, const0_rtx);
- emit_unlikely_jump (x, label1);
+ if (!is_weak)
+ {
+ x = gen_rtx_EQ (DImode, cond, const0_rtx);
+ emit_unlikely_jump (x, label1);
+ }
- emit_insn (gen_memory_barrier ());
- emit_label (XEXP (label2, 0));
+ if (mod_f != MEMMODEL_RELAXED)
+ emit_label (XEXP (label2, 0));
+
+ alpha_post_atomic_barrier (mod_s);
+
+ if (mod_f == MEMMODEL_RELAXED)
+ emit_label (XEXP (label2, 0));
}
void
-alpha_expand_compare_and_swap_12 (rtx dst, rtx mem, rtx oldval, rtx newval)
+alpha_expand_compare_and_swap_12 (rtx operands[])
{
- enum machine_mode mode = GET_MODE (mem);
+ rtx cond, dst, mem, oldval, newval, is_weak, mod_s, mod_f;
+ enum machine_mode mode;
rtx addr, align, wdst;
- rtx (*fn5) (rtx, rtx, rtx, rtx, rtx);
+ rtx (*gen) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
+
+ cond = operands[0];
+ dst = operands[1];
+ mem = operands[2];
+ oldval = operands[3];
+ newval = operands[4];
+ is_weak = operands[5];
+ mod_s = operands[6];
+ mod_f = operands[7];
+ mode = GET_MODE (mem);
+
+ /* We forced the address into a register via mem_noofs_operand. */
+ addr = XEXP (mem, 0);
+ gcc_assert (register_operand (addr, DImode));
- addr = force_reg (DImode, XEXP (mem, 0));
align = expand_simple_binop (Pmode, AND, addr, GEN_INT (-8),
NULL_RTX, 1, OPTAB_DIRECT);
oldval = convert_modes (DImode, mode, oldval, 1);
- newval = emit_insxl (mode, newval, addr);
+
+ if (newval != const0_rtx)
+ newval = emit_insxl (mode, newval, addr);
wdst = gen_reg_rtx (DImode);
if (mode == QImode)
- fn5 = gen_sync_compare_and_swapqi_1;
+ gen = gen_atomic_compare_and_swapqi_1;
else
- fn5 = gen_sync_compare_and_swaphi_1;
- emit_insn (fn5 (wdst, addr, oldval, newval, align));
+ gen = gen_atomic_compare_and_swaphi_1;
+ emit_insn (gen (cond, wdst, mem, oldval, newval, align,
+ is_weak, mod_s, mod_f));
emit_move_insn (dst, gen_lowpart (mode, wdst));
}
void
-alpha_split_compare_and_swap_12 (enum machine_mode mode, rtx dest, rtx addr,
- rtx oldval, rtx newval, rtx align,
- rtx scratch, rtx cond)
+alpha_split_compare_and_swap_12 (rtx operands[])
{
- rtx label1, label2, mem, width, mask, x;
+ rtx cond, dest, orig_mem, oldval, newval, align, scratch;
+ enum machine_mode mode;
+ bool is_weak;
+ enum memmodel mod_s, mod_f;
+ rtx label1, label2, mem, addr, width, mask, x;
+
+ cond = operands[0];
+ dest = operands[1];
+ orig_mem = operands[2];
+ oldval = operands[3];
+ newval = operands[4];
+ align = operands[5];
+ is_weak = (operands[6] != const0_rtx);
+ mod_s = (enum memmodel) INTVAL (operands[7]);
+ mod_f = (enum memmodel) INTVAL (operands[8]);
+ scratch = operands[9];
+ mode = GET_MODE (orig_mem);
+ addr = XEXP (orig_mem, 0);
mem = gen_rtx_MEM (DImode, align);
- MEM_VOLATILE_P (mem) = 1;
+ MEM_VOLATILE_P (mem) = MEM_VOLATILE_P (orig_mem);
+ if (MEM_ALIAS_SET (orig_mem) == ALIAS_SET_MEMORY_BARRIER)
+ set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
- emit_insn (gen_memory_barrier ());
- label1 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
+ alpha_pre_atomic_barrier (mod_s);
+
+ label1 = NULL_RTX;
+ if (!is_weak)
+ {
+ label1 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
+ emit_label (XEXP (label1, 0));
+ }
label2 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
- emit_label (XEXP (label1, 0));
emit_load_locked (DImode, scratch, mem);
width = GEN_INT (GET_MODE_BITSIZE (mode));
mask = GEN_INT (mode == QImode ? 0xff : 0xffff);
- if (WORDS_BIG_ENDIAN)
- emit_insn (gen_extxl_be (dest, scratch, width, addr));
- else
- emit_insn (gen_extxl_le (dest, scratch, width, addr));
+ emit_insn (gen_extxl (dest, scratch, width, addr));
if (oldval == const0_rtx)
- x = gen_rtx_NE (DImode, dest, const0_rtx);
+ {
+ emit_move_insn (cond, const0_rtx);
+ x = gen_rtx_NE (DImode, dest, const0_rtx);
+ }
else
{
x = gen_rtx_EQ (DImode, dest, oldval);
}
emit_unlikely_jump (x, label2);
- if (WORDS_BIG_ENDIAN)
- emit_insn (gen_mskxl_be (scratch, scratch, mask, addr));
- else
- emit_insn (gen_mskxl_le (scratch, scratch, mask, addr));
- emit_insn (gen_iordi3 (scratch, scratch, newval));
+ emit_insn (gen_mskxl (cond, scratch, mask, addr));
- emit_store_conditional (DImode, scratch, mem, scratch);
+ if (newval != const0_rtx)
+ emit_insn (gen_iordi3 (cond, cond, newval));
- x = gen_rtx_EQ (DImode, scratch, const0_rtx);
- emit_unlikely_jump (x, label1);
+ emit_store_conditional (DImode, cond, mem, cond);
+
+ if (!is_weak)
+ {
+ x = gen_rtx_EQ (DImode, cond, const0_rtx);
+ emit_unlikely_jump (x, label1);
+ }
- emit_insn (gen_memory_barrier ());
- emit_label (XEXP (label2, 0));
+ if (mod_f != MEMMODEL_RELAXED)
+ emit_label (XEXP (label2, 0));
+
+ alpha_post_atomic_barrier (mod_s);
+
+ if (mod_f == MEMMODEL_RELAXED)
+ emit_label (XEXP (label2, 0));
}
/* Expand an atomic exchange operation. */
void
-alpha_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
+alpha_split_atomic_exchange (rtx operands[])
{
- enum machine_mode mode = GET_MODE (mem);
- rtx label, x, cond = gen_lowpart (DImode, scratch);
+ rtx retval, mem, val, scratch;
+ enum memmodel model;
+ enum machine_mode mode;
+ rtx label, x, cond;
+
+ retval = operands[0];
+ mem = operands[1];
+ val = operands[2];
+ model = (enum memmodel) INTVAL (operands[3]);
+ scratch = operands[4];
+ mode = GET_MODE (mem);
+ cond = gen_lowpart (DImode, scratch);
+
+ alpha_pre_atomic_barrier (model);
label = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
emit_label (XEXP (label, 0));
x = gen_rtx_EQ (DImode, cond, const0_rtx);
emit_unlikely_jump (x, label);
- emit_insn (gen_memory_barrier ());
+ alpha_post_atomic_barrier (model);
}
void
-alpha_expand_lock_test_and_set_12 (rtx dst, rtx mem, rtx val)
+alpha_expand_atomic_exchange_12 (rtx operands[])
{
- enum machine_mode mode = GET_MODE (mem);
+ rtx dst, mem, val, model;
+ enum machine_mode mode;
rtx addr, align, wdst;
- rtx (*fn4) (rtx, rtx, rtx, rtx);
+ rtx (*gen) (rtx, rtx, rtx, rtx, rtx);
+
+ dst = operands[0];
+ mem = operands[1];
+ val = operands[2];
+ model = operands[3];
+ mode = GET_MODE (mem);
- /* Force the address into a register. */
- addr = force_reg (DImode, XEXP (mem, 0));
+ /* We forced the address into a register via mem_noofs_operand. */
+ addr = XEXP (mem, 0);
+ gcc_assert (register_operand (addr, DImode));
- /* Align it to a multiple of 8. */
align = expand_simple_binop (Pmode, AND, addr, GEN_INT (-8),
NULL_RTX, 1, OPTAB_DIRECT);
/* Insert val into the correct byte location within the word. */
- val = emit_insxl (mode, val, addr);
+ if (val != const0_rtx)
+ val = emit_insxl (mode, val, addr);
wdst = gen_reg_rtx (DImode);
if (mode == QImode)
- fn4 = gen_sync_lock_test_and_setqi_1;
+ gen = gen_atomic_exchangeqi_1;
else
- fn4 = gen_sync_lock_test_and_sethi_1;
- emit_insn (fn4 (wdst, addr, val, align));
+ gen = gen_atomic_exchangehi_1;
+ emit_insn (gen (wdst, mem, val, align, model));
emit_move_insn (dst, gen_lowpart (mode, wdst));
}
void
-alpha_split_lock_test_and_set_12 (enum machine_mode mode, rtx dest, rtx addr,
- rtx val, rtx align, rtx scratch)
+alpha_split_atomic_exchange_12 (rtx operands[])
{
+ rtx dest, orig_mem, addr, val, align, scratch;
rtx label, mem, width, mask, x;
+ enum machine_mode mode;
+ enum memmodel model;
+
+ dest = operands[0];
+ orig_mem = operands[1];
+ val = operands[2];
+ align = operands[3];
+ model = (enum memmodel) INTVAL (operands[4]);
+ scratch = operands[5];
+ mode = GET_MODE (orig_mem);
+ addr = XEXP (orig_mem, 0);
mem = gen_rtx_MEM (DImode, align);
- MEM_VOLATILE_P (mem) = 1;
+ MEM_VOLATILE_P (mem) = MEM_VOLATILE_P (orig_mem);
+ if (MEM_ALIAS_SET (orig_mem) == ALIAS_SET_MEMORY_BARRIER)
+ set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
+
+ alpha_pre_atomic_barrier (model);
label = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
emit_label (XEXP (label, 0));
width = GEN_INT (GET_MODE_BITSIZE (mode));
mask = GEN_INT (mode == QImode ? 0xff : 0xffff);
- if (WORDS_BIG_ENDIAN)
- {
- emit_insn (gen_extxl_be (dest, scratch, width, addr));
- emit_insn (gen_mskxl_be (scratch, scratch, mask, addr));
- }
- else
- {
- emit_insn (gen_extxl_le (dest, scratch, width, addr));
- emit_insn (gen_mskxl_le (scratch, scratch, mask, addr));
- }
- emit_insn (gen_iordi3 (scratch, scratch, val));
+ emit_insn (gen_extxl (dest, scratch, width, addr));
+ emit_insn (gen_mskxl (scratch, scratch, mask, addr));
+ if (val != const0_rtx)
+ emit_insn (gen_iordi3 (scratch, scratch, val));
emit_store_conditional (DImode, scratch, mem, scratch);
x = gen_rtx_EQ (DImode, scratch, const0_rtx);
emit_unlikely_jump (x, label);
- emit_insn (gen_memory_barrier ());
+ alpha_post_atomic_barrier (model);
}
\f
/* Adjust the cost of a scheduling dependency. Return the new cost of
\f
/* Machine-specific function data. */
+struct GTY(()) alpha_links;
+
struct GTY(()) machine_function
{
/* For OSF. */
const char *some_ld_name;
/* For TARGET_LD_BUGGY_LDGP. */
- struct rtx_def *gp_save_rtx;
+ rtx gp_save_rtx;
/* For VMS condition handlers. */
- bool uses_condition_handler;
+ bool uses_condition_handler;
+
+ /* Linkage entries. */
+ splay_tree GTY ((param1_is (char *), param2_is (struct alpha_links *)))
+ links;
};
/* How to allocate a 'struct machine_function'. */
return m;
}
+/* If a GP save slot RTX was cached in cfun->machine (gp_save_rtx, used
+   for TARGET_LD_BUGGY_LDGP), instantiate the virtual registers it
+   contains.  NOTE(review): presumably registered as the
+   TARGET_INSTANTIATE_DECLS hook -- confirm against the target hook
+   initializers, which are outside this hunk.  */
+
+static void
+alpha_instantiate_decls (void)
+{
+  if (cfun->machine->gp_save_rtx != NULL_RTX)
+    instantiate_decl_rtl (cfun->machine->gp_save_rtx);
+}
+
static int
alpha_ra_ever_killed (void)
{
break;
case 's':
- /* Write the constant value divided by 8 for little-endian mode or
- (56 - value) / 8 for big-endian mode. */
-
+ /* Write the constant value divided by 8. */
if (!CONST_INT_P (x)
- || (unsigned HOST_WIDE_INT) INTVAL (x) >= (WORDS_BIG_ENDIAN
- ? 56
- : 64)
+ || (unsigned HOST_WIDE_INT) INTVAL (x) >= 64
|| (INTVAL (x) & 7) != 0)
output_operand_lossage ("invalid %%s value");
- fprintf (file, HOST_WIDE_INT_PRINT_DEC,
- WORDS_BIG_ENDIAN
- ? (56 - INTVAL (x)) / 8
- : INTVAL (x) / 8);
+ fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) / 8);
break;
case 'S':
mem = adjust_address (m_tramp, Pmode, 24);
emit_move_insn (mem, chain_value);
- if (!TARGET_ABI_OPEN_VMS)
+ if (TARGET_ABI_OSF)
{
emit_insn (gen_imb ());
-#ifdef ENABLE_EXECUTE_STACK
+#ifdef HAVE_ENABLE_EXECUTE_STACK
emit_library_call (init_one_libfunc ("__enable_execute_stack"),
LCT_NORMAL, VOIDmode, 1, XEXP (m_tramp, 0), Pmode);
#endif
and the rest are pushed. */
static rtx
-alpha_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+alpha_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int basereg;
int num_args;
(TYPE is null for libcalls where that information may not be available.) */
static void
-alpha_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+alpha_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
bool onstack = targetm.calls.must_pass_in_stack (mode, type);
int increment = onstack ? 6 : ALPHA_ARG_SIZE (mode, type, named);
}
static int
-alpha_arg_partial_bytes (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
+alpha_arg_partial_bytes (cumulative_args_t cum_v,
enum machine_mode mode ATTRIBUTE_UNUSED,
tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
int words = 0;
+ CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED = get_cumulative_args (cum_v);
#if TARGET_ABI_OPEN_VMS
if (cum->num_args < 6
/* Return true if TYPE should be passed by invisible reference. */
static bool
-alpha_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
+alpha_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
enum machine_mode mode,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
variable number of arguments. */
static void
-alpha_setup_incoming_varargs (CUMULATIVE_ARGS *pcum, enum machine_mode mode,
+alpha_setup_incoming_varargs (cumulative_args_t pcum, enum machine_mode mode,
tree type, int *pretend_size, int no_rtl)
{
- CUMULATIVE_ARGS cum = *pcum;
+ CUMULATIVE_ARGS cum = *get_cumulative_args (pcum);
/* Skip the current argument. */
- targetm.calls.function_arg_advance (&cum, mode, type, true);
+ targetm.calls.function_arg_advance (pack_cumulative_args (&cum), mode, type,
+ true);
#if TARGET_ABI_OPEN_VMS
/* For VMS, we allocate space for all 6 arg registers plus a count.
if (TARGET_ABI_OPEN_VMS)
{
t = make_tree (ptr_type_node, virtual_incoming_args_rtx);
- t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
- size_int (offset + NUM_ARGS * UNITS_PER_WORD));
+ t = fold_build_pointer_plus_hwi (t, offset + NUM_ARGS * UNITS_PER_WORD);
t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
valist, offset_field, NULL_TREE);
t = make_tree (ptr_type_node, virtual_incoming_args_rtx);
- t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
- size_int (offset));
+ t = fold_build_pointer_plus_hwi (t, offset);
t = build2 (MODIFY_EXPR, TREE_TYPE (base_field), base_field, t);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
/* Build the final address and force that value into a temporary. */
- addr = build2 (POINTER_PLUS_EXPR, ptr_type, fold_convert (ptr_type, base),
- fold_convert (sizetype, addend));
+ addr = fold_build_pointer_plus (fold_convert (ptr_type, base), addend);
internal_post = NULL;
gimplify_expr (&addr, pre_p, &internal_post, is_gimple_val, fb_rvalue);
gimple_seq_add_seq (pre_p, internal_post);
static enum insn_code const code_for_builtin[ALPHA_BUILTIN_max] = {
CODE_FOR_builtin_cmpbge,
- CODE_FOR_builtin_extbl,
- CODE_FOR_builtin_extwl,
- CODE_FOR_builtin_extll,
- CODE_FOR_builtin_extql,
- CODE_FOR_builtin_extwh,
- CODE_FOR_builtin_extlh,
- CODE_FOR_builtin_extqh,
+ CODE_FOR_extbl,
+ CODE_FOR_extwl,
+ CODE_FOR_extll,
+ CODE_FOR_extql,
+ CODE_FOR_extwh,
+ CODE_FOR_extlh,
+ CODE_FOR_extqh,
CODE_FOR_builtin_insbl,
CODE_FOR_builtin_inswl,
CODE_FOR_builtin_insll,
- CODE_FOR_builtin_insql,
- CODE_FOR_builtin_inswh,
- CODE_FOR_builtin_inslh,
- CODE_FOR_builtin_insqh,
- CODE_FOR_builtin_mskbl,
- CODE_FOR_builtin_mskwl,
- CODE_FOR_builtin_mskll,
- CODE_FOR_builtin_mskql,
- CODE_FOR_builtin_mskwh,
- CODE_FOR_builtin_msklh,
- CODE_FOR_builtin_mskqh,
+ CODE_FOR_insql,
+ CODE_FOR_inswh,
+ CODE_FOR_inslh,
+ CODE_FOR_insqh,
+ CODE_FOR_mskbl,
+ CODE_FOR_mskwl,
+ CODE_FOR_mskll,
+ CODE_FOR_mskql,
+ CODE_FOR_mskwh,
+ CODE_FOR_msklh,
+ CODE_FOR_mskqh,
CODE_FOR_umuldi3_highpart,
CODE_FOR_builtin_zap,
CODE_FOR_builtin_zapnot,
dimode_integer_type_node = lang_hooks.types.type_for_mode (DImode, 0);
- /* Fwrite on VMS is non-standard. */
-#if TARGET_ABI_OPEN_VMS
- implicit_built_in_decls[(int) BUILT_IN_FWRITE] = NULL_TREE;
- implicit_built_in_decls[(int) BUILT_IN_FWRITE_UNLOCKED] = NULL_TREE;
-#endif
-
- ftype = build_function_type (dimode_integer_type_node, void_list_node);
+ ftype = build_function_type_list (dimode_integer_type_node, NULL_TREE);
alpha_add_builtins (zero_arg_builtins, ARRAY_SIZE (zero_arg_builtins),
ftype);
alpha_add_builtins (two_arg_builtins, ARRAY_SIZE (two_arg_builtins),
ftype);
- ftype = build_function_type (ptr_type_node, void_list_node);
+ ftype = build_function_type_list (ptr_type_node, NULL_TREE);
alpha_builtin_function ("__builtin_thread_pointer", ftype,
ALPHA_BUILTIN_THREAD_POINTER, ECF_NOTHROW);
NULL_TREE);
alpha_builtin_function ("__builtin_revert_vms_condition_handler", ftype,
ALPHA_BUILTIN_REVERT_VMS_CONDITION_HANDLER, 0);
+
+ vms_patch_builtins ();
}
alpha_v8qi_u = build_vector_type (unsigned_intQI_type_node, 8);
unsigned HOST_WIDE_INT loc;
loc = opint[1] & 7;
- if (BYTES_BIG_ENDIAN)
- loc ^= 7;
- loc *= 8;
+ loc *= BITS_PER_UNIT;
if (loc != 0)
{
tree *zap_op = NULL;
loc = opint[1] & 7;
- if (BYTES_BIG_ENDIAN)
- loc ^= 7;
bytemask <<= loc;
temp = opint[0];
unsigned HOST_WIDE_INT loc;
loc = opint[1] & 7;
- if (BYTES_BIG_ENDIAN)
- loc ^= 7;
bytemask <<= loc;
if (is_high)
rtx addr, mem, insn;
addr = plus_constant (base_reg, base_ofs);
- mem = gen_rtx_MEM (DImode, addr);
- set_mem_alias_set (mem, alpha_sr_alias_set);
+ mem = gen_frame_mem (DImode, addr);
insn = emit_move_insn (mem, value);
RTX_FRAME_RELATED_P (insn) = 1;
sa_size = alpha_sa_size ();
frame_size = compute_frame_size (get_frame_size (), sa_size);
- if (flag_stack_usage)
+ if (flag_stack_usage_info)
current_function_static_stack_size = frame_size;
if (TARGET_ABI_OPEN_VMS)
emit_move_insn (last, const0_rtx);
}
- if (TARGET_ABI_WINDOWS_NT || flag_stack_check)
+ if (flag_stack_check)
{
- /* For NT stack unwind (done by 'reverse execution'), it's
- not OK to take the result of a loop, even though the value
- is already in ptr, so we reload it via a single operation
- and subtract it to sp.
-
- Same if -fstack-check is specified, because the probed stack
- size is not equal to the frame size.
-
- Yes, that's correct -- we have to reload the whole constant
- into a temporary via ldah+lda then subtract from sp. */
-
+ /* If -fstack-check is specified we have to load the entire
+ constant into a register and subtract from the sp in one go,
+ because the probed stack size is not equal to the frame size. */
HOST_WIDE_INT lo, hi;
lo = ((frame_size & 0xffff) ^ 0x8000) - 0x8000;
hi = frame_size - lo;
if (TARGET_ABI_OPEN_VMS
&& !TREE_PUBLIC (decl)
&& DECL_CONTEXT (decl)
- && !TYPE_P (DECL_CONTEXT (decl)))
+ && !TYPE_P (DECL_CONTEXT (decl))
+ && TREE_CODE (DECL_CONTEXT (decl)) != TRANSLATION_UNIT_DECL)
{
strcpy (tramp_label, fnname);
strcat (tramp_label, "..tr");
if (TARGET_ABI_OPEN_VMS)
fprintf (file, "\t.base $%d\n", vms_base_regno);
- if (!TARGET_ABI_OPEN_VMS && TARGET_IEEE_CONFORMANT
+ if (TARGET_ABI_OSF
+ && TARGET_IEEE_CONFORMANT
&& !flag_inhibit_size_directive)
{
/* Set flags in procedure descriptor to request IEEE-conformant
fprintf (file, "\t.handler_data %d\n", VMS_COND_HANDLER_FP_OFFSET);
}
- /* Ifdef'ed cause link_section are only available then. */
+#ifdef TARGET_VMS_CRASH_DEBUG
+ /* Support of minimal traceback info. */
switch_to_section (readonly_data_section);
fprintf (file, "\t.align 3\n");
assemble_name (file, fnname); fputs ("..na:\n", file);
fputs ("\t.ascii \"", file);
assemble_name (file, fnname);
fputs ("\\0\"\n", file);
- alpha_need_linkage (fnname, 1);
switch_to_section (text_section);
#endif
+#endif /* TARGET_ABI_OPEN_VMS */
}
/* Emit the .prologue note at the scheduled end of the prologue. */
{
if (TARGET_ABI_OPEN_VMS)
fputs ("\t.prologue\n", file);
- else if (TARGET_ABI_WINDOWS_NT)
- fputs ("\t.prologue 0\n", file);
else if (!flag_inhibit_size_directive)
fprintf (file, "\t.prologue %d\n",
alpha_function_needs_gp || cfun->is_thunk);
alpha_sa_mask (&imask, &fmask);
fp_is_frame_pointer
- = ((TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_STACK)
- || (!TARGET_ABI_OPEN_VMS && frame_pointer_needed));
+ = (TARGET_ABI_OPEN_VMS
+ ? alpha_procedure_type == PT_STACK
+ : frame_pointer_needed);
fp_offset = 0;
sa_reg = stack_pointer_rtx;
if (sa_size)
{
/* If we have a frame pointer, restore SP from it. */
- if ((TARGET_ABI_OPEN_VMS
- && vms_unwind_regno == HARD_FRAME_POINTER_REGNUM)
- || (!TARGET_ABI_OPEN_VMS && frame_pointer_needed))
+ if (TARGET_ABI_OPEN_VMS
+ ? vms_unwind_regno == HARD_FRAME_POINTER_REGNUM
+ : frame_pointer_needed)
emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
/* Cope with very large offsets to the register save area. */
/* Restore registers in order, excepting a true frame pointer. */
- mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, reg_offset));
- if (! eh_ofs)
- set_mem_alias_set (mem, alpha_sr_alias_set);
+ mem = gen_frame_mem (DImode, plus_constant (sa_reg, reg_offset));
reg = gen_rtx_REG (DImode, REG_RA);
emit_move_insn (reg, mem);
cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, cfa_restores);
fp_offset = reg_offset;
else
{
- mem = gen_rtx_MEM (DImode, plus_constant(sa_reg, reg_offset));
- set_mem_alias_set (mem, alpha_sr_alias_set);
+ mem = gen_frame_mem (DImode,
+ plus_constant (sa_reg, reg_offset));
reg = gen_rtx_REG (DImode, i);
emit_move_insn (reg, mem);
cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg,
for (i = 0; i < 31; ++i)
if (fmask & (1UL << i))
{
- mem = gen_rtx_MEM (DFmode, plus_constant(sa_reg, reg_offset));
- set_mem_alias_set (mem, alpha_sr_alias_set);
+ mem = gen_frame_mem (DFmode, plus_constant (sa_reg, reg_offset));
reg = gen_rtx_REG (DFmode, i+32);
emit_move_insn (reg, mem);
cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, cfa_restores);
if (fp_is_frame_pointer)
{
emit_insn (gen_blockage ());
- mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, fp_offset));
- set_mem_alias_set (mem, alpha_sr_alias_set);
+ mem = gen_frame_mem (DImode, plus_constant (sa_reg, fp_offset));
emit_move_insn (hard_frame_pointer_rtx, mem);
cfa_restores = alloc_reg_note (REG_CFA_RESTORE,
hard_frame_pointer_rtx, cfa_restores);
output_asm_insn (get_insn_template (CODE_FOR_nop, NULL), NULL);
#if TARGET_ABI_OPEN_VMS
- alpha_write_linkage (file, fnname, decl);
+ /* Write the linkage entries. */
+ alpha_write_linkage (file, fnname);
#endif
/* End the function. */
- if (!flag_inhibit_size_directive)
+ if (TARGET_ABI_OPEN_VMS
+ || !flag_inhibit_size_directive)
{
fputs ("\t.end ", file);
assemble_name (file, fnname);
inside_function = FALSE;
}
-#if TARGET_ABI_OPEN_VMS
-void avms_asm_output_external (FILE *file, tree decl ATTRIBUTE_UNUSED, const char *name)
-{
-#ifdef DO_CRTL_NAMES
- DO_CRTL_NAMES;
-#endif
-}
-#endif
-
#if TARGET_ABI_OSF
/* Emit a tail call to FUNCTION after adjusting THIS by DELTA.
fputs ("\t.set noreorder\n", asm_out_file);
fputs ("\t.set volatile\n", asm_out_file);
- if (!TARGET_ABI_OPEN_VMS)
+ if (TARGET_ABI_OSF)
fputs ("\t.set noat\n", asm_out_file);
if (TARGET_EXPLICIT_RELOCS)
fputs ("\t.set nomacro\n", asm_out_file);
/* Structure to collect function names for final output in link section. */
/* Note that items marked with GTY can't be ifdef'ed out. */
-enum links_kind {KIND_UNUSED, KIND_LOCAL, KIND_EXTERN};
-enum reloc_kind {KIND_LINKAGE, KIND_CODEADDR};
+enum reloc_kind
+{
+ KIND_LINKAGE,
+ KIND_CODEADDR
+};
struct GTY(()) alpha_links
{
- int num;
- const char *target;
+ rtx func;
rtx linkage;
- enum links_kind lkind;
enum reloc_kind rkind;
};
-struct GTY(()) alpha_funcs
-{
- int num;
- splay_tree GTY ((param1_is (char *), param2_is (struct alpha_links *)))
- links;
-};
-
-static GTY ((param1_is (char *), param2_is (struct alpha_links *)))
- splay_tree alpha_links_tree;
-static GTY ((param1_is (tree), param2_is (struct alpha_funcs *)))
- splay_tree alpha_funcs_tree;
-
-static GTY(()) int alpha_funcs_num;
-
#if TARGET_ABI_OPEN_VMS
/* Return the VMS argument type corresponding to MODE. */
return GEN_INT (regval);
}
\f
-/* Register the need for a (fake) .linkage entry for calls to function NAME.
- IS_LOCAL is 1 if this is for a definition, 0 if this is for a real call.
- Return a SYMBOL_REF suited to the call instruction. */
-
-rtx
-alpha_need_linkage (const char *name, int is_local)
-{
- splay_tree_node node;
- struct alpha_links *al;
- const char *target;
- tree id;
-
- if (name[0] == '*')
- name++;
-
- if (is_local)
- {
- struct alpha_funcs *cfaf;
-
- if (!alpha_funcs_tree)
- alpha_funcs_tree = splay_tree_new_ggc
- (splay_tree_compare_pointers,
- ggc_alloc_splay_tree_tree_node_tree_node_splay_tree_s,
- ggc_alloc_splay_tree_tree_node_tree_node_splay_tree_node_s);
-
-
- cfaf = ggc_alloc_alpha_funcs ();
-
- cfaf->links = 0;
- cfaf->num = ++alpha_funcs_num;
-
- splay_tree_insert (alpha_funcs_tree,
- (splay_tree_key) current_function_decl,
- (splay_tree_value) cfaf);
- }
-
- if (alpha_links_tree)
- {
- /* Is this name already defined? */
-
- node = splay_tree_lookup (alpha_links_tree, (splay_tree_key) name);
- if (node)
- {
- al = (struct alpha_links *) node->value;
- if (is_local)
- {
- /* Defined here but external assumed. */
- if (al->lkind == KIND_EXTERN)
- al->lkind = KIND_LOCAL;
- }
- else
- {
- /* Used here but unused assumed. */
- if (al->lkind == KIND_UNUSED)
- al->lkind = KIND_LOCAL;
- }
- return al->linkage;
- }
- }
- else
- alpha_links_tree = splay_tree_new_ggc
- ((splay_tree_compare_fn) strcmp,
- ggc_alloc_splay_tree_str_alpha_links_splay_tree_s,
- ggc_alloc_splay_tree_str_alpha_links_splay_tree_node_s);
-
- al = ggc_alloc_alpha_links ();
- name = ggc_strdup (name);
-
- /* Assume external if no definition. */
- al->lkind = (is_local ? KIND_UNUSED : KIND_EXTERN);
-
- /* Ensure we have an IDENTIFIER so assemble_name can mark it used
- and find the ultimate alias target like assemble_name. */
- id = get_identifier (name);
- target = NULL;
- while (IDENTIFIER_TRANSPARENT_ALIAS (id))
- {
- id = TREE_CHAIN (id);
- target = IDENTIFIER_POINTER (id);
- }
-
- al->target = target ? target : name;
- al->linkage = gen_rtx_SYMBOL_REF (Pmode, name);
-
- splay_tree_insert (alpha_links_tree, (splay_tree_key) name,
- (splay_tree_value) al);
-
- return al->linkage;
-}
/* Return a SYMBOL_REF representing the reference to the .linkage entry
   of function FUNC.  LFLAG is true if this is just a reference (code
   address only), false if this is a full reference.  */
rtx
-alpha_use_linkage (rtx func, tree cfundecl, int lflag, int rflag)
+alpha_use_linkage (rtx func, bool lflag, bool rflag)
{
- splay_tree_node cfunnode;
- struct alpha_funcs *cfaf;
- struct alpha_links *al;
+ struct alpha_links *al = NULL;
const char *name = XSTR (func, 0);
- cfaf = (struct alpha_funcs *) 0;
- al = (struct alpha_links *) 0;
-
- cfunnode = splay_tree_lookup (alpha_funcs_tree, (splay_tree_key) cfundecl);
- cfaf = (struct alpha_funcs *) cfunnode->value;
-
- if (cfaf->links)
+ if (cfun->machine->links)
{
splay_tree_node lnode;
/* Is this name already defined? */
-
- lnode = splay_tree_lookup (cfaf->links, (splay_tree_key) name);
+ lnode = splay_tree_lookup (cfun->machine->links, (splay_tree_key) name);
if (lnode)
al = (struct alpha_links *) lnode->value;
}
else
- cfaf->links = splay_tree_new_ggc
+ cfun->machine->links = splay_tree_new_ggc
((splay_tree_compare_fn) strcmp,
ggc_alloc_splay_tree_str_alpha_links_splay_tree_s,
ggc_alloc_splay_tree_str_alpha_links_splay_tree_node_s);
- if (!al)
+ if (al == NULL)
{
- size_t name_len;
- size_t buflen;
+ size_t buf_len;
char *linksym;
- splay_tree_node node = 0;
- struct alpha_links *anl;
+ tree id;
if (name[0] == '*')
name++;
- name_len = strlen (name);
- linksym = (char *) alloca (name_len + 50);
-
- al = ggc_alloc_alpha_links ();
- al->num = cfaf->num;
- al->target = NULL;
-
- node = splay_tree_lookup (alpha_links_tree, (splay_tree_key) name);
- if (node)
- {
- anl = (struct alpha_links *) node->value;
- al->lkind = anl->lkind;
- name = anl->target;
- }
+      /* Follow transparent aliases, as these are used for CRTL translations.  */
+ id = maybe_get_identifier (name);
+ if (id)
+ {
+ while (IDENTIFIER_TRANSPARENT_ALIAS (id))
+ id = TREE_CHAIN (id);
+ name = IDENTIFIER_POINTER (id);
+ }
- sprintf (linksym, "$%d..%s..lk", cfaf->num, name);
- buflen = strlen (linksym);
+ buf_len = strlen (name) + 8 + 9;
+ linksym = (char *) alloca (buf_len);
+ snprintf (linksym, buf_len, "$%d..%s..lk", cfun->funcdef_no, name);
- al->linkage = gen_rtx_SYMBOL_REF
- (Pmode, ggc_alloc_string (linksym, buflen + 1));
+ al = ggc_alloc_alpha_links ();
+ al->func = func;
+ al->linkage = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (linksym));
- splay_tree_insert (cfaf->links, (splay_tree_key) name,
+ splay_tree_insert (cfun->machine->links,
+ (splay_tree_key) ggc_strdup (name),
(splay_tree_value) al);
}
- if (rflag)
- al->rkind = KIND_CODEADDR;
- else
- al->rkind = KIND_LINKAGE;
+ al->rkind = rflag ? KIND_CODEADDR : KIND_LINKAGE;
if (lflag)
return gen_rtx_MEM (Pmode, plus_constant (al->linkage, 8));
struct alpha_links *link = (struct alpha_links *) node->value;
FILE *stream = (FILE *) data;
- fprintf (stream, "$%d..%s..lk:\n", link->num, name);
+ ASM_OUTPUT_INTERNAL_LABEL (stream, XSTR (link->linkage, 0));
if (link->rkind == KIND_CODEADDR)
{
- if (link->lkind == KIND_LOCAL)
- {
- /* Local and used */
- fprintf (stream, "\t.quad %s..en\n", name);
- }
- else
- {
- /* External and used, request code address. */
- fprintf (stream, "\t.code_address %s\n", name);
- }
+ /* External and used, request code address. */
+ fprintf (stream, "\t.code_address ");
}
else
{
- if (link->lkind == KIND_LOCAL)
+ if (!SYMBOL_REF_EXTERNAL_P (link->func)
+ && SYMBOL_REF_LOCAL_P (link->func))
{
- /* Local and used, build linkage pair. */
+ /* Locally defined, build linkage pair. */
fprintf (stream, "\t.quad %s..en\n", name);
- fprintf (stream, "\t.quad %s\n", name);
+ fprintf (stream, "\t.quad ");
}
else
{
- /* External and used, request linkage pair. */
- fprintf (stream, "\t.linkage %s\n", name);
+ /* External, request linkage pair. */
+ fprintf (stream, "\t.linkage ");
}
}
+ assemble_name (stream, name);
+ fputs ("\n", stream);
return 0;
}
static void
-alpha_write_linkage (FILE *stream, const char *funname, tree fundecl)
+alpha_write_linkage (FILE *stream, const char *funname)
{
- splay_tree_node node;
- struct alpha_funcs *func;
-
fprintf (stream, "\t.link\n");
fprintf (stream, "\t.align 3\n");
in_section = NULL;
- node = splay_tree_lookup (alpha_funcs_tree, (splay_tree_key) fundecl);
- func = (struct alpha_funcs *) node->value;
-
+#ifdef TARGET_VMS_CRASH_DEBUG
fputs ("\t.name ", stream);
assemble_name (stream, funname);
fputs ("..na\n", stream);
+#endif
+
ASM_OUTPUT_LABEL (stream, funname);
fprintf (stream, "\t.pdesc ");
assemble_name (stream, funname);
alpha_procedure_type == PT_STACK ? "stack"
: alpha_procedure_type == PT_REGISTER ? "reg" : "null");
- if (func->links)
+ if (cfun->machine->links)
{
- splay_tree_foreach (func->links, alpha_write_one_linkage, stream);
+ splay_tree_foreach (cfun->machine->links, alpha_write_one_linkage, stream);
      /* splay_tree_delete (cfun->machine->links); */
}
}
assemble_integer (symbol, UNITS_PER_WORD, BITS_PER_WORD, 1);
}
#else
-
-rtx
-alpha_need_linkage (const char *name ATTRIBUTE_UNUSED,
- int is_local ATTRIBUTE_UNUSED)
-{
- return NULL_RTX;
-}
-
rtx
alpha_use_linkage (rtx func ATTRIBUTE_UNUSED,
- tree cfundecl ATTRIBUTE_UNUSED,
- int lflag ATTRIBUTE_UNUSED,
- int rflag ATTRIBUTE_UNUSED)
+ bool lflag ATTRIBUTE_UNUSED,
+ bool rflag ATTRIBUTE_UNUSED)
{
return NULL_RTX;
}
#define TARGET_FUNCTION_OK_FOR_SIBCALL alpha_function_ok_for_sibcall
#undef TARGET_CANNOT_COPY_INSN_P
#define TARGET_CANNOT_COPY_INSN_P alpha_cannot_copy_insn_p
+#undef TARGET_LEGITIMATE_CONSTANT_P
+#define TARGET_LEGITIMATE_CONSTANT_P alpha_legitimate_constant_p
#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM alpha_cannot_force_const_mem
#define TARGET_STDARG_OPTIMIZE_HOOK alpha_stdarg_optimize_hook
#endif
+/* Use 16-bit section anchors.  */
+#undef TARGET_MIN_ANCHOR_OFFSET
+#define TARGET_MIN_ANCHOR_OFFSET -0x7fff - 1
+#undef TARGET_MAX_ANCHOR_OFFSET
+#define TARGET_MAX_ANCHOR_OFFSET 0x7fff
+#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
+#define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true
+
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS alpha_rtx_costs
#undef TARGET_ADDRESS_COST
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT alpha_trampoline_init
+#undef TARGET_INSTANTIATE_DECLS
+#define TARGET_INSTANTIATE_DECLS alpha_instantiate_decls
+
#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD alpha_secondary_reload
#undef TARGET_RELAXED_ORDERING
#define TARGET_RELAXED_ORDERING true
-#undef TARGET_DEFAULT_TARGET_FLAGS
-#define TARGET_DEFAULT_TARGET_FLAGS \
- (TARGET_DEFAULT | TARGET_CPU_DEFAULT | TARGET_DEFAULT_EXPLICIT_RELOCS)
-#undef TARGET_HANDLE_OPTION
-#define TARGET_HANDLE_OPTION alpha_handle_option
-
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE alpha_option_override
-#undef TARGET_OPTION_OPTIMIZATION_TABLE
-#define TARGET_OPTION_OPTIMIZATION_TABLE alpha_option_optimization_table
-
#ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
#undef TARGET_MANGLE_TYPE
#define TARGET_MANGLE_TYPE alpha_mangle_type