GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
+the Free Software Foundation; either version 3, or (at your option)
any later version.
GCC is distributed in the hope that it will be useful,
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to
-the Free Software Foundation, 51 Franklin Street, Fifth Floor,
-Boston, MA 02110-1301, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "tree-gimple.h"
#include "tree-flow.h"
#include "tree-stdarg.h"
+#include "tm-constrs.h"
+#include "df.h"
/* Specify which cpu to schedule for. */
enum processor_type alpha_tune;
/* The alias set for prologue/epilogue register save/restore. */
-static GTY(()) int alpha_sr_alias_set;
+static GTY(()) alias_set_type alpha_sr_alias_set;
/* The assembler name of the current function. */
}
#ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
-/* Implement TARGET_MANGLE_FUNDAMENTAL_TYPE. */
+/* Implement TARGET_MANGLE_TYPE. */
static const char *
-alpha_mangle_fundamental_type (tree type)
+alpha_mangle_type (const_tree type)
{
if (TYPE_MAIN_VARIANT (type) == long_double_type_node
&& TARGET_LONG_DOUBLE_128)
return op;
}
-/* Implements CONST_OK_FOR_LETTER_P. Return true if the value matches
- the range defined for C in [I-P]. */
-
-bool
-alpha_const_ok_for_letter_p (HOST_WIDE_INT value, int c)
-{
- switch (c)
- {
- case 'I':
- /* An unsigned 8 bit constant. */
- return (unsigned HOST_WIDE_INT) value < 0x100;
- case 'J':
- /* The constant zero. */
- return value == 0;
- case 'K':
- /* A signed 16 bit constant. */
- return (unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000;
- case 'L':
- /* A shifted signed 16 bit constant appropriate for LDAH. */
- return ((value & 0xffff) == 0
- && ((value) >> 31 == -1 || value >> 31 == 0));
- case 'M':
- /* A constant that can be AND'ed with using a ZAP insn. */
- return zap_mask (value);
- case 'N':
- /* A complemented unsigned 8 bit constant. */
- return (unsigned HOST_WIDE_INT) (~ value) < 0x100;
- case 'O':
- /* A negated unsigned 8 bit constant. */
- return (unsigned HOST_WIDE_INT) (- value) < 0x100;
- case 'P':
- /* The constant 1, 2 or 3. */
- return value == 1 || value == 2 || value == 3;
-
- default:
- return false;
- }
-}
-
-/* Implements CONST_DOUBLE_OK_FOR_LETTER_P. Return true if VALUE
- matches for C in [GH]. */
-
-bool
-alpha_const_double_ok_for_letter_p (rtx value, int c)
-{
- switch (c)
- {
- case 'G':
- /* The floating point zero constant. */
- return (GET_MODE_CLASS (GET_MODE (value)) == MODE_FLOAT
- && value == CONST0_RTX (GET_MODE (value)));
-
- case 'H':
- /* A valid operand of a ZAP insn. */
- return (GET_MODE (value) == VOIDmode
- && zap_mask (CONST_DOUBLE_LOW (value))
- && zap_mask (CONST_DOUBLE_HIGH (value)));
-
- default:
- return false;
- }
-}
-
-/* Implements CONST_DOUBLE_OK_FOR_LETTER_P. Return true if VALUE
- matches for C. */
-
-bool
-alpha_extra_constraint (rtx value, int c)
-{
- switch (c)
- {
- case 'Q':
- return normal_memory_operand (value, VOIDmode);
- case 'R':
- return direct_call_operand (value, Pmode);
- case 'S':
- return (GET_CODE (value) == CONST_INT
- && (unsigned HOST_WIDE_INT) INTVAL (value) < 64);
- case 'T':
- return GET_CODE (value) == HIGH;
- case 'U':
- return TARGET_ABI_UNICOSMK && symbolic_operand (value, VOIDmode);
- case 'W':
- return (GET_CODE (value) == CONST_VECTOR
- && value == CONST0_RTX (GET_MODE (value)));
- default:
- return false;
- }
-}
-
/* The scalar modes supported differ from the default check-what-c-supports
version in that sometimes TFmode is available even when long double
indicates only DFmode. On unicosmk, we have the situation that HImode
function in the current unit of translation. */
static bool
-decl_has_samegp (tree decl)
+decl_has_samegp (const_tree decl)
{
/* Functions that are not local can be overridden, and thus may
not share the same gp. */
/* Return true if EXP should be placed in the small data section. */
static bool
-alpha_in_small_data_p (tree exp)
+alpha_in_small_data_p (const_tree exp)
{
/* We want to merge strings, so we never consider them small data. */
if (TREE_CODE (exp) == STRING_CST)
part of the CONST_INT. Then load FOO plus any high-order part of the
CONST_INT into a register. Our address is (plus reg low-part-const).
This is done to reduce the number of GOT entries. */
- if (!no_new_pseudos
+ if (can_create_pseudo_p ()
&& GET_CODE (x) == CONST
&& GET_CODE (XEXP (x, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
/* If we have a (plus reg const), emit the load as in (2), then add
the two registers, and finally generate (plus reg low-part-const) as
our address. */
- if (!no_new_pseudos
+ if (can_create_pseudo_p ()
&& GET_CODE (x) == PLUS
&& GET_CODE (XEXP (x, 0)) == REG
&& GET_CODE (XEXP (x, 1)) == CONST
return x;
else
{
- if (!no_new_pseudos)
+ if (can_create_pseudo_p ())
scratch = gen_reg_rtx (Pmode);
emit_insn (gen_rtx_SET (VOIDmode, scratch,
gen_rtx_HIGH (Pmode, x)));
if (addend)
x = expand_simple_binop (Pmode, PLUS, x, GEN_INT (addend),
- (no_new_pseudos ? scratch : NULL_RTX),
+ (!can_create_pseudo_p () ? scratch : NULL_RTX),
1, OPTAB_LIB_WIDEN);
if (high)
x = expand_simple_binop (Pmode, PLUS, x, GEN_INT (high),
- (no_new_pseudos ? scratch : NULL_RTX),
+ (!can_create_pseudo_p () ? scratch : NULL_RTX),
1, OPTAB_LIB_WIDEN);
return plus_constant (x, low);
-Add EXTRA_OFFSET to the address we return. */
+*/
rtx
-get_unaligned_address (rtx ref, int extra_offset)
+get_unaligned_address (rtx ref)
{
rtx base;
HOST_WIDE_INT offset = 0;
if (GET_CODE (base) == PLUS)
offset += INTVAL (XEXP (base, 1)), base = XEXP (base, 0);
- return plus_constant (base, offset + extra_offset);
+ return plus_constant (base, offset);
+}
+
+/* Compute a value X, such that X & 7 == (ADDR + OFS) & 7.
+ X is always returned in a register. */
+
+rtx
+get_unaligned_offset (rtx addr, HOST_WIDE_INT ofs)
+{
+ if (GET_CODE (addr) == PLUS)
+ {
+ ofs += INTVAL (XEXP (addr, 1));
+ addr = XEXP (addr, 0);
+ }
+
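+ /* Only the low three bits of OFS matter modulo 8, so it is enough to
+ add (OFS & 7) to ADDR here. */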
+ return expand_simple_binop (Pmode, PLUS, addr, GEN_INT (ofs & 7),
+ NULL_RTX, 1, OPTAB_LIB_WIDEN);
}
/* On the Alpha, all (non-symbolic) constants except zero go into
return class;
}
-/* Loading and storing HImode or QImode values to and from memory
- usually requires a scratch register. The exceptions are loading
- QImode and HImode from an aligned address to a general register
- unless byte instructions are permitted.
+/* Inform reload about cases where moving X with a mode MODE to a register in
+ CLASS requires an extra scratch or immediate register. Return the class
+ needed for the immediate register. */
- We also cannot load an unaligned address or a paradoxical SUBREG
- into an FP register.
-
- We also cannot do integral arithmetic into FP regs, as might result
- from register elimination into a DImode fp register. */
-
-enum reg_class
-alpha_secondary_reload_class (enum reg_class class, enum machine_mode mode,
- rtx x, int in)
+static enum reg_class
+alpha_secondary_reload (bool in_p, rtx x, enum reg_class class,
+ enum machine_mode mode, secondary_reload_info *sri)
{
- if ((mode == QImode || mode == HImode) && ! TARGET_BWX)
+ /* Loading and storing HImode or QImode values to and from memory
+ usually requires a scratch register. */
+ if (!TARGET_BWX && (mode == QImode || mode == HImode || mode == CQImode))
{
- if (GET_CODE (x) == MEM
- || (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER)
- || (GET_CODE (x) == SUBREG
- && (GET_CODE (SUBREG_REG (x)) == MEM
- || (GET_CODE (SUBREG_REG (x)) == REG
- && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER))))
+ if (any_memory_operand (x, mode))
{
- if (!in || !aligned_memory_operand(x, mode))
- return GENERAL_REGS;
+ if (in_p)
+ {
+ if (!aligned_memory_operand (x, mode))
+ sri->icode = reload_in_optab[mode];
+ }
+ else
+ sri->icode = reload_out_optab[mode];
+ return NO_REGS;
}
}
+ /* We also cannot do integral arithmetic into FP regs, as might result
+ from register elimination into a DImode fp register. */
if (class == FLOAT_REGS)
{
- if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == AND)
- return GENERAL_REGS;
-
- if (GET_CODE (x) == SUBREG
- && (GET_MODE_SIZE (GET_MODE (x))
- > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
+ if (MEM_P (x) && GET_CODE (XEXP (x, 0)) == AND)
return GENERAL_REGS;
-
- if (in && INTEGRAL_MODE_P (mode)
- && ! (memory_operand (x, mode) || x == const0_rtx))
+ if (in_p && INTEGRAL_MODE_P (mode)
+ && !MEM_P (x) && !REG_P (x) && !CONST_INT_P (x))
return GENERAL_REGS;
}
int i, bits;
/* Use a pseudo if highly optimizing and still generating RTL. */
rtx subtarget
- = (flag_expensive_optimizations && !no_new_pseudos ? 0 : target);
+ = (flag_expensive_optimizations && can_create_pseudo_p () ? 0 : target);
rtx temp, insn;
/* If this is a sign-extended 32-bit constant, we can do this in at most
{
if (no_output)
return pc_rtx;
- if (no_new_pseudos)
+ if (!can_create_pseudo_p ())
{
emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (high << 16)));
temp = target;
we can't make pseudos, we can't do anything since the expand_binop
and expand_unop calls will widen and try to make pseudos. */
- if (n == 1 || (mode == SImode && no_new_pseudos))
+ if (n == 1 || (mode == SImode && !can_create_pseudo_p ()))
return 0;
/* Next, see if we can load a related constant and then shift and possibly
/* If we can't make any pseudos, TARGET is an SImode hard register, and we
can't load this constant in one insn, do this in DImode. */
- if (no_new_pseudos && mode == SImode
+ if (!can_create_pseudo_p () && mode == SImode
&& GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
{
result = alpha_emit_set_const_1 (target, mode, c, 1, no_output);
if (reload_in_progress)
{
emit_move_insn (operands[0], XEXP (operands[1], 0));
- operands[1] = copy_rtx (operands[1]);
- XEXP (operands[1], 0) = operands[0];
+ operands[1] = replace_equiv_address (operands[1], operands[0]);
}
else
operands[1] = validize_mem (operands[1]);
bool
alpha_expand_mov_nobwx (enum machine_mode mode, rtx *operands)
{
+ rtx seq;
+
/* If the output is not a register, the input must be. */
- if (GET_CODE (operands[0]) == MEM)
+ if (MEM_P (operands[0]))
operands[1] = force_reg (mode, operands[1]);
/* Handle four memory cases, unaligned and aligned for either the input
or the output. The only case where we can be called during reload is
for aligned loads; all other cases require temporaries. */
- if (GET_CODE (operands[1]) == MEM
- || (GET_CODE (operands[1]) == SUBREG
- && GET_CODE (SUBREG_REG (operands[1])) == MEM)
- || (reload_in_progress && GET_CODE (operands[1]) == REG
- && REGNO (operands[1]) >= FIRST_PSEUDO_REGISTER)
- || (reload_in_progress && GET_CODE (operands[1]) == SUBREG
- && GET_CODE (SUBREG_REG (operands[1])) == REG
- && REGNO (SUBREG_REG (operands[1])) >= FIRST_PSEUDO_REGISTER))
+ if (any_memory_operand (operands[1], mode))
{
if (aligned_memory_operand (operands[1], mode))
{
if (reload_in_progress)
{
- emit_insn ((mode == QImode
- ? gen_reload_inqi_help
- : gen_reload_inhi_help)
- (operands[0], operands[1],
- gen_rtx_REG (SImode, REGNO (operands[0]))));
+ if (mode == QImode)
+ seq = gen_reload_inqi_aligned (operands[0], operands[1]);
+ else
+ seq = gen_reload_inhi_aligned (operands[0], operands[1]);
+ emit_insn (seq);
}
else
{
else
subtarget = gen_reg_rtx (DImode), copyout = true;
- emit_insn ((mode == QImode
- ? gen_aligned_loadqi
- : gen_aligned_loadhi)
- (subtarget, aligned_mem, bitnum, scratch));
+ if (mode == QImode)
+ seq = gen_aligned_loadqi (subtarget, aligned_mem,
+ bitnum, scratch);
+ else
+ seq = gen_aligned_loadhi (subtarget, aligned_mem,
+ bitnum, scratch);
+ emit_insn (seq);
if (copyout)
emit_move_insn (operands[0], gen_lowpart (mode, subtarget));
code depend on parameter evaluation order which will cause
bootstrap failures. */
- rtx temp1, temp2, seq, subtarget;
+ rtx temp1, temp2, subtarget, ua;
bool copyout;
temp1 = gen_reg_rtx (DImode);
else
subtarget = gen_reg_rtx (DImode), copyout = true;
- seq = ((mode == QImode
- ? gen_unaligned_loadqi
- : gen_unaligned_loadhi)
- (subtarget, get_unaligned_address (operands[1], 0),
- temp1, temp2));
+ ua = get_unaligned_address (operands[1]);
+ if (mode == QImode)
+ seq = gen_unaligned_loadqi (subtarget, ua, temp1, temp2);
+ else
+ seq = gen_unaligned_loadhi (subtarget, ua, temp1, temp2);
+
alpha_set_memflags (seq, operands[1]);
emit_insn (seq);
return true;
}
- if (GET_CODE (operands[0]) == MEM
- || (GET_CODE (operands[0]) == SUBREG
- && GET_CODE (SUBREG_REG (operands[0])) == MEM)
- || (reload_in_progress && GET_CODE (operands[0]) == REG
- && REGNO (operands[0]) >= FIRST_PSEUDO_REGISTER)
- || (reload_in_progress && GET_CODE (operands[0]) == SUBREG
- && GET_CODE (SUBREG_REG (operands[0])) == REG
- && REGNO (operands[0]) >= FIRST_PSEUDO_REGISTER))
+ if (any_memory_operand (operands[0], mode))
{
if (aligned_memory_operand (operands[0], mode))
{
rtx temp1 = gen_reg_rtx (DImode);
rtx temp2 = gen_reg_rtx (DImode);
rtx temp3 = gen_reg_rtx (DImode);
- rtx seq = ((mode == QImode
- ? gen_unaligned_storeqi
- : gen_unaligned_storehi)
- (get_unaligned_address (operands[0], 0),
- operands[1], temp1, temp2, temp3));
+ rtx ua = get_unaligned_address (operands[0]);
+
+ if (mode == QImode)
+ seq = gen_unaligned_storeqi (ua, operands[1], temp1, temp2, temp3);
+ else
+ seq = gen_unaligned_storehi (ua, operands[1], temp1, temp2, temp3);
alpha_set_memflags (seq, operands[0]);
emit_insn (seq);
&& !(symbolic_operand (op0, VOIDmode)
|| (GET_CODE (op0) == REG && REG_POINTER (op0))))
{
- HOST_WIDE_INT v = INTVAL (op1), n = -v;
+ rtx n_op1 = GEN_INT (-INTVAL (op1));
- if (! CONST_OK_FOR_LETTER_P (v, 'I')
- && (CONST_OK_FOR_LETTER_P (n, 'K')
- || CONST_OK_FOR_LETTER_P (n, 'L')))
- {
- cmp_code = PLUS, branch_code = code;
- op1 = GEN_INT (n);
- }
+ if (! satisfies_constraint_I (op1)
+ && (satisfies_constraint_K (n_op1)
+ || satisfies_constraint_L (n_op1)))
+ cmp_code = PLUS, branch_code = code, op1 = n_op1;
}
}
conditional move. Make sure we emit only comparisons we have;
swap or reverse as necessary. */
- if (no_new_pseudos)
+ if (!can_create_pseudo_p ())
return NULL_RTX;
switch (code)
if (mode != DImode)
{
target = gen_lowpart (DImode, dest);
- if (! no_new_pseudos)
+ if (can_create_pseudo_p ())
subtarget = gen_reg_rtx (DImode);
else
subtarget = target;
/* We must use tgt here for the target. Alpha-vms port fails if we use
addr for the target, because addr is marked as a pointer and combine
- knows that pointers are always sign-extended 32 bit values. */
+ knows that pointers are always sign-extended 32-bit values. */
addr = expand_binop (DImode, ior_optab, extl, exth, tgt, 1, OPTAB_WIDEN);
addr = expand_binop (DImode, ashr_optab, addr, GEN_INT (48),
addr, 1, OPTAB_WIDEN);
return ret;
}
-/* Expand an an atomic fetch-and-operate pattern. CODE is the binary operation
+/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
to perform. MEM is the memory on which to operate. VAL is the second
operand of the binary operator. BEFORE and AFTER are optional locations to
return the value of MEM either before or after the operation. SCRATCH is
seq = get_insns ();
end_sequence ();
- emit_insn_after (seq, entry_of_function ());
+ emit_insn_at_entry (seq);
cfun->machine->gp_save_rtx = m;
}
rtx top;
if (!has_hard_reg_initial_val (Pmode, REG_RA))
- return regs_ever_live[REG_RA];
+ return (int)df_regs_ever_live_p (REG_RA);
push_topmost_sequence ();
top = get_insns ();
fputc ((TARGET_FLOAT_VAX ? 'g' : 't'), file);
break;
- case '+':
- /* Generates a nop after a noreturn call at the very end of the
- function. */
- if (next_real_insn (current_output_insn) == 0)
- fprintf (file, "\n\tnop");
- break;
-
case '#':
if (alpha_this_literal_sequence_number == 0)
alpha_this_literal_sequence_number = alpha_next_sequence_number++;
/* Return true if TYPE must be returned in memory, instead of in registers. */
static bool
-alpha_return_in_memory (tree type, tree fndecl ATTRIBUTE_UNUSED)
+alpha_return_in_memory (const_tree type, const_tree fndecl ATTRIBUTE_UNUSED)
{
enum machine_mode mode = VOIDmode;
int size;
static bool
alpha_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
enum machine_mode mode,
- tree type ATTRIBUTE_UNUSED,
+ const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
return mode == TFmode || mode == TCmode;
$f0 for floating-point functions. */
rtx
-function_value (tree valtype, tree func ATTRIBUTE_UNUSED,
+function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED,
enum machine_mode mode)
{
unsigned int regnum, dummy;
should not split these values. */
static bool
-alpha_split_complex_arg (tree type)
+alpha_split_complex_arg (const_tree type)
{
return TYPE_MODE (type) != TCmode;
}
if ((TREE_CODE (rhs) != NOP_EXPR
&& TREE_CODE (rhs) != CONVERT_EXPR
- && (TREE_CODE (rhs) != PLUS_EXPR
+ && ((TREE_CODE (rhs) != PLUS_EXPR
+ && TREE_CODE (rhs) != POINTER_PLUS_EXPR)
|| TREE_CODE (TREE_OPERAND (rhs, 1)) != INTEGER_CST
|| !host_integerp (TREE_OPERAND (rhs, 1), 1)))
|| TREE_CODE (TREE_OPERAND (rhs, 0)) != SSA_NAME)
? ap.__offset + cst - 48 : ap.__offset + cst) + cst2).
If the former, indicate that GPR registers are needed;
if the latter, indicate that FPR registers are needed.
+
+ Also look for LHS = (*ptr).field, where ptr is one of the forms
+ listed above.
+
On alpha, cfun->va_list_gpr_size is used as size of the needed
- regs and cfun->va_list_fpr_size is a bitmask, bit 0 set if
- GPR registers are needed and bit 1 set if FPR registers are needed.
- Return true if va_list references should not be scanned for the current
- statement. */
+ regs and cfun->va_list_fpr_size is a bitmask, bit 0 set if GPR
+ registers are needed and bit 1 set if FPR registers are needed.
+ Return true if va_list references should not be scanned for the
+ current statement. */
static bool
-alpha_stdarg_optimize_hook (struct stdarg_info *si, tree lhs, tree rhs)
+alpha_stdarg_optimize_hook (struct stdarg_info *si, const_tree lhs, const_tree rhs)
{
tree base, offset, arg1, arg2;
int offset_arg = 1;
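+ /* Strip any COMPONENT_REFs and similar wrappers so that an RHS of the
+ (*ptr).field form mentioned above is reduced to its underlying
+ INDIRECT_REF before the checks below. */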
+ while (handled_component_p (rhs))
+ rhs = TREE_OPERAND (rhs, 0);
if (TREE_CODE (rhs) != INDIRECT_REF
|| TREE_CODE (TREE_OPERAND (rhs, 0)) != SSA_NAME)
return false;
lhs = va_list_skip_additions (TREE_OPERAND (rhs, 0));
if (lhs == NULL_TREE
- || TREE_CODE (lhs) != PLUS_EXPR)
+ || TREE_CODE (lhs) != POINTER_PLUS_EXPR)
return false;
base = TREE_OPERAND (lhs, 0);
if (!no_rtl)
{
- int count, set = get_varargs_alias_set ();
+ int count;
+ alias_set_type set = get_varargs_alias_set ();
rtx tmp;
count = cfun->va_list_gpr_size / UNITS_PER_WORD;
valist, offset_field, NULL_TREE);
t = make_tree (ptr_type_node, virtual_incoming_args_rtx);
- t = build2 (PLUS_EXPR, ptr_type_node, t,
- build_int_cst (NULL_TREE, offset));
+ t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
+ size_int (offset));
t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (base_field), base_field, t);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
/* Build the final address and force that value into a temporary. */
- addr = build2 (PLUS_EXPR, ptr_type, fold_convert (ptr_type, base),
- fold_convert (ptr_type, addend));
+ addr = build2 (POINTER_PLUS_EXPR, ptr_type, fold_convert (ptr_type, base),
+ fold_convert (sizetype, addend));
internal_post = NULL;
gimplify_expr (&addr, pre_p, &internal_post, is_gimple_val, fb_rvalue);
append_to_statement_list (internal_post, pre_p);
{
#define MAX_ARGS 2
- tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
+ tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
- tree arglist = TREE_OPERAND (exp, 1);
+ tree arg;
+ call_expr_arg_iterator iter;
enum insn_code icode;
rtx op[MAX_ARGS], pat;
int arity;
nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
- for (arglist = TREE_OPERAND (exp, 1), arity = 0;
- arglist;
- arglist = TREE_CHAIN (arglist), arity++)
+ arity = 0;
+ FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
{
const struct insn_operand_data *insn_op;
- tree arg = TREE_VALUE (arglist);
if (arg == error_mark_node)
return NULL_RTX;
if (arity > MAX_ARGS)
if (!(*insn_op->predicate) (op[arity], insn_op->mode))
op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
+ arity++;
}
if (nonvoid)
#endif
/* Fold the builtin for the CMPBGE instruction. This is a vector comparison
- with an 8 bit output vector. OPINT contains the integer operands; bit N
+ with an 8-bit output vector. OPINT contains the integer operands; bit N
of OP_CONST is set if OPINT[N] is valid. */
static tree
return build_int_cst (long_integer_type_node, opint[0] & mask);
if (op)
- return fold (build2 (BIT_AND_EXPR, long_integer_type_node, op[0],
- build_int_cst (long_integer_type_node, mask)));
+ return fold_build2 (BIT_AND_EXPR, long_integer_type_node, op[0],
+ build_int_cst (long_integer_type_node, mask));
}
else if ((op_const & 1) && opint[0] == 0)
return build_int_cst (long_integer_type_node, 0);
{
tree op0 = fold_convert (vtype, op[0]);
tree op1 = fold_convert (vtype, op[1]);
- tree val = fold (build2 (code, vtype, op0, op1));
+ tree val = fold_build2 (code, vtype, op0, op1);
return fold_convert (long_integer_type_node, val);
}
/* One for every register we have to save. */
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (! fixed_regs[i] && ! call_used_regs[i]
- && regs_ever_live[i] && i != REG_RA
+ && df_regs_ever_live_p (i) && i != REG_RA
&& (!TARGET_ABI_UNICOSMK || i != HARD_FRAME_POINTER_REGNUM))
{
if (i < 32)
vms_save_fp_regno = -1;
if (vms_base_regno == HARD_FRAME_POINTER_REGNUM)
for (i = 0; i < 32; i++)
- if (! fixed_regs[i] && call_used_regs[i] && ! regs_ever_live[i])
+ if (! fixed_regs[i] && call_used_regs[i] && ! df_regs_ever_live_p (i))
vms_save_fp_regno = i;
if (vms_save_fp_regno == -1 && alpha_procedure_type == PT_REGISTER)
void
alpha_end_function (FILE *file, const char *fnname, tree decl ATTRIBUTE_UNUSED)
{
+ rtx insn;
+
+ /* We output a nop after noreturn calls at the very end of the function to
+ ensure that the return address always remains in the caller's code range,
+ as not doing so might confuse unwinding engines. */
+ insn = get_last_insn ();
+ if (!INSN_P (insn))
+ insn = prev_active_insn (insn);
+ if (GET_CODE (insn) == CALL_INSN)
+ output_asm_insn (get_insn_template (CODE_FOR_nop, NULL), NULL);
+
#if TARGET_ABI_OPEN_VMS
alpha_write_linkage (file, fnname, decl);
#endif
HOST_WIDE_INT hi, lo;
rtx this, insn, funexp;
- reset_block_changes ();
-
/* We always require a valid GP. */
emit_insn (gen_prologue_ldgp ());
emit_note (NOTE_INSN_PROLOGUE_END);
instruction scheduling worth while. Note that use_thunk calls
assemble_start_function and assemble_end_function. */
insn = get_insns ();
- insn_locators_initialize ();
+ insn_locators_alloc ();
shorten_branches (insn);
final_start_function (insn, file, 1);
final (insn, file, 1);
{
if (GET_CODE (i) == NOTE)
{
- switch (NOTE_LINE_NUMBER (i))
+ switch (NOTE_KIND (i))
{
case NOTE_INSN_EH_REGION_BEG:
exception_nesting++;
#endif
#ifdef OBJECT_FORMAT_ELF
+/* Since we don't have a .dynbss section, we should not allow global
+ relocations in the .rodata section. */
+
+static int
+alpha_elf_reloc_rw_mask (void)
+{
+ return flag_pic ? 3 : 2;
+}
/* Return a section for X. The only special thing we do here is to
honor small data. */
return default_elf_select_rtx_section (mode, x, align);
}
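+/* Mark the .sdata/.sbss family of small data sections with SECTION_SMALL
+ in addition to the default section type flags. */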
+static unsigned int
+alpha_elf_section_type_flags (tree decl, const char *name, int reloc)
+{
+ unsigned int flags = 0;
+
+ if (strcmp (name, ".sdata") == 0
+ || strncmp (name, ".sdata.", 7) == 0
+ || strncmp (name, ".gnu.linkonce.s.", 16) == 0
+ || strcmp (name, ".sbss") == 0
+ || strncmp (name, ".sbss.", 6) == 0
+ || strncmp (name, ".gnu.linkonce.sb.", 17) == 0)
+ flags = SECTION_SMALL;
+
+ flags |= default_section_type_flags (decl, name, reloc);
+ return flags;
+}
#endif /* OBJECT_FORMAT_ELF */
\f
/* Structure to collect function names for final output in link section. */
registers. */
static bool
-unicosmk_must_pass_in_stack (enum machine_mode mode, tree type)
+unicosmk_must_pass_in_stack (enum machine_mode mode, const_tree type)
{
if (type == NULL)
return false;
#endif
#ifdef OBJECT_FORMAT_ELF
+#undef TARGET_ASM_RELOC_RW_MASK
+#define TARGET_ASM_RELOC_RW_MASK alpha_elf_reloc_rw_mask
#undef TARGET_ASM_SELECT_RTX_SECTION
#define TARGET_ASM_SELECT_RTX_SECTION alpha_elf_select_rtx_section
+#undef TARGET_SECTION_TYPE_FLAGS
+#define TARGET_SECTION_TYPE_FLAGS alpha_elf_section_type_flags
#endif
#undef TARGET_ASM_FUNCTION_END_PROLOGUE
#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK alpha_output_mi_thunk_osf
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
-#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
+#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
#undef TARGET_STDARG_OPTIMIZE_HOOK
#define TARGET_STDARG_OPTIMIZE_HOOK alpha_stdarg_optimize_hook
#endif
#define TARGET_MACHINE_DEPENDENT_REORG alpha_reorg
#undef TARGET_PROMOTE_FUNCTION_ARGS
-#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
+#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
-#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
+#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
#undef TARGET_PROMOTE_PROTOTYPES
-#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_false
+#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_false
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY alpha_return_in_memory
#undef TARGET_PASS_BY_REFERENCE
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES alpha_arg_partial_bytes
+#undef TARGET_SECONDARY_RELOAD
+#define TARGET_SECONDARY_RELOAD alpha_secondary_reload
+
#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P alpha_scalar_mode_supported_p
#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_HANDLE_OPTION alpha_handle_option
#ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
-#undef TARGET_MANGLE_FUNDAMENTAL_TYPE
-#define TARGET_MANGLE_FUNDAMENTAL_TYPE alpha_mangle_fundamental_type
+#undef TARGET_MANGLE_TYPE
+#define TARGET_MANGLE_TYPE alpha_mangle_type
#endif
struct gcc_target targetm = TARGET_INITIALIZER;