/* So we can assign to cfun in this file. */
#undef cfun
-#ifndef LOCAL_ALIGNMENT
-#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
-#endif
-
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif
VEC_free (int, heap, prologue);
VEC_free (int, heap, epilogue);
VEC_free (int, heap, sibcall_epilogue);
- if (rtl.emit.regno_pointer_align)
- free (rtl.emit.regno_pointer_align);
+ if (crtl->emit.regno_pointer_align)
+ free (crtl->emit.regno_pointer_align);
- memset (&rtl, 0, sizeof (rtl));
+ memset (crtl, 0, sizeof (struct rtl_data));
f->eh = NULL;
f->machine = NULL;
f->cfg = NULL;
- f->arg_offset_rtx = NULL;
- f->return_rtx = NULL;
- f->internal_arg_pointer = NULL;
- f->epilogue_delay_list = NULL;
+ regno_reg_rtx = NULL;
}
\f
/* Return size needed for stack frame based on slots so far allocated.
return FALSE;
}
+/* Return stack slot alignment in bits for TYPE and MODE.  TYPE may be
+   NULL_TREE, in which case a type is synthesized from MODE via the
+   front-end's type_for_mode hook before consulting the target.  */
+
+static unsigned int
+get_stack_local_alignment (tree type, enum machine_mode mode)
+{
+  unsigned int alignment;
+
+  /* BLKmode carries no natural alignment; start from the strictest.  */
+  if (mode == BLKmode)
+    alignment = BIGGEST_ALIGNMENT;
+  else
+    alignment = GET_MODE_ALIGNMENT (mode);
+
+  /* Allow the front-end to (possibly) increase the alignment of this
+     stack slot.  */
+  if (! type)
+    type = lang_hooks.types.type_for_mode (mode, 0);
+
+  /* Let the target adjust the final alignment for this slot.  */
+  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
+}
+
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
with machine mode MODE.
{
rtx x, addr;
int bigend_correction = 0;
- unsigned int alignment;
+ unsigned int alignment, alignment_in_bits;
int frame_off, frame_alignment, frame_phase;
if (align == 0)
{
- tree type;
-
- if (mode == BLKmode)
- alignment = BIGGEST_ALIGNMENT;
- else
- alignment = GET_MODE_ALIGNMENT (mode);
-
- /* Allow the target to (possibly) increase the alignment of this
- stack slot. */
- type = lang_hooks.types.type_for_mode (mode, 0);
- if (type)
- alignment = LOCAL_ALIGNMENT (type, alignment);
-
+ alignment = get_stack_local_alignment (NULL, mode);
alignment /= BITS_PER_UNIT;
}
else if (align == -1)
if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
- if (cfun->stack_alignment_needed < alignment * BITS_PER_UNIT)
- cfun->stack_alignment_needed = alignment * BITS_PER_UNIT;
+ alignment_in_bits = alignment * BITS_PER_UNIT;
+
+ if (crtl->stack_alignment_needed < alignment_in_bits)
+ crtl->stack_alignment_needed = alignment_in_bits;
/* Calculate how many bytes the start of local variables is off from
stack alignment. */
frame_offset += size;
x = gen_rtx_MEM (mode, addr);
+ set_mem_align (x, alignment_in_bits);
MEM_NOTRAP_P (x) = 1;
stack_slot_list
/* These are now unused. */
gcc_assert (keep <= 1);
- if (mode == BLKmode)
- align = BIGGEST_ALIGNMENT;
- else
- align = GET_MODE_ALIGNMENT (mode);
-
- if (! type)
- type = lang_hooks.types.type_for_mode (mode, 0);
-
- if (type)
- align = LOCAL_ALIGNMENT (type, align);
+ align = get_stack_local_alignment (type, mode);
/* Try to find an available, already-allocated temporary of the proper
mode which meets the size and alignment requirements. Choose the
if (best_p->size - rounded_size >= alignment)
{
- p = ggc_alloc (sizeof (struct temp_slot));
+ p = GGC_NEW (struct temp_slot);
p->in_use = p->addr_taken = 0;
p->size = best_p->size - rounded_size;
p->base_offset = best_p->base_offset + rounded_size;
{
HOST_WIDE_INT frame_offset_old = frame_offset;
- p = ggc_alloc (sizeof (struct temp_slot));
+ p = GGC_NEW (struct temp_slot);
/* We are passing an explicit alignment request to assign_stack_local.
One side effect of that is assign_stack_local will not round SIZE
parameters. However, if OUTGOING_REG_PARM_STACK space is not defined,
stack space for register parameters is not pushed by the caller, but
rather part of the fixed stack areas and hence not included in
- `current_function_outgoing_args_size'. Nevertheless, we must allow
+ `crtl->outgoing_args_size'. Nevertheless, we must allow
for it when allocating stack dynamic objects. */
#if defined(REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
- ? (current_function_outgoing_args_size \
- + (OUTGOING_REG_PARM_STACK_SPACE ? 0 : REG_PARM_STACK_SPACE (FNDECL))) \
+ ? (crtl->outgoing_args_size \
+ + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
+ : REG_PARM_STACK_SPACE (FNDECL))) \
: 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
-((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
+((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
+ (STACK_POINTER_OFFSET))
#endif
#endif
bool
use_register_for_decl (const_tree decl)
{
+ if (!targetm.calls.allocate_stack_slots_for_args())
+ return true;
+
/* Honor volatile. */
if (TREE_SIDE_EFFECTS (decl))
return false;
/* If struct value address is treated as the first argument, make it so. */
if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
- && ! current_function_returns_pcc_struct
+ && ! cfun->returns_pcc_struct
&& targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
{
tree type = build_pointer_type (TREE_TYPE (fntype));
memset (data, 0, sizeof (*data));
- /* NAMED_ARG is a mis-nomer. We really mean 'non-varadic'. */
- if (!current_function_stdarg)
- data->named_arg = 1; /* No varadic parms. */
+ /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
+ if (!cfun->stdarg)
+ data->named_arg = 1; /* No variadic parms. */
else if (TREE_CHAIN (parm))
- data->named_arg = 1; /* Not the last non-varadic parm. */
+ data->named_arg = 1; /* Not the last non-variadic parm. */
else if (targetm.calls.strict_argument_naming (&all->args_so_far))
- data->named_arg = 1; /* Only varadic ones are unnamed. */
+ data->named_arg = 1; /* Only variadic ones are unnamed. */
else
- data->named_arg = 0; /* Treat as varadic. */
+ data->named_arg = 0; /* Treat as variadic. */
nominal_type = TREE_TYPE (parm);
passed_type = DECL_ARG_TYPE (parm);
else
offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
- stack_parm = current_function_internal_arg_pointer;
+ stack_parm = crtl->args.internal_arg_pointer;
if (offset_rtx != const0_rtx)
stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
/* If stack protection is in effect for this function, don't leave any
pointers in their passed stack slots. */
- else if (cfun->stack_protect_guard
+ else if (crtl->stack_protect_guard
&& (flag_stack_protect == 2
|| data->passed_pointer
|| POINTER_TYPE_P (data->nominal_type)))
struct assign_parm_data_all all;
tree fnargs, parm;
- current_function_internal_arg_pointer
+ crtl->args.internal_arg_pointer
= targetm.calls.internal_arg_pointer ();
assign_parms_initialize_all (&all);
continue;
}
- if (current_function_stdarg && !TREE_CHAIN (parm))
+ if (cfun->stdarg && !TREE_CHAIN (parm))
assign_parms_setup_varargs (&all, &data, false);
/* Find out where the parameter arrives in this function. */
}
/* We have aligned all the args, so add space for the pretend args. */
- current_function_pretend_args_size = all.pretend_args_size;
+ crtl->args.pretend_args_size = all.pretend_args_size;
all.stack_args_size.constant += all.extra_pretend_bytes;
- current_function_args_size = all.stack_args_size.constant;
+ crtl->args.size = all.stack_args_size.constant;
/* Adjust function incoming argument size for alignment and
minimum length. */
#ifdef REG_PARM_STACK_SPACE
- current_function_args_size = MAX (current_function_args_size,
+ crtl->args.size = MAX (crtl->args.size,
REG_PARM_STACK_SPACE (fndecl));
#endif
- current_function_args_size = CEIL_ROUND (current_function_args_size,
+ crtl->args.size = CEIL_ROUND (crtl->args.size,
PARM_BOUNDARY / BITS_PER_UNIT);
#ifdef ARGS_GROW_DOWNWARD
- current_function_arg_offset_rtx
+ crtl->args.arg_offset_rtx
= (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
: expand_expr (size_diffop (all.stack_args_size.var,
size_int (-all.stack_args_size.constant)),
NULL_RTX, VOIDmode, 0));
#else
- current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
+ crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
#endif
/* See how many bytes, if any, of its args a function should try to pop
on return. */
- current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
- current_function_args_size);
+ crtl->args.pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
+ crtl->args.size);
/* For stdarg.h function, save info about
regs and stack space used by the named args. */
- current_function_args_info = all.args_so_far;
+ crtl->args.info = all.args_so_far;
/* Set the rtx used for the function return value. Put this in its
own variable so any optimizers that need this information don't have
to include tree.h. Do this here so it gets done when an inlined
function gets output. */
- current_function_return_rtx
+ crtl->return_rtx
= (DECL_RTL_SET_P (DECL_RESULT (fndecl))
? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
fndecl, true);
REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
- /* The delay slot scheduler assumes that current_function_return_rtx
+ /* The delay slot scheduler assumes that crtl->return_rtx
holds the hard register containing the return value, not a
temporary pseudo. */
- current_function_return_rtx = real_decl_rtl;
+ crtl->return_rtx = real_decl_rtl;
}
}
}
calling function side. */
if (boundary > PREFERRED_STACK_BOUNDARY)
boundary = PREFERRED_STACK_BOUNDARY;
- if (cfun->stack_alignment_needed < boundary)
- cfun->stack_alignment_needed = boundary;
+ if (crtl->stack_alignment_needed < boundary)
+ crtl->stack_alignment_needed = boundary;
#ifdef ARGS_GROW_DOWNWARD
locate->slot_offset.constant = -initial_offset_ptr->constant;
tree result;
tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
- cfun = ggc_alloc_cleared (sizeof (struct function));
-
- cfun->stack_alignment_needed = STACK_BOUNDARY;
- cfun->preferred_stack_boundary = STACK_BOUNDARY;
+ cfun = GGC_CNEW (struct function);
current_function_funcdef_no = get_next_funcdef_no ();
if (init_machine_status)
cfun->machine = (*init_machine_status) ();
- if (fndecl != NULL)
+#ifdef OVERRIDE_ABI_FORMAT
+ OVERRIDE_ABI_FORMAT (fndecl);
+#endif
+
+ if (fndecl != NULL_TREE)
{
DECL_STRUCT_FUNCTION (fndecl) = cfun;
cfun->decl = fndecl;
if (!abstract_p && aggregate_value_p (result, fndecl))
{
#ifdef PCC_STATIC_STRUCT_RETURN
- current_function_returns_pcc_struct = 1;
+ cfun->returns_pcc_struct = 1;
#endif
- current_function_returns_struct = 1;
+ cfun->returns_struct = 1;
}
- current_function_stdarg
+ cfun->stdarg
= (fntype
&& TYPE_ARG_TYPES (fntype) != 0
&& (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
static void
prepare_function_start (void)
{
- gcc_assert (!rtl.emit.x_last_insn);
+ gcc_assert (!crtl->emit.x_last_insn);
init_emit ();
init_varasm_status ();
init_expr ();
/* Avoid expand_expr here, because we don't want guard_decl pulled
into registers unless absolutely necessary. And we know that
- cfun->stack_protect_guard is a local stack slot, so this skips
+ crtl->stack_protect_guard is a local stack slot, so this skips
all the fluff. */
- x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
+ x = validize_mem (DECL_RTL (crtl->stack_protect_guard));
y = validize_mem (DECL_RTL (guard_decl));
/* Allow the target to copy from Y to X without leaking Y into a
/* Avoid expand_expr here, because we don't want guard_decl pulled
into registers unless absolutely necessary. And we know that
- cfun->stack_protect_guard is a local stack slot, so this skips
+ crtl->stack_protect_guard is a local stack slot, so this skips
all the fluff. */
- x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
+ x = validize_mem (DECL_RTL (crtl->stack_protect_guard));
y = validize_mem (DECL_RTL (guard_decl));
/* Allow the target to compare Y with X without leaking either into
valid operands of arithmetic insns. */
init_recog_no_volatile ();
- current_function_profile
+ crtl->profile
= (profile_flag
&& ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
- current_function_limit_stack
+ crtl->limit_stack
= (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
/* Make the label for return statements to jump to. Do not special
rtx value_address = 0;
#ifdef PCC_STATIC_STRUCT_RETURN
- if (current_function_returns_pcc_struct)
+ if (cfun->returns_pcc_struct)
{
int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
value_address = assemble_static_space (size);
r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
r_save = convert_memory_address (Pmode, r_save);
- emit_move_insn (r_save, virtual_stack_vars_rtx);
+ emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
update_nonlocal_goto_save_area ();
}
parm_birth_insn = get_last_insn ();
- if (current_function_profile)
+ if (crtl->profile)
{
#ifdef PROFILE_HOOK
PROFILE_HOOK (current_function_funcdef_no);
void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
- rtx outgoing = current_function_return_rtx;
+ rtx outgoing = crtl->return_rtx;
if (! outgoing)
return;
static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
- emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
+ /* emit_clobber emits the same (clobber REG) insn the old explicit
+    gen_rtx_CLOBBER/emit_insn pair did.  */
+ emit_clobber (reg);
}
void
static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
- emit_insn (gen_rtx_USE (VOIDmode, reg));
+ /* emit_use emits the same (use REG) insn the old explicit
+    gen_rtx_USE/emit_insn pair did.  */
+ emit_use (reg);
}
static void
/* If arg_pointer_save_area was referenced only from a nested
function, we will not have initialized it yet. Do that now. */
- if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
+ if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
get_arg_pointer_save_area ();
/* If we are doing stack checking and this function makes calls,
? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
: DECL_REGISTER (decl_result))
{
- rtx real_decl_rtl = current_function_return_rtx;
+ rtx real_decl_rtl = crtl->return_rtx;
/* This should be set in assign_parms. */
gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
/* If this is a BLKmode structure being returned in registers,
then use the mode computed in expand_return. Note that if
decl_rtl is memory, then its mode may have been changed,
- but that current_function_return_rtx has not. */
+ but that crtl->return_rtx has not. */
if (GET_MODE (real_decl_rtl) == BLKmode)
PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
If returning a structure PCC style,
the caller also depends on this value.
- And current_function_returns_pcc_struct is not necessarily set. */
- if (current_function_returns_struct
- || current_function_returns_pcc_struct)
+ And cfun->returns_pcc_struct is not necessarily set. */
+ if (cfun->returns_struct
+ || cfun->returns_pcc_struct)
{
rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
/* Show return register used to hold result (in this case the address
of the result. */
- current_function_return_rtx = outgoing;
+ crtl->return_rtx = outgoing;
}
/* Emit the actual code to clobber return register. */
emit_insn (gen_blockage ());
/* If stack protection is enabled for this function, check the guard. */
- if (cfun->stack_protect_guard)
+ if (crtl->stack_protect_guard)
stack_protect_epilogue ();
/* If we had calls to alloca, and this machine needs
an accurate stack pointer to exit the function,
insert some code to save and restore the stack pointer. */
if (! EXIT_IGNORE_STACK
- && current_function_calls_alloca)
+ && cfun->calls_alloca)
{
rtx tem = 0;
arg_pointer_save_area = ret;
}
- if (! cfun->arg_pointer_save_area_init)
+ if (! crtl->arg_pointer_save_area_init)
{
rtx seq;
/* Insert an explicit USE for the frame pointer
if the profiling is on and the frame pointer is required. */
- if (current_function_profile && frame_pointer_needed)
- emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
+ if (crtl->profile && frame_pointer_needed)
+ emit_use (hard_frame_pointer_rtx);
/* Retain a map of the prologue insns. */
record_insns (seq, &prologue);
/* Ensure that instructions are not moved into the prologue when
profiling is on. The call to the profiling routine can be
emitted within the live range of a call-clobbered register. */
- if (current_function_profile)
+ if (crtl->profile)
emit_insn (gen_blockage ());
#endif
rtx op = SET_SRC (p_sets[0]);
int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
- bool *output_matched = alloca (noutputs * sizeof (bool));
+ bool *output_matched = XALLOCAVEC (bool, noutputs);
memset (output_matched, 0, noutputs * sizeof (bool));
for (i = 0; i < ninputs; i++)
emit_insn_before (insns, insn);
/* Now replace all mentions of the input with output. We can't
- just replace the occurence in inputs[i], as the register might
+ just replace the occurrence in inputs[i], as the register might
also be used in some other input (or even in an address of an
output), which would mean possibly increasing the number of
inputs by one (namely 'output' in addition), which might pose
Here 'input' is used in two occurrences as input (once for the
input operand, once for the address in the second output operand).
- If we would replace only the occurence of the input operand (to
+ If we would replace only the occurrence of the input operand (to
make the matching) we would be left with this:
output = input
rtx insn, pat, *p_sets;
int noutputs;
- if (!cfun->has_asm_statement)
+ if (!crtl->has_asm_statement)
return 0;
df_set_flags (DF_DEFER_INSN_RESCAN);