#include "system.h"
#include "coretypes.h"
#include "tm.h"
-#include "rtl.h"
+#include "rtl-error.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
-#include "toplev.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
\f
htab_t types_used_by_vars_hash = NULL;
-tree types_used_by_cur_var_decl = NULL;
+VEC(tree,gc) *types_used_by_cur_var_decl;
/* Forward declarations. */
return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}
+/* Determine whether it is possible to fit a stack slot of size SIZE and
+ alignment ALIGNMENT into an area in the stack frame that starts at
+ frame offset START and has a length of LENGTH. If so, store the frame
+ offset to be used for the stack slot in *POFFSET and return true;
+ return false otherwise. This function will extend the frame size when
+ given a start/length pair that lies at the end of the frame. */
+
+static bool
+try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
+ HOST_WIDE_INT size, unsigned int alignment,
+ HOST_WIDE_INT *poffset)
+{
+ HOST_WIDE_INT this_frame_offset;
+ int frame_off, frame_alignment, frame_phase;
+
+ /* Calculate how many bytes the start of local variables is off from
+ stack alignment. */
+ frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
+ frame_off = STARTING_FRAME_OFFSET % frame_alignment;
+ frame_phase = frame_off ? frame_alignment - frame_off : 0;
+
+ /* Round the frame offset to the specified alignment. */
+
+ /* We must be careful here, since FRAME_OFFSET might be negative and
+ division with a negative dividend isn't as well defined as we might
+ like. So we instead assume that ALIGNMENT is a power of two and
+ use logical operations which are unambiguous. */
+ if (FRAME_GROWS_DOWNWARD)
+ /* Downward-growing frame: pick the highest aligned offset at which
+ the slot still ends within [START, START+LENGTH). */
+ this_frame_offset
+ = (FLOOR_ROUND (start + length - size - frame_phase,
+ (unsigned HOST_WIDE_INT) alignment)
+ + frame_phase);
+ else
+ /* Upward-growing frame: pick the lowest aligned offset at or
+ above START. */
+ this_frame_offset
+ = (CEIL_ROUND (start - frame_phase,
+ (unsigned HOST_WIDE_INT) alignment)
+ + frame_phase);
+
+ /* See if it fits. If this space is at the edge of the frame,
+ consider extending the frame to make it fit. Our caller relies on
+ this when allocating a new slot. */
+ if (frame_offset == start && this_frame_offset < frame_offset)
+ /* The area begins at the current end of a downward-growing frame;
+ extend the frame downward to cover the rounded offset. */
+ frame_offset = this_frame_offset;
+ else if (this_frame_offset < start)
+ /* Rounded offset falls before the area and the frame cannot be
+ extended here: no fit. */
+ return false;
+ else if (start + length == frame_offset
+ && this_frame_offset + size > start + length)
+ /* The area ends at the current end of an upward-growing frame;
+ extend the frame upward so the slot fits. */
+ frame_offset = this_frame_offset + size;
+ else if (this_frame_offset + size > start + length)
+ /* Slot would overrun the area and the frame cannot be extended. */
+ return false;
+
+ *poffset = this_frame_offset;
+ return true;
+}
+
+/* Create a new frame_space structure describing free space in the stack
+ frame beginning at START and ending at END, and chain it into the
+ function's frame_space_list. */
+
+static void
+add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
+{
+ struct frame_space *space = ggc_alloc_frame_space ();
+ /* Push onto the head of the per-function list; adjacent free
+ regions are not coalesced here. */
+ space->next = crtl->frame_space_list;
+ crtl->frame_space_list = space;
+ space->start = start;
+ /* Callers pass an exclusive END, so the length is END - START. */
+ space->length = end - start;
+}
+
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
with machine mode MODE.
{
rtx x, addr;
int bigend_correction = 0;
+ HOST_WIDE_INT slot_offset = 0, old_frame_offset;
unsigned int alignment, alignment_in_bits;
- int frame_off, frame_alignment, frame_phase;
if (align == 0)
{
alignment_in_bits = alignment * BITS_PER_UNIT;
- if (FRAME_GROWS_DOWNWARD)
- frame_offset -= size;
-
/* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
{
if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
crtl->max_used_stack_slot_alignment = alignment_in_bits;
- /* Calculate how many bytes the start of local variables is off from
- stack alignment. */
- frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
- frame_off = STARTING_FRAME_OFFSET % frame_alignment;
- frame_phase = frame_off ? frame_alignment - frame_off : 0;
+ if (mode != BLKmode || size != 0)
+ {
+ struct frame_space **psp;
- /* Round the frame offset to the specified alignment. The default is
- to always honor requests to align the stack but a port may choose to
- do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
- if (STACK_ALIGNMENT_NEEDED
- || mode != BLKmode
- || size != 0)
+ for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
+ {
+ struct frame_space *space = *psp;
+ if (!try_fit_stack_local (space->start, space->length, size,
+ alignment, &slot_offset))
+ continue;
+ *psp = space->next;
+ if (slot_offset > space->start)
+ add_frame_space (space->start, slot_offset);
+ if (slot_offset + size < space->start + space->length)
+ add_frame_space (slot_offset + size,
+ space->start + space->length);
+ goto found_space;
+ }
+ }
+ else if (!STACK_ALIGNMENT_NEEDED)
{
- /* We must be careful here, since FRAME_OFFSET might be negative and
- division with a negative dividend isn't as well defined as we might
- like. So we instead assume that ALIGNMENT is a power of two and
- use logical operations which are unambiguous. */
- if (FRAME_GROWS_DOWNWARD)
- frame_offset
- = (FLOOR_ROUND (frame_offset - frame_phase,
- (unsigned HOST_WIDE_INT) alignment)
- + frame_phase);
- else
- frame_offset
- = (CEIL_ROUND (frame_offset - frame_phase,
- (unsigned HOST_WIDE_INT) alignment)
- + frame_phase);
+ slot_offset = frame_offset;
+ goto found_space;
}
+ old_frame_offset = frame_offset;
+
+ if (FRAME_GROWS_DOWNWARD)
+ {
+ frame_offset -= size;
+ try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
+
+ if (slot_offset > frame_offset)
+ add_frame_space (frame_offset, slot_offset);
+ if (slot_offset + size < old_frame_offset)
+ add_frame_space (slot_offset + size, old_frame_offset);
+ }
+ else
+ {
+ frame_offset += size;
+ try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
+
+ if (slot_offset > old_frame_offset)
+ add_frame_space (old_frame_offset, slot_offset);
+ if (slot_offset + size < frame_offset)
+ add_frame_space (slot_offset + size, frame_offset);
+ }
+
+ found_space:
/* On a big-endian machine, if we are allocating more space than we will use,
use the least significant bytes of those that are allocated. */
if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
if (virtuals_instantiated)
addr = plus_constant (frame_pointer_rtx,
trunc_int_for_mode
- (frame_offset + bigend_correction
+ (slot_offset + bigend_correction
+ STARTING_FRAME_OFFSET, Pmode));
else
addr = plus_constant (virtual_stack_vars_rtx,
trunc_int_for_mode
- (frame_offset + bigend_correction,
+ (slot_offset + bigend_correction,
Pmode));
- if (!FRAME_GROWS_DOWNWARD)
- frame_offset += size;
-
x = gen_rtx_MEM (mode, addr);
set_mem_align (x, alignment_in_bits);
MEM_NOTRAP_P (x) = 1;
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
void **slot;
- struct temp_slot_address_entry *t = GGC_NEW (struct temp_slot_address_entry);
+ struct temp_slot_address_entry *t = ggc_alloc_temp_slot_address_entry ();
t->address = address;
t->temp_slot = temp_slot;
t->hash = temp_slot_address_compute_hash (t);
if (best_p->size - rounded_size >= alignment)
{
- p = GGC_NEW (struct temp_slot);
+ p = ggc_alloc_temp_slot ();
p->in_use = p->addr_taken = 0;
p->size = best_p->size - rounded_size;
p->base_offset = best_p->base_offset + rounded_size;
{
HOST_WIDE_INT frame_offset_old = frame_offset;
- p = GGC_NEW (struct temp_slot);
+ p = ggc_alloc_temp_slot ();
/* We are passing an explicit alignment request to assign_stack_local.
One side effect of that is assign_stack_local will not round SIZE
return;
}
-#ifdef FUNCTION_INCOMING_ARG
- entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
- data->passed_type, data->named_arg);
-#else
- entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
- data->passed_type, data->named_arg);
-#endif
+ entry_parm = targetm.calls.function_incoming_arg (&all->args_so_far,
+ data->promoted_mode,
+ data->passed_type,
+ data->named_arg);
if (entry_parm == 0)
data->promoted_mode = data->passed_mode;
if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
{
rtx tem;
-#ifdef FUNCTION_INCOMING_ARG
- tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
- data->passed_type, true);
-#else
- tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
- data->passed_type, true);
-#endif
+ tem = targetm.calls.function_incoming_arg (&all->args_so_far,
+ data->promoted_mode,
+ data->passed_type, true);
in_regs = tem != NULL;
}
}
set_decl_incoming_rtl (parm, data.entry_parm, data.passed_pointer);
/* Update info on where next arg arrives in registers. */
- FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
- data.passed_type, data.named_arg);
+ targetm.calls.function_arg_advance (&all.args_so_far, data.promoted_mode,
+ data.passed_type, data.named_arg);
assign_parm_adjust_stack_rtl (&data);
/* See how many bytes, if any, of its args a function should try to pop
on return. */
- crtl->args.pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
- crtl->args.size);
+ crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
+ TREE_TYPE (fndecl),
+ crtl->args.size);
/* For stdarg.h function, save info about
regs and stack space used by the named args. */
continue;
/* Update info on where next arg arrives in registers. */
- FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
- data.passed_type, data.named_arg);
+ targetm.calls.function_arg_advance (&all.args_so_far, data.promoted_mode,
+ data.passed_type, data.named_arg);
/* ??? Once upon a time variable_size stuffed parameter list
SAVE_EXPRs (amongst others) onto a pending sizes list. This
/* If VAR is present in a subblock of BLOCK, return the subblock. */
-tree
+DEBUG_FUNCTION tree
debug_find_var_in_block_tree (tree var, tree block)
{
tree t;
tree result;
tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
- cfun = GGC_CNEW (struct function);
-
- cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
+ cfun = ggc_alloc_cleared_function ();
init_eh_for_function ();
/* Assume all registers in stdarg functions need to be saved. */
cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
+
+ /* ??? This could be set on a per-function basis by the front-end
+ but is this worth the hassle? */
+ cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
}
}
allocate_struct_function (fndecl, false);
}
-/* Reset cfun, and other non-struct-function variables to defaults as
+/* Reset crtl and other non-struct-function variables to defaults as
appropriate for emitting rtl at the start of a function. */
static void
/* We want to ensure that instructions that may trap are not
moved into the epilogue by scheduling, because we don't
always emit unwind information for the epilogue. */
- if (flag_non_call_exceptions)
+ if (cfun->can_throw_non_call_exceptions)
emit_insn (gen_blockage ());
}
/* @@@ This is a kludge. We want to ensure that instructions that
may trap are not moved into the epilogue by scheduling, because
we don't always emit unwind information for the epilogue. */
- if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions)
+ if (!USING_SJLJ_EXCEPTIONS && cfun->can_throw_non_call_exceptions)
emit_insn (gen_blockage ());
/* If stack protection is enabled for this function, check the guard. */
/* So this might be a type referenced by a global variable.
Record that type so that we can later decide to emit its debug
information. */
- types_used_by_cur_var_decl =
- tree_cons (t, NULL, types_used_by_cur_var_decl);
-
+ VEC_safe_push (tree, gc, types_used_by_cur_var_decl, t);
}
}
if (*slot == NULL)
{
struct types_used_by_vars_entry *entry;
- entry = (struct types_used_by_vars_entry*) ggc_alloc
- (sizeof (struct types_used_by_vars_entry));
+ entry = ggc_alloc_types_used_by_vars_entry ();
entry->type = type;
entry->var_decl = var_decl;
*slot = entry;