/* Expands front end tree to back end RTL for GCC.
Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
- 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
- Free Software Foundation, Inc.
+ 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
+ 2010 Free Software Foundation, Inc.
This file is part of GCC.
return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}
+/* Determine whether it is possible to fit a stack slot of size SIZE and
+ alignment ALIGNMENT into an area in the stack frame that starts at
+ frame offset START and has a length of LENGTH. If so, store the frame
+ offset to be used for the stack slot in *POFFSET and return true;
+ return false otherwise. This function will extend the frame size when
+ given a start/length pair that lies at the end of the frame. */
+
+static bool
+try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
+ HOST_WIDE_INT size, unsigned int alignment,
+ HOST_WIDE_INT *poffset)
+{
+ HOST_WIDE_INT this_frame_offset;
+ int frame_off, frame_alignment, frame_phase;
+
+ /* Calculate how many bytes the start of local variables is off from
+ stack alignment. */
+ frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
+ frame_off = STARTING_FRAME_OFFSET % frame_alignment;
+ frame_phase = frame_off ? frame_alignment - frame_off : 0;
+
+ /* Round the frame offset to the specified alignment. */
+
+ /* We must be careful here, since FRAME_OFFSET might be negative and
+ division with a negative dividend isn't as well defined as we might
+ like. So we instead assume that ALIGNMENT is a power of two and
+ use logical operations which are unambiguous. */
+ if (FRAME_GROWS_DOWNWARD)
+ this_frame_offset
+ = (FLOOR_ROUND (start + length - size - frame_phase,
+ (unsigned HOST_WIDE_INT) alignment)
+ + frame_phase);
+ else
+ this_frame_offset
+ = (CEIL_ROUND (start - frame_phase,
+ (unsigned HOST_WIDE_INT) alignment)
+ + frame_phase);
+
+ /* See if it fits. If this space is at the edge of the frame,
+ consider extending the frame to make it fit. Our caller relies on
+ this when allocating a new slot. */
+ if (frame_offset == start && this_frame_offset < frame_offset)
+ frame_offset = this_frame_offset;
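+  /* Otherwise, if the aligned slot would start before the area does,
+     it does not fit.  */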
+ else if (this_frame_offset < start)
+ return false;
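+  /* If the area ends at the current frame edge, the frame can be
+     extended upward to make the slot fit.  */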
+ else if (start + length == frame_offset
+ && this_frame_offset + size > start + length)
+ frame_offset = this_frame_offset + size;
+ else if (this_frame_offset + size > start + length)
+ return false;
+
+ *poffset = this_frame_offset;
+ return true;
+}
+
+/* Create a new frame_space structure describing free space in the stack
+ frame beginning at START and ending at END, and chain it into the
+ function's frame_space_list. */
+
+static void
+add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
+{
+ struct frame_space *space = GGC_NEW (struct frame_space);
+ space->next = crtl->frame_space_list;
+ crtl->frame_space_list = space;
+ space->start = start;
+ space->length = end - start;
+}
+
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
with machine mode MODE.
{
rtx x, addr;
int bigend_correction = 0;
+ HOST_WIDE_INT slot_offset, old_frame_offset;
unsigned int alignment, alignment_in_bits;
- int frame_off, frame_alignment, frame_phase;
if (align == 0)
{
alignment_in_bits = alignment * BITS_PER_UNIT;
- if (FRAME_GROWS_DOWNWARD)
- frame_offset -= size;
-
/* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
{
if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
crtl->max_used_stack_slot_alignment = alignment_in_bits;
- /* Calculate how many bytes the start of local variables is off from
- stack alignment. */
- frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
- frame_off = STARTING_FRAME_OFFSET % frame_alignment;
- frame_phase = frame_off ? frame_alignment - frame_off : 0;
+ if (mode != BLKmode || size != 0)
+ {
+ struct frame_space **psp;
- /* Round the frame offset to the specified alignment. The default is
- to always honor requests to align the stack but a port may choose to
- do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
- if (STACK_ALIGNMENT_NEEDED
- || mode != BLKmode
- || size != 0)
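+      /* Look for an existing piece of free frame space that can hold
+	 a slot of this size and alignment.  */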
+ for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
+ {
+ struct frame_space *space = *psp;
+ if (!try_fit_stack_local (space->start, space->length, size,
+ alignment, &slot_offset))
+ continue;
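+	  /* This space fits: unlink it from the free list and give any
+	     unused portions before and after the slot back to it.  */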
+ *psp = space->next;
+ if (slot_offset > space->start)
+ add_frame_space (space->start, slot_offset);
+ if (slot_offset + size < space->start + space->length)
+ add_frame_space (slot_offset + size,
+ space->start + space->length);
+ goto found_space;
+ }
+ }
+ else if (!STACK_ALIGNMENT_NEEDED)
{
- /* We must be careful here, since FRAME_OFFSET might be negative and
- division with a negative dividend isn't as well defined as we might
- like. So we instead assume that ALIGNMENT is a power of two and
- use logical operations which are unambiguous. */
- if (FRAME_GROWS_DOWNWARD)
- frame_offset
- = (FLOOR_ROUND (frame_offset - frame_phase,
- (unsigned HOST_WIDE_INT) alignment)
- + frame_phase);
- else
- frame_offset
- = (CEIL_ROUND (frame_offset - frame_phase,
- (unsigned HOST_WIDE_INT) alignment)
- + frame_phase);
+ slot_offset = frame_offset;
+ goto found_space;
}
+ old_frame_offset = frame_offset;
+
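+  /* No free frame space was usable; allocate the slot at the current
+     end of the frame.  The try_fit_stack_local calls below cannot
+     fail, since they are passed an area at the frame edge and
+     try_fit_stack_local extends the frame in that case.  */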
+ if (FRAME_GROWS_DOWNWARD)
+ {
+ frame_offset -= size;
+ try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
+
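+      /* Give any padding introduced by the alignment rounding back to
+	 the free space list.  */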
+ if (slot_offset > frame_offset)
+ add_frame_space (frame_offset, slot_offset);
+ if (slot_offset + size < old_frame_offset)
+ add_frame_space (slot_offset + size, old_frame_offset);
+ }
+ else
+ {
+ frame_offset += size;
+ try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
+
+ if (slot_offset > old_frame_offset)
+ add_frame_space (old_frame_offset, slot_offset);
+ if (slot_offset + size < frame_offset)
+ add_frame_space (slot_offset + size, frame_offset);
+ }
+
+ found_space:
/* On a big-endian machine, if we are allocating more space than we will use,
use the least significant bytes of those that are allocated. */
if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
if (virtuals_instantiated)
addr = plus_constant (frame_pointer_rtx,
trunc_int_for_mode
- (frame_offset + bigend_correction
+ (slot_offset + bigend_correction
+ STARTING_FRAME_OFFSET, Pmode));
else
addr = plus_constant (virtual_stack_vars_rtx,
trunc_int_for_mode
- (frame_offset + bigend_correction,
+ (slot_offset + bigend_correction,
Pmode));
- if (!FRAME_GROWS_DOWNWARD)
- frame_offset += size;
-
x = gen_rtx_MEM (mode, addr);
set_mem_align (x, alignment_in_bits);
MEM_NOTRAP_P (x) = 1;
free_temp_slots (void)
{
struct temp_slot *p, *next;
+ bool some_available = false;
for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
{
next = p->next;
if (!p->keep)
- make_slot_available (p);
+ {
+ make_slot_available (p);
+ some_available = true;
+ }
}
- remove_unused_temp_slot_addresses ();
- combine_temp_slots ();
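+  /* Pruning the temp slot address table and combining adjacent slots
+     is only worthwhile if a slot was actually freed.  */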
+ if (some_available)
+ {
+ remove_unused_temp_slot_addresses ();
+ combine_temp_slots ();
+ }
}
/* Push deeper into the nesting level for stack temporaries. */
pop_temp_slots (void)
{
struct temp_slot *p, *next;
+ bool some_available = false;
for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
{
next = p->next;
make_slot_available (p);
+ some_available = true;
}
- remove_unused_temp_slot_addresses ();
- combine_temp_slots ();
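+  /* As in free_temp_slots, skip the cleanup if nothing was freed.  */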
+ if (some_available)
+ {
+ remove_unused_temp_slot_addresses ();
+ combine_temp_slots ();
+ }
temp_slot_level--;
}
int
aggregate_value_p (const_tree exp, const_tree fntype)
{
+ const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
int i, regno, nregs;
rtx reg;
- const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
-
- /* DECL node associated with FNTYPE when relevant, which we might need to
- check for by-invisible-reference returns, typically for CALL_EXPR input
- EXPressions. */
- const_tree fndecl = NULL_TREE;
-
if (fntype)
switch (TREE_CODE (fntype))
{
case CALL_EXPR:
- fndecl = get_callee_fndecl (fntype);
- fntype = (fndecl
- ? TREE_TYPE (fndecl)
- : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
+ {
+ tree fndecl = get_callee_fndecl (fntype);
+ fntype = (fndecl
+ ? TREE_TYPE (fndecl)
+ : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
+ }
break;
case FUNCTION_DECL:
- fndecl = fntype;
- fntype = TREE_TYPE (fndecl);
+ fntype = TREE_TYPE (fntype);
break;
case FUNCTION_TYPE:
case METHOD_TYPE:
break;
case IDENTIFIER_NODE:
- fntype = 0;
+ fntype = NULL_TREE;
break;
default:
- /* We don't expect other rtl types here. */
+ /* We don't expect other tree types here. */
gcc_unreachable ();
}
- if (TREE_CODE (type) == VOID_TYPE)
+ if (VOID_TYPE_P (type))
return 0;
+  /* If a record should be passed the same as its first (and only) member,
+     don't pass it as an aggregate.  */
+ if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
+ return aggregate_value_p (first_field (type), fntype);
+
/* If the front end has decided that this needs to be passed by
reference, do so. */
if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
&& DECL_BY_REFERENCE (exp))
return 1;
- /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
- called function RESULT_DECL, meaning the function returns in memory by
- invisible reference. This check lets front-ends not set TREE_ADDRESSABLE
- on the function type, which used to be the way to request such a return
- mechanism but might now be causing troubles at gimplification time if
- temporaries with the function type need to be created. */
- if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
- && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
+ /* Function types that are TREE_ADDRESSABLE force return in memory. */
+ if (fntype && TREE_ADDRESSABLE (fntype))
return 1;
- if (targetm.calls.return_in_memory (type, fntype))
- return 1;
/* Types that are TREE_ADDRESSABLE must be constructed in memory,
and thus can't be returned in registers. */
if (TREE_ADDRESSABLE (type))
return 1;
+
if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
return 1;
+
+ if (targetm.calls.return_in_memory (type, fntype))
+ return 1;
+
/* Make sure we have suitable call-clobbered regs to return
the value in; if not, we must return it in memory. */
reg = hard_function_value (type, 0, fntype, 0);
for (i = 0; i < nregs; i++)
if (! call_used_regs[regno + i])
return 1;
+
return 0;
}
\f
/* GCC post 3.4 passes *all* variable sized types by reference. */
if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
return true;
+
+ /* If a record type should be passed the same as its first (and only)
+ member, use the type and mode of that member. */
+ if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
+ {
+ type = TREE_TYPE (first_field (type));
+ mode = TYPE_MODE (type);
+ }
}
return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
- tree fntype;
+ tree fntype ATTRIBUTE_UNUSED;
memset (all, 0, sizeof (*all));
-   entries of the component type.  Return a new list of substitutions are
-   needed, else the old list.  */
+   entries of the component type.  The vector ARGS is modified in
+   place.  */
-static tree
-split_complex_args (tree args)
+static void
+split_complex_args (VEC(tree, heap) **args)
{
+ unsigned i;
tree p;
- /* Before allocating memory, check for the common case of no complex. */
- for (p = args; p; p = TREE_CHAIN (p))
- {
- tree type = TREE_TYPE (p);
- if (TREE_CODE (type) == COMPLEX_TYPE
- && targetm.calls.split_complex_arg (type))
- goto found;
- }
- return args;
-
- found:
- args = copy_list (args);
-
- for (p = args; p; p = TREE_CHAIN (p))
+ for (i = 0; VEC_iterate (tree, *args, i, p); ++i)
{
tree type = TREE_TYPE (p);
if (TREE_CODE (type) == COMPLEX_TYPE
bool addressable = TREE_ADDRESSABLE (p);
/* Rewrite the PARM_DECL's type with its component. */
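+	  /* Work on a copy, so that the PARM_DECL that remains chained
+	     in the function's DECL_ARGUMENTS is left unchanged.  */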
+ p = copy_node (p);
TREE_TYPE (p) = subtype;
DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
DECL_MODE (p) = VOIDmode;
DECL_IGNORED_P (p) = addressable;
TREE_ADDRESSABLE (p) = 0;
layout_decl (p, 0);
+ VEC_replace (tree, *args, i, p);
/* Build a second synthetic decl. */
decl = build_decl (EXPR_LOCATION (p),
DECL_ARTIFICIAL (decl) = addressable;
DECL_IGNORED_P (decl) = addressable;
layout_decl (decl, 0);
-
- /* Splice it in; skip the new decl. */
- TREE_CHAIN (decl) = TREE_CHAIN (p);
- TREE_CHAIN (p) = decl;
- p = decl;
+ VEC_safe_insert (tree, heap, *args, ++i, decl);
}
}
-
- return args;
}
/* A subroutine of assign_parms. Adjust the parameter list to incorporate
the hidden struct return argument, and (abi willing) complex args.
Return the new parameter list. */
-static tree
+static VEC(tree, heap) *
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
tree fndecl = current_function_decl;
tree fntype = TREE_TYPE (fndecl);
- tree fnargs = DECL_ARGUMENTS (fndecl);
+ VEC(tree, heap) *fnargs = NULL;
+ tree arg;
+
+ for (arg = DECL_ARGUMENTS (fndecl); arg; arg = TREE_CHAIN (arg))
+ VEC_safe_push (tree, heap, fnargs, arg);
+
+ all->orig_fnargs = DECL_ARGUMENTS (fndecl);
/* If struct value address is treated as the first argument, make it so. */
if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
DECL_ARTIFICIAL (decl) = 1;
DECL_IGNORED_P (decl) = 1;
- TREE_CHAIN (decl) = fnargs;
- fnargs = decl;
+ TREE_CHAIN (decl) = all->orig_fnargs;
+ all->orig_fnargs = decl;
+ VEC_safe_insert (tree, heap, fnargs, 0, decl);
+
all->function_result_decl = decl;
}
- all->orig_fnargs = fnargs;
-
/* If the target wants to split complex arguments into scalars, do so. */
if (targetm.calls.split_complex_arg)
- fnargs = split_complex_args (fnargs);
+ split_complex_args (&fnargs);
return fnargs;
}
passed_mode = TYPE_MODE (passed_type);
nominal_mode = TYPE_MODE (nominal_type);
- /* If the parm is to be passed as a transparent union, use the type of
- the first field for the tests below. We have already verified that
- the modes are the same. */
- if (TREE_CODE (passed_type) == UNION_TYPE
- && TYPE_TRANSPARENT_UNION (passed_type))
- passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
+ /* If the parm is to be passed as a transparent union or record, use the
+ type of the first field for the tests below. We have already verified
+ that the modes are the same. */
+ if ((TREE_CODE (passed_type) == UNION_TYPE
+ || TREE_CODE (passed_type) == RECORD_TYPE)
+ && TYPE_TRANSPARENT_AGGR (passed_type))
+ passed_type = TREE_TYPE (first_field (passed_type));
/* See if this arg was passed by invisible reference. */
if (pass_by_reference (&all->args_so_far, passed_mode,
undo the frobbing that we did in assign_parms_augmented_arg_list. */
static void
-assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
+assign_parms_unsplit_complex (struct assign_parm_data_all *all,
+ VEC(tree, heap) *fnargs)
{
tree parm;
tree orig_fnargs = all->orig_fnargs;
+ unsigned i = 0;
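+  /* I indexes FNARGS, which runs ahead of the ORIG_FNARGS chain
+     wherever a complex argument was split into two entries.  */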
- for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
+ for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
{
if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
&& targetm.calls.split_complex_arg (TREE_TYPE (parm)))
rtx tmp, real, imag;
enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
- real = DECL_RTL (fnargs);
- imag = DECL_RTL (TREE_CHAIN (fnargs));
+ real = DECL_RTL (VEC_index (tree, fnargs, i));
+ imag = DECL_RTL (VEC_index (tree, fnargs, i + 1));
if (inner != GET_MODE (real))
{
real = gen_lowpart_SUBREG (inner, real);
tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
SET_DECL_RTL (parm, tmp);
- real = DECL_INCOMING_RTL (fnargs);
- imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
+ real = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i));
+ imag = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i + 1));
if (inner != GET_MODE (real))
{
real = gen_lowpart_SUBREG (inner, real);
}
tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
set_decl_incoming_rtl (parm, tmp, false);
- fnargs = TREE_CHAIN (fnargs);
+ i++;
}
- else
- {
- SET_DECL_RTL (parm, DECL_RTL (fnargs));
- set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs), false);
-
- /* Set MEM_EXPR to the original decl, i.e. to PARM,
- instead of the copy of decl, i.e. FNARGS. */
- if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
- set_mem_expr (DECL_INCOMING_RTL (parm), parm);
- }
-
- fnargs = TREE_CHAIN (fnargs);
}
}
assign_parms (tree fndecl)
{
struct assign_parm_data_all all;
- tree fnargs, parm;
+ tree parm;
+ VEC(tree, heap) *fnargs;
+ unsigned i;
crtl->args.internal_arg_pointer
= targetm.calls.internal_arg_pointer ();
assign_parms_initialize_all (&all);
fnargs = assign_parms_augmented_arg_list (&all);
- for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
+ for (i = 0; VEC_iterate (tree, fnargs, i, parm); ++i)
{
struct assign_parm_data_one data;
assign_parm_setup_stack (&all, parm, &data);
}
- if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
+ if (targetm.calls.split_complex_arg)
assign_parms_unsplit_complex (&all, fnargs);
+ VEC_free (tree, heap, fnargs);
+
/* Output all parameter conversion instructions (possibly including calls)
now that all parameters have been copied out of hard registers. */
emit_insn (all.first_conversion_insn);
gimplify_parameters (void)
{
struct assign_parm_data_all all;
- tree fnargs, parm;
+ tree parm;
gimple_seq stmts = NULL;
+ VEC(tree, heap) *fnargs;
+ unsigned i;
assign_parms_initialize_all (&all);
fnargs = assign_parms_augmented_arg_list (&all);
- for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
+ for (i = 0; VEC_iterate (tree, fnargs, i, parm); ++i)
{
struct assign_parm_data_one data;
}
}
+ VEC_free (tree, heap, fnargs);
+
return stmts;
}
\f
cfun = GGC_CNEW (struct function);
- cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
-
init_eh_for_function ();
if (init_machine_status)
used_types_insert (tree t)
{
while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
- t = TREE_TYPE (t);
- t = TYPE_MAIN_VARIANT (t);
+ if (TYPE_NAME (t))
+ break;
+ else
+ t = TREE_TYPE (t);
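+  /* Replace T with its main variant, but not if that would lose a
+     name specific to this variant.  */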
+ if (TYPE_NAME (t) == NULL_TREE
+ || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
+ t = TYPE_MAIN_VARIANT (t);
if (debug_info_level > DINFO_LEVEL_NONE)
{
if (cfun)