/* Expands front end tree to back end RTL for GCC.
Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
- 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
+ 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
+ Free Software Foundation, Inc.
This file is part of GCC.
#include "langhooks.h"
#include "target.h"
#include "cfglayout.h"
+#include "tree-gimple.h"
#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
outer_function_chain = p->outer;
current_function_decl = p->decl;
- reg_renumber = 0;
-
- restore_emit_status (p);
lang_hooks.function.leave_nested (p);
This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
the caller may have to do that. */
-HOST_WIDE_INT
+static HOST_WIDE_INT
get_func_frame_size (struct function *f)
{
#ifdef FRAME_GROWS_DOWNWARD
static struct temp_slot **
temp_slots_at_level (int level)
{
- level++;
if (!used_temp_slots)
VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots");
if (decl && size == -1
&& TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
{
- error ("%Jsize of variable '%D' is too large", decl, decl);
+ error ("%Jsize of variable %qD is too large", decl, decl);
size = 1;
}
done for BLKmode slots because we can be sure that we won't have alignment
problems in this case. */
-void
+static void
combine_temp_slots (void)
{
struct temp_slot *p, *q, *next, *next_q;
enum machine_mode mode;
rtx addr;
+ if (x == 0)
+ return;
+
+ /* If this is a CONCAT, recurse for the pieces. */
+ if (GET_CODE (x) == CONCAT)
+ {
+ instantiate_decl (XEXP (x, 0), size / 2, valid_only);
+ instantiate_decl (XEXP (x, 1), size / 2, valid_only);
+ return;
+ }
+
/* If this is not a MEM, no need to do anything. Similarly if the
address is a constant or a register that is not a virtual register. */
-
- if (x == 0 || !MEM_P (x))
+ if (!MEM_P (x))
return;
addr = XEXP (x, 0);
instantiate_virtual_regs_lossage (rtx insn)
{
gcc_assert (asm_noperands (PATTERN (insn)) >= 0);
- error_for_asm (insn, "impossible constraint in `asm'");
+ error_for_asm (insn, "impossible constraint in %<asm%>");
delete_insn (insn);
}
/* Given a pointer to a piece of rtx and an optional pointer to the
if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
return false;
- /* Compiler-generated temporaries can always go in registers. */
- if (DECL_ARTIFICIAL (decl))
+ /* If we're not interested in tracking debugging information for
+ this decl, then we can certainly put it in a register. */
+ if (DECL_IGNORED_P (decl))
return true;
-#ifdef NON_SAVING_SETJMP
- /* Protect variables not declared "register" from setjmp. */
- if (NON_SAVING_SETJMP
- && current_function_calls_setjmp
- && !DECL_REGISTER (decl))
- return false;
-#endif
-
return (optimize || DECL_REGISTER (decl));
}
return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
}
+/* Return true if TYPE, which is passed by reference, should be callee
+ copied instead of caller copied. */
+
+bool
+reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
+ tree type, bool named_arg)
+{
+ if (type && TREE_ADDRESSABLE (type))
+ return false;
+ return targetm.calls.callee_copies (ca, mode, type, named_arg);
+}
+
/* Structures to communicate between the subroutines of assign_parms.
The first holds data persistent across all parameters, the second
is cleared out for each parameter. */
struct locate_and_pad_arg_data locate;
int partial;
BOOL_BITFIELD named_arg : 1;
- BOOL_BITFIELD last_named : 1;
BOOL_BITFIELD passed_pointer : 1;
BOOL_BITFIELD on_stack : 1;
BOOL_BITFIELD loaded_in_reg : 1;
{
tree decl;
tree subtype = TREE_TYPE (type);
+ bool addressable = TREE_ADDRESSABLE (p);
/* Rewrite the PARM_DECL's type with its component. */
TREE_TYPE (p) = subtype;
DECL_MODE (p) = VOIDmode;
DECL_SIZE (p) = NULL;
DECL_SIZE_UNIT (p) = NULL;
+ /* If this arg must go in memory, put it in a pseudo here.
+ We can't allow it to go in memory as per normal parms,
+ because the usual place might not have the imag part
+ adjacent to the real part. */
+ DECL_ARTIFICIAL (p) = addressable;
+ DECL_IGNORED_P (p) = addressable;
+ TREE_ADDRESSABLE (p) = 0;
layout_decl (p, 0);
/* Build a second synthetic decl. */
decl = build_decl (PARM_DECL, NULL_TREE, subtype);
DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
+ DECL_ARTIFICIAL (decl) = addressable;
+ DECL_IGNORED_P (decl) = addressable;
layout_decl (decl, 0);
/* Splice it in; skip the new decl. */
decl = build_decl (PARM_DECL, NULL_TREE, type);
DECL_ARG_TYPE (decl) = type;
DECL_ARTIFICIAL (decl) = 1;
+ DECL_IGNORED_P (decl) = 1;
TREE_CHAIN (decl) = fnargs;
fnargs = decl;
memset (data, 0, sizeof (*data));
- /* Set LAST_NAMED if this is last named arg before last anonymous args. */
- if (current_function_stdarg)
- {
- tree tem;
- for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
- if (DECL_NAME (tem))
- break;
- if (tem == 0)
- data->last_named = true;
- }
-
- /* Set NAMED_ARG if this arg should be treated as a named arg. For
- most machines, if this is a varargs/stdarg function, then we treat
- the last named arg as if it were anonymous too. */
- if (targetm.calls.strict_argument_naming (&all->args_so_far))
- data->named_arg = 1;
+ /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
+ if (!current_function_stdarg)
+ data->named_arg = 1; /* No variadic parms. */
+ else if (TREE_CHAIN (parm))
+ data->named_arg = 1; /* Not the last non-variadic parm. */
+ else if (targetm.calls.strict_argument_naming (&all->args_so_far))
+ data->named_arg = 1; /* Only variadic ones are unnamed. */
else
- data->named_arg = !data->last_named;
+ data->named_arg = 0; /* Treat as variadic. */
nominal_type = TREE_TYPE (parm);
passed_type = DECL_ARG_TYPE (parm);
{
int partial;
- partial = FUNCTION_ARG_PARTIAL_NREGS (all->args_so_far,
- data->promoted_mode,
- data->passed_type,
- data->named_arg);
+ partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
+ data->promoted_mode,
+ data->passed_type,
+ data->named_arg);
data->partial = partial;
/* The caller might already have allocated stack space for the
argument on the stack. */
gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
- pretend_bytes = partial * UNITS_PER_WORD;
+ pretend_bytes = partial;
all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
/* We want to align relative to the actual stack pointer, so
set_mem_attributes (stack_parm, parm, 1);
- boundary = FUNCTION_ARG_BOUNDARY (data->promoted_mode, data->passed_type);
- align = 0;
+ boundary = data->locate.boundary;
+ align = BITS_PER_UNIT;
/* If we're padding upward, we know that the alignment of the slot
is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
intentionally forcing upward padding. Otherwise we have to come
up with a guess at the alignment based on OFFSET_RTX. */
- if (data->locate.where_pad == upward || data->entry_parm)
+ if (data->locate.where_pad != downward || data->entry_parm)
align = boundary;
else if (GET_CODE (offset_rtx) == CONST_INT)
{
align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
align = align & -align;
}
- if (align > 0)
- set_mem_align (stack_parm, align);
+ set_mem_align (stack_parm, align);
if (data->entry_parm)
set_reg_attrs_for_parm (data->entry_parm, stack_parm);
data->passed_type,
int_size_in_bytes (data->passed_type));
else
- move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
- data->partial);
+ {
+ gcc_assert (data->partial % UNITS_PER_WORD == 0);
+ move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
+ data->partial / UNITS_PER_WORD);
+ }
entry_parm = stack_parm;
}
/* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
always valid and properly aligned. */
-
static void
assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
{
/* If we can't trust the parm stack slot to be aligned enough for its
ultimate type, don't use that slot after entry. We'll make another
stack slot, if we need one. */
- if (STRICT_ALIGNMENT && stack_parm
- && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
+ if (stack_parm
+ && ((STRICT_ALIGNMENT
+ && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
+ || (data->nominal_type
+ && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
+ && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
stack_parm = NULL;
/* If parm was passed in memory, and we need to convert it on entry,
return true;
#ifdef BLOCK_REG_PADDING
- if (data->locate.where_pad == (BYTES_BIG_ENDIAN ? upward : downward)
- && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD)
+ /* Only assign_parm_setup_block knows how to deal with register arguments
+ that are padded at the least significant end. */
+ if (REG_P (data->entry_parm)
+ && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
+ && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
+ == (BYTES_BIG_ENDIAN ? upward : downward)))
return true;
#endif
present and valid in DATA->STACK_RTL. */
static void
-assign_parm_setup_block (tree parm, struct assign_parm_data_one *data)
+assign_parm_setup_block (struct assign_parm_data_all *all,
+ tree parm, struct assign_parm_data_one *data)
{
rtx entry_parm = data->entry_parm;
rtx stack_parm = data->stack_parm;
+ HOST_WIDE_INT size;
+ HOST_WIDE_INT size_stored;
+ rtx orig_entry_parm = entry_parm;
+
+ if (GET_CODE (entry_parm) == PARALLEL)
+ entry_parm = emit_group_move_into_temps (entry_parm);
/* If we've a non-block object that's nevertheless passed in parts,
reconstitute it in register operations rather than on the stack. */
if (GET_CODE (entry_parm) == PARALLEL
- && data->nominal_mode != BLKmode
- && XVECLEN (entry_parm, 0) > 1
- && optimize)
+ && data->nominal_mode != BLKmode)
{
- rtx parmreg = gen_reg_rtx (data->nominal_mode);
+ rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);
- emit_group_store (parmreg, entry_parm, data->nominal_type,
- int_size_in_bytes (data->nominal_type));
- SET_DECL_RTL (parm, parmreg);
- return;
+ if ((XVECLEN (entry_parm, 0) > 1
+ || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
+ && use_register_for_decl (parm))
+ {
+ rtx parmreg = gen_reg_rtx (data->nominal_mode);
+
+ push_to_sequence (all->conversion_insns);
+
+ /* For values returned in multiple registers, handle possible
+ incompatible calls to emit_group_store.
+
+ For example, the following would be invalid, and would have to
+ be fixed by the conditional below:
+
+ emit_group_store ((reg:SF), (parallel:DF))
+ emit_group_store ((reg:SI), (parallel:DI))
+
+ An example of this are doubles in e500 v2:
+ (parallel:DF (expr_list (reg:SI) (const_int 0))
+ (expr_list (reg:SI) (const_int 4))). */
+ if (data->nominal_mode != data->passed_mode)
+ {
+ rtx t = gen_reg_rtx (GET_MODE (entry_parm));
+ emit_group_store (t, entry_parm, NULL_TREE,
+ GET_MODE_SIZE (GET_MODE (entry_parm)));
+ convert_move (parmreg, t, 0);
+ }
+ else
+ emit_group_store (parmreg, entry_parm, data->nominal_type,
+ int_size_in_bytes (data->nominal_type));
+
+ all->conversion_insns = get_insns ();
+ end_sequence ();
+
+ SET_DECL_RTL (parm, parmreg);
+ return;
+ }
+ }
+
+ size = int_size_in_bytes (data->passed_type);
+ size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
+ if (stack_parm == 0)
+ {
+ DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
+ stack_parm = assign_stack_local (BLKmode, size_stored,
+ DECL_ALIGN (parm));
+ if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
+ PUT_MODE (stack_parm, GET_MODE (entry_parm));
+ set_mem_attributes (stack_parm, parm, 1);
}
/* If a BLKmode arrives in registers, copy it to a stack slot. Handle
calls that pass values in multiple non-contiguous locations. */
if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
{
- HOST_WIDE_INT size = int_size_in_bytes (data->passed_type);
- HOST_WIDE_INT size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
rtx mem;
/* Note that we will be storing an integral number of words.
So we have to be careful to ensure that we allocate an
- integral number of words. We do this below in the
+ integral number of words. We do this above when we call
assign_stack_local if space was not allocated in the argument
list. If it was, this will not work if PARM_BOUNDARY is not
a multiple of BITS_PER_WORD. It isn't clear how to fix this
if it becomes a problem. Exception is when BLKmode arrives
with arguments not conforming to word_mode. */
- if (stack_parm == 0)
- {
- stack_parm = assign_stack_local (BLKmode, size_stored, 0);
- data->stack_parm = stack_parm;
- PUT_MODE (stack_parm, GET_MODE (entry_parm));
- set_mem_attributes (stack_parm, parm, 1);
- }
+ if (data->stack_parm == 0)
+ ;
else if (GET_CODE (entry_parm) == PARALLEL)
;
else
/* Handle values in multiple non-contiguous locations. */
if (GET_CODE (entry_parm) == PARALLEL)
- emit_group_store (mem, entry_parm, data->passed_type, size);
+ {
+ push_to_sequence (all->conversion_insns);
+ emit_group_store (mem, entry_parm, data->passed_type, size);
+ all->conversion_insns = get_insns ();
+ end_sequence ();
+ }
else if (size == 0)
;
{
rtx tem, x;
int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
- rtx reg = gen_rtx_REG (word_mode, REGNO (data->entry_parm));
+ rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
x = expand_shift (LSHIFT_EXPR, word_mode, reg,
build_int_cst (NULL_TREE, by),
emit_move_insn (tem, x);
}
else
- move_block_from_reg (REGNO (data->entry_parm), mem,
+ move_block_from_reg (REGNO (entry_parm), mem,
size_stored / UNITS_PER_WORD);
}
else
- move_block_from_reg (REGNO (data->entry_parm), mem,
+ move_block_from_reg (REGNO (entry_parm), mem,
size_stored / UNITS_PER_WORD);
}
+ else if (data->stack_parm == 0)
+ {
+ push_to_sequence (all->conversion_insns);
+ emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
+ BLOCK_OP_NORMAL);
+ all->conversion_insns = get_insns ();
+ end_sequence ();
+ }
+ data->stack_parm = stack_parm;
SET_DECL_RTL (parm, stack_parm);
}
/* TREE_USED gets set erroneously during expand_assignment. */
save_tree_used = TREE_USED (parm);
- expand_assignment (parm, make_tree (data->nominal_type, tempreg), 0);
+ expand_assignment (parm, make_tree (data->nominal_type, tempreg));
TREE_USED (parm) = save_tree_used;
all->conversion_insns = get_insns ();
end_sequence ();
emit_move_insn (tempreg, DECL_RTL (parm));
tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
emit_move_insn (parmreg, tempreg);
- all->conversion_insns = get_insns();
+ all->conversion_insns = get_insns ();
end_sequence ();
did_conversion = true;
data->stack_parm = NULL;
}
- /* If we are passed an arg by reference and it is our responsibility
- to make a copy, do it now.
- PASSED_TYPE and PASSED mode now refer to the pointer, not the
- original argument, so we must recreate them in the call to
- FUNCTION_ARG_CALLEE_COPIES. */
- /* ??? Later add code to handle the case that if the argument isn't
- modified, don't do the copy. */
-
- else if (data->passed_pointer)
- {
- tree type = TREE_TYPE (data->passed_type);
-
- if (FUNCTION_ARG_CALLEE_COPIES (all->args_so_far, TYPE_MODE (type),
- type, data->named_arg)
- && !TREE_ADDRESSABLE (type))
- {
- rtx copy;
-
- /* This sequence may involve a library call perhaps clobbering
- registers that haven't been copied to pseudos yet. */
-
- push_to_sequence (all->conversion_insns);
-
- if (!COMPLETE_TYPE_P (type)
- || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
- {
- /* This is a variable sized object. */
- copy = allocate_dynamic_stack_space (expr_size (parm), NULL_RTX,
- TYPE_ALIGN (type));
- copy = gen_rtx_MEM (BLKmode, copy);
- }
- else
- copy = assign_stack_temp (TYPE_MODE (type),
- int_size_in_bytes (type), 1);
- set_mem_attributes (copy, parm, 1);
-
- store_expr (parm, copy, 0);
- emit_move_insn (parmreg, XEXP (copy, 0));
- all->conversion_insns = get_insns ();
- end_sequence ();
-
- did_conversion = true;
- }
- }
-
/* Mark the register as eliminable if we did no conversion and it was
copied from memory at a fixed offset, and the arg pointer was not
copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
{
enum machine_mode submode
= GET_MODE_INNER (GET_MODE (parmreg));
- int regnor = REGNO (gen_realpart (submode, parmreg));
- int regnoi = REGNO (gen_imagpart (submode, parmreg));
- rtx stackr = gen_realpart (submode, data->stack_parm);
- rtx stacki = gen_imagpart (submode, data->stack_parm);
+ int regnor = REGNO (XEXP (parmreg, 0));
+ int regnoi = REGNO (XEXP (parmreg, 1));
+ rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
+ rtx stacki = adjust_address_nv (data->stack_parm, submode,
+ GET_MODE_SIZE (submode));
/* Scan backwards for the set of the real and
imaginary parts. */
{
/* Value must be stored in the stack slot STACK_PARM during function
execution. */
+ bool to_conversion = false;
if (data->promoted_mode != data->nominal_mode)
{
emit_move_insn (tempreg, validize_mem (data->entry_parm));
push_to_sequence (all->conversion_insns);
+ to_conversion = true;
+
data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
TYPE_UNSIGNED (TREE_TYPE (parm)));
/* ??? This may need a big-endian conversion on sparc64. */
data->stack_parm
= adjust_address (data->stack_parm, data->nominal_mode, 0);
-
- all->conversion_insns = get_insns ();
- end_sequence ();
}
if (data->entry_parm != data->stack_parm)
{
+ rtx src, dest;
+
if (data->stack_parm == 0)
{
data->stack_parm
= assign_stack_local (GET_MODE (data->entry_parm),
GET_MODE_SIZE (GET_MODE (data->entry_parm)),
- 0);
+ TYPE_ALIGN (data->passed_type));
set_mem_attributes (data->stack_parm, parm, 1);
}
- if (data->promoted_mode != data->nominal_mode)
+ dest = validize_mem (data->stack_parm);
+ src = validize_mem (data->entry_parm);
+
+ if (MEM_P (src))
{
- push_to_sequence (all->conversion_insns);
- emit_move_insn (validize_mem (data->stack_parm),
- validize_mem (data->entry_parm));
- all->conversion_insns = get_insns ();
- end_sequence ();
+ /* Use a block move to handle potentially misaligned entry_parm. */
+ if (!to_conversion)
+ push_to_sequence (all->conversion_insns);
+ to_conversion = true;
+
+ emit_block_move (dest, src,
+ GEN_INT (int_size_in_bytes (data->passed_type)),
+ BLOCK_OP_NORMAL);
}
else
- emit_move_insn (validize_mem (data->stack_parm),
- validize_mem (data->entry_parm));
+ emit_move_insn (dest, src);
+ }
+
+ if (to_conversion)
+ {
+ all->conversion_insns = get_insns ();
+ end_sequence ();
}
SET_DECL_RTL (parm, data->stack_parm);
undo the frobbing that we did in assign_parms_augmented_arg_list. */
static void
-assign_parms_unsplit_complex (tree orig_fnargs, tree fnargs)
+assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
{
tree parm;
+ tree orig_fnargs = all->orig_fnargs;
for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
{
real = gen_lowpart_SUBREG (inner, real);
imag = gen_lowpart_SUBREG (inner, imag);
}
- tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
+
+ if (TREE_ADDRESSABLE (parm))
+ {
+ rtx rmem, imem;
+ HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
+
+ /* split_complex_arg put the real and imag parts in
+ pseudos. Move them to memory. */
+ tmp = assign_stack_local (DECL_MODE (parm), size,
+ TYPE_ALIGN (TREE_TYPE (parm)));
+ set_mem_attributes (tmp, parm, 1);
+ rmem = adjust_address_nv (tmp, inner, 0);
+ imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
+ push_to_sequence (all->conversion_insns);
+ emit_move_insn (rmem, real);
+ emit_move_insn (imem, imag);
+ all->conversion_insns = get_insns ();
+ end_sequence ();
+ }
+ else
+ tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
SET_DECL_RTL (parm, tmp);
real = DECL_INCOMING_RTL (fnargs);
/* Assign RTL expressions to the function's parameters. This may involve
copying them into registers and using those registers as the DECL_RTL. */
-void
+static void
assign_parms (tree fndecl)
{
struct assign_parm_data_all all;
tree fnargs, parm;
rtx internal_arg_pointer;
- int varargs_setup = 0;
/* If the reg that the virtual arg pointer will be translated into is
not a fixed reg or is the stack pointer, make a copy of the virtual
continue;
}
- /* Handle stdargs. LAST_NAMED is a slight mis-nomer; it's also true
- for the unnamed dummy argument following the last named argument.
- See ABI silliness wrt strict_argument_naming and NAMED_ARG. So
- we only want to do this when we get to the actual last named
- argument, which will be the first time LAST_NAMED gets set. */
- if (data.last_named && !varargs_setup)
- {
- varargs_setup = true;
- assign_parms_setup_varargs (&all, &data, false);
- }
+ if (current_function_stdarg && !TREE_CHAIN (parm))
+ assign_parms_setup_varargs (&all, &data, false);
/* Find out where the parameter arrives in this function. */
assign_parm_find_entry_rtl (&all, &data);
assign_parm_adjust_stack_rtl (&data);
if (assign_parm_setup_block_p (&data))
- assign_parm_setup_block (parm, &data);
+ assign_parm_setup_block (&all, parm, &data);
else if (data.passed_pointer || use_register_for_decl (parm))
assign_parm_setup_reg (&all, parm, &data);
else
}
if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
- assign_parms_unsplit_complex (all.orig_fnargs, fnargs);
+ assign_parms_unsplit_complex (&all, fnargs);
/* Output all parameter conversion instructions (possibly including calls)
now that all parameters have been copied out of hard registers. */
}
}
}
+
+/* A subroutine of gimplify_parameters, invoked via walk_tree.
+ For all seen types, gimplify their sizes. */
+
+static tree
+gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
+{
+ tree t = *tp;
+
+ *walk_subtrees = 0;
+ if (TYPE_P (t))
+ {
+ if (POINTER_TYPE_P (t))
+ *walk_subtrees = 1;
+ else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
+ && !TYPE_SIZES_GIMPLIFIED (t))
+ {
+ gimplify_type_sizes (t, (tree *) data);
+ *walk_subtrees = 1;
+ }
+ }
+
+ return NULL;
+}
+
+/* Gimplify the parameter list for current_function_decl. This involves
+ evaluating SAVE_EXPRs of variable sized parameters and generating code
+ to implement callee-copies reference parameters. Returns a list of
+ statements to add to the beginning of the function, or NULL if nothing
+ to do. */
+
+tree
+gimplify_parameters (void)
+{
+ struct assign_parm_data_all all;
+ tree fnargs, parm, stmts = NULL;
+
+ assign_parms_initialize_all (&all);
+ fnargs = assign_parms_augmented_arg_list (&all);
+
+ for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
+ {
+ struct assign_parm_data_one data;
+
+ /* Extract the type of PARM; adjust it according to ABI. */
+ assign_parm_find_data_types (&all, parm, &data);
+
+ /* Early out for errors and void parameters. */
+ if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
+ continue;
+
+ /* Update info on where next arg arrives in registers. */
+ FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
+ data.passed_type, data.named_arg);
+
+ /* ??? Once upon a time variable_size stuffed parameter list
+ SAVE_EXPRs (amongst others) onto a pending sizes list. This
+ turned out to be less than manageable in the gimple world.
+ Now we have to hunt them down ourselves. */
+ walk_tree_without_duplicates (&data.passed_type,
+ gimplify_parm_type, &stmts);
+
+ if (!TREE_CONSTANT (DECL_SIZE (parm)))
+ {
+ gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
+ gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
+ }
+
+ if (data.passed_pointer)
+ {
+ tree type = TREE_TYPE (data.passed_type);
+ if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
+ type, data.named_arg))
+ {
+ tree local, t;
+
+ /* For constant sized objects, this is trivial; for
+ variable-sized objects, we have to play games. */
+ if (TREE_CONSTANT (DECL_SIZE (parm)))
+ {
+ local = create_tmp_var (type, get_name (parm));
+ DECL_IGNORED_P (local) = 0;
+ }
+ else
+ {
+ tree ptr_type, addr, args;
+
+ ptr_type = build_pointer_type (type);
+ addr = create_tmp_var (ptr_type, get_name (parm));
+ DECL_IGNORED_P (addr) = 0;
+ local = build_fold_indirect_ref (addr);
+
+ args = tree_cons (NULL, DECL_SIZE_UNIT (parm), NULL);
+ t = built_in_decls[BUILT_IN_ALLOCA];
+ t = build_function_call_expr (t, args);
+ t = fold_convert (ptr_type, t);
+ t = build2 (MODIFY_EXPR, void_type_node, addr, t);
+ gimplify_and_add (t, &stmts);
+ }
+
+ t = build2 (MODIFY_EXPR, void_type_node, local, parm);
+ gimplify_and_add (t, &stmts);
+
+ DECL_VALUE_EXPR (parm) = local;
+ }
+ }
+ }
+
+ return stmts;
+}
\f
/* Indicate whether REGNO is an incoming argument to the current function
that was promoted to a wider mode. If so, return the RTX for the
}
#endif /* REG_PARM_STACK_SPACE */
- part_size_in_regs = 0;
- if (reg_parm_stack_space == 0)
- part_size_in_regs = ((partial * UNITS_PER_WORD)
- / (PARM_BOUNDARY / BITS_PER_UNIT)
- * (PARM_BOUNDARY / BITS_PER_UNIT));
+ part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
sizetree
= type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
locate->where_pad = where_pad;
+ locate->boundary = boundary;
#ifdef ARGS_GROW_DOWNWARD
locate->slot_offset.constant = -initial_offset_ptr->constant;
&& DECL_RTL_SET_P (decl)
&& REG_P (DECL_RTL (decl))
&& regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
- warning ("%Jvariable '%D' might be clobbered by `longjmp' or `vfork'",
+ warning ("%Jvariable %qD might be clobbered by %<longjmp%>"
+ " or %<vfork%>",
decl, decl);
}
if (DECL_RTL (decl) != 0
&& REG_P (DECL_RTL (decl))
&& regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
- warning ("%Jargument '%D' might be clobbered by `longjmp' or `vfork'",
+ warning ("%Jargument %qD might be clobbered by %<longjmp%> or %<vfork%>",
decl, decl);
}
#endif
}
\f
-/* The PENDING_SIZES represent the sizes of variable-sized types.
- Create RTL for the various sizes now (using temporary variables),
- so that we can refer to the sizes from the RTL we are generating
- for the current function. The PENDING_SIZES are a TREE_LIST. The
- TREE_VALUE of each node is a SAVE_EXPR. */
-
-void
-expand_pending_sizes (tree pending_sizes)
-{
- tree tem;
-
- /* Evaluate now the sizes of any types declared among the arguments. */
- for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
- expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
-}
-
/* Start the RTL for a new function, and set variables used for
emitting RTL.
SUBR is the FUNCTION_DECL node.
{
/* Compute the return values into a pseudo reg, which we will copy
into the true return register after the cleanups are done. */
-
- /* In order to figure out what mode to use for the pseudo, we
- figure out what the mode of the eventual return register will
- actually be, and use that. */
- rtx hard_reg
- = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
- subr, 1);
-
- /* Structures that are returned in registers are not aggregate_value_p,
- so we may see a PARALLEL or a REG. */
- if (REG_P (hard_reg))
- SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
+ tree return_type = TREE_TYPE (DECL_RESULT (subr));
+ if (TYPE_MODE (return_type) != BLKmode
+ && targetm.calls.return_in_msb (return_type))
+ /* expand_function_end will insert the appropriate padding in
+ this case. Use the return value's natural (unpadded) mode
+ within the function proper. */
+ SET_DECL_RTL (DECL_RESULT (subr),
+ gen_reg_rtx (TYPE_MODE (return_type)));
else
{
- gcc_assert (GET_CODE (hard_reg) == PARALLEL);
- SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
+ /* In order to figure out what mode to use for the pseudo, we
+ figure out what the mode of the eventual return register will
+ actually be, and use that. */
+ rtx hard_reg = hard_function_value (return_type, subr, 1);
+
+ /* Structures that are returned in registers are not
+ aggregate_value_p, so we may see a PARALLEL or a REG. */
+ if (REG_P (hard_reg))
+ SET_DECL_RTL (DECL_RESULT (subr),
+ gen_reg_rtx (GET_MODE (hard_reg)));
+ else
+ {
+ gcc_assert (GET_CODE (hard_reg) == PARALLEL);
+ SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
+ }
}
/* Set DECL_REGISTER flag so that expand_function_end will copy the
since some things (like trampolines) get placed before this. */
tail_recursion_reentry = emit_note (NOTE_INSN_DELETED);
- /* Evaluate now the sizes of any types declared among the arguments. */
- expand_pending_sizes (nreverse (get_pending_sizes ()));
-
/* Make sure there is a line number after the function entry setup code. */
force_next_line_note ();
}
decl; decl = TREE_CHAIN (decl))
if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
&& DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
- warning ("%Junused parameter '%D'", decl, decl);
+ warning ("%Junused parameter %qD", decl, decl);
}
static GTY(()) rtx initial_trampoline;
is computed. */
clobber_after = get_last_insn ();
- /* Output the label for the actual return from the function,
- if one is expected. This happens either because a function epilogue
- is used instead of a return instruction, or because a return was done
- with a goto in order to run local cleanups, or because of pcc-style
- structure returning. */
- if (return_label)
- emit_label (return_label);
+ /* Output the label for the actual return from the function. */
+ emit_label (return_label);
/* Let except.c know where it should emit the call to unregister
the function context for sjlj exceptions. */
if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
sjlj_emit_function_exit_after (get_last_insn ());
- /* If we had calls to alloca, and this machine needs
- an accurate stack pointer to exit the function,
- insert some code to save and restore the stack pointer. */
- if (! EXIT_IGNORE_STACK
- && current_function_calls_alloca)
- {
- rtx tem = 0;
-
- emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
- emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
- }
-
/* If scalar return value was computed in a pseudo-reg, or was a named
return value that got dumped to the stack, copy that to the hard
return register. */
if (GET_MODE (real_decl_rtl) == BLKmode)
PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
+ /* If a non-BLKmode return value should be padded at the least
+ significant end of the register, shift it left by the appropriate
+ amount. BLKmode results are handled using the group load/store
+ machinery. */
+ if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
+ && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
+ {
+ emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
+ REGNO (real_decl_rtl)),
+ decl_rtl);
+ shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
+ }
/* If a named return value dumped decl_return to memory, then
we may need to re-do the PROMOTE_MODE signed/unsigned
extension. */
- if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
+ else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
{
int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
/* Emit the actual code to clobber return register. */
{
- rtx seq, after;
+ rtx seq;
start_sequence ();
clobber_return_register ();
+ expand_naked_return ();
seq = get_insns ();
end_sequence ();
- after = emit_insn_after (seq, clobber_after);
+ emit_insn_after (seq, clobber_after);
}
- /* Output the label for the naked return from the function, if one is
- expected. This is currently used only by __builtin_return. */
- if (naked_return_label)
- emit_label (naked_return_label);
+ /* Output the label for the naked return from the function. */
+ emit_label (naked_return_label);
+
+ /* If we had calls to alloca, and this machine needs
+ an accurate stack pointer to exit the function,
+ insert some code to save and restore the stack pointer. */
+ if (! EXIT_IGNORE_STACK
+ && current_function_calls_alloca)
+ {
+ rtx tem = 0;
+
+ emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
+ emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
+ }
/* ??? This should no longer be necessary since stupid is no longer with
us, but there are some parts of the compiler (eg reload_combine, and
end_sequence ();
push_topmost_sequence ();
- emit_insn_after (seq, get_insns ());
+ emit_insn_after (seq, entry_of_function ());
pop_topmost_sequence ();
}
#if defined (HAVE_epilogue) || defined(HAVE_return)
rtx epilogue_end = NULL_RTX;
#endif
+ edge_iterator ei;
#ifdef HAVE_prologue
if (HAVE_prologue)
/* Can't deal with multiple successors of the entry block
at the moment. Function should always have at least one
entry point. */
- gcc_assert (ENTRY_BLOCK_PTR->succ && !ENTRY_BLOCK_PTR->succ->succ_next);
+ gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
- insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
+ insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
inserted = 1;
}
#endif
/* If the exit block has no non-fake predecessors, we don't need
an epilogue. */
- for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
+ FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
if ((e->flags & EDGE_FAKE) == 0)
break;
if (e == NULL)
emit (conditional) return instructions. */
basic_block last;
- edge e_next;
rtx label;
- for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
+ FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
if (e->flags & EDGE_FALLTHRU)
break;
if (e == NULL)
if (BB_HEAD (last) == label && LABEL_P (label))
{
+ edge_iterator ei2;
rtx epilogue_line_note = NULL_RTX;
/* Locate the line number associated with the closing brace,
break;
}
- for (e = last->pred; e; e = e_next)
+ for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
{
basic_block bb = e->src;
rtx jump;
- e_next = e->pred_next;
if (bb == ENTRY_BLOCK_PTR)
- continue;
+ {
+ ei_next (&ei2);
+ continue;
+ }
jump = BB_END (bb);
if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
- continue;
+ {
+ ei_next (&ei2);
+ continue;
+ }
/* If we have an unconditional jump, we can replace that
with a simple return instruction. */
else if (condjump_p (jump))
{
if (! redirect_jump (jump, 0, 0))
- continue;
+ {
+ ei_next (&ei2);
+ continue;
+ }
/* If this block has only one successor, it both jumps
and falls through to the fallthru block, so we can't
delete the edge. */
- if (bb->succ->succ_next == NULL)
- continue;
+ if (single_succ_p (bb))
+ {
+ ei_next (&ei2);
+ continue;
+ }
}
else
- continue;
+ {
+ ei_next (&ei2);
+ continue;
+ }
/* Fix up the CFG for the successful change we just made. */
redirect_edge_succ (e, EXIT_BLOCK_PTR);
emit_barrier_after (BB_END (last));
emit_return_into_block (last, epilogue_line_note);
epilogue_end = BB_END (last);
- last->succ->flags &= ~EDGE_FALLTHRU;
+ single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
goto epilogue_done;
}
}
There really shouldn't be a mixture -- either all should have
been converted or none, however... */
- for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
+ FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
if (e->flags & EDGE_FALLTHRU)
break;
if (e == NULL)
#ifdef HAVE_sibcall_epilogue
/* Emit sibling epilogues before any sibling call sites. */
- for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
+ for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
{
basic_block bb = e->src;
rtx insn = BB_END (bb);
- rtx i;
- rtx newinsn;
if (!CALL_P (insn)
|| ! SIBLING_CALL_P (insn))
- continue;
+ {
+ ei_next (&ei);
+ continue;
+ }
start_sequence ();
emit_insn (gen_sibcall_epilogue ());
record_insns (seq, &sibcall_epilogue);
set_insn_locators (seq, epilogue_locator);
- i = PREV_INSN (insn);
- newinsn = emit_insn_before (seq, insn);
+ emit_insn_before (seq, insn);
+ ei_next (&ei);
}
#endif