/* Convert function calls to rtl insns, for GNU C compiler.
Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
+ 1999, 2000, 2001, 2002, 2003, 2004, 2005
+ Free Software Foundation, Inc.
This file is part of GCC.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA. */
#include "config.h"
#include "system.h"
This is not the same register as for normal calls on machines with
register windows. */
rtx tail_call_reg;
+ /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
+ form for emit_group_move. */
+ rtx parallel_value;
/* If REG was promoted from the actual mode of the argument expression,
indicates whether the promotion is sign- or zero-extended. */
int unsignedp;
- /* Number of registers to use. 0 means put the whole arg in registers.
- Also 0 if not passed in registers. */
+ /* Number of bytes to put in registers. 0 means put the whole arg
+ in registers. Also 0 if not passed in registers. */
int partial;
/* Nonzero if argument must be passed on stack.
Note that some arguments may be passed on the stack
returns a BLKmode struct) and expand_call must take special action
to make sure the object being constructed does not overlap the
argument list for the constructor call. */
-int stack_arg_under_construction;
+static int stack_arg_under_construction;
static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
HOST_WIDE_INT, rtx, rtx, int, rtx, int,
static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
unsigned int);
-static bool shift_returned_value (tree, rtx *);
+static tree split_complex_values (tree);
+static tree split_complex_types (tree);
#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
}
else
#endif
- abort ();
+ gcc_unreachable ();
/* Find the call we just emitted. */
call_insn = last_call_insn ();
if (ecf_flags & ECF_NORETURN)
REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
REG_NOTES (call_insn));
- if (ecf_flags & ECF_ALWAYS_RETURN)
- REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
- REG_NOTES (call_insn));
if (ecf_flags & ECF_RETURNS_TWICE)
{
if (rounded_stack_size != 0)
{
- if (ecf_flags & (ECF_SP_DEPRESSED | ECF_NORETURN | ECF_LONGJMP))
+ if (ecf_flags & (ECF_SP_DEPRESSED | ECF_NORETURN))
/* Just pretend we did the pop. */
stack_pointer_delta -= rounded_stack_size;
else if (flag_defer_pop && inhibit_defer_pop == 0
For example, if the function might return more than one time (setjmp), then
set RETURNS_TWICE to a nonzero value.
- Similarly set LONGJMP for if the function is in the longjmp family.
+ Similarly set NORETURN if the function is in the longjmp family.
Set MAY_BE_ALLOCA for any memory allocation function that might allocate
space from the stack such as alloca. */
if (tname[1] == 'i'
&& ! strcmp (tname, "siglongjmp"))
- flags |= ECF_LONGJMP;
+ flags |= ECF_NORETURN;
}
else if ((tname[0] == 'q' && tname[1] == 's'
&& ! strcmp (tname, "qsetjmp"))
else if (tname[0] == 'l' && tname[1] == 'o'
&& ! strcmp (tname, "longjmp"))
- flags |= ECF_LONGJMP;
+ flags |= ECF_NORETURN;
}
return flags;
}
-/* Return nonzero when tree represent call to longjmp. */
+/* Return nonzero when FNDECL represents a call to setjmp. */
int
setjmp_call_p (tree fndecl)
if (DECL_P (exp))
{
- struct cgraph_rtl_info *i = cgraph_rtl_info (exp);
type = TREE_TYPE (exp);
- if (i)
- {
- if (i->pure_function)
- flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
- if (i->const_function)
- flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
- }
-
/* The function exp may have the `malloc' attribute. */
if (DECL_IS_MALLOC (exp))
flags |= ECF_MALLOC;
+ /* The function exp may have the `returns_twice' attribute. */
+ if (DECL_IS_RETURNS_TWICE (exp))
+ flags |= ECF_RETURNS_TWICE;
+
/* The function exp may have the `pure' attribute. */
if (DECL_IS_PURE (exp))
flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
+ if (DECL_IS_NOVOPS (exp))
+ flags |= ECF_NOVOPS;
+
if (TREE_NOTHROW (exp))
flags |= ECF_NOTHROW;
Set REG_PARM_SEEN if we encounter a register parameter. */
static void
-precompute_register_parameters (int num_actuals, struct arg_data *args, int *reg_parm_seen)
+precompute_register_parameters (int num_actuals, struct arg_data *args,
+ int *reg_parm_seen)
{
int i;
TYPE_MODE (TREE_TYPE (args[i].tree_value)),
args[i].value, args[i].unsignedp);
+ /* If we're going to have to load the value by parts, pull the
+ parts into pseudos. The part extraction process can involve
+ non-trivial computation. */
+ if (GET_CODE (args[i].reg) == PARALLEL)
+ {
+ tree type = TREE_TYPE (args[i].tree_value);
+ args[i].parallel_value
+ = emit_group_load_into_temps (args[i].reg, args[i].value,
+ type, int_size_in_bytes (type));
+ }
+
/* If the value is expensive, and we are inside an appropriately
short loop, put the value into a pseudo and then put the pseudo
into the hard reg.
register parameters. This is to avoid reload conflicts while
loading the parameters registers. */
- if ((! (REG_P (args[i].value)
- || (GET_CODE (args[i].value) == SUBREG
- && REG_P (SUBREG_REG (args[i].value)))))
- && args[i].mode != BLKmode
- && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
- && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
- || preserve_subexpressions_p ()))
+ else if ((! (REG_P (args[i].value)
+ || (GET_CODE (args[i].value) == SUBREG
+ && REG_P (SUBREG_REG (args[i].value)))))
+ && args[i].mode != BLKmode
+ && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
+ && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
+ || optimize))
args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
}
}
< (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
{
int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
- int nregs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
int endian_correction = 0;
- args[i].n_aligned_regs = args[i].partial ? args[i].partial : nregs;
+ if (args[i].partial)
+ {
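+	  /* args[i].partial is now a byte count, while n_aligned_regs
+	     counts whole registers, so the byte count must divide
+	     evenly into words.  */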
+ gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
+ args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
+ }
+ else
+ {
+ args[i].n_aligned_regs
+ = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
+ }
+
args[i].aligned_regs = xmalloc (sizeof (rtx) * args[i].n_aligned_regs);
/* Structures smaller than a word are normally aligned to the
args[i].reg is nonzero if all or part is passed in registers.
args[i].partial is nonzero if part but not all is passed in registers,
- and the exact value says how many words are passed in registers.
+ and the exact value says how many bytes are passed in registers.
args[i].pass_on_stack is nonzero if the argument must at least be
computed on the stack. It may then be loaded back into registers
if (pass_by_reference (args_so_far, TYPE_MODE (type),
type, argpos < n_named_args))
{
- /* If we're compiling a thunk, pass through invisible
- references instead of making a copy. */
+ bool callee_copies;
+ tree base;
+
+ callee_copies
+ = reference_callee_copied (args_so_far, TYPE_MODE (type),
+ type, argpos < n_named_args);
+
+ /* If we're compiling a thunk, pass through invisible references
+ instead of making a copy. */
if (call_from_thunk_p
- || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
- type, argpos < n_named_args)
- /* If it's in a register, we must make a copy of it too. */
- /* ??? Is this a sufficient test? Is there a better one? */
- && !(TREE_CODE (args[i].tree_value) == VAR_DECL
- && REG_P (DECL_RTL (args[i].tree_value)))
- && ! TREE_ADDRESSABLE (type))
- )
+ || (callee_copies
+ && !TREE_ADDRESSABLE (type)
+ && (base = get_base_address (args[i].tree_value))
+ && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
{
- /* C++ uses a TARGET_EXPR to indicate that we want to make a
- new object from the argument. If we are passing by
- invisible reference, the callee will do that for us, so we
- can strip off the TARGET_EXPR. This is not always safe,
- but it is safe in the only case where this is a useful
- optimization; namely, when the argument is a plain object.
- In that case, the frontend is just asking the backend to
- make a bitwise copy of the argument. */
-
- if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
- && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
- && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
- args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
-
- /* We can't use sibcalls if a callee-copied argument is stored
- in the current function's frame. */
- if (!call_from_thunk_p
- && (!DECL_P (args[i].tree_value)
- || !TREE_STATIC (args[i].tree_value)))
+ /* We can't use sibcalls if a callee-copied argument is
+ stored in the current function's frame. */
+ if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
*may_tailcall = false;
- args[i].tree_value = build1 (ADDR_EXPR,
- build_pointer_type (type),
- args[i].tree_value);
- type = build_pointer_type (type);
- }
- else if (TREE_CODE (args[i].tree_value) == TARGET_EXPR)
- {
- /* In the V3 C++ ABI, parameters are destroyed in the caller.
- We implement this by passing the address of the temporary
- rather than expanding it into another allocated slot. */
- args[i].tree_value = build1 (ADDR_EXPR,
- build_pointer_type (type),
- args[i].tree_value);
- type = build_pointer_type (type);
- *may_tailcall = false;
+ args[i].tree_value = build_fold_addr_expr (args[i].tree_value);
+ type = TREE_TYPE (args[i].tree_value);
+
+ *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
}
else
{
copy = assign_temp (type, 0, 1, 0);
store_expr (args[i].tree_value, copy, 0);
- *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
- args[i].tree_value = build1 (ADDR_EXPR,
- build_pointer_type (type),
- make_tree (type, copy));
- type = build_pointer_type (type);
+ if (callee_copies)
+ *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
+ else
+ *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
+
+ args[i].tree_value
+ = build_fold_addr_expr (make_tree (type, copy));
+ type = TREE_TYPE (args[i].tree_value);
*may_tailcall = false;
}
}
if (args[i].reg)
args[i].partial
- = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
- argpos < n_named_args);
+ = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
+ argpos < n_named_args);
args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
/* We don't handle this case yet. To handle it correctly we have
to add the delta, round and subtract the delta.
Currently no machine description requires this support. */
- if (stack_pointer_delta & (preferred_stack_boundary - 1))
- abort ();
+ gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
args_size->var = round_up (args_size->var, preferred_stack_boundary);
}
enum machine_mode mode;
/* If this is an addressable type, we cannot pre-evaluate it. */
- if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
- abort ();
+ gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)));
args[i].initial_value = args[i].value
= expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
rtx addr;
+ unsigned int align, boundary;
/* Skip this parm if it will not be passed on the stack. */
if (! args[i].pass_on_stack && args[i].reg != 0)
addr = plus_constant (addr, arg_offset);
args[i].stack = gen_rtx_MEM (args[i].mode, addr);
- set_mem_align (args[i].stack, PARM_BOUNDARY);
set_mem_attributes (args[i].stack,
TREE_TYPE (args[i].tree_value), 1);
+ align = BITS_PER_UNIT;
+ boundary = args[i].locate.boundary;
+ if (args[i].locate.where_pad != downward)
+ align = boundary;
+ else if (GET_CODE (offset) == CONST_INT)
+ {
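+	  /* The slot's base is aligned to BOUNDARY; OR-ing in the byte
+	     offset (scaled to bits) and isolating the lowest set bit
+	     with "align & -align" gives the strictest alignment that
+	     can still be guaranteed.  */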
+ align = INTVAL (offset) * BITS_PER_UNIT | boundary;
+ align = align & -align;
+ }
+ set_mem_align (args[i].stack, align);
if (GET_CODE (slot_offset) == CONST_INT)
addr = plus_constant (arg_reg, INTVAL (slot_offset));
addr = plus_constant (addr, arg_offset);
args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
- set_mem_align (args[i].stack_slot, PARM_BOUNDARY);
set_mem_attributes (args[i].stack_slot,
TREE_TYPE (args[i].tree_value), 1);
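+      /* Unlike args[i].stack, the slot covers the whole padded area,
+	 so the full boundary alignment holds for it.  */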
+ set_mem_align (args[i].stack_slot, args[i].locate.boundary);
/* Function incoming arguments may overlap with sibling call
outgoing arguments and we cannot allow reordering of reads
Mark all register-parms as living through the call, putting these USE
insns in the CALL_INSN_FUNCTION_USAGE field.
- When IS_SIBCALL, perform the check_sibcall_overlap_argument_overlap
+ When IS_SIBCALL, perform the check_sibcall_argument_overlap
checking, setting *SIBCALL_FAILURE if appropriate. */
static void
int nregs;
int size = 0;
rtx before_arg = get_last_insn ();
- /* Set to non-negative if must move a word at a time, even if just
- one word (e.g, partial == 1 && mode == DFmode). Set to -1 if
- we just use a normal move insn. This value can be zero if the
- argument is a zero size structure with no fields. */
+ /* Set non-negative if we must move a word at a time, even if
+	 just one word (e.g., partial == 4 && mode == DFmode).  Set
+	 to -1 if we just use a normal move insn.  This value can be
+	 zero if the argument is a zero-size structure.  */
nregs = -1;
- if (partial)
- nregs = partial;
+ if (GET_CODE (reg) == PARALLEL)
+ ;
+ else if (partial)
+ {
+ gcc_assert (partial % UNITS_PER_WORD == 0);
+ nregs = partial / UNITS_PER_WORD;
+ }
else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
{
size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
locations. The Irix 6 ABI has examples of this. */
if (GET_CODE (reg) == PARALLEL)
- {
- tree type = TREE_TYPE (args[i].tree_value);
- emit_group_load (reg, args[i].value, type,
- int_size_in_bytes (type));
- }
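+    /* The pieces were pulled into suitably ordered pseudos earlier
+       (see precompute_register_parameters and store_one_arg), so a
+       simple group move is all that is needed here.  */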
+ emit_group_move (reg, args[i].parallel_value);
/* If simple case, just do move. If normal partial, store_one_arg
has already loaded the register for us. In all other cases,
seem worth generating rtl to say that. */
reg = gen_rtx_REG (word_mode, REGNO (reg));
x = expand_shift (LSHIFT_EXPR, word_mode, reg,
- build_int_2 (shift, 0), reg, 1);
+ build_int_cst (NULL_TREE, shift),
+ reg, 1);
if (x != reg)
emit_move_insn (reg, x);
}
emit_move_insn (x, tem);
x = expand_shift (dir, word_mode, x,
- build_int_2 (shift, 0), ri, 1);
+ build_int_cst (NULL_TREE, shift),
+ ri, 1);
if (x != ri)
emit_move_insn (ri, x);
}
use_group_regs (call_fusage, reg);
else if (nregs == -1)
use_reg (call_fusage, reg);
- else
- use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
+ else if (nregs > 0)
+ use_regs (call_fusage, REGNO (reg), nregs);
}
}
}
&& GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
i = INTVAL (XEXP (XEXP (x, 0), 1));
else
- return 1;
+ return 0;
#ifdef ARGS_GROW_DOWNWARD
i = -i - GET_MODE_SIZE (GET_MODE (x));
return insn != NULL_RTX;
}
-/* If function value *VALUE was returned at the most significant end of a
- register, shift it towards the least significant end and convert it to
- TYPE's mode. Return true and update *VALUE if some action was needed.
-
- TYPE is the type of the function's return value, which is known not
- to have mode BLKmode. */
-
-static bool
-shift_returned_value (tree type, rtx *value)
-{
- if (targetm.calls.return_in_msb (type))
- {
- HOST_WIDE_INT shift;
-
- shift = (GET_MODE_BITSIZE (GET_MODE (*value))
- - BITS_PER_UNIT * int_size_in_bytes (type));
- if (shift > 0)
- {
- /* Shift the value into the low part of the register. */
- *value = expand_binop (GET_MODE (*value), lshr_optab, *value,
- GEN_INT (shift), 0, 1, OPTAB_WIDEN);
-
- /* Truncate it to the type's mode, or its integer equivalent.
- This is subject to TRULY_NOOP_TRUNCATION. */
- *value = convert_to_mode (int_mode_for_mode (TYPE_MODE (type)),
- *value, 0);
-
- /* Now convert it to the final form. */
- *value = gen_lowpart (TYPE_MODE (type), *value);
- return true;
- }
- }
- return false;
-}
-
-/* Remove all REG_EQUIV notes found in the insn chain. */
+/* Given that a function returns a value of mode MODE at the most
+ significant end of hard register VALUE, shift VALUE left or right
+ as specified by LEFT_P. Return true if some action was needed. */
-static void
-purge_reg_equiv_notes (void)
-{
- rtx insn;
-
- for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
- {
- while (1)
- {
- rtx note = find_reg_note (insn, REG_EQUIV, 0);
- if (note)
- {
- /* Remove the note and keep looking at the notes for
- this insn. */
- remove_note (insn, note);
- continue;
- }
- break;
- }
- }
-}
-
-/* Clear RTX_UNCHANGING_P flag of incoming argument MEMs. */
-
-static void
-purge_mem_unchanging_flag (rtx x)
+bool
+shift_return_value (enum machine_mode mode, bool left_p, rtx value)
{
- RTX_CODE code;
- int i, j;
- const char *fmt;
+ HOST_WIDE_INT shift;
- if (x == NULL_RTX)
- return;
-
- code = GET_CODE (x);
-
- if (code == MEM)
- {
- if (RTX_UNCHANGING_P (x)
- && (XEXP (x, 0) == current_function_internal_arg_pointer
- || (GET_CODE (XEXP (x, 0)) == PLUS
- && XEXP (XEXP (x, 0), 0) ==
- current_function_internal_arg_pointer
- && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
- RTX_UNCHANGING_P (x) = 0;
- return;
- }
+ gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
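+  /* The value occupies the most significant end of the register, so
+     the number of bits to shift past is the difference between the
+     register width and the value's width.  */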
+ shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
+ if (shift == 0)
+ return false;
- /* Scan all subexpressions. */
- fmt = GET_RTX_FORMAT (code);
- for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
- {
- if (*fmt == 'e')
- purge_mem_unchanging_flag (XEXP (x, i));
- else if (*fmt == 'E')
- for (j = 0; j < XVECLEN (x, i); j++)
- purge_mem_unchanging_flag (XVECEXP (x, i, j));
- }
+ /* Use ashr rather than lshr for right shifts. This is for the benefit
+ of the MIPS port, which requires SImode values to be sign-extended
+ when stored in 64-bit registers. */
+ if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
+ value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
+ gcc_unreachable ();
+ return true;
}
-
/* Generate all the code for a function call
and return an rtx for its value.
Store the value in TARGET (specified as an rtx) if convenient.
int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
char *initial_stack_usage_map = stack_usage_map;
+ char *stack_usage_map_buf = NULL;
int old_stack_allocated;
/* Warn if this value is an aggregate type,
regardless of which calling convention we are using for it. */
- if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
- warning ("function call has aggregate value");
+ if (AGGREGATE_TYPE_P (TREE_TYPE (exp)))
+ warning (OPT_Waggregate_return, "function call has aggregate value");
/* If the result of a pure or const function call is ignored (or void),
and none of its arguments are volatile, we can avoid expanding the
{
struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
- if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (exp))
- {
- /* The structure value address arg is already in actparms.
- Pull it out. It might be nice to just leave it there, but
- we need to set structure_value_addr. */
- tree return_arg = TREE_VALUE (actparms);
- actparms = TREE_CHAIN (actparms);
- structure_value_addr = expand_expr (return_arg, NULL_RTX,
- VOIDmode, EXPAND_NORMAL);
- }
- else if (target && MEM_P (target))
+ if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
structure_value_addr = XEXP (target, 0);
else
{
/* Operand 0 is a pointer-to-function; get the type of the function. */
funtype = TREE_TYPE (addr);
- if (! POINTER_TYPE_P (funtype))
- abort ();
+ gcc_assert (POINTER_TYPE_P (funtype));
funtype = TREE_TYPE (funtype);
/* Munge the tree to split complex arguments into their imaginary
|| !targetm.function_ok_for_sibcall (fndecl, exp)
/* Functions that do not return exactly once may not be sibcall
optimized. */
- || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP | ECF_NORETURN))
+ || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
|| TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
/* If the called function is nested in the current one, it might access
some of the caller's arguments, but could clobber them beforehand if
the argument areas are shared. */
|| (fndecl && decl_function_context (fndecl) == current_function_decl)
/* If this function requires more stack slots than the current
- function, we cannot change it into a sibling call. */
- || args_size.constant > current_function_args_size
+ function, we cannot change it into a sibling call.
+ current_function_pretend_args_size is not part of the
+ stack allocated by our caller. */
+ || args_size.constant > (current_function_args_size
+ - current_function_pretend_args_size)
/* If the callee pops its own arguments, then it must pop exactly
the same number of arguments as the current function. */
|| (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
Also, do all pending adjustments now if there is any chance
this might be a call to alloca or if we are expanding a sibling
call sequence or if we are calling a function that is to return
- with stack pointer depressed. */
+ with stack pointer depressed.
+ Also do the adjustments before a throwing call, otherwise
+ exception handling can fail; PR 19225. */
if (pending_stack_adjust >= 32
|| (pending_stack_adjust > 0
&& (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
+ || (pending_stack_adjust > 0
+ && flag_exceptions && !(flags & ECF_NOTHROW))
|| pass == 0)
do_pending_stack_adjust ();
if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
start_sequence ();
+ if (pass == 0 && cfun->stack_protect_guard)
+ stack_protect_epilogue ();
+
adjusted_args_size = args_size;
/* Compute the actual size of the argument block required. The variable
and constant sizes must be combined, the size may have to be rounded,
highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
needed);
#endif
- stack_usage_map = alloca (highest_outgoing_arg_in_use);
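+	  /* Allocated on the heap rather than with alloca, since this
+	     code can run more than once (once per pass); the buffer
+	     is freed before expand_call returns.  */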
+ if (stack_usage_map_buf)
+ free (stack_usage_map_buf);
+ stack_usage_map_buf = xmalloc (highest_outgoing_arg_in_use);
+ stack_usage_map = stack_usage_map_buf;
if (initial_highest_arg_in_use)
memcpy (stack_usage_map, initial_stack_usage_map,
= stack_arg_under_construction;
stack_arg_under_construction = 0;
/* Make a new map for the new argument list. */
- stack_usage_map = alloca (highest_outgoing_arg_in_use);
+ if (stack_usage_map_buf)
+ free (stack_usage_map_buf);
+ stack_usage_map_buf = xmalloc (highest_outgoing_arg_in_use);
+ stack_usage_map = stack_usage_map_buf;
memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
highest_outgoing_arg_in_use = 0;
}
{
if (pcc_struct_value)
valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
- fndecl, (pass == 0));
+ fndecl, NULL, (pass == 0));
else
- valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
+ valreg = hard_function_value (TREE_TYPE (exp), fndecl, fntype,
+ (pass == 0));
}
/* Precompute all register parameters. It isn't safe to compute anything
now! */
/* Stack must be properly aligned now. */
- if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
- abort ();
+ gcc_assert (!pass
+ || !(stack_pointer_delta % preferred_unit_stack_boundary));
/* Generate the actual call instruction. */
emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
flags, & args_so_far);
+ /* If a non-BLKmode value is returned at the most significant end
+ of a register, shift the register right by the appropriate amount
+ and update VALREG accordingly. BLKmode values are handled by the
+ group load/store machinery below. */
+ if (!structure_value_addr
+ && !pcc_struct_value
+ && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
+ && targetm.calls.return_in_msb (TREE_TYPE (exp)))
+ {
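+	  /* An extra shift emitted after the call cannot be part of a
+	     sibling call sequence, since no insns may follow a sibcall.  */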
+ if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg))
+ sibcall_failure = 1;
+ valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg));
+ }
+
/* If call is cse'able, make appropriate pair of reg-notes around it.
Test valreg so we don't crash; may safely ignore `const'
if return type is void. Disable for PARALLEL return values, because
end_sequence ();
if (flag_unsafe_math_optimizations
&& fndecl
- && DECL_BUILT_IN (fndecl)
+ && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
&& (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRT
|| DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTF
|| DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTL))
if nonvolatile values are live. For functions that cannot return,
inform flow that control does not fall through. */
- if ((flags & (ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
+ if ((flags & ECF_NORETURN) || pass == 0)
{
/* The barrier must be emitted
immediately after the CALL_INSN. Some ports emit more
{
last = PREV_INSN (last);
/* There was no CALL_INSN? */
- if (last == before_call)
- abort ();
+ gcc_assert (last != before_call);
}
emit_barrier_after (last);
}
}
- if (flags & ECF_LONGJMP)
- current_function_calls_longjmp = 1;
-
/* If value type not void, return an rtx for the value. */
if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
| TYPE_QUAL_CONST));
target = assign_temp (nt, 0, 1, 1);
- preserve_temp_slots (target);
}
if (! rtx_equal_p (target, valreg))
sibcall_failure = 1;
}
else
- {
- if (shift_returned_value (TREE_TYPE (exp), &valreg))
- sibcall_failure = 1;
-
- target = copy_to_reg (valreg);
- }
+ target = copy_to_reg (valreg);
if (targetm.calls.promote_function_return(funtype))
{
- /* If we promoted this return value, make the proper SUBREG. TARGET
- might be const0_rtx here, so be careful. */
- if (REG_P (target)
- && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
- && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
- {
- tree type = TREE_TYPE (exp);
- int unsignedp = TYPE_UNSIGNED (type);
- int offset = 0;
-
- /* If we don't promote as expected, something is wrong. */
- if (GET_MODE (target)
- != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
- abort ();
-
- if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
- && GET_MODE_SIZE (GET_MODE (target))
- > GET_MODE_SIZE (TYPE_MODE (type)))
- {
- offset = GET_MODE_SIZE (GET_MODE (target))
- - GET_MODE_SIZE (TYPE_MODE (type));
- if (! BYTES_BIG_ENDIAN)
- offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
- else if (! WORDS_BIG_ENDIAN)
- offset %= UNITS_PER_WORD;
- }
- target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
- SUBREG_PROMOTED_VAR_P (target) = 1;
- SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
- }
+ /* If we promoted this return value, make the proper SUBREG.
+ TARGET might be const0_rtx here, so be careful. */
+ if (REG_P (target)
+ && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
+ && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
+ {
+ tree type = TREE_TYPE (exp);
+ int unsignedp = TYPE_UNSIGNED (type);
+ int offset = 0;
+ enum machine_mode pmode;
+
+ pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
+ /* If we don't promote as expected, something is wrong. */
+ gcc_assert (GET_MODE (target) == pmode);
+
+ if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
+ && (GET_MODE_SIZE (GET_MODE (target))
+ > GET_MODE_SIZE (TYPE_MODE (type))))
+ {
+ offset = GET_MODE_SIZE (GET_MODE (target))
+ - GET_MODE_SIZE (TYPE_MODE (type));
+ if (! BYTES_BIG_ENDIAN)
+ offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
+ else if (! WORDS_BIG_ENDIAN)
+ offset %= UNITS_PER_WORD;
+ }
+ target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
+ SUBREG_PROMOTED_VAR_P (target) = 1;
+ SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
+ }
}
/* If size of args is variable or this was a constructor call for a stack
if (args[i].aligned_regs)
free (args[i].aligned_regs);
- /* If this function is returning into a memory location marked as
- readonly, it means it is initializing that location. We normally treat
- functions as not clobbering such locations, so we need to specify that
- this one does. We do this by adding the appropriate CLOBBER to the
- CALL_INSN function usage list. This cannot be done by emitting a
- standalone CLOBBER after the call because the latter would be ignored
- by at least the delay slot scheduling pass. We do this now instead of
- adding to call_fusage before the call to emit_call_1 because TARGET
- may be modified in the meantime. */
- if (structure_value_addr != 0 && target != 0
- && MEM_P (target) && RTX_UNCHANGING_P (target))
- add_function_usage_to
- (last_call_insn (),
- gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_CLOBBER (VOIDmode, target),
- NULL_RTX));
-
insns = get_insns ();
end_sequence ();
normal_call_insns = insns;
/* Verify that we've deallocated all the stack we used. */
- if (! (flags & (ECF_NORETURN | ECF_LONGJMP))
- && old_stack_allocated != stack_pointer_delta
- - pending_stack_adjust)
- abort ();
+ gcc_assert ((flags & ECF_NORETURN)
+ || (old_stack_allocated
+ == stack_pointer_delta - pending_stack_adjust));
}
/* If something prevents making this a sibling call,
emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
}
+ if (stack_usage_map_buf)
+ free (stack_usage_map_buf);
+
return target;
}
this function's incoming arguments.
At the start of RTL generation we know the only REG_EQUIV notes
- in the rtl chain are those for incoming arguments, so we can safely
- flush any REG_EQUIV note.
+ in the rtl chain are those for incoming arguments, so we can look
+ for REG_EQUIV notes between the start of the function and the
+ NOTE_INSN_FUNCTION_BEG.
This is (slight) overkill. We could keep track of the highest
argument we clobber and be more selective in removing notes, but it
does not seem to be worth the effort. */
+
void
fixup_tail_calls (void)
{
rtx insn;
- tree arg;
-
- purge_reg_equiv_notes ();
-
- /* A sibling call sequence also may invalidate RTX_UNCHANGING_P
- flag of some incoming arguments MEM RTLs, because it can write into
- those slots. We clear all those bits now.
- This is (slight) overkill, we could keep track of which arguments
- we actually write into. */
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
{
- if (INSN_P (insn))
- purge_mem_unchanging_flag (PATTERN (insn));
- }
+ /* There are never REG_EQUIV notes for the incoming arguments
+ after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
+ if (NOTE_P (insn)
+ && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
+ break;
- /* Similarly, invalidate RTX_UNCHANGING_P for any incoming
- arguments passed in registers. */
- for (arg = DECL_ARGUMENTS (current_function_decl);
- arg;
- arg = TREE_CHAIN (arg))
- {
- if (REG_P (DECL_RTL (arg)))
- RTX_UNCHANGING_P (DECL_RTL (arg)) = false;
+ while (1)
+ {
+ rtx note = find_reg_note (insn, REG_EQUIV, 0);
+ if (note)
+ {
+ /* Remove the note and keep looking at the notes for
+ this insn. */
+ remove_note (insn, note);
+ continue;
+ }
+ break;
+ }
}
}
/* Traverse an argument list in VALUES and expand all complex
arguments into their components. */
-tree
+static tree
split_complex_values (tree values)
{
tree p;
/* Traverse a list of TYPES and expand all complex types into their
components. */
-tree
+static tree
split_complex_types (tree types)
{
tree p;
/* Size of the stack reserved for parameter registers. */
int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
char *initial_stack_usage_map = stack_usage_map;
+ char *stack_usage_map_buf = NULL;
rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
case LCT_THROW:
flags = ECF_NORETURN;
break;
- case LCT_ALWAYS_RETURN:
- flags = ECF_ALWAYS_RETURN;
- break;
case LCT_RETURNS_TWICE:
flags = ECF_RETURNS_TWICE;
break;
{
#ifdef PCC_STATIC_STRUCT_RETURN
rtx pointer_reg
- = hard_function_value (build_pointer_type (tfom), 0, 0);
+ = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
mem_value = gen_rtx_MEM (outmode, pointer_reg);
pcc_struct_value = 1;
if (value == 0)
if (mem_value && struct_value == 0 && ! pcc_struct_value)
{
rtx addr = XEXP (mem_value, 0);
+
nargs++;
/* Make sure it is a reasonable operand for a move or push insn. */
argvec[count].partial = 0;
argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
- if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
- abort ();
+ gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
+ NULL_TREE, 1) == 0);
locate_and_pad_parm (Pmode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
/* We cannot convert the arg value to the mode the library wants here;
must do it earlier where we know the signedness of the arg. */
- if (mode == BLKmode
- || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
- abort ();
+ gcc_assert (mode != BLKmode
+ && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
/* Make sure it is a reasonable operand for a move or push insn. */
if (!REG_P (val) && !MEM_P (val)
if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
{
rtx slot;
- int must_copy = ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
- NULL_TREE, 1);
+ int must_copy
+ = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);
/* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
functions, so we have to pretend this isn't such a function. */
flags |= ECF_PURE;
}
- if (GET_MODE (val) == MEM && ! must_copy)
+	  if (MEM_P (val) && !must_copy)
slot = val;
- else if (must_copy)
+ else
{
slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
0, 1, 1);
emit_move_insn (slot, val);
}
- else
- {
- tree type = lang_hooks.types.type_for_mode (mode, 0);
-
- slot
- = gen_rtx_MEM (mode,
- expand_expr (build1 (ADDR_EXPR,
- build_pointer_type (type),
- make_tree (type, val)),
- NULL_RTX, VOIDmode, 0));
- }
call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
gen_rtx_USE (VOIDmode, slot),
argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
argvec[count].partial
- = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
+ = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);
locate_and_pad_parm (mode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
argvec[count].partial,
NULL_TREE, &args_size, &argvec[count].locate);
- if (argvec[count].locate.size.var)
- abort ();
+ gcc_assert (!argvec[count].locate.size.var);
if (argvec[count].reg == 0 || argvec[count].partial != 0
|| reg_parm_stack_space > 0)
highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
needed);
#endif
- stack_usage_map = alloca (highest_outgoing_arg_in_use);
+ stack_usage_map_buf = xmalloc (highest_outgoing_arg_in_use);
+ stack_usage_map = stack_usage_map_buf;
if (initial_highest_arg_in_use)
memcpy (stack_usage_map, initial_stack_usage_map,
stack_usage_map[i] = 1;
NO_DEFER_POP;
+
+ if (flags & ECF_CONST)
+ {
+ rtx use;
+
+ /* Indicate argument access so that alias.c knows that these
+ values are live. */
+ if (argblock)
+ use = plus_constant (argblock,
+ argvec[argnum].locate.offset.constant);
+ else
+ /* When arguments are pushed, trying to tell alias.c where
+ exactly this argument is won't work, because the
+ auto-increment causes confusion. So we merely indicate
+ that we access something with a known mode somewhere on
+ the stack. */
+ use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
+ gen_rtx_SCRATCH (Pmode));
+ use = gen_rtx_MEM (argvec[argnum].mode, use);
+ use = gen_rtx_USE (VOIDmode, use);
+ call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
+ }
}
}
? hard_libcall_value (outmode) : NULL_RTX);
/* Stack must be properly aligned now. */
- if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
- abort ();
+ gcc_assert (!(stack_pointer_delta
+ & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
before_call = get_last_insn ();
if nonvolatile values are live. For functions that cannot return,
inform flow that control does not fall through. */
- if (flags & (ECF_NORETURN | ECF_LONGJMP))
+ if (flags & ECF_NORETURN)
{
/* The barrier note must be emitted
immediately after the CALL_INSN. Some ports emit more than
{
last = PREV_INSN (last);
/* There was no CALL_INSN? */
- if (last == before_call)
- abort ();
+ gcc_assert (last != before_call);
}
emit_barrier_after (last);
stack_usage_map = initial_stack_usage_map;
}
+ if (stack_usage_map_buf)
+ free (stack_usage_map_buf);
+
return value;
}
partial = arg->partial;
}
- if (reg != 0 && partial == 0)
- /* Being passed entirely in a register. We shouldn't be called in
- this case. */
- abort ();
-
+ /* Being passed entirely in a register. We shouldn't be called in
+ this case. */
+ gcc_assert (reg == 0 || partial != 0);
+
/* If this arg needs special alignment, don't load the registers
here. */
if (arg->n_aligned_regs != 0)
stack_arg_under_construction--;
}
+ /* Check for overlap with already clobbered argument area. */
+ if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
+ {
+ int i = -1;
+ unsigned HOST_WIDE_INT k;
+ rtx x = arg->value;
+
+ if (XEXP (x, 0) == current_function_internal_arg_pointer)
+ i = 0;
+ else if (GET_CODE (XEXP (x, 0)) == PLUS
+ && XEXP (XEXP (x, 0), 0) ==
+ current_function_internal_arg_pointer
+ && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
+ i = INTVAL (XEXP (XEXP (x, 0), 1));
+ else
+ i = -1;
+
+ if (i >= 0)
+ {
+#ifdef ARGS_GROW_DOWNWARD
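+	  /* stored_args_map is indexed from the low end of the
+	     argument area, so flip the offset when arguments grow
+	     downward.  */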
+ i = -i - arg->locate.size.constant;
+#endif
+ if (arg->locate.size.constant > 0)
+ {
+ unsigned HOST_WIDE_INT sc = arg->locate.size.constant;
+
+ for (k = 0; k < sc; k++)
+ if (i + k < stored_args_map->n_bits
+ && TEST_BIT (stored_args_map, i + k))
+ {
+ sibcall_failure = 1;
+ break;
+ }
+ }
+ }
+ }
+
/* Don't allow anything left on stack from computation
of argument to alloca. */
if (flags & ECF_MAY_BE_ALLOCA)
}
else
{
- /* PUSH_ROUNDING has no effect on us, because
- emit_push_insn for BLKmode is careful to avoid it. */
- if (reg && GET_CODE (reg) == PARALLEL)
- {
- /* Use the size of the elt to compute excess. */
- rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
- excess = (arg->locate.size.constant
- - int_size_in_bytes (TREE_TYPE (pval))
- + partial * GET_MODE_SIZE (GET_MODE (elt)));
- }
- else
- excess = (arg->locate.size.constant
- - int_size_in_bytes (TREE_TYPE (pval))
- + partial * UNITS_PER_WORD);
+ /* PUSH_ROUNDING has no effect on us, because emit_push_insn
+ for BLKmode is careful to avoid it. */
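+      /* PARTIAL is already a byte count here, so it is added in
+	 directly with no words-to-bytes scaling.  */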
+ excess = (arg->locate.size.constant
+ - int_size_in_bytes (TREE_TYPE (pval))
+ + partial);
size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
NULL_RTX, TYPE_MODE (sizetype), 0);
}
- /* Some types will require stricter alignment, which will be
- provided for elsewhere in argument layout. */
- parm_align = MAX (PARM_BOUNDARY, TYPE_ALIGN (TREE_TYPE (pval)));
+ parm_align = arg->locate.boundary;
/* When an argument is padded down, the block is aligned to
PARM_BOUNDARY, but the actual argument isn't. */
i = INTVAL (XEXP (XEXP (x, 0), 1));
/* expand_call should ensure this. */
- if (arg->locate.offset.var || GET_CODE (size_rtx) != CONST_INT)
- abort ();
+ gcc_assert (!arg->locate.offset.var
+ && GET_CODE (size_rtx) == CONST_INT);
if (arg->locate.offset.constant > i)
{
arg->value = arg->stack_slot;
}
+ if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
+ {
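+      /* Pull the pieces into pseudos now, while arbitrary computation
+	 is still safe to emit; load_register_parameters will move them
+	 into the hard registers with emit_group_move.  */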
+ tree type = TREE_TYPE (arg->tree_value);
+ arg->parallel_value
+ = emit_group_load_into_temps (arg->reg, arg->value, type,
+ int_size_in_bytes (type));
+ }
+
/* Mark all slots this store used. */
if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
&& argblock && ! variable_size && arg->stack)