/* Convert function calls to rtl insns, for GNU C compiler.
Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
- 1999, 2000, 2001 Free Software Foundation, Inc.
+ 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
This file is part of GCC.
#include "sbitmap.h"
#include "langhooks.h"
#include "target.h"
+#include "cgraph.h"
+#include "except.h"
/* Decide whether a function's arguments should be processed
from first to last or from last to first.
even though pass_on_stack is zero, just because FUNCTION_ARG says so.
pass_on_stack identifies arguments that *cannot* go in registers. */
int pass_on_stack;
- /* Offset of this argument from beginning of stack-args. */
- struct args_size offset;
- /* Similar, but offset to the start of the stack slot. Different from
- OFFSET if this arg pads downward. */
- struct args_size slot_offset;
- /* Size of this argument on the stack, rounded up for any padding it gets,
- parts of the argument passed in registers do not count.
- If REG_PARM_STACK_SPACE is defined, then register parms
- are counted here as well. */
- struct args_size size;
+ /* Some fields packaged up for locate_and_pad_parm. */
+ struct locate_and_pad_arg_data locate;
/* Location on the stack at which parameter should be stored. The store
has already been done if STACK == VALUE. */
rtx stack;
word-sized pseudos we made. */
rtx *aligned_regs;
int n_aligned_regs;
- /* The amount that the stack pointer needs to be adjusted to
- force alignment for the next argument. */
- struct args_size alignment_pad;
};
/* A vector of one char per byte of stack space. A byte is nonzero if
rtx, int));
static rtx rtx_for_function_call PARAMS ((tree, tree));
static void load_register_parameters PARAMS ((struct arg_data *,
- int, rtx *, int));
+ int, rtx *, int,
+ int, int *));
static rtx emit_library_call_value_1 PARAMS ((int, rtx, rtx,
enum libcall_type,
enum machine_mode,
static rtx try_to_integrate PARAMS ((tree, tree, rtx,
int, tree, rtx));
static int check_sibcall_argument_overlap_1 PARAMS ((rtx));
-static int check_sibcall_argument_overlap PARAMS ((rtx, struct arg_data *));
+static int check_sibcall_argument_overlap PARAMS ((rtx, struct arg_data *,
+ int));
static int combine_pending_stack_adjustment_and_call
PARAMS ((int, struct args_size *, int));
+static tree fix_unsafe_tree PARAMS ((tree));
#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area PARAMS ((int, rtx, int *, int *));
#endif
abort ();
- /* Find the CALL insn we just emitted. */
- for (call_insn = get_last_insn ();
- call_insn && GET_CODE (call_insn) != CALL_INSN;
- call_insn = PREV_INSN (call_insn))
- ;
-
- if (! call_insn)
- abort ();
+ /* Find the call we just emitted. */
+ call_insn = last_call_insn ();
/* Mark memory as used for "pure" function call. */
if (ecf_flags & ECF_PURE)
gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
call_fusage);
- /* Put the register usage information on the CALL. If there is already
- some usage information, put ours at the end. */
- if (CALL_INSN_FUNCTION_USAGE (call_insn))
- {
- rtx link;
-
- for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
- link = XEXP (link, 1))
- ;
-
- XEXP (link, 1) = call_fusage;
- }
- else
- CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
+ /* Put the register usage information there. */
+ add_function_usage_to (call_insn, call_fusage);
/* If this is a const call, then set the insn's unchanging bit. */
if (ecf_flags & (ECF_CONST | ECF_PURE))
if (ecf_flags & ECF_NOTHROW)
REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
REG_NOTES (call_insn));
+ else
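+ /* Record that the enclosing EH region may contain a throw from this call. */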
+ note_eh_region_may_contain_throw ();
if (ecf_flags & ECF_NORETURN)
REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
Similarly set LONGJMP if the function is in the longjmp family.
- Set MALLOC for any of the standard memory allocation functions which
- allocate from the heap.
-
Set MAY_BE_ALLOCA for any memory allocation function that might allocate
space from the stack such as alloca. */
|| ((tname[5] == 'p' || tname[5] == 'e')
&& tname[6] == '\0'))))
flags |= ECF_FORK_OR_EXEC;
-
- /* Do not add any more malloc-like functions to this list,
- instead mark them as malloc functions using the malloc attribute.
- Note, realloc is not suitable for attribute malloc since
- it may return the same address across multiple calls.
- C++ operator new is not suitable because it is not required
- to return a unique pointer; indeed, the standard placement new
- just returns its argument. */
- else if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == Pmode
- && (! strcmp (tname, "malloc")
- || ! strcmp (tname, "calloc")
- || ! strcmp (tname, "strdup")))
- flags |= ECF_MALLOC;
}
return flags;
}
{
int flags = 0;
tree type = exp;
- /* ??? We can't set IS_MALLOC for function types? */
+
if (DECL_P (exp))
{
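+ /* Use any const/pure information the callgraph code has recorded
+ for this declaration. */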
+ struct cgraph_rtl_info *i = cgraph_rtl_info (exp);
type = TREE_TYPE (exp);
+ if (i)
+ {
+ if (i->pure_function)
+ flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
+ if (i->const_function)
+ flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
+ }
+
/* The function exp may have the `malloc' attribute. */
- if (DECL_P (exp) && DECL_IS_MALLOC (exp))
+ if (DECL_IS_MALLOC (exp))
flags |= ECF_MALLOC;
/* The function exp may have the `pure' attribute. */
- if (DECL_P (exp) && DECL_IS_PURE (exp))
+ if (DECL_IS_PURE (exp))
flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
if (TREE_NOTHROW (exp))
int *low_to_save;
int *high_to_save;
{
- int i;
- rtx save_area = NULL_RTX;
+ int low;
+ int high;
- /* Compute the boundary of the that needs to be saved, if any. */
+ /* Compute the boundary of the area that needs to be saved, if any. */
+ high = reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
- for (i = 0; i < reg_parm_stack_space + 1; i++)
-#else
- for (i = 0; i < reg_parm_stack_space; i++)
+ high += 1;
#endif
- {
- if (i >= highest_outgoing_arg_in_use
- || stack_usage_map[i] == 0)
- continue;
+ if (high > highest_outgoing_arg_in_use)
+ high = highest_outgoing_arg_in_use;
- if (*low_to_save == -1)
- *low_to_save = i;
+ for (low = 0; low < high; low++)
+ if (stack_usage_map[low] != 0)
+ {
+ int num_to_save;
+ enum machine_mode save_mode;
+ int delta;
+ rtx stack_area;
+ rtx save_area;
- *high_to_save = i;
- }
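+ /* Shrink HIGH down to the highest slot that is actually in use. */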
+ while (stack_usage_map[--high] == 0)
+ ;
- if (*low_to_save >= 0)
- {
- int num_to_save = *high_to_save - *low_to_save + 1;
- enum machine_mode save_mode
- = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
- rtx stack_area;
+ *low_to_save = low;
+ *high_to_save = high;
- /* If we don't have the required alignment, must do this in BLKmode. */
- if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
- BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
- save_mode = BLKmode;
+ num_to_save = high - low + 1;
+ save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
+
+ /* If we don't have the required alignment, must do this
+ in BLKmode. */
+ if ((low & (MIN (GET_MODE_SIZE (save_mode),
+ BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
+ save_mode = BLKmode;
#ifdef ARGS_GROW_DOWNWARD
- stack_area
- = gen_rtx_MEM (save_mode,
- memory_address (save_mode,
- plus_constant (argblock,
- - *high_to_save)));
+ delta = -high;
#else
- stack_area = gen_rtx_MEM (save_mode,
- memory_address (save_mode,
- plus_constant (argblock,
- *low_to_save)));
+ delta = low;
#endif
+ stack_area = gen_rtx_MEM (save_mode,
+ memory_address (save_mode,
+ plus_constant (argblock,
+ delta)));
- set_mem_align (stack_area, PARM_BOUNDARY);
- if (save_mode == BLKmode)
- {
- save_area = assign_stack_temp (BLKmode, num_to_save, 0);
- emit_block_move (validize_mem (save_area), stack_area,
- GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
- }
- else
- {
- save_area = gen_reg_rtx (save_mode);
- emit_move_insn (save_area, stack_area);
- }
- }
+ set_mem_align (stack_area, PARM_BOUNDARY);
+ if (save_mode == BLKmode)
+ {
+ save_area = assign_stack_temp (BLKmode, num_to_save, 0);
+ emit_block_move (validize_mem (save_area), stack_area,
+ GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
+ }
+ else
+ {
+ save_area = gen_reg_rtx (save_mode);
+ emit_move_insn (save_area, stack_area);
+ }
+
+ return save_area;
+ }
- return save_area;
+ return NULL_RTX;
}
static void
int low_to_save;
{
enum machine_mode save_mode = GET_MODE (save_area);
+ int delta;
+ rtx stack_area;
+
#ifdef ARGS_GROW_DOWNWARD
- rtx stack_area
- = gen_rtx_MEM (save_mode,
- memory_address (save_mode,
- plus_constant (argblock,
- - high_to_save)));
+ delta = -high_to_save;
#else
- rtx stack_area
- = gen_rtx_MEM (save_mode,
- memory_address (save_mode,
- plus_constant (argblock,
- low_to_save)));
+ delta = low_to_save;
#endif
+ stack_area = gen_rtx_MEM (save_mode,
+ memory_address (save_mode,
+ plus_constant (argblock, delta)));
+ set_mem_align (stack_area, PARM_BOUNDARY);
if (save_mode != BLKmode)
emit_move_insn (stack_area, save_area);
/* Count arg position in order args appear. */
int argpos;
- struct args_size alignment_pad;
int i;
tree p;
with those made by function.c. */
/* See if this argument should be passed by invisible reference. */
- if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
- && contains_placeholder_p (TYPE_SIZE (type)))
+ if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
|| TREE_ADDRESSABLE (type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
|| FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
#else
args[i].reg != 0,
#endif
- fndecl, args_size, &args[i].offset,
- &args[i].size, &alignment_pad);
-
-#ifndef ARGS_GROW_DOWNWARD
- args[i].slot_offset = *args_size;
-#endif
-
- args[i].alignment_pad = alignment_pad;
-
- /* If a part of the arg was put into registers,
- don't include that part in the amount pushed. */
- if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
- args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
- / (PARM_BOUNDARY / BITS_PER_UNIT)
- * (PARM_BOUNDARY / BITS_PER_UNIT));
+ args[i].pass_on_stack ? 0 : args[i].partial,
+ fndecl, args_size, &args[i].locate);
/* Update ARGS_SIZE, the total stack space for args so far. */
- args_size->constant += args[i].size.constant;
- if (args[i].size.var)
- {
- ADD_PARM_SIZE (*args_size, args[i].size.var);
- }
-
- /* Since the slot offset points to the bottom of the slot,
- we must record it after incrementing if the args grow down. */
-#ifdef ARGS_GROW_DOWNWARD
- args[i].slot_offset = *args_size;
-
- args[i].slot_offset.constant = -args_size->constant;
- if (args_size->var)
- SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
-#endif
+ args_size->constant += args[i].locate.size.constant;
+ if (args[i].locate.size.var)
+ ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
/* Increment ARGS_SO_FAR, which has info about which arg-registers
have been used, etc. */
for (i = 0; i < num_actuals; i++)
{
- rtx offset = ARGS_SIZE_RTX (args[i].offset);
- rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
+ rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
+ rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
rtx addr;
/* Skip this parm if it will not be passed on the stack. */
addr = plus_constant (addr, arg_offset);
args[i].stack = gen_rtx_MEM (args[i].mode, addr);
+ set_mem_align (args[i].stack, PARM_BOUNDARY);
set_mem_attributes (args[i].stack,
TREE_TYPE (args[i].tree_value), 1);
addr = plus_constant (addr, arg_offset);
args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
+ set_mem_align (args[i].stack_slot, PARM_BOUNDARY);
set_mem_attributes (args[i].stack_slot,
TREE_TYPE (args[i].tree_value), 1);
FNDECL is the tree node for the target function. For an indirect call
FNDECL will be NULL_TREE.
- EXP is the CALL_EXPR for this call. */
+ ADDR is the operand 0 of CALL_EXPR for this call. */
static rtx
-rtx_for_function_call (fndecl, exp)
+rtx_for_function_call (fndecl, addr)
tree fndecl;
- tree exp;
+ tree addr;
{
rtx funexp;
/* Generate an rtx (probably a pseudo-register) for the address. */
{
push_temp_slots ();
- funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
+ funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
pop_temp_slots (); /* FUNEXP can't be BLKmode. */
emit_queue ();
}
expressions were already evaluated.
Mark all register-parms as living through the call, putting these USE
- insns in the CALL_INSN_FUNCTION_USAGE field. */
+ insns in the CALL_INSN_FUNCTION_USAGE field.
+
+ When IS_SIBCALL, perform the check_sibcall_argument_overlap
+ checking, setting *SIBCALL_FAILURE if appropriate. */
static void
-load_register_parameters (args, num_actuals, call_fusage, flags)
+load_register_parameters (args, num_actuals, call_fusage, flags,
+ is_sibcall, sibcall_failure)
struct arg_data *args;
int num_actuals;
rtx *call_fusage;
int flags;
+ int is_sibcall;
+ int *sibcall_failure;
{
int i, j;
if (reg)
{
+ rtx before_arg = get_last_insn ();
/* Set to non-negative if must move a word at a time, even if just
one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
we just use a normal move insn. This value can be zero if the
validize_mem (args[i].value), nregs,
args[i].mode);
+ /* When a parameter is a block, and perhaps in other cases, it is
+ possible that it did a load from an argument slot that was
+ already clobbered. */
+ if (is_sibcall
+ && check_sibcall_argument_overlap (before_arg, &args[i], 0))
+ *sibcall_failure = 1;
+
/* Handle calls that pass values in multiple non-contiguous
locations. The Irix 6 ABI has examples of this. */
if (GET_CODE (reg) == PARALLEL)
the stack before executing the inlined function if it
makes any calls. */
- for (i = reg_parm_stack_space - 1; i >= 0; i--)
- if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
- break;
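+ /* Set I to the index of the highest reg-parm stack slot that is
+ actually in use, or -1 if none of them is used. */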
+ i = reg_parm_stack_space;
+ if (i > highest_outgoing_arg_in_use)
+ i = highest_outgoing_arg_in_use;
+ while (--i >= 0 && stack_usage_map[i] == 0)
+ ;
if (stack_arg_under_construction || i >= 0)
{
/* Scan sequence after INSN if it does not dereference any argument slots
we already clobbered by tail call arguments (as noted in stored_args_map
- bitmap). Add stack slots for ARG to stored_args_map bitmap afterwards.
- Return nonzero if sequence after INSN dereferences such argument slots,
- zero otherwise. */
+ bitmap). If MARK_STORED_ARGS_MAP, add stack slots for ARG to
+ stored_args_map bitmap afterwards (when ARG is a register,
+ MARK_STORED_ARGS_MAP should be 0). Return nonzero if sequence after
+ INSN dereferences such argument slots, zero otherwise. */
static int
-check_sibcall_argument_overlap (insn, arg)
+check_sibcall_argument_overlap (insn, arg, mark_stored_args_map)
rtx insn;
struct arg_data *arg;
+ int mark_stored_args_map;
{
int low, high;
&& check_sibcall_argument_overlap_1 (PATTERN (insn)))
break;
+ if (mark_stored_args_map)
+ {
#ifdef ARGS_GROW_DOWNWARD
- low = -arg->slot_offset.constant - arg->size.constant;
+ low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
#else
- low = arg->slot_offset.constant;
+ low = arg->locate.slot_offset.constant;
#endif
- for (high = low + arg->size.constant; low < high; low++)
- SET_BIT (stored_args_map, low);
+ for (high = low + arg->locate.size.constant; low < high; low++)
+ SET_BIT (stored_args_map, low);
+ }
return insn != NULL_RTX;
}
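+/* T is a tree that is unsafe to evaluate more than once (a call
+   argument or the call address). Return an equivalent tree that can
+   safely be reevaluated: wrap a mildly unsafe tree in an UNSAVE_EXPR;
+   for a wildly unsafe tree, expand it once into a new VAR_DECL and
+   return that variable instead. */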
+static tree
+fix_unsafe_tree (t)
+ tree t;
+{
+ switch (unsafe_for_reeval (t))
+ {
+ case 0: /* Safe. */
+ break;
+
+ case 1: /* Mildly unsafe. */
+ t = unsave_expr (t);
+ break;
+
+ case 2: /* Wildly unsafe. */
+ {
+ tree var = build_decl (VAR_DECL, NULL_TREE,
+ TREE_TYPE (t));
+ SET_DECL_RTL (var,
+ expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL));
+ t = var;
+ }
+ break;
+
+ default:
+ abort ();
+ }
+ return t;
+}
+
/* Generate all the code for a function call
and return an rtx for its value.
Store the value in TARGET (specified as an rtx) if convenient.
rtx tail_call_insns = NULL_RTX;
/* Data type of the function. */
tree funtype;
+ tree type_arg_types;
/* Declaration of the function being called,
or 0 if the function is computed (not known by name). */
tree fndecl = 0;
int is_integrable = 0;
#ifdef REG_PARM_STACK_SPACE
/* Define the boundary of the register parm stack space that needs to be
- save, if any. */
- int low_to_save = -1, high_to_save;
+ saved, if any. */
+ int low_to_save, high_to_save;
rtx save_area = 0; /* Place that it is saved */
#endif
int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
char *initial_stack_usage_map = stack_usage_map;
- int old_stack_arg_under_construction = 0;
+ int old_stack_allocated;
+
+ /* State variables to track stack modifications. */
rtx old_stack_level = 0;
+ int old_stack_arg_under_construction = 0;
int old_pending_adj = 0;
int old_inhibit_defer_pop = inhibit_defer_pop;
- int old_stack_allocated;
+
+ /* Some stack pointer alterations we make are performed via
+ allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
+ which we then also need to save/restore along the way. */
+ int old_stack_pointer_delta = 0;
+
rtx call_fusage;
tree p = TREE_OPERAND (exp, 0);
+ tree addr = TREE_OPERAND (exp, 0);
int i;
/* The alignment of the stack, in bits. */
HOST_WIDE_INT preferred_stack_boundary;
else
flags |= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p)));
+ /* Warn if this value is an aggregate type,
+ regardless of which calling convention we are using for it. */
+ if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
+ warning ("function call has aggregate value");
+
+ /* If the result of a pure or const function call is ignored (or void),
+ and none of its arguments are volatile, we can avoid expanding the
+ call and just evaluate the arguments for side-effects. */
+ if ((flags & (ECF_CONST | ECF_PURE))
+ && (ignore || target == const0_rtx
+ || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
+ {
+ bool volatilep = false;
+ tree arg;
+
+ for (arg = actparms; arg; arg = TREE_CHAIN (arg))
+ if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
+ {
+ volatilep = true;
+ break;
+ }
+
+ if (! volatilep)
+ {
+ for (arg = actparms; arg; arg = TREE_CHAIN (arg))
+ expand_expr (TREE_VALUE (arg), const0_rtx,
+ VOIDmode, EXPAND_NORMAL);
+ return const0_rtx;
+ }
+ }
+
#ifdef REG_PARM_STACK_SPACE
#ifdef MAYBE_REG_PARM_STACK_SPACE
reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
must_preallocate = 1;
#endif
- /* Warn if this value is an aggregate type,
- regardless of which calling convention we are using for it. */
- if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
- warning ("function call has aggregate value");
-
/* Set up a place to return a structure. */
/* Cater to broken compilers. */
/* Figure out the amount to which the stack should be aligned. */
preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
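+ /* If the callgraph has a preferred incoming stack boundary recorded
+ for the callee, use it instead of the default. */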
+ if (fndecl)
+ {
+ struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
+ if (i && i->preferred_incoming_stack_boundary)
+ preferred_stack_boundary = i->preferred_incoming_stack_boundary;
+ }
/* Operand 0 is a pointer-to-function; get the type of the function. */
- funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
+ funtype = TREE_TYPE (addr);
if (! POINTER_TYPE_P (funtype))
abort ();
funtype = TREE_TYPE (funtype);
+ /* Munge the tree to split complex arguments into their imaginary
+ and real parts. */
+ if (SPLIT_COMPLEX_ARGS)
+ {
+ type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
+ actparms = split_complex_values (actparms);
+ }
+ else
+ type_arg_types = TYPE_ARG_TYPES (funtype);
+
/* See if this is a call to a function that can return more than once
or a call to longjmp or malloc. */
flags |= special_function_p (fndecl, flags);
if ((STRICT_ARGUMENT_NAMING
|| ! PRETEND_OUTGOING_VARARGS_NAMED)
- && TYPE_ARG_TYPES (funtype) != 0)
+ && type_arg_types != 0)
n_named_args
- = (list_length (TYPE_ARG_TYPES (funtype))
+ = (list_length (type_arg_types)
/* Don't include the last named arg. */
- (STRICT_ARGUMENT_NAMING ? 0 : 1)
/* Count the struct value address, if it is passed as a parm. */
calling convention than normal calls. The last argument in
INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
or not. */
- INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
+ INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl);
/* Make a vector to hold all the information about each arg. */
args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
/* Tail recursion fails, when we are not dealing with recursive calls. */
if (!try_tail_recursion
- || TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
- || TREE_OPERAND (TREE_OPERAND (exp, 0), 0) != current_function_decl)
+ || TREE_CODE (addr) != ADDR_EXPR
+ || TREE_OPERAND (addr, 0) != current_function_decl)
try_tail_recursion = 0;
/* Rest of purposes for tail call optimizations to fail. */
/* Functions that do not return exactly once may not be sibcall
optimized. */
|| (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP | ECF_NORETURN))
- || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
+ /* If the called function is nested in the current one, it might access
+ some of the caller's arguments, but could clobber them beforehand if
+ the argument areas are shared. */
+ || (fndecl && decl_function_context (fndecl) == current_function_decl)
/* If this function requires more stack slots than the current
function, we cannot change it into a sibling call. */
|| args_size.constant > current_function_args_size
/* If the callee pops its own arguments, then it must pop exactly
the same number of arguments as the current function. */
- || RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
- != RETURN_POPS_ARGS (current_function_decl,
- TREE_TYPE (current_function_decl),
- current_function_args_size))
+ || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
+ != RETURN_POPS_ARGS (current_function_decl,
+ TREE_TYPE (current_function_decl),
+ current_function_args_size))
+ || !(*lang_hooks.decls.ok_for_sibcall) (fndecl))
try_tail_call = 0;
if (try_tail_call || try_tail_recursion)
for (; i != end; i += inc)
{
- switch (unsafe_for_reeval (args[i].tree_value))
- {
- case 0: /* Safe. */
- break;
-
- case 1: /* Mildly unsafe. */
- args[i].tree_value = unsave_expr (args[i].tree_value);
- break;
-
- case 2: /* Wildly unsafe. */
- {
- tree var = build_decl (VAR_DECL, NULL_TREE,
- TREE_TYPE (args[i].tree_value));
- SET_DECL_RTL (var,
- expand_expr (args[i].tree_value, NULL_RTX,
- VOIDmode, EXPAND_NORMAL));
- args[i].tree_value = var;
- }
- break;
-
- default:
- abort ();
- }
+ args[i].tree_value = fix_unsafe_tree (args[i].tree_value);
/* We need to build actparms for optimize_tail_recursion. We can
safely trash away TREE_PURPOSE, since it is unused by this
function. */
if (try_tail_recursion)
actparms = tree_cons (NULL_TREE, args[i].tree_value, actparms);
}
+ /* Do the same for the function address if it is an expression. */
+ if (!fndecl)
+ addr = fix_unsafe_tree (addr);
/* Expanding one of those dangerous arguments could have added
cleanups, but otherwise give it a whirl. */
if (any_pending_cleanups (1))
is subject to race conditions, just as with multithreaded
programs. */
- emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__gcov_flush"),
- LCT_ALWAYS_RETURN,
- VOIDmode, 0);
+ emit_library_call (gcov_flush_libfunc, LCT_ALWAYS_RETURN, VOIDmode, 0);
}
/* Ensure current function's preferred stack boundary is at least
if (cfun->preferred_stack_boundary < preferred_stack_boundary
&& fndecl != current_function_decl)
cfun->preferred_stack_boundary = preferred_stack_boundary;
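+ /* Note that the current function emits a recursive call to itself. */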
+ if (fndecl == current_function_decl)
+ cfun->recursive_call_emit = true;
preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
/* We want to make two insn chains; one for a sibling call, the other
for a normal call. We will select one of the two chains after
initial RTL generation is complete. */
- for (pass = 0; pass < 2; pass++)
+ for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
{
int sibcall_failure = 0;
/* We want to emit any pending stack adjustments before the tail
if (pass == 0)
{
- if (! try_tail_call)
- continue;
-
/* Emit any queued insns now; otherwise they would end up in
only one of the alternates. */
emit_queue ();
if (old_stack_level == 0)
{
emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
+ old_stack_pointer_delta = stack_pointer_delta;
old_pending_adj = pending_stack_adjust;
pending_stack_adjust = 0;
/* stack_arg_under_construction says whether a stack arg is
if (needed == 0)
argblock = virtual_outgoing_args_rtx;
else
- argblock = push_block (GEN_INT (needed), 0, 0);
+ {
+ argblock = push_block (GEN_INT (needed), 0, 0);
+#ifdef ARGS_GROW_DOWNWARD
+ argblock = plus_constant (argblock, needed);
+#endif
+ }
/* We only really need to call `copy_to_reg' in the case
where push insns are going to be used to pass ARGBLOCK
VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
as well always do it. */
argblock = copy_to_reg (argblock);
+ }
+ }
+ }
- /* The save/restore code in store_one_arg handles all
- cases except one: a constructor call (including a C
- function returning a BLKmode struct) to initialize
- an argument. */
- if (stack_arg_under_construction)
- {
+ if (ACCUMULATE_OUTGOING_ARGS)
+ {
+ /* The save/restore code in store_one_arg handles all
+ cases except one: a constructor call (including a C
+ function returning a BLKmode struct) to initialize
+ an argument. */
+ if (stack_arg_under_construction)
+ {
#ifndef OUTGOING_REG_PARM_STACK_SPACE
- rtx push_size = GEN_INT (reg_parm_stack_space
- + adjusted_args_size.constant);
+ rtx push_size = GEN_INT (reg_parm_stack_space
+ + adjusted_args_size.constant);
#else
- rtx push_size = GEN_INT (adjusted_args_size.constant);
+ rtx push_size = GEN_INT (adjusted_args_size.constant);
#endif
- if (old_stack_level == 0)
- {
- emit_stack_save (SAVE_BLOCK, &old_stack_level,
- NULL_RTX);
- old_pending_adj = pending_stack_adjust;
- pending_stack_adjust = 0;
- /* stack_arg_under_construction says whether a stack
- arg is being constructed at the old stack level.
- Pushing the stack gets a clean outgoing argument
- block. */
- old_stack_arg_under_construction
- = stack_arg_under_construction;
- stack_arg_under_construction = 0;
- /* Make a new map for the new argument list. */
- stack_usage_map = (char *)
- alloca (highest_outgoing_arg_in_use);
- memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
- highest_outgoing_arg_in_use = 0;
- }
- allocate_dynamic_stack_space (push_size, NULL_RTX,
- BITS_PER_UNIT);
- }
- /* If argument evaluation might modify the stack pointer,
- copy the address of the argument list to a register. */
- for (i = 0; i < num_actuals; i++)
- if (args[i].pass_on_stack)
- {
- argblock = copy_addr_to_reg (argblock);
- break;
- }
+ if (old_stack_level == 0)
+ {
+ emit_stack_save (SAVE_BLOCK, &old_stack_level,
+ NULL_RTX);
+ old_stack_pointer_delta = stack_pointer_delta;
+ old_pending_adj = pending_stack_adjust;
+ pending_stack_adjust = 0;
+ /* stack_arg_under_construction says whether a stack
+ arg is being constructed at the old stack level.
+ Pushing the stack gets a clean outgoing argument
+ block. */
+ old_stack_arg_under_construction
+ = stack_arg_under_construction;
+ stack_arg_under_construction = 0;
+ /* Make a new map for the new argument list. */
+ stack_usage_map = (char *)
+ alloca (highest_outgoing_arg_in_use);
+ memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
+ highest_outgoing_arg_in_use = 0;
}
+ allocate_dynamic_stack_space (push_size, NULL_RTX,
+ BITS_PER_UNIT);
}
- }
+ /* If argument evaluation might modify the stack pointer,
+ copy the address of the argument list to a register. */
+ for (i = 0; i < num_actuals; i++)
+ if (args[i].pass_on_stack)
+ {
+ argblock = copy_addr_to_reg (argblock);
+ break;
+ }
+ }
+
compute_argument_addresses (args, argblock, num_actuals);
/* If we push args individually in reverse order, perform stack alignment
be deferred during the evaluation of the arguments. */
NO_DEFER_POP;
- funexp = rtx_for_function_call (fndecl, exp);
+ funexp = rtx_for_function_call (fndecl, addr);
/* Figure out the register where the value, if any, will come back. */
valreg = 0;
reg_parm_stack_space)
|| (pass == 0
&& check_sibcall_argument_overlap (before_arg,
- &args[i])))
+ &args[i], 1)))
sibcall_failure = 1;
}
reg_parm_stack_space)
|| (pass == 0
&& check_sibcall_argument_overlap (before_arg,
- &args[i])))
+ &args[i], 1)))
sibcall_failure = 1;
}
structure value. */
if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
{
+#ifdef POINTERS_EXTEND_UNSIGNED
+ if (GET_MODE (structure_value_addr) != Pmode)
+ structure_value_addr = convert_memory_address
+ (Pmode, structure_value_addr);
+#endif
emit_move_insn (struct_value_rtx,
force_reg (Pmode,
force_operand (structure_value_addr,
funexp = prepare_call_address (funexp, fndecl, &call_fusage,
reg_parm_seen, pass == 0);
- load_register_parameters (args, num_actuals, &call_fusage, flags);
+ load_register_parameters (args, num_actuals, &call_fusage, flags,
+ pass == 0, &sibcall_failure);
/* Perform postincrements before actually calling the function. */
emit_queue ();
next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
flags, & args_so_far);
- /* Verify that we've deallocated all the stack we used. */
- if (pass
- && old_stack_allocated != stack_pointer_delta - pending_stack_adjust)
- abort ();
-
/* If call is cse'able, make appropriate pair of reg-notes around it.
Test valreg so we don't crash; may safely ignore `const'
if return type is void. Disable for PARALLEL return values, because
if (flags & ECF_LONGJMP)
current_function_calls_longjmp = 1;
- /* If this function is returning into a memory location marked as
- readonly, it means it is initializing that location. But we normally
- treat functions as not clobbering such locations, so we need to
- specify that this one does. */
- if (target != 0 && GET_CODE (target) == MEM
- && structure_value_addr != 0 && RTX_UNCHANGING_P (target))
- emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
-
/* If value type not void, return an rtx for the value. */
/* If there are cleanups to be called, don't use a hard reg as target.
If they refer to the same register, this move will be a no-op,
except when function inlining is being done. */
emit_move_insn (target, valreg);
+
+ /* If we are setting a MEM, this code must be executed. Since it is
+ emitted after the call insn, sibcall optimization cannot be
+ performed in that case. */
+ if (GET_CODE (target) == MEM)
+ sibcall_failure = 1;
}
else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
{
if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
{
emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
+ stack_pointer_delta = old_stack_pointer_delta;
pending_stack_adjust = old_pending_adj;
stack_arg_under_construction = old_stack_arg_under_construction;
highest_outgoing_arg_in_use = initial_highest_arg_in_use;
{
#ifdef REG_PARM_STACK_SPACE
if (save_area)
- {
- restore_fixed_argument_area (save_area, argblock,
- high_to_save, low_to_save);
- }
+ restore_fixed_argument_area (save_area, argblock,
+ high_to_save, low_to_save);
#endif
/* If we saved any argument areas, restore them. */
emit_move_insn (stack_area, args[i].save_area);
else
emit_block_move (stack_area, args[i].save_area,
- GEN_INT (args[i].size.constant),
+ GEN_INT (args[i].locate.size.constant),
BLOCK_OP_CALL_PARM);
}
expand_end_target_temps ();
}
+ /* If this function is returning into a memory location marked as
+ readonly, it means it is initializing that location. We normally treat
+ functions as not clobbering such locations, so we need to specify that
+ this one does. We do this by adding the appropriate CLOBBER to the
+ CALL_INSN function usage list. This cannot be done by emitting a
+ standalone CLOBBER after the call because the latter would be ignored
+ by at least the delay slot scheduling pass. We do this now instead of
+ adding to call_fusage before the call to emit_call_1 because TARGET
+ may be modified in the meantime. */
+ if (structure_value_addr != 0 && target != 0
+ && GET_CODE (target) == MEM && RTX_UNCHANGING_P (target))
+ add_function_usage_to
+ (last_call_insn (),
+ gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_CLOBBER (VOIDmode, target),
+ NULL_RTX));
+
insns = get_insns ();
end_sequence ();
sbitmap_free (stored_args_map);
}
else
- normal_call_insns = insns;
+ {
+ normal_call_insns = insns;
+
+ /* Verify that we've deallocated all the stack we used. */
+ if (old_stack_allocated !=
+ stack_pointer_delta - pending_stack_adjust)
+ abort ();
+ }
/* If something prevents making this a sibling call,
zero out the sequence. */
return target;
}
+
+/* Traverse an argument list in VALUES and expand all complex
+ arguments into their components. */
+tree
+split_complex_values (tree values)
+{
+ tree p;
+
+ values = copy_list (values);
+
+ for (p = values; p; p = TREE_CHAIN (p))
+ {
+ tree complex_value = TREE_VALUE (p);
+ tree complex_type;
+
+ complex_type = TREE_TYPE (complex_value);
+ if (!complex_type)
+ continue;
+
+ if (TREE_CODE (complex_type) == COMPLEX_TYPE)
+ {
+ tree subtype;
+ tree real, imag, next;
+
+ subtype = TREE_TYPE (complex_type);
+ complex_value = save_expr (complex_value);
+ real = build1 (REALPART_EXPR, subtype, complex_value);
+ imag = build1 (IMAGPART_EXPR, subtype, complex_value);
+
+ TREE_VALUE (p) = real;
+ next = TREE_CHAIN (p);
+ imag = build_tree_list (NULL_TREE, imag);
+ TREE_CHAIN (p) = imag;
+ TREE_CHAIN (imag) = next;
+
+ /* Skip the newly created node. */
+ p = TREE_CHAIN (p);
+ }
+ }
+
+ return values;
+}
+
+/* Traverse a list of TYPES and expand all complex types into their
+ components. */
+tree
+split_complex_types (tree types)
+{
+ tree p;
+
+ types = copy_list (types);
+
+ for (p = types; p; p = TREE_CHAIN (p))
+ {
+ tree complex_type = TREE_VALUE (p);
+
+ if (TREE_CODE (complex_type) == COMPLEX_TYPE)
+ {
+ tree next, imag;
+
+ /* Rewrite complex type with component type. */
+ TREE_VALUE (p) = TREE_TYPE (complex_type);
+ next = TREE_CHAIN (p);
+
+ /* Add another component type for the imaginary part. */
+ imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
+ TREE_CHAIN (p) = imag;
+ TREE_CHAIN (imag) = next;
+
+ /* Skip the newly created node. */
+ p = TREE_CHAIN (p);
+ }
+ }
+
+ return types;
+}
\f
/* Output a library call to function FUN (a SYMBOL_REF rtx).
The RETVAL parameter specifies whether return value needs to be saved, other
rtx fun;
int inc;
int count;
- struct args_size alignment_pad;
rtx argblock = 0;
CUMULATIVE_ARGS args_so_far;
struct arg
enum machine_mode mode;
rtx reg;
int partial;
- struct args_size offset;
- struct args_size size;
+ struct locate_and_pad_arg_data locate;
rtx save_area;
};
struct arg *argvec;
#ifdef REG_PARM_STACK_SPACE
/* Define the boundary of the register parm stack space that needs to be
saved, if any. */
- int low_to_save = -1, high_to_save = 0;
+ int low_to_save, high_to_save;
rtx save_area = 0; /* Place that it is saved. */
#endif
#else
argvec[count].reg != 0,
#endif
- NULL_TREE, &args_size, &argvec[count].offset,
- &argvec[count].size, &alignment_pad);
+ 0, NULL_TREE, &args_size, &argvec[count].locate);
if (argvec[count].reg == 0 || argvec[count].partial != 0
|| reg_parm_stack_space > 0)
- args_size.constant += argvec[count].size.constant;
+ args_size.constant += argvec[count].locate.size.constant;
FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
{
tree type = (*lang_hooks.types.type_for_mode) (mode, 0);
- slot = gen_rtx_MEM (mode,
- expand_expr (build1 (ADDR_EXPR,
- build_pointer_type
- (type),
- make_tree (type, val)),
- NULL_RTX, VOIDmode, 0));
+ slot
+ = gen_rtx_MEM (mode,
+ expand_expr (build1 (ADDR_EXPR,
+ build_pointer_type (type),
+ make_tree (type, val)),
+ NULL_RTX, VOIDmode, 0));
}
call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
#else
argvec[count].reg != 0,
#endif
- NULL_TREE, &args_size, &argvec[count].offset,
- &argvec[count].size, &alignment_pad);
+ argvec[count].partial,
+ NULL_TREE, &args_size, &argvec[count].locate);
- if (argvec[count].size.var)
+ if (argvec[count].locate.size.var)
abort ();
- if (reg_parm_stack_space == 0 && argvec[count].partial)
- argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
-
if (argvec[count].reg == 0 || argvec[count].partial != 0
|| reg_parm_stack_space > 0)
- args_size.constant += argvec[count].size.constant;
+ args_size.constant += argvec[count].locate.size.constant;
FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
}
{
/* The argument list is the property of the called routine and it
may clobber it. If the fixed area has been used for previous
- parameters, we must save and restore it.
-
- Here we compute the boundary of the that needs to be saved, if any. */
-
-#ifdef ARGS_GROW_DOWNWARD
- for (count = 0; count < reg_parm_stack_space + 1; count++)
-#else
- for (count = 0; count < reg_parm_stack_space; count++)
-#endif
- {
- if (count >= highest_outgoing_arg_in_use
- || stack_usage_map[count] == 0)
- continue;
-
- if (low_to_save == -1)
- low_to_save = count;
-
- high_to_save = count;
- }
-
- if (low_to_save >= 0)
- {
- int num_to_save = high_to_save - low_to_save + 1;
- enum machine_mode save_mode
- = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
- rtx stack_area;
-
- /* If we don't have the required alignment, must do this in BLKmode. */
- if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
- BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
- save_mode = BLKmode;
-
-#ifdef ARGS_GROW_DOWNWARD
- stack_area = gen_rtx_MEM (save_mode,
- memory_address (save_mode,
- plus_constant (argblock,
- -high_to_save)));
-#else
- stack_area = gen_rtx_MEM (save_mode,
- memory_address (save_mode,
- plus_constant (argblock,
- low_to_save)));
-#endif
- if (save_mode == BLKmode)
- {
- save_area = assign_stack_temp (BLKmode, num_to_save, 0);
- set_mem_align (save_area, PARM_BOUNDARY);
- emit_block_move (save_area, stack_area, GEN_INT (num_to_save),
- BLOCK_OP_CALL_PARM);
- }
- else
- {
- save_area = gen_reg_rtx (save_mode);
- emit_move_insn (save_area, stack_area);
- }
- }
+ parameters, we must save and restore it. */
+ save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
+ &low_to_save, &high_to_save);
}
#endif
#ifdef ARGS_GROW_DOWNWARD
/* stack_slot is negative, but we want to index stack_usage_map
with positive values. */
- upper_bound = -argvec[argnum].offset.constant + 1;
- lower_bound = upper_bound - argvec[argnum].size.constant;
+ upper_bound = -argvec[argnum].locate.offset.constant + 1;
+ lower_bound = upper_bound - argvec[argnum].locate.size.constant;
#else
- lower_bound = argvec[argnum].offset.constant;
- upper_bound = lower_bound + argvec[argnum].size.constant;
+ lower_bound = argvec[argnum].locate.offset.constant;
+ upper_bound = lower_bound + argvec[argnum].locate.size.constant;
#endif
- for (i = lower_bound; i < upper_bound; i++)
- if (stack_usage_map[i]
- /* Don't store things in the fixed argument area at this
- point; it has already been saved. */
- && i > reg_parm_stack_space)
- break;
+ i = lower_bound;
+ /* Don't worry about things in the fixed argument area;
+ it has already been saved. */
+ if (i < reg_parm_stack_space)
+ i = reg_parm_stack_space;
+ while (i < upper_bound && stack_usage_map[i] == 0)
+ i++;
- if (i != upper_bound)
+ if (i < upper_bound)
{
- /* We need to make a save area. See what mode we can make
- it. */
+ /* We need to make a save area. */
+ unsigned int size
+ = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
enum machine_mode save_mode
- = mode_for_size (argvec[argnum].size.constant
- * BITS_PER_UNIT,
- MODE_INT, 1);
+ = mode_for_size (size, MODE_INT, 1);
+ rtx adr
+ = plus_constant (argblock,
+ argvec[argnum].locate.offset.constant);
rtx stack_area
- = gen_rtx_MEM
- (save_mode,
- memory_address
- (save_mode,
- plus_constant (argblock,
- argvec[argnum].offset.constant)));
+ = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
argvec[argnum].save_area = gen_reg_rtx (save_mode);
emit_move_insn (argvec[argnum].save_area, stack_area);
emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
partial, reg, 0, argblock,
- GEN_INT (argvec[argnum].offset.constant),
- reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
+ GEN_INT (argvec[argnum].locate.offset.constant),
+ reg_parm_stack_space,
+ ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
/* Now mark the segment we just used. */
if (ACCUMULATE_OUTGOING_ARGS)
{
rtx insns;
- if (valreg == 0 || GET_CODE (valreg) == PARALLEL)
+ if (valreg == 0)
{
insns = get_insns ();
end_sequence ();
else
{
rtx note = 0;
- rtx temp = gen_reg_rtx (GET_MODE (valreg));
+ rtx temp;
int i;
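+ /* If the value comes back in more than one register (a PARALLEL),
+ gather the pieces into a single pseudo of mode OUTMODE first. */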
+ if (GET_CODE (valreg) == PARALLEL)
+ {
+ temp = gen_reg_rtx (outmode);
+ emit_group_store (temp, valreg, outmode);
+ valreg = temp;
+ }
+
+ temp = gen_reg_rtx (GET_MODE (valreg));
+
/* Construct an "equal form" for the value which mentions all the
arguments in order as well as the function name. */
for (i = 0; i < nargs; i++)
if (value != mem_value)
emit_move_insn (value, mem_value);
}
+ else if (GET_CODE (valreg) == PARALLEL)
+ {
+ if (value == 0)
+ value = gen_reg_rtx (outmode);
+ emit_group_store (value, valreg, outmode);
+ }
else if (value != 0)
emit_move_insn (value, valreg);
else
{
#ifdef REG_PARM_STACK_SPACE
if (save_area)
- {
- enum machine_mode save_mode = GET_MODE (save_area);
-#ifdef ARGS_GROW_DOWNWARD
- rtx stack_area
- = gen_rtx_MEM (save_mode,
- memory_address (save_mode,
- plus_constant (argblock,
- - high_to_save)));
-#else
- rtx stack_area
- = gen_rtx_MEM (save_mode,
- memory_address (save_mode,
- plus_constant (argblock, low_to_save)));
-#endif
-
- set_mem_align (stack_area, PARM_BOUNDARY);
- if (save_mode != BLKmode)
- emit_move_insn (stack_area, save_area);
- else
- emit_block_move (stack_area, save_area,
- GEN_INT (high_to_save - low_to_save + 1),
- BLOCK_OP_CALL_PARM);
- }
+ restore_fixed_argument_area (save_area, argblock,
+ high_to_save, low_to_save);
#endif
/* If we saved any argument areas, restore them. */
if (argvec[count].save_area)
{
enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
- rtx stack_area
- = gen_rtx_MEM (save_mode,
- memory_address
- (save_mode,
- plus_constant (argblock,
- argvec[count].offset.constant)));
+ rtx adr = plus_constant (argblock,
+ argvec[count].locate.offset.constant);
+ rtx stack_area = gen_rtx_MEM (save_mode,
+ memory_address (save_mode, adr));
emit_move_insn (stack_area, argvec[count].save_area);
}
or other LCT_ value for other types of library calls. */
void
-emit_library_call VPARAMS((rtx orgfun, enum libcall_type fn_type,
- enum machine_mode outmode, int nargs, ...))
+emit_library_call (rtx orgfun, enum libcall_type fn_type,
+ enum machine_mode outmode, int nargs, ...)
{
- VA_OPEN (p, nargs);
- VA_FIXEDARG (p, rtx, orgfun);
- VA_FIXEDARG (p, int, fn_type);
- VA_FIXEDARG (p, enum machine_mode, outmode);
- VA_FIXEDARG (p, int, nargs);
-
+ va_list p;
+
+ va_start (p, nargs);
emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
-
- VA_CLOSE (p);
+ va_end (p);
}
\f
/* Like emit_library_call except that an extra argument, VALUE,
If VALUE is nonzero, VALUE is returned. */
rtx
-emit_library_call_value VPARAMS((rtx orgfun, rtx value,
- enum libcall_type fn_type,
- enum machine_mode outmode, int nargs, ...))
+emit_library_call_value (rtx orgfun, rtx value,
+ enum libcall_type fn_type,
+ enum machine_mode outmode, int nargs, ...)
{
rtx result;
+ va_list p;
- VA_OPEN (p, nargs);
- VA_FIXEDARG (p, rtx, orgfun);
- VA_FIXEDARG (p, rtx, value);
- VA_FIXEDARG (p, int, fn_type);
- VA_FIXEDARG (p, enum machine_mode, outmode);
- VA_FIXEDARG (p, int, nargs);
-
+ va_start (p, nargs);
result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
nargs, p);
-
- VA_CLOSE (p);
+ va_end (p);
return result;
}
else
upper_bound = 0;
- lower_bound = upper_bound - arg->size.constant;
+ lower_bound = upper_bound - arg->locate.size.constant;
#else
if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
else
lower_bound = 0;
- upper_bound = lower_bound + arg->size.constant;
+ upper_bound = lower_bound + arg->locate.size.constant;
#endif
- for (i = lower_bound; i < upper_bound; i++)
- if (stack_usage_map[i]
- /* Don't store things in the fixed argument area at this point;
- it has already been saved. */
- && i > reg_parm_stack_space)
- break;
+ i = lower_bound;
+ /* Don't worry about things in the fixed argument area;
+ it has already been saved. */
+ if (i < reg_parm_stack_space)
+ i = reg_parm_stack_space;
+ while (i < upper_bound && stack_usage_map[i] == 0)
+ i++;
- if (i != upper_bound)
+ if (i < upper_bound)
{
- /* We need to make a save area. See what mode we can make it. */
- enum machine_mode save_mode
- = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
- rtx stack_area
- = gen_rtx_MEM (save_mode,
- memory_address (save_mode,
- XEXP (arg->stack_slot, 0)));
+ /* We need to make a save area. */
+ unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
+ enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
+ rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
+ rtx stack_area = gen_rtx_MEM (save_mode, adr);
if (save_mode == BLKmode)
{
}
}
}
- /* Now that we have saved any slots that will be overwritten by this
- store, mark all slots this store will use. We must do this before
- we actually expand the argument since the expansion itself may
- trigger library calls which might need to use the same stack slot. */
- if (argblock && ! variable_size && arg->stack)
- for (i = lower_bound; i < upper_bound; i++)
- stack_usage_map[i] = 1;
}
/* If this isn't going to be placed on both the stack and in registers,
(partial
|| TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
? NULL_RTX : arg->stack,
- VOIDmode, 0);
+ VOIDmode, EXPAND_STACK_PARM);
/* If we are promoting object (or for any other reason) the mode
doesn't agree, convert the mode. */
This can either be done with push or copy insns. */
emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
PARM_BOUNDARY, partial, reg, used - size, argblock,
- ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
- ARGS_SIZE_RTX (arg->alignment_pad));
+ ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
+ ARGS_SIZE_RTX (arg->locate.alignment_pad));
/* Unless this is a partially-in-register argument, the argument is now
in the stack. */
/* Round its size up to a multiple
of the allocation unit for arguments. */
- if (arg->size.var != 0)
+ if (arg->locate.size.var != 0)
{
excess = 0;
- size_rtx = ARGS_SIZE_RTX (arg->size);
+ size_rtx = ARGS_SIZE_RTX (arg->locate.size);
}
else
{
/* PUSH_ROUNDING has no effect on us, because
emit_push_insn for BLKmode is careful to avoid it. */
- excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
+ excess = (arg->locate.size.constant
+ - int_size_in_bytes (TREE_TYPE (pval))
+ partial * UNITS_PER_WORD);
size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
NULL_RTX, TYPE_MODE (sizetype), 0);
PARM_BOUNDARY, but the actual argument isn't. */
if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
{
- if (arg->size.var)
+ if (arg->locate.size.var)
parm_align = BITS_PER_UNIT;
else if (excess)
{
if ((flags & ECF_SIBCALL) && GET_CODE (arg->value) == MEM)
{
/* emit_push_insn might not work properly if arg->value and
- argblock + arg->offset areas overlap. */
+ argblock + arg->locate.offset areas overlap. */
rtx x = arg->value;
int i = 0;
i = INTVAL (XEXP (XEXP (x, 0), 1));
/* expand_call should ensure this */
- if (arg->offset.var || GET_CODE (size_rtx) != CONST_INT)
+ if (arg->locate.offset.var || GET_CODE (size_rtx) != CONST_INT)
abort ();
- if (arg->offset.constant > i)
+ if (arg->locate.offset.constant > i)
{
- if (arg->offset.constant < i + INTVAL (size_rtx))
+ if (arg->locate.offset.constant < i + INTVAL (size_rtx))
sibcall_failure = 1;
}
- else if (arg->offset.constant < i)
+ else if (arg->locate.offset.constant < i)
{
- if (i < arg->offset.constant + INTVAL (size_rtx))
+ if (i < arg->locate.offset.constant + INTVAL (size_rtx))
sibcall_failure = 1;
}
}
}
- /* Special handling is required if part of the parameter lies in the
- register parameter area. The argument may be copied into the stack
- slot using memcpy(), but the original contents of the register
- parameter area will be restored after the memcpy() call.
-
- To ensure that the part that lies in the register parameter area
- is copied correctly, we emit a separate push for that part. This
- push should be small enough to avoid a call to memcpy(). */
-#ifndef STACK_PARMS_IN_REG_PARM_AREA
- if (arg->reg && arg->pass_on_stack)
-#else
- if (1)
-#endif
- {
- if (arg->offset.constant < reg_parm_stack_space && arg->offset.var)
- error ("variable offset is passed partially in stack and in reg");
- else if (arg->offset.constant < reg_parm_stack_space && arg->size.var)
- error ("variable size is passed partially in stack and in reg");
- else if (arg->offset.constant < reg_parm_stack_space
- && ((arg->offset.constant + arg->size.constant)
- > reg_parm_stack_space))
- {
- rtx size_rtx1 = GEN_INT (reg_parm_stack_space - arg->offset.constant);
- emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx1,
- parm_align, partial, reg, excess, argblock,
- ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
- ARGS_SIZE_RTX (arg->alignment_pad));
- }
- }
-
-
emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
parm_align, partial, reg, excess, argblock,
- ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
- ARGS_SIZE_RTX (arg->alignment_pad));
+ ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
+ ARGS_SIZE_RTX (arg->locate.alignment_pad));
/* Unless this is a partially-in-register argument, the argument is now
in the stack.
arg->value = arg->stack_slot;
}
+ /* Mark all slots this store used. */
+ if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
+ && argblock && ! variable_size && arg->stack)
+ for (i = lower_bound; i < upper_bound; i++)
+ stack_usage_map[i] = 1;
+
/* Once we have pushed something, pops can't safely
be deferred during the rest of the arguments. */
NO_DEFER_POP;
return sibcall_failure;
}
+
+/* Nonzero if we do not know how to pass TYPE solely in registers.
+ We cannot do so in the following cases:
+
+ - if the type has variable size
+ - if the type is marked as addressable (it is required to be constructed
+ into the stack)
+ - if the padding and mode of the type is such that a copy into a register
+ would put it into the wrong part of the register.
+
+ Which padding can't be supported depends on the byte endianness.
+
+ A value in a register is implicitly padded at the most significant end.
+ On a big-endian machine, that is the lower end in memory.
+ So a value padded in memory at the upper end can't go in a register.
+ For a little-endian machine, the reverse is true. */
+
+bool
+default_must_pass_in_stack (mode, type)
+ enum machine_mode mode;
+ tree type;
+{
+ if (!type)
+ return false;
+
+ /* If the type has variable size... */
+ if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
+ return true;
+
+ /* If the type is marked as addressable (it is required
+ to be constructed into the stack)... */
+ if (TREE_ADDRESSABLE (type))
+ return true;
+
+ /* If the padding and mode of the type is such that a copy into
+ a register would put it into the wrong part of the register. */
+ if (mode == BLKmode
+ && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
+ && (FUNCTION_ARG_PADDING (mode, type)
+ == (BYTES_BIG_ENDIAN ? upward : downward)))
+ return true;
+
+ return false;
+}