Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
1999, 2000, 2001 Free Software Foundation, Inc.
-This file is part of GNU CC.
+This file is part of GCC.
-GNU CC is free software; you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
-any later version.
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 2, or (at your option) any later
+version.
-GNU CC is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+for more details.
You should have received a copy of the GNU General Public License
-along with GNU CC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+along with GCC; see the file COPYING. If not, write to the Free
+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
+02111-1307, USA. */
#include "config.h"
#include "system.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
+#include "libfuncs.h"
#include "function.h"
#include "regs.h"
#include "toplev.h"
#define FUNCTION_OK_FOR_SIBCALL(DECL) 1
#endif
-#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
-#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
-#endif
-
/* Decide whether a function's arguments should be processed
from first to last or from last to first.
/* Nonzero if this is a call to a function that returns with the stack
pointer depressed. */
#define ECF_SP_DEPRESSED 1024
+/* Nonzero if this call is known to always return. */
+#define ECF_ALWAYS_RETURN 2048
static void emit_call_1 PARAMS ((rtx, tree, tree, HOST_WIDE_INT,
HOST_WIDE_INT, HOST_WIDE_INT, rtx,
tree exp;
int which;
{
- register int i;
+ int i;
enum tree_code code = TREE_CODE (exp);
int class = TREE_CODE_CLASS (code);
int length = first_rtl_op (code);
case BLOCK:
{
- register tree local;
- register tree subblock;
+ tree local;
+ tree subblock;
for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
if (DECL_INITIAL (local) != 0
CALL_INSN_FUNCTION_USAGE information. */
rtx
-prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
+prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen, sibcallp)
rtx funexp;
tree fndecl;
rtx *call_fusage;
int reg_parm_seen;
+ int sibcallp;
{
rtx static_chain_value = 0;
funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
? force_not_mem (memory_address (FUNCTION_MODE, funexp))
: memory_address (FUNCTION_MODE, funexp));
- else
+ else if (! sibcallp)
{
#ifndef NO_FUNCTION_CSE
if (optimize && ! flag_no_function_cse)
We restore `inhibit_defer_pop' to that value.
CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
- denote registers used by the called function. */
+ denote registers used by the called function. */
static void
emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
/* If this is a const call, then set the insn's unchanging bit. */
if (ecf_flags & (ECF_CONST | ECF_PURE))
- CONST_CALL_P (call_insn) = 1;
+ CONST_OR_PURE_CALL_P (call_insn) = 1;
/* If this call can't throw, attach a REG_EH_REGION reg note to that
effect. */
if (ecf_flags & ECF_NORETURN)
REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
REG_NOTES (call_insn));
+ if (ecf_flags & ECF_ALWAYS_RETURN)
+ REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
+ REG_NOTES (call_insn));
+
+ if (ecf_flags & ECF_RETURNS_TWICE)
+ {
+ REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
+ REG_NOTES (call_insn));
+ current_function_calls_setjmp = 1;
+ }
SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
move_by_pieces (stack_area, validize_mem (save_area),
high_to_save - low_to_save + 1, PARM_BOUNDARY);
}
-#endif
+#endif /* REG_PARM_STACK_SPACE */
/* If any elements in ARGS refer to parameters that are to be passed in
registers, but not in memory, and whose alignment does not permit a
/* For accumulate outgoing args mode we don't need to align, since the frame
will be already aligned. Align to STACK_BOUNDARY in order to prevent
backends from generating missaligned frame sizes. */
-#ifdef STACK_BOUNDARY
if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
preferred_stack_boundary = STACK_BOUNDARY;
-#endif
/* Compute the actual size of the argument block required. The variable
and constant sizes must be combined, the size may have to be rounded,
args_size->var = ARGS_SIZE_TREE (*args_size);
args_size->constant = 0;
-#ifdef PREFERRED_STACK_BOUNDARY
preferred_stack_boundary /= BITS_PER_UNIT;
if (preferred_stack_boundary > 1)
{
abort ();
args_size->var = round_up (args_size->var, preferred_stack_boundary);
}
-#endif
if (reg_parm_stack_space > 0)
{
}
else
{
-#ifdef PREFERRED_STACK_BOUNDARY
preferred_stack_boundary /= BITS_PER_UNIT;
if (preferred_stack_boundary < 1)
preferred_stack_boundary = 1;
/ preferred_stack_boundary
* preferred_stack_boundary)
- stack_pointer_delta);
-#endif
args_size->constant = MAX (args_size->constant,
reg_parm_stack_space);
outgoing arguments and we cannot allow reordering of reads
from function arguments with stores to outgoing arguments
of sibling calls. */
- MEM_ALIAS_SET (args[i].stack) = 0;
- MEM_ALIAS_SET (args[i].stack_slot) = 0;
+ set_mem_alias_set (args[i].stack, 0);
+ set_mem_alias_set (args[i].stack_slot, 0);
}
}
}
int old_inhibit_defer_pop = inhibit_defer_pop;
int old_stack_allocated;
rtx call_fusage;
- register tree p = TREE_OPERAND (exp, 0);
- register int i;
+ tree p = TREE_OPERAND (exp, 0);
+ int i;
/* The alignment of the stack, in bits. */
HOST_WIDE_INT preferred_stack_boundary;
/* The alignment of the stack, in bytes. */
}
/* Figure out the amount to which the stack should be aligned. */
-#ifdef PREFERRED_STACK_BOUNDARY
preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
-#else
- preferred_stack_boundary = STACK_BOUNDARY;
-#endif
/* Operand 0 is a pointer-to-function; get the type of the function. */
funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
memset ((char *) args, 0, num_actuals * sizeof (struct arg_data));
- /* Build up entries inthe ARGS array, compute the size of the arguments
- into ARGS_SIZE, etc. */
+ /* Build up entries in the ARGS array, compute the size of the
+ arguments into ARGS_SIZE, etc. */
initialize_argument_information (num_actuals, args, &args_size,
n_named_args, actparms, fndecl,
&args_so_far, reg_parm_stack_space,
/* If this function requires a variable-sized argument list, don't
try to make a cse'able block for this call. We may be able to
do this eventually, but it is too complicated to keep track of
- what insns go in the cse'able block and which don't. */
+ what insns go in the cse'able block and which don't. */
flags &= ~(ECF_CONST | ECF_PURE);
must_preallocate = 1;
is subject to race conditions, just as with multithreaded
programs. */
- emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__bb_fork_func"), 0,
+ emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__bb_fork_func"),
+ LCT_ALWAYS_RETURN,
VOIDmode, 0);
}
compute_argument_addresses (args, argblock, num_actuals);
-#ifdef PREFERRED_STACK_BOUNDARY
/* If we push args individually in reverse order, perform stack alignment
before the first push (the last arg). */
if (PUSH_ARGS_REVERSED && argblock == 0
/* Now that the stack is properly aligned, pops can't safely
be deferred during the evaluation of the arguments. */
NO_DEFER_POP;
-#endif
-
- /* Don't try to defer pops if preallocating, not even from the first arg,
- since ARGBLOCK probably refers to the SP. */
- if (argblock)
- NO_DEFER_POP;
funexp = rtx_for_function_call (fndecl, exp);
sibcall_failure = 1;
}
-#ifdef PREFERRED_STACK_BOUNDARY
/* If we pushed args in forward order, perform stack alignment
after pushing the last arg. */
if (!PUSH_ARGS_REVERSED && argblock == 0)
anti_adjust_stack (GEN_INT (adjusted_args_size.constant
- unadjusted_args_size));
-#endif
/* If register arguments require space on the stack and stack space
was not preallocated, allocate stack space here for arguments
}
funexp = prepare_call_address (funexp, fndecl, &call_fusage,
- reg_parm_seen);
+ reg_parm_seen, pass == 0);
load_register_parameters (args, num_actuals, &call_fusage, flags);
/* All arguments and registers used for the call must be set up by
now! */
-#ifdef PREFERRED_STACK_BOUNDARY
/* Stack must be properly aligned now. */
if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
abort ();
-#endif
/* Generate the actual call instruction. */
emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
if nonvolatile values are live. For functions that cannot return,
inform flow that control does not fall through. */
- if ((flags & (ECF_RETURNS_TWICE | ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
+ if ((flags & (ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
{
- /* The barrier or NOTE_INSN_SETJMP note must be emitted
+ /* The barrier must be emitted
immediately after the CALL_INSN. Some ports emit more
than just a CALL_INSN above, so we must search for it here. */
abort ();
}
- if (flags & ECF_RETURNS_TWICE)
- {
- emit_note_after (NOTE_INSN_SETJMP, last);
- current_function_calls_setjmp = 1;
- }
- else
- emit_barrier_after (last);
+ emit_barrier_after (last);
}
if (flags & ECF_LONGJMP)
\f
/* Output a library call to function FUN (a SYMBOL_REF rtx).
The RETVAL parameter specifies whether return value needs to be saved, other
- parameters are documented in the emit_library_call function bellow. */
+ parameters are documented in the emit_library_call function below. */
static rtx
emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
int retval;
struct args_size args_size;
/* Size of arguments before any adjustments (such as rounding). */
struct args_size original_args_size;
- register int argnum;
+ int argnum;
rtx fun;
int inc;
int count;
case LCT_THROW:
flags = ECF_NORETURN;
break;
+ case LCT_ALWAYS_RETURN:
+ flags = ECF_ALWAYS_RETURN;
+ break;
+ case LCT_RETURNS_TWICE:
+ flags = ECF_RETURNS_TWICE;
+ break;
}
fun = orgfun;
-#ifdef PREFERRED_STACK_BOUNDARY
/* Ensure current function's preferred stack boundary is at least
what we need. */
if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
-#endif
/* If this kind of value comes back in memory,
decide where in memory it should come back. */
assemble_external_libcall (fun);
original_args_size = args_size;
-#ifdef PREFERRED_STACK_BOUNDARY
args_size.constant = (((args_size.constant
+ stack_pointer_delta
+ STACK_BYTES - 1)
/ STACK_BYTES
* STACK_BYTES)
- stack_pointer_delta);
-#endif
args_size.constant = MAX (args_size.constant,
reg_parm_stack_space);
argblock = push_block (GEN_INT (args_size.constant), 0, 0);
}
-#ifdef PREFERRED_STACK_BOUNDARY
/* If we push args individually in reverse order, perform stack alignment
before the first push (the last arg). */
if (argblock == 0 && PUSH_ARGS_REVERSED)
anti_adjust_stack (GEN_INT (args_size.constant
- original_args_size.constant));
-#endif
if (PUSH_ARGS_REVERSED)
{
are to be pushed. */
for (count = 0; count < nargs; count++, argnum += inc)
{
- register enum machine_mode mode = argvec[argnum].mode;
- register rtx val = argvec[argnum].value;
+ enum machine_mode mode = argvec[argnum].mode;
+ rtx val = argvec[argnum].value;
rtx reg = argvec[argnum].reg;
int partial = argvec[argnum].partial;
int lower_bound = 0, upper_bound = 0, i;
}
}
-#ifdef PREFERRED_STACK_BOUNDARY
/* If we pushed args in forward order, perform stack alignment
after pushing the last arg. */
if (argblock == 0 && !PUSH_ARGS_REVERSED)
anti_adjust_stack (GEN_INT (args_size.constant
- original_args_size.constant));
-#endif
if (PUSH_ARGS_REVERSED)
argnum = nargs - 1;
else
argnum = 0;
- fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
+ fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0, 0);
/* Now load any reg parms into their regs. */
are to be pushed. */
for (count = 0; count < nargs; count++, argnum += inc)
{
- register rtx val = argvec[argnum].value;
+ rtx val = argvec[argnum].value;
rtx reg = argvec[argnum].reg;
int partial = argvec[argnum].partial;
valreg = (mem_value == 0 && outmode != VOIDmode
? hard_libcall_value (outmode) : NULL_RTX);
-#ifdef PREFERRED_STACK_BOUNDARY
/* Stack must be properly aligned now. */
if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
abort ();
-#endif
before_call = get_last_insn ();
if nonvolatile values are live. For functions that cannot return,
inform flow that control does not fall through. */
- if (flags & (ECF_RETURNS_TWICE | ECF_NORETURN | ECF_LONGJMP))
+ if (flags & (ECF_NORETURN | ECF_LONGJMP))
{
- /* The barrier or NOTE_INSN_SETJMP note must be emitted
+ /* The barrier must be emitted
immediately after the CALL_INSN. Some ports emit more than
just a CALL_INSN above, so we must search for it here. */
abort ();
}
- if (flags & ECF_RETURNS_TWICE)
- {
- emit_note_after (NOTE_INSN_SETJMP, last);
- current_function_calls_setjmp = 1;
- }
- else
- emit_barrier_after (last);
+ emit_barrier_after (last);
}
/* Now restore inhibit_defer_pop to its actual original value. */
and machine_modes to convert them to.
The rtx values should have been passed through protect_from_queue already.
- FN_TYPE will is zero for `normal' calls, one for `const' calls, wich
- which will be enclosed in REG_LIBCALL/REG_RETVAL notes and two for `pure'
- calls, that are handled like `const' calls with extra
+ FN_TYPE will be zero for `normal' calls, one for `const' calls,
+ which will be enclosed in REG_LIBCALL/REG_RETVAL notes, and two for
+ `pure' calls, that are handled like `const' calls with extra
(use (memory (scratch)). */
void
emit_library_call VPARAMS((rtx orgfun, enum libcall_type fn_type,
enum machine_mode outmode, int nargs, ...))
{
-#ifndef ANSI_PROTOTYPES
- rtx orgfun;
- int fn_type;
- enum machine_mode outmode;
- int nargs;
-#endif
- va_list p;
-
- VA_START (p, nargs);
-
-#ifndef ANSI_PROTOTYPES
- orgfun = va_arg (p, rtx);
- fn_type = va_arg (p, int);
- outmode = va_arg (p, enum machine_mode);
- nargs = va_arg (p, int);
-#endif
+ VA_OPEN (p, nargs);
+ VA_FIXEDARG (p, rtx, orgfun);
+ VA_FIXEDARG (p, int, fn_type);
+ VA_FIXEDARG (p, enum machine_mode, outmode);
+ VA_FIXEDARG (p, int, nargs);
emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
- va_end (p);
+ VA_CLOSE (p);
}
\f
/* Like emit_library_call except that an extra argument, VALUE,
enum libcall_type fn_type,
enum machine_mode outmode, int nargs, ...))
{
-#ifndef ANSI_PROTOTYPES
- rtx orgfun;
- rtx value;
- int fn_type;
- enum machine_mode outmode;
- int nargs;
-#endif
- va_list p;
-
- VA_START (p, nargs);
-
-#ifndef ANSI_PROTOTYPES
- orgfun = va_arg (p, rtx);
- value = va_arg (p, rtx);
- fn_type = va_arg (p, int);
- outmode = va_arg (p, enum machine_mode);
- nargs = va_arg (p, int);
-#endif
+ rtx result;
+
+ VA_OPEN (p, nargs);
+ VA_FIXEDARG (p, rtx, orgfun);
+ VA_FIXEDARG (p, rtx, value);
+ VA_FIXEDARG (p, int, fn_type);
+ VA_FIXEDARG (p, enum machine_mode, outmode);
+ VA_FIXEDARG (p, int, nargs);
- value = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode, nargs, p);
+ result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
+ nargs, p);
- va_end (p);
+ VA_CLOSE (p);
- return value;
+ return result;
}
\f
#if 0
int variable_size ATTRIBUTE_UNUSED;
int reg_parm_stack_space;
{
- register tree pval = arg->tree_value;
+ tree pval = arg->tree_value;
rtx reg = 0;
int partial = 0;
int used = 0;
if (reg != 0 && partial == 0)
/* Being passed entirely in a register. We shouldn't be called in
- this case. */
+ this case. */
abort ();
/* If this arg needs special alignment, don't load the registers
}
else if (arg->mode != BLKmode)
{
- register int size;
+ int size;
/* Argument is a scalar, not entirely passed in registers.
(If part is passed in registers, arg->partial says how much
{
/* BLKmode, at least partly to be pushed. */
- register int excess;
+ int excess;
rtx size_rtx;
/* Pushing a nonscalar.
}
}
- /* If parm is passed both in stack and in register and offset is
- greater than reg_parm_stack_space, split the offset. */
+ /* Special handling is required if part of the parameter lies in the
+ register parameter area. The argument may be copied into the stack
+ slot using memcpy(), but the original contents of the register
+ parameter area will be restored after the memcpy() call.
+
+ To ensure that the part that lies in the register parameter area
+ is copied correctly, we emit a separate push for that part. This
+ push should be small enough to avoid a call to memcpy(). */
+#ifndef STACK_PARMS_IN_REG_PARM_AREA
if (arg->reg && arg->pass_on_stack)
+#else
+ if (1)
+#endif
{
if (arg->offset.constant < reg_parm_stack_space && arg->offset.var)
error ("variable offset is passed paritially in stack and in reg");
{
rtx size_rtx1 = GEN_INT (reg_parm_stack_space - arg->offset.constant);
emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx1,
- TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT,
- partial, reg, excess, argblock,
- ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
+ TYPE_ALIGN (TREE_TYPE (pval)), partial, reg,
+ excess, argblock, ARGS_SIZE_RTX (arg->offset),
+ reg_parm_stack_space,
ARGS_SIZE_RTX (arg->alignment_pad));
-
- size_rtx = GEN_INT (INTVAL(size_rtx) - reg_parm_stack_space);
}
}