/* Convert function calls to rtl insns, for GNU C compiler.
- Copyright (C) 1989, 1992, 1993 Free Software Foundation, Inc.
+ Copyright (C) 1989, 1992, 1993, 1994 Free Software Foundation, Inc.
This file is part of GNU CC.
#include "tree.h"
#include "flags.h"
#include "expr.h"
-#include "gvarargs.h"
+#ifdef __STDC__
+#include <stdarg.h>
+#else
+#include <varargs.h>
+#endif
#include "insn-flags.h"
/* Decide whether a function's arguments should be processed
#ifdef PUSH_ROUNDING
-#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNARD)
+#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED /* If it's last to first */
#endif
int which;
{
register int i;
- int type = TREE_CODE_CLASS (TREE_CODE (exp));
- int length = tree_code_length[(int) TREE_CODE (exp)];
+ enum tree_code code = TREE_CODE (exp);
+ int type = TREE_CODE_CLASS (code);
+ int length = tree_code_length[(int) code];
- /* Only expressions and references can contain calls. */
+ /* If this code is language-specific, we don't know what it will do. */
+ if ((int) code >= NUM_TREE_CODES)
+ return 1;
+ /* Only expressions and references can contain calls. */
if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
&& type != 'b')
return 0;
- switch (TREE_CODE (exp))
+ switch (code)
{
case CALL_EXPR:
if (which == 0)
return 1;
else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
&& (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
- == FUNCTION_DECL)
- && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
- && (DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
- == BUILT_IN_ALLOCA))
- return 1;
+ == FUNCTION_DECL))
+ {
+ tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
+
+ if ((DECL_BUILT_IN (fndecl)
+ && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
+ || (DECL_SAVED_INSNS (fndecl)
+ && (FUNCTION_FLAGS (DECL_SAVED_INSNS (fndecl))
+ & FUNCTION_FLAGS_CALLS_ALLOCA)))
+ return 1;
+ }
/* Third operand is RTL. */
length = 2;
and return that as an rtx. Also load the static chain register
if FNDECL is a nested function.
- USE_INSNS points to a variable holding a chain of USE insns
- to which a USE of the static chain
- register should be added, if required. */
+ CALL_FUSAGE points to a variable holding the prospective
+ CALL_INSN_FUNCTION_USAGE information. */
rtx
-prepare_call_address (funexp, fndecl, use_insns)
+prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
rtx funexp;
tree fndecl;
- rtx *use_insns;
+ rtx *call_fusage;
+ int reg_parm_seen;
{
rtx static_chain_value = 0;
/* Make a valid memory address and copy constants thru pseudo-regs,
but not for a constant address if -fno-function-cse. */
if (GET_CODE (funexp) != SYMBOL_REF)
- funexp = memory_address (FUNCTION_MODE, funexp);
+ funexp =
+#ifdef SMALL_REGISTER_CLASSES
+ /* If we are using registers for parameters, force the
+ function address into a register now. */
+ reg_parm_seen ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
+ :
+#endif
+ memory_address (FUNCTION_MODE, funexp);
else
{
#ifndef NO_FUNCTION_CSE
{
emit_move_insn (static_chain_rtx, static_chain_value);
- /* Put the USE insn in the chain we were passed. It will later be
- output immediately in front of the CALL insn. */
- push_to_sequence (*use_insns);
- emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
- *use_insns = get_insns ();
- end_sequence ();
+ use_reg (call_fusage, static_chain_rtx);
}
return funexp;
the args to this call were processed.
We restore `inhibit_defer_pop' to that value.
- USE_INSNS is a chain of USE insns to be emitted immediately before
- the actual CALL insn.
+ CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
+ denote registers used by the called function.
IS_CONST is true if this is a `const' call. */
static void
emit_call_1 (funexp, funtype, stack_size, struct_value_size, next_arg_reg,
- valreg, old_inhibit_defer_pop, use_insns, is_const)
+ valreg, old_inhibit_defer_pop, call_fusage, is_const)
rtx funexp;
tree funtype;
int stack_size;
rtx next_arg_reg;
rtx valreg;
int old_inhibit_defer_pop;
- rtx use_insns;
+ rtx call_fusage;
int is_const;
{
rtx stack_size_rtx = GEN_INT (stack_size);
#endif
abort ();
- /* Find the CALL insn we just emitted and write the USE insns before it. */
+ /* Find the CALL insn we just emitted. */
for (call_insn = get_last_insn ();
call_insn && GET_CODE (call_insn) != CALL_INSN;
call_insn = PREV_INSN (call_insn))
if (! call_insn)
abort ();
- /* Put the USE insns before the CALL. */
- emit_insns_before (use_insns, call_insn);
+ /* Put the register usage information on the CALL. If there is already
+ some usage information, put ours at the end. */
+ if (CALL_INSN_FUNCTION_USAGE (call_insn))
+ {
+ rtx link;
+
+ for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
+ link = XEXP (link, 1))
+ ;
+
+ XEXP (link, 1) = call_fusage;
+ }
+ else
+ CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
/* If this is a const call, then set the insn's unchanging bit. */
if (is_const)
if (stack_size != 0 && RETURN_POPS_ARGS (funtype, stack_size) > 0)
{
if (!already_popped)
- emit_insn (gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx));
+ CALL_INSN_FUNCTION_USAGE (call_insn) =
+ gen_rtx (EXPR_LIST, CLOBBER, stack_pointer_rtx,
+ CALL_INSN_FUNCTION_USAGE (call_insn));
stack_size -= RETURN_POPS_ARGS (funtype, stack_size);
stack_size_rtx = GEN_INT (stack_size);
}
#endif
rtx old_stack_level = 0;
- int old_pending_adj;
+ int old_pending_adj = 0;
int old_stack_arg_under_construction;
int old_inhibit_defer_pop = inhibit_defer_pop;
tree old_cleanups = cleanups_this_call;
-
- rtx use_insns = 0;
-
+ rtx call_fusage = 0;
register tree p;
register int i, j;
{
fndecl = TREE_OPERAND (p, 0);
if (TREE_CODE (fndecl) != FUNCTION_DECL)
- {
- /* May still be a `const' function if it is
- a call through a pointer-to-const.
- But we don't handle that. */
- fndecl = 0;
- }
+ fndecl = 0;
else
{
if (!flag_no_inline
Use abstraction instead of setting TREE_ADDRESSABLE
directly. */
- if (DECL_INLINE (fndecl) && extra_warnings && warn_inline
- && !flag_no_inline)
- warning_with_decl (fndecl, "can't inline call to `%s' which was declared inline");
+ if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline)
+ warning_with_decl (fndecl, "can't inline call to `%s'");
mark_addressable (fndecl);
}
if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
&& TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
is_const = 1;
+
+ if (TREE_THIS_VOLATILE (fndecl))
+ is_volatile = 1;
}
}
- is_volatile = TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
+ /* If we don't have specific function to call, see if we have a
+ constant or `noreturn' function from the type. */
+ if (fndecl == 0)
+ {
+ is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
+ is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
+ }
#ifdef REG_PARM_STACK_SPACE
#ifdef MAYBE_REG_PARM_STACK_SPACE
/* Warn if this value is an aggregate type,
regardless of which calling convention we are using for it. */
- if (warn_aggregate_return
- && (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
- || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
- || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
- || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE))
+ if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
warning ("function call has aggregate value");
/* Set up a place to return a structure. */
#ifdef PCC_STATIC_STRUCT_RETURN
{
pcc_struct_value = 1;
- is_integrable = 0; /* Easier than making that case work right. */
+ /* Easier than making that case work right. */
+ if (is_integrable)
+ {
+ /* In case this is a static function, note that it has been
+ used. */
+ if (! TREE_ADDRESSABLE (fndecl))
+ mark_addressable (fndecl);
+ is_integrable = 0;
+ }
}
#else /* not PCC_STATIC_STRUCT_RETURN */
{
/* If inlining succeeded, return. */
if ((HOST_WIDE_INT) temp != -1)
{
- /* Perform all cleanups needed for the arguments of this call
- (i.e. destructors in C++). It is ok if these destructors
- clobber RETURN_VALUE_REG, because the only time we care about
- this is when TARGET is that register. But in C++, we take
- care to never return that register directly. */
- expand_cleanups_to (old_cleanups);
+ if (flag_short_temps)
+ {
+ /* Perform all cleanups needed for the arguments of this
+ call (i.e. destructors in C++). It is ok if these
+ destructors clobber RETURN_VALUE_REG, because the
+ only time we care about this is when TARGET is that
+ register. But in C++, we take care to never return
+ that register directly. */
+ expand_cleanups_to (old_cleanups);
+ }
#ifdef ACCUMULATE_OUTGOING_ARGS
/* If the outgoing argument list must be preserved, push
}
/* If inlining failed, mark FNDECL as needing to be compiled
- separately after all. */
+ separately after all. If function was declared inline,
+ give a warning. */
+ if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
+ && ! TREE_ADDRESSABLE (fndecl))
+ warning_with_decl (fndecl, "can't inline call to `%s'");
mark_addressable (fndecl);
}
{
char *tname = name;
+ /* Disregard prefix _, __ or __x. */
if (name[0] == '_')
- tname += ((name[1] == '_' && name[2] == 'x') ? 3 : 1);
+ {
+ if (name[1] == '_' && name[2] == 'x')
+ tname += 3;
+ else if (name[1] == '_')
+ tname += 2;
+ else
+ tname += 1;
+ }
if (tname[0] == 's')
{
abort ();
funtype = TREE_TYPE (funtype);
- /* Push the temporary stack slot level so that we can free temporaries used
- by each of the arguments separately. */
+ /* Push the temporary stack slot level so that we can free any temporaries
+ we make. */
push_temp_slots ();
/* Start updating where the next arg would go. */
/* Make a vector to hold all the information about each arg. */
args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
- bzero (args, num_actuals * sizeof (struct arg_data));
+ bzero ((char *) args, num_actuals * sizeof (struct arg_data));
args_size.constant = 0;
args_size.var = 0;
for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
{
tree type = TREE_TYPE (TREE_VALUE (p));
+ int unsignedp;
enum machine_mode mode;
args[i].tree_value = TREE_VALUE (p);
These decisions are driven by the FUNCTION_... macros and must agree
with those made by function.c. */
-#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
/* See if this argument should be passed by invisible reference. */
- if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, TYPE_MODE (type), type,
- argpos < n_named_args))
+ if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
+ && contains_placeholder_p (TYPE_SIZE (type)))
+#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
+ || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, TYPE_MODE (type),
+ type, argpos < n_named_args)
+#endif
+ )
{
#ifdef FUNCTION_ARG_CALLEE_COPIES
if (FUNCTION_ARG_CALLEE_COPIES (args_so_far, TYPE_MODE (type), type,
copy = assign_stack_temp (TYPE_MODE (type), size, 1);
}
+ MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
+
store_expr (args[i].tree_value, copy, 0);
args[i].tree_value = build1 (ADDR_EXPR,
type = build_pointer_type (type);
}
}
-#endif /* FUNCTION_ARG_PASS_BY_REFERENCE */
mode = TYPE_MODE (type);
+ unsignedp = TREE_UNSIGNED (type);
#ifdef PROMOTE_FUNCTION_ARGS
- /* Compute the mode in which the arg is actually to be extended to. */
- if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
- || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
- || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
- || TREE_CODE (type) == OFFSET_TYPE)
- {
- int unsignedp = TREE_UNSIGNED (type);
- PROMOTE_MODE (mode, unsignedp, type);
- args[i].unsignedp = unsignedp;
- }
+ mode = promote_mode (type, mode, &unsignedp, 1);
#endif
+ args[i].unsignedp = unsignedp;
args[i].mode = mode;
args[i].reg = FUNCTION_ARG (args_so_far, mode, type,
argpos < n_named_args);
|| (must_preallocate && (args_size.var != 0 || args_size.constant != 0)
&& calls_function (args[i].tree_value, 0)))
{
+ push_temp_slots ();
+
args[i].initial_value = args[i].value
= expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
- if (GET_MODE (args[i].value ) != VOIDmode
- && GET_MODE (args[i].value) != args[i].mode)
- args[i].value = convert_to_mode (args[i].mode, args[i].value,
- args[i].unsignedp);
- preserve_temp_slots (args[i].value);
+ if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
+ args[i].value
+ = convert_modes (args[i].mode,
+ TYPE_MODE (TREE_TYPE (args[i].tree_value)),
+ args[i].value, args[i].unsignedp);
- free_temp_slots ();
+ preserve_temp_slots (args[i].value);
+ pop_temp_slots ();
/* ANSI doesn't require a sequence point here,
but PCC has one, so this will avoid some problems. */
addr = plus_constant (addr, arg_offset);
args[i].stack = gen_rtx (MEM, args[i].mode, addr);
+ MEM_IN_STRUCT_P (args[i].stack)
+ = AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value));
if (GET_CODE (slot_offset) == CONST_INT)
addr = plus_constant (arg_reg, INTVAL (slot_offset));
else
/* Generate an rtx (probably a pseudo-register) for the address. */
{
+ push_temp_slots ();
funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
- free_temp_slots (); /* FUNEXP can't be BLKmode */
+ pop_temp_slots (); /* FUNEXP can't be BLKmode */
emit_queue ();
}
if (args[i].value == 0)
{
+ push_temp_slots ();
args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
VOIDmode, 0);
preserve_temp_slots (args[i].value);
- free_temp_slots ();
+ pop_temp_slots ();
/* ANSI doesn't require a sequence point here,
but PCC has one, so this will avoid some problems. */
/* If we are to promote the function arg to a wider mode,
do it now. */
- if (GET_MODE (args[i].value) != VOIDmode
- && GET_MODE (args[i].value) != args[i].mode)
- args[i].value = convert_to_mode (args[i].mode, args[i].value,
- args[i].unsignedp);
+ if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
+ args[i].value
+ = convert_modes (args[i].mode,
+ TYPE_MODE (TREE_TYPE (args[i].tree_value)),
+ args[i].value, args[i].unsignedp);
+
+ /* If the value is expensive, and we are inside an appropriately
+ short loop, put the value into a pseudo and then put the pseudo
+ into the hard reg.
+
+ For small register classes, also do this if this call uses
+ register parameters. This is to avoid reload conflicts while
+ loading the parameters registers. */
+
+ if ((! (GET_CODE (args[i].value) == REG
+ || (GET_CODE (args[i].value) == SUBREG
+ && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
+ && args[i].mode != BLKmode
+ && rtx_cost (args[i].value, SET) > 2
+#ifdef SMALL_REGISTER_CLASSES
+ && (reg_parm_seen || preserve_subexpressions_p ()))
+#else
+ && preserve_subexpressions_p ())
+#endif
+ args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
}
#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
< MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
{
int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
+ int big_endian_correction = 0;
args[i].n_aligned_regs
= args[i].partial ? args[i].partial
args[i].aligned_regs = (rtx *) alloca (sizeof (rtx)
* args[i].n_aligned_regs);
+ /* Structures smaller than a word are aligned to the least significant
+ byte (to the right). On a BYTES_BIG_ENDIAN machine, this means we
+ must skip the empty high order bytes when calculating the bit
+ offset. */
+ if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
+ big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
+
for (j = 0; j < args[i].n_aligned_regs; j++)
{
rtx reg = gen_reg_rtx (word_mode);
bitpos < BITS_PER_WORD && bytes > 0;
bitpos += bitsize, bytes -= bitsize / BITS_PER_UNIT)
{
- int xbitpos = (BYTES_BIG_ENDIAN
- ? BITS_PER_WORD - bitpos - bitsize
- : bitpos);
+ int xbitpos = bitpos + big_endian_correction;
store_bit_field (reg, bitsize, xbitpos, word_mode,
- extract_bit_field (word, bitsize, xbitpos, 1,
+ extract_bit_field (word, bitsize, bitpos, 1,
NULL_RTX, word_mode,
word_mode,
bitsize / BITS_PER_UNIT,
force_operand (structure_value_addr,
NULL_RTX)));
if (GET_CODE (struct_value_rtx) == REG)
- {
- push_to_sequence (use_insns);
- emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
- use_insns = get_insns ();
- end_sequence ();
- }
+ use_reg (&call_fusage, struct_value_rtx);
}
+ funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
+
/* Now do the register loads required for any wholly-register parms or any
parms which are passed both on the stack and in a register. Their
expressions were already evaluated.
Mark all register-parms as living through the call, putting these USE
- insns in a list headed by USE_INSNS. */
+ insns in the CALL_INSN_FUNCTION_USAGE field. */
for (i = 0; i < num_actuals; i++)
{
else
reg = list, list = 0;
- /* Set to non-zero if must move a word at a time, even if just one
- word (e.g, partial == 1 && mode == DFmode). Set to zero if
- we just use a normal move insn. */
+ /* Set to non-negative if must move a word at a time, even if just
+ one word (e.g, partial == 1 && mode == DFmode). Set to -1 if
+ we just use a normal move insn. This value can be zero if the
+ argument is a zero size structure with no fields. */
nregs = (partial ? partial
: (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
- : 0));
+ : -1));
/* If simple case, just do move. If normal partial, store_one_arg
has already loaded the register for us. In all other cases,
load the register(s) from memory. */
- if (nregs == 0)
+ if (nregs == -1)
emit_move_insn (reg, args[i].value);
#ifdef STRICT_ALIGNMENT
move_block_to_reg (REGNO (reg),
validize_mem (args[i].value), nregs,
args[i].mode);
-
- push_to_sequence (use_insns);
- if (nregs == 0)
- emit_insn (gen_rtx (USE, VOIDmode, reg));
+
+ if (nregs == -1)
+ use_reg (&call_fusage, reg);
else
- use_regs (REGNO (reg), nregs);
- use_insns = get_insns ();
- end_sequence ();
+ use_regs (&call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
/* PARTIAL referred only to the first register, so clear it for the
next time. */
/* All arguments and registers used for the call must be set up by now! */
- funexp = prepare_call_address (funexp, fndecl, &use_insns);
-
/* Generate the actual call instruction. */
emit_call_1 (funexp, funtype, args_size.constant, struct_value_size,
FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
- valreg, old_inhibit_defer_pop, use_insns, is_const);
+ valreg, old_inhibit_defer_pop, call_fusage, is_const);
/* If call is cse'able, make appropriate pair of reg-notes around it.
Test valreg so we don't crash; may safely ignore `const'
target = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
memory_address (TYPE_MODE (TREE_TYPE (exp)),
structure_value_addr));
- MEM_IN_STRUCT_P (target)
- = (TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
- || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
- || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
- || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE);
+ MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
}
}
else if (pcc_struct_value)
int_size_in_bytes (TREE_TYPE (exp)),
0);
+ MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
+
/* Save this temp slot around the pop below. */
preserve_temp_slots (target);
}
if (GET_CODE (target) == REG
&& GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
{
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
- int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
-
- if (TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE
- || TREE_CODE (TREE_TYPE (exp)) == ENUMERAL_TYPE
- || TREE_CODE (TREE_TYPE (exp)) == BOOLEAN_TYPE
- || TREE_CODE (TREE_TYPE (exp)) == CHAR_TYPE
- || TREE_CODE (TREE_TYPE (exp)) == REAL_TYPE
- || TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE
- || TREE_CODE (TREE_TYPE (exp)) == OFFSET_TYPE)
- {
- PROMOTE_MODE (mode, unsignedp, TREE_TYPE (exp));
- }
+ tree type = TREE_TYPE (exp);
+ int unsignedp = TREE_UNSIGNED (type);
- /* If we didn't promote as expected, something is wrong. */
- if (mode != GET_MODE (target))
+ /* If we don't promote as expected, something is wrong. */
+ if (GET_MODE (target)
+ != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
abort ();
- target = gen_rtx (SUBREG, TYPE_MODE (TREE_TYPE (exp)), target, 0);
+ target = gen_rtx (SUBREG, TYPE_MODE (type), target, 0);
SUBREG_PROMOTED_VAR_P (target) = 1;
SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
}
#endif
- /* Perform all cleanups needed for the arguments of this call
- (i.e. destructors in C++). */
- expand_cleanups_to (old_cleanups);
+ if (flag_short_temps)
+ {
+ /* Perform all cleanups needed for the arguments of this call
+ (i.e. destructors in C++). */
+ expand_cleanups_to (old_cleanups);
+ }
/* If size of args is variable or this was a constructor call for a stack
argument, restore saved stack-pointer value. */
move memory references across the non-const call. */
void
-emit_library_call (va_alist)
- va_dcl
+emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
+ int nargs, ...))
{
+#ifndef __STDC__
+ rtx orgfun;
+ int no_queue;
+ enum machine_mode outmode;
+ int nargs;
+#endif
va_list p;
/* Total size in bytes of all the stack-parms scanned so far. */
struct args_size args_size;
/* Size of arguments before any adjustments (such as rounding). */
struct args_size original_args_size;
register int argnum;
- enum machine_mode outmode;
- int nargs;
rtx fun;
- rtx orgfun;
int inc;
int count;
rtx argblock = 0;
struct args_size offset; struct args_size size; };
struct arg *argvec;
int old_inhibit_defer_pop = inhibit_defer_pop;
- int no_queue = 0;
- rtx use_insns;
+ rtx call_fusage = 0;
/* library calls are never indirect calls. */
int current_call_is_indirect = 0;
- va_start (p);
- orgfun = fun = va_arg (p, rtx);
+ VA_START (p, nargs);
+
+#ifndef __STDC__
+ orgfun = va_arg (p, rtx);
no_queue = va_arg (p, int);
outmode = va_arg (p, enum machine_mode);
nargs = va_arg (p, int);
+#endif
+
+ fun = orgfun;
/* Copy all the libcall-arguments out of the varargs data
and into a vector ARGVEC.
args_size.constant = 0;
args_size.var = 0;
+ push_temp_slots ();
+
for (count = 0; count < nargs; count++)
{
rtx val = va_arg (p, rtx);
Pass it as a double instead. */
#ifdef LIBGCC_NEEDS_DOUBLE
if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
- val = convert_to_mode (DFmode, val, 0), mode = DFmode;
+ val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
#endif
/* There's no need to call protect_from_queue, because
&& ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
val = force_operand (val, NULL_RTX);
- argvec[count].value = val;
- argvec[count].mode = mode;
-
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
- abort ();
+ {
+ /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
+ be viewed as just an efficiency improvement. */
+ rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
+ emit_move_insn (slot, val);
+ val = XEXP (slot, 0);
+ mode = Pmode;
+ }
#endif
+ argvec[count].value = val;
+ argvec[count].mode = mode;
+
argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
abort ();
argnum = 0;
#endif
+ fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
+
/* Now load any reg parms into their regs. */
for (count = 0; count < nargs; count++, argnum += inc)
emit_queue ();
/* Any regs containing parms remain in use through the call. */
- start_sequence ();
for (count = 0; count < nargs; count++)
if (argvec[count].reg != 0)
- emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
-
- use_insns = get_insns ();
- end_sequence ();
-
- fun = prepare_call_address (fun, NULL_TREE, &use_insns);
+ use_reg (&call_fusage, argvec[count].reg);
/* Don't allow popping to be deferred, since then
cse'ing of library calls could delete a call and leave the pop. */
emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
- old_inhibit_defer_pop + 1, use_insns, no_queue);
+ old_inhibit_defer_pop + 1, call_fusage, no_queue);
+
+ pop_temp_slots ();
/* Now restore inhibit_defer_pop to its actual original value. */
OK_DEFER_POP;
\f
/* Like emit_library_call except that an extra argument, VALUE,
comes second and says where to store the result.
- (If VALUE is zero, the result comes in the function value register.) */
+ (If VALUE is zero, this function chooses a convenient way
+ to return the value.
-void
-emit_library_call_value (va_alist)
- va_dcl
+ This function returns an rtx for where the value is to be found.
+ If VALUE is nonzero, VALUE is returned. */
+
+rtx
+emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
+ enum machine_mode outmode, int nargs, ...))
{
+#ifndef __STDC__
+ rtx orgfun;
+ rtx value;
+ int no_queue;
+ enum machine_mode outmode;
+ int nargs;
+#endif
va_list p;
/* Total size in bytes of all the stack-parms scanned so far. */
struct args_size args_size;
/* Size of arguments before any adjustments (such as rounding). */
struct args_size original_args_size;
register int argnum;
- enum machine_mode outmode;
- int nargs;
rtx fun;
- rtx orgfun;
int inc;
int count;
rtx argblock = 0;
struct args_size offset; struct args_size size; };
struct arg *argvec;
int old_inhibit_defer_pop = inhibit_defer_pop;
- int no_queue = 0;
- rtx use_insns;
- rtx value;
+ rtx call_fusage = 0;
rtx mem_value = 0;
+ int pcc_struct_value = 0;
+ int struct_value_size = 0;
/* library calls are never indirect calls. */
int current_call_is_indirect = 0;
+ int is_const;
+
+ VA_START (p, nargs);
- va_start (p);
- orgfun = fun = va_arg (p, rtx);
+#ifndef __STDC__
+ orgfun = va_arg (p, rtx);
value = va_arg (p, rtx);
no_queue = va_arg (p, int);
outmode = va_arg (p, enum machine_mode);
nargs = va_arg (p, int);
+#endif
+
+ is_const = no_queue;
+ fun = orgfun;
/* If this kind of value comes back in memory,
decide where in memory it should come back. */
- if (RETURN_IN_MEMORY (type_for_mode (outmode, 0)))
+ if (aggregate_value_p (type_for_mode (outmode, 0)))
{
- if (GET_CODE (value) == MEM)
+#ifdef PCC_STATIC_STRUCT_RETURN
+ rtx pointer_reg
+ = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
+ 0);
+ mem_value = gen_rtx (MEM, outmode, pointer_reg);
+ pcc_struct_value = 1;
+ if (value == 0)
+ value = gen_reg_rtx (outmode);
+#else /* not PCC_STATIC_STRUCT_RETURN */
+ struct_value_size = GET_MODE_SIZE (outmode);
+ if (value != 0 && GET_CODE (value) == MEM)
mem_value = value;
else
mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
+#endif
+
+ /* This call returns a big structure. */
+ is_const = 0;
}
/* ??? Unfinished: must pass the memory address as an argument. */
count = 0;
+ push_temp_slots ();
+
/* If there's a structure value address to be passed,
either pass it in the special place, or pass it as an extra argument. */
- if (mem_value)
+ if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
{
rtx addr = XEXP (mem_value, 0);
+ nargs++;
- if (! struct_value_rtx)
- {
- nargs++;
-
- /* Make sure it is a reasonable operand for a move or push insn. */
- if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
- && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
- addr = force_operand (addr, NULL_RTX);
+ /* Make sure it is a reasonable operand for a move or push insn. */
+ if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
+ && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
+ addr = force_operand (addr, NULL_RTX);
- argvec[count].value = addr;
- argvec[count].mode = outmode;
- argvec[count].partial = 0;
+ argvec[count].value = addr;
+ argvec[count].mode = Pmode;
+ argvec[count].partial = 0;
- argvec[count].reg = FUNCTION_ARG (args_so_far, outmode, NULL_TREE, 1);
+ argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
#ifdef FUNCTION_ARG_PARTIAL_NREGS
- if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, outmode, NULL_TREE, 1))
- abort ();
+ if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
+ abort ();
#endif
- locate_and_pad_parm (outmode, NULL_TREE,
- argvec[count].reg && argvec[count].partial == 0,
- NULL_TREE, &args_size, &argvec[count].offset,
- &argvec[count].size);
+ locate_and_pad_parm (Pmode, NULL_TREE,
+ argvec[count].reg && argvec[count].partial == 0,
+ NULL_TREE, &args_size, &argvec[count].offset,
+ &argvec[count].size);
- if (argvec[count].reg == 0 || argvec[count].partial != 0
+ if (argvec[count].reg == 0 || argvec[count].partial != 0
#ifdef REG_PARM_STACK_SPACE
- || 1
+ || 1
#endif
- )
- args_size.constant += argvec[count].size.constant;
+ )
+ args_size.constant += argvec[count].size.constant;
- FUNCTION_ARG_ADVANCE (args_so_far, outmode, (tree)0, 1);
- }
+ FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree)0, 1);
+
+ count++;
}
for (; count < nargs; count++)
Pass it as a double instead. */
#ifdef LIBGCC_NEEDS_DOUBLE
if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
- val = convert_to_mode (DFmode, val, 0), mode = DFmode;
+ val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
#endif
/* There's no need to call protect_from_queue, because
&& ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
val = force_operand (val, NULL_RTX);
- argvec[count].value = val;
- argvec[count].mode = mode;
-
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
- abort ();
+ {
+ /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
+ be viewed as just an efficiency improvement. */
+ rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
+ emit_move_insn (slot, val);
+ val = XEXP (slot, 0);
+ mode = Pmode;
+ }
#endif
+ argvec[count].value = val;
+ argvec[count].mode = mode;
+
argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
abort ();
argnum = 0;
#endif
- /* Now load any reg parms into their regs. */
+ fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
- if (mem_value != 0 && struct_value_rtx != 0)
- emit_move_insn (struct_value_rtx, XEXP (mem_value, 0));
+ /* Now load any reg parms into their regs. */
for (count = 0; count < nargs; count++, argnum += inc)
{
#endif
/* Any regs containing parms remain in use through the call. */
- start_sequence ();
for (count = 0; count < nargs; count++)
if (argvec[count].reg != 0)
- emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
-
- use_insns = get_insns ();
- end_sequence ();
+ use_reg (&call_fusage, argvec[count].reg);
- fun = prepare_call_address (fun, NULL_TREE, &use_insns);
+ /* Pass the function the address in which to return a structure value. */
+ if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
+ {
+ emit_move_insn (struct_value_rtx,
+ force_reg (Pmode,
+ force_operand (XEXP (mem_value, 0),
+ NULL_RTX)));
+ if (GET_CODE (struct_value_rtx) == REG)
+ use_reg (&call_fusage, struct_value_rtx);
+ }
/* Don't allow popping to be deferred, since then
cse'ing of library calls could delete a call and leave the pop. */
/* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
will set inhibit_defer_pop to that value. */
- emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
+ emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant,
+ struct_value_size,
FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
- outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
- old_inhibit_defer_pop + 1, use_insns, no_queue);
+ (outmode != VOIDmode && mem_value == 0
+ ? hard_libcall_value (outmode) : NULL_RTX),
+ old_inhibit_defer_pop + 1, call_fusage, is_const);
/* Now restore inhibit_defer_pop to its actual original value. */
OK_DEFER_POP;
+ pop_temp_slots ();
+
/* Copy the value to the right place. */
if (outmode != VOIDmode)
{
if (mem_value)
{
if (value == 0)
- value = hard_libcall_value (outmode);
+ value = mem_value;
if (value != mem_value)
emit_move_insn (value, mem_value);
}
else if (value != 0)
emit_move_insn (value, hard_libcall_value (outmode));
+ else
+ value = hard_libcall_value (outmode);
}
+
+ return value;
}
\f
#if 0
if (TREE_CODE (pval) == ERROR_MARK)
return;
+ /* Push a new temporary level for any temporaries we make for
+ this argument. */
+ push_temp_slots ();
+
#ifdef ACCUMULATE_OUTGOING_ARGS
/* If this is being stored into a pre-allocated, fixed-size, stack area,
save any previous data at that location. */
{
arg->save_area = assign_stack_temp (BLKmode,
arg->size.constant, 1);
+ preserve_temp_slots (arg->save_area);
emit_block_move (validize_mem (arg->save_area), stack_area,
GEN_INT (arg->size.constant),
PARM_BOUNDARY / BITS_PER_UNIT);
if (arg->pass_on_stack)
stack_arg_under_construction++;
#endif
- arg->value = expand_expr (pval, partial ? NULL_RTX : arg->stack,
+ arg->value = expand_expr (pval,
+ (partial
+ || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
+ ? NULL_RTX : arg->stack,
VOIDmode, 0);
/* If we are promoting object (or for any other reason) the mode
doesn't agree, convert the mode. */
- if (GET_MODE (arg->value) != VOIDmode
- && GET_MODE (arg->value) != arg->mode)
- arg->value = convert_to_mode (arg->mode, arg->value, arg->unsignedp);
+ if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
+ arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
+ arg->value, arg->unsignedp);
#ifdef ACCUMULATE_OUTGOING_ARGS
if (arg->pass_on_stack)
/* Free any temporary slots made in processing this argument. */
free_temp_slots ();
+ pop_temp_slots ();
#ifdef ACCUMULATE_OUTGOING_ARGS
/* Now mark the segment we just used. */