argument list for the constructor call. */
int stack_arg_under_construction;
-static int calls_function (tree, int);
-static int calls_function_1 (tree, int);
-
static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
HOST_WIDE_INT, rtx, rtx, int, rtx, int,
CUMULATIVE_ARGS *);
static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
- int);
-static tree fix_unsafe_tree (tree);
+ unsigned int);
static bool shift_returned_value (tree, rtx *);
#ifdef REG_PARM_STACK_SPACE
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif
\f
-/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
- `alloca'.
-
- If WHICH is 0, return 1 if EXP contains a call to any function.
- Actually, we only need return 1 if evaluating EXP would require pushing
- arguments on the stack, but that is too difficult to compute, so we just
- assume any function call might require the stack. */
-
-static tree calls_function_save_exprs;
-
-static int
-calls_function (tree exp, int which)
-{
- int val;
-
- calls_function_save_exprs = 0;
- val = calls_function_1 (exp, which);
- calls_function_save_exprs = 0;
- return val;
-}
-
-/* Recursive function to do the work of above function. */
-
-static int
-calls_function_1 (tree exp, int which)
-{
- int i;
- enum tree_code code = TREE_CODE (exp);
- int class = TREE_CODE_CLASS (code);
- int length = first_rtl_op (code);
-
- /* If this code is language-specific, we don't know what it will do. */
- if ((int) code >= NUM_TREE_CODES)
- return 1;
-
- switch (code)
- {
- case CALL_EXPR:
- if (which == 0)
- return 1;
- else if ((TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
- == FUNCTION_TYPE)
- && (TYPE_RETURNS_STACK_DEPRESSED
- (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
- return 1;
- else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
- && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
- == FUNCTION_DECL)
- && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
- 0)
- & ECF_MAY_BE_ALLOCA))
- return 1;
-
- break;
-
- case CONSTRUCTOR:
- {
- tree tem;
-
- for (tem = CONSTRUCTOR_ELTS (exp); tem != 0; tem = TREE_CHAIN (tem))
- if (calls_function_1 (TREE_VALUE (tem), which))
- return 1;
- }
-
- return 0;
-
- case SAVE_EXPR:
- if (SAVE_EXPR_RTL (exp) != 0)
- return 0;
- if (value_member (exp, calls_function_save_exprs))
- return 0;
- calls_function_save_exprs = tree_cons (NULL_TREE, exp,
- calls_function_save_exprs);
- return (TREE_OPERAND (exp, 0) != 0
- && calls_function_1 (TREE_OPERAND (exp, 0), which));
-
- case BLOCK:
- {
- tree local;
- tree subblock;
-
- for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
- if (DECL_INITIAL (local) != 0
- && calls_function_1 (DECL_INITIAL (local), which))
- return 1;
-
- for (subblock = BLOCK_SUBBLOCKS (exp);
- subblock;
- subblock = TREE_CHAIN (subblock))
- if (calls_function_1 (subblock, which))
- return 1;
- }
- return 0;
-
- case TREE_LIST:
- for (; exp != 0; exp = TREE_CHAIN (exp))
- if (calls_function_1 (TREE_VALUE (exp), which))
- return 1;
- return 0;
-
- default:
- break;
- }
-
- /* Only expressions and blocks can contain calls.
- Blocks were handled above. */
- if (! IS_EXPR_CODE_CLASS (class))
- return 0;
-
- for (i = 0; i < length; i++)
- if (TREE_OPERAND (exp, i) != 0
- && calls_function_1 (TREE_OPERAND (exp, i), which))
- return 1;
-
- return 0;
-}
-\f
/* Force FUNEXP into a form suitable for the address of a CALL,
and return that as an rtx. Also load the static chain register
if FNDECL is a nested function.
prepare_call_address (rtx funexp, rtx static_chain_value,
rtx *call_fusage, int reg_parm_seen, int sibcallp)
{
- funexp = protect_from_queue (funexp, 0);
-
/* Make a valid memory address and copy constants through pseudo-regs,
but not for a constant address if -fno-function-cse. */
if (GET_CODE (funexp) != SYMBOL_REF)
if (static_chain_value != 0)
{
+ static_chain_value = convert_memory_address (Pmode, static_chain_value);
emit_move_insn (static_chain_rtx, static_chain_value);
if (REG_P (static_chain_rtx))
hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
because you can declare fork() inside a function if you
wish. */
- && (DECL_CONTEXT (fndecl) == NULL_TREE
+ && (DECL_CONTEXT (fndecl) == NULL_TREE
|| TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
&& TREE_PUBLIC (fndecl))
{
VOIDmode, 0);
preserve_temp_slots (args[i].value);
pop_temp_slots ();
-
- /* ANSI doesn't require a sequence point here,
- but PCC has one, so this will avoid some problems. */
- emit_queue ();
}
/* If the value is a non-legitimate constant, force it into a
&& args[i].mode != BLKmode
&& rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
&& ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
- || preserve_subexpressions_p ()))
+ || optimize))
args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
}
}
args[i].aligned_regs[j] = reg;
word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
- word_mode, word_mode, BITS_PER_WORD);
+ word_mode, word_mode);
/* There is no need to restrict this code to loading items
in TYPE_ALIGN sized hunks. The bitfield instructions can
bytes -= bitsize / BITS_PER_UNIT;
store_bit_field (reg, bitsize, endian_correction, word_mode,
- word, BITS_PER_WORD);
+ word);
}
}
}
and may be modified by this routine.
OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
- flags which may may be modified by this routine.
+ flags which may be modified by this routine.
MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
that requires allocation of stack space.
with those made by function.c. */
/* See if this argument should be passed by invisible reference. */
- if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
- || TREE_ADDRESSABLE (type)
-#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
- || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
- type, argpos < n_named_args)
-#endif
- )
+ if (pass_by_reference (args_so_far, TYPE_MODE (type),
+ type, argpos < n_named_args))
{
/* If we're compiling a thunk, pass through invisible
references instead of making a copy. */
if (call_from_thunk_p
-#ifdef FUNCTION_ARG_CALLEE_COPIES
|| (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
type, argpos < n_named_args)
/* If it's in a register, we must make a copy of it too. */
&& !(TREE_CODE (args[i].tree_value) == VAR_DECL
&& REG_P (DECL_RTL (args[i].tree_value)))
&& ! TREE_ADDRESSABLE (type))
-#endif
)
{
/* C++ uses a TARGET_EXPR to indicate that we want to make a
args[i].tail_call_reg = args[i].reg;
#endif
-#ifdef FUNCTION_ARG_PARTIAL_NREGS
if (args[i].reg)
args[i].partial
= FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
argpos < n_named_args);
-#endif
- args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
+ args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
/* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
it means that we are to pass this arg in the register(s) designated
int i;
/* If this is a libcall, then precompute all arguments so that we do not
- get extraneous instructions emitted as part of the libcall sequence.
-
- If this target defines ACCUMULATE_OUTGOING_ARGS to true, then we must
- precompute all arguments that contain function calls. Otherwise,
- computing arguments for a subcall may clobber arguments for this call.
-
- If this target defines ACCUMULATE_OUTGOING_ARGS to false, then we only
- need to precompute arguments that change the stack pointer, such as calls
- to alloca, and calls that do not pop all of their arguments. */
+ get extraneous instructions emitted as part of the libcall sequence. */
+ if ((flags & ECF_LIBCALL_BLOCK) == 0)
+ return;
for (i = 0; i < num_actuals; i++)
- if ((flags & ECF_LIBCALL_BLOCK)
- || calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS))
- {
- enum machine_mode mode;
-
- /* If this is an addressable type, we cannot pre-evaluate it. */
- if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
- abort ();
-
- args[i].value
- = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
+ {
+ enum machine_mode mode;
- /* ANSI doesn't require a sequence point here,
- but PCC has one, so this will avoid some problems. */
- emit_queue ();
+ /* If this is an addressable type, we cannot pre-evaluate it. */
+ if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
+ abort ();
- args[i].initial_value = args[i].value
- = protect_from_queue (args[i].value, 0);
+ args[i].initial_value = args[i].value
+ = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
- mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
- if (mode != args[i].mode)
- {
- args[i].value
- = convert_modes (args[i].mode, mode,
- args[i].value, args[i].unsignedp);
+ mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
+ if (mode != args[i].mode)
+ {
+ args[i].value
+ = convert_modes (args[i].mode, mode,
+ args[i].value, args[i].unsignedp);
#if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE)
- /* CSE will replace this only if it contains args[i].value
- pseudo, so convert it down to the declared mode using
- a SUBREG. */
- if (REG_P (args[i].value)
- && GET_MODE_CLASS (args[i].mode) == MODE_INT)
- {
- args[i].initial_value
- = gen_lowpart_SUBREG (mode, args[i].value);
- SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
- SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
- args[i].unsignedp);
- }
+ /* CSE will replace this only if it contains args[i].value
+ pseudo, so convert it down to the declared mode using
+ a SUBREG. */
+ if (REG_P (args[i].value)
+ && GET_MODE_CLASS (args[i].mode) == MODE_INT)
+ {
+ args[i].initial_value
+ = gen_lowpart_SUBREG (mode, args[i].value);
+ SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
+ SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
+ args[i].unsignedp);
+ }
#endif
- }
- }
+ }
+ }
}
/* Given the current state of MUST_PREALLOCATE and information about
push_temp_slots ();
funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
pop_temp_slots (); /* FUNEXP can't be BLKmode. */
- emit_queue ();
}
return funexp;
}
call only uses SIZE bytes at the msb end, but it doesn't
seem worth generating rtl to say that. */
reg = gen_rtx_REG (word_mode, REGNO (reg));
- x = expand_binop (word_mode, ashl_optab, reg,
- GEN_INT (shift), reg, 1, OPTAB_WIDEN);
+ x = expand_shift (LSHIFT_EXPR, word_mode, reg,
+ build_int_cst (NULL_TREE, shift),
+ reg, 1);
if (x != reg)
emit_move_insn (reg, x);
}
rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
rtx x = gen_reg_rtx (word_mode);
int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
- optab dir = BYTES_BIG_ENDIAN ? lshr_optab : ashl_optab;
+ enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
+ : LSHIFT_EXPR;
emit_move_insn (x, tem);
- x = expand_binop (word_mode, dir, x, GEN_INT (shift),
- ri, 1, OPTAB_WIDEN);
+ x = expand_shift (dir, word_mode, x,
+ build_int_cst (NULL_TREE, shift),
+ ri, 1);
if (x != ri)
emit_move_insn (ri, x);
}
static int
combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
struct args_size *args_size,
- int preferred_unit_stack_boundary)
+ unsigned int preferred_unit_stack_boundary)
{
/* The number of bytes to pop so that the stack will be
under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
HOST_WIDE_INT adjustment;
/* The alignment of the stack after the arguments are pushed, if we
just pushed the arguments without adjust the stack here. */
- HOST_WIDE_INT unadjusted_alignment;
+ unsigned HOST_WIDE_INT unadjusted_alignment;
unadjusted_alignment
= ((stack_pointer_delta + unadjusted_args_size)
&& GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
i = INTVAL (XEXP (XEXP (x, 0), 1));
else
- return 0;
+ return 1;
#ifdef ARGS_GROW_DOWNWARD
i = -i - GET_MODE_SIZE (GET_MODE (x));
return insn != NULL_RTX;
}
-static tree
-fix_unsafe_tree (tree t)
-{
- switch (unsafe_for_reeval (t))
- {
- case 0: /* Safe. */
- break;
-
- case 1: /* Mildly unsafe. */
- t = unsave_expr (t);
- break;
-
- case 2: /* Wildly unsafe. */
- {
- tree var = build_decl (VAR_DECL, NULL_TREE,
- TREE_TYPE (t));
- SET_DECL_RTL (var,
- expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL));
- t = var;
- }
- break;
-
- default:
- abort ();
- }
- return t;
-}
-
-
/* If function value *VALUE was returned at the most significant end of a
register, shift it towards the least significant end and convert it to
TYPE's mode. Return true and update *VALUE if some action was needed.
}
}
-/* Clear RTX_UNCHANGING_P flag of incoming argument MEMs. */
-
-static void
-purge_mem_unchanging_flag (rtx x)
-{
- RTX_CODE code;
- int i, j;
- const char *fmt;
-
- if (x == NULL_RTX)
- return;
-
- code = GET_CODE (x);
-
- if (code == MEM)
- {
- if (RTX_UNCHANGING_P (x)
- && (XEXP (x, 0) == current_function_internal_arg_pointer
- || (GET_CODE (XEXP (x, 0)) == PLUS
- && XEXP (XEXP (x, 0), 0) ==
- current_function_internal_arg_pointer
- && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
- RTX_UNCHANGING_P (x) = 0;
- return;
- }
-
- /* Scan all subexpressions. */
- fmt = GET_RTX_FORMAT (code);
- for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
- {
- if (*fmt == 'e')
- purge_mem_unchanging_flag (XEXP (x, i));
- else if (*fmt == 'E')
- for (j = 0; j < XVECLEN (x, i); j++)
- purge_mem_unchanging_flag (XVECEXP (x, i, j));
- }
-}
-
-
/* Generate all the code for a function call
and return an rtx for its value.
Store the value in TARGET (specified as an rtx) if convenient.
tree addr = TREE_OPERAND (exp, 0);
int i;
/* The alignment of the stack, in bits. */
- HOST_WIDE_INT preferred_stack_boundary;
+ unsigned HOST_WIDE_INT preferred_stack_boundary;
/* The alignment of the stack, in bytes. */
- HOST_WIDE_INT preferred_unit_stack_boundary;
+ unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
/* The static chain value to use for this call. */
rtx static_chain_value;
/* See if this is "nothrow" function call. */
|| (ACCUMULATE_OUTGOING_ARGS
&& stack_arg_under_construction
&& structure_value_addr == virtual_outgoing_args_rtx)
- ? copy_addr_to_reg (convert_memory_address
+ ? copy_addr_to_reg (convert_memory_address
(Pmode, structure_value_addr))
: structure_value_addr);
|| (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
structure_value_addr = copy_to_reg (structure_value_addr);
- /* Tail calls can make things harder to debug, and we're traditionally
+ /* Tail calls can make things harder to debug, and we've traditionally
pushed these optimizations into -O2. Don't try if we're already
expanding a call, as that means we're an argument. Don't try if
- there's cleanups, as we know there's code to follow the call.
-
- If rtx_equal_function_value_matters is false, that means we've
- finished with regular parsing. Which means that some of the
- machinery we use to generate tail-calls is no longer in place.
- This is most often true of sjlj-exceptions, which we couldn't
- tail-call to anyway.
+ there's cleanups, as we know there's code to follow the call. */
- If current_nesting_level () == 0, we're being called after
- the function body has been expanded. This can happen when
- setting up trampolines in expand_function_end. */
if (currently_expanding_call++ != 0
|| !flag_optimize_sibling_calls
- || !rtx_equal_function_value_matters
- || current_nesting_level () == 0
- || any_pending_cleanups ()
|| args_size.var
|| lookup_stmt_eh_region (exp) >= 0)
try_tail_call = 0;
|| !lang_hooks.decls.ok_for_sibcall (fndecl))
try_tail_call = 0;
- if (try_tail_call)
- {
- int end, inc;
- actparms = NULL_TREE;
- /* Ok, we're going to give the tail call the old college try.
- This means we're going to evaluate the function arguments
- up to three times. There are two degrees of badness we can
- encounter, those that can be unsaved and those that can't.
- (See unsafe_for_reeval commentary for details.)
-
- Generate a new argument list. Pass safe arguments through
- unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
- For hard badness, evaluate them now and put their resulting
- rtx in a temporary VAR_DECL.
-
- initialize_argument_information has ordered the array for the
- order to be pushed, and we must remember this when reconstructing
- the original argument order. */
-
- if (PUSH_ARGS_REVERSED)
- {
- inc = 1;
- i = 0;
- end = num_actuals;
- }
- else
- {
- inc = -1;
- i = num_actuals - 1;
- end = -1;
- }
-
- for (; i != end; i += inc)
- {
- args[i].tree_value = fix_unsafe_tree (args[i].tree_value);
- }
- /* Do the same for the function address if it is an expression. */
- if (!fndecl)
- addr = fix_unsafe_tree (addr);
- /* Expanding one of those dangerous arguments could have added
- cleanups, but otherwise give it a whirl. */
- if (any_pending_cleanups ())
- try_tail_call = 0;
- }
-
-
/* Ensure current function's preferred stack boundary is at least
what we need. We don't have to increase alignment for recursive
functions. */
int sibcall_failure = 0;
/* We want to emit any pending stack adjustments before the tail
recursion "call". That way we know any adjustment after the tail
- recursion call can be ignored if we indeed use the tail
+ recursion call can be ignored if we indeed use the tail
call expansion. */
int save_pending_stack_adjust = 0;
int save_stack_pointer_delta = 0;
if (pass == 0)
{
- /* Emit any queued insns now; otherwise they would end up in
- only one of the alternates. */
- emit_queue ();
-
/* State variables we need to save and restore between
iterations. */
save_pending_stack_adjust = pending_stack_adjust;
sibcall_failure instead of continuing the loop. */
start_sequence ();
- if (pass == 0)
- {
- /* We know at this point that there are not currently any
- pending cleanups. If, however, in the process of evaluating
- the arguments we were to create some, we'll need to be
- able to get rid of them. */
- expand_start_target_temps ();
- }
-
/* Don't let pending stack adjusts add up to too much.
Also, do all pending adjustments now if there is any chance
this might be a call to alloca or if we are expanding a sibling
structure value. */
if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
{
- structure_value_addr
+ structure_value_addr
= convert_memory_address (Pmode, structure_value_addr);
emit_move_insn (struct_value,
force_reg (Pmode,
load_register_parameters (args, num_actuals, &call_fusage, flags,
pass == 0, &sibcall_failure);
- /* Perform postincrements before actually calling the function. */
- emit_queue ();
-
/* Save a pointer to the last insn before the call, so that we can
later safely search backwards to find the CALL_INSN. */
before_call = get_last_insn ();
/* Expansion of block moves possibly introduced a loop that may
not appear inside libcall block. */
for (insn = insns; insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
failed = true;
if (failed)
&& (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRT
|| DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTF
|| DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTL))
- note = gen_rtx_fmt_e (SQRT,
- GET_MODE (temp),
+ note = gen_rtx_fmt_e (SQRT,
+ GET_MODE (temp),
args[0].initial_value);
else
{
note = gen_rtx_EXPR_LIST (VOIDmode,
args[i].initial_value, note);
note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
-
+
if (flags & ECF_PURE)
note = gen_rtx_EXPR_LIST (VOIDmode,
gen_rtx_USE (VOIDmode,
than just a CALL_INSN above, so we must search for it here. */
rtx last = get_last_insn ();
- while (GET_CODE (last) != CALL_INSN)
+ while (!CALL_P (last))
{
last = PREV_INSN (last);
/* There was no CALL_INSN? */
/* If value type not void, return an rtx for the value. */
- /* If there are cleanups to be called, don't use a hard reg as target.
- We need to double check this and see if it matters anymore. */
- if (any_pending_cleanups ())
- {
- if (target && REG_P (target)
- && REGNO (target) < FIRST_PSEUDO_REGISTER)
- target = 0;
- sibcall_failure = 1;
- }
-
if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
|| ignore)
target = const0_rtx;
emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
stack_pointer_delta = old_stack_pointer_delta;
pending_stack_adjust = old_pending_adj;
+ old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
stack_arg_under_construction = old_stack_arg_under_construction;
highest_outgoing_arg_in_use = initial_highest_arg_in_use;
stack_usage_map = initial_stack_usage_map;
if (args[i].aligned_regs)
free (args[i].aligned_regs);
- if (pass == 0)
- {
- /* Undo the fake expand_start_target_temps we did earlier. If
- there had been any cleanups created, we've already set
- sibcall_failure. */
- expand_end_target_temps ();
- }
-
- /* If this function is returning into a memory location marked as
- readonly, it means it is initializing that location. We normally treat
- functions as not clobbering such locations, so we need to specify that
- this one does. We do this by adding the appropriate CLOBBER to the
- CALL_INSN function usage list. This cannot be done by emitting a
- standalone CLOBBER after the call because the latter would be ignored
- by at least the delay slot scheduling pass. We do this now instead of
- adding to call_fusage before the call to emit_call_1 because TARGET
- may be modified in the meantime. */
- if (structure_value_addr != 0 && target != 0
- && MEM_P (target) && RTX_UNCHANGING_P (target))
- add_function_usage_to
- (last_call_insn (),
- gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_CLOBBER (VOIDmode, target),
- NULL_RTX));
-
insns = get_insns ();
end_sequence ();
clear_pending_stack_adjust ();
emit_insn (gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx));
emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
- save_stack_pointer ();
}
return target;
void
fixup_tail_calls (void)
{
- rtx insn;
- tree arg;
-
purge_reg_equiv_notes ();
-
- /* A sibling call sequence also may invalidate RTX_UNCHANGING_P
- flag of some incoming arguments MEM RTLs, because it can write into
- those slots. We clear all those bits now.
-
- This is (slight) overkill, we could keep track of which arguments
- we actually write into. */
- for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
- {
- if (INSN_P (insn))
- purge_mem_unchanging_flag (PATTERN (insn));
- }
-
- /* Similarly, invalidate RTX_UNCHANGING_P for any incoming
- arguments passed in registers. */
- for (arg = DECL_ARGUMENTS (current_function_decl);
- arg;
- arg = TREE_CHAIN (arg))
- {
- if (REG_P (DECL_RTL (arg)))
- RTX_UNCHANGING_P (DECL_RTL (arg)) = false;
- }
}
/* Traverse an argument list in VALUES and expand all complex
argvec[count].partial = 0;
argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
-#ifdef FUNCTION_ARG_PARTIAL_NREGS
if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
abort ();
-#endif
locate_and_pad_parm (Pmode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
|| (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
abort ();
- /* There's no need to call protect_from_queue, because
- either emit_move_insn or emit_push_insn will do that. */
-
/* Make sure it is a reasonable operand for a move or push insn. */
if (!REG_P (val) && !MEM_P (val)
&& ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
val = force_operand (val, NULL_RTX);
-#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
- if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
+ if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
{
rtx slot;
- int must_copy = 1
-#ifdef FUNCTION_ARG_CALLEE_COPIES
- && ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
- NULL_TREE, 1)
-#endif
- ;
+ int must_copy = ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
+ NULL_TREE, 1);
/* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
functions, so we have to pretend this isn't such a function. */
mode = Pmode;
val = force_operand (XEXP (slot, 0), NULL_RTX);
}
-#endif
argvec[count].value = val;
argvec[count].mode = mode;
argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
-#ifdef FUNCTION_ARG_PARTIAL_NREGS
argvec[count].partial
= FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
-#else
- argvec[count].partial = 0;
-#endif
locate_and_pad_parm (mode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
are to be pushed. */
for (count = 0; count < nargs; count++, argnum += inc)
{
+ enum machine_mode mode = argvec[argnum].mode;
rtx val = argvec[argnum].value;
rtx reg = argvec[argnum].reg;
int partial = argvec[argnum].partial;
/* Handle calls that pass values in multiple non-contiguous
locations. The PA64 has examples of this for library calls. */
if (reg != 0 && GET_CODE (reg) == PARALLEL)
- emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (GET_MODE (val)));
+ emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
else if (reg != 0 && partial == 0)
emit_move_insn (reg, val);
just a CALL_INSN above, so we must search for it here. */
rtx last = get_last_insn ();
- while (GET_CODE (last) != CALL_INSN)
+ while (!CALL_P (last))
{
last = PREV_INSN (last);
/* There was no CALL_INSN? */
if (GET_CODE (valreg) == PARALLEL)
{
temp = gen_reg_rtx (outmode);
- emit_group_store (temp, valreg, NULL_TREE,
+ emit_group_store (temp, valreg, NULL_TREE,
GET_MODE_SIZE (outmode));
valreg = temp;
}
for a value of mode OUTMODE,
with NARGS different arguments, passed as alternating rtx values
and machine_modes to convert them to.
- The rtx values should have been passed through protect_from_queue already.
FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
excess = (arg->locate.size.constant
- int_size_in_bytes (TREE_TYPE (pval))
+ partial * GET_MODE_SIZE (GET_MODE (elt)));
- }
+ }
else
excess = (arg->locate.size.constant
- int_size_in_bytes (TREE_TYPE (pval))
be deferred during the rest of the arguments. */
NO_DEFER_POP;
- /* ANSI doesn't require a sequence point here,
- but PCC has one, so this will avoid some problems. */
- emit_queue ();
-
/* Free any temporary slots made in processing this argument. Show
that we might have taken the address of something and pushed that
as an operand. */
return sibcall_failure;
}
-/* Nonzero if we do not know how to pass TYPE solely in registers.
- We cannot do so in the following cases:
+/* Nonzero if we do not know how to pass TYPE solely in registers. */
- - if the type has variable size
- - if the type is marked as addressable (it is required to be constructed
- into the stack)
- - if the padding and mode of the type is such that a copy into a register
- would put it into the wrong part of the register.
+bool
+must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
+ tree type)
+{
+ if (!type)
+ return false;
- Which padding can't be supported depends on the byte endianness.
+ /* If the type has variable size... */
+ if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
+ return true;
+
+ /* If the type is marked as addressable (it is required
+ to be constructed into the stack)... */
+ if (TREE_ADDRESSABLE (type))
+ return true;
+
+ return false;
+}
- A value in a register is implicitly padded at the most significant end.
- On a big-endian machine, that is the lower end in memory.
- So a value padded in memory at the upper end can't go in a register.
- For a little-endian machine, the reverse is true. */
+/* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
+ takes trailing padding of a structure into account. */
+/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
bool
-default_must_pass_in_stack (enum machine_mode mode, tree type)
+must_pass_in_stack_var_size_or_pad (enum machine_mode mode, tree type)
{
if (!type)
return false;