if (ecf_flags & ECF_RETURNS_TWICE)
{
REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
- REG_NOTES (call_insn));
+ REG_NOTES (call_insn));
current_function_calls_setjmp = 1;
}
/* Exclude functions not at the file scope, or not `extern',
since they are not the magic functions we would otherwise
think they are.
- FIXME: this should be handled with attributes, not with this
- hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
- because you can declare fork() inside a function if you
- wish. */
+ FIXME: this should be handled with attributes, not with this
+ hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
+ because you can declare fork() inside a function if you
+ wish. */
&& (DECL_CONTEXT (fndecl) == NULL_TREE
|| TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
&& TREE_PUBLIC (fndecl))
else if ((tname[0] == 'q' && tname[1] == 's'
&& ! strcmp (tname, "qsetjmp"))
|| (tname[0] == 'v' && tname[1] == 'f'
- && ! strcmp (tname, "vfork")))
+ && ! strcmp (tname, "vfork"))
+ || (tname[0] == 'g' && tname[1] == 'e'
+ && !strcmp (tname, "getcontext")))
flags |= ECF_RETURNS_TWICE;
else if (tname[0] == 'l' && tname[1] == 'o'
/* The function exp may have the `pure' attribute. */
if (DECL_IS_PURE (exp))
- flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
+ flags |= ECF_PURE;
if (DECL_IS_NOVOPS (exp))
flags |= ECF_NOVOPS;
flags |= ECF_NOTHROW;
if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
- flags |= ECF_LIBCALL_BLOCK | ECF_CONST;
+ flags |= ECF_CONST;
flags = special_function_p (exp, flags);
}
if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
{
flags |= ECF_SP_DEPRESSED;
- flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
+ flags &= ~(ECF_PURE | ECF_CONST);
}
return flags;
if (args[i].value == 0)
{
push_temp_slots ();
- args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
- VOIDmode, 0);
+ args[i].value = expand_normal (args[i].tree_value);
preserve_temp_slots (args[i].value);
pop_temp_slots ();
}
= (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
}
- args[i].aligned_regs = xmalloc (sizeof (rtx) * args[i].n_aligned_regs);
+ args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);
/* Structures smaller than a word are normally aligned to the
least significant byte. On a BYTES_BIG_ENDIAN machine,
gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)));
args[i].initial_value = args[i].value
- = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
+ = expand_normal (args[i].tree_value);
mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
if (mode != args[i].mode)
/* Generate an rtx (probably a pseudo-register) for the address. */
{
push_temp_slots ();
- funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
+ funexp = expand_normal (addr);
pop_temp_slots (); /* FUNEXP can't be BLKmode. */
}
return funexp;
}
+/* Return true if and only if SIZE storage units (usually bytes)
+   starting from address ADDR overlap with already clobbered argument
+   area.  This function is used to determine if we should give up a
+   sibcall.  */
+
+static bool
+mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
+{
+  HOST_WIDE_INT i;
+
+  /* Only an address that is exactly the incoming-argument pointer, or
+     that pointer plus a CONST_INT offset, can be matched against
+     stored_args_map; any other address form is conservatively treated
+     as not overlapping.  */
+  if (addr == current_function_internal_arg_pointer)
+    i = 0;
+  else if (GET_CODE (addr) == PLUS
+	   && (XEXP (addr, 0)
+	       == current_function_internal_arg_pointer)
+	   && GET_CODE (XEXP (addr, 1)) == CONST_INT)
+    i = INTVAL (XEXP (addr, 1));
+  else
+    return false;
+
+#ifdef ARGS_GROW_DOWNWARD
+  /* When the argument area grows downward, flip the offset so that I
+     indexes the first (lowest-addressed) byte of the accessed region
+     within the bitmap.  */
+  i = -i - size;
+#endif
+  if (size > 0)
+    {
+      unsigned HOST_WIDE_INT k;
+
+      /* Overlap iff any byte in [I, I + SIZE) has its bit set in
+	 stored_args_map; bits past the end of the bitmap are treated
+	 as unclobbered.  */
+      for (k = 0; k < size; k++)
+	if (i + k < stored_args_map->n_bits
+	    && TEST_BIT (stored_args_map, i + k))
+	  return true;
+    }
+
+  return false;
+}
+
/* Do the register loads required for any wholly-register parms or any
parms which are passed both on the stack and in a register. Their
expressions were already evaluated.
Mark all register-parms as living through the call, putting these USE
insns in the CALL_INSN_FUNCTION_USAGE field.
- When IS_SIBCALL, perform the check_sibcall_overlap_argument_overlap
+ When IS_SIBCALL, perform the check_sibcall_argument_overlap
checking, setting *SIBCALL_FAILURE if appropriate. */
static void
{
rtx mem = validize_mem (args[i].value);
+ /* Check for overlap with already clobbered argument area. */
+ if (is_sibcall
+ && mem_overlaps_already_clobbered_arg_p (XEXP (args[i].value, 0),
+ size))
+ *sibcall_failure = 1;
+
/* Handle a BLKmode that needs shifting. */
if (nregs == 1 && size < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
{
RTX_CODE code;
int i, j;
- unsigned int k;
const char *fmt;
if (x == NULL_RTX)
code = GET_CODE (x);
if (code == MEM)
- {
- if (XEXP (x, 0) == current_function_internal_arg_pointer)
- i = 0;
- else if (GET_CODE (XEXP (x, 0)) == PLUS
- && XEXP (XEXP (x, 0), 0) ==
- current_function_internal_arg_pointer
- && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
- i = INTVAL (XEXP (XEXP (x, 0), 1));
- else
- return 0;
-
-#ifdef ARGS_GROW_DOWNWARD
- i = -i - GET_MODE_SIZE (GET_MODE (x));
-#endif
-
- for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
- if (i + k < stored_args_map->n_bits
- && TEST_BIT (stored_args_map, i + k))
- return 1;
-
- return 0;
- }
+ return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
+ GET_MODE_SIZE (GET_MODE (x)));
/* Scan all subexpressions. */
fmt = GET_RTX_FORMAT (code);
into a sibcall. */
|| !targetm.function_ok_for_sibcall (fndecl, exp)
/* Functions that do not return exactly once may not be sibcall
- optimized. */
+ optimized. */
|| (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
|| TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
/* If the called function is nested in the current one, it might access
- some of the caller's arguments, but could clobber them beforehand if
- the argument areas are shared. */
+ some of the caller's arguments, but could clobber them beforehand if
+ the argument areas are shared. */
|| (fndecl && decl_function_context (fndecl) == current_function_decl)
/* If this function requires more stack slots than the current
function, we cannot change it into a sibling call.
old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
/* The argument block when performing a sibling call is the
- incoming argument block. */
+ incoming argument block. */
if (pass == 0)
{
argblock = virtual_incoming_args_rtx;
#endif
if (stack_usage_map_buf)
free (stack_usage_map_buf);
- stack_usage_map_buf = xmalloc (highest_outgoing_arg_in_use);
+ stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
stack_usage_map = stack_usage_map_buf;
if (initial_highest_arg_in_use)
/* Make a new map for the new argument list. */
if (stack_usage_map_buf)
free (stack_usage_map_buf);
- stack_usage_map_buf = xmalloc (highest_outgoing_arg_in_use);
+ stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
stack_usage_map = stack_usage_map_buf;
memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
highest_outgoing_arg_in_use = 0;
precompute_register_parameters (num_actuals, args, ®_parm_seen);
if (TREE_OPERAND (exp, 2))
- static_chain_value = expand_expr (TREE_OPERAND (exp, 2),
- NULL_RTX, VOIDmode, 0);
+ static_chain_value = expand_normal (TREE_OPERAND (exp, 2));
else
static_chain_value = 0;
rtx insn;
bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;
- insns = get_insns ();
+ insns = get_insns ();
/* Expansion of block moves possibly introduced a loop that may
not appear inside libcall block. */
&& GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
&& GET_MODE (target) == GET_MODE (valreg))
{
- /* TARGET and VALREG cannot be equal at this point because the
- latter would not have REG_FUNCTION_VALUE_P true, while the
- former would if it were referring to the same register.
-
- If they refer to the same register, this move will be a no-op,
- except when function inlining is being done. */
- emit_move_insn (target, valreg);
-
- /* If we are setting a MEM, this code must be executed. Since it is
- emitted after the call insn, sibcall optimization cannot be
- performed in that case. */
- if (MEM_P (target))
- sibcall_failure = 1;
+ bool may_overlap = false;
+
+ /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
+ reg to a plain register. */
+ if (REG_P (valreg)
+ && HARD_REGISTER_P (valreg)
+ && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (valreg)))
+ && !(REG_P (target) && !HARD_REGISTER_P (target)))
+ valreg = copy_to_reg (valreg);
+
+ /* If TARGET is a MEM in the argument area, and we have
+ saved part of the argument area, then we can't store
+ directly into TARGET as it may get overwritten when we
+ restore the argument save area below. Don't work too
+ hard though and simply force TARGET to a register if it
+ is a MEM; the optimizer is quite likely to sort it out. */
+ if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
+ for (i = 0; i < num_actuals; i++)
+ if (args[i].save_area)
+ {
+ may_overlap = true;
+ break;
+ }
+
+ if (may_overlap)
+ target = copy_to_reg (valreg);
+ else
+ {
+ /* TARGET and VALREG cannot be equal at this point
+ because the latter would not have
+ REG_FUNCTION_VALUE_P true, while the former would if
+ it were referring to the same register.
+
+ If they refer to the same register, this move will be
+ a no-op, except when function inlining is being
+ done. */
+ emit_move_insn (target, valreg);
+
+ /* If we are setting a MEM, this code must be executed.
+ Since it is emitted after the call insn, sibcall
+ optimization cannot be performed in that case. */
+ if (MEM_P (target))
+ sibcall_failure = 1;
+ }
}
else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
{
int unsignedp = TYPE_UNSIGNED (type);
int offset = 0;
enum machine_mode pmode;
-
+
pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
/* If we don't promote as expected, something is wrong. */
gcc_assert (GET_MODE (target) == pmode);
-
+
if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
&& (GET_MODE_SIZE (GET_MODE (target))
> GET_MODE_SIZE (TYPE_MODE (type))))
tree type = TREE_TYPE (TREE_VALUE (p));
if (type && TREE_CODE (type) == COMPLEX_TYPE
&& targetm.calls.split_complex_arg (type))
- goto found;
+ goto found;
}
return values;
tree type = TREE_VALUE (p);
if (TREE_CODE (type) == COMPLEX_TYPE
&& targetm.calls.split_complex_arg (type))
- goto found;
+ goto found;
}
return types;
if (mem_value && struct_value == 0 && ! pcc_struct_value)
{
rtx addr = XEXP (mem_value, 0);
-
+
nargs++;
/* Make sure it is a reasonable operand for a move or push insn. */
locate_and_pad_parm (Pmode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
- 1,
+ 1,
#else
argvec[count].reg != 0,
#endif
highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
needed);
#endif
- stack_usage_map_buf = xmalloc (highest_outgoing_arg_in_use);
+ stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
stack_usage_map = stack_usage_map_buf;
if (initial_highest_arg_in_use)
needed = 0;
/* We must be careful to use virtual regs before they're instantiated,
- and real regs afterwards. Loop optimization, for example, can create
+ and real regs afterwards. Loop optimization, for example, can create
new libcalls after we've instantiated the virtual regs, and if we
use virtuals anyway, they won't match the rtl patterns. */
{
argvec[argnum].save_area
= assign_stack_temp (BLKmode,
- argvec[argnum].locate.size.constant,
+ argvec[argnum].locate.size.constant,
0);
emit_block_move (validize_mem (argvec[argnum].save_area),
- stack_area,
+ stack_area,
GEN_INT (argvec[argnum].locate.size.constant),
BLOCK_OP_CALL_PARM);
}
auto-increment causes confusion. So we merely indicate
that we access something with a known mode somewhere on
the stack. */
- use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
+ use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
gen_rtx_SCRATCH (Pmode));
use = gen_rtx_MEM (argvec[argnum].mode, use);
use = gen_rtx_USE (VOIDmode, use);
if (save_mode == BLKmode)
emit_block_move (stack_area,
- validize_mem (argvec[count].save_area),
+ validize_mem (argvec[count].save_area),
GEN_INT (argvec[count].locate.size.constant),
BLOCK_OP_CALL_PARM);
else
/* Being passed entirely in a register. We shouldn't be called in
this case. */
gcc_assert (reg == 0 || partial != 0);
-
+
/* If this arg needs special alignment, don't load the registers
here. */
if (arg->n_aligned_regs != 0)
}
/* Check for overlap with already clobbered argument area. */
- if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
- {
- int i = -1;
- unsigned HOST_WIDE_INT k;
- rtx x = arg->value;
-
- if (XEXP (x, 0) == current_function_internal_arg_pointer)
- i = 0;
- else if (GET_CODE (XEXP (x, 0)) == PLUS
- && XEXP (XEXP (x, 0), 0) ==
- current_function_internal_arg_pointer
- && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
- i = INTVAL (XEXP (XEXP (x, 0), 1));
- else
- i = -1;
-
- if (i >= 0)
- {
-#ifdef ARGS_GROW_DOWNWARD
- i = -i - arg->locate.size.constant;
-#endif
- if (arg->locate.size.constant > 0)
- {
- unsigned HOST_WIDE_INT sc = arg->locate.size.constant;
-
- for (k = 0; k < sc; k++)
- if (i + k < stored_args_map->n_bits
- && TEST_BIT (stored_args_map, i + k))
- {
- sibcall_failure = 1;
- break;
- }
- }
- }
- }
+ if ((flags & ECF_SIBCALL)
+ && MEM_P (arg->value)
+ && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
+ arg->locate.size.constant))
+ sibcall_failure = 1;
/* Don't allow anything left on stack from computation
of argument to alloca. */