static int finalize_must_preallocate (int, int, struct arg_data *,
struct args_size *);
static void precompute_arguments (int, int, struct arg_data *);
-static int compute_argument_block_size (int, struct args_size *, int);
+static int compute_argument_block_size (int, struct args_size *, tree, int);
static void initialize_argument_information (int, struct arg_data *,
struct args_size *, int,
tree, tree,
even if the call has no arguments to pop. */
#if defined (HAVE_call) && defined (HAVE_call_value)
if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
- && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
+ && n_popped > 0)
#else
if (HAVE_call_pop && HAVE_call_value_pop)
#endif
/* Find the call we just emitted. */
call_insn = last_call_insn ();
- /* Mark memory as used for "pure" function call. */
- if (ecf_flags & ECF_PURE)
- call_fusage
- = gen_rtx_EXPR_LIST
- (VOIDmode,
- gen_rtx_USE (VOIDmode,
- gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
- call_fusage);
-
/* Put the register usage information there. */
add_function_usage_to (call_insn, call_fusage);
/* If this is a const call, then set the insn's unchanging bit. */
- if (ecf_flags & (ECF_CONST | ECF_PURE))
- CONST_OR_PURE_CALL_P (call_insn) = 1;
+ if (ecf_flags & ECF_CONST)
+ RTL_CONST_CALL_P (call_insn) = 1;
+
+ /* If this is a pure call, then set the insn's pure bit.  */
+ if (ecf_flags & ECF_PURE)
+ RTL_PURE_CALL_P (call_insn) = 1;
+
+ /* If this is a looping const or pure call, then set the insn's looping bit.  */
+ if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
+ RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;
/* If this call can't throw, attach a REG_EH_REGION reg note to that
effect. */
if (rn > 0)
REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
REG_NOTES (call_insn));
- note_current_region_may_contain_throw ();
}
if (ecf_flags & ECF_NORETURN)
{
REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
REG_NOTES (call_insn));
- current_function_calls_setjmp = 1;
+ cfun->calls_setjmp = 1;
}
SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
if (rounded_stack_size != 0)
{
- if (ecf_flags & (ECF_SP_DEPRESSED | ECF_NORETURN))
+ if (ecf_flags & ECF_NORETURN)
/* Just pretend we did the pop. */
stack_pointer_delta -= rounded_stack_size;
else if (flag_defer_pop && inhibit_defer_pop == 0
if (DECL_IS_RETURNS_TWICE (exp))
flags |= ECF_RETURNS_TWICE;
- /* The function exp may have the `pure' attribute. */
- if (DECL_IS_PURE (exp))
+ /* Process the pure and const attributes. */
+ if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
+ flags |= ECF_CONST;
+ if (DECL_PURE_P (exp))
flags |= ECF_PURE;
+ if (DECL_LOOPING_CONST_OR_PURE_P (exp))
+ flags |= ECF_LOOPING_CONST_OR_PURE;
if (DECL_IS_NOVOPS (exp))
flags |= ECF_NOVOPS;
if (TREE_NOTHROW (exp))
flags |= ECF_NOTHROW;
- if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
- flags |= ECF_CONST;
-
flags = special_function_p (exp, flags);
}
else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
if (TREE_THIS_VOLATILE (exp))
flags |= ECF_NORETURN;
- /* Mark if the function returns with the stack pointer depressed. We
- cannot consider it pure or constant in that case. */
- if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
- {
- flags |= ECF_SP_DEPRESSED;
- flags &= ~(ECF_PURE | ECF_CONST);
- }
-
return flags;
}
args[i].tree_value = build_fold_addr_expr (args[i].tree_value);
type = TREE_TYPE (args[i].tree_value);
- *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
+ if (*ecf_flags & ECF_CONST)
+ *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
+ *ecf_flags &= ~ECF_LIBCALL_BLOCK;
}
else
{
store_expr (args[i].tree_value, copy, 0, false);
- if (callee_copies)
- *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
- else
- *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
+ *ecf_flags &= ~(ECF_LIBCALL_BLOCK);
+
+ /* Just change the const function to pure and then let
+ the next test clear the pure based on
+ callee_copies. */
+ if (*ecf_flags & ECF_CONST)
+ {
+ *ecf_flags &= ~ECF_CONST;
+ *ecf_flags |= ECF_PURE;
+ }
+
+ if (!callee_copies && *ecf_flags & ECF_PURE)
+ *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
args[i].tree_value
= build_fold_addr_expr (make_tree (type, copy));
static int
compute_argument_block_size (int reg_parm_stack_space,
struct args_size *args_size,
+ tree fndecl ATTRIBUTE_UNUSED,
int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
int unadjusted_args_size = args_size->constant;
/* The area corresponding to register parameters is not to count in
the size of the block we need. So make the adjustment. */
- if (!OUTGOING_REG_PARM_STACK_SPACE)
+ if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? NULL_TREE : TREE_TYPE (fndecl))))
args_size->var
= size_binop (MINUS_EXPR, args_size->var,
ssize_int (reg_parm_stack_space));
args_size->constant = MAX (args_size->constant,
reg_parm_stack_space);
- if (!OUTGOING_REG_PARM_STACK_SPACE)
+ if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? NULL_TREE : TREE_TYPE (fndecl))))
args_size->constant -= reg_parm_stack_space;
}
return unadjusted_args_size;
{
HOST_WIDE_INT i;
- if (addr == current_function_internal_arg_pointer)
+ if (addr == crtl->args.internal_arg_pointer)
i = 0;
else if (GET_CODE (addr) == PLUS
- && XEXP (addr, 0) == current_function_internal_arg_pointer
+ && XEXP (addr, 0) == crtl->args.internal_arg_pointer
&& GET_CODE (XEXP (addr, 1)) == CONST_INT)
i = INTVAL (XEXP (addr, 1));
/* Return true for arg pointer based indexed addressing. */
else if (GET_CODE (addr) == PLUS
- && (XEXP (addr, 0) == current_function_internal_arg_pointer
- || XEXP (addr, 1) == current_function_internal_arg_pointer))
+ && (XEXP (addr, 0) == crtl->args.internal_arg_pointer
+ || XEXP (addr, 1) == crtl->args.internal_arg_pointer))
return true;
else
return false;
if (AGGREGATE_TYPE_P (TREE_TYPE (exp)))
warning (OPT_Waggregate_return, "function call has aggregate value");
- /* If the result of a pure or const function call is ignored (or void),
- and none of its arguments are volatile, we can avoid expanding the
- call and just evaluate the arguments for side-effects. */
+ /* If the result of a non-looping pure or const function call is
+ ignored (or void), and none of its arguments are volatile, we can
+ avoid expanding the call and just evaluate the arguments for
+ side-effects. */
if ((flags & (ECF_CONST | ECF_PURE))
+ && (!(flags & ECF_LOOPING_CONST_OR_PURE))
&& (ignore || target == const0_rtx
|| TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
{
reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif
- if (!OUTGOING_REG_PARM_STACK_SPACE && reg_parm_stack_space > 0 && PUSH_ARGS)
+ if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? NULL_TREE : TREE_TYPE (fndecl)))
+ && reg_parm_stack_space > 0 && PUSH_ARGS)
must_preallocate = 1;
/* Set up a place to return a structure. */
if (aggregate_value_p (exp, fndecl))
{
/* This call returns a big structure. */
- flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
+ flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE
+ | ECF_LIBCALL_BLOCK);
#ifdef PCC_STATIC_STRUCT_RETURN
{
if (fndecl)
{
struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
- if (i && i->preferred_incoming_stack_boundary)
+ /* Without automatic stack alignment, we can't increase preferred
+ stack boundary. With automatic stack alignment, it is
+ unnecessary since unless we can guarantee that all callers will
+ align the outgoing stack properly, callee has to align its
+ stack anyway. */
+ if (i
+ && i->preferred_incoming_stack_boundary
+ && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
preferred_stack_boundary = i->preferred_incoming_stack_boundary;
}
type_arg_types = TYPE_ARG_TYPES (funtype);
if (flags & ECF_MAY_BE_ALLOCA)
- current_function_calls_alloca = 1;
+ cfun->calls_alloca = 1;
/* If struct_value_rtx is 0, it means pass the address
as if it were an extra parameter. Put the argument expression
|| (fndecl && decl_function_context (fndecl) == current_function_decl)
/* If this function requires more stack slots than the current
function, we cannot change it into a sibling call.
- current_function_pretend_args_size is not part of the
+ crtl->args.pretend_args_size is not part of the
stack allocated by our caller. */
- || args_size.constant > (current_function_args_size
- - current_function_pretend_args_size)
+ || args_size.constant > (crtl->args.size
+ - crtl->args.pretend_args_size)
/* If the callee pops its own arguments, then it must pop exactly
the same number of arguments as the current function. */
|| (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
!= RETURN_POPS_ARGS (current_function_decl,
TREE_TYPE (current_function_decl),
- current_function_args_size))
+ crtl->args.size))
|| !lang_hooks.decls.ok_for_sibcall (fndecl))
try_tail_call = 0;
/* Ensure current function's preferred stack boundary is at least
what we need. We don't have to increase alignment for recursive
functions. */
- if (cfun->preferred_stack_boundary < preferred_stack_boundary
+ if (crtl->preferred_stack_boundary < preferred_stack_boundary
&& fndecl != current_function_decl)
- cfun->preferred_stack_boundary = preferred_stack_boundary;
- if (fndecl == current_function_decl)
- cfun->recursive_call_emit = true;
+ crtl->preferred_stack_boundary = preferred_stack_boundary;
preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
/* Don't let pending stack adjusts add up to too much.
Also, do all pending adjustments now if there is any chance
this might be a call to alloca or if we are expanding a sibling
- call sequence or if we are calling a function that is to return
- with stack pointer depressed.
+ call sequence.
Also do the adjustments before a throwing call, otherwise
exception handling can fail; PR 19225. */
if (pending_stack_adjust >= 32
|| (pending_stack_adjust > 0
- && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
+ && (flags & ECF_MAY_BE_ALLOCA))
|| (pending_stack_adjust > 0
&& flag_exceptions && !(flags & ECF_NOTHROW))
|| pass == 0)
if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
start_sequence ();
- if (pass == 0 && cfun->stack_protect_guard)
+ if (pass == 0 && crtl->stack_protect_guard)
stack_protect_epilogue ();
adjusted_args_size = args_size;
unadjusted_args_size
= compute_argument_block_size (reg_parm_stack_space,
&adjusted_args_size,
+ fndecl,
(pass == 0 ? 0
: preferred_stack_boundary));
argblock = virtual_incoming_args_rtx;
argblock
#ifdef STACK_GROWS_DOWNWARD
- = plus_constant (argblock, current_function_pretend_args_size);
+ = plus_constant (argblock, crtl->args.pretend_args_size);
#else
- = plus_constant (argblock, -current_function_pretend_args_size);
+ = plus_constant (argblock, -crtl->args.pretend_args_size);
#endif
stored_args_map = sbitmap_alloc (args_size.constant);
sbitmap_zero (stored_args_map);
the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
checking). */
- if (needed > current_function_outgoing_args_size)
- current_function_outgoing_args_size = needed;
+ if (needed > crtl->outgoing_args_size)
+ crtl->outgoing_args_size = needed;
if (must_preallocate)
{
/* Since we will be writing into the entire argument area,
the map must be allocated for its entire size, not just
the part that is the responsibility of the caller. */
- if (!OUTGOING_REG_PARM_STACK_SPACE)
+ if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? NULL_TREE : TREE_TYPE (fndecl))))
needed += reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
{
rtx push_size
= GEN_INT (adjusted_args_size.constant
- + (OUTGOING_REG_PARM_STACK_SPACE ? 0
+ + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? NULL
+ : TREE_TYPE (fndecl))) ? 0
: reg_parm_stack_space));
if (old_stack_level == 0)
{
/* If register arguments require space on the stack and stack space
was not preallocated, allocate stack space here for arguments
passed in registers. */
- if (OUTGOING_REG_PARM_STACK_SPACE && !ACCUMULATE_OUTGOING_ARGS
+ if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? NULL_TREE : TREE_TYPE (fndecl)))
+ && !ACCUMULATE_OUTGOING_ARGS
&& must_preallocate == 0 && reg_parm_stack_space > 0)
anti_adjust_stack (GEN_INT (reg_parm_stack_space));
note = gen_rtx_EXPR_LIST (VOIDmode,
args[i].initial_value, note);
note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
-
- if (flags & ECF_PURE)
- note = gen_rtx_EXPR_LIST (VOIDmode,
- gen_rtx_USE (VOIDmode,
- gen_rtx_MEM (BLKmode,
- gen_rtx_SCRATCH (VOIDmode))),
- note);
}
emit_libcall_block (insns, temp, valreg, note);
/* If size of args is variable or this was a constructor call for a stack
argument, restore saved stack-pointer value. */
- if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
+ if (old_stack_level)
{
emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
stack_pointer_delta = old_stack_pointer_delta;
if (tail_call_insns)
{
emit_insn (tail_call_insns);
- cfun->tail_call_emit = true;
+ crtl->tail_call_emit = true;
}
else
emit_insn (normal_call_insns);
currently_expanding_call--;
- /* If this function returns with the stack pointer depressed, ensure
- this block saves and restores the stack pointer, show it was
- changed, and adjust for any outgoing arg space. */
- if (flags & ECF_SP_DEPRESSED)
- {
- clear_pending_stack_adjust ();
- emit_insn (gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx));
- emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
- }
-
if (stack_usage_map_buf)
free (stack_usage_map_buf);
struct args_size original_args_size;
int argnum;
rtx fun;
+ /* TODO: choose the correct decl type of orgfun.  Sadly that information
+ isn't available here, so we default to the native calling ABI.  */
+ tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
int inc;
int count;
rtx argblock = 0;
/* Ensure current function's preferred stack boundary is at least
what we need. */
- if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
- cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
+ if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
+ crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
/* If this kind of value comes back in memory,
decide where in memory it should come back. */
mem_value = assign_temp (tfom, 0, 1, 1);
#endif
/* This call returns a big structure. */
- flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
+ flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE
+ | ECF_LIBCALL_BLOCK);
}
}
else
end_sequence ();
emit_insn (insns);
}
- flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
- /* If this was a CONST function, it is now PURE since
- it now reads memory. */
+ /* If this was a CONST function, it is now PURE since it now
+ reads memory. */
if (flags & ECF_CONST)
{
flags &= ~ECF_CONST;
args_size.constant = MAX (args_size.constant,
reg_parm_stack_space);
- if (!OUTGOING_REG_PARM_STACK_SPACE)
+ if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? NULL_TREE : TREE_TYPE (fndecl))))
args_size.constant -= reg_parm_stack_space;
- if (args_size.constant > current_function_outgoing_args_size)
- current_function_outgoing_args_size = args_size.constant;
+ if (args_size.constant > crtl->outgoing_args_size)
+ crtl->outgoing_args_size = args_size.constant;
if (ACCUMULATE_OUTGOING_ARGS)
{
/* Since we will be writing into the entire argument area, the
map must be allocated for its entire size, not just the part that
is the responsibility of the caller. */
- if (!OUTGOING_REG_PARM_STACK_SPACE)
+ if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? NULL_TREE : TREE_TYPE (fndecl))))
needed += reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
insns = get_insns ();
end_sequence ();
-
- if (flags & ECF_PURE)
- note = gen_rtx_EXPR_LIST (VOIDmode,
- gen_rtx_USE (VOIDmode,
- gen_rtx_MEM (BLKmode,
- gen_rtx_SCRATCH (VOIDmode))),
- note);
-
emit_libcall_block (insns, temp, valreg, note);
valreg = temp;
rtx x = arg->value;
int i = 0;
- if (XEXP (x, 0) == current_function_internal_arg_pointer
+ if (XEXP (x, 0) == crtl->args.internal_arg_pointer
|| (GET_CODE (XEXP (x, 0)) == PLUS
&& XEXP (XEXP (x, 0), 0) ==
- current_function_internal_arg_pointer
+ crtl->args.internal_arg_pointer
&& GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
{
- if (XEXP (x, 0) != current_function_internal_arg_pointer)
+ if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
i = INTVAL (XEXP (XEXP (x, 0), 1));
/* expand_call should ensure this. */