rtx stack;
/* Location on the stack of the start of this argument slot. This can
differ from STACK if this arg pads downward. This location is known
- to be aligned to FUNCTION_ARG_BOUNDARY. */
+ to be aligned to TARGET_FUNCTION_ARG_BOUNDARY. */
rtx stack_slot;
/* Place that this stack area has been saved, if needed. */
rtx save_area;
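
For context, the three fields above sit in calls.c's struct arg_data, which describes one outgoing argument. A sketch of that fragment, reconstructed from the visible lines (the rtx typedef here is a stand-in for GCC's RTL handle, and other fields of the real struct are omitted):

  /* Illustrative fragment of struct arg_data; "rtx" stubbed.  */
  typedef struct rtx_def *rtx;

  struct arg_data
  {
    /* Location on the stack at which the parameter should be stored.  */
    rtx stack;
    /* Start of this argument slot; can differ from STACK if the arg
       pads downward.  Aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
    rtx stack_slot;
    /* Place that this stack area has been saved, if needed.  */
    rtx save_area;
  };
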
flags |= ECF_RETURNS_TWICE;
/* Process the pure and const attributes. */
- if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
+ if (TREE_READONLY (exp))
flags |= ECF_CONST;
if (DECL_PURE_P (exp))
flags |= ECF_PURE;
if (DECL_IS_NOVOPS (exp))
flags |= ECF_NOVOPS;
+ if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
+ flags |= ECF_LEAF;
if (TREE_NOTHROW (exp))
flags |= ECF_NOTHROW;
flags = special_function_p (exp, flags);
}
- else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
+ else if (TYPE_P (exp) && TYPE_READONLY (exp))
flags |= ECF_CONST;
if (TREE_THIS_VOLATILE (exp))
- flags |= ECF_NORETURN;
+ {
+ flags |= ECF_NORETURN;
+ if (flags & (ECF_CONST|ECF_PURE))
+ flags |= ECF_LOOPING_CONST_OR_PURE;
+ }
return flags;
}
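
The net effect of the hunks above: TREE_THIS_VOLATILE on a decl no longer blocks ECF_CONST; it now marks the call noreturn instead, and a const or pure function that cannot return is downgraded to "looping" const/pure so that calls to it are not deleted as side-effect-free. A minimal standalone sketch of that decision tree (the bit values are stand-ins, not GCC's real ECF_* definitions):

  #include <stdio.h>

  #define ECF_CONST                 (1 << 0)
  #define ECF_PURE                  (1 << 1)
  #define ECF_NORETURN              (1 << 2)
  #define ECF_LOOPING_CONST_OR_PURE (1 << 3)

  static int
  derive_flags (int readonly_p, int volatile_p)
  {
    int flags = 0;

    if (readonly_p)                /* TREE_READONLY */
      flags |= ECF_CONST;
    if (volatile_p)                /* TREE_THIS_VOLATILE: noreturn */
      {
        flags |= ECF_NORETURN;
        /* Const/pure but noreturn: it may spin forever, so keep
           calls to it alive.  */
        if (flags & (ECF_CONST | ECF_PURE))
          flags |= ECF_LOOPING_CONST_OR_PURE;
      }
    return flags;
  }

  int
  main (void)
  {
    printf ("const noreturn -> %#x\n", derive_flags (1, 1)); /* 0xd */
    return 0;
  }
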
int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
args[i].aligned_regs[j] = reg;
- word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
+ word = extract_bit_field (word, bitsize, 0, 1, false, NULL_RTX,
word_mode, word_mode);
/* There is no need to restrict this code to loading items
   in TYPE_ALIGN sized hunks.  */
pending_stack_adjust = 0;
}
- copy = gen_rtx_MEM (BLKmode,
- allocate_dynamic_stack_space
- (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
+ /* We can pass TRUE as the 4th argument because we just
+ saved the stack pointer and will restore it right after
+ the call. */
+ copy = allocate_dynamic_stack_space (size_rtx,
+ TYPE_ALIGN (type),
+ TYPE_ALIGN (type),
+ true);
+ copy = gen_rtx_MEM (BLKmode, copy);
set_mem_attributes (copy, type, 1);
}
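
Judging from this call site (and the one further down), allocate_dynamic_stack_space now takes the size, the known alignment of that size, the required alignment, and a cannot_accumulate flag, and returns the address, which the caller wraps in a MEM itself. The toy model below shows why cannot_accumulate can be true here: the stack pointer is saved before the allocation and restored right after the call, so repeated calls cannot pile allocations up. All names are illustrative, not GCC's:

  #include <stdio.h>

  static long stack_pointer = 0x10000;

  /* Toy downward-growing stack allocator.  */
  static long
  toy_allocate (long size)
  {
    stack_pointer -= size;
    return stack_pointer;          /* address of the fresh block */
  }

  int
  main (void)
  {
    for (int i = 0; i < 3; i++)
      {
        long saved_sp = stack_pointer;   /* save the stack pointer */
        long copy = toy_allocate (64);   /* space for the arg copy */
        printf ("iteration %d: copy at %#lx\n", i, copy);
        stack_pointer = saved_sp;        /* restore right after */
      }
    /* Same address each iteration: nothing accumulated.  */
    return 0;
  }
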
else
emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
args[i].aligned_regs[j]);
- else if ((partial == 0 || args[i].pass_on_stack)
- && size != 0)
+ else if (partial == 0 || args[i].pass_on_stack)
{
rtx mem = validize_mem (args[i].value);
- /* Check for overlap with already clobbered argument area. */
+ /* Check for overlap with already clobbered argument area,
+ providing that this has non-zero size. */
if (is_sibcall
- && mem_overlaps_already_clobbered_arg_p (XEXP (args[i].value, 0),
- size))
+ && (size == 0
+ || mem_overlaps_already_clobbered_arg_p
+ (XEXP (args[i].value, 0), size)))
*sibcall_failure = 1;
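
The restructured condition enters this branch even when size is zero (previously the whole branch was skipped), and for sibling calls a zero size is now treated conservatively as a potential overlap. The sketch below shows the kind of byte-range test mem_overlaps_already_clobbered_arg_p performs; GCC records clobbered bytes of the outgoing argument area in stack_usage_map, stubbed here as a flat array:

  #include <stdbool.h>
  #include <stdio.h>

  /* Does [offset, offset + size) touch any byte already marked as
     clobbered?  Illustrative stand-in, not GCC's routine.  */
  static bool
  overlaps_clobbered_p (const char *used, int used_len,
                        int offset, int size)
  {
    for (int i = offset; i < offset + size && i < used_len; i++)
      if (used[i])
        return true;
    return false;
  }

  int
  main (void)
  {
    char map[8] = { 0, 0, 1, 1, 0, 0, 0, 0 };  /* bytes 2-3 in use */
    printf ("%d\n", overlaps_clobbered_p (map, 8, 1, 2));  /* 1 */
    printf ("%d\n", overlaps_clobbered_p (map, 8, 4, 2));  /* 0 */
    return 0;
  }
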
/* Handle a BLKmode that needs shifting. */
if (REG_P (x)
&& HARD_REGISTER_P (x)
- && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (x))))
+ && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
{
/* Make sure that we generate a REG rather than a CONCAT.
Moves into CONCATs can need nontrivial instructions,
and the whole point of this function is to avoid
using the hard register directly in such a situation.  */
stack_arg_under_construction = 0;
}
argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
+ if (flag_stack_usage)
+ current_function_has_unbounded_dynamic_stack_size = 1;
}
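
When the block of outgoing arguments has variable size, the -fstack-usage report can no longer bound the frame, hence the new unbounded flag. The documented .su qualifiers are "static", "dynamic" and "dynamic,bounded"; a rough illustration of that classification (not GCC's implementation):

  #include <stdio.h>

  static const char *
  stack_usage_kind (int has_dynamic_alloc, int dynamic_is_bounded)
  {
    if (!has_dynamic_alloc)
      return "static";
    return dynamic_is_bounded ? "dynamic,bounded" : "dynamic";
  }

  int
  main (void)
  {
    /* A variable-sized argument block, as above, is the
       unbounded case.  */
    printf ("%s\n", stack_usage_kind (1, 0));  /* dynamic */
    return 0;
  }
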
else
{
stack_usage_map = stack_usage_map_buf;
highest_outgoing_arg_in_use = 0;
}
- allocate_dynamic_stack_space (push_size, NULL_RTX,
- BITS_PER_UNIT);
+ /* We can pass TRUE as the 4th argument because we just
+ saved the stack pointer and will restore it right after
+ the call. */
+ allocate_dynamic_stack_space (push_size, 0,
+ BIGGEST_ALIGNMENT, true);
}
/* If argument evaluation might modify the stack pointer, pops can't
   safely be deferred during the evaluation of the arguments.  */
NO_DEFER_POP;
+ /* Record the maximum pushed stack space size. We need to delay
+ doing it this far to take into account the optimization done
+ by combine_pending_stack_adjustment_and_call. */
+ if (flag_stack_usage
+ && !ACCUMULATE_OUTGOING_ARGS
+ && pass
+ && adjusted_args_size.var == 0)
+ {
+ int pushed = adjusted_args_size.constant + pending_stack_adjust;
+ if (pushed > current_function_pushed_stack_size)
+ current_function_pushed_stack_size = pushed;
+ }
+
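
A worked example of the quantity recorded above: if a call site pushes 16 bytes of arguments while an 8-byte adjustment is still pending (folded into the call by combine_pending_stack_adjustment_and_call), the true high-water mark is 16 + 8 = 24 bytes; the per-function figure is the maximum over all call sites. Illustratively:

  #include <stdio.h>

  int
  main (void)
  {
    int pushed_stack_size = 0;
    /* { args_size.constant, pending_stack_adjust } per call site;
       the numbers are made up for illustration.  */
    int calls[3][2] = { { 16, 8 }, { 32, 0 }, { 8, 4 } };

    for (int i = 0; i < 3; i++)
      {
        int pushed = calls[i][0] + calls[i][1];
        if (pushed > pushed_stack_size)
          pushed_stack_size = pushed;
      }
    printf ("pushed stack size: %d\n", pushed_stack_size); /* 32 */
    return 0;
  }
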
funexp = rtx_for_function_call (fndecl, addr);
/* Figure out the register where the value, if any, will come back. */
if (args_size.constant > crtl->outgoing_args_size)
crtl->outgoing_args_size = args_size.constant;
+ if (flag_stack_usage && !ACCUMULATE_OUTGOING_ARGS)
+ {
+ int pushed = args_size.constant + pending_stack_adjust;
+ if (pushed > current_function_pushed_stack_size)
+ current_function_pushed_stack_size = pushed;
+ }
+
if (ACCUMULATE_OUTGOING_ARGS)
{
/* Since the stack pointer will never be pushed, it is possible for
   the evaluation of a parm to clobber something we have
   already written to the stack.  */