/* Convert function calls to rtl insns, for GNU C compiler.
Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
- Free Software Foundation, Inc.
+ 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+ 2011 Free Software Foundation, Inc.
This file is part of GCC.
#include "sbitmap.h"
#include "langhooks.h"
#include "target.h"
-#include "debug.h"
#include "cgraph.h"
#include "except.h"
#include "dbgcnt.h"
rtx stack;
/* Location on the stack of the start of this argument slot. This can
differ from STACK if this arg pads downward. This location is known
- to be aligned to FUNCTION_ARG_BOUNDARY. */
+ to be aligned to TARGET_FUNCTION_ARG_BOUNDARY. */
rtx stack_slot;
/* Place that this stack area has been saved, if needed. */
rtx save_area;
CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
{
rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
- rtx call_insn;
+ rtx call_insn, call, funmem;
int already_popped = 0;
HOST_WIDE_INT n_popped
= targetm.calls.return_pops_args (fndecl, funtype, stack_size);
if (GET_CODE (funexp) != SYMBOL_REF)
funexp = memory_address (FUNCTION_MODE, funexp);
+ funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
+ if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
+ set_mem_expr (funmem, fndecl);
+ else if (fntree)
+ set_mem_expr (funmem, build_fold_indirect_ref (CALL_EXPR_FN (fntree)));
+
#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
if ((ecf_flags & ECF_SIBCALL)
&& HAVE_sibcall_pop && HAVE_sibcall_value_pop
if possible, for the sake of frame pointer elimination. */
if (valreg)
- pat = GEN_SIBCALL_VALUE_POP (valreg,
- gen_rtx_MEM (FUNCTION_MODE, funexp),
- rounded_stack_size_rtx, next_arg_reg,
- n_pop);
+ pat = GEN_SIBCALL_VALUE_POP (valreg, funmem, rounded_stack_size_rtx,
+ next_arg_reg, n_pop);
else
- pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
- rounded_stack_size_rtx, next_arg_reg, n_pop);
+ pat = GEN_SIBCALL_POP (funmem, rounded_stack_size_rtx, next_arg_reg,
+ n_pop);
emit_call_insn (pat);
already_popped = 1;
if possible, for the sake of frame pointer elimination. */
if (valreg)
- pat = GEN_CALL_VALUE_POP (valreg,
- gen_rtx_MEM (FUNCTION_MODE, funexp),
- rounded_stack_size_rtx, next_arg_reg, n_pop);
+ pat = GEN_CALL_VALUE_POP (valreg, funmem, rounded_stack_size_rtx,
+ next_arg_reg, n_pop);
else
- pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
- rounded_stack_size_rtx, next_arg_reg, n_pop);
+ pat = GEN_CALL_POP (funmem, rounded_stack_size_rtx, next_arg_reg,
+ n_pop);
emit_call_insn (pat);
already_popped = 1;
&& HAVE_sibcall && HAVE_sibcall_value)
{
if (valreg)
- emit_call_insn (GEN_SIBCALL_VALUE (valreg,
- gen_rtx_MEM (FUNCTION_MODE, funexp),
+ emit_call_insn (GEN_SIBCALL_VALUE (valreg, funmem,
rounded_stack_size_rtx,
next_arg_reg, NULL_RTX));
else
- emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
- rounded_stack_size_rtx, next_arg_reg,
+ emit_call_insn (GEN_SIBCALL (funmem, rounded_stack_size_rtx,
+ next_arg_reg,
GEN_INT (struct_value_size)));
}
else
if (HAVE_call && HAVE_call_value)
{
if (valreg)
- emit_call_insn (GEN_CALL_VALUE (valreg,
- gen_rtx_MEM (FUNCTION_MODE, funexp),
- rounded_stack_size_rtx, next_arg_reg,
- NULL_RTX));
+ emit_call_insn (GEN_CALL_VALUE (valreg, funmem, rounded_stack_size_rtx,
+ next_arg_reg, NULL_RTX));
else
- emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
- rounded_stack_size_rtx, next_arg_reg,
+ emit_call_insn (GEN_CALL (funmem, rounded_stack_size_rtx, next_arg_reg,
GEN_INT (struct_value_size)));
}
else
/* Find the call we just emitted. */
call_insn = last_call_insn ();
+ /* Some targets create a fresh MEM instead of reusing the one provided
+ above. Set its MEM_EXPR. */
+ call = PATTERN (call_insn);
+ if (GET_CODE (call) == PARALLEL)
+ call = XVECEXP (call, 0, 0);
+ if (GET_CODE (call) == SET)
+ call = SET_SRC (call);
+ if (GET_CODE (call) == CALL
+ && MEM_P (XEXP (call, 0))
+ && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
+ && MEM_EXPR (funmem) != NULL_TREE)
+ set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));
+
/* Put the register usage information there. */
add_function_usage_to (call_insn, call_fusage);
SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
- /* Record debug information for virtual calls. */
- if (flag_enable_icf_debug && fndecl == NULL)
- (*debug_hooks->virtual_call_token) (CALL_EXPR_FN (fntree),
- INSN_UID (call_insn));
-
/* Restore this now, so that we do defer pops for this call's args
if the context of the call as a whole permits. */
inhibit_defer_pop = old_inhibit_defer_pop;
if (DECL_IS_NOVOPS (exp))
flags |= ECF_NOVOPS;
+ if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
+ flags |= ECF_LEAF;
if (TREE_NOTHROW (exp))
flags |= ECF_NOTHROW;
int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
args[i].aligned_regs[j] = reg;
- word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
+ word = extract_bit_field (word, bitsize, 0, 1, false, NULL_RTX,
word_mode, word_mode);
/* There is no need to restrict this code to loading items
if (*old_stack_level == 0)
{
- emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
+ emit_stack_save (SAVE_BLOCK, old_stack_level);
*old_pending_adj = pending_stack_adjust;
pending_stack_adjust = 0;
}
- copy = gen_rtx_MEM (BLKmode,
- allocate_dynamic_stack_space
- (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
+ /* We can pass TRUE as the 4th argument because we just
+ saved the stack pointer and will restore it right after
+ the call. */
+ copy = allocate_dynamic_stack_space (size_rtx,
+ TYPE_ALIGN (type),
+ TYPE_ALIGN (type),
+ true);
+ copy = gen_rtx_MEM (BLKmode, copy);
set_mem_attributes (copy, type, 1);
}
else
if (REG_P (x)
&& HARD_REGISTER_P (x)
- && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (x))))
+ && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
{
/* Make sure that we generate a REG rather than a CONCAT.
Moves into CONCATs can need nontrivial instructions,
{
if (old_stack_level == 0)
{
- emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
+ emit_stack_save (SAVE_BLOCK, &old_stack_level);
old_stack_pointer_delta = stack_pointer_delta;
old_pending_adj = pending_stack_adjust;
pending_stack_adjust = 0;
stack_arg_under_construction = 0;
}
argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
+ if (flag_stack_usage)
+ current_function_has_unbounded_dynamic_stack_size = 1;
}
else
{
: reg_parm_stack_space));
if (old_stack_level == 0)
{
- emit_stack_save (SAVE_BLOCK, &old_stack_level,
- NULL_RTX);
+ emit_stack_save (SAVE_BLOCK, &old_stack_level);
old_stack_pointer_delta = stack_pointer_delta;
old_pending_adj = pending_stack_adjust;
pending_stack_adjust = 0;
stack_usage_map = stack_usage_map_buf;
highest_outgoing_arg_in_use = 0;
}
- allocate_dynamic_stack_space (push_size, NULL_RTX,
- BITS_PER_UNIT);
+ /* We can pass TRUE as the 4th argument because we just
+ saved the stack pointer and will restore it right after
+ the call. */
+ allocate_dynamic_stack_space (push_size, 0,
+ BIGGEST_ALIGNMENT, true);
}
/* If argument evaluation might modify the stack pointer,
be deferred during the evaluation of the arguments. */
NO_DEFER_POP;
+ /* Record the maximum pushed stack space size. We need to delay
+ doing it this far to take into account the optimization done
+ by combine_pending_stack_adjustment_and_call. */
+ if (flag_stack_usage
+ && !ACCUMULATE_OUTGOING_ARGS
+ && pass
+ && adjusted_args_size.var == 0)
+ {
+ int pushed = adjusted_args_size.constant + pending_stack_adjust;
+ if (pushed > current_function_pushed_stack_size)
+ current_function_pushed_stack_size = pushed;
+ }
+
funexp = rtx_for_function_call (fndecl, addr);
/* Figure out the register where the value, if any, will come back. */
sibcall_failure = 1;
}
- if (((flags & ECF_CONST)
- || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS))
- && args[i].stack)
+ if (args[i].stack)
call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
gen_rtx_USE (VOIDmode,
args[i].stack),
if (old_stack_level)
{
- emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
+ emit_stack_restore (SAVE_BLOCK, old_stack_level);
stack_pointer_delta = old_stack_pointer_delta;
pending_stack_adjust = old_pending_adj;
old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
if (args_size.constant > crtl->outgoing_args_size)
crtl->outgoing_args_size = args_size.constant;
+ if (flag_stack_usage && !ACCUMULATE_OUTGOING_ARGS)
+ {
+ int pushed = args_size.constant + pending_stack_adjust;
+ if (pushed > current_function_pushed_stack_size)
+ current_function_pushed_stack_size = pushed;
+ }
+
if (ACCUMULATE_OUTGOING_ARGS)
{
/* Since the stack pointer will never be pushed, it is possible for
if (! (reg != 0 && partial == 0))
{
+ rtx use;
+
if (ACCUMULATE_OUTGOING_ARGS)
{
/* If this is being stored into a pre-allocated, fixed-size,
NO_DEFER_POP;
- if ((flags & ECF_CONST)
- || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS))
- {
- rtx use;
-
- /* Indicate argument access so that alias.c knows that these
- values are live. */
- if (argblock)
- use = plus_constant (argblock,
- argvec[argnum].locate.offset.constant);
- else
- /* When arguments are pushed, trying to tell alias.c where
- exactly this argument is won't work, because the
- auto-increment causes confusion. So we merely indicate
- that we access something with a known mode somewhere on
- the stack. */
- use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
- gen_rtx_SCRATCH (Pmode));
- use = gen_rtx_MEM (argvec[argnum].mode, use);
- use = gen_rtx_USE (VOIDmode, use);
- call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
- }
+ /* Indicate argument access so that alias.c knows that these
+ values are live. */
+ if (argblock)
+ use = plus_constant (argblock,
+ argvec[argnum].locate.offset.constant);
+ else
+ /* When arguments are pushed, trying to tell alias.c where
+ exactly this argument is won't work, because the
+ auto-increment causes confusion. So we merely indicate
+ that we access something with a known mode somewhere on
+ the stack. */
+ use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
+ gen_rtx_SCRATCH (Pmode));
+ use = gen_rtx_MEM (argvec[argnum].mode, use);
+ use = gen_rtx_USE (VOIDmode, use);
+ call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
}
}