/* Expands front end tree to back end RTL for GCC.
Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
- 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
+ 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
This file is part of GCC.
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"
+#include "target.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#define STACK_ALIGNMENT_NEEDED 1
#endif
+#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
+
/* Some systems use __main in a way incompatible with its use in gcc, in these
cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
give the same symbol without quotes for an alternative entry point. You
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
-/* We always define `record_insns' even if its not used so that we
+/* We always define `record_insns' even if it's not used so that we
can always export `prologue_epilogue_contains'. */
static void record_insns (rtx, varray_type *) ATTRIBUTE_UNUSED;
static int contains (rtx, varray_type);
static int insns_for_mem_comp (const void *, const void *);
static int insns_for_mem_walk (rtx *, void *);
static void compute_insns_for_mem (rtx, rtx, htab_t);
-static void prepare_function_start (void);
+static void prepare_function_start (tree);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void instantiate_virtual_regs_lossage (rtx);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
\f
/* Pointer to chain of `struct function' for containing functions. */
-static GTY(()) struct function *outer_function_chain;
+struct function *outer_function_chain;
/* List of insns that were postponed by purge_addressof_1. */
static rtx postponed_insns;
f->x_nonlocal_goto_stack_level = NULL;
f->x_cleanup_label = NULL;
f->x_return_label = NULL;
+ f->x_naked_return_label = NULL;
f->computed_goto_common_label = NULL;
f->computed_goto_common_reg = NULL;
f->x_save_expr_regs = NULL;
if (best_p->size - rounded_size >= alignment)
{
- p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
+ p = ggc_alloc (sizeof (struct temp_slot));
p->in_use = p->addr_taken = 0;
p->size = best_p->size - rounded_size;
p->base_offset = best_p->base_offset + rounded_size;
{
HOST_WIDE_INT frame_offset_old = frame_offset;
- p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
+ p = ggc_alloc (sizeof (struct temp_slot));
/* We are passing an explicit alignment request to assign_stack_local.
One side effect of that is assign_stack_local will not round SIZE
if (decl && size == -1
&& TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
{
- error_with_decl (decl, "size of variable `%s' is too large");
+ error ("%Jsize of variable '%D' is too large", decl, decl);
size = 1;
}
if (function->decl == context)
break;
- /* If this is a variable-size object with a pseudo to address it,
- put that pseudo into the stack, if the var is nonlocal. */
+ /* If this is a variable-sized object or a structure passed by invisible
+ reference, with a pseudo to address it, put that pseudo into the stack
+ if the var is non-local. */
if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
&& GET_CODE (reg) == MEM
&& GET_CODE (XEXP (reg, 0)) == REG
decl_mode = promoted_mode = GET_MODE (reg);
}
+ /* If this variable lives in the current function and we don't need to put it
+ in the stack for the sake of setjmp or the non-locality, try to keep it in
+ a register until we know we actually need the address. */
can_use_addressof
= (function == 0
+ && ! (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl))
&& optimize > 0
/* FIXME make it work for promoted modes too */
&& decl_mode == promoted_mode
if (GET_CODE (reg) == REG)
{
- /* If this variable lives in the current function and we don't need
- to put things in the stack for the sake of setjmp, try to keep it
- in a register until we know we actually need the address. */
if (can_use_addressof)
gen_mem_addressof (reg, decl, rescan);
else
{
struct var_refs_queue *temp;
- temp
- = (struct var_refs_queue *) ggc_alloc (sizeof (struct var_refs_queue));
+ temp = ggc_alloc (sizeof (struct var_refs_queue));
temp->modified = reg;
temp->promoted_mode = promoted_mode;
temp->unsignedp = unsigned_p;
rtx first_insn = get_insns ();
struct sequence_stack *stack = seq_stack;
tree rtl_exps = rtl_expr_chain;
+ int save_volatile_ok = volatile_ok;
/* If there's a hash table, it must record all uses of VAR. */
if (ht)
return;
}
+ /* Volatile is valid in MEMs because all we're doing is changing the
+ address inside. */
+ volatile_ok = 1;
fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp,
stack == 0, may_share);
end_sequence ();
}
}
+
+ volatile_ok = save_volatile_ok;
}
\f
/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
if (p == 0)
{
- p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement));
+ p = xmalloc (sizeof (struct fixup_replacement));
p->old = x;
p->new = 0;
p->next = *replacements;
rtx insn_list;
tmp.key = var;
- ime = (struct insns_for_mem_entry *) htab_find (ht, &tmp);
+ ime = htab_find (ht, &tmp);
for (insn_list = ime->insns; insn_list != 0; insn_list = XEXP (insn_list, 1))
if (INSN_P (XEXP (insn_list, 0)))
fixup_var_refs_insn (XEXP (insn_list, 0), var, promoted_mode,
replacement = find_fixup_replacement (replacements, x);
if (replacement->new)
{
+ enum machine_mode mode = GET_MODE (x);
*loc = replacement->new;
+
+ /* Careful! We may have just replaced a SUBREG by a MEM, which
+ means that the insn may have become invalid again. We can't
+ in this case make a new replacement since we already have one
+ and we must deal with MATCH_DUPs. */
+ if (GET_CODE (replacement->new) == MEM)
+ {
+ INSN_CODE (insn) = -1;
+ if (recog_memoized (insn) >= 0)
+ return;
+
+ fixup_var_refs_1 (replacement->new, mode, &PATTERN (insn),
+ insn, replacements, no_share);
+ }
+
return;
}
fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), reg, 0);
}
else if (rescan)
- fixup_var_refs (reg, GET_MODE (reg), 0, reg, 0);
+ {
+ /* This can only happen during reload. Clear the same flag bits as
+ reload. */
+ MEM_VOLATILE_P (reg) = 0;
+ RTX_UNCHANGING_P (reg) = 0;
+ MEM_IN_STRUCT_P (reg) = 0;
+ MEM_SCALAR_P (reg) = 0;
+ MEM_ATTRS (reg) = 0;
+
+ fixup_var_refs (reg, GET_MODE (reg), 0, reg, 0);
+ }
return reg;
}
int i, j;
const char *fmt;
bool result = true;
+ bool libcall = false;
/* Re-start here to avoid recursion in common cases. */
restart:
if (x == 0)
return true;
+ /* Is this a libcall? */
+ if (!insn)
+ libcall = REG_NOTE_KIND (*loc) == REG_RETVAL;
+
code = GET_CODE (x);
/* If we don't return in any of the cases below, we will recurse inside
which can be succinctly described with a simple SUBREG.
Note that removing the REG_EQUAL note is not an option
on the last insn of a libcall, so we must do a replacement. */
- if (! purge_addressof_replacements
- && ! purge_bitfield_addressof_replacements)
+
+ /* In compile/990107-1.c:7 compiled at -O1 -m1 for sh-elf,
+ we got
+ (mem:DI (addressof:SI (reg/v:DF 160) 159 0x401c8510)
+ [0 S8 A32]), which can be expressed with a simple
+ same-size subreg */
+ if ((GET_MODE_SIZE (GET_MODE (x))
+ <= GET_MODE_SIZE (GET_MODE (sub)))
+ /* Again, invalid pointer casts (as in
+ compile/990203-1.c) can require paradoxical
+ subregs. */
+ || (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
+ && (GET_MODE_SIZE (GET_MODE (x))
+ > GET_MODE_SIZE (GET_MODE (sub)))
+ && libcall))
{
- /* In compile/990107-1.c:7 compiled at -O1 -m1 for sh-elf,
- we got
- (mem:DI (addressof:SI (reg/v:DF 160) 159 0x401c8510)
- [0 S8 A32]), which can be expressed with a simple
- same-size subreg */
- if ((GET_MODE_SIZE (GET_MODE (x))
- == GET_MODE_SIZE (GET_MODE (sub)))
- /* Again, invalid pointer casts (as in
- compile/990203-1.c) can require paradoxical
- subregs. */
- || (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
- && (GET_MODE_SIZE (GET_MODE (x))
- > GET_MODE_SIZE (GET_MODE (sub)))))
- {
- *loc = gen_rtx_SUBREG (GET_MODE (x), sub, 0);
- return true;
- }
- /* ??? Are there other cases we should handle? */
+ *loc = gen_rtx_SUBREG (GET_MODE (x), sub, 0);
+ return true;
}
+ /* ??? Are there other cases we should handle? */
+
/* Sometimes we may not be able to find the replacement. For
example when the original insn was a MEM in a wider mode,
and the note is part of a sign extension of a narrowed
return true;
}
purge_addressof_replacements
- = gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
- gen_rtx_EXPR_LIST (VOIDmode, sub,
- purge_addressof_replacements));
+ = gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
+ gen_rtx_EXPR_LIST (VOIDmode, sub,
+ purge_addressof_replacements));
return true;
}
goto restart;
{
struct insns_for_mem_entry *ifme;
tmp.key = *r;
- ifme = (struct insns_for_mem_entry *) htab_find (ifmwi->ht, &tmp);
+ ifme = htab_find (ifmwi->ht, &tmp);
/* If we have not already recorded this INSN, do so now. Since
we process the INSNs in order, we know that if we have
EXP may be a type node or an expression (whose type is tested). */
int
-aggregate_value_p (tree exp)
+aggregate_value_p (tree exp, tree fntype)
{
int i, regno, nregs;
rtx reg;
tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
+ if (fntype)
+ switch (TREE_CODE (fntype))
+ {
+ case CALL_EXPR:
+ fntype = get_callee_fndecl (fntype);
+ fntype = fntype ? TREE_TYPE (fntype) : 0;
+ break;
+ case FUNCTION_DECL:
+ fntype = TREE_TYPE (fntype);
+ break;
+ case FUNCTION_TYPE:
+ case METHOD_TYPE:
+ break;
+ case IDENTIFIER_NODE:
+ fntype = 0;
+ break;
+ default:
+ /* We don't expect other tree types here. */
+ abort();
+ }
+
if (TREE_CODE (type) == VOID_TYPE)
return 0;
- if (RETURN_IN_MEMORY (type))
+ if (targetm.calls.return_in_memory (type, fntype))
return 1;
/* Types that are TREE_ADDRESSABLE must be constructed in memory,
and thus can't be returned in registers. */
return 0;
regno = REGNO (reg);
- nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
+ nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
for (i = 0; i < nregs; i++)
if (! call_used_regs[regno + i])
return 1;
/* Total space needed so far for args on the stack,
given as a constant and a tree-expression. */
struct args_size stack_args_size;
+ HOST_WIDE_INT extra_pretend_bytes = 0;
tree fntype = TREE_TYPE (fndecl);
tree fnargs = DECL_ARGUMENTS (fndecl), orig_fnargs;
/* This is used for the arg pointer when referring to stack args. */
/* This is a dummy PARM_DECL that we used for the function result if
the function returns a structure. */
tree function_result_decl = 0;
-#ifdef SETUP_INCOMING_VARARGS
int varargs_setup = 0;
-#endif
- int reg_parm_stack_space = 0;
+ int reg_parm_stack_space ATTRIBUTE_UNUSED = 0;
rtx conversion_insns = 0;
/* Nonzero if function takes extra anonymous args.
stack_args_size.var = 0;
/* If struct value address is treated as the first argument, make it so. */
- if (aggregate_value_p (DECL_RESULT (fndecl))
+ if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
&& ! current_function_returns_pcc_struct
- && struct_value_incoming_rtx == 0)
+ && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
{
tree type = build_pointer_type (TREE_TYPE (fntype));
orig_fnargs = fnargs;
max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
- parm_reg_stack_loc = (rtx *) ggc_alloc_cleared (max_parm_reg * sizeof (rtx));
+ parm_reg_stack_loc = ggc_alloc_cleared (max_parm_reg * sizeof (rtx));
if (SPLIT_COMPLEX_ARGS)
fnargs = split_complex_args (fnargs);
#ifdef REG_PARM_STACK_SPACE
-#ifdef MAYBE_REG_PARM_STACK_SPACE
- reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
-#else
reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif
-#endif
#ifdef INIT_CUMULATIVE_INCOMING_ARGS
INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
#else
- INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, fndecl);
+ INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, fndecl, -1);
#endif
/* We haven't yet found an argument that we must push and pretend the
int last_named = 0, named_arg;
int in_regs;
int partial = 0;
+ int pretend_bytes = 0;
+ int loaded_in_reg = 0;
/* Set LAST_NAMED if this is last named arg before last
anonymous args. */
/* Set NAMED_ARG if this arg should be treated as a named arg. For
most machines, if this is a varargs/stdarg function, then we treat
the last named arg as if it were anonymous too. */
- named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
+ named_arg = (targetm.calls.strict_argument_naming (&args_so_far)
+ ? 1 : !last_named);
if (TREE_TYPE (parm) == error_mark_node
/* This can happen after weird syntax errors
promoted_mode = passed_mode;
-#ifdef PROMOTE_FUNCTION_ARGS
- /* Compute the mode in which the arg is actually extended to. */
- unsignedp = TREE_UNSIGNED (passed_type);
- promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
-#endif
+ if (targetm.calls.promote_function_args (TREE_TYPE (fndecl)))
+ {
+ /* Compute the mode in which the arg is actually extended to. */
+ unsignedp = TREE_UNSIGNED (passed_type);
+ promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
+ }
/* Let machine desc say which reg (if any) the parm arrives in.
0 means it arrives on the stack. */
if (entry_parm == 0)
promoted_mode = passed_mode;
-#ifdef SETUP_INCOMING_VARARGS
/* If this is the last named parameter, do any required setup for
varargs or stdargs. We need to know about the case of this being an
addressable type, in which case we skip the registers it
Also, indicate when RTL generation is to be suppressed. */
if (last_named && !varargs_setup)
{
- SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
- current_function_pretend_args_size, 0);
+ int varargs_pretend_bytes = 0;
+ targetm.calls.setup_incoming_varargs (&args_so_far, promoted_mode,
+ passed_type,
+ &varargs_pretend_bytes, 0);
varargs_setup = 1;
+
+ /* If the back-end has requested extra stack space, record how
+ much is needed. Do not change pretend_args_size otherwise
+ since it may be nonzero from an earlier partial argument. */
+ if (varargs_pretend_bytes > 0)
+ current_function_pretend_args_size = varargs_pretend_bytes;
}
-#endif
/* Determine parm's home in the stack,
in case it arrives in the stack or we should pretend it did.
#endif
if (!in_regs && !named_arg)
{
- int pretend_named = PRETEND_OUTGOING_VARARGS_NAMED;
+ int pretend_named =
+ targetm.calls.pretend_outgoing_varargs_named (&args_so_far);
if (pretend_named)
{
#ifdef FUNCTION_INCOMING_ARG
#ifdef FUNCTION_ARG_PARTIAL_NREGS
if (entry_parm)
- partial = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
- passed_type, named_arg);
+ {
+ partial = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
+ passed_type, named_arg);
+ if (partial
+ /* The caller might already have allocated stack space
+ for the register parameters. */
+ && reg_parm_stack_space == 0)
+ {
+ /* Part of this argument is passed in registers and part
+ is passed on the stack. Ask the prologue code to extend
+ the stack part so that we can recreate the full value.
+
+ PRETEND_BYTES is the size of the registers we need to store.
+ CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
+ stack space that the prologue should allocate.
+
+ Internally, gcc assumes that the argument pointer is
+ aligned to STACK_BOUNDARY bits. This is used both for
+ alignment optimizations (see init_emit) and to locate
+ arguments that are aligned to more than PARM_BOUNDARY
+ bits. We must preserve this invariant by rounding
+ CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to a stack
+ boundary. */
+
+ /* We assume at most one partial arg, and it must be the first
+ argument on the stack. */
+ if (extra_pretend_bytes || current_function_pretend_args_size)
+ abort ();
+
+ pretend_bytes = partial * UNITS_PER_WORD;
+ current_function_pretend_args_size
+ = CEIL_ROUND (pretend_bytes, STACK_BYTES);
+
+ /* We want to align relative to the actual stack pointer, so
+ don't include this in the stack size until later. */
+ extra_pretend_bytes = current_function_pretend_args_size;
+ }
+ }
#endif
memset (&locate, 0, sizeof (locate));
locate_and_pad_parm (promoted_mode, passed_type, in_regs,
entry_parm ? partial : 0, fndecl,
&stack_args_size, &locate);
+ /* Adjust offsets to include pretend args, unless this is the
+ split arg. */
+ if (pretend_bytes == 0)
+ {
+ locate.slot_offset.constant += extra_pretend_bytes;
+ locate.offset.constant += extra_pretend_bytes;
+ }
{
rtx offset_rtx;
offset_rtx));
set_mem_attributes (stack_parm, parm, 1);
+ if (entry_parm && MEM_ATTRS (stack_parm)->align < PARM_BOUNDARY)
+ set_mem_align (stack_parm, PARM_BOUNDARY);
/* Set also REG_ATTRS if parameter was passed in a register. */
if (entry_parm)
if (partial)
{
-#ifndef MAYBE_REG_PARM_STACK_SPACE
- /* When REG_PARM_STACK_SPACE is nonzero, stack space for
- split parameters was allocated by our caller, so we
- won't be pushing it in the prolog. */
- if (reg_parm_stack_space == 0)
-#endif
- current_function_pretend_args_size
- = (((partial * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
- / (PARM_BOUNDARY / BITS_PER_UNIT)
- * (PARM_BOUNDARY / BITS_PER_UNIT));
-
/* Handle calls that pass values in multiple non-contiguous
locations. The Irix 6 ABI has examples of this. */
if (GET_CODE (entry_parm) == PARALLEL)
emit_group_store (validize_mem (stack_parm), entry_parm,
+ TREE_TYPE (parm),
int_size_in_bytes (TREE_TYPE (parm)));
else
entry_parm = stack_parm;
/* Record permanently how this parm was passed. */
- DECL_INCOMING_RTL (parm) = entry_parm;
+ set_decl_incoming_rtl (parm, entry_parm);
/* If there is actually space on the stack for this parm,
count it in stack_args_size; otherwise set stack_parm to 0
if (entry_parm == stack_parm
|| (GET_CODE (entry_parm) == PARALLEL
&& XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
-#if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
+#if defined (REG_PARM_STACK_SPACE)
/* On some machines, even if a parm value arrives in a register
- there is still an (uninitialized) stack slot allocated for it.
-
- ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
- whether this parameter already has a stack slot allocated,
- because an arg block exists only if current_function_args_size
- is larger than some threshold, and we haven't calculated that
- yet. So, for now, we just assume that stack slots never exist
- in this case. */
+ there is still an (uninitialized) stack slot allocated
+ for it. */
|| REG_PARM_STACK_SPACE (fndecl) > 0
#endif
)
{
stack_args_size.constant += locate.size.constant;
- /* locate.size doesn't include the part in regs. */
- if (partial)
- stack_args_size.constant += current_function_pretend_args_size;
if (locate.size.var)
ADD_PARM_SIZE (stack_args_size, locate.size.var);
}
&& INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
{
entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
- DECL_INCOMING_RTL (parm) = entry_parm;
+ set_decl_incoming_rtl (parm, entry_parm);
break;
}
}
Set DECL_RTL to that place. */
- if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
+ if (GET_CODE (entry_parm) == PARALLEL && nominal_mode != BLKmode
+ && XVECLEN (entry_parm, 0) > 1)
+ {
+ /* Reconstitute objects the size of a register or larger using
+ register operations instead of the stack. */
+ rtx parmreg = gen_reg_rtx (nominal_mode);
+
+ if (REG_P (parmreg))
+ {
+ unsigned int regno = REGNO (parmreg);
+
+ emit_group_store (parmreg, entry_parm, TREE_TYPE (parm),
+ int_size_in_bytes (TREE_TYPE (parm)));
+ SET_DECL_RTL (parm, parmreg);
+ loaded_in_reg = 1;
+
+ if (regno >= max_parm_reg)
+ {
+ rtx *new;
+ int old_max_parm_reg = max_parm_reg;
+
+ /* It's slow to expand this one register at a time,
+ but it's also rare and we need max_parm_reg to be
+ precisely correct. */
+ max_parm_reg = regno + 1;
+ new = ggc_realloc (parm_reg_stack_loc,
+ max_parm_reg * sizeof (rtx));
+ memset (new + old_max_parm_reg, 0,
+ (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
+ parm_reg_stack_loc = new;
+ parm_reg_stack_loc[regno] = stack_parm;
+ }
+ }
+ }
+
+ if (nominal_mode == BLKmode
+#ifdef BLOCK_REG_PADDING
+ || (locate.where_pad == (BYTES_BIG_ENDIAN ? upward : downward)
+ && GET_MODE_SIZE (promoted_mode) < UNITS_PER_WORD)
+#endif
+ || GET_CODE (entry_parm) == PARALLEL)
{
/* If a BLKmode arrives in registers, copy it to a stack slot.
Handle calls that pass values in multiple non-contiguous
locations. The Irix 6 ABI has examples of this. */
if (GET_CODE (entry_parm) == REG
- || GET_CODE (entry_parm) == PARALLEL)
+ || (GET_CODE (entry_parm) == PARALLEL
+ && (!loaded_in_reg || !optimize)))
{
int size = int_size_in_bytes (TREE_TYPE (parm));
int size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
assign_stack_local if space was not allocated in the argument
list. If it was, this will not work if PARM_BOUNDARY is not
a multiple of BITS_PER_WORD. It isn't clear how to fix this
- if it becomes a problem. */
+ if it becomes a problem. Exception is when BLKmode arrives
+ with arguments not conforming to word_mode. */
if (stack_parm == 0)
{
- stack_parm
- = assign_stack_local (GET_MODE (entry_parm),
- size_stored, 0);
+ stack_parm = assign_stack_local (BLKmode, size_stored, 0);
+ PUT_MODE (stack_parm, GET_MODE (entry_parm));
set_mem_attributes (stack_parm, parm, 1);
}
-
+ else if (GET_CODE (entry_parm) == PARALLEL
+ && GET_MODE(entry_parm) == BLKmode)
+ ;
else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
abort ();
/* Handle calls that pass values in multiple non-contiguous
locations. The Irix 6 ABI has examples of this. */
if (GET_CODE (entry_parm) == PARALLEL)
- emit_group_store (mem, entry_parm, size);
+ emit_group_store (mem, entry_parm, TREE_TYPE (parm), size);
else if (size == 0)
;
enum machine_mode mode
= mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
- if (mode != BLKmode)
+ if (mode != BLKmode
+#ifdef BLOCK_REG_PADDING
+ && (size == UNITS_PER_WORD
+ || (BLOCK_REG_PADDING (mode, TREE_TYPE (parm), 1)
+ != (BYTES_BIG_ENDIAN ? upward : downward)))
+#endif
+ )
{
rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
emit_move_insn (change_address (mem, mode, 0), reg);
to memory. Note that the previous test doesn't
handle all cases (e.g. SIZE == 3). */
else if (size != UNITS_PER_WORD
- && BYTES_BIG_ENDIAN)
+#ifdef BLOCK_REG_PADDING
+ && (BLOCK_REG_PADDING (mode, TREE_TYPE (parm), 1)
+ == downward)
+#else
+ && BYTES_BIG_ENDIAN
+#endif
+ )
{
rtx tem, x;
int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
move_block_from_reg (REGNO (entry_parm), mem,
size_stored / UNITS_PER_WORD);
}
- SET_DECL_RTL (parm, stack_parm);
+ /* If parm is already bound to register pair, don't change
+ this binding. */
+ if (! DECL_RTL_SET_P (parm))
+ SET_DECL_RTL (parm, stack_parm);
}
else if (! ((! optimize
&& ! DECL_REGISTER (parm))
/* TREE_USED gets set erroneously during expand_assignment. */
save_tree_used = TREE_USED (parm);
expand_assignment (parm,
- make_tree (nominal_type, tempreg), 0, 0);
+ make_tree (nominal_type, tempreg), 0);
TREE_USED (parm) = save_tree_used;
conversion_insns = get_insns ();
did_conversion = 1;
else if (passed_pointer
&& FUNCTION_ARG_CALLEE_COPIES (args_so_far,
- TYPE_MODE (DECL_ARG_TYPE (parm)),
- DECL_ARG_TYPE (parm),
+ TYPE_MODE (TREE_TYPE (passed_type)),
+ TREE_TYPE (passed_type),
named_arg)
- && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
+ && ! TREE_ADDRESSABLE (TREE_TYPE (passed_type)))
{
rtx copy;
- tree type = DECL_ARG_TYPE (parm);
+ tree type = TREE_TYPE (passed_type);
/* This sequence may involve a library call perhaps clobbering
registers that haven't been copied to pseudos yet. */
but it's also rare and we need max_parm_reg to be
precisely correct. */
max_parm_reg = regno + 1;
- new = (rtx *) ggc_realloc (parm_reg_stack_loc,
- max_parm_reg * sizeof (rtx));
- memset ((char *) (new + old_max_parm_reg), 0,
- (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
+ new = ggc_realloc (parm_reg_stack_loc,
+ max_parm_reg * sizeof (rtx));
+ memset (new + old_max_parm_reg, 0,
+ (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
parm_reg_stack_loc = new;
}
{
if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE)
{
- SET_DECL_RTL (parm,
- gen_rtx_CONCAT (DECL_MODE (parm),
- DECL_RTL (fnargs),
- DECL_RTL (TREE_CHAIN (fnargs))));
- DECL_INCOMING_RTL (parm)
- = gen_rtx_CONCAT (DECL_MODE (parm),
- DECL_INCOMING_RTL (fnargs),
- DECL_INCOMING_RTL (TREE_CHAIN (fnargs)));
+ rtx tmp, real, imag;
+ enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
+
+ real = DECL_RTL (fnargs);
+ imag = DECL_RTL (TREE_CHAIN (fnargs));
+ if (inner != GET_MODE (real))
+ {
+ real = gen_lowpart_SUBREG (inner, real);
+ imag = gen_lowpart_SUBREG (inner, imag);
+ }
+ tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
+ SET_DECL_RTL (parm, tmp);
+
+ real = DECL_INCOMING_RTL (fnargs);
+ imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
+ if (inner != GET_MODE (real))
+ {
+ real = gen_lowpart_SUBREG (inner, real);
+ imag = gen_lowpart_SUBREG (inner, imag);
+ }
+ tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
+ set_decl_incoming_rtl (parm, tmp);
fnargs = TREE_CHAIN (fnargs);
}
else
{
SET_DECL_RTL (parm, DECL_RTL (fnargs));
- DECL_INCOMING_RTL (parm) = DECL_INCOMING_RTL (fnargs);
+ set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
}
fnargs = TREE_CHAIN (fnargs);
}
rtx addr = DECL_RTL (function_result_decl);
rtx x;
-#ifdef POINTERS_EXTEND_UNSIGNED
- if (GET_MODE (addr) != Pmode)
- addr = convert_memory_address (Pmode, addr);
-#endif
-
+ addr = convert_memory_address (Pmode, addr);
x = gen_rtx_MEM (DECL_MODE (result), addr);
set_mem_attributes (x, result, 1);
SET_DECL_RTL (result, x);
last_parm_insn = get_last_insn ();
+ /* We have aligned all the args, so add space for the pretend args. */
+ stack_args_size.constant += extra_pretend_bytes;
current_function_args_size = stack_args_size.constant;
/* Adjust function incoming argument size for alignment and
minimum length. */
#ifdef REG_PARM_STACK_SPACE
-#ifndef MAYBE_REG_PARM_STACK_SPACE
current_function_args_size = MAX (current_function_args_size,
REG_PARM_STACK_SPACE (fndecl));
#endif
-#endif
-
-#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
current_function_args_size
= ((current_function_args_size + STACK_BYTES - 1)
that REGNO is promoted from and whether the promotion was signed or
unsigned. */
-#ifdef PROMOTE_FUNCTION_ARGS
-
rtx
promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
{
return 0;
}
-#endif
\f
/* Compute the size and offset from the start of the stacked arguments for a
parm passed in mode PASSED_MODE and with type TYPE.
int part_size_in_regs;
#ifdef REG_PARM_STACK_SPACE
-#ifdef MAYBE_REG_PARM_STACK_SPACE
- reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
-#else
reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
-#endif
/* If we have found a stack parm before we reach the end of the
area reserved for registers, skip that area. */
= type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
+ locate->where_pad = where_pad;
#ifdef ARGS_GROW_DOWNWARD
locate->slot_offset.constant = -initial_offset_ptr->constant;
{
tree save_var = NULL_TREE;
HOST_WIDE_INT save_constant = 0;
-
int boundary_in_bytes = boundary / BITS_PER_UNIT;
+ HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
+
+#ifdef SPARC_STACK_BOUNDARY_HACK
+ /* The sparc port has a bug. It sometimes claims a STACK_BOUNDARY
+ higher than the real alignment of %sp. However, when it does this,
+ the alignment of %sp+STACK_POINTER_OFFSET will be STACK_BOUNDARY.
+ This is a temporary hack while the sparc port is fixed. */
+ if (SPARC_STACK_BOUNDARY_HACK)
+ sp_offset = 0;
+#endif
if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
{
{
if (offset_ptr->var)
{
- offset_ptr->var =
+ tree sp_offset_tree = ssize_int (sp_offset);
+ tree offset = size_binop (PLUS_EXPR,
+ ARGS_SIZE_TREE (*offset_ptr),
+ sp_offset_tree);
#ifdef ARGS_GROW_DOWNWARD
- round_down
+ tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
#else
- round_up
+ tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
#endif
- (ARGS_SIZE_TREE (*offset_ptr),
- boundary / BITS_PER_UNIT);
+
+ offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
/* ARGS_SIZE_TREE includes constant term. */
offset_ptr->constant = 0;
if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
}
else
{
- offset_ptr->constant =
+ offset_ptr->constant = -sp_offset +
#ifdef ARGS_GROW_DOWNWARD
- FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
+ FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#else
- CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
+ CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#endif
if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
alignment_pad->constant = offset_ptr->constant - save_constant;
flow.c that the entire aggregate was initialized.
Unions are troublesome because members may be shorter. */
&& ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
- && DECL_RTL (decl) != 0
+ && DECL_RTL_SET_P (decl)
&& GET_CODE (DECL_RTL (decl)) == REG
/* Global optimizations can make it difficult to determine if a
particular variable has been initialized. However, a VAR_DECL
with a nonzero DECL_INITIAL had an initializer, so do not
claim it is potentially uninitialized.
- We do not care about the actual value in DECL_INITIAL, so we do
- not worry that it may be a dangling pointer. */
- && DECL_INITIAL (decl) == NULL_TREE
+ When the DECL_INITIAL is NULL call the language hook to tell us
+ if we want to warn. */
+ && (DECL_INITIAL (decl) == NULL_TREE || lang_hooks.decl_uninit (decl))
&& regno_uninitialized (REGNO (DECL_RTL (decl))))
- warning_with_decl (decl,
- "`%s' might be used uninitialized in this function");
+ warning ("%J'%D' might be used uninitialized in this function",
+ decl, decl);
if (extra_warnings
&& TREE_CODE (decl) == VAR_DECL
- && DECL_RTL (decl) != 0
+ && DECL_RTL_SET_P (decl)
&& GET_CODE (DECL_RTL (decl)) == REG
&& regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
- warning_with_decl (decl,
- "variable `%s' might be clobbered by `longjmp' or `vfork'");
+ warning ("%Jvariable '%D' might be clobbered by `longjmp' or `vfork'",
+ decl, decl);
}
for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
uninitialized_vars_warning (sub);
if (DECL_RTL (decl) != 0
&& GET_CODE (DECL_RTL (decl)) == REG
&& regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
- warning_with_decl (decl,
- "argument `%s' might be clobbered by `longjmp' or `vfork'");
+ warning ("%Jargument '%D' might be clobbered by `longjmp' or `vfork'",
+ decl, decl);
}
/* If this function call setjmp, put all vars into the stack
/* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
depth-first order. */
block_vector = get_block_vector (block, &n_blocks);
- block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
+ block_stack = xmalloc (n_blocks * sizeof (tree));
last_block_vector = identify_blocks_1 (get_insns (),
block_vector + 1,
tree *block_vector;
*n_blocks_p = all_blocks (block, NULL);
- block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
+ block_vector = xmalloc (*n_blocks_p * sizeof (tree));
all_blocks (block, block_vector);
return block_vector;
return NULL_TREE;
}
\f
-/* Allocate a function structure and reset its contents to the defaults. */
+/* Allocate a function structure for FNDECL and set its contents
+ to the defaults. */
-static void
-prepare_function_start (void)
+void
+allocate_struct_function (tree fndecl)
{
- cfun = (struct function *) ggc_alloc_cleared (sizeof (struct function));
-
- init_stmt_for_function ();
- init_eh_for_function ();
-
- cse_not_expected = ! optimize;
-
- /* Caller save not needed yet. */
- caller_save_needed = 0;
-
- /* No stack slots have been made yet. */
- stack_slot_list = 0;
-
- current_function_has_nonlocal_label = 0;
- current_function_has_nonlocal_goto = 0;
+ tree result;
- /* There is no stack slot for handling nonlocal gotos. */
- nonlocal_goto_handler_slots = 0;
- nonlocal_goto_stack_level = 0;
+ cfun = ggc_alloc_cleared (sizeof (struct function));
+ /* The allocation above is cleared, so every field of CFUN starts out
+ zero; only the non-default fields need explicit initialization. */
- /* No labels have been declared for nonlocal use. */
- nonlocal_labels = 0;
- nonlocal_goto_handler_labels = 0;
-
- /* No function calls so far in this function. */
- function_call_count = 0;
-
- /* No parm regs have been allocated.
- (This is important for output_inline_function.) */
max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
- /* Initialize the RTL mechanism. */
- init_emit ();
-
- /* Initialize the queue of pending postincrement and postdecrements,
- and some other info in expr.c. */
- init_expr ();
+ cfun->stack_alignment_needed = STACK_BOUNDARY;
+ cfun->preferred_stack_boundary = STACK_BOUNDARY;
- /* We haven't done register allocation yet. */
- reg_renumber = 0;
+ current_function_funcdef_no = funcdef_no++;
- init_varasm_status (cfun);
+ cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
- /* Clear out data used for inlining. */
- cfun->inlinable = 0;
- cfun->original_decl_initial = 0;
- cfun->original_arg_vector = 0;
+ init_stmt_for_function ();
+ init_eh_for_function ();
- cfun->stack_alignment_needed = STACK_BOUNDARY;
- cfun->preferred_stack_boundary = STACK_BOUNDARY;
+ (*lang_hooks.function.init) (cfun);
+ if (init_machine_status)
+ cfun->machine = (*init_machine_status) ();
- /* Set if a call to setjmp is seen. */
- current_function_calls_setjmp = 0;
+ /* A NULL FNDECL asks only for the bare, language- and target-initialized
+ struct function (a dummy context); there is nothing more to record. */
+ if (fndecl == NULL)
+ return;
- /* Set if a call to longjmp is seen. */
- current_function_calls_longjmp = 0;
+ DECL_STRUCT_FUNCTION (fndecl) = cfun;
+ cfun->decl = fndecl;
- current_function_calls_alloca = 0;
- current_function_calls_eh_return = 0;
- current_function_calls_constant_p = 0;
- current_function_contains_functions = 0;
- current_function_is_leaf = 0;
- current_function_nothrow = 0;
- current_function_sp_is_unchanging = 0;
- current_function_uses_only_leaf_regs = 0;
- current_function_has_computed_jump = 0;
- current_function_is_thunk = 0;
+ /* Set the flags used by final.c for functions returning aggregates. */
+ result = DECL_RESULT (fndecl);
+ if (aggregate_value_p (result, fndecl))
+ {
+#ifdef PCC_STATIC_STRUCT_RETURN
+ current_function_returns_pcc_struct = 1;
+#endif
+ current_function_returns_struct = 1;
+ }
- current_function_returns_pcc_struct = 0;
- current_function_returns_struct = 0;
- current_function_epilogue_delay_list = 0;
- current_function_uses_const_pool = 0;
- current_function_uses_pic_offset_table = 0;
- current_function_cannot_inline = 0;
+ current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));
- /* We have not yet needed to make a label to jump to for tail-recursion. */
- tail_recursion_label = 0;
+ /* Nonzero if this is a nested function that uses a static chain. */
+ current_function_needs_context
+ = (decl_function_context (current_function_decl) != 0
+ && ! DECL_NO_STATIC_CHAIN (current_function_decl));
+}
- /* We haven't had a need to make a save area for ap yet. */
- arg_pointer_save_area = 0;
+/* Reset cfun, and other non-struct-function variables to defaults as
+ appropriate for emitting rtl at the start of a function. */
- /* No stack slots allocated yet. */
- frame_offset = 0;
+static void
+prepare_function_start (tree fndecl)
+{
+ if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
+ cfun = DECL_STRUCT_FUNCTION (fndecl);
+ else
+ allocate_struct_function (fndecl);
+ init_emit ();
+ init_varasm_status (cfun);
+ init_expr ();
- /* No SAVE_EXPRs in this function yet. */
- save_expr_regs = 0;
+ cse_not_expected = ! optimize;
- /* No RTL_EXPRs in this function yet. */
- rtl_expr_chain = 0;
+ /* Caller save not needed yet. */
+ caller_save_needed = 0;
- /* Set up to allocate temporaries. */
- init_temp_slots ();
+ /* We haven't done register allocation yet. */
+ reg_renumber = 0;
/* Indicate that we need to distinguish between the return value of the
present function and the return value of a function being called. */
/* Indicate we have no need of a frame pointer yet. */
frame_pointer_needed = 0;
-
- /* By default assume not stdarg. */
- current_function_stdarg = 0;
-
- /* We haven't made any trampolines for this function yet. */
- trampoline_list = 0;
-
- init_pending_stack_adjust ();
- inhibit_defer_pop = 0;
-
- current_function_outgoing_args_size = 0;
-
- current_function_funcdef_no = funcdef_no++;
-
- cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
-
- cfun->max_jumptable_ents = 0;
-
- (*lang_hooks.function.init) (cfun);
- if (init_machine_status)
- cfun->machine = (*init_machine_status) ();
}
/* Initialize the rtl expansion mechanism so that we can do simple things
void
init_dummy_function_start (void)
{
- prepare_function_start ();
+ /* NULL: build a fresh struct function with no associated FUNCTION_DECL. */
+ prepare_function_start (NULL);
}
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
void
init_function_start (tree subr)
{
- prepare_function_start ();
-
- current_function_name = (*lang_hooks.decl_printable_name) (subr, 2);
- cfun->decl = subr;
-
- /* Nonzero if this is a nested function that uses a static chain. */
-
- current_function_needs_context
- = (decl_function_context (current_function_decl) != 0
- && ! DECL_NO_STATIC_CHAIN (current_function_decl));
+ prepare_function_start (subr);
/* Within function body, compute a type's size as soon it is laid out. */
immediate_size_expand++;
Also, final expects a note to appear there. */
emit_note (NOTE_INSN_DELETED);
- /* Set flags used by final.c. */
- if (aggregate_value_p (DECL_RESULT (subr)))
- {
-#ifdef PCC_STATIC_STRUCT_RETURN
- current_function_returns_pcc_struct = 1;
-#endif
- current_function_returns_struct = 1;
- }
-
/* Warn if this value is an aggregate type,
regardless of which calling convention we are using for it. */
if (warn_aggregate_return
&& AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
warning ("function returns an aggregate");
-
- current_function_returns_pointer
- = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
}
/* Make sure all values used by the optimization passes have sane
before any library calls that assign parms might generate. */
/* Decide whether to return the value in memory or in a register. */
- if (aggregate_value_p (DECL_RESULT (subr)))
+ if (aggregate_value_p (DECL_RESULT (subr), subr))
{
/* Returning something that won't go in a register. */
rtx value_address = 0;
else
#endif
{
+ rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 1);
/* Expect to be passed the address of a place to store the value.
If it is passed as an argument, assign_parms will take care of
it. */
- if (struct_value_incoming_rtx)
+ if (sv)
{
value_address = gen_reg_rtx (Pmode);
- emit_move_insn (value_address, struct_value_incoming_rtx);
+ emit_move_insn (value_address, sv);
}
}
if (value_address)
tem = decl_function_context (tem);
if (tem == 0)
break;
- /* Chain thru stack frames, assuming pointer to next lexical frame
+ /* Chain through stack frames, assuming pointer to next lexical frame
is found at the place we always store it. */
#ifdef FRAME_GROWS_DOWNWARD
last_ptr = plus_constant (last_ptr,
decl; decl = TREE_CHAIN (decl))
if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
&& DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
- warning_with_decl (decl, "unused parameter `%s'");
+ warning ("%Junused parameter '%D'", decl, decl);
}
/* Delete handlers for nonlocal gotos if nothing uses them. */
/* If we had calls to alloca, and this machine needs
an accurate stack pointer to exit the function,
insert some code to save and restore the stack pointer. */
-#ifdef EXIT_IGNORE_STACK
- if (! EXIT_IGNORE_STACK)
-#endif
- if (current_function_calls_alloca)
- {
- rtx tem = 0;
+ if (! EXIT_IGNORE_STACK
+ && current_function_calls_alloca)
+ {
+ rtx tem = 0;
- emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
- emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
- }
+ emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
+ emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
+ }
/* If scalar return value was computed in a pseudo-reg, or was a named
return value that got dumped to the stack, copy that to the hard
{
int unsignedp = TREE_UNSIGNED (TREE_TYPE (decl_result));
-#ifdef PROMOTE_FUNCTION_RETURN
- promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
- &unsignedp, 1);
-#endif
+ if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
+ promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
+ &unsignedp, 1);
convert_move (real_decl_rtl, decl_rtl, unsignedp);
}
emit_group_move (real_decl_rtl, decl_rtl);
else
emit_group_load (real_decl_rtl, decl_rtl,
+ TREE_TYPE (decl_result),
int_size_in_bytes (TREE_TYPE (decl_result)));
}
else
assignment and USE below when inlining this function. */
REG_FUNCTION_VALUE_P (outgoing) = 1;
-#ifdef POINTERS_EXTEND_UNSIGNED
/* The address may be ptr_mode and OUTGOING may be Pmode. */
- if (GET_MODE (outgoing) != GET_MODE (value_address))
- value_address = convert_memory_address (GET_MODE (outgoing),
- value_address);
-#endif
+ value_address = convert_memory_address (GET_MODE (outgoing),
+ value_address);
emit_move_insn (outgoing, value_address);
cfun->x_clobber_return_insn = after;
}
+ /* Output the label for the naked return from the function, if one is
+ expected. This is currently used only by __builtin_return. */
+ if (naked_return_label)
+ emit_label (naked_return_label);
+
/* ??? This should no longer be necessary since stupid is no longer with
us, but there are some parts of the compiler (eg reload_combine, and
sh mach_dep_reorg) that still try and compute their own lifetime info
}
}
-/* Set the specified locator to the insn chain. */
+/* Set the locator of the insn chain starting at INSN to LOC. */
static void
set_insn_locators (rtx insn, int loc)
{
static void
emit_return_into_block (basic_block bb, rtx line_note)
{
/* Append a `return' insn after the last insn of BB; if LINE_NOTE is
nonzero, place a copy of it immediately before the new insn. */
- emit_jump_insn_after (gen_return (), bb->end);
+ emit_jump_insn_after (gen_return (), BB_END (bb));
if (line_note)
- emit_note_copy_after (line_note, PREV_INSN (bb->end));
+ emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
}
#endif /* HAVE_return */
rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
should be set to once we no longer need
its value. */
+ rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
+ for registers. */
};
static void handle_epilogue_set (rtx, struct epi_info *);
+static void update_epilogue_consts (rtx, rtx, void *);
static void emit_equiv_load (struct epi_info *);
/* Modify INSN, a list of one or more insns that is part of the epilogue, to
struct epi_info info;
rtx insn, next;
- /* If the epilogue is just a single instruction, it ust be OK as is. */
-
+ /* If the epilogue is just a single instruction, it must be OK as is. */
if (NEXT_INSN (insns) == NULL_RTX)
return insns;
info.sp_offset = 0;
info.equiv_reg_src = 0;
+ for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
+ info.const_equiv[j] = 0;
+
insn = insns;
next = NULL_RTX;
while (insn != NULL_RTX)
&& !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
regno)
&& !refers_to_regno_p (regno,
- regno + HARD_REGNO_NREGS (regno,
- Pmode),
- info.equiv_reg_src, NULL))
+ regno + hard_regno_nregs[regno]
+ [Pmode],
+ info.equiv_reg_src, NULL)
+ && info.const_equiv[regno] == 0)
break;
if (regno == FIRST_PSEUDO_REGISTER)
info.sp_equiv_reg = info.new_sp_equiv_reg;
info.sp_offset = info.new_sp_offset;
+ /* Now update any constants this insn sets. */
+ note_stores (PATTERN (insn), update_epilogue_consts, &info);
insn = next;
}
if (SET_DEST (set) != stack_pointer_rtx)
abort ();
- if (GET_CODE (SET_SRC (set)) == PLUS
- && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
+ if (GET_CODE (SET_SRC (set)) == PLUS)
{
p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
- p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
+ if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
+ p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
+ else if (GET_CODE (XEXP (SET_SRC (set), 1)) == REG
+ && REGNO (XEXP (SET_SRC (set), 1)) < FIRST_PSEUDO_REGISTER
+ && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))] != 0)
+ p->new_sp_offset
+ = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
+ else
+ abort ();
}
else
p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
there seems little point in handling that case. Note that we have
to allow for the case where we are setting the register set in
the previous part of a PARALLEL inside a single insn. But use the
- old offset for any updates within this insn. */
+ old offset for any updates within this insn. We must allow for the case
+ where the register is being set in a different (usually wider) mode than
+ Pmode. */
else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
{
- if (!rtx_equal_p (p->new_sp_equiv_reg, SET_DEST (set))
- || p->equiv_reg_src != 0)
+ if (p->equiv_reg_src != 0
+ || GET_CODE (p->new_sp_equiv_reg) != REG
+ || GET_CODE (SET_DEST (set)) != REG
+ || GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) > BITS_PER_WORD
+ || REGNO (p->new_sp_equiv_reg) != REGNO (SET_DEST (set)))
abort ();
else
p->equiv_reg_src
}
}
+/* Update the tracking information for registers set to constants. */
+
+/* note_stores callback: DEST is the location stored to, X is the SET or
+ CLOBBER rtx doing the store, and DATA points to the struct epi_info
+ whose const_equiv table is being maintained. */
+
+static void
+update_epilogue_consts (rtx dest, rtx x, void *data)
+{
+ struct epi_info *p = (struct epi_info *) data;
+ rtx new;
+
+ /* Only hard registers are tracked in const_equiv. */
+ if (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
+ return;
+
+ /* If we are either clobbering a register or doing a partial set,
+ show we don't know the value. */
+ else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
+ p->const_equiv[REGNO (dest)] = 0;
+
+ /* If we are setting it to a constant, record that constant. */
+ else if (GET_CODE (SET_SRC (x)) == CONST_INT)
+ p->const_equiv[REGNO (dest)] = SET_SRC (x);
+
+ /* If this is a binary operation between a register we have been tracking
+ and a constant, see if we can compute a new constant value. */
+ else if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
+ || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2')
+ && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
+ && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
+ && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
+ && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
+ && 0 != (new = simplify_binary_operation
+ (GET_CODE (SET_SRC (x)), GET_MODE (dest),
+ p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
+ XEXP (SET_SRC (x), 1)))
+ && GET_CODE (new) == CONST_INT)
+ p->const_equiv[REGNO (dest)] = new;
+
+ /* Otherwise, we can't do anything with this value. */
+ else
+ p->const_equiv[REGNO (dest)] = 0;
+}
+
+
/* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
static void
emit_equiv_load (struct epi_info *p)
{
if (p->equiv_reg_src != 0)
- emit_move_insn (p->sp_equiv_reg, p->equiv_reg_src);
+ {
+ rtx dest = p->sp_equiv_reg;
+
+ /* If the source has a different mode, retarget the move to a register
+ of the source's mode with the same register number. */
+ if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
+ dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
+ REGNO (p->sp_equiv_reg));
- p->equiv_reg_src = 0;
+ emit_move_insn (dest, p->equiv_reg_src);
+ p->equiv_reg_src = 0;
+ }
}
#endif
last = e->src;
/* Verify that there are no active instructions in the last block. */
- label = last->end;
+ label = BB_END (last);
while (label && GET_CODE (label) != CODE_LABEL)
{
if (active_insn_p (label))
label = PREV_INSN (label);
}
- if (last->head == label && GET_CODE (label) == CODE_LABEL)
+ if (BB_HEAD (last) == label && GET_CODE (label) == CODE_LABEL)
{
rtx epilogue_line_note = NULL_RTX;
if (bb == ENTRY_BLOCK_PTR)
continue;
- jump = bb->end;
+ jump = BB_END (bb);
if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
continue;
/* Emit a return insn for the exit fallthru block. Whether
this is still reachable will be determined later. */
- emit_barrier_after (last->end);
+ emit_barrier_after (BB_END (last));
emit_return_into_block (last, epilogue_line_note);
- epilogue_end = last->end;
+ epilogue_end = BB_END (last);
last->succ->flags &= ~EDGE_FALLTHRU;
goto epilogue_done;
}
for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
{
basic_block bb = e->src;
- rtx insn = bb->end;
+ rtx insn = BB_END (bb);
rtx i;
rtx newinsn;
#endif
#ifdef HAVE_prologue
+ /* This is probably all useless now that we use locators. */
if (prologue_end)
{
rtx insn, prev;
}
/* Find the last line number note in the first block. */
- for (insn = ENTRY_BLOCK_PTR->next_bb->end;
+ for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
insn != prologue_end && insn;
insn = PREV_INSN (insn))
if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
}
+/* Returns the name of the current function. */
+const char *
+current_function_name (void)
+{
+ /* NOTE(review): 2 appears to be the verbosity level passed to the front
+ end's decl_printable_name hook -- confirm against langhooks.h. */
+ return (*lang_hooks.decl_printable_name) (cfun->decl, 2);
+}
+
#include "gt-function.h"