/* Expands front end tree to back end RTL for GCC.
Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
- 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
+ 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
+ Free Software Foundation, Inc.
This file is part of GCC.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA. */
/* This file handles the generation of rtl code from tree structure
at the level of the function as a whole.
#include "langhooks.h"
#include "target.h"
#include "cfglayout.h"
+#include "tree-gimple.h"
+#include "tree-pass.h"
+#include "predict.h"
#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
/* The currently compiled function. */
struct function *cfun = 0;
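+/* Instantiate the vector-of-int type and its heap allocation support,
+ used below to record prologue and epilogue INSN_UIDs. */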
+DEF_VEC_I(int);
+DEF_VEC_ALLOC_I(int,heap);
+
/* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
-static GTY(()) varray_type prologue;
-static GTY(()) varray_type epilogue;
+static VEC(int,heap) *prologue;
+static VEC(int,heap) *epilogue;
/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
in this function. */
-static GTY(()) varray_type sibcall_epilogue;
+static VEC(int,heap) *sibcall_epilogue;
\f
/* In order to evaluate some expressions, such as function calls returning
structures in memory, we need to temporarily allocate stack locations.
static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
struct function *);
static struct temp_slot *find_temp_slot_from_address (rtx);
-static void instantiate_decls (tree, int);
-static void instantiate_decls_1 (tree, int);
-static void instantiate_decl (rtx, HOST_WIDE_INT, int);
-static rtx instantiate_new_reg (rtx, HOST_WIDE_INT *);
-static int instantiate_virtual_regs_1 (rtx *, rtx, int);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
-static void reorder_blocks_1 (rtx, tree, varray_type *);
+static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static void reorder_fix_fragments (tree);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
can always export `prologue_epilogue_contains'. */
-static void record_insns (rtx, varray_type *) ATTRIBUTE_UNUSED;
-static int contains (rtx, varray_type);
+static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
+static int contains (rtx, VEC(int,heap) **);
#ifdef HAVE_return
static void emit_return_into_block (basic_block, rtx);
#endif
static void prepare_function_start (tree);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
-static void instantiate_virtual_regs_lossage (rtx);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
\f
/* Pointer to chain of `struct function' for containing functions. */
if (p->decl == decl)
return p;
- abort ();
+ gcc_unreachable ();
}
/* Save the current context for compilation of a nested function.
variables. */
void
-push_function_context_to (tree context)
+push_function_context_to (tree context ATTRIBUTE_UNUSED)
{
struct function *p;
- if (context)
- {
- if (context == current_function_decl)
- cfun->contains_functions = 1;
- else
- {
- struct function *containing = find_function_data (context);
- containing->contains_functions = 1;
- }
- }
-
if (cfun == 0)
init_dummy_function_start ();
p = cfun;
outer_function_chain = p->outer;
current_function_decl = p->decl;
- reg_renumber = 0;
-
- restore_emit_status (p);
lang_hooks.function.leave_nested (p);
void
free_after_compilation (struct function *f)
{
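+ /* Release the INSN_UID vectors recorded for the prologue, epilogue
+ and sibcall epilogues; they are no longer needed once compilation
+ of the function is complete. */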
+ VEC_free (int, heap, prologue);
+ VEC_free (int, heap, epilogue);
+ VEC_free (int, heap, sibcall_epilogue);
+
f->eh = NULL;
f->expr = NULL;
f->emit = NULL;
f->varasm = NULL;
f->machine = NULL;
+ f->cfg = NULL;
f->x_avail_temp_slots = NULL;
f->x_used_temp_slots = NULL;
This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
the caller may have to do that. */
-HOST_WIDE_INT
+static HOST_WIDE_INT
get_func_frame_size (struct function *f)
{
-#ifdef FRAME_GROWS_DOWNWARD
- return -f->x_frame_offset;
-#else
- return f->x_frame_offset;
-#endif
+ if (FRAME_GROWS_DOWNWARD)
+ return -f->x_frame_offset;
+ else
+ return f->x_frame_offset;
}
/* Return size needed for stack frame based on slots so far allocated.
else
alignment = align / BITS_PER_UNIT;
-#ifdef FRAME_GROWS_DOWNWARD
- function->x_frame_offset -= size;
-#endif
+ if (FRAME_GROWS_DOWNWARD)
+ function->x_frame_offset -= size;
/* Ignore alignment we can't do with expected alignment of the boundary. */
if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
division with a negative dividend isn't as well defined as we might
like. So we instead assume that ALIGNMENT is a power of two and
use logical operations which are unambiguous. */
-#ifdef FRAME_GROWS_DOWNWARD
- function->x_frame_offset
- = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
- (unsigned HOST_WIDE_INT) alignment)
- + frame_phase);
-#else
- function->x_frame_offset
- = (CEIL_ROUND (function->x_frame_offset - frame_phase,
- (unsigned HOST_WIDE_INT) alignment)
- + frame_phase);
-#endif
+ if (FRAME_GROWS_DOWNWARD)
+ function->x_frame_offset
+ = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
+ (unsigned HOST_WIDE_INT) alignment)
+ + frame_phase);
+ else
+ function->x_frame_offset
+ = (CEIL_ROUND (function->x_frame_offset - frame_phase,
+ (unsigned HOST_WIDE_INT) alignment)
+ + frame_phase);
}
/* On a big-endian machine, if we are allocating more space than we will use,
use the least significant bytes of those that are allocated. */
- if (BYTES_BIG_ENDIAN && mode != BLKmode)
+ if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
bigend_correction = size - GET_MODE_SIZE (mode);
/* If we have already instantiated virtual registers, return the actual
(function->x_frame_offset + bigend_correction,
Pmode));
-#ifndef FRAME_GROWS_DOWNWARD
- function->x_frame_offset += size;
-#endif
+ if (!FRAME_GROWS_DOWNWARD)
+ function->x_frame_offset += size;
x = gen_rtx_MEM (mode, addr);
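+ /* A reference to a fixed stack slot can never trap; say so. */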
+ MEM_NOTRAP_P (x) = 1;
function->x_stack_slot_list
= gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
+ /* Try to detect frame size overflows on native platforms. */
+#if BITS_PER_WORD >= 32
+ if ((FRAME_GROWS_DOWNWARD
+ ? (unsigned HOST_WIDE_INT) -function->x_frame_offset
+ : (unsigned HOST_WIDE_INT) function->x_frame_offset)
+ > ((unsigned HOST_WIDE_INT) 1 << (BITS_PER_WORD - 1))
+ /* Leave room for the fixed part of the frame. */
+ - 64 * UNITS_PER_WORD)
+ {
+ error ("%Jtotal size of local objects too large", function->decl);
+ /* Avoid duplicate error messages as much as possible. */
+ function->x_frame_offset = 0;
+ }
+#endif
+
return x;
}
static struct temp_slot **
temp_slots_at_level (int level)
{
- level++;
if (!used_temp_slots)
VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots");
free_temp_slots. Automatic variables for a block are allocated
with this flag. KEEP values of 2 or 3 were needed respectively
for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
- or for SAVE_EXPRs, but they are now unused and will abort.
+ or for SAVE_EXPRs, but they are now unused.
TYPE is the type that will be used for the stack slot. */
rtx
-assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size, int keep,
- tree type)
+assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
+ int keep, tree type)
{
unsigned int align;
struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
/* If SIZE is -1 it means that somebody tried to allocate a temporary
of a variable size. */
- if (size == -1)
- abort ();
+ gcc_assert (size != -1);
/* These are now unused. */
- if (keep > 1)
- abort ();
+ gcc_assert (keep <= 1);
if (mode == BLKmode)
align = BIGGEST_ALIGNMENT;
p->size = best_p->size - rounded_size;
p->base_offset = best_p->base_offset + rounded_size;
p->full_size = best_p->full_size - rounded_size;
- p->slot = gen_rtx_MEM (BLKmode,
- plus_constant (XEXP (best_p->slot, 0),
- rounded_size));
+ p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
p->align = best_p->align;
p->address = 0;
p->type = best_p->type;
So for requests which depended on the rounding of SIZE, we go ahead
and round it now. We also make sure ALIGNMENT is at least
BIGGEST_ALIGNMENT. */
- if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
- abort ();
+ gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
p->slot = assign_stack_local (mode,
(mode == BLKmode
? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
can be either above or below this stack slot depending on which
way the frame grows. We include the extra space if and only if it
is above this slot. */
-#ifdef FRAME_GROWS_DOWNWARD
- p->size = frame_offset_old - frame_offset;
-#else
- p->size = size;
-#endif
+ if (FRAME_GROWS_DOWNWARD)
+ p->size = frame_offset_old - frame_offset;
+ else
+ p->size = size;
/* Now define the fields used by combine_temp_slots. */
-#ifdef FRAME_GROWS_DOWNWARD
- p->base_offset = frame_offset;
- p->full_size = frame_offset_old - frame_offset;
-#else
- p->base_offset = frame_offset_old;
- p->full_size = frame_offset - frame_offset_old;
-#endif
+ if (FRAME_GROWS_DOWNWARD)
+ {
+ p->base_offset = frame_offset;
+ p->full_size = frame_offset_old - frame_offset;
+ }
+ else
+ {
+ p->base_offset = frame_offset_old;
+ p->full_size = frame_offset - frame_offset_old;
+ }
p->address = 0;
selected = p;
/* If a type is specified, set the relevant flags. */
if (type != 0)
{
- RTX_UNCHANGING_P (slot) = (lang_hooks.honor_readonly
- && TYPE_READONLY (type));
MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
}
+ MEM_NOTRAP_P (slot) = 1;
return slot;
}
/* The size of the temporary may be too large to fit into an integer. */
/* ??? Not sure this should happen except for user silliness, so limit
this to things that aren't compiler-generated temporaries. The
- rest of the time we'll abort in assign_stack_temp_for_type. */
+ rest of the time we'll die in assign_stack_temp_for_type. */
if (decl && size == -1
&& TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
{
- error ("%Jsize of variable '%D' is too large", decl, decl);
+ error ("size of variable %q+D is too large", decl);
size = 1;
}
done for BLKmode slots because we can be sure that we won't have alignment
problems in this case. */
-void
+static void
combine_temp_slots (void)
{
struct temp_slot *p, *q, *next, *next_q;
#endif
#endif
-/* On most machines, the CFA coincides with the first incoming parm. */
-
-#ifndef ARG_POINTER_CFA_OFFSET
-#define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
-#endif
-
\f
-/* Pass through the INSNS of function FNDECL and convert virtual register
- references to hard register references. */
+/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
+ is a virtual register, return the equivalent hard register and set the
+ offset indirectly through the pointer. Otherwise, return 0. */
-void
-instantiate_virtual_regs (void)
+static rtx
+instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
- rtx insn;
-
- /* Compute the offsets to use for this function. */
- in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
- var_offset = STARTING_FRAME_OFFSET;
- dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
- out_arg_offset = STACK_POINTER_OFFSET;
- cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
-
- /* Scan all variables and parameters of this function. For each that is
- in memory, instantiate all virtual registers if the result is a valid
- address. If not, we do it later. That will handle most uses of virtual
- regs on many machines. */
- instantiate_decls (current_function_decl, 1);
-
- /* Initialize recognition, indicating that volatile is OK. */
- init_recog ();
-
- /* Scan through all the insns, instantiating every virtual register still
- present. */
- for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
- || GET_CODE (insn) == CALL_INSN)
- {
- instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
- if (INSN_DELETED_P (insn))
- continue;
- instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
- /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
- if (GET_CODE (insn) == CALL_INSN)
- instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
- NULL_RTX, 0);
-
- /* Past this point all ASM statements should match. Verify that
- to avoid failures later in the compilation process. */
- if (asm_noperands (PATTERN (insn)) >= 0
- && ! check_asm_operands (PATTERN (insn)))
- instantiate_virtual_regs_lossage (insn);
- }
+ rtx new;
+ HOST_WIDE_INT offset;
- /* Now instantiate the remaining register equivalences for debugging info.
- These will not be valid addresses. */
- instantiate_decls (current_function_decl, 0);
+ if (x == virtual_incoming_args_rtx)
+ new = arg_pointer_rtx, offset = in_arg_offset;
+ else if (x == virtual_stack_vars_rtx)
+ new = frame_pointer_rtx, offset = var_offset;
+ else if (x == virtual_stack_dynamic_rtx)
+ new = stack_pointer_rtx, offset = dynamic_offset;
+ else if (x == virtual_outgoing_args_rtx)
+ new = stack_pointer_rtx, offset = out_arg_offset;
+ else if (x == virtual_cfa_rtx)
+ {
+#ifdef FRAME_POINTER_CFA_OFFSET
+ new = frame_pointer_rtx;
+#else
+ new = arg_pointer_rtx;
+#endif
+ offset = cfa_offset;
+ }
+ else
+ return NULL_RTX;
- /* Indicate that, from now on, assign_stack_local should use
- frame_pointer_rtx. */
- virtuals_instantiated = 1;
+ *poffset = offset;
+ return new;
}
-/* Scan all decls in FNDECL (both variables and parameters) and instantiate
- all virtual registers in their DECL_RTL's.
-
- If VALID_ONLY, do this only if the resulting address is still valid.
- Otherwise, always do it. */
+/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
+ Instantiate any virtual registers present inside of *LOC. The expression
+ is simplified, as much as possible, but is not to be considered "valid"
+ in any sense implied by the target. If any change is made, set CHANGED
+ to true. */
-static void
-instantiate_decls (tree fndecl, int valid_only)
+static int
+instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
- tree decl;
+ HOST_WIDE_INT offset;
+ bool *changed = (bool *) data;
+ rtx x, new;
- /* Process all parameters of the function. */
- for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
+ x = *loc;
+ if (x == 0)
+ return 0;
+
+ switch (GET_CODE (x))
{
- HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
- HOST_WIDE_INT size_rtl;
+ case REG:
+ new = instantiate_new_reg (x, &offset);
+ if (new)
+ {
+ *loc = plus_constant (new, offset);
+ if (changed)
+ *changed = true;
+ }
+ return -1;
+
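+ /* A PLUS whose first operand is a virtual register is the common
+ shape of a frame address; fold the register's offset into the
+ constant term and let the simplifier do the rest. */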
+ case PLUS:
+ new = instantiate_new_reg (XEXP (x, 0), &offset);
+ if (new)
+ {
+ new = plus_constant (new, offset);
+ *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
+ if (changed)
+ *changed = true;
+ return -1;
+ }
- instantiate_decl (DECL_RTL (decl), size, valid_only);
+ /* FIXME -- from old code */
+ /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
+ we can commute the PLUS and SUBREG because pointers into the
+ frame are well-behaved. */
+ break;
- /* If the parameter was promoted, then the incoming RTL mode may be
- larger than the declared type size. We must use the larger of
- the two sizes. */
- size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
- size = MAX (size_rtl, size);
- instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
+ default:
+ break;
}
- /* Now process all variables defined in the function or its subblocks. */
- instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
+ return 0;
}
-/* Subroutine of instantiate_decls: Process all decls in the given
- BLOCK node and all its subblocks. */
+/* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
+ matches the predicate for insn CODE operand OPERAND. */
-static void
-instantiate_decls_1 (tree let, int valid_only)
+static int
+safe_insn_predicate (int code, int operand, rtx x)
{
- tree t;
+ const struct insn_operand_data *op_data;
- for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
- if (DECL_RTL_SET_P (t))
- instantiate_decl (DECL_RTL (t),
- int_size_in_bytes (TREE_TYPE (t)),
- valid_only);
+ if (code < 0)
+ return true;
- /* Process all subblocks. */
- for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
- instantiate_decls_1 (t, valid_only);
-}
+ op_data = &insn_data[code].operand[operand];
+ if (op_data->predicate == NULL)
+ return true;
-/* Subroutine of the preceding procedures: Given RTL representing a
- decl and the size of the object, do any instantiation required.
+ return op_data->predicate (x, op_data->mode);
+}
- If VALID_ONLY is nonzero, it means that the RTL should only be
- changed if the new address is valid. */
+/* A subroutine of instantiate_virtual_regs. Instantiate any virtual
+ registers present inside of insn. The result will be a valid insn. */
static void
-instantiate_decl (rtx x, HOST_WIDE_INT size, int valid_only)
+instantiate_virtual_regs_in_insn (rtx insn)
{
- enum machine_mode mode;
- rtx addr;
-
- /* If this is not a MEM, no need to do anything. Similarly if the
- address is a constant or a register that is not a virtual register. */
-
- if (x == 0 || !MEM_P (x))
- return;
+ HOST_WIDE_INT offset;
+ int insn_code, i;
+ bool any_change = false;
+ rtx set, new, x, seq;
- addr = XEXP (x, 0);
- if (CONSTANT_P (addr)
- || (REG_P (addr)
- && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
- || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
- return;
+ /* There are some special cases to be handled first. */
+ set = single_set (insn);
+ if (set)
+ {
+ /* We're allowed to assign to a virtual register. This is interpreted
+ to mean that the underlying register gets assigned the inverse
+ transformation. This is used, for example, in the handling of
+ non-local gotos. */
+ new = instantiate_new_reg (SET_DEST (set), &offset);
+ if (new)
+ {
+ start_sequence ();
- /* If we should only do this if the address is valid, copy the address.
- We need to do this so we can undo any changes that might make the
- address invalid. This copy is unfortunate, but probably can't be
- avoided. */
+ for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
+ x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
+ GEN_INT (-offset));
+ x = force_operand (x, new);
+ if (x != new)
+ emit_move_insn (new, x);
- if (valid_only)
- addr = copy_rtx (addr);
+ seq = get_insns ();
+ end_sequence ();
- instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
+ emit_insn_before (seq, insn);
+ delete_insn (insn);
+ return;
+ }
- if (valid_only && size >= 0)
- {
- unsigned HOST_WIDE_INT decl_size = size;
+ /* Handle a straight copy from a virtual register by generating a
+ new add insn. The difference between this and falling through
+ to the generic case is avoiding a new pseudo and eliminating a
+ move insn in the initial rtl stream. */
+ new = instantiate_new_reg (SET_SRC (set), &offset);
+ if (new && offset != 0
+ && REG_P (SET_DEST (set))
+ && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
+ {
+ start_sequence ();
- /* Now verify that the resulting address is valid for every integer or
- floating-point mode up to and including SIZE bytes long. We do this
- since the object might be accessed in any mode and frame addresses
- are shared. */
+ x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
+ new, GEN_INT (offset), SET_DEST (set),
+ 1, OPTAB_LIB_WIDEN);
+ if (x != SET_DEST (set))
+ emit_move_insn (SET_DEST (set), x);
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
- mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
- mode = GET_MODE_WIDER_MODE (mode))
- if (! memory_address_p (mode, addr))
- return;
+ seq = get_insns ();
+ end_sequence ();
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
- mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
- mode = GET_MODE_WIDER_MODE (mode))
- if (! memory_address_p (mode, addr))
+ emit_insn_before (seq, insn);
+ delete_insn (insn);
return;
- }
+ }
- /* Put back the address now that we have updated it and we either know
- it is valid or we don't care whether it is valid. */
+ extract_insn (insn);
+ insn_code = INSN_CODE (insn);
+
+ /* Handle a plus involving a virtual register by determining if the
+ operands remain valid if they're modified in place. */
+ if (GET_CODE (SET_SRC (set)) == PLUS
+ && recog_data.n_operands >= 3
+ && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
+ && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
+ && GET_CODE (recog_data.operand[2]) == CONST_INT
+ && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
+ {
+ offset += INTVAL (recog_data.operand[2]);
- XEXP (x, 0) = addr;
-}
-\f
-/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
- is a virtual register, return the equivalent hard register and set the
- offset indirectly through the pointer. Otherwise, return 0. */
+ /* If the sum is zero, then replace with a plain move. */
+ if (offset == 0
+ && REG_P (SET_DEST (set))
+ && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
+ {
+ start_sequence ();
+ emit_move_insn (SET_DEST (set), new);
+ seq = get_insns ();
+ end_sequence ();
-static rtx
-instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
-{
- rtx new;
- HOST_WIDE_INT offset;
+ emit_insn_before (seq, insn);
+ delete_insn (insn);
+ return;
+ }
- if (x == virtual_incoming_args_rtx)
- new = arg_pointer_rtx, offset = in_arg_offset;
- else if (x == virtual_stack_vars_rtx)
- new = frame_pointer_rtx, offset = var_offset;
- else if (x == virtual_stack_dynamic_rtx)
- new = stack_pointer_rtx, offset = dynamic_offset;
- else if (x == virtual_outgoing_args_rtx)
- new = stack_pointer_rtx, offset = out_arg_offset;
- else if (x == virtual_cfa_rtx)
- new = arg_pointer_rtx, offset = cfa_offset;
- else
- return 0;
+ x = gen_int_mode (offset, recog_data.operand_mode[2]);
- *poffset = offset;
- return new;
-}
-\f
+ /* Using validate_change and apply_change_group here leaves
+ recog_data in an invalid state. Since we know exactly what
+ we want to check, do those two by hand. */
+ if (safe_insn_predicate (insn_code, 1, new)
+ && safe_insn_predicate (insn_code, 2, x))
+ {
+ *recog_data.operand_loc[1] = recog_data.operand[1] = new;
+ *recog_data.operand_loc[2] = recog_data.operand[2] = x;
+ any_change = true;
-/* Called when instantiate_virtual_regs has failed to update the instruction.
- Usually this means that non-matching instruction has been emit, however for
- asm statements it may be the problem in the constraints. */
-static void
-instantiate_virtual_regs_lossage (rtx insn)
-{
- if (asm_noperands (PATTERN (insn)) >= 0)
- {
- error_for_asm (insn, "impossible constraint in `asm'");
- delete_insn (insn);
+ /* Fall through into the regular operand fixup loop in
+ order to take care of operands other than 1 and 2. */
+ }
+ }
}
else
- abort ();
-}
-/* Given a pointer to a piece of rtx and an optional pointer to the
- containing object, instantiate any virtual registers present in it.
+ {
+ extract_insn (insn);
+ insn_code = INSN_CODE (insn);
+ }
- If EXTRA_INSNS, we always do the replacement and generate
- any extra insns before OBJECT. If it zero, we do nothing if replacement
- is not valid.
+ /* In the general case, we expect virtual registers to appear only in
+ operands, and then only as either bare registers or inside memories. */
+ for (i = 0; i < recog_data.n_operands; ++i)
+ {
+ x = recog_data.operand[i];
+ switch (GET_CODE (x))
+ {
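+ /* For a memory operand, instantiate virtuals within the address;
+ if anything changed, rebuild the MEM around a validated copy of
+ the new address, emitting any fixup insns before INSN. */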
+ case MEM:
+ {
+ rtx addr = XEXP (x, 0);
+ bool changed = false;
- Return 1 if we either had nothing to do or if we were able to do the
- needed replacement. Return 0 otherwise; we only return zero if
- EXTRA_INSNS is zero.
+ for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
+ if (!changed)
+ continue;
- We first try some simple transformations to avoid the creation of extra
- pseudos. */
+ start_sequence ();
+ x = replace_equiv_address (x, addr);
+ seq = get_insns ();
+ end_sequence ();
+ if (seq)
+ emit_insn_before (seq, insn);
+ }
+ break;
-static int
-instantiate_virtual_regs_1 (rtx *loc, rtx object, int extra_insns)
-{
- rtx x;
- RTX_CODE code;
- rtx new = 0;
- HOST_WIDE_INT offset = 0;
- rtx temp;
- rtx seq;
- int i, j;
- const char *fmt;
+ case REG:
+ new = instantiate_new_reg (x, &offset);
+ if (new == NULL)
+ continue;
+ if (offset == 0)
+ x = new;
+ else
+ {
+ start_sequence ();
- /* Re-start here to avoid recursion in common cases. */
- restart:
+ /* Careful, special mode predicates may have stuff in
+ insn_data[insn_code].operand[i].mode that isn't useful
+ to us for computing a new value. */
+ /* ??? Recognize address_operand and/or "p" constraints
+ to see if (plus new offset) is valid before we put
+ this through expand_simple_binop. */
+ x = expand_simple_binop (GET_MODE (x), PLUS, new,
+ GEN_INT (offset), NULL_RTX,
+ 1, OPTAB_LIB_WIDEN);
+ seq = get_insns ();
+ end_sequence ();
+ emit_insn_before (seq, insn);
+ }
+ break;
- x = *loc;
- if (x == 0)
- return 1;
+ case SUBREG:
+ new = instantiate_new_reg (SUBREG_REG (x), &offset);
+ if (new == NULL)
+ continue;
+ if (offset != 0)
+ {
+ start_sequence ();
+ new = expand_simple_binop (GET_MODE (new), PLUS, new,
+ GEN_INT (offset), NULL_RTX,
+ 1, OPTAB_LIB_WIDEN);
+ seq = get_insns ();
+ end_sequence ();
+ emit_insn_before (seq, insn);
+ }
+ x = simplify_gen_subreg (recog_data.operand_mode[i], new,
+ GET_MODE (new), SUBREG_BYTE (x));
+ break;
- /* We may have detected and deleted invalid asm statements. */
- if (object && INSN_P (object) && INSN_DELETED_P (object))
- return 1;
+ default:
+ continue;
+ }
- code = GET_CODE (x);
+ /* At this point, X contains the new value for the operand.
+ Validate the new value vs the insn predicate. Note that
+ asm insns will have insn_code -1 here. */
+ if (!safe_insn_predicate (insn_code, i, x))
+ x = force_reg (insn_data[insn_code].operand[i].mode, x);
- /* Check for some special cases. */
- switch (code)
- {
- case CONST_INT:
- case CONST_DOUBLE:
- case CONST_VECTOR:
- case CONST:
- case SYMBOL_REF:
- case CODE_LABEL:
- case PC:
- case CC0:
- case ASM_INPUT:
- case ADDR_VEC:
- case ADDR_DIFF_VEC:
- case RETURN:
- return 1;
+ *recog_data.operand_loc[i] = recog_data.operand[i] = x;
+ any_change = true;
+ }
- case SET:
- /* We are allowed to set the virtual registers. This means that
- the actual register should receive the source minus the
- appropriate offset. This is used, for example, in the handling
- of non-local gotos. */
- if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
- {
- rtx src = SET_SRC (x);
+ if (any_change)
+ {
+ /* Propagate operand changes into the duplicates. */
+ for (i = 0; i < recog_data.n_dups; ++i)
+ *recog_data.dup_loc[i]
+ = recog_data.operand[(unsigned)recog_data.dup_num[i]];
- /* We are setting the register, not using it, so the relevant
- offset is the negative of the offset to use were we using
- the register. */
- offset = - offset;
- instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
+ /* Force re-recognition of the instruction for validation. */
+ INSN_CODE (insn) = -1;
+ }
- /* The only valid sources here are PLUS or REG. Just do
- the simplest possible thing to handle them. */
- if (!REG_P (src) && GET_CODE (src) != PLUS)
- {
- instantiate_virtual_regs_lossage (object);
- return 1;
- }
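+ /* Finally, re-validate the insn. A failure on an asm means the
+ user's constraints were impossible; a failure on anything else
+ indicates a bug in the substitutions above. */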
+ if (asm_noperands (PATTERN (insn)) >= 0)
+ {
+ if (!check_asm_operands (PATTERN (insn)))
+ {
+ error_for_asm (insn, "impossible constraint in %<asm%>");
+ delete_insn (insn);
+ }
+ }
+ else
+ {
+ if (recog_memoized (insn) < 0)
+ fatal_insn_not_found (insn);
+ }
+}
- start_sequence ();
- if (!REG_P (src))
- temp = force_operand (src, NULL_RTX);
- else
- temp = src;
- temp = force_operand (plus_constant (temp, offset), NULL_RTX);
- seq = get_insns ();
- end_sequence ();
+/* Subroutine of instantiate_decls. Given RTL representing a decl,
+ do any instantiation required. */
- emit_insn_before (seq, object);
- SET_DEST (x) = new;
+static void
+instantiate_decl (rtx x)
+{
+ rtx addr;
- if (! validate_change (object, &SET_SRC (x), temp, 0)
- || ! extra_insns)
- instantiate_virtual_regs_lossage (object);
+ if (x == 0)
+ return;
- return 1;
- }
+ /* If this is a CONCAT, recurse for the pieces. */
+ if (GET_CODE (x) == CONCAT)
+ {
+ instantiate_decl (XEXP (x, 0));
+ instantiate_decl (XEXP (x, 1));
+ return;
+ }
- instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
- loc = &SET_SRC (x);
- goto restart;
+ /* If this is not a MEM, no need to do anything. Similarly if the
+ address is a constant or a register that is not a virtual register. */
+ if (!MEM_P (x))
+ return;
- case PLUS:
- /* Handle special case of virtual register plus constant. */
- if (CONSTANT_P (XEXP (x, 1)))
- {
- rtx old, new_offset;
+ addr = XEXP (x, 0);
+ if (CONSTANT_P (addr)
+ || (REG_P (addr)
+ && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
+ || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
+ return;
- /* Check for (plus (plus VIRT foo) (const_int)) first. */
- if (GET_CODE (XEXP (x, 0)) == PLUS)
- {
- if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
- {
- instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
- extra_insns);
- new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
- }
- else
- {
- loc = &XEXP (x, 0);
- goto restart;
- }
- }
+ for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
+}
-#ifdef POINTERS_EXTEND_UNSIGNED
- /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
- we can commute the PLUS and SUBREG because pointers into the
- frame are well-behaved. */
- else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
- && GET_CODE (XEXP (x, 1)) == CONST_INT
- && 0 != (new
- = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
- &offset))
- && validate_change (object, loc,
- plus_constant (gen_lowpart (ptr_mode,
- new),
- offset
- + INTVAL (XEXP (x, 1))),
- 0))
- return 1;
-#endif
- else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
- {
- /* We know the second operand is a constant. Unless the
- first operand is a REG (which has been already checked),
- it needs to be checked. */
- if (!REG_P (XEXP (x, 0)))
- {
- loc = &XEXP (x, 0);
- goto restart;
- }
- return 1;
- }
+/* Subroutine of instantiate_decls: Process all decls in the given
+ BLOCK node and all its subblocks. */
- new_offset = plus_constant (XEXP (x, 1), offset);
+static void
+instantiate_decls_1 (tree let)
+{
+ tree t;
- /* If the new constant is zero, try to replace the sum with just
- the register. */
- if (new_offset == const0_rtx
- && validate_change (object, loc, new, 0))
- return 1;
+ for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
+ if (DECL_RTL_SET_P (t))
+ instantiate_decl (DECL_RTL (t));
- /* Next try to replace the register and new offset.
- There are two changes to validate here and we can't assume that
- in the case of old offset equals new just changing the register
- will yield a valid insn. In the interests of a little efficiency,
- however, we only call validate change once (we don't queue up the
- changes and then call apply_change_group). */
+ /* Process all subblocks. */
+ for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
+ instantiate_decls_1 (t);
+}
- old = XEXP (x, 0);
- if (offset == 0
- ? ! validate_change (object, &XEXP (x, 0), new, 0)
- : (XEXP (x, 0) = new,
- ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
- {
- if (! extra_insns)
- {
- XEXP (x, 0) = old;
- return 0;
- }
+/* Scan all decls in FNDECL (both variables and parameters) and instantiate
+ all virtual registers in their DECL_RTL's. */
- /* Otherwise copy the new constant into a register and replace
- constant with that register. */
- temp = gen_reg_rtx (Pmode);
- XEXP (x, 0) = new;
- if (validate_change (object, &XEXP (x, 1), temp, 0))
- emit_insn_before (gen_move_insn (temp, new_offset), object);
- else
- {
- /* If that didn't work, replace this expression with a
- register containing the sum. */
+static void
+instantiate_decls (tree fndecl)
+{
+ tree decl;
- XEXP (x, 0) = old;
- new = gen_rtx_PLUS (Pmode, new, new_offset);
+ /* Process all parameters of the function. */
+ for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
+ {
+ instantiate_decl (DECL_RTL (decl));
+ instantiate_decl (DECL_INCOMING_RTL (decl));
+ }
- start_sequence ();
- temp = force_operand (new, NULL_RTX);
- seq = get_insns ();
- end_sequence ();
+ /* Now process all variables defined in the function or its subblocks. */
+ instantiate_decls_1 (DECL_INITIAL (fndecl));
+}
- emit_insn_before (seq, object);
- if (! validate_change (object, loc, temp, 0)
- && ! validate_replace_rtx (x, temp, object))
- {
- instantiate_virtual_regs_lossage (object);
- return 1;
- }
- }
- }
+/* Pass through the INSNS of function FNDECL and convert virtual register
+ references to hard register references. */
- return 1;
- }
+void
+instantiate_virtual_regs (void)
+{
+ rtx insn;
- /* Fall through to generic two-operand expression case. */
- case EXPR_LIST:
- case CALL:
- case COMPARE:
- case MINUS:
- case MULT:
- case DIV: case UDIV:
- case MOD: case UMOD:
- case AND: case IOR: case XOR:
- case ROTATERT: case ROTATE:
- case ASHIFTRT: case LSHIFTRT: case ASHIFT:
- case NE: case EQ:
- case GE: case GT: case GEU: case GTU:
- case LE: case LT: case LEU: case LTU:
- if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
- instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
- loc = &XEXP (x, 0);
- goto restart;
-
- case MEM:
- /* Most cases of MEM that convert to valid addresses have already been
- handled by our scan of decls. The only special handling we
- need here is to make a copy of the rtx to ensure it isn't being
- shared if we have to change it to a pseudo.
-
- If the rtx is a simple reference to an address via a virtual register,
- it can potentially be shared. In such cases, first try to make it
- a valid address, which can also be shared. Otherwise, copy it and
- proceed normally.
-
- First check for common cases that need no processing. These are
- usually due to instantiation already being done on a previous instance
- of a shared rtx. */
-
- temp = XEXP (x, 0);
- if (CONSTANT_ADDRESS_P (temp)
-#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
- || temp == arg_pointer_rtx
-#endif
-#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
- || temp == hard_frame_pointer_rtx
-#endif
- || temp == frame_pointer_rtx)
- return 1;
-
- if (GET_CODE (temp) == PLUS
- && CONSTANT_ADDRESS_P (XEXP (temp, 1))
- && (XEXP (temp, 0) == frame_pointer_rtx
-#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
- || XEXP (temp, 0) == hard_frame_pointer_rtx
-#endif
-#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
- || XEXP (temp, 0) == arg_pointer_rtx
+ /* Compute the offsets to use for this function. */
+ in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
+ var_offset = STARTING_FRAME_OFFSET;
+ dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
+ out_arg_offset = STACK_POINTER_OFFSET;
+#ifdef FRAME_POINTER_CFA_OFFSET
+ cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
+#else
+ cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif
- ))
- return 1;
-
- if (temp == virtual_stack_vars_rtx
- || temp == virtual_incoming_args_rtx
- || (GET_CODE (temp) == PLUS
- && CONSTANT_ADDRESS_P (XEXP (temp, 1))
- && (XEXP (temp, 0) == virtual_stack_vars_rtx
- || XEXP (temp, 0) == virtual_incoming_args_rtx)))
- {
- /* This MEM may be shared. If the substitution can be done without
- the need to generate new pseudos, we want to do it in place
- so all copies of the shared rtx benefit. The call below will
- only make substitutions if the resulting address is still
- valid.
-
- Note that we cannot pass X as the object in the recursive call
- since the insn being processed may not allow all valid
- addresses. However, if we were not passed on object, we can
- only modify X without copying it if X will have a valid
- address.
-
- ??? Also note that this can still lose if OBJECT is an insn that
- has less restrictions on an address that some other insn.
- In that case, we will modify the shared address. This case
- doesn't seem very likely, though. One case where this could
- happen is in the case of a USE or CLOBBER reference, but we
- take care of that below. */
-
- if (instantiate_virtual_regs_1 (&XEXP (x, 0),
- object ? object : x, 0))
- return 1;
-
- /* Otherwise make a copy and process that copy. We copy the entire
- RTL expression since it might be a PLUS which could also be
- shared. */
- *loc = x = copy_rtx (x);
- }
-
- /* Fall through to generic unary operation case. */
- case PREFETCH:
- case SUBREG:
- case STRICT_LOW_PART:
- case NEG: case NOT:
- case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
- case SIGN_EXTEND: case ZERO_EXTEND:
- case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
- case FLOAT: case FIX:
- case UNSIGNED_FIX: case UNSIGNED_FLOAT:
- case ABS:
- case SQRT:
- case FFS:
- case CLZ: case CTZ:
- case POPCOUNT: case PARITY:
- /* These case either have just one operand or we know that we need not
- check the rest of the operands. */
- loc = &XEXP (x, 0);
- goto restart;
-
- case USE:
- case CLOBBER:
- /* If the operand is a MEM, see if the change is a valid MEM. If not,
- go ahead and make the invalid one, but do it to a copy. For a REG,
- just make the recursive call, since there's no chance of a problem. */
-
- if ((MEM_P (XEXP (x, 0))
- && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
- 0))
- || (REG_P (XEXP (x, 0))
- && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
- return 1;
-
- XEXP (x, 0) = copy_rtx (XEXP (x, 0));
- loc = &XEXP (x, 0);
- goto restart;
- case REG:
- /* Try to replace with a PLUS. If that doesn't work, compute the sum
- in front of this insn and substitute the temporary. */
- if ((new = instantiate_new_reg (x, &offset)) != 0)
- {
- temp = plus_constant (new, offset);
- if (!validate_change (object, loc, temp, 0))
- {
- if (! extra_insns)
- return 0;
+ /* Initialize recognition, indicating that volatile is OK. */
+ init_recog ();
- start_sequence ();
- temp = force_operand (temp, NULL_RTX);
- seq = get_insns ();
- end_sequence ();
+ /* Scan through all the insns, instantiating every virtual register still
+ present. */
+ for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
+ if (INSN_P (insn))
+ {
+ /* These patterns in the instruction stream can never be recognized.
+ Fortunately, they shouldn't contain virtual registers either. */
+ if (GET_CODE (PATTERN (insn)) == USE
+ || GET_CODE (PATTERN (insn)) == CLOBBER
+ || GET_CODE (PATTERN (insn)) == ADDR_VEC
+ || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
+ || GET_CODE (PATTERN (insn)) == ASM_INPUT)
+ continue;
- emit_insn_before (seq, object);
- if (! validate_change (object, loc, temp, 0)
- && ! validate_replace_rtx (x, temp, object))
- instantiate_virtual_regs_lossage (object);
- }
- }
+ instantiate_virtual_regs_in_insn (insn);
- return 1;
+ if (INSN_DELETED_P (insn))
+ continue;
- default:
- break;
- }
+ for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
- /* Scan all subexpressions. */
- fmt = GET_RTX_FORMAT (code);
- for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
- if (*fmt == 'e')
- {
- if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
- return 0;
+ /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
+ if (GET_CODE (insn) == CALL_INSN)
+ for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
+ instantiate_virtual_regs_in_rtx, NULL);
}
- else if (*fmt == 'E')
- for (j = 0; j < XVECLEN (x, i); j++)
- if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
- extra_insns))
- return 0;
- return 1;
+ /* Instantiate the virtual registers in the DECLs for debugging purposes. */
+ instantiate_decls (current_function_decl);
+
+ /* Indicate that, from now on, assign_stack_local should use
+ frame_pointer_rtx. */
+ virtuals_instantiated = 1;
}
+
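+/* Glue for the pass manager: run virtual register instantiation as
+ a separate RTL pass and dump the resulting insns. */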
+struct tree_opt_pass pass_instantiate_virtual_regs =
+{
+ "vregs", /* name */
+ NULL, /* gate */
+ instantiate_virtual_regs, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ 0, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func, /* todo_flags_finish */
+ 0 /* letter */
+};
+
\f
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
This means a type for which function calls must pass an address to the
break;
default:
/* We don't expect other rtl types here. */
- abort();
+ gcc_unreachable ();
}
if (TREE_CODE (type) == VOID_TYPE)
return 1;
/* Make sure we have suitable call-clobbered regs to return
the value in; if not, we must return it in memory. */
- reg = hard_function_value (type, 0, 0);
+ reg = hard_function_value (type, 0, fntype, 0);
/* If we have something other than a REG (e.g. a PARALLEL), then assume
it is OK. */
if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
return false;
- /* Compiler-generated temporaries can always go in registers. */
- if (DECL_ARTIFICIAL (decl))
+ /* If we're not interested in tracking debugging information for
+ this decl, then we can certainly put it in a register. */
+ if (DECL_IGNORED_P (decl))
return true;
-#ifdef NON_SAVING_SETJMP
- /* Protect variables not declared "register" from setjmp. */
- if (NON_SAVING_SETJMP
- && current_function_calls_setjmp
- && !DECL_REGISTER (decl))
- return false;
-#endif
-
return (optimize || DECL_REGISTER (decl));
}
return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
}
+/* Return true if TYPE, which is passed by reference, should be callee
+ copied instead of caller copied. */
+
+bool
+reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
+ tree type, bool named_arg)
+{
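+ /* A TREE_ADDRESSABLE type cannot be copied implicitly (think of a
+ C++ class with a nontrivial copy constructor), so the callee must
+ never be the one to make the copy. */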
+ if (type && TREE_ADDRESSABLE (type))
+ return false;
+ return targetm.calls.callee_copies (ca, mode, type, named_arg);
+}
+
/* Structures to communicate between the subroutines of assign_parms.
The first holds data persistent across all parameters, the second
is cleared out for each parameter. */
struct locate_and_pad_arg_data locate;
int partial;
BOOL_BITFIELD named_arg : 1;
- BOOL_BITFIELD last_named : 1;
BOOL_BITFIELD passed_pointer : 1;
BOOL_BITFIELD on_stack : 1;
BOOL_BITFIELD loaded_in_reg : 1;
{
tree decl;
tree subtype = TREE_TYPE (type);
+ bool addressable = TREE_ADDRESSABLE (p);
/* Rewrite the PARM_DECL's type with its component. */
TREE_TYPE (p) = subtype;
DECL_MODE (p) = VOIDmode;
DECL_SIZE (p) = NULL;
DECL_SIZE_UNIT (p) = NULL;
+ /* If this arg must go in memory, put it in a pseudo here.
+ We can't allow it to go in memory as per normal parms,
+ because the usual place might not have the imag part
+ adjacent to the real part. */
+ DECL_ARTIFICIAL (p) = addressable;
+ DECL_IGNORED_P (p) = addressable;
+ TREE_ADDRESSABLE (p) = 0;
layout_decl (p, 0);
/* Build a second synthetic decl. */
decl = build_decl (PARM_DECL, NULL_TREE, subtype);
DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
+ DECL_ARTIFICIAL (decl) = addressable;
+ DECL_IGNORED_P (decl) = addressable;
layout_decl (decl, 0);
/* Splice it in; skip the new decl. */
decl = build_decl (PARM_DECL, NULL_TREE, type);
DECL_ARG_TYPE (decl) = type;
DECL_ARTIFICIAL (decl) = 1;
+ DECL_IGNORED_P (decl) = 1;
TREE_CHAIN (decl) = fnargs;
fnargs = decl;
memset (data, 0, sizeof (*data));
- /* Set LAST_NAMED if this is last named arg before last anonymous args. */
- if (current_function_stdarg)
- {
- tree tem;
- for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
- if (DECL_NAME (tem))
- break;
- if (tem == 0)
- data->last_named = true;
- }
-
- /* Set NAMED_ARG if this arg should be treated as a named arg. For
- most machines, if this is a varargs/stdarg function, then we treat
- the last named arg as if it were anonymous too. */
- if (targetm.calls.strict_argument_naming (&all->args_so_far))
- data->named_arg = 1;
+ /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
+ if (!current_function_stdarg)
+ data->named_arg = 1; /* No variadic parms. */
+ else if (TREE_CHAIN (parm))
+ data->named_arg = 1; /* Not the last non-variadic parm. */
+ else if (targetm.calls.strict_argument_naming (&all->args_so_far))
+ data->named_arg = 1; /* Only variadic ones are unnamed. */
else
- data->named_arg = !data->last_named;
+ data->named_arg = 0; /* Treat as variadic. */
nominal_type = TREE_TYPE (parm);
passed_type = DECL_ARG_TYPE (parm);
/* If the parm is to be passed as a transparent union, use the type of
the first field for the tests below. We have already verified that
the modes are the same. */
- if (DECL_TRANSPARENT_UNION (parm)
- || (TREE_CODE (passed_type) == UNION_TYPE
- && TYPE_TRANSPARENT_UNION (passed_type)))
+ if (TREE_CODE (passed_type) == UNION_TYPE
+ && TYPE_TRANSPARENT_UNION (passed_type))
passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
/* See if this arg was passed by invisible reference. */
{
int partial;
- partial = FUNCTION_ARG_PARTIAL_NREGS (all->args_so_far,
- data->promoted_mode,
- data->passed_type,
- data->named_arg);
+ partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
+ data->promoted_mode,
+ data->passed_type,
+ data->named_arg);
data->partial = partial;
/* The caller might already have allocated stack space for the
/* We assume at most one partial arg, and it must be the first
argument on the stack. */
- if (all->extra_pretend_bytes || all->pretend_args_size)
- abort ();
+ gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
- pretend_bytes = partial * UNITS_PER_WORD;
+ pretend_bytes = partial;
all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
/* We want to align relative to the actual stack pointer, so
assign_parm_is_stack_parm (struct assign_parm_data_all *all,
struct assign_parm_data_one *data)
{
- /* Trivially true if we've no incomming register. */
+ /* Trivially true if we've no incoming register. */
if (data->entry_parm == NULL)
;
/* Also true if we're partially in registers and partially not,
set_mem_attributes (stack_parm, parm, 1);
- boundary = FUNCTION_ARG_BOUNDARY (data->promoted_mode, data->passed_type);
- align = 0;
+ boundary = data->locate.boundary;
+ align = BITS_PER_UNIT;
/* If we're padding upward, we know that the alignment of the slot
is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
intentionally forcing upward padding. Otherwise we have to come
up with a guess at the alignment based on OFFSET_RTX. */
- if (data->locate.where_pad == upward || data->entry_parm)
+ if (data->locate.where_pad != downward || data->entry_parm)
align = boundary;
else if (GET_CODE (offset_rtx) == CONST_INT)
{
align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
align = align & -align;
}
- if (align > 0)
- set_mem_align (stack_parm, align);
+ set_mem_align (stack_parm, align);
if (data->entry_parm)
set_reg_attrs_for_parm (data->entry_parm, stack_parm);
data->passed_type,
int_size_in_bytes (data->passed_type));
else
- move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
- data->partial);
+ {
+ gcc_assert (data->partial % UNITS_PER_WORD == 0);
+ move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
+ data->partial / UNITS_PER_WORD);
+ }
entry_parm = stack_parm;
}
/* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
always valid and properly aligned. */
-
static void
assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
{
/* If we can't trust the parm stack slot to be aligned enough for its
ultimate type, don't use that slot after entry. We'll make another
stack slot, if we need one. */
- if (STRICT_ALIGNMENT && stack_parm
- && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
+ if (stack_parm
+ && ((STRICT_ALIGNMENT
+ && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
+ || (data->nominal_type
+ && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
+ && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
stack_parm = NULL;
/* If parm was passed in memory, and we need to convert it on entry,
&& data->nominal_mode != data->passed_mode)
stack_parm = NULL;
+ /* If stack protection is in effect for this function, don't leave any
+ pointers in their passed stack slots. */
+ else if (cfun->stack_protect_guard
+ && (flag_stack_protect == 2
+ || data->passed_pointer
+ || POINTER_TYPE_P (data->nominal_type)))
+ stack_parm = NULL;
+
data->stack_parm = stack_parm;
}
return true;
#ifdef BLOCK_REG_PADDING
- if (data->locate.where_pad == (BYTES_BIG_ENDIAN ? upward : downward)
- && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD)
+ /* Only assign_parm_setup_block knows how to deal with register arguments
+ that are padded at the least significant end. */
+ if (REG_P (data->entry_parm)
+ && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
+ && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
+ == (BYTES_BIG_ENDIAN ? upward : downward)))
return true;
#endif
present and valid in DATA->STACK_RTL. */
static void
-assign_parm_setup_block (tree parm, struct assign_parm_data_one *data)
+assign_parm_setup_block (struct assign_parm_data_all *all,
+ tree parm, struct assign_parm_data_one *data)
{
rtx entry_parm = data->entry_parm;
rtx stack_parm = data->stack_parm;
+ HOST_WIDE_INT size;
+ HOST_WIDE_INT size_stored;
+ rtx orig_entry_parm = entry_parm;
+
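+ /* If the value arrives in pieces, move them into pseudos first;
+ the incoming hard registers may be clobbered by insns emitted
+ while assembling the final value. */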
+ if (GET_CODE (entry_parm) == PARALLEL)
+ entry_parm = emit_group_move_into_temps (entry_parm);
/* If we've a non-block object that's nevertheless passed in parts,
reconstitute it in register operations rather than on the stack. */
if (GET_CODE (entry_parm) == PARALLEL
- && data->nominal_mode != BLKmode
- && XVECLEN (entry_parm, 0) > 1
- && optimize)
+ && data->nominal_mode != BLKmode)
{
- rtx parmreg = gen_reg_rtx (data->nominal_mode);
+ rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);
- emit_group_store (parmreg, entry_parm, data->nominal_type,
- int_size_in_bytes (data->nominal_type));
- SET_DECL_RTL (parm, parmreg);
- return;
+ if ((XVECLEN (entry_parm, 0) > 1
+ || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
+ && use_register_for_decl (parm))
+ {
+ rtx parmreg = gen_reg_rtx (data->nominal_mode);
+
+ push_to_sequence (all->conversion_insns);
+
+ /* For values returned in multiple registers, handle possible
+ incompatible calls to emit_group_store.
+
+ For example, the following would be invalid, and would have to
+ be fixed by the conditional below:
+
+ emit_group_store ((reg:SF), (parallel:DF))
+ emit_group_store ((reg:SI), (parallel:DI))
+
+ An example of this are doubles in e500 v2:
+ (parallel:DF (expr_list (reg:SI) (const_int 0))
+ (expr_list (reg:SI) (const_int 4))). */
+ if (data->nominal_mode != data->passed_mode)
+ {
+ rtx t = gen_reg_rtx (GET_MODE (entry_parm));
+ emit_group_store (t, entry_parm, NULL_TREE,
+ GET_MODE_SIZE (GET_MODE (entry_parm)));
+ convert_move (parmreg, t, 0);
+ }
+ else
+ emit_group_store (parmreg, entry_parm, data->nominal_type,
+ int_size_in_bytes (data->nominal_type));
+
+ all->conversion_insns = get_insns ();
+ end_sequence ();
+
+ SET_DECL_RTL (parm, parmreg);
+ return;
+ }
+ }
+
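+ /* Compute the argument's size and the word-rounded size of the
+ stack slot that will hold it. */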
+ size = int_size_in_bytes (data->passed_type);
+ size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
+ if (stack_parm == 0)
+ {
+ DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
+ stack_parm = assign_stack_local (BLKmode, size_stored,
+ DECL_ALIGN (parm));
+ if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
+ PUT_MODE (stack_parm, GET_MODE (entry_parm));
+ set_mem_attributes (stack_parm, parm, 1);
}
/* If a BLKmode arrives in registers, copy it to a stack slot. Handle
calls that pass values in multiple non-contiguous locations. */
if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
{
- HOST_WIDE_INT size = int_size_in_bytes (data->passed_type);
- HOST_WIDE_INT size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
rtx mem;
/* Note that we will be storing an integral number of words.
So we have to be careful to ensure that we allocate an
- integral number of words. We do this below in the
+ integral number of words. We do this above when we call
assign_stack_local if space was not allocated in the argument
list. If it was, this will not work if PARM_BOUNDARY is not
a multiple of BITS_PER_WORD. It isn't clear how to fix this
if it becomes a problem. Exception is when BLKmode arrives
with arguments not conforming to word_mode. */
- if (stack_parm == 0)
- {
- stack_parm = assign_stack_local (BLKmode, size_stored, 0);
- data->stack_parm = stack_parm;
- PUT_MODE (stack_parm, GET_MODE (entry_parm));
- set_mem_attributes (stack_parm, parm, 1);
- }
+ if (data->stack_parm == 0)
+ ;
else if (GET_CODE (entry_parm) == PARALLEL)
;
- else if (size != 0 && PARM_BOUNDARY % BITS_PER_WORD != 0)
- abort ();
+ else
+ gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
mem = validize_mem (stack_parm);
/* Handle values in multiple non-contiguous locations. */
if (GET_CODE (entry_parm) == PARALLEL)
- emit_group_store (mem, entry_parm, data->passed_type, size);
+ {
+ push_to_sequence (all->conversion_insns);
+ emit_group_store (mem, entry_parm, data->passed_type, size);
+ all->conversion_insns = get_insns ();
+ end_sequence ();
+ }
else if (size == 0)
;
{
rtx tem, x;
int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
- rtx reg = gen_rtx_REG (word_mode, REGNO (data->entry_parm));
+ rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
x = expand_shift (LSHIFT_EXPR, word_mode, reg,
- build_int_2 (by, 0), NULL_RTX, 1);
+ build_int_cst (NULL_TREE, by),
+ NULL_RTX, 1);
tem = change_address (mem, word_mode, 0);
emit_move_insn (tem, x);
}
else
- move_block_from_reg (REGNO (data->entry_parm), mem,
+ move_block_from_reg (REGNO (entry_parm), mem,
size_stored / UNITS_PER_WORD);
}
else
- move_block_from_reg (REGNO (data->entry_parm), mem,
+ move_block_from_reg (REGNO (entry_parm), mem,
size_stored / UNITS_PER_WORD);
}
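+ /* Otherwise the argument arrived in memory; if we had to allocate
+ the stack slot ourselves above, copy the incoming value into it. */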
+ else if (data->stack_parm == 0)
+ {
+ push_to_sequence (all->conversion_insns);
+ emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
+ BLOCK_OP_NORMAL);
+ all->conversion_insns = get_insns ();
+ end_sequence ();
+ }
+ data->stack_parm = stack_parm;
SET_DECL_RTL (parm, stack_parm);
}
SET_DECL_RTL (parm, x);
}
else
- {
- SET_DECL_RTL (parm, parmreg);
- maybe_set_unchanging (DECL_RTL (parm), parm);
- }
+ SET_DECL_RTL (parm, parmreg);
/* Copy the value into the register. */
if (data->nominal_mode != data->passed_mode
/* TREE_USED gets set erroneously during expand_assignment. */
save_tree_used = TREE_USED (parm);
- expand_assignment (parm, make_tree (data->nominal_type, tempreg), 0);
+ expand_assignment (parm, make_tree (data->nominal_type, tempreg));
TREE_USED (parm) = save_tree_used;
all->conversion_insns = get_insns ();
end_sequence ();
emit_move_insn (tempreg, DECL_RTL (parm));
tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
emit_move_insn (parmreg, tempreg);
- all->conversion_insns = get_insns();
+ all->conversion_insns = get_insns ();
end_sequence ();
did_conversion = true;
data->stack_parm = NULL;
}
- /* If we are passed an arg by reference and it is our responsibility
- to make a copy, do it now.
- PASSED_TYPE and PASSED mode now refer to the pointer, not the
- original argument, so we must recreate them in the call to
- FUNCTION_ARG_CALLEE_COPIES. */
- /* ??? Later add code to handle the case that if the argument isn't
- modified, don't do the copy. */
-
- else if (data->passed_pointer)
- {
- tree type = TREE_TYPE (data->passed_type);
-
- if (FUNCTION_ARG_CALLEE_COPIES (all->args_so_far, TYPE_MODE (type),
- type, data->named_arg)
- && !TREE_ADDRESSABLE (type))
- {
- rtx copy;
-
- /* This sequence may involve a library call perhaps clobbering
- registers that haven't been copied to pseudos yet. */
-
- push_to_sequence (all->conversion_insns);
-
- if (!COMPLETE_TYPE_P (type)
- || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
- {
- /* This is a variable sized object. */
- copy = allocate_dynamic_stack_space (expr_size (parm), NULL_RTX,
- TYPE_ALIGN (type));
- copy = gen_rtx_MEM (BLKmode, copy);
- }
- else
- copy = assign_stack_temp (TYPE_MODE (type),
- int_size_in_bytes (type), 1);
- set_mem_attributes (copy, parm, 1);
-
- store_expr (parm, copy, 0);
- emit_move_insn (parmreg, XEXP (copy, 0));
- all->conversion_insns = get_insns ();
- end_sequence ();
-
- did_conversion = true;
- }
- }
-
/* Mark the register as eliminable if we did no conversion and it was
copied from memory at a fixed offset, and the arg pointer was not
copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
{
enum machine_mode submode
= GET_MODE_INNER (GET_MODE (parmreg));
- int regnor = REGNO (gen_realpart (submode, parmreg));
- int regnoi = REGNO (gen_imagpart (submode, parmreg));
- rtx stackr = gen_realpart (submode, data->stack_parm);
- rtx stacki = gen_imagpart (submode, data->stack_parm);
+ int regnor = REGNO (XEXP (parmreg, 0));
+ int regnoi = REGNO (XEXP (parmreg, 1));
+ rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
+ rtx stacki = adjust_address_nv (data->stack_parm, submode,
+ GET_MODE_SIZE (submode));
/* Scan backwards for the set of the real and
imaginary parts. */
{
/* Value must be stored in the stack slot STACK_PARM during function
execution. */
+ bool to_conversion = false;
if (data->promoted_mode != data->nominal_mode)
{
emit_move_insn (tempreg, validize_mem (data->entry_parm));
push_to_sequence (all->conversion_insns);
+ to_conversion = true;
+
data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
TYPE_UNSIGNED (TREE_TYPE (parm)));
/* ??? This may need a big-endian conversion on sparc64. */
data->stack_parm
= adjust_address (data->stack_parm, data->nominal_mode, 0);
-
- all->conversion_insns = get_insns ();
- end_sequence ();
}
if (data->entry_parm != data->stack_parm)
{
+ rtx src, dest;
+
if (data->stack_parm == 0)
{
data->stack_parm
= assign_stack_local (GET_MODE (data->entry_parm),
GET_MODE_SIZE (GET_MODE (data->entry_parm)),
- 0);
+ TYPE_ALIGN (data->passed_type));
set_mem_attributes (data->stack_parm, parm, 1);
}
- if (data->promoted_mode != data->nominal_mode)
+ dest = validize_mem (data->stack_parm);
+ src = validize_mem (data->entry_parm);
+
+ if (MEM_P (src))
{
- push_to_sequence (all->conversion_insns);
- emit_move_insn (validize_mem (data->stack_parm),
- validize_mem (data->entry_parm));
- all->conversion_insns = get_insns ();
- end_sequence ();
+ /* Use a block move to handle potentially misaligned entry_parm. */
+ if (!to_conversion)
+ push_to_sequence (all->conversion_insns);
+ to_conversion = true;
+
+ emit_block_move (dest, src,
+ GEN_INT (int_size_in_bytes (data->passed_type)),
+ BLOCK_OP_NORMAL);
}
else
- emit_move_insn (validize_mem (data->stack_parm),
- validize_mem (data->entry_parm));
+ emit_move_insn (dest, src);
+ }
+
+ if (to_conversion)
+ {
+ all->conversion_insns = get_insns ();
+ end_sequence ();
}
SET_DECL_RTL (parm, data->stack_parm);
undo the frobbing that we did in assign_parms_augmented_arg_list. */
static void
-assign_parms_unsplit_complex (tree orig_fnargs, tree fnargs)
+assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
{
tree parm;
+ tree orig_fnargs = all->orig_fnargs;
for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
{
real = gen_lowpart_SUBREG (inner, real);
imag = gen_lowpart_SUBREG (inner, imag);
}
- tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
+
+ if (TREE_ADDRESSABLE (parm))
+ {
+ rtx rmem, imem;
+ HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
+
+ /* split_complex_arg put the real and imag parts in
+ pseudos. Move them to memory. */
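+
+ /* Editor's sketch, not part of this change: for an address-taken
+ parameter such as
+
+ void f (_Complex double z) { g (&z); }
+
+ a CONCAT of two pseudos has no address, so the halves are
+ spilled into one stack slot laid out as { real, imag }, which
+ then becomes DECL_RTL (parm). */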
+ tmp = assign_stack_local (DECL_MODE (parm), size,
+ TYPE_ALIGN (TREE_TYPE (parm)));
+ set_mem_attributes (tmp, parm, 1);
+ rmem = adjust_address_nv (tmp, inner, 0);
+ imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
+ push_to_sequence (all->conversion_insns);
+ emit_move_insn (rmem, real);
+ emit_move_insn (imem, imag);
+ all->conversion_insns = get_insns ();
+ end_sequence ();
+ }
+ else
+ tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
SET_DECL_RTL (parm, tmp);
real = DECL_INCOMING_RTL (fnargs);
/* Assign RTL expressions to the function's parameters. This may involve
copying them into registers and using those registers as the DECL_RTL. */
-void
+static void
assign_parms (tree fndecl)
{
struct assign_parm_data_all all;
tree fnargs, parm;
- rtx internal_arg_pointer;
- int varargs_setup = 0;
-
- /* If the reg that the virtual arg pointer will be translated into is
- not a fixed reg or is the stack pointer, make a copy of the virtual
- arg pointer, and address parms via the copy. The frame pointer is
- considered fixed even though it is not marked as such.
-
- The second time through, simply use ap to avoid generating rtx. */
- if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
- || ! (fixed_regs[ARG_POINTER_REGNUM]
- || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
- internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
- else
- internal_arg_pointer = virtual_incoming_args_rtx;
- current_function_internal_arg_pointer = internal_arg_pointer;
+ current_function_internal_arg_pointer
+ = targetm.calls.internal_arg_pointer ();
assign_parms_initialize_all (&all);
fnargs = assign_parms_augmented_arg_list (&all);
continue;
}
- /* Handle stdargs. LAST_NAMED is a slight mis-nomer; it's also true
- for the unnamed dummy argument following the last named argument.
- See ABI silliness wrt strict_argument_naming and NAMED_ARG. So
- we only want to do this when we get to the actual last named
- argument, which will be the first time LAST_NAMED gets set. */
- if (data.last_named && !varargs_setup)
- {
- varargs_setup = true;
- assign_parms_setup_varargs (&all, &data, false);
- }
+ if (current_function_stdarg && !TREE_CHAIN (parm))
+ assign_parms_setup_varargs (&all, &data, false);
/* Find out where the parameter arrives in this function. */
assign_parm_find_entry_rtl (&all, &data);
assign_parm_adjust_stack_rtl (&data);
if (assign_parm_setup_block_p (&data))
- assign_parm_setup_block (parm, &data);
+ assign_parm_setup_block (&all, parm, &data);
else if (data.passed_pointer || use_register_for_decl (parm))
assign_parm_setup_reg (&all, parm, &data);
else
}
if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
- assign_parms_unsplit_complex (all.orig_fnargs, fnargs);
+ assign_parms_unsplit_complex (&all, fnargs);
/* Output all parameter conversion instructions (possibly including calls)
now that all parameters have been copied out of hard registers. */
REG_PARM_STACK_SPACE (fndecl));
#endif
- current_function_args_size
- = ((current_function_args_size + STACK_BYTES - 1)
- / STACK_BYTES) * STACK_BYTES;
+ current_function_args_size = CEIL_ROUND (current_function_args_size,
+ PARM_BOUNDARY / BITS_PER_UNIT);
#ifdef ARGS_GROW_DOWNWARD
current_function_arg_offset_rtx
{
rtx real_decl_rtl;
-#ifdef FUNCTION_OUTGOING_VALUE
- real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
- fndecl);
-#else
- real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
- fndecl);
-#endif
+ real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
+ fndecl, true);
REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
/* The delay slot scheduler assumes that current_function_return_rtx
holds the hard register containing the return value, not a
}
}
}
+
+/* A subroutine of gimplify_parameters, invoked via walk_tree.
+ For all seen types, gimplify their sizes. */
+
+static tree
+gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
+{
+ tree t = *tp;
+
+ *walk_subtrees = 0;
+ if (TYPE_P (t))
+ {
+ if (POINTER_TYPE_P (t))
+ *walk_subtrees = 1;
+ else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
+ && !TYPE_SIZES_GIMPLIFIED (t))
+ {
+ gimplify_type_sizes (t, (tree *) data);
+ *walk_subtrees = 1;
+ }
+ }
+
+ return NULL;
+}
+
+/* Gimplify the parameter list for current_function_decl. This involves
+ evaluating SAVE_EXPRs of variable sized parameters and generating code
+ to implement callee-copies reference parameters. Returns a list of
+ statements to add to the beginning of the function, or NULL if nothing
+ to do. */
+
+tree
+gimplify_parameters (void)
+{
+ struct assign_parm_data_all all;
+ tree fnargs, parm, stmts = NULL;
+
+ assign_parms_initialize_all (&all);
+ fnargs = assign_parms_augmented_arg_list (&all);
+
+ for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
+ {
+ struct assign_parm_data_one data;
+
+ /* Extract the type of PARM; adjust it according to ABI. */
+ assign_parm_find_data_types (&all, parm, &data);
+
+ /* Early out for errors and void parameters. */
+ if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
+ continue;
+
+ /* Update info on where next arg arrives in registers. */
+ FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
+ data.passed_type, data.named_arg);
+
+ /* ??? Once upon a time variable_size stuffed parameter list
+ SAVE_EXPRs (amongst others) onto a pending sizes list. This
+ turned out to be less than manageable in the gimple world.
+ Now we have to hunt them down ourselves. */
+ walk_tree_without_duplicates (&data.passed_type,
+ gimplify_parm_type, &stmts);
+
+ if (!TREE_CONSTANT (DECL_SIZE (parm)))
+ {
+ gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
+ gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
+ }
+
+ if (data.passed_pointer)
+ {
+ tree type = TREE_TYPE (data.passed_type);
+ if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
+ type, data.named_arg))
+ {
+ tree local, t;
+
+ /* For constant sized objects, this is trivial; for
+ variable-sized objects, we have to play games. */
+ if (TREE_CONSTANT (DECL_SIZE (parm)))
+ {
+ local = create_tmp_var (type, get_name (parm));
+ DECL_IGNORED_P (local) = 0;
+ }
+ else
+ {
+ tree ptr_type, addr, args;
+
+ ptr_type = build_pointer_type (type);
+ addr = create_tmp_var (ptr_type, get_name (parm));
+ DECL_IGNORED_P (addr) = 0;
+ local = build_fold_indirect_ref (addr);
+
+ args = tree_cons (NULL, DECL_SIZE_UNIT (parm), NULL);
+ t = built_in_decls[BUILT_IN_ALLOCA];
+ t = build_function_call_expr (t, args);
+ t = fold_convert (ptr_type, t);
+ t = build2 (MODIFY_EXPR, void_type_node, addr, t);
+ gimplify_and_add (t, &stmts);
+ }
+
+ t = build2 (MODIFY_EXPR, void_type_node, local, parm);
+ gimplify_and_add (t, &stmts);
+
+ SET_DECL_VALUE_EXPR (parm, local);
+ DECL_HAS_VALUE_EXPR_P (parm) = 1;
+ }
+ }
+ }
+
+ return stmts;
+}
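+
+/* Editor's sketch of the callee-copy case above, with hypothetical
+ names: for a variable-sized reference parameter PARM the loop emits,
+ in GIMPLE terms, roughly
+
+ addr = __builtin_alloca (DECL_SIZE_UNIT (parm));
+ *addr = *parm;
+
+ and rewrites uses of PARM to the local copy via DECL_VALUE_EXPR;
+ constant-sized parameters get a plain temporary instead. */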
\f
/* Indicate whether REGNO is an incoming argument to the current function
that was promoted to a wider mode. If so, return the RTX for the
{
tree sizetree;
enum direction where_pad;
- int boundary;
+ unsigned int boundary;
int reg_parm_stack_space = 0;
int part_size_in_regs;
}
#endif /* REG_PARM_STACK_SPACE */
- part_size_in_regs = 0;
- if (reg_parm_stack_space == 0)
- part_size_in_regs = ((partial * UNITS_PER_WORD)
- / (PARM_BOUNDARY / BITS_PER_UNIT)
- * (PARM_BOUNDARY / BITS_PER_UNIT));
+ part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
sizetree
= type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
locate->where_pad = where_pad;
+ locate->boundary = boundary;
+
+ /* Remember if the outgoing parameter requires extra alignment on the
+ calling function side. */
+ if (boundary > PREFERRED_STACK_BOUNDARY)
+ boundary = PREFERRED_STACK_BOUNDARY;
+ if (cfun->stack_alignment_needed < boundary)
+ cfun->stack_alignment_needed = boundary;
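+
+ /* Editor's example: a parameter whose FUNCTION_ARG_BOUNDARY is 128
+ on a target with a 64-bit PREFERRED_STACK_BOUNDARY raises the
+ recorded need only to 64, since the caller cannot promise more
+ alignment than the stack itself guarantees. */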
#ifdef ARGS_GROW_DOWNWARD
locate->slot_offset.constant = -initial_offset_ptr->constant;
HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
#ifdef SPARC_STACK_BOUNDARY_HACK
- /* The sparc port has a bug. It sometimes claims a STACK_BOUNDARY
- higher than the real alignment of %sp. However, when it does this,
- the alignment of %sp+STACK_POINTER_OFFSET will be STACK_BOUNDARY.
- This is a temporary hack while the sparc port is fixed. */
+ /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
+ the real alignment of %sp. However, when it does this, the
+ alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
if (SPARC_STACK_BOUNDARY_HACK)
sp_offset = 0;
#endif
&& DECL_RTL_SET_P (decl)
&& REG_P (DECL_RTL (decl))
&& regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
- warning ("%Jvariable '%D' might be clobbered by `longjmp' or `vfork'",
- decl, decl);
+ warning (0, "variable %q+D might be clobbered by %<longjmp%>"
+ " or %<vfork%>",
+ decl);
}
for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
if (DECL_RTL (decl) != 0
&& REG_P (DECL_RTL (decl))
&& regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
- warning ("%Jargument '%D' might be clobbered by `longjmp' or `vfork'",
- decl, decl);
+ warning (0, "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
+ decl);
}
\f
reorder_blocks (void)
{
tree block = DECL_INITIAL (current_function_decl);
- varray_type block_stack;
+ VEC(tree,heap) *block_stack;
if (block == NULL_TREE)
return;
- VARRAY_TREE_INIT (block_stack, 10, "block_stack");
+ block_stack = VEC_alloc (tree, heap, 10);
/* Reset the TREE_ASM_WRITTEN bit for all blocks. */
clear_block_marks (block);
/* Remove deleted blocks from the block fragment chains. */
reorder_fix_fragments (block);
+
+ VEC_free (tree, heap, block_stack);
}
/* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
}
static void
-reorder_blocks_1 (rtx insns, tree current_block, varray_type *p_block_stack)
+reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
{
rtx insn;
BLOCK_SUBBLOCKS (current_block) = block;
current_block = block;
}
- VARRAY_PUSH_TREE (*p_block_stack, block);
+ VEC_safe_push (tree, heap, *p_block_stack, block);
}
else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
{
- NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
- VARRAY_POP (*p_block_stack);
+ NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
BLOCK_SUBBLOCKS (current_block)
= blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
current_block = BLOCK_SUPERCONTEXT (current_block);
&& TYPE_ARG_TYPES (fntype) != 0
&& (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
!= void_type_node));
+
+ /* Assume all registers in stdarg functions need to be saved. */
+ cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
+ cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
}
/* Reset cfun, and other non-struct-function variables to defaults as
/* Warn if this value is an aggregate type,
regardless of which calling convention we are using for it. */
- if (warn_aggregate_return
- && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
- warning ("function returns an aggregate");
+ if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
+ warning (OPT_Waggregate_return, "function returns an aggregate");
}
/* Make sure all values used by the optimization passes have sane
{
reg_renumber = 0;
- /* No prologue/epilogue insns yet. */
- VARRAY_GROW (prologue, 0);
- VARRAY_GROW (epilogue, 0);
- VARRAY_GROW (sibcall_epilogue, 0);
+ /* No prologue/epilogue insns yet. Make sure that these vectors are
+ empty. */
+ gcc_assert (VEC_length (int, prologue) == 0);
+ gcc_assert (VEC_length (int, epilogue) == 0);
+ gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
}
-/* Expand a call to __main at the beginning of a possible main function. */
+struct tree_opt_pass pass_init_function =
+{
+ NULL, /* name */
+ NULL, /* gate */
+ init_function_for_compilation, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ 0, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ 0, /* todo_flags_finish */
+ 0 /* letter */
+};
-#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
-#undef HAS_INIT_SECTION
-#define HAS_INIT_SECTION
-#endif
void
expand_main_function (void)
{
-#ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
- if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
- {
- int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
- rtx tmp, seq;
-
- start_sequence ();
- /* Forcibly align the stack. */
-#ifdef STACK_GROWS_DOWNWARD
- tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT(-align),
- stack_pointer_rtx, 1, OPTAB_WIDEN);
-#else
- tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
- GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
- tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
- stack_pointer_rtx, 1, OPTAB_WIDEN);
+#if (defined(INVOKE__main) \
+ || (!defined(HAS_INIT_SECTION) \
+ && !defined(INIT_SECTION_ASM_OP) \
+ && !defined(INIT_ARRAY_SECTION_ASM_OP)))
+ emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
#endif
- if (tmp != stack_pointer_rtx)
- emit_move_insn (stack_pointer_rtx, tmp);
+}
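+
+/* Editor's note: NAME__MAIN conventionally expands to "__main", the
+ libgcc routine that runs static constructors, so the libcall above
+ is emitted only for targets without usable init sections. */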
+\f
+/* Expand code to initialize the stack_protect_guard. This is invoked at
+ the beginning of a function to be protected. */
- /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
- tmp = force_reg (Pmode, const0_rtx);
- allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
- seq = get_insns ();
- end_sequence ();
+#ifndef HAVE_stack_protect_set
+# define HAVE_stack_protect_set 0
+# define gen_stack_protect_set(x, y) (gcc_unreachable (), NULL_RTX)
+#endif
- for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
- if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
- break;
- if (tmp)
- emit_insn_before (seq, tmp);
- else
- emit_insn (seq);
+void
+stack_protect_prologue (void)
+{
+ tree guard_decl = targetm.stack_protect_guard ();
+ rtx x, y;
+
+ /* Avoid expand_expr here, because we don't want guard_decl pulled
+ into registers unless absolutely necessary. And we know that
+ cfun->stack_protect_guard is a local stack slot, so this skips
+ all the fluff. */
+ x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
+ y = validize_mem (DECL_RTL (guard_decl));
+
+ /* Allow the target to copy from Y to X without leaking Y into a
+ register. */
+ if (HAVE_stack_protect_set)
+ {
+ rtx insn = gen_stack_protect_set (x, y);
+ if (insn)
+ {
+ emit_insn (insn);
+ return;
+ }
}
-#endif
-#ifndef HAS_INIT_SECTION
- emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
-#endif
+ /* Otherwise do a straight move. */
+ emit_move_insn (x, y);
}
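+
+/* Editor's sketch of the net effect at the source level:
+
+ frame_canary = __stack_chk_guard;
+
+ where frame_canary stands for cfun->stack_protect_guard's slot and
+ the target pattern, when available, avoids leaving the guard value
+ in a register. */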
-\f
-/* The PENDING_SIZES represent the sizes of variable-sized types.
- Create RTL for the various sizes now (using temporary variables),
- so that we can refer to the sizes from the RTL we are generating
- for the current function. The PENDING_SIZES are a TREE_LIST. The
- TREE_VALUE of each node is a SAVE_EXPR. */
+
+/* Expand code to verify the stack_protect_guard. This is invoked at
+ the end of a function to be protected. */
+
+#ifndef HAVE_stack_protect_test
+# define HAVE_stack_protect_test 0
+# define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
+#endif
void
-expand_pending_sizes (tree pending_sizes)
+stack_protect_epilogue (void)
{
- tree tem;
+ tree guard_decl = targetm.stack_protect_guard ();
+ rtx label = gen_label_rtx ();
+ rtx x, y, tmp;
+
+ /* Avoid expand_expr here, because we don't want guard_decl pulled
+ into registers unless absolutely necessary. And we know that
+ cfun->stack_protect_guard is a local stack slot, so this skips
+ all the fluff. */
+ x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
+ y = validize_mem (DECL_RTL (guard_decl));
+
+ /* Allow the target to compare Y with X without leaking either into
+ a register. */
+ switch (HAVE_stack_protect_test != 0)
+ {
+ case 1:
+ tmp = gen_stack_protect_test (x, y, label);
+ if (tmp)
+ {
+ emit_insn (tmp);
+ break;
+ }
+ /* FALLTHRU */
- /* Evaluate now the sizes of any types declared among the arguments. */
- for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
- expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
-}
+ default:
+ emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
+ break;
+ }
+ /* The noreturn predictor has been moved to the tree level. The rtl-level
+ predictors estimate this branch about 20%, which isn't enough to get
+ things moved out of line. Since this is the only extant case of adding
+ a noreturn function at the rtl level, it doesn't seem worth doing
+ anything except adding the prediction by hand. */
+ tmp = get_last_insn ();
+ if (JUMP_P (tmp))
+ predict_insn_def (tmp, PRED_NORETURN, TAKEN);
+
+ expand_expr_stmt (targetm.stack_protect_fail ());
+ emit_label (label);
+}
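+
+/* Editor's sketch of the emitted check, at the source level and with
+ frame_canary as in the sketch above:
+
+ if (frame_canary != __stack_chk_guard)
+ __stack_chk_fail ();
+
+ using the target's stack_protect_test pattern when it exists, with
+ the failing branch predicted not taken. */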
+\f
/* Start the RTL for a new function, and set variables used for
emitting RTL.
SUBR is the FUNCTION_DECL node.
}
if (value_address)
{
- rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
- set_mem_attributes (x, DECL_RESULT (subr), 1);
+ rtx x = value_address;
+ if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
+ {
+ x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
+ set_mem_attributes (x, DECL_RESULT (subr), 1);
+ }
SET_DECL_RTL (DECL_RESULT (subr), x);
}
}
{
/* Compute the return values into a pseudo reg, which we will copy
into the true return register after the cleanups are done. */
-
- /* In order to figure out what mode to use for the pseudo, we
- figure out what the mode of the eventual return register will
- actually be, and use that. */
- rtx hard_reg
- = hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
- subr, 1);
-
- /* Structures that are returned in registers are not aggregate_value_p,
- so we may see a PARALLEL or a REG. */
- if (REG_P (hard_reg))
- SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
- else if (GET_CODE (hard_reg) == PARALLEL)
- SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
+ tree return_type = TREE_TYPE (DECL_RESULT (subr));
+ if (TYPE_MODE (return_type) != BLKmode
+ && targetm.calls.return_in_msb (return_type))
+ /* expand_function_end will insert the appropriate padding in
+ this case. Use the return value's natural (unpadded) mode
+ within the function proper. */
+ SET_DECL_RTL (DECL_RESULT (subr),
+ gen_reg_rtx (TYPE_MODE (return_type)));
else
- abort ();
+ {
+ /* In order to figure out what mode to use for the pseudo, we
+ figure out what the mode of the eventual return register will
+ actually be, and use that. */
+ rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
+
+ /* Structures that are returned in registers are not
+ aggregate_value_p, so we may see a PARALLEL or a REG. */
+ if (REG_P (hard_reg))
+ SET_DECL_RTL (DECL_RESULT (subr),
+ gen_reg_rtx (GET_MODE (hard_reg)));
+ else
+ {
+ gcc_assert (GET_CODE (hard_reg) == PARALLEL);
+ SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
+ }
+ }
/* Set DECL_REGISTER flag so that expand_function_end will copy the
result to the real return register(s). */
set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
SET_DECL_RTL (parm, local);
- maybe_set_unchanging (local, parm);
mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
emit_move_insn (local, static_chain_incoming_rtx);
since some things (like trampolines) get placed before this. */
tail_recursion_reentry = emit_note (NOTE_INSN_DELETED);
- /* Evaluate now the sizes of any types declared among the arguments. */
- expand_pending_sizes (nreverse (get_pending_sizes ()));
-
/* Make sure there is a line number after the function entry setup code. */
force_next_line_note ();
}
decl; decl = TREE_CHAIN (decl))
if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
&& DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
- warning ("%Junused parameter '%D'", decl, decl);
+ warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
}
static GTY(()) rtx initial_trampoline;
clear_pending_stack_adjust ();
do_pending_stack_adjust ();
- /* @@@ This is a kludge. We want to ensure that instructions that
- may trap are not moved into the epilogue by scheduling, because
- we don't always emit unwind information for the epilogue.
- However, not all machine descriptions define a blockage insn, so
- emit an ASM_INPUT to act as one. */
- if (flag_non_call_exceptions)
- emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
-
/* Mark the end of the function body.
If control reaches this insn, the function can drop through
without returning a value. */
is computed. */
clobber_after = get_last_insn ();
- /* Output the label for the actual return from the function,
- if one is expected. This happens either because a function epilogue
- is used instead of a return instruction, or because a return was done
- with a goto in order to run local cleanups, or because of pcc-style
- structure returning. */
- if (return_label)
- emit_label (return_label);
-
- /* Let except.c know where it should emit the call to unregister
- the function context for sjlj exceptions. */
- if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
- sjlj_emit_function_exit_after (get_last_insn ());
+ /* Output the label for the actual return from the function. */
+ emit_label (return_label);
- /* If we had calls to alloca, and this machine needs
- an accurate stack pointer to exit the function,
- insert some code to save and restore the stack pointer. */
- if (! EXIT_IGNORE_STACK
- && current_function_calls_alloca)
+ if (USING_SJLJ_EXCEPTIONS)
{
- rtx tem = 0;
-
- emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
- emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
+ /* Let except.c know where it should emit the call to unregister
+ the function context for sjlj exceptions. */
+ if (flag_exceptions)
+ sjlj_emit_function_exit_after (get_last_insn ());
+ }
+ else
+ {
+ /* @@@ This is a kludge. We want to ensure that instructions that
+ may trap are not moved into the epilogue by scheduling, because
+ we don't always emit unwind information for the epilogue.
+ However, not all machine descriptions define a blockage insn, so
+ emit an ASM_INPUT to act as one. */
+ if (flag_non_call_exceptions)
+ emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
}
+ /* If this is an implementation of throw, do what's necessary to
+ communicate between __builtin_eh_return and the epilogue. */
+ expand_eh_return ();
+
/* If scalar return value was computed in a pseudo-reg, or was a named
return value that got dumped to the stack, copy that to the hard
return register. */
rtx real_decl_rtl = current_function_return_rtx;
/* This should be set in assign_parms. */
- if (! REG_FUNCTION_VALUE_P (real_decl_rtl))
- abort ();
+ gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
/* If this is a BLKmode structure being returned in registers,
then use the mode computed in expand_return. Note that if
if (GET_MODE (real_decl_rtl) == BLKmode)
PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
+ /* If a non-BLKmode return value should be padded at the least
+ significant end of the register, shift it left by the appropriate
+ amount. BLKmode results are handled using the group load/store
+ machinery. */
+ if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
+ && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
+ {
+ emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
+ REGNO (real_decl_rtl)),
+ decl_rtl);
+ shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
+ }
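+
+ /* Editor's example, hypothetical target: an HImode value returned
+ in a 32-bit register with return_in_msb true is effectively
+
+ real_decl_rtl = value << 16;
+
+ leaving the padding in the least significant bits. */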
/* If a named return value dumped decl_return to memory, then
we may need to re-do the PROMOTE_MODE signed/unsigned
extension. */
- if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
+ else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
{
int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
TREE_TYPE (decl_result),
int_size_in_bytes (TREE_TYPE (decl_result)));
}
+ /* In the case of complex integer modes smaller than a word, we'll
+ need to generate some non-trivial bitfield insertions. Do that
+ on a pseudo and not the hard register. */
+ else if (GET_CODE (decl_rtl) == CONCAT
+ && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
+ && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
+ {
+ int old_generating_concat_p;
+ rtx tmp;
+
+ old_generating_concat_p = generating_concat_p;
+ generating_concat_p = 0;
+ tmp = gen_reg_rtx (GET_MODE (decl_rtl));
+ generating_concat_p = old_generating_concat_p;
+
+ emit_move_insn (tmp, decl_rtl);
+ emit_move_insn (real_decl_rtl, tmp);
+ }
else
emit_move_insn (real_decl_rtl, decl_rtl);
}
else
value_address = XEXP (value_address, 0);
-#ifdef FUNCTION_OUTGOING_VALUE
- outgoing = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
- current_function_decl);
-#else
- outgoing = FUNCTION_VALUE (build_pointer_type (type),
- current_function_decl);
-#endif
+ outgoing = targetm.calls.function_value (build_pointer_type (type),
+ current_function_decl, true);
/* Mark this as a function return value so integrate will delete the
assignment and USE below when inlining this function. */
current_function_return_rtx = outgoing;
}
- /* If this is an implementation of throw, do what's necessary to
- communicate between __builtin_eh_return and the epilogue. */
- expand_eh_return ();
-
/* Emit the actual code to clobber return register. */
{
- rtx seq, after;
+ rtx seq;
start_sequence ();
clobber_return_register ();
+ expand_naked_return ();
seq = get_insns ();
end_sequence ();
- after = emit_insn_after (seq, clobber_after);
+ emit_insn_after (seq, clobber_after);
}
- /* Output the label for the naked return from the function, if one is
- expected. This is currently used only by __builtin_return. */
- if (naked_return_label)
- emit_label (naked_return_label);
+ /* Output the label for the naked return from the function. */
+ emit_label (naked_return_label);
+
+ /* If stack protection is enabled for this function, check the guard. */
+ if (cfun->stack_protect_guard)
+ stack_protect_epilogue ();
+
+ /* If we had calls to alloca, and this machine needs
+ an accurate stack pointer to exit the function,
+ insert some code to save and restore the stack pointer. */
+ if (! EXIT_IGNORE_STACK
+ && current_function_calls_alloca)
+ {
+ rtx tem = 0;
+
+ emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
+ emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
+ }
/* ??? This should no longer be necessary since stupid is no longer with
us, but there are some parts of the compiler (eg reload_combine, and
end_sequence ();
push_topmost_sequence ();
- emit_insn_after (seq, get_insns ());
+ emit_insn_after (seq, entry_of_function ());
pop_topmost_sequence ();
}
(a list of one or more insns). */
static void
-record_insns (rtx insns, varray_type *vecp)
+record_insns (rtx insns, VEC(int,heap) **vecp)
{
- int i, len;
rtx tmp;
- tmp = insns;
- len = 0;
- while (tmp != NULL_RTX)
- {
- len++;
- tmp = NEXT_INSN (tmp);
- }
-
- i = VARRAY_SIZE (*vecp);
- VARRAY_GROW (*vecp, i + len);
- tmp = insns;
- while (tmp != NULL_RTX)
- {
- VARRAY_INT (*vecp, i) = INSN_UID (tmp);
- i++;
- tmp = NEXT_INSN (tmp);
- }
+ for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
+ VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
}
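+
+/* Editor's usage note: a caller typically does
+
+ record_insns (seq, &prologue);
+
+ after emitting SEQ, so that contains () and
+ prologue_epilogue_contains () can later identify those insns by
+ their UIDs. */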
/* Set the locator of the insn chain starting at INSN to LOC. */
be running after reorg, SEQUENCE rtl is possible. */
static int
-contains (rtx insn, varray_type vec)
+contains (rtx insn, VEC(int,heap) **vec)
{
int i, j;
{
int count = 0;
for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
- for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
- if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
+ for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
+ if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
+ == VEC_index (int, *vec, j))
count++;
return count;
}
else
{
- for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
- if (INSN_UID (insn) == VARRAY_INT (vec, j))
+ for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
+ if (INSN_UID (insn) == VEC_index (int, *vec, j))
return 1;
}
return 0;
int
prologue_epilogue_contains (rtx insn)
{
- if (contains (insn, prologue))
+ if (contains (insn, &prologue))
return 1;
- if (contains (insn, epilogue))
+ if (contains (insn, &epilogue))
return 1;
return 0;
}
sibcall_epilogue_contains (rtx insn)
{
if (sibcall_epilogue)
- return contains (insn, sibcall_epilogue);
+ return contains (insn, &sibcall_epilogue);
return 0;
}
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
-/* These functions convert the epilogue into a variant that does not modify the
- stack pointer. This is used in cases where a function returns an object
- whose size is not known until it is computed. The called function leaves the
- object on the stack, leaves the stack depressed, and returns a pointer to
- the object.
-
- What we need to do is track all modifications and references to the stack
- pointer, deleting the modifications and changing the references to point to
- the location the stack pointer would have pointed to had the modifications
- taken place.
-
- These functions need to be portable so we need to make as few assumptions
- about the epilogue as we can. However, the epilogue basically contains
- three things: instructions to reset the stack pointer, instructions to
- reload registers, possibly including the frame pointer, and an
- instruction to return to the caller.
-
- If we can't be sure of what a relevant epilogue insn is doing, we abort.
- We also make no attempt to validate the insns we make since if they are
- invalid, we probably can't do anything valid. The intent is that these
- routines get "smarter" as more and more machines start to use them and
- they try operating on different epilogues.
-
- We use the following structure to track what the part of the epilogue that
- we've already processed has done. We keep two copies of the SP equivalence,
- one for use during the insn we are processing and one for use in the next
- insn. The difference is because one part of a PARALLEL may adjust SP
- and the other may use it. */
+/* These functions convert the epilogue into a variant that does not
+ modify the stack pointer. This is used in cases where a function
+ returns an object whose size is not known until it is computed.
+ The called function leaves the object on the stack, leaves the
+ stack depressed, and returns a pointer to the object.
+
+ What we need to do is track all modifications and references to the
+ stack pointer, deleting the modifications and changing the
+ references to point to the location the stack pointer would have
+ pointed to had the modifications taken place.
+
+ These functions need to be portable so we need to make as few
+ assumptions about the epilogue as we can. However, the epilogue
+ basically contains three things: instructions to reset the stack
+ pointer, instructions to reload registers, possibly including the
+ frame pointer, and an instruction to return to the caller.
+
+ We must be sure of what a relevant epilogue insn is doing. We also
+ make no attempt to validate the insns we make since if they are
+ invalid, we probably can't do anything valid. The intent is that
+ these routines get "smarter" as more and more machines start to use
+ them and they try operating on different epilogues.
+
+ We use the following structure to track what the part of the
+ epilogue that we've already processed has done. We keep two copies
+ of the SP equivalence, one for use during the insn we are
+ processing and one for use in the next insn. The difference is
+ because one part of a PARALLEL may adjust SP and the other may use
+ it. */
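+
+/* Editor's example of why two copies are needed: in a PARALLEL like
+
+ (parallel [(set (reg r1) (mem (plus sp 8)))
+ (set sp (plus sp 16))])
+
+ the memory reference must use the SP equivalence in force before
+ the insn, while the adjustment takes effect only for the insns
+ that follow. */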
struct epi_info
{
insn = next;
continue;
}
- else if (MEM_P (retaddr)
- && REG_P (XEXP (retaddr, 0)))
- base = gen_rtx_REG (Pmode, REGNO (XEXP (retaddr, 0))), offset = 0;
- else if (MEM_P (retaddr)
- && GET_CODE (XEXP (retaddr, 0)) == PLUS
- && REG_P (XEXP (XEXP (retaddr, 0), 0))
- && GET_CODE (XEXP (XEXP (retaddr, 0), 1)) == CONST_INT)
+ else
{
- base = gen_rtx_REG (Pmode, REGNO (XEXP (XEXP (retaddr, 0), 0)));
- offset = INTVAL (XEXP (XEXP (retaddr, 0), 1));
+ rtx ret_ptr;
+ gcc_assert (MEM_P (retaddr));
+
+ ret_ptr = XEXP (retaddr, 0);
+
+ if (REG_P (ret_ptr))
+ {
+ base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
+ offset = 0;
+ }
+ else
+ {
+ gcc_assert (GET_CODE (ret_ptr) == PLUS
+ && REG_P (XEXP (ret_ptr, 0))
+ && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
+ base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
+ offset = INTVAL (XEXP (ret_ptr, 1));
+ }
}
- else
- abort ();
/* If the base of the location containing the return pointer
is SP, we must update it with the replacement address. Otherwise,
info.sp_offset));
retaddr = gen_rtx_MEM (Pmode, retaddr);
+ MEM_NOTRAP_P (retaddr) = 1;
/* If there is a pending load to the equivalent register for SP
and we reference that register, we must load our address into
if (HARD_REGNO_MODE_OK (regno, Pmode)
&& !fixed_regs[regno]
&& TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
- && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
- regno)
+ && !REGNO_REG_SET_P
+ (EXIT_BLOCK_PTR->il.rtl->global_live_at_start, regno)
&& !refers_to_regno_p (regno,
regno + hard_regno_nregs[regno]
[Pmode],
&& info.const_equiv[regno] == 0)
break;
- if (regno == FIRST_PSEUDO_REGISTER)
- abort ();
+ gcc_assert (regno < FIRST_PSEUDO_REGISTER);
reg = gen_rtx_REG (Pmode, regno);
emit_move_insn (reg, retaddr);
/* Show the SET in the above insn is a RETURN. */
jump_set = single_set (jump_insn);
- if (jump_set == 0)
- abort ();
- else
- SET_IS_RETURN_P (jump_set) = 1;
+ gcc_assert (jump_set);
+ SET_IS_RETURN_P (jump_set) = 1;
}
/* If SP is not mentioned in the pattern and its equivalent register, if
&& (info.sp_equiv_reg == stack_pointer_rtx
|| !reg_set_p (info.sp_equiv_reg, insn)))
{
- if (! validate_replace_rtx (stack_pointer_rtx,
- plus_constant (info.sp_equiv_reg,
- info.sp_offset),
- insn))
- abort ();
+ int changed;
+
+ changed = validate_replace_rtx (stack_pointer_rtx,
+ plus_constant (info.sp_equiv_reg,
+ info.sp_offset),
+ insn);
+ gcc_assert (changed);
add_insn (insn);
}
handle_epilogue_set (rtx set, struct epi_info *p)
{
/* First handle the case where we are setting SP. Record what it is being
- set from. If unknown, abort. */
+ set from, which we must be able to determine. */
if (reg_set_p (stack_pointer_rtx, set))
{
- if (SET_DEST (set) != stack_pointer_rtx)
- abort ();
+ gcc_assert (SET_DEST (set) == stack_pointer_rtx);
if (GET_CODE (SET_SRC (set)) == PLUS)
{
p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
- else if (REG_P (XEXP (SET_SRC (set), 1))
- && REGNO (XEXP (SET_SRC (set), 1)) < FIRST_PSEUDO_REGISTER
- && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))] != 0)
- p->new_sp_offset
- = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
else
- abort ();
+ {
+ gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
+ && (REGNO (XEXP (SET_SRC (set), 1))
+ < FIRST_PSEUDO_REGISTER)
+ && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
+ p->new_sp_offset
+ = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
+ }
}
else
p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
p->new_sp_offset += p->sp_offset;
}
- if (p->new_sp_equiv_reg == 0 || !REG_P (p->new_sp_equiv_reg))
- abort ();
+ gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));
return;
}
- /* Next handle the case where we are setting SP's equivalent register.
- If we already have a value to set it to, abort. We could update, but
- there seems little point in handling that case. Note that we have
- to allow for the case where we are setting the register set in
- the previous part of a PARALLEL inside a single insn. But use the
- old offset for any updates within this insn. We must allow for the case
- where the register is being set in a different (usually wider) mode than
- Pmode). */
+ /* Next handle the case where we are setting SP's equivalent
+ register. We must not already have a value to set it to. We
+ could update, but there seems little point in handling that case.
+ Note that we have to allow for the case where we are setting the
+ register set in the previous part of a PARALLEL inside a single
+ insn. But use the old offset for any updates within this insn.
+ We must allow for the case where the register is being set in a
+ different (usually wider) mode than Pmode. */
else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
{
- if (p->equiv_reg_src != 0
- || !REG_P (p->new_sp_equiv_reg)
- || !REG_P (SET_DEST (set))
- || GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) > BITS_PER_WORD
- || REGNO (p->new_sp_equiv_reg) != REGNO (SET_DEST (set)))
- abort ();
- else
- p->equiv_reg_src
- = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
- plus_constant (p->sp_equiv_reg,
- p->sp_offset));
+ gcc_assert (!p->equiv_reg_src
+ && REG_P (p->new_sp_equiv_reg)
+ && REG_P (SET_DEST (set))
+ && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
+ <= BITS_PER_WORD)
+ && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
+ p->equiv_reg_src
+ = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
+ plus_constant (p->sp_equiv_reg,
+ p->sp_offset));
}
/* Otherwise, replace any references to SP in the insn to its new value
#if defined (HAVE_epilogue) || defined(HAVE_return)
rtx epilogue_end = NULL_RTX;
#endif
+ edge_iterator ei;
#ifdef HAVE_prologue
if (HAVE_prologue)
/* Can't deal with multiple successors of the entry block
at the moment. Function should always have at least one
entry point. */
- if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
- abort ();
+ gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
- insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
+ insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
inserted = 1;
}
#endif
/* If the exit block has no non-fake predecessors, we don't need
an epilogue. */
- for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
+ FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
if ((e->flags & EDGE_FAKE) == 0)
break;
if (e == NULL)
emit (conditional) return instructions. */
basic_block last;
- edge e_next;
rtx label;
- for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
+ FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
if (e->flags & EDGE_FALLTHRU)
break;
if (e == NULL)
if (BB_HEAD (last) == label && LABEL_P (label))
{
+ edge_iterator ei2;
rtx epilogue_line_note = NULL_RTX;
/* Locate the line number associated with the closing brace,
break;
}
- for (e = last->pred; e; e = e_next)
+ for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
{
basic_block bb = e->src;
rtx jump;
- e_next = e->pred_next;
if (bb == ENTRY_BLOCK_PTR)
- continue;
+ {
+ ei_next (&ei2);
+ continue;
+ }
jump = BB_END (bb);
if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
- continue;
+ {
+ ei_next (&ei2);
+ continue;
+ }
/* If we have an unconditional jump, we can replace that
with a simple return instruction. */
else if (condjump_p (jump))
{
if (! redirect_jump (jump, 0, 0))
- continue;
+ {
+ ei_next (&ei2);
+ continue;
+ }
/* If this block has only one successor, it both jumps
and falls through to the fallthru block, so we can't
delete the edge. */
- if (bb->succ->succ_next == NULL)
- continue;
+ if (single_succ_p (bb))
+ {
+ ei_next (&ei2);
+ continue;
+ }
}
else
- continue;
+ {
+ ei_next (&ei2);
+ continue;
+ }
/* Fix up the CFG for the successful change we just made. */
redirect_edge_succ (e, EXIT_BLOCK_PTR);
emit_barrier_after (BB_END (last));
emit_return_into_block (last, epilogue_line_note);
epilogue_end = BB_END (last);
- last->succ->flags &= ~EDGE_FALLTHRU;
+ single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
goto epilogue_done;
}
}
There really shouldn't be a mixture -- either all should have
been converted or none, however... */
- for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
+ FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
if (e->flags & EDGE_FALLTHRU)
break;
if (e == NULL)
fixup_fallthru_exit_predecessor. */
cfg_layout_initialize (0);
FOR_EACH_BB (cur_bb)
- if (cur_bb->index >= 0 && cur_bb->next_bb->index >= 0)
- cur_bb->rbi->next = cur_bb->next_bb;
+ if (cur_bb->index >= NUM_FIXED_BLOCKS
+ && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
+ cur_bb->aux = cur_bb->next_bb;
cfg_layout_finalize ();
}
epilogue_done:
#ifdef HAVE_sibcall_epilogue
/* Emit sibling epilogues before any sibling call sites. */
- for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
+ for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
{
basic_block bb = e->src;
rtx insn = BB_END (bb);
- rtx i;
- rtx newinsn;
if (!CALL_P (insn)
|| ! SIBLING_CALL_P (insn))
- continue;
+ {
+ ei_next (&ei);
+ continue;
+ }
start_sequence ();
emit_insn (gen_sibcall_epilogue ());
record_insns (seq, &sibcall_epilogue);
set_insn_locators (seq, epilogue_locator);
- i = PREV_INSN (insn);
- newinsn = emit_insn_before (seq, insn);
+ emit_insn_before (seq, insn);
+ ei_next (&ei);
}
#endif
rtx insn, last, note;
int len;
- if ((len = VARRAY_SIZE (prologue)) > 0)
+ if ((len = VEC_length (int, prologue)) > 0)
{
last = 0, note = 0;
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
note = insn;
}
- else if (contains (insn, prologue))
+ else if (contains (insn, &prologue))
{
last = insn;
if (--len == 0)
}
}
- if ((len = VARRAY_SIZE (epilogue)) > 0)
+ if ((len = VEC_length (int, epilogue)) > 0)
{
last = 0, note = 0;
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
note = insn;
}
- else if (contains (insn, epilogue))
+ else if (contains (insn, &epilogue))
{
last = insn;
if (--len == 0)
#endif /* HAVE_prologue or HAVE_epilogue */
}
-/* Called once, at initialization, to initialize function.c. */
-
-void
-init_function_once (void)
-{
- VARRAY_INT_INIT (prologue, 0, "prologue");
- VARRAY_INT_INIT (epilogue, 0, "epilogue");
- VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
-}
-
/* Resets insn_block_boundaries array. */
void
if (!block)
return;
+ if (!cfun->ib_boundaries_block)
+ return;
+
last_block = VARRAY_TOP_TREE (cfun->ib_boundaries_block);
VARRAY_POP (cfun->ib_boundaries_block);
n = get_max_uid ();
{
return lang_hooks.decl_printable_name (cfun->decl, 2);
}
+\f
+
+static void
+rest_of_handle_check_leaf_regs (void)
+{
+#ifdef LEAF_REGISTERS
+ current_function_uses_only_leaf_regs
+ = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
+#endif
+}
+
+struct tree_opt_pass pass_leaf_regs =
+{
+ NULL, /* name */
+ NULL, /* gate */
+ rest_of_handle_check_leaf_regs, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ 0, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ 0, /* todo_flags_finish */
+ 0 /* letter */
+};
+
#include "gt-function.h"