/* Expands front end tree to back end RTL for GNU C-Compiler
- Copyright (C) 1987, 1988, 1989, 1991 Free Software Foundation, Inc.
+ Copyright (C) 1987, 1988, 1989, 1991, 1992 Free Software Foundation, Inc.
This file is part of GNU CC.
#include "insn-config.h"
#include "recog.h"
#include "output.h"
+#include "basic-block.h"
/* Round a value to the lowest integer less than it that is a multiple of
the required alignment. Avoid using division in case the value is
static void optimize_bit_field ();
static void instantiate_decls ();
static void instantiate_decls_1 ();
+static void instantiate_decl ();
static int instantiate_virtual_regs_1 ();
static rtx fixup_memory_subreg ();
static rtx walk_fixup_memory_subreg ();
/* Save the current context for compilation of a nested function.
This is called from language-specific code.
The caller is responsible for saving any language-specific status,
- since this function knows only about language-indepedent variables. */
+ since this function knows only about language-independent variables. */
void
push_function_context ()
/* If this is a variable-size object with a pseudo to address it,
put that pseudo into the stack, if the var is nonlocal. */
- if (TREE_NONLOCAL (decl)
+ if (DECL_NONLOCAL (decl)
&& GET_CODE (reg) == MEM
&& GET_CODE (XEXP (reg, 0)) == REG
&& REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
if (GET_CODE (x) == SIGN_EXTRACT)
wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
#endif
- /* If we have a narrower mode, we can do someting. */
+ /* If we have a narrower mode, we can do something. */
if (wanted_mode != VOIDmode
&& GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
{
/* Make the change and see if the insn remains valid. */
INSN_CODE (insn) = -1;
XEXP (x, 0) = newmem;
- XEXP (x, 2) = gen_rtx (CONST_INT, VOIDmode, pos);
+ XEXP (x, 2) = GEN_INT (pos);
if (recog_memoized (insn) >= 0)
return;
optimize_bit_field (x, insn, 0);
if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
|| GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
- optimize_bit_field (x, insn, 0);
+ optimize_bit_field (x, insn, NULL_PTR);
/* If SET_DEST is now a paradoxical SUBREG, put the result of this
insn into a pseudo and store the low part of the pseudo into VAR. */
int width = INTVAL (XEXP (outerdest, 1));
int pos = INTVAL (XEXP (outerdest, 2));
- /* If we have a narrower mode, we can do someting. */
+ /* If we have a narrower mode, we can do something. */
if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
{
int offset = pos / BITS_PER_UNIT;
/* Make the change and see if the insn remains valid. */
INSN_CODE (insn) = -1;
XEXP (outerdest, 0) = newmem;
- XEXP (outerdest, 2) = gen_rtx (CONST_INT, VOIDmode, pos);
+ XEXP (outerdest, 2) = GEN_INT (pos);
if (recog_memoized (insn) >= 0)
return;
|| GET_CODE (insn) == CALL_INSN)
{
instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
- instantiate_virtual_regs_1 (®_NOTES (insn), 0, 0);
+ instantiate_virtual_regs_1 (®_NOTES (insn), NULL_RTX, 0);
}
/* Now instantiate the remaining register equivalences for debugging info.
{
tree decl;
- if (TREE_INLINE (fndecl))
+ if (DECL_INLINE (fndecl))
/* When compiling an inline function, the obstack used for
rtl allocation is the maybepermanent_obstack. Calling
`resume_temporary_allocation' switches us back to that
/* Process all parameters of the function. */
for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
{
- if (DECL_RTL (decl) && GET_CODE (DECL_RTL (decl)) == MEM)
- instantiate_virtual_regs_1 (&XEXP (DECL_RTL (decl), 0),
- valid_only ? DECL_RTL (decl) : 0, 0);
-#if 1 /* This is probably correct, but it seems to require fixes
- elsewhere in order to work. Let's fix them in 2.1. */
- if (DECL_INCOMING_RTL (decl)
- && GET_CODE (DECL_INCOMING_RTL (decl)) == MEM)
- instantiate_virtual_regs_1 (&XEXP (DECL_INCOMING_RTL (decl), 0),
- valid_only ? DECL_INCOMING_RTL (decl) : 0,
- 0);
-#endif
+ instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)),
+ valid_only);
+ instantiate_decl (DECL_INCOMING_RTL (decl),
+ int_size_in_bytes (TREE_TYPE (decl)), valid_only);
}
/* Now process all variables defined in the function or its subblocks. */
instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
- if (TREE_INLINE (fndecl))
+ if (DECL_INLINE (fndecl))
{
/* Save all rtl allocated for this function by raising the
high-water mark on the maybepermanent_obstack. */
tree t;
for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
- if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
- instantiate_virtual_regs_1 (& XEXP (DECL_RTL (t), 0),
- valid_only ? DECL_RTL (t) : 0, 0);
+ instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
+ valid_only);
/* Process all subblocks. */
for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
instantiate_decls_1 (t, valid_only);
}
+
+/* Subroutine of the preceding procedures: Given RTL representing a
+ decl and the size of the object, do any instantiation required.
+
+ If VALID_ONLY is non-zero, it means that the RTL should only be
+ changed if the new address is valid. */
+
+static void
+instantiate_decl (x, size, valid_only)
+ rtx x;
+ int size;
+ int valid_only;
+{
+ enum machine_mode mode;
+ rtx addr;
+
+ /* If this is not a MEM, no need to do anything. Similarly if the
+ address is a constant or a register that is not a virtual register. */
+
+ if (x == 0 || GET_CODE (x) != MEM)
+ return;
+
+ addr = XEXP (x, 0);
+ if (CONSTANT_P (addr)
+ || (GET_CODE (addr) == REG
+ && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
+ || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
+ return;
+
+ /* If we should only do this if the address is valid, copy the address.
+ We need to do this so we can undo any changes that might make the
+ address invalid. This copy is unfortunate, but probably can't be
+ avoided. */
+
+ if (valid_only)
+ addr = copy_rtx (addr);
+
+ /* Replace any virtual registers in the (possibly copied) address
+ with their instantiations, modifying ADDR in place. */
+ instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
+
+ if (! valid_only)
+ return;
+
+ /* Now verify that the resulting address is valid for every integer or
+ floating-point mode up to and including SIZE bytes long. We do this
+ since the object might be accessed in any mode and frame addresses
+ are shared. */
+
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
+ mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
+ mode = GET_MODE_WIDER_MODE (mode))
+ if (! memory_address_p (mode, addr))
+ return;
+
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
+ mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
+ mode = GET_MODE_WIDER_MODE (mode))
+ if (! memory_address_p (mode, addr))
+ return;
+
+ /* Otherwise, put back the address, now that we have updated it and we
+ know it is valid. */
+
+ XEXP (x, 0) = addr;
+}
\f
/* Given a pointer to a piece of rtx and an optional pointer to the
containing object, instantiate any virtual registers present in it.
start_sequence ();
if (GET_CODE (SET_SRC (x)) != REG)
- temp = force_operand (SET_SRC (x), 0);
+ temp = force_operand (SET_SRC (x), NULL_RTX);
else
temp = SET_SRC (x);
- temp = force_operand (plus_constant (temp, offset), 0);
+ temp = force_operand (plus_constant (temp, offset), NULL_RTX);
seq = get_insns ();
end_sequence ();
XEXP (x, 0) = old;
start_sequence ();
- temp = force_operand (new, 0);
+ temp = force_operand (new, NULL_RTX);
seq = get_insns ();
end_sequence ();
Note that we cannot pass X as the object in the recursive call
since the insn being processed may not allow all valid
- addresses. */
+ addresses. However, if we were not passed an object, we can
+ only modify X without copying it if X will have a valid
+ address.
+
+ ??? Also note that this can still lose if OBJECT is an insn that
+ has fewer restrictions on an address than some other insn.
+ In that case, we will modify the shared address. This case
+ doesn't seem very likely, though. */
- if (instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0))
+ if (instantiate_virtual_regs_1 (&XEXP (x, 0),
+ object ? object : x, 0))
return 1;
/* Otherwise make a copy and process that copy. We copy the entire
return 0;
start_sequence ();
- temp = force_operand (temp, 0);
+ temp = force_operand (temp, NULL_RTX);
seq = get_insns ();
end_sequence ();
if (GET_CODE (insn) == CODE_LABEL)
LABEL_PRESERVE_P (insn) = 0;
if (GET_CODE (insn) == INSN
- && GET_CODE (PATTERN (insn)) == SET
- && (SET_DEST (PATTERN (insn)) == nonlocal_goto_handler_slot
- || SET_SRC (PATTERN (insn)) == nonlocal_goto_handler_slot
- || SET_DEST (PATTERN (insn)) == nonlocal_goto_stack_level
- || SET_SRC (PATTERN (insn)) == nonlocal_goto_stack_level))
+ && ((nonlocal_goto_handler_slot != 0
+ && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
+ || (nonlocal_goto_stack_level != 0
+ && reg_mentioned_p (nonlocal_goto_stack_level,
+ PATTERN (insn)))))
delete_insn (insn);
}
}
{
tree type = build_pointer_type (fntype);
- function_result_decl = build_decl (PARM_DECL, 0, type);
+ function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
DECL_ARG_TYPE (function_result_decl) = type;
TREE_CHAIN (function_result_decl) = fnargs;
bzero (parm_reg_stack_loc, nparmregs * sizeof (rtx));
#ifdef INIT_CUMULATIVE_INCOMING_ARGS
- INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, 0);
+ INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_PTR);
#else
- INIT_CUMULATIVE_ARGS (args_so_far, fntype, 0);
+ INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_PTR);
#endif
/* We haven't yet found an argument that we must push and pretend the
passed_mode = TYPE_MODE (passed_type);
nominal_mode = TYPE_MODE (TREE_TYPE (parm));
+ /* If the parm's mode is VOID, its value doesn't matter,
+ and avoid the usual things like emit_move_insn that could crash. */
+ if (nominal_mode == VOIDmode)
+ {
+ DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
+ continue;
+ }
+
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
/* See if this arg was passed by invisible reference. */
if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
to indicate there is no preallocated stack slot for the parm. */
if (entry_parm == stack_parm
-#ifdef REG_PARM_STACK_SPACE
+#if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
/* On some machines, even if a parm value arrives in a register
- there is still an (uninitialized) stack slot allocated for it. */
+ there is still an (uninitialized) stack slot allocated for it.
+
+ ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
+ whether this parameter already has a stack slot allocated,
+ because an arg block exists only if current_function_args_size
+ is larger than some threshold, and we haven't calculated that
+ yet. So, for now, we just assume that stack slots never exist
+ in this case. */
|| REG_PARM_STACK_SPACE (fndecl) > 0
#endif
)
if (second_time)
continue;
+ /* If we can't trust the parm stack slot to be aligned enough
+ for its ultimate type, don't use that slot after entry.
+ We'll make another stack slot, if we need one. */
+ {
+#ifdef FUNCTION_ARG_BOUNDARY
+ int thisparm_boundary
+ = FUNCTION_ARG_BOUNDARY (passed_mode, passed_type);
+#else
+ int thisparm_boundary = PARM_BOUNDARY;
+#endif
+
+ if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
+ stack_parm = 0;
+ }
+
/* Now adjust STACK_PARM to the mode and precise location
where this parameter should live during execution,
if we discover that it must live in the stack during execution.
}
DECL_RTL (parm) = stack_parm;
}
- else if (! (
-#if 0 /* This change was turned off because it makes compilation bigger. */
- !optimize
-#else /* It's not clear why the following was replaced. */
- /* Obsoleted by preceding line. */
- (obey_regdecls && ! TREE_REGDECL (parm)
- && ! TREE_INLINE (fndecl))
-#endif
+ else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
+ && ! DECL_INLINE (fndecl))
/* layout_decl may set this. */
|| TREE_ADDRESSABLE (parm)
|| TREE_SIDE_EFFECTS (parm)
&& REGNO (entry_parm) < FIRST_PSEUDO_REGISTER
&& ! HARD_REGNO_MODE_OK (REGNO (entry_parm),
GET_MODE (entry_parm)))
- convert_move (parmreg, copy_to_reg (entry_parm));
+ convert_move (parmreg, copy_to_reg (entry_parm), 0);
else
convert_move (parmreg, validize_mem (entry_parm), 0);
}
else
emit_move_insn (parmreg, validize_mem (entry_parm));
+ /* If we were passed a pointer but the actual value
+ can safely live in a register, put it in one. */
+ if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
+ && ! ((obey_regdecls && ! DECL_REGISTER (parm)
+ && ! DECL_INLINE (fndecl))
+ /* layout_decl may set this. */
+ || TREE_ADDRESSABLE (parm)
+ || TREE_SIDE_EFFECTS (parm)
+ /* If -ffloat-store specified, don't put explicit
+ float variables into registers. */
+ || (flag_float_store
+ && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
+ {
+ /* We can't use nominal_mode, because it will have been set to
+ Pmode above. We must use the actual mode of the parm. */
+ parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
+ emit_move_insn (parmreg, DECL_RTL (parm));
+ DECL_RTL (parm) = parmreg;
+ }
+
/* In any case, record the parm's desired stack location
in case we later discover it must live in the stack. */
if (REGNO (parmreg) >= nparmregs)
as we make here would screw up life analysis for it. */
if (nominal_mode == passed_mode
&& GET_CODE (entry_parm) == MEM
+ && entry_parm == stack_parm
&& stack_offset.var == 0
&& reg_mentioned_p (virtual_incoming_args_rtx,
XEXP (entry_parm, 0)))
minimum length. */
#ifdef REG_PARM_STACK_SPACE
+#ifndef MAYBE_REG_PARM_STACK_SPACE
current_function_args_size = MAX (current_function_args_size,
REG_PARM_STACK_SPACE (fndecl));
#endif
+#endif
#ifdef STACK_BOUNDARY
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
#ifdef ARGS_GROW_DOWNWARD
current_function_arg_offset_rtx
- = (stack_args_size.var == 0 ? gen_rtx (CONST_INT, VOIDmode,
- -stack_args_size.constant)
+ = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
: expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
size_int (-stack_args_size.constant)),
- 0, VOIDmode, 0));
+ NULL_RTX, VOIDmode, 0));
#else
current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
#endif
area reserved for registers, skip that area. */
if (! in_regs)
{
+#ifdef MAYBE_REG_PARM_STACK_SPACE
+ reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
+#else
reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
+#endif
if (reg_parm_stack_space > 0)
{
if (initial_offset_ptr->var)
#endif /* ARGS_GROW_DOWNWARD */
}
+/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
+ BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
+
static void
pad_to_arg_alignment (offset_ptr, boundary)
struct args_size *offset_ptr;
NON_SAVING_SETJMP
||
#endif
- ! TREE_REGDECL (decl)))
+ ! DECL_REGISTER (decl)))
put_var_into_stack (decl);
for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
setjmp_protect (sub);
NON_SAVING_SETJMP
||
#endif
- ! TREE_REGDECL (decl)))
+ ! DECL_REGISTER (decl)))
put_var_into_stack (decl);
}
\f
/* Round address up to desired boundary. */
rtx temp = gen_reg_rtx (Pmode);
temp = expand_binop (Pmode, add_optab, tramp,
- gen_rtx (CONST_INT, VOIDmode, TRAMPOLINE_ALIGNMENT - 1),
+ GEN_INT (TRAMPOLINE_ALIGNMENT - 1),
temp, 0, OPTAB_LIB_WIDEN);
tramp = expand_binop (Pmode, and_optab, temp,
- gen_rtx (CONST_INT, VOIDmode, - TRAMPOLINE_ALIGNMENT),
+ GEN_INT (- TRAMPOLINE_ALIGNMENT),
temp, 0, OPTAB_LIB_WIDEN);
#endif
return tramp;
}
\f
+/* The functions identify_blocks and reorder_blocks provide a way to
+ reorder the tree of BLOCK nodes, for optimizers that reshuffle or
+ duplicate portions of the RTL code. Call identify_blocks before
+ changing the RTL, and call reorder_blocks after. */
+
+static int all_blocks ();
+static tree blocks_nreverse ();
+
+/* Put all this function's BLOCK nodes into a vector, and return it.
+ Also store in each NOTE for the beginning or end of a block
+ the index of that block in the vector.
+ The arguments are TOP_BLOCK, the top-level block of the function,
+ and INSNS, the insn chain of the function. */
+
+tree *
+identify_blocks (top_block, insns)
+ tree top_block;
+ rtx insns;
+{
+ int n_blocks;
+ tree *block_vector;
+ int *block_stack;
+ int depth = 0;
+ int next_block_number = 0;
+ int current_block_number = 0;
+ rtx insn;
+
+ if (top_block == 0)
+ return 0;
+
+ /* First call counts the blocks (vector arg 0); the second call
+ actually fills in BLOCK_VECTOR with every block in the tree. */
+ n_blocks = all_blocks (top_block, 0);
+ block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
+ block_stack = (int *) alloca (n_blocks * sizeof (int));
+
+ all_blocks (top_block, block_vector);
+
+ /* Walk the insns, numbering the block-begin notes in depth-first
+ order and making each block-end note refer back to the number of
+ the block it closes. BLOCK_STACK tracks the enclosing blocks. */
+ for (insn = insns; insn; insn = NEXT_INSN (insn))
+ if (GET_CODE (insn) == NOTE)
+ {
+ if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
+ {
+ block_stack[depth++] = current_block_number;
+ current_block_number = next_block_number;
+ NOTE_BLOCK_NUMBER (insn) = next_block_number++;
+ }
+ if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
+ {
+ current_block_number = block_stack[--depth];
+ NOTE_BLOCK_NUMBER (insn) = current_block_number;
+ }
+ }
+
+ return block_vector;
+}
+
+/* Given BLOCK_VECTOR which was returned by identify_blocks,
+ and a revised instruction chain, rebuild the tree structure
+ of BLOCK nodes to correspond to the new order of RTL.
+ The new block tree is inserted below TOP_BLOCK.
+ Returns the current top-level block. */
+
<<<<<<< ORIGINAL
+tree
=======
+tree
>>>>>>> UPDATED
+reorder_blocks (block_vector, top_block, insns)
+ tree *block_vector;
+ tree top_block;
+ rtx insns;
+{
+ tree current_block = top_block;
+ rtx insn;
+
+ if (block_vector == 0)
+ return top_block;
+
+ /* Prune the old tree away, so that it doesn't get in the way. */
+ BLOCK_SUBBLOCKS (current_block) = 0;
+
+ /* Rebuild the block tree by walking the (possibly reordered) insns:
+ each block-begin note opens a child of CURRENT_BLOCK and each
+ block-end note pops back to the parent. */
+ for (insn = insns; insn; insn = NEXT_INSN (insn))
+ if (GET_CODE (insn) == NOTE)
+ {
+ if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
+ {
+ tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
+ /* If we have seen this block before, copy it. */
+ if (TREE_ASM_WRITTEN (block))
+ block = copy_node (block);
+ BLOCK_SUBBLOCKS (block) = 0;
+ TREE_ASM_WRITTEN (block) = 1;
+ BLOCK_SUPERCONTEXT (block) = current_block;
+ /* Chain it in front of its siblings; the list is reversed
+ into source order when the enclosing block ends. */
+ BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
+ BLOCK_SUBBLOCKS (current_block) = block;
+ current_block = block;
+ NOTE_SOURCE_FILE (insn) = 0;
+ }
+ if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
+ {
+ BLOCK_SUBBLOCKS (current_block)
+ = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
+ current_block = BLOCK_SUPERCONTEXT (current_block);
+ NOTE_SOURCE_FILE (insn) = 0;
+ }
+ }
+
+ return current_block;
+}
+
+/* Reverse the order of elements in the chain T of blocks,
+ and return the new head of the chain (old last element). */
+
+static tree
+blocks_nreverse (t)
+ tree t;
+{
+ /* Standard in-place reversal of a singly-linked chain of BLOCKs. */
+ register tree prev = 0, decl, next;
+ for (decl = t; decl; decl = next)
+ {
+ next = BLOCK_CHAIN (decl);
+ BLOCK_CHAIN (decl) = prev;
+ prev = decl;
+ }
+ return prev;
+}
+
+/* Count the subblocks of BLOCK, and list them all into the vector VECTOR.
+ Also clear TREE_ASM_WRITTEN in all blocks. */
+
+static int
+all_blocks (block, vector)
+ tree block;
+ tree *vector;
+{
+ /* N_BLOCKS counts BLOCK itself plus all its subblocks, recursively.
+ If VECTOR is zero we only count; otherwise we also record. */
+ int n_blocks = 1;
+ tree subblocks;
+
+ TREE_ASM_WRITTEN (block) = 0;
+ /* Record this block. */
+ if (vector)
+ vector[0] = block;
+
+ /* Record the subblocks, and their subblocks. */
+ for (subblocks = BLOCK_SUBBLOCKS (block);
+ subblocks; subblocks = BLOCK_CHAIN (subblocks))
+ n_blocks += all_blocks (subblocks, vector ? vector + n_blocks : 0);
+
+ return n_blocks;
+}
+\f
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
and initialize static variables for generating RTL for the statements
of the function. */
/* Make sure first insn is a note even if we don't want linenums.
This makes sure the first insn will never be deleted.
Also, final expects a note to appear there. */
- emit_note (0, NOTE_INSN_DELETED);
+ emit_note (NULL_PTR, NOTE_INSN_DELETED);
/* Set flags used by final.c. */
if (aggregate_value_p (DECL_RESULT (subr)))
/* If function gets a static chain arg, store it in the stack frame.
Do this first, so it gets the first stack slot offset. */
if (current_function_needs_context)
- emit_move_insn (assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0),
- static_chain_incoming_rtx);
+ {
+ last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
+ emit_move_insn (last_ptr, static_chain_incoming_rtx);
+ }
/* If the parameters of this function need cleaning up, get a label
for the beginning of the code which executes those cleanups. This must
REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
/* Needed because we may need to move this to memory
in case it's a named return value whose address is taken. */
- TREE_REGDECL (DECL_RESULT (subr)) = 1;
+ DECL_REGISTER (DECL_RESULT (subr)) = 1;
}
}
The move is supposed to make sdb output more accurate. */
/* Indicate the beginning of the function body,
as opposed to parm setup. */
- emit_note (0, NOTE_INSN_FUNCTION_BEG);
+ emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
/* If doing stupid allocation, mark parms as born here. */
if (GET_CODE (get_last_insn ()) != NOTE)
- emit_note (0, NOTE_INSN_DELETED);
+ emit_note (NULL_PTR, NOTE_INSN_DELETED);
parm_birth_insn = get_last_insn ();
if (obey_regdecls)
/* Fetch static chain values for containing functions. */
tem = decl_function_context (current_function_decl);
- if (tem)
+ /* If not doing stupid register allocation, then start off with the static
+ chain pointer in a pseudo register. Otherwise, we use the stack
+ address that was generated above. */
+ if (tem && ! obey_regdecls)
last_ptr = copy_to_reg (static_chain_incoming_rtx);
context_display = 0;
while (tem)
/* After the display initializations is where the tail-recursion label
should go, if we end up needing one. Ensure we have a NOTE here
since some things (like trampolines) get placed before this. */
- tail_recursion_reentry = emit_note (0, NOTE_INSN_DELETED);
+ tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
/* Evaluate now the sizes of any types declared among the arguments. */
for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
- expand_expr (TREE_VALUE (tem), 0, VOIDmode, 0);
+ expand_expr (TREE_VALUE (tem), NULL_RTX, VOIDmode, 0);
/* Make sure there is a line number after the function entry setup code. */
force_next_line_note ();
start_sequence ();
tramp = change_address (initial_trampoline, BLKmode,
round_trampoline_addr (XEXP (tramp, 0)));
- emit_block_move (tramp, initial_trampoline,
- gen_rtx (CONST_INT, VOIDmode, TRAMPOLINE_SIZE),
+ emit_block_move (tramp, initial_trampoline, GEN_INT (TRAMPOLINE_SIZE),
FUNCTION_BOUNDARY / BITS_PER_UNIT);
INITIALIZE_TRAMPOLINE (XEXP (tramp, 0),
XEXP (DECL_RTL (function), 0), context);
/* End any sequences that failed to be closed due to syntax errors. */
while (in_sequence_p ())
- end_sequence (0);
+ end_sequence ();
/* Outside function body, can't compute type's actual size
until next function's body starts. */
/* Mark the end of the function body.
If control reaches this insn, the function can drop through
without returning a value. */
- emit_note (0, NOTE_INSN_FUNCTION_END);
+ emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
/* Output a linenumber for the end of the function.
SDB depends on this. */
#endif
if (current_function_calls_alloca)
{
- rtx tem = gen_reg_rtx (Pmode);
- emit_insn_after (gen_rtx (SET, VOIDmode, tem, stack_pointer_rtx),
- parm_birth_insn);
- emit_insn (gen_rtx (SET, VOIDmode, stack_pointer_rtx, tem));
+ rtx tem = 0;
+
+ emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
+ emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
}
/* If scalar return value was computed in a pseudo-reg,
/* If you have any cleanups to do at this point,
and they need to create temporary variables,
then you will lose. */
- fixup_gotos (0, 0, 0, get_insns (), 0);
+ fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, get_insns (), 0);
+}
+\f
+/* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
+
+static int *prologue;
+static int *epilogue;
+
+/* Create an array that records the INSN_UIDs of INSNS (either a sequence
+ or a single insn). */
+
+static int *
+record_insns (insns)
+ rtx insns;
+{
+ /* The returned vector of INSN_UIDs is zero-terminated. */
+ int *vec;
+
+ if (GET_CODE (insns) == SEQUENCE)
+ {
+ int len = XVECLEN (insns, 0);
+ vec = (int *) oballoc ((len + 1) * sizeof (int));
+ vec[len] = 0;
+ while (--len >= 0)
+ vec[len] = INSN_UID (XVECEXP (insns, 0, len));
+ }
+ else
+ {
+ /* A single insn yields a one-element vector plus the terminator. */
+ vec = (int *) oballoc (2 * sizeof (int));
+ vec[0] = INSN_UID (insns);
+ vec[1] = 0;
+ }
+ return vec;
+}
+
+/* Determine how many INSN_UIDs in VEC are part of INSN. */
+
+static int
+contains (insn, vec)
+ rtx insn;
+ int *vec;
+{
+ register int i, j;
+
+ if (GET_CODE (insn) == INSN
+ && GET_CODE (PATTERN (insn)) == SEQUENCE)
+ {
+ /* For a SEQUENCE (e.g. a filled delay slot), count how many of
+ its constituent insns appear in VEC. */
+ int count = 0;
+ for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
+ for (j = 0; vec[j]; j++)
+ if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
+ count++;
+ return count;
+ }
+ else
+ {
+ /* VEC is zero-terminated, so scan until the terminator. */
+ for (j = 0; vec[j]; j++)
+ if (INSN_UID (insn) == vec[j])
+ return 1;
+ }
+ return 0;
+}
+
+/* Generate the prologue and epilogue RTL if the machine supports it. Thread
+ this into place with notes indicating where the prologue ends and where
+ the epilogue begins. Update the basic block information when possible. */
+
+void
+thread_prologue_and_epilogue_insns (f)
+ rtx f;
+{
+#ifdef HAVE_prologue
+ if (HAVE_prologue)
+ {
+ rtx head, seq, insn;
+
+ /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
+ prologue insns and a NOTE_INSN_PROLOGUE_END. */
+ emit_note_after (NOTE_INSN_PROLOGUE_END, f);
+ seq = gen_prologue ();
+ head = emit_insn_after (seq, f);
+
+ /* Include the new prologue insns in the first block. Ignore them
+ if they form a basic block unto themselves. */
+ if (basic_block_head && n_basic_blocks
+ && GET_CODE (basic_block_head[0]) != CODE_LABEL)
+ basic_block_head[0] = NEXT_INSN (f);
+
+ /* Retain a map of the prologue insns. */
+ prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
+ }
+ else
+#endif
+ prologue = 0;
+
+#ifdef HAVE_epilogue
+ if (HAVE_epilogue)
+ {
+ rtx insn = get_last_insn ();
+ rtx prev = prev_nonnote_insn (insn);
+
+ /* If we end with a BARRIER, we don't need an epilogue. */
+ if (! (prev && GET_CODE (prev) == BARRIER))
+ {
+ rtx tail, seq;
+
+ /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG,
+ the epilogue insns (this must include the jump insn that
+ returns), USE insns at the end of a function, and a BARRIER. */
+
+ emit_barrier_after (insn);
+
+ /* Place the epilogue before the USE insns at the end of a
+ function. */
+ while (prev
+ && GET_CODE (prev) == INSN
+ && GET_CODE (PATTERN (prev)) == USE)
+ {
+ insn = PREV_INSN (prev);
+ prev = prev_nonnote_insn (prev);
+ }
+
+ seq = gen_epilogue ();
+ tail = emit_jump_insn_after (seq, insn);
+ emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
+
+ /* Include the new epilogue insns in the last block. Ignore
+ them if they form a basic block unto themselves. */
+ if (basic_block_end && n_basic_blocks
+ && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
+ basic_block_end[n_basic_blocks - 1] = tail;
+
+ /* Retain a map of the epilogue insns. */
+ epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
+ return;
+ }
+ }
+#endif
+ epilogue = 0;
+}
+
+/* Reposition the prologue-end and epilogue-begin notes after instruction
+ scheduling and delayed branch scheduling. */
+
+void
+reposition_prologue_and_epilogue_notes (f)
+ rtx f;
+{
+#if defined (HAVE_prologue) || defined (HAVE_epilogue)
+ /* Reposition the prologue and epilogue notes. */
+ if (n_basic_blocks)
+ {
+ rtx next, prev;
+ int len;
+
+ if (prologue)
+ {
+ register rtx insn, note = 0;
+
+ /* Scan from the beginning until we reach the last prologue insn.
+ We apparently can't depend on basic_block_{head,end} after
+ reorg has run. */
+ /* LEN starts as the number of recorded prologue insns; each
+ match found by `contains' decrements it, so it hits zero at
+ the last prologue insn. */
+ for (len = 0; prologue[len]; len++)
+ ;
+ for (insn = f; insn; insn = NEXT_INSN (insn))
+ if (GET_CODE (insn) == NOTE)
+ {
+ if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
+ note = insn;
+ }
+ else if ((len -= contains (insn, prologue)) == 0)
+ {
+ /* Find the prologue-end note if we haven't already, and
+ move it to just after the last prologue insn. */
+ if (note == 0)
+ for (note = insn; note = NEXT_INSN (note);)
+ if (GET_CODE (note) == NOTE
+ && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
+ break;
+ /* Unlink NOTE from its current position, then reinsert it
+ immediately after INSN (the last prologue insn). */
+ next = NEXT_INSN (note);
+ prev = PREV_INSN (note);
+ if (prev)
+ NEXT_INSN (prev) = next;
+ if (next)
+ PREV_INSN (next) = prev;
+ add_insn_after (note, insn);
+ break;
+ }
+ }
+
+ if (epilogue)
+ {
+ register rtx insn, note = 0;
+
+ /* Scan from the end until we reach the first epilogue insn.
+ We apparently can't depend on basic_block_{head,end} after
+ reorg has run. */
+ for (len = 0; epilogue[len]; len++)
+ ;
+ for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
+ if (GET_CODE (insn) == NOTE)
+ {
+ if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
+ note = insn;
+ }
+ else if ((len -= contains (insn, epilogue)) == 0)
+ {
+ /* Find the epilogue-begin note if we haven't already, and
+ move it to just before the first epilogue insn. */
+ if (note == 0)
+ for (note = insn; note = PREV_INSN (note);)
+ if (GET_CODE (note) == NOTE
+ && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
+ break;
+ /* Unlink NOTE, then reinsert it just before INSN (the
+ first epilogue insn). */
+ next = NEXT_INSN (note);
+ prev = PREV_INSN (note);
+ if (prev)
+ NEXT_INSN (prev) = next;
+ if (next)
+ PREV_INSN (next) = prev;
+ add_insn_after (note, PREV_INSN (insn));
+ break;
+ }
+ }
+ }
+#endif /* HAVE_prologue or HAVE_epilogue */
}