#include "ggc.h"
#include "tm_p.h"
+#ifndef ACCUMULATE_OUTGOING_ARGS
+#define ACCUMULATE_OUTGOING_ARGS 0
+#endif
+
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif
compiler passes. */
int current_function_is_leaf;
+/* Nonzero if function being compiled doesn't contain any instructions
+ that can throw an exception. This is set prior to final. */
+
+int current_function_nothrow;
+
/* Nonzero if function being compiled doesn't modify the stack pointer
(ignoring the prologue and epilogue). This is only valid after
life_analysis has run. */
struct function *all_functions = 0;
/* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
-static int *prologue;
-static int *epilogue;
+static varray_type prologue;
+static varray_type epilogue;
+
+/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
+ in this function. */
+static varray_type sibcall_epilogue;
\f
/* In order to evaluate some expressions, such as function calls returning
structures in memory, we need to temporarily allocate stack locations.
static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
enum machine_mode, enum machine_mode,
- int, int, int, struct hash_table *));
+ int, unsigned int, int,
+ struct hash_table *));
static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int,
struct hash_table *));
static struct fixup_replacement
static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
static void instantiate_decls PARAMS ((tree, int));
static void instantiate_decls_1 PARAMS ((tree, int));
-static void instantiate_decl PARAMS ((rtx, int, int));
+static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
static void delete_handlers PARAMS ((void));
static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
static tree round_down PARAMS ((tree, int));
#endif
static rtx round_trampoline_addr PARAMS ((rtx));
+static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
+static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
static tree blocks_nreverse PARAMS ((tree));
static int all_blocks PARAMS ((tree, tree *));
static tree *get_block_vector PARAMS ((tree, int *));
/* We always define `record_insns' even if it's not used so that we
can always export `prologue_epilogue_contains'. */
-static int *record_insns PARAMS ((rtx)) ATTRIBUTE_UNUSED;
-static int contains PARAMS ((rtx, int *));
+static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
+static int contains PARAMS ((rtx, varray_type));
#ifdef HAVE_return
static void emit_return_into_block PARAMS ((basic_block));
#endif
static void prepare_function_start PARAMS ((void));
static void do_clobber_return_reg PARAMS ((rtx, void *));
static void do_use_return_reg PARAMS ((rtx, void *));
-static void preserve_rtl_expr_temp PARAMS ((struct temp_slot *));
\f
/* Pointer to chain of `struct function' for containing functions. */
struct function *outer_function_chain;
free_after_compilation (f)
struct function *f;
{
+ struct temp_slot *ts;
+ struct temp_slot *next;
+
free_eh_status (f);
free_expr_status (f);
free_emit_status (f);
if (f->x_parm_reg_stack_loc)
free (f->x_parm_reg_stack_loc);
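+ /* The temp slot structures are xmalloc'd, so walk the chain and free each one. */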
+ for (ts = f->x_temp_slots; ts; ts = next)
+ {
+ next = ts->next;
+ free (ts);
+ }
+ f->x_temp_slots = NULL;
+
f->arg_offset_rtx = NULL;
f->return_rtx = NULL;
f->internal_arg_pointer = NULL;
f->x_parm_birth_insn = NULL;
f->x_last_parm_insn = NULL;
f->x_parm_reg_stack_loc = NULL;
- f->x_temp_slots = NULL;
f->fixup_var_refs_queue = NULL;
f->original_arg_vector = NULL;
f->original_decl_initial = NULL;
if (best_p->size - rounded_size >= alignment)
{
- p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
+ p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
p->in_use = p->addr_taken = 0;
p->size = best_p->size - rounded_size;
p->base_offset = best_p->base_offset + rounded_size;
{
HOST_WIDE_INT frame_offset_old = frame_offset;
- p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
+ p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
/* We are passing an explicit alignment request to assign_stack_local.
One side effect of that is assign_stack_local will not round SIZE
}
/* Either delete Q or advance past it. */
if (delete_q)
- prev_q->next = q->next;
+ {
+ prev_q->next = q->next;
+ free (q);
+ }
else
prev_q = q;
}
p->level--;
}
-/* Preserve the temporary slot given by P (originally created during
- the building of an RTL_EXPR) at least as long as things in our
- current scope. */
-
-static void
-preserve_rtl_expr_temp (p)
- struct temp_slot *p;
-{
- /* Set the slot level to that of the currently prevailing scope. */
- p->level = MIN (p->level, temp_slot_level);
- /* This slot is no longer associated with the RTL_EXPR from which it
- originated. */
- p->rtl_expr = NULL_TREE;
-}
-
-/* Preserve the temporary slots created during the building of the
- RTL_EXPR given by T at least as long as things in our current
- scope. */
-
-void
-preserve_rtl_expr_temps (t)
- tree t;
-{
- struct temp_slot *p;
-
- for (p = temp_slots; p; p = p->next)
- if (p->in_use && p->rtl_expr == t)
- preserve_rtl_expr_temp (p);
-}
-
/* X is the result of an RTL_EXPR. If it is a temporary slot associated
with that RTL_EXPR, promote it into a temporary slot at the present
level so it will not be freed when we free slots made in the
/* If we can find a match, move it to our level unless it is already at
an upper level. */
p = find_temp_slot_from_address (XEXP (x, 0));
- if (p)
- preserve_rtl_expr_temp (p);
+ if (p != 0)
+ {
+ p->level = MIN (p->level, temp_slot_level);
+ p->rtl_expr = 0;
+ }
return;
}
for (p = temp_slots; p; p = p->next)
if (p->rtl_expr == t)
- p->in_use = 0;
+ {
+ /* If this slot is below the current TEMP_SLOT_LEVEL, then it
+ needs to be preserved. This can happen if a temporary in
+ the RTL_EXPR was addressed; preserve_temp_slots will move
+ the temporary into a higher level. */
+ if (temp_slot_level <= p->level)
+ p->in_use = 0;
+ else
+ p->rtl_expr = NULL_TREE;
+ }
combine_temp_slots ();
}
/* A CONCAT contains two pseudos; put them both in the stack.
We do it so they end up consecutive. */
enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
- tree part_type = TREE_TYPE (TREE_TYPE (decl));
+ tree part_type = type_for_mode (part_mode, 0);
#ifdef FRAME_GROWS_DOWNWARD
/* Since part 0 should have a lower address, do it second. */
put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
PUT_CODE (reg, MEM);
MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
MEM_ALIAS_SET (reg) = get_alias_set (decl);
+ MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (TREE_TYPE (decl)));
/* The two parts are in memory order already.
Use the lower parts address as ours. */
tree type;
enum machine_mode promoted_mode, decl_mode;
int volatile_p;
- int original_regno;
+ unsigned int original_regno;
int used_p;
struct hash_table *ht;
{
struct function *func = function ? function : cfun;
rtx new = 0;
- int regno = original_regno;
+ unsigned int regno = original_regno;
if (regno == 0)
regno = REGNO (reg);
if (regno < func->x_max_parm_reg)
new = func->x_parm_reg_stack_loc[regno];
+
if (new == 0)
new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
rtx first_insn = get_insns ();
struct sequence_stack *stack = seq_stack;
tree rtl_exps = rtl_expr_chain;
+ rtx insn;
/* Must scan all insns for stack-refs that exceed the limit. */
fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
}
/* Scan the catch clauses for exception handling too. */
- push_to_sequence (catch_clauses);
+ push_to_full_sequence (catch_clauses, catch_clauses_last);
fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
0, 0);
- end_sequence ();
+ end_full_sequence (&catch_clauses, &catch_clauses_last);
+
+ /* Scan sequences saved in CALL_PLACEHOLDERS too. */
+ for (insn = first_insn; insn; insn = NEXT_INSN (insn))
+ {
+ if (GET_CODE (insn) == CALL_INSN
+ && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
+ {
+ int i;
+
+ /* Look at the Normal call, sibling call and tail recursion
+ sequences attached to the CALL_PLACEHOLDER. */
+ for (i = 0; i < 3; i++)
+ {
+ rtx seq = XEXP (PATTERN (insn), i);
+ if (seq)
+ {
+ push_to_sequence (seq);
+ fixup_var_refs_insns (var, promoted_mode, unsignedp,
+ seq, 0, 0);
+ XEXP (PATTERN (insn), i) = get_insns ();
+ end_sequence ();
+ }
+ }
+ }
+ }
}
\f
/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
#ifndef STACK_DYNAMIC_OFFSET
-#ifdef ACCUMULATE_OUTGOING_ARGS
/* The bottom of the stack points to the actual arguments. If
REG_PARM_STACK_SPACE is defined, this includes the space for the register
parameters. However, if OUTGOING_REG_PARM_STACK space is not defined,
#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
-(current_function_outgoing_args_size \
- + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
+((ACCUMULATE_OUTGOING_ARGS \
+ ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
+ + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
-(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
-#endif
-
-#else
-#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
+((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
+ + (STACK_POINTER_OFFSET))
#endif
#endif
-/* On a few machines, the CFA coincides with the arg pointer. */
+/* On most machines, the CFA coincides with the first incoming parm. */
#ifndef ARG_POINTER_CFA_OFFSET
-#define ARG_POINTER_CFA_OFFSET 0
+#define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
#endif
/* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
-#if 0
void
flush_addressof (decl)
tree decl;
&& GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
}
-#endif
/* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
start_sequence ();
store_bit_field (sub, size_x, 0, GET_MODE (x),
val, GET_MODE_SIZE (GET_MODE (sub)),
- GET_MODE_SIZE (GET_MODE (sub)));
+ GET_MODE_ALIGNMENT (GET_MODE (sub)));
/* Make sure to unshare any shared rtl that store_bit_field
might have created. */
/* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
requires a fixup pass over the instruction stream to correct
INSNs that depended on the REG being a REG, and not a MEM. But,
- these fixup passes are slow. Furthermore, more MEMs are not
+ these fixup passes are slow. Furthermore, most MEMs are not
mentioned in very many instructions. So, we speed up the process
by pre-calculating which REGs occur in which INSNs; that allows
us to perform the fixup passes much more quickly. */
rtx insns;
{
rtx insn;
- int i;
+ unsigned int i;
/* Compute the offsets to use for this function. */
in_arg_offset = FIRST_PARM_OFFSET (fndecl);
var_offset = STARTING_FRAME_OFFSET;
dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
out_arg_offset = STACK_POINTER_OFFSET;
- cfa_offset = ARG_POINTER_CFA_OFFSET;
+ cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
/* Scan all variables and parameters of this function. For each that is
in memory, instantiate all virtual registers if the result is a valid
static void
instantiate_decl (x, size, valid_only)
rtx x;
- int size;
+ HOST_WIDE_INT size;
int valid_only;
{
enum machine_mode mode;
instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
- if (valid_only)
+ if (valid_only && size >= 0)
{
+ unsigned HOST_WIDE_INT decl_size = size;
+
/* Now verify that the resulting address is valid for every integer or
floating-point mode up to and including SIZE bytes long. We do this
since the object might be accessed in any mode and frame addresses
are shared. */
for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
- mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
+ mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
mode = GET_MODE_WIDER_MODE (mode))
if (! memory_address_p (mode, addr))
return;
for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
- mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
+ mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
mode = GET_MODE_WIDER_MODE (mode))
if (! memory_address_p (mode, addr))
return;
{
int i, regno, nregs;
rtx reg;
- tree type;
- if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
- type = exp;
- else
- type = TREE_TYPE (exp);
+
+ tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
if (RETURN_IN_MEMORY (type))
return 1;
if (GET_CODE (entry_parm) == PARALLEL)
emit_group_store (validize_mem (stack_parm), entry_parm,
int_size_in_bytes (TREE_TYPE (parm)),
- (TYPE_ALIGN (TREE_TYPE (parm))
- / BITS_PER_UNIT));
+ TYPE_ALIGN (TREE_TYPE (parm)));
+
else
move_block_from_reg (REGNO (entry_parm),
validize_mem (stack_parm), nregs,
if (GET_CODE (entry_parm) == PARALLEL)
emit_group_store (validize_mem (stack_parm), entry_parm,
int_size_in_bytes (TREE_TYPE (parm)),
- (TYPE_ALIGN (TREE_TYPE (parm))
- / BITS_PER_UNIT));
+ TYPE_ALIGN (TREE_TYPE (parm)));
else
move_block_from_reg (REGNO (entry_parm),
validize_mem (stack_parm),
may need to do it in a wider mode. */
register rtx parmreg;
- int regno, regnoi = 0, regnor = 0;
+ unsigned int regno, regnoi = 0, regnor = 0;
unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
push_to_sequence (conversion_insns);
- if (TYPE_SIZE (type) == 0
+ if (!COMPLETE_TYPE_P (type)
|| TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
/* This is a variable sized object. */
copy = gen_rtx_MEM (BLKmode,
/* For pointer data type, suggest pointer register. */
if (POINTER_TYPE_P (TREE_TYPE (parm)))
mark_reg_pointer (parmreg,
- (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
- / BITS_PER_UNIT));
+ TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
+
}
else
{
rtx
promoted_input_arg (regno, pmode, punsignedp)
- int regno;
+ unsigned int regno;
enum machine_mode *pmode;
int *punsignedp;
{
return tramp;
}
\f
-/* The functions identify_blocks and reorder_blocks provide a way to
- reorder the tree of BLOCK nodes, for optimizers that reshuffle or
- duplicate portions of the RTL code. Call identify_blocks before
- changing the RTL, and call reorder_blocks after. */
-
/* Put all this function's BLOCK nodes including those that are chained
onto the first block into a vector, and return it.
Also store in each NOTE for the beginning or end of a block
and INSNS, the insn chain of the function. */
void
-identify_blocks (block, insns)
- tree block;
- rtx insns;
+identify_blocks ()
{
int n_blocks;
- tree *block_vector;
+ tree *block_vector, *last_block_vector;
tree *block_stack;
- int depth = 0;
- int current_block_number = 1;
- rtx insn;
+ tree block = DECL_INITIAL (current_function_decl);
if (block == 0)
return;
block_vector = get_block_vector (block, &n_blocks);
block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
- for (insn = insns; insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == NOTE)
- {
- if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
- {
- tree b;
-
- /* If there are more block notes than BLOCKs, something
- is badly wrong. */
- if (current_block_number == n_blocks)
- abort ();
-
- b = block_vector[current_block_number++];
- NOTE_BLOCK (insn) = b;
- block_stack[depth++] = b;
- }
- else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
- {
- if (depth == 0)
- /* There are more NOTE_INSN_BLOCK_ENDs that
- NOTE_INSN_BLOCK_BEGs. Something is badly wrong. */
- abort ();
+ last_block_vector = identify_blocks_1 (get_insns (),
+ block_vector + 1,
+ block_vector + n_blocks,
+ block_stack);
- NOTE_BLOCK (insn) = block_stack[--depth];
- }
- }
+ /* If we didn't use all of the subblocks, we've misplaced block notes. */
+ /* ??? This appears to happen all the time. Latent bugs elsewhere? */
+ if (0 && last_block_vector != block_vector + n_blocks)
+ abort ();
free (block_vector);
free (block_stack);
}
-/* Given a revised instruction chain, rebuild the tree structure of
- BLOCK nodes to correspond to the new order of RTL. The new block
- tree is inserted below TOP_BLOCK. Returns the current top-level
- block. */
+/* Subroutine of identify_blocks. Do the block substitution on the
+ insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains.
-tree
-reorder_blocks (block, insns)
- tree block;
+ BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
+ BLOCK_VECTOR is incremented for each block seen. */
+
+static tree *
+identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
rtx insns;
+ tree *block_vector;
+ tree *end_block_vector;
+ tree *orig_block_stack;
{
- tree current_block = block;
rtx insn;
+ tree *block_stack = orig_block_stack;
+
+ for (insn = insns; insn; insn = NEXT_INSN (insn))
+ {
+ if (GET_CODE (insn) == NOTE)
+ {
+ if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
+ {
+ tree b;
+
+ /* If there are more block notes than BLOCKs, something
+ is badly wrong. */
+ if (block_vector == end_block_vector)
+ abort ();
+
+ b = *block_vector++;
+ NOTE_BLOCK (insn) = b;
+ *block_stack++ = b;
+ }
+ else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
+ {
+ /* If there are more NOTE_INSN_BLOCK_ENDs than
+ NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
+ if (block_stack == orig_block_stack)
+ abort ();
+
+ NOTE_BLOCK (insn) = *--block_stack;
+ }
+ }
+ else if (GET_CODE (insn) == CALL_INSN
+ && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
+ {
+ rtx cp = PATTERN (insn);
+
+ block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
+ end_block_vector, block_stack);
+ if (XEXP (cp, 1))
+ block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
+ end_block_vector, block_stack);
+ if (XEXP (cp, 2))
+ block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
+ end_block_vector, block_stack);
+ }
+ }
+
+ /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
+ something is badly wrong. */
+ if (block_stack != orig_block_stack)
+ abort ();
+
+ return block_vector;
+}
+
+/* Identify BLOCKs referenced by more than one
+ NOTE_INSN_BLOCK_{BEG,END}, and create duplicate blocks. */
+
+void
+reorder_blocks ()
+{
+ tree block = DECL_INITIAL (current_function_decl);
varray_type block_stack;
if (block == NULL_TREE)
- return NULL_TREE;
+ return;
VARRAY_TREE_INIT (block_stack, 10, "block_stack");
- /* Prune the old trees away, so that it doesn't get in the way. */
- BLOCK_SUBBLOCKS (current_block) = 0;
- BLOCK_CHAIN (current_block) = 0;
+ /* Prune the old trees away, so that they don't get in the way. */
+ BLOCK_SUBBLOCKS (block) = NULL_TREE;
+ BLOCK_CHAIN (block) = NULL_TREE;
- for (insn = insns; insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == NOTE)
- {
- if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
- {
- tree block = NOTE_BLOCK (insn);
- /* If we have seen this block before, copy it. */
- if (TREE_ASM_WRITTEN (block))
- {
- block = copy_node (block);
- NOTE_BLOCK (insn) = block;
- }
- BLOCK_SUBBLOCKS (block) = 0;
- TREE_ASM_WRITTEN (block) = 1;
- BLOCK_SUPERCONTEXT (block) = current_block;
- BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
- BLOCK_SUBBLOCKS (current_block) = block;
- current_block = block;
- VARRAY_PUSH_TREE (block_stack, block);
- }
- else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
- {
- NOTE_BLOCK (insn) = VARRAY_TOP_TREE (block_stack);
- VARRAY_POP (block_stack);
- BLOCK_SUBBLOCKS (current_block)
- = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
- current_block = BLOCK_SUPERCONTEXT (current_block);
- }
- }
+ reorder_blocks_1 (get_insns (), block, &block_stack);
- BLOCK_SUBBLOCKS (current_block)
- = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
+ BLOCK_SUBBLOCKS (block)
+ = blocks_nreverse (BLOCK_SUBBLOCKS (block));
VARRAY_FREE (block_stack);
+}
+
+/* Helper function for reorder_blocks. Process the insn chain beginning
+ at INSNS. Recurse for CALL_PLACEHOLDER insns. */
+
+static void
+reorder_blocks_1 (insns, current_block, p_block_stack)
+ rtx insns;
+ tree current_block;
+ varray_type *p_block_stack;
+{
+ rtx insn;
- return current_block;
+ for (insn = insns; insn; insn = NEXT_INSN (insn))
+ {
+ if (GET_CODE (insn) == NOTE)
+ {
+ if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
+ {
+ tree block = NOTE_BLOCK (insn);
+ /* If we have seen this block before, copy it. */
+ if (TREE_ASM_WRITTEN (block))
+ {
+ block = copy_node (block);
+ NOTE_BLOCK (insn) = block;
+ }
+ BLOCK_SUBBLOCKS (block) = 0;
+ TREE_ASM_WRITTEN (block) = 1;
+ BLOCK_SUPERCONTEXT (block) = current_block;
+ BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
+ BLOCK_SUBBLOCKS (current_block) = block;
+ current_block = block;
+ VARRAY_PUSH_TREE (*p_block_stack, block);
+ }
+ else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
+ {
+ NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
+ VARRAY_POP (*p_block_stack);
+ BLOCK_SUBBLOCKS (current_block)
+ = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
+ current_block = BLOCK_SUPERCONTEXT (current_block);
+ }
+ }
+ else if (GET_CODE (insn) == CALL_INSN
+ && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
+ {
+ rtx cp = PATTERN (insn);
+ reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
+ if (XEXP (cp, 1))
+ reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
+ if (XEXP (cp, 2))
+ reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
+ }
+ }
}
/* Reverse the order of elements in the chain T of blocks,
cfun->original_decl_initial = 0;
cfun->original_arg_vector = 0;
- cfun->stack_alignment_needed = 0;
#ifdef STACK_BOUNDARY
+ cfun->stack_alignment_needed = STACK_BOUNDARY;
cfun->preferred_stack_boundary = STACK_BOUNDARY;
+#else
+ cfun->stack_alignment_needed = 0;
+ cfun->preferred_stack_boundary = 0;
#endif
/* Set if a call to setjmp is seen. */
current_function_calls_alloca = 0;
current_function_contains_functions = 0;
current_function_is_leaf = 0;
+ current_function_nothrow = 0;
current_function_sp_is_unchanging = 0;
current_function_uses_only_leaf_regs = 0;
current_function_has_computed_jump = 0;
init_function_for_compilation ()
{
reg_renumber = 0;
+
/* No prologue/epilogue insns yet. */
- prologue = epilogue = 0;
+ VARRAY_GROW (prologue, 0);
+ VARRAY_GROW (epilogue, 0);
+ VARRAY_GROW (sibcall_epilogue, 0);
}
/* Indicate that the current function uses extra args
blktramp = change_address (initial_trampoline, BLKmode, tramp);
emit_block_move (blktramp, initial_trampoline,
GEN_INT (TRAMPOLINE_SIZE),
- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
+ TRAMPOLINE_ALIGNMENT);
#endif
INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
seq = get_insns ();
/* If there are any catch_clauses remaining, output them now. */
emit_insns (catch_clauses);
- catch_clauses = NULL_RTX;
+ catch_clauses = catch_clauses_last = NULL_RTX;
/* If the above emitted any code, make sure we jump around it. */
if (last != get_last_insn ())
{
expand_fixups (get_insns ());
}
\f
-/* Create an array that records the INSN_UIDs of INSNS (either a sequence
- or a single insn). */
+/* Extend a vector that records the INSN_UIDs of INSNS (either a
+ sequence or a single insn). */
-static int *
-record_insns (insns)
+static void
+record_insns (insns, vecp)
rtx insns;
+ varray_type *vecp;
{
- int *vec;
-
if (GET_CODE (insns) == SEQUENCE)
{
int len = XVECLEN (insns, 0);
- vec = (int *) oballoc ((len + 1) * sizeof (int));
- vec[len] = 0;
+ int i = VARRAY_SIZE (*vecp);
+
+ VARRAY_GROW (*vecp, i + len);
while (--len >= 0)
- vec[len] = INSN_UID (XVECEXP (insns, 0, len));
+ {
+ VARRAY_INT (*vecp, i) = INSN_UID (XVECEXP (insns, 0, len));
+ ++i;
+ }
}
else
{
- vec = (int *) oballoc (2 * sizeof (int));
- vec[0] = INSN_UID (insns);
- vec[1] = 0;
+ int i = VARRAY_SIZE (*vecp);
+ VARRAY_GROW (*vecp, i + 1);
+ VARRAY_INT (*vecp, i) = INSN_UID (insns);
}
- return vec;
}
/* Determine how many INSN_UIDs in VEC are part of INSN. */
static int
contains (insn, vec)
rtx insn;
- int *vec;
+ varray_type vec;
{
register int i, j;
{
int count = 0;
for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
- for (j = 0; vec[j]; j++)
- if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
+ for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
+ if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
count++;
return count;
}
else
{
- for (j = 0; vec[j]; j++)
- if (INSN_UID (insn) == vec[j])
+ for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
+ if (INSN_UID (insn) == VARRAY_INT (vec, j))
return 1;
}
return 0;
prologue_epilogue_contains (insn)
rtx insn;
{
- if (prologue && contains (insn, prologue))
+ if (contains (insn, prologue))
return 1;
- if (epilogue && contains (insn, epilogue))
+ if (contains (insn, epilogue))
return 1;
return 0;
}
+int
+sibcall_epilogue_contains (insn)
+ rtx insn;
+{
+ if (sibcall_epilogue)
+ return contains (insn, sibcall_epilogue);
+ return 0;
+}
+
#ifdef HAVE_return
/* Insert gen_return at the end of block BB. This also means updating
block_for_insn appropriately. */
/* Retain a map of the prologue insns. */
if (GET_CODE (seq) != SEQUENCE)
seq = get_insns ();
- prologue = record_insns (seq);
+ record_insns (seq, &prologue);
emit_note (NULL, NOTE_INSN_PROLOGUE_END);
/* GDB handles `break f' by setting a breakpoint on the first
/* Retain a map of the epilogue insns. */
if (GET_CODE (seq) != SEQUENCE)
seq = get_insns ();
- epilogue = record_insns (seq);
+ record_insns (seq, &epilogue);
seq = gen_sequence ();
end_sequence();
if (insertted)
commit_edge_insertions ();
+
+#ifdef HAVE_sibcall_epilogue
+ /* Emit sibling epilogues before any sibling call sites. */
+ for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
+ {
+ basic_block bb = e->src;
+ rtx insn = bb->end;
+ rtx i;
+
+ if (GET_CODE (insn) != CALL_INSN
+ || ! SIBLING_CALL_P (insn))
+ continue;
+
+ start_sequence ();
+ seq = gen_sibcall_epilogue ();
+ end_sequence ();
+
+ i = PREV_INSN (insn);
+ emit_insn_before (seq, insn);
+
+ /* Update the UID to basic block map. */
+ for (i = NEXT_INSN (i); i != insn; i = NEXT_INSN (i))
+ set_block_for_insn (i, bb);
+
+ /* Retain a map of the epilogue insns. Used in life analysis to
+ avoid getting rid of sibcall epilogue insns. */
+ record_insns (seq, &sibcall_epilogue);
+ }
+#endif
}
/* Reposition the prologue-end and epilogue-begin notes after instruction
rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
- /* Reposition the prologue and epilogue notes. */
- if (n_basic_blocks)
+ int len;
+
+ if ((len = VARRAY_SIZE (prologue)) > 0)
{
- int len;
+ register rtx insn, note = 0;
- if (prologue)
+ /* Scan from the beginning until we reach the last prologue insn.
+ We apparently can't depend on basic_block_{head,end} after
+ reorg has run. */
+ for (insn = f; len && insn; insn = NEXT_INSN (insn))
{
- register rtx insn, note = 0;
-
- /* Scan from the beginning until we reach the last prologue insn.
- We apparently can't depend on basic_block_{head,end} after
- reorg has run. */
- for (len = 0; prologue[len]; len++)
- ;
- for (insn = f; len && insn; insn = NEXT_INSN (insn))
+ if (GET_CODE (insn) == NOTE)
{
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
+ note = insn;
+ }
+ else if ((len -= contains (insn, prologue)) == 0)
+ {
+ rtx next;
+ /* Find the prologue-end note if we haven't already, and
+ move it to just after the last prologue insn. */
+ if (note == 0)
{
- if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
- note = insn;
+ for (note = insn; (note = NEXT_INSN (note));)
+ if (GET_CODE (note) == NOTE
+ && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
+ break;
}
- else if ((len -= contains (insn, prologue)) == 0)
- {
- rtx next;
- /* Find the prologue-end note if we haven't already, and
- move it to just after the last prologue insn. */
- if (note == 0)
- {
- for (note = insn; (note = NEXT_INSN (note));)
- if (GET_CODE (note) == NOTE
- && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
- break;
- }
- next = NEXT_INSN (note);
+ next = NEXT_INSN (note);
- /* Whether or not we can depend on BLOCK_HEAD,
- attempt to keep it up-to-date. */
- if (BLOCK_HEAD (0) == note)
- BLOCK_HEAD (0) = next;
+ /* Whether or not we can depend on BLOCK_HEAD,
+ attempt to keep it up-to-date. */
+ if (BLOCK_HEAD (0) == note)
+ BLOCK_HEAD (0) = next;
- remove_insn (note);
- add_insn_after (note, insn);
- }
+ remove_insn (note);
+ add_insn_after (note, insn);
}
}
+ }
+
+ if ((len = VARRAY_SIZE (epilogue)) > 0)
+ {
+ register rtx insn, note = 0;
- if (epilogue)
+ /* Scan from the end until we reach the first epilogue insn.
+ We apparently can't depend on basic_block_{head,end} after
+ reorg has run. */
+ for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
{
- register rtx insn, note = 0;
-
- /* Scan from the end until we reach the first epilogue insn.
- We apparently can't depend on basic_block_{head,end} after
- reorg has run. */
- for (len = 0; epilogue[len]; len++)
- ;
- for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
+ if (GET_CODE (insn) == NOTE)
+ {
+ if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
+ note = insn;
+ }
+ else if ((len -= contains (insn, epilogue)) == 0)
{
- if (GET_CODE (insn) == NOTE)
+ /* Find the epilogue-begin note if we haven't already, and
+ move it to just before the first epilogue insn. */
+ if (note == 0)
{
- if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
- note = insn;
+ for (note = insn; (note = PREV_INSN (note));)
+ if (GET_CODE (note) == NOTE
+ && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
+ break;
}
- else if ((len -= contains (insn, epilogue)) == 0)
- {
- /* Find the epilogue-begin note if we haven't already, and
- move it to just before the first epilogue insn. */
- if (note == 0)
- {
- for (note = insn; (note = PREV_INSN (note));)
- if (GET_CODE (note) == NOTE
- && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
- break;
- }
- /* Whether or not we can depend on BLOCK_HEAD,
- attempt to keep it up-to-date. */
- if (n_basic_blocks
- && BLOCK_HEAD (n_basic_blocks-1) == insn)
- BLOCK_HEAD (n_basic_blocks-1) = note;
+ /* Whether or not we can depend on BLOCK_HEAD,
+ attempt to keep it up-to-date. */
+ if (n_basic_blocks
+ && BLOCK_HEAD (n_basic_blocks-1) == insn)
+ BLOCK_HEAD (n_basic_blocks-1) = note;
- remove_insn (note);
- add_insn_before (note, insn);
- }
+ remove_insn (note);
+ add_insn_before (note, insn);
}
}
}
{
ggc_add_root (&all_functions, 1, sizeof all_functions,
mark_function_chain);
+
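+ /* Create the INSN_UID maps once; record_insns grows them as prologue,
+ epilogue and sibcall epilogue insns are recorded. */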
+ VARRAY_INT_INIT (prologue, 0, "prologue");
+ VARRAY_INT_INIT (epilogue, 0, "epilogue");
+ VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
}