/* Expands front end tree to back end RTL for GNU C-Compiler
- Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.
+ Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
+ 1998, 1999, 2000 Free Software Foundation, Inc.
This file is part of GNU CC.
#include "toplev.h"
#include "hash.h"
#include "ggc.h"
+#include "tm_p.h"
+
+#ifndef ACCUMULATE_OUTGOING_ARGS
+#define ACCUMULATE_OUTGOING_ARGS 0
+#endif
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif
+#if !defined (PREFERRED_STACK_BOUNDARY) && defined (STACK_BOUNDARY)
+#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
+#endif
+
/* Some systems use __main in a way incompatible with its use in gcc, in these
cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
give the same symbol without quotes for an alternative entry point. You
compiler passes. */
int current_function_is_leaf;
+/* Nonzero if function being compiled doesn't contain any instructions
+ that can throw an exception. This is set prior to final. */
+
+int current_function_nothrow;
+
/* Nonzero if function being compiled doesn't modify the stack pointer
(ignoring the prologue and epilogue). This is only valid after
life_analysis has run. */
/* These variables hold pointers to functions to
save and restore machine-specific data,
in push_function_context and pop_function_context. */
-void (*init_machine_status) PROTO((struct function *));
-void (*save_machine_status) PROTO((struct function *));
-void (*restore_machine_status) PROTO((struct function *));
-void (*mark_machine_status) PROTO((struct function *));
+void (*init_machine_status) PARAMS ((struct function *));
+void (*save_machine_status) PARAMS ((struct function *));
+void (*restore_machine_status) PARAMS ((struct function *));
+void (*mark_machine_status) PARAMS ((struct function *));
+void (*free_machine_status) PARAMS ((struct function *));
/* Likewise, but for language-specific data. */
-void (*init_lang_status) PROTO((struct function *));
-void (*save_lang_status) PROTO((struct function *));
-void (*restore_lang_status) PROTO((struct function *));
-void (*mark_lang_status) PROTO((struct function *));
-void (*free_lang_status) PROTO((struct function *));
+void (*init_lang_status) PARAMS ((struct function *));
+void (*save_lang_status) PARAMS ((struct function *));
+void (*restore_lang_status) PARAMS ((struct function *));
+void (*mark_lang_status) PARAMS ((struct function *));
+void (*free_lang_status) PARAMS ((struct function *));
/* The FUNCTION_DECL for an inline function currently being expanded. */
tree inline_function_decl;
/* The currently compiled function. */
-struct function *current_function = 0;
+struct function *cfun = 0;
/* Global list of all compiled functions. */
struct function *all_functions = 0;
/* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
-static int *prologue;
-static int *epilogue;
+static varray_type prologue;
+static varray_type epilogue;
+
+/* Array holding the INSN_UIDs of each sibcall epilogue
+   in this function.  */
+static varray_type sibcall_epilogue;
\f
/* In order to evaluate some expressions, such as function calls returning
structures in memory, we need to temporarily allocate stack locations.
/* Forward declarations. */
-static rtx assign_stack_local_1 PROTO ((enum machine_mode, HOST_WIDE_INT,
- int, struct function *));
-static rtx assign_stack_temp_for_type PROTO ((enum machine_mode, HOST_WIDE_INT,
- int, tree));
-static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
-static void put_reg_into_stack PROTO((struct function *, rtx, tree,
- enum machine_mode, enum machine_mode,
- int, int, int,
- struct hash_table *));
-static void fixup_var_refs PROTO((rtx, enum machine_mode, int,
- struct hash_table *));
+static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
+ int, struct function *));
+static rtx assign_stack_temp_for_type PARAMS ((enum machine_mode,
+ HOST_WIDE_INT, int, tree));
+static struct temp_slot *find_temp_slot_from_address PARAMS ((rtx));
+static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
+ enum machine_mode, enum machine_mode,
+ int, unsigned int, int,
+ struct hash_table *));
+static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int,
+ struct hash_table *));
static struct fixup_replacement
- *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
-static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
- rtx, int, struct hash_table *));
-static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
- struct fixup_replacement **));
-static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
-static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
-static rtx fixup_stack_1 PROTO((rtx, rtx));
-static void optimize_bit_field PROTO((rtx, rtx, rtx *));
-static void instantiate_decls PROTO((tree, int));
-static void instantiate_decls_1 PROTO((tree, int));
-static void instantiate_decl PROTO((rtx, int, int));
-static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
-static void delete_handlers PROTO((void));
-static void pad_to_arg_alignment PROTO((struct args_size *, int));
+ *find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
+static void fixup_var_refs_insns PARAMS ((rtx, enum machine_mode, int,
+ rtx, int, struct hash_table *));
+static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
+ struct fixup_replacement **));
+static rtx fixup_memory_subreg PARAMS ((rtx, rtx, int));
+static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, int));
+static rtx fixup_stack_1 PARAMS ((rtx, rtx));
+static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
+static void instantiate_decls PARAMS ((tree, int));
+static void instantiate_decls_1 PARAMS ((tree, int));
+static void instantiate_decl PARAMS ((rtx, HOST_WIDE_INT, int));
+static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
+static void delete_handlers PARAMS ((void));
+static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
+ struct args_size *));
#ifndef ARGS_GROW_DOWNWARD
-static void pad_below PROTO((struct args_size *, enum machine_mode,
- tree));
+static void pad_below PARAMS ((struct args_size *, enum machine_mode,
+ tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
-static tree round_down PROTO((tree, int));
+static tree round_down PARAMS ((tree, int));
#endif
-static rtx round_trampoline_addr PROTO((rtx));
-static tree blocks_nreverse PROTO((tree));
-static int all_blocks PROTO((tree, tree *));
-#if defined (HAVE_prologue) || defined (HAVE_epilogue)
-static int *record_insns PROTO((rtx));
-static int contains PROTO((rtx, int *));
-#endif /* HAVE_prologue || HAVE_epilogue */
-static void put_addressof_into_stack PROTO((rtx, struct hash_table *));
-static void purge_addressof_1 PROTO((rtx *, rtx, int, int,
- struct hash_table *));
-static struct hash_entry *insns_for_mem_newfunc PROTO((struct hash_entry *,
- struct hash_table *,
- hash_table_key));
-static unsigned long insns_for_mem_hash PROTO ((hash_table_key));
-static boolean insns_for_mem_comp PROTO ((hash_table_key, hash_table_key));
-static int insns_for_mem_walk PROTO ((rtx *, void *));
-static void compute_insns_for_mem PROTO ((rtx, rtx, struct hash_table *));
-static void mark_temp_slot PROTO ((struct temp_slot *));
-static void mark_function_state PROTO ((struct function *));
-static void mark_function_chain PROTO ((void *));
-
+static rtx round_trampoline_addr PARAMS ((rtx));
+static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
+static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
+static tree blocks_nreverse PARAMS ((tree));
+static int all_blocks PARAMS ((tree, tree *));
+static tree *get_block_vector PARAMS ((tree, int *));
+/* We always define `record_insns' even if it's not used so that we
+   can always export `prologue_epilogue_contains'.  */
+static void record_insns PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
+static int contains PARAMS ((rtx, varray_type));
+#ifdef HAVE_return
+static void emit_return_into_block PARAMS ((basic_block));
+#endif
+static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
+static boolean purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
+ struct hash_table *));
+static int is_addressof PARAMS ((rtx *, void *));
+static struct hash_entry *insns_for_mem_newfunc PARAMS ((struct hash_entry *,
+ struct hash_table *,
+ hash_table_key));
+static unsigned long insns_for_mem_hash PARAMS ((hash_table_key));
+static boolean insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
+static int insns_for_mem_walk PARAMS ((rtx *, void *));
+static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
+static void mark_temp_slot PARAMS ((struct temp_slot *));
+static void mark_function_status PARAMS ((struct function *));
+static void mark_function_chain PARAMS ((void *));
+static void prepare_function_start PARAMS ((void));
+static void do_clobber_return_reg PARAMS ((rtx, void *));
+static void do_use_return_reg PARAMS ((rtx, void *));
\f
/* Pointer to chain of `struct function' for containing functions. */
struct function *outer_function_chain;
if (context)
{
context_data = (context == current_function_decl
- ? current_function
+ ? cfun
: find_function_data (context));
context_data->contains_functions = 1;
}
- if (current_function == 0)
+ if (cfun == 0)
init_dummy_function_start ();
- p = current_function;
+ p = cfun;
p->next = outer_function_chain;
outer_function_chain = p;
if (save_machine_status)
(*save_machine_status) (p);
- current_function = 0;
+ cfun = 0;
}
void
void
pop_function_context_from (context)
- tree context;
+ tree context ATTRIBUTE_UNUSED;
{
struct function *p = outer_function_chain;
struct var_refs_queue *queue;
struct var_refs_queue *next;
- current_function = p;
+ cfun = p;
outer_function_chain = p->next;
current_function_decl = p->decl;
}
/* Clear out all parts of the state in F that can safely be discarded
+ after the function has been parsed, but not compiled, to let
+ garbage collection reclaim the memory. */
+
+void
+free_after_parsing (f)
+ struct function *f;
+{
+ /* f->expr->forced_labels is used by code generation. */
+ /* f->emit->regno_reg_rtx is used by code generation. */
+ /* f->varasm is used by code generation. */
+ /* f->eh->eh_return_stub_label is used by code generation. */
+
+ if (free_lang_status)
+ (*free_lang_status) (f);
+ free_stmt_status (f);
+}
+
+/* Clear out all parts of the state in F that can safely be discarded
after the function has been compiled, to let garbage collection
reclaim the memory. */
free_after_compilation (f)
struct function *f;
{
+ struct temp_slot *ts;
+ struct temp_slot *next;
+
+ free_eh_status (f);
+ free_expr_status (f);
free_emit_status (f);
free_varasm_status (f);
- free_stmt_status (f);
- if (free_lang_status)
- (*free_lang_status) (f);
- if (!DECL_DEFER_OUTPUT (f->decl))
+ if (free_machine_status)
+ (*free_machine_status) (f);
+
+ if (f->x_parm_reg_stack_loc)
+ free (f->x_parm_reg_stack_loc);
+
+ for (ts = f->x_temp_slots; ts; ts = next)
{
- free (f->x_parm_reg_stack_loc);
- f->can_garbage_collect = 1;
+ next = ts->next;
+ free (ts);
}
+ f->x_temp_slots = NULL;
+
+ f->arg_offset_rtx = NULL;
+ f->return_rtx = NULL;
+ f->internal_arg_pointer = NULL;
+ f->x_nonlocal_labels = NULL;
+ f->x_nonlocal_goto_handler_slots = NULL;
+ f->x_nonlocal_goto_handler_labels = NULL;
+ f->x_nonlocal_goto_stack_level = NULL;
+ f->x_cleanup_label = NULL;
+ f->x_return_label = NULL;
+ f->x_save_expr_regs = NULL;
+ f->x_stack_slot_list = NULL;
+ f->x_rtl_expr_chain = NULL;
+ f->x_tail_recursion_label = NULL;
+ f->x_tail_recursion_reentry = NULL;
+ f->x_arg_pointer_save_area = NULL;
+ f->x_context_display = NULL;
+ f->x_trampoline_list = NULL;
+ f->x_parm_birth_insn = NULL;
+ f->x_last_parm_insn = NULL;
+ f->x_parm_reg_stack_loc = NULL;
+ f->fixup_var_refs_queue = NULL;
+ f->original_arg_vector = NULL;
+ f->original_decl_initial = NULL;
+ f->inl_last_parm_insn = NULL;
+ f->epilogue_delay_list = NULL;
}
+
\f
/* Allocate fixed slots in the stack frame of the current function. */
HOST_WIDE_INT
get_frame_size ()
{
- return get_func_frame_size (current_function);
+ return get_func_frame_size (cfun);
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
/* Allocate in the memory associated with the function in whose frame
we are assigning. */
- if (function != current_function)
+ if (function != cfun)
push_obstacks (function->function_obstack,
function->function_maybepermanent_obstack);
function->x_frame_offset -= size;
#endif
+ /* Ignore alignment we can't do with expected alignment of the boundary. */
+ if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
+ alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
+
+ if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
+ function->stack_alignment_needed = alignment * BITS_PER_UNIT;
+
/* Round frame offset to that alignment.
We must be careful here, since FRAME_OFFSET might be negative and
division with a negative dividend isn't as well defined as we might
/* If we have already instantiated virtual registers, return the actual
address relative to the frame pointer. */
- if (function == current_function && virtuals_instantiated)
+ if (function == cfun && virtuals_instantiated)
addr = plus_constant (frame_pointer_rtx,
(frame_offset + bigend_correction
+ STARTING_FRAME_OFFSET));
function->x_stack_slot_list
= gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
- if (function != current_function)
+ if (function != cfun)
pop_obstacks ();
return x;
HOST_WIDE_INT size;
int align;
{
- return assign_stack_local_1 (mode, size, align, current_function);
+ return assign_stack_local_1 (mode, size, align, cfun);
}
\f
/* Allocate a temporary stack slot and record it for possible later
if (best_p->size - rounded_size >= alignment)
{
- p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
+ p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
p->in_use = p->addr_taken = 0;
p->size = best_p->size - rounded_size;
p->base_offset = best_p->base_offset + rounded_size;
{
HOST_WIDE_INT frame_offset_old = frame_offset;
- p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
+ p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
/* We are passing an explicit alignment request to assign_stack_local.
One side effect of that is assign_stack_local will not round SIZE
tree type;
int keep;
int memory_required;
- int dont_promote;
+ int dont_promote ATTRIBUTE_UNUSED;
{
enum machine_mode mode = TYPE_MODE (type);
+#ifndef PROMOTE_FOR_CALL_ONLY
int unsignedp = TREE_UNSIGNED (type);
+#endif
if (mode == BLKmode || memory_required)
{
HOST_WIDE_INT size = int_size_in_bytes (type);
rtx tmp;
+ /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
+ problems with allocating the stack space. */
+ if (size == 0)
+ size = 1;
+
/* Unfortunately, we don't yet know how to allocate variable-sized
temporaries. However, sometimes we have a fixed upper limit on
the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
}
/* Either delete Q or advance past it. */
if (delete_q)
- prev_q->next = q->next;
+ {
+ prev_q->next = q->next;
+ free (q);
+ }
else
prev_q = q;
}
return p;
}
+ /* If we have a sum involving a register, see if it points to a temp
+ slot. */
+ if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
+ && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
+ return p;
+ else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
+ && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
+ return p;
+
return 0;
}
update_temp_slot_address (old, new)
rtx old, new;
{
- struct temp_slot *p = find_temp_slot_from_address (old);
+ struct temp_slot *p;
- /* If none, return. Else add NEW as an alias. */
- if (p == 0)
+ if (rtx_equal_p (old, new))
return;
+
+ p = find_temp_slot_from_address (old);
+
+ /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
+ is a register, see if one operand of the PLUS is a temporary
+ location.  If so, NEW points into it.  Otherwise, if both OLD and
+ NEW are a PLUS and there is a register in common between them,
+ try a recursive call on those values. */
+ if (p == 0)
+ {
+ if (GET_CODE (old) != PLUS)
+ return;
+
+ if (GET_CODE (new) == REG)
+ {
+ update_temp_slot_address (XEXP (old, 0), new);
+ update_temp_slot_address (XEXP (old, 1), new);
+ return;
+ }
+ else if (GET_CODE (new) != PLUS)
+ return;
+
+ if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
+ update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
+ else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
+ update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
+ else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
+ update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
+ else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
+ update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
+
+ return;
+ }
+
+ /* Otherwise add an alias for the temp's address. */
else if (p->address == 0)
p->address = new;
else
for (p = temp_slots; p; p = p->next)
if (p->rtl_expr == t)
- p->in_use = 0;
+ {
+ /* If this slot is below the current TEMP_SLOT_LEVEL, then it
+ needs to be preserved. This can happen if a temporary in
+ the RTL_EXPR was addressed; preserve_temp_slots will move
+ the temporary into a higher level. */
+ if (temp_slot_level <= p->level)
+ p->in_use = 0;
+ else
+ p->rtl_expr = NULL_TREE;
+ }
combine_temp_slots ();
}
/* Likewise, but save the new level as the place to allocate variables
for blocks. */
+#if 0
void
push_temp_slots_for_block ()
{
{
target_temp_slot_level = level;
}
+#endif
/* Pop a temporary nesting level. All slots in use in the current level
are freed. */
/* A CONCAT contains two pseudos; put them both in the stack.
We do it so they end up consecutive. */
enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
- tree part_type = TREE_TYPE (TREE_TYPE (decl));
+ tree part_type = type_for_mode (part_mode, 0);
#ifdef FRAME_GROWS_DOWNWARD
/* Since part 0 should have a lower address, do it second. */
put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
PUT_CODE (reg, MEM);
MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
MEM_ALIAS_SET (reg) = get_alias_set (decl);
+ MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (TREE_TYPE (decl)));
/* The two parts are in memory order already.
Use the lower parts address as ours. */
tree type;
enum machine_mode promoted_mode, decl_mode;
int volatile_p;
- int original_regno;
+ unsigned int original_regno;
int used_p;
struct hash_table *ht;
{
- struct function *func = function ? function : current_function;
+ struct function *func = function ? function : cfun;
rtx new = 0;
- int regno = original_regno;
+ unsigned int regno = original_regno;
if (regno == 0)
regno = REGNO (reg);
if (regno < func->x_max_parm_reg)
new = func->x_parm_reg_stack_loc[regno];
+
if (new == 0)
new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
rtx first_insn = get_insns ();
struct sequence_stack *stack = seq_stack;
tree rtl_exps = rtl_expr_chain;
+ rtx insn;
/* Must scan all insns for stack-refs that exceed the limit. */
fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn,
}
/* Scan the catch clauses for exception handling too. */
- push_to_sequence (catch_clauses);
+ push_to_full_sequence (catch_clauses, catch_clauses_last);
fixup_var_refs_insns (var, promoted_mode, unsignedp, catch_clauses,
0, 0);
- end_sequence ();
+ end_full_sequence (&catch_clauses, &catch_clauses_last);
+
+ /* Scan sequences saved in CALL_PLACEHOLDERS too. */
+ for (insn = first_insn; insn; insn = NEXT_INSN (insn))
+ {
+ if (GET_CODE (insn) == CALL_INSN
+ && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
+ {
+ int i;
+
+ /* Look at the Normal call, sibling call and tail recursion
+ sequences attached to the CALL_PLACEHOLDER. */
+ for (i = 0; i < 3; i++)
+ {
+ rtx seq = XEXP (PATTERN (insn), i);
+ if (seq)
+ {
+ push_to_sequence (seq);
+ fixup_var_refs_insns (var, promoted_mode, unsignedp,
+ seq, 0, 0);
+ XEXP (PATTERN (insn), i) = get_insns ();
+ end_sequence ();
+ }
+ }
+ }
+ }
}
\f
/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
{
if (fmt[i] == 'e')
fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
- if (fmt[i] == 'E')
+ else if (fmt[i] == 'E')
{
register int j;
for (j = 0; j < XVECLEN (x, i); j++)
{
if (fmt[i] == 'e')
XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
- if (fmt[i] == 'E')
+ else if (fmt[i] == 'E')
{
register int j;
for (j = 0; j < XVECLEN (x, i); j++)
{
if (fmt[i] == 'e')
XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
- if (fmt[i] == 'E')
+ else if (fmt[i] == 'E')
{
register int j;
for (j = 0; j < XVECLEN (x, i); j++)
#ifndef STACK_DYNAMIC_OFFSET
-#ifdef ACCUMULATE_OUTGOING_ARGS
/* The bottom of the stack points to the actual arguments. If
REG_PARM_STACK_SPACE is defined, this includes the space for the register
parameters. However, if OUTGOING_REG_PARM_STACK space is not defined,
#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
-(current_function_outgoing_args_size \
- + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
+((ACCUMULATE_OUTGOING_ARGS \
+ ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
+ + (STACK_POINTER_OFFSET)) \
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
-(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
-#endif
-
-#else
-#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
+((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
+ + (STACK_POINTER_OFFSET))
#endif
#endif
-/* On a few machines, the CFA coincides with the arg pointer. */
+/* On most machines, the CFA coincides with the first incoming parm. */
#ifndef ARG_POINTER_CFA_OFFSET
-#define ARG_POINTER_CFA_OFFSET 0
+#define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
#endif
tree type = TREE_TYPE (decl);
rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
REGNO (reg), decl);
+
/* If the original REG was a user-variable, then so is the REG whose
- address is being taken. */
+ address is being taken. Likewise for unchanging. */
REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
+ RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
PUT_CODE (reg, MEM);
PUT_MODE (reg, DECL_MODE (decl));
/* Helper function for purge_addressof. See if the rtx expression at *LOC
in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
- the stack. */
+ the stack. If the function returns FALSE then the replacement could not
+ be made. */
-static void
+static boolean
purge_addressof_1 (loc, insn, force, store, ht)
rtx *loc;
rtx insn;
RTX_CODE code;
int i, j;
const char *fmt;
+ boolean result = true;
/* Re-start here to avoid recursion in common cases. */
restart:
x = *loc;
if (x == 0)
- return;
+ return true;
code = GET_CODE (x);
memory. */
if (code == SET)
{
- purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
- purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
- return;
+ result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
+ result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
+ return result;
}
else if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
if (validate_change (insn, loc, sub, 0)
|| validate_replace_rtx (x, sub, insn))
- return;
+ return true;
start_sequence ();
sub = force_operand (sub, NULL_RTX);
insns = gen_sequence ();
end_sequence ();
emit_insn_before (insns, insn);
- return;
+ return true;
}
else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
if (rtx_equal_p (x, XEXP (tem, 0)))
{
*loc = XEXP (XEXP (tem, 1), 0);
- return;
+ return true;
}
/* See comment for purge_addressof_replacements. */
z = gen_lowpart (GET_MODE (x), z);
*loc = z;
- return;
+ return true;
}
- /* There should always be such a replacement. */
- abort ();
+ /* Sometimes we may not be able to find the replacement. For
+ example when the original insn was a MEM in a wider mode,
+ and the note is part of a sign extension of a narrowed
+ version of that MEM. Gcc testcase compile/990829-1.c can
+ generate an example of this situation. Rather than complain
+ we return false, which will prompt our caller to remove the
+ offending note. */
+ return false;
}
size_x = GET_MODE_BITSIZE (GET_MODE (x));
start_sequence ();
store_bit_field (sub, size_x, 0, GET_MODE (x),
val, GET_MODE_SIZE (GET_MODE (sub)),
- GET_MODE_SIZE (GET_MODE (sub)));
+ GET_MODE_ALIGNMENT (GET_MODE (sub)));
/* Make sure to unshare any shared rtl that store_bit_field
might have created. */
- for (p = get_insns(); p; p = NEXT_INSN (p))
- {
- reset_used_flags (PATTERN (p));
- reset_used_flags (REG_NOTES (p));
- reset_used_flags (LOG_LINKS (p));
- }
- unshare_all_rtl (get_insns ());
+ unshare_all_rtl_again (get_insns ());
seq = gen_sequence ();
end_sequence ();
purge_bitfield_addressof_replacements));
/* We replaced with a reg -- all done. */
- return;
+ return true;
}
}
if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
{
XEXP (XEXP (tem, 1), 0) = sub;
- return;
+ return true;
}
purge_addressof_replacements
= gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
gen_rtx_EXPR_LIST (VOIDmode, sub,
purge_addressof_replacements));
- return;
+ return true;
}
goto restart;
}
else if (code == ADDRESSOF)
{
put_addressof_into_stack (x, ht);
- return;
+ return true;
}
else if (code == SET)
{
- purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
- purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
- return;
+ result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
+ result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
+ return result;
}
/* Scan all subexpressions. */
for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
{
if (*fmt == 'e')
- purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
+ result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
else if (*fmt == 'E')
for (j = 0; j < XVECLEN (x, i); j++)
- purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
+ result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
}
+
+ return result;
}
/* Return a new hash table entry in HT. */
}
}
+/* Helper function for purge_addressof called through for_each_rtx.
+ Returns true iff the rtl is an ADDRESSOF. */
+static int
+is_addressof (rtl, data)
+ rtx * rtl;
+ void * data ATTRIBUTE_UNUSED;
+{
+ return GET_CODE (* rtl) == ADDRESSOF;
+}
+
/* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
(MEM (ADDRESSOF)) patterns, and force any needed registers into the
stack. */
/* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
requires a fixup pass over the instruction stream to correct
INSNs that depended on the REG being a REG, and not a MEM. But,
- these fixup passes are slow. Furthermore, more MEMs are not
+ these fixup passes are slow. Furthermore, most MEMs are not
mentioned in very many instructions. So, we speed up the process
by pre-calculating which REGs occur in which INSNs; that allows
us to perform the fixup passes much more quickly. */
if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
|| GET_CODE (insn) == CALL_INSN)
{
- purge_addressof_1 (&PATTERN (insn), insn,
- asm_noperands (PATTERN (insn)) > 0, 0, &ht);
- purge_addressof_1 (®_NOTES (insn), NULL_RTX, 0, 0, &ht);
+ if (! purge_addressof_1 (&PATTERN (insn), insn,
+ asm_noperands (PATTERN (insn)) > 0, 0, &ht))
+ /* If we could not replace the ADDRESSOFs in the insn,
+ something is wrong. */
+ abort ();
+
+ if (! purge_addressof_1 (®_NOTES (insn), NULL_RTX, 0, 0, &ht))
+ {
+ /* If we could not replace the ADDRESSOFs in the insn's notes,
+ we can just remove the offending notes instead. */
+ rtx note;
+
+ for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
+ {
+ /* If we find a REG_RETVAL note then the insn is a libcall.
+ Such insns must have REG_EQUAL notes as well, in order
+ for later passes of the compiler to work. So it is not
+ safe to delete the notes here, and instead we abort. */
+ if (REG_NOTE_KIND (note) == REG_RETVAL)
+ abort ();
+ if (for_each_rtx (& note, is_addressof, NULL))
+ remove_note (insn, note);
+ }
+ }
}
/* Clean up. */
rtx insns;
{
rtx insn;
- int i;
+ unsigned int i;
/* Compute the offsets to use for this function. */
in_arg_offset = FIRST_PARM_OFFSET (fndecl);
var_offset = STARTING_FRAME_OFFSET;
dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
out_arg_offset = STACK_POINTER_OFFSET;
- cfa_offset = ARG_POINTER_CFA_OFFSET;
+ cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
/* Scan all variables and parameters of this function. For each that is
in memory, instantiate all virtual registers if the result is a valid
static void
instantiate_decl (x, size, valid_only)
rtx x;
- int size;
+ HOST_WIDE_INT size;
int valid_only;
{
enum machine_mode mode;
instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
- if (valid_only)
+ if (valid_only && size >= 0)
{
+ unsigned HOST_WIDE_INT decl_size = size;
+
/* Now verify that the resulting address is valid for every integer or
floating-point mode up to and including SIZE bytes long. We do this
since the object might be accessed in any mode and frame addresses
are shared. */
for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
- mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
+ mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
mode = GET_MODE_WIDER_MODE (mode))
if (! memory_address_p (mode, addr))
return;
for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
- mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
+ mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
mode = GET_MODE_WIDER_MODE (mode))
if (! memory_address_p (mode, addr))
return;
if (new)
{
+ rtx src = SET_SRC (x);
+
+ instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
+
/* The only valid sources here are PLUS or REG. Just do
the simplest possible thing to handle them. */
- if (GET_CODE (SET_SRC (x)) != REG
- && GET_CODE (SET_SRC (x)) != PLUS)
+ if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
abort ();
start_sequence ();
- if (GET_CODE (SET_SRC (x)) != REG)
- temp = force_operand (SET_SRC (x), NULL_RTX);
+ if (GET_CODE (src) != REG)
+ temp = force_operand (src, NULL_RTX);
else
- temp = SET_SRC (x);
+ temp = src;
temp = force_operand (plus_constant (temp, offset), NULL_RTX);
seq = get_insns ();
end_sequence ();
}
}
\f
-/* Output a USE for any register use in RTL.
- This is used with -noreg to mark the extent of lifespan
- of any registers used in a user-visible variable's DECL_RTL. */
-
-void
-use_variable (rtl)
- rtx rtl;
-{
- if (GET_CODE (rtl) == REG)
- /* This is a register variable. */
- emit_insn (gen_rtx_USE (VOIDmode, rtl));
- else if (GET_CODE (rtl) == MEM
- && GET_CODE (XEXP (rtl, 0)) == REG
- && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
- || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
- && XEXP (rtl, 0) != current_function_internal_arg_pointer)
- /* This is a variable-sized structure. */
- emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
-}
-
-/* Like use_variable except that it outputs the USEs after INSN
- instead of at the end of the insn-chain. */
-
-void
-use_variable_after (rtl, insn)
- rtx rtl, insn;
-{
- if (GET_CODE (rtl) == REG)
- /* This is a register variable. */
- emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
- else if (GET_CODE (rtl) == MEM
- && GET_CODE (XEXP (rtl, 0)) == REG
- && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
- || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
- && XEXP (rtl, 0) != current_function_internal_arg_pointer)
- /* This is a variable-sized structure. */
- emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
-}
-\f
int
max_parm_reg_num ()
{
{
int i, regno, nregs;
rtx reg;
- tree type;
- if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
- type = exp;
- else
- type = TREE_TYPE (exp);
+
+ tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
if (RETURN_IN_MEMORY (type))
return 1;
return 1;
/* Make sure we have suitable call-clobbered regs to return
the value in; if not, we must return it in memory. */
- reg = hard_function_value (type, 0);
+ reg = hard_function_value (type, 0, 0);
/* If we have something other than a REG (e.g. a PARALLEL), then assume
it is OK. */
int varargs_setup = 0;
#endif
rtx conversion_insns = 0;
+ struct args_size alignment_pad;
/* Nonzero if the last arg is named `__builtin_va_alist',
which is used on some machines for old-fashioned non-ANSI varargs.h;
pretend_named) != 0,
#endif
#endif
- fndecl, &stack_args_size, &stack_offset, &arg_size);
+ fndecl, &stack_args_size, &stack_offset, &arg_size,
+ &alignment_pad);
{
rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
if (GET_CODE (entry_parm) == PARALLEL)
emit_group_store (validize_mem (stack_parm), entry_parm,
int_size_in_bytes (TREE_TYPE (parm)),
- (TYPE_ALIGN (TREE_TYPE (parm))
- / BITS_PER_UNIT));
+ TYPE_ALIGN (TREE_TYPE (parm)));
+
else
move_block_from_reg (REGNO (entry_parm),
validize_mem (stack_parm), nregs,
for its ultimate type, don't use that slot after entry.
We'll make another stack slot, if we need one. */
{
- int thisparm_boundary
+ unsigned int thisparm_boundary
= FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
}
#endif /* 0 */
-#ifdef STACK_REGS
- /* We need this "use" info, because the gcc-register->stack-register
- converter in reg-stack.c needs to know which registers are active
- at the start of the function call. The actual parameter loading
- instructions are not always available then anymore, since they might
- have been optimised away. */
-
- if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
- emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
-#endif
-
/* ENTRY_PARM is an RTX for the parameter as it arrives,
in the mode in which it arrives.
STACK_PARM is an RTX for a stack slot where the parameter can live
if (GET_CODE (entry_parm) == PARALLEL)
emit_group_store (validize_mem (stack_parm), entry_parm,
int_size_in_bytes (TREE_TYPE (parm)),
- (TYPE_ALIGN (TREE_TYPE (parm))
- / BITS_PER_UNIT));
+ TYPE_ALIGN (TREE_TYPE (parm)));
else
move_block_from_reg (REGNO (entry_parm),
validize_mem (stack_parm),
}
DECL_RTL (parm) = stack_parm;
}
- else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
+ else if (! ((! optimize
+ && ! DECL_REGISTER (parm)
&& ! DECL_INLINE (fndecl))
/* layout_decl may set this. */
|| TREE_ADDRESSABLE (parm)
may need to do it in a wider mode. */
register rtx parmreg;
- int regno, regnoi = 0, regnor = 0;
+ unsigned int regno, regnoi = 0, regnor = 0;
unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
/* If we were passed a pointer but the actual value
can safely live in a register, put it in one. */
if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
- && ! ((obey_regdecls && ! DECL_REGISTER (parm)
+ && ! ((! optimize
+ && ! DECL_REGISTER (parm)
&& ! DECL_INLINE (fndecl))
/* layout_decl may set this. */
|| TREE_ADDRESSABLE (parm)
push_to_sequence (conversion_insns);
- if (TYPE_SIZE (type) == 0
+ if (!COMPLETE_TYPE_P (type)
|| TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
/* This is a variable sized object. */
copy = gen_rtx_MEM (BLKmode,
/* For pointer data type, suggest pointer register. */
if (POINTER_TYPE_P (TREE_TYPE (parm)))
mark_reg_pointer (parmreg,
- (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
- / BITS_PER_UNIT));
+ TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
+
}
else
{
#ifdef ARGS_GROW_DOWNWARD
current_function_arg_offset_rtx
= (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
- : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
- size_int (-stack_args_size.constant)),
+ : expand_expr (size_diffop (stack_args_size.var,
+ size_int (-stack_args_size.constant)),
NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
#else
current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
rtx
promoted_input_arg (regno, pmode, punsignedp)
- int regno;
+ unsigned int regno;
enum machine_mode *pmode;
int *punsignedp;
{
void
locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
- initial_offset_ptr, offset_ptr, arg_size_ptr)
+ initial_offset_ptr, offset_ptr, arg_size_ptr,
+ alignment_pad)
enum machine_mode passed_mode;
tree type;
- int in_regs;
+ int in_regs ATTRIBUTE_UNUSED;
tree fndecl ATTRIBUTE_UNUSED;
struct args_size *initial_offset_ptr;
struct args_size *offset_ptr;
struct args_size *arg_size_ptr;
+ struct args_size *alignment_pad;
+
{
tree sizetree
= type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
{
initial_offset_ptr->var
= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
- size_int (reg_parm_stack_space));
+ ssize_int (reg_parm_stack_space));
initial_offset_ptr->constant = 0;
}
else if (initial_offset_ptr->constant < reg_parm_stack_space)
if (initial_offset_ptr->var)
{
offset_ptr->constant = 0;
- offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
+ offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
initial_offset_ptr->var);
}
else
sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
SUB_PARM_SIZE (*offset_ptr, sizetree);
if (where_pad != downward)
- pad_to_arg_alignment (offset_ptr, boundary);
+ pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
if (initial_offset_ptr->var)
- {
- arg_size_ptr->var = size_binop (MINUS_EXPR,
- size_binop (MINUS_EXPR,
- integer_zero_node,
- initial_offset_ptr->var),
- offset_ptr->var);
- }
+ arg_size_ptr->var = size_binop (MINUS_EXPR,
+ size_binop (MINUS_EXPR,
+ ssize_int (0),
+ initial_offset_ptr->var),
+ offset_ptr->var);
+
else
- {
- arg_size_ptr->constant = (- initial_offset_ptr->constant
- - offset_ptr->constant);
- }
+ arg_size_ptr->constant = (- initial_offset_ptr->constant
+ - offset_ptr->constant);
+
#else /* !ARGS_GROW_DOWNWARD */
- pad_to_arg_alignment (initial_offset_ptr, boundary);
+ pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
*offset_ptr = *initial_offset_ptr;
#ifdef PUSH_ROUNDING
BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
static void
-pad_to_arg_alignment (offset_ptr, boundary)
+pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
struct args_size *offset_ptr;
int boundary;
+ struct args_size *alignment_pad;
{
+ tree save_var = NULL_TREE;
+ HOST_WIDE_INT save_constant = 0;
+
int boundary_in_bytes = boundary / BITS_PER_UNIT;
+ if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
+ {
+ save_var = offset_ptr->var;
+ save_constant = offset_ptr->constant;
+ }
+
+ alignment_pad->var = NULL_TREE;
+ alignment_pad->constant = 0;
+
if (boundary > BITS_PER_UNIT)
{
if (offset_ptr->var)
(ARGS_SIZE_TREE (*offset_ptr),
boundary / BITS_PER_UNIT);
offset_ptr->constant = 0; /*?*/
+ if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
+ alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
+ save_var);
}
else
- offset_ptr->constant =
+ {
+ offset_ptr->constant =
#ifdef ARGS_GROW_DOWNWARD
- FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
+ FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
#else
- CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
+ CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
#endif
+ if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
+ alignment_pad->constant = offset_ptr->constant - save_constant;
+ }
}
}
}
}
#endif
-
-#ifdef ARGS_GROW_DOWNWARD
-static tree
-round_down (value, divisor)
- tree value;
- int divisor;
-{
- return size_binop (MULT_EXPR,
- size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
- size_int (divisor));
-}
-#endif
\f
/* Walk the tree of blocks describing the binding levels within a function
and warn about uninitialized variables.
register tree decl, sub;
for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
{
- if (TREE_CODE (decl) == VAR_DECL
+ if (warn_uninitialized
+ && TREE_CODE (decl) == VAR_DECL
/* These warnings are unreliable for and aggregates
because assigning the fields one by one can fail to convince
flow.c that the entire aggregate was initialized.
&& regno_uninitialized (REGNO (DECL_RTL (decl))))
warning_with_decl (decl,
"`%s' might be used uninitialized in this function");
- if (TREE_CODE (decl) == VAR_DECL
+ if (extra_warnings
+ && TREE_CODE (decl) == VAR_DECL
&& DECL_RTL (decl) != 0
&& GET_CODE (DECL_RTL (decl)) == REG
&& regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
#endif
tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
- fp ? fp : current_function);
+ fp ? fp : cfun);
#endif
/* Record the trampoline for reuse and note it for later initialization
return tramp;
}
\f
-/* The functions identify_blocks and reorder_blocks provide a way to
- reorder the tree of BLOCK nodes, for optimizers that reshuffle or
- duplicate portions of the RTL code. Call identify_blocks before
- changing the RTL, and call reorder_blocks after. */
-
/* Put all this function's BLOCK nodes including those that are chained
onto the first block into a vector, and return it.
Also store in each NOTE for the beginning or end of a block
The arguments are BLOCK, the chain of top-level blocks of the function,
and INSNS, the insn chain of the function. */
-tree *
-identify_blocks (block, insns)
- tree block;
- rtx insns;
+void
+identify_blocks ()
{
int n_blocks;
- tree *block_vector;
- int *block_stack;
- int depth = 0;
- int next_block_number = 1;
- int current_block_number = 1;
- rtx insn;
+ tree *block_vector, *last_block_vector;
+ tree *block_stack;
+ tree block = DECL_INITIAL (current_function_decl);
if (block == 0)
- return 0;
+ return;
- n_blocks = all_blocks (block, 0);
- block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
- block_stack = (int *) alloca (n_blocks * sizeof (int));
+ /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
+ depth-first order. */
+ block_vector = get_block_vector (block, &n_blocks);
+ block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));
- all_blocks (block, block_vector);
+ last_block_vector = identify_blocks_1 (get_insns (),
+ block_vector + 1,
+ block_vector + n_blocks,
+ block_stack);
+
+ /* If we didn't use all of the subblocks, we've misplaced block notes. */
+ /* ??? This appears to happen all the time. Latent bugs elsewhere? */
+ if (0 && last_block_vector != block_vector + n_blocks)
+ abort ();
+
+ free (block_vector);
+ free (block_stack);
+}
+
+/* Subroutine of identify_blocks. Do the block substitution on the
+ insn chain beginning with INSNS. Recurse for CALL_PLACEHOLDER chains.
+
+ BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
+ BLOCK_VECTOR is incremented for each block seen. */
+
+static tree *
+identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
+ rtx insns;
+ tree *block_vector;
+ tree *end_block_vector;
+ tree *orig_block_stack;
+{
+ rtx insn;
+ tree *block_stack = orig_block_stack;
for (insn = insns; insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == NOTE)
- {
- if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
- {
- block_stack[depth++] = current_block_number;
- current_block_number = next_block_number;
- NOTE_BLOCK_NUMBER (insn) = next_block_number++;
- }
- if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
- {
- NOTE_BLOCK_NUMBER (insn) = current_block_number;
- current_block_number = block_stack[--depth];
- }
- }
+ {
+ if (GET_CODE (insn) == NOTE)
+ {
+ if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
+ {
+ tree b;
- if (n_blocks != next_block_number)
+ /* If there are more block notes than BLOCKs, something
+ is badly wrong. */
+ if (block_vector == end_block_vector)
+ abort ();
+
+ b = *block_vector++;
+ NOTE_BLOCK (insn) = b;
+ *block_stack++ = b;
+ }
+ else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
+ {
+ /* If there are more NOTE_INSN_BLOCK_ENDs than
+ NOTE_INSN_BLOCK_BEGs, something is badly wrong. */
+ if (block_stack == orig_block_stack)
+ abort ();
+
+ NOTE_BLOCK (insn) = *--block_stack;
+ }
+ }
+ else if (GET_CODE (insn) == CALL_INSN
+ && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
+ {
+ rtx cp = PATTERN (insn);
+
+ block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
+ end_block_vector, block_stack);
+ if (XEXP (cp, 1))
+ block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
+ end_block_vector, block_stack);
+ if (XEXP (cp, 2))
+ block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
+ end_block_vector, block_stack);
+ }
+ }
+
+ /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
+ something is badly wrong. */
+ if (block_stack != orig_block_stack)
abort ();
return block_vector;
}
-/* Given BLOCK_VECTOR which was returned by identify_blocks,
- and a revised instruction chain, rebuild the tree structure
- of BLOCK nodes to correspond to the new order of RTL.
- The new block tree is inserted below TOP_BLOCK.
- Returns the current top-level block. */
+/* Identify BLOCKs referenced by more than one
+ NOTE_INSN_BLOCK_{BEG,END}, and create duplicate blocks. */
-tree
-reorder_blocks (block_vector, block, insns)
- tree *block_vector;
- tree block;
- rtx insns;
+void
+reorder_blocks ()
{
- tree current_block = block;
- rtx insn;
+ tree block = DECL_INITIAL (current_function_decl);
+ varray_type block_stack;
- if (block_vector == 0)
- return block;
+ if (block == NULL_TREE)
+ return;
- /* Prune the old trees away, so that it doesn't get in the way. */
- BLOCK_SUBBLOCKS (current_block) = 0;
- BLOCK_CHAIN (current_block) = 0;
+ VARRAY_TREE_INIT (block_stack, 10, "block_stack");
- for (insn = insns; insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == NOTE)
- {
- if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
- {
- tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
- /* If we have seen this block before, copy it. */
- if (TREE_ASM_WRITTEN (block))
- block = copy_node (block);
- BLOCK_SUBBLOCKS (block) = 0;
- TREE_ASM_WRITTEN (block) = 1;
- BLOCK_SUPERCONTEXT (block) = current_block;
- BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
- BLOCK_SUBBLOCKS (current_block) = block;
- current_block = block;
- NOTE_SOURCE_FILE (insn) = 0;
- }
- if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
- {
- BLOCK_SUBBLOCKS (current_block)
- = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
- current_block = BLOCK_SUPERCONTEXT (current_block);
- NOTE_SOURCE_FILE (insn) = 0;
- }
- }
+ /* Prune the old trees away, so that they don't get in the way. */
+ BLOCK_SUBBLOCKS (block) = NULL_TREE;
+ BLOCK_CHAIN (block) = NULL_TREE;
+
+ reorder_blocks_1 (get_insns (), block, &block_stack);
+
+ BLOCK_SUBBLOCKS (block)
+ = blocks_nreverse (BLOCK_SUBBLOCKS (block));
+
+ VARRAY_FREE (block_stack);
+}
- BLOCK_SUBBLOCKS (current_block)
- = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
- return current_block;
+/* Helper function for reorder_blocks. Process the insn chain beginning
+ at INSNS. Recurse for CALL_PLACEHOLDER insns. */
+
+static void
+reorder_blocks_1 (insns, current_block, p_block_stack)
+ rtx insns;
+ tree current_block;
+ varray_type *p_block_stack;
+{
+ rtx insn;
+
+ for (insn = insns; insn; insn = NEXT_INSN (insn))
+ {
+ if (GET_CODE (insn) == NOTE)
+ {
+ if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
+ {
+ tree block = NOTE_BLOCK (insn);
+ /* If we have seen this block before, copy it. A BLOCK can
+ be referenced by more than one BLOCK_BEG note when RTL has
+ been duplicated (e.g. by an optimizer); each occurrence
+ must get its own tree node, and the note is repointed at
+ the copy so the two stay in sync. */
+ if (TREE_ASM_WRITTEN (block))
+ {
+ block = copy_node (block);
+ NOTE_BLOCK (insn) = block;
+ }
+ /* Splice the block in as a new child of CURRENT_BLOCK and
+ descend into it; TREE_ASM_WRITTEN marks it as seen for the
+ duplicate check above. */
+ BLOCK_SUBBLOCKS (block) = 0;
+ TREE_ASM_WRITTEN (block) = 1;
+ BLOCK_SUPERCONTEXT (block) = current_block;
+ BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
+ BLOCK_SUBBLOCKS (current_block) = block;
+ current_block = block;
+ VARRAY_PUSH_TREE (*p_block_stack, block);
+ }
+ else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
+ {
+ /* Pop back to the enclosing block. Subblocks were chained
+ in reverse (prepended above), so reverse them here to
+ restore source order. */
+ NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
+ VARRAY_POP (*p_block_stack);
+ BLOCK_SUBBLOCKS (current_block)
+ = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
+ current_block = BLOCK_SUPERCONTEXT (current_block);
+ }
+ }
+ else if (GET_CODE (insn) == CALL_INSN
+ && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
+ {
+ /* A CALL_PLACEHOLDER holds up to three alternative insn
+ chains (normal call, tail call, tail recursion); walk each
+ of them in the current block context. */
+ rtx cp = PATTERN (insn);
+ reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
+ if (XEXP (cp, 1))
+ reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
+ if (XEXP (cp, 2))
+ reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
+ }
+ }
+}
/* Reverse the order of elements in the chain T of blocks,
return prev;
}
-/* Count the subblocks of the list starting with BLOCK, and list them
- all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
+/* Count the subblocks of the list starting with BLOCK. If VECTOR is
+ non-NULL, list them all into VECTOR, in a depth-first preorder
+ traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
blocks. */
static int
return n_blocks;
}
+
+/* Return a vector containing all the blocks rooted at BLOCK. The
+ number of elements in the vector is stored in N_BLOCKS_P. The
+ vector is dynamically allocated; it is the caller's responsibility
+ to call `free' on the pointer returned. */
+
+static tree *
+get_block_vector (block, n_blocks_p)
+ tree block;
+ int *n_blocks_p;
+{
+ tree *block_vector;
+
+ /* Two passes over the block tree: first count the blocks (all_blocks
+ with a null vector only counts), then fill the vector in
+ depth-first preorder. */
+ *n_blocks_p = all_blocks (block, NULL);
+ block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
+ all_blocks (block, block_vector);
+
+ return block_vector;
+}
+
+/* Running counter used to assign BLOCK_NUMBERs across functions;
+ starts at two because entry 0 of the vector is the unnumbered
+ top-level BLOCK and numbering begins past it. */
+static int next_block_index = 2;
+
+/* Set BLOCK_NUMBER for all the blocks in FN. */
+
+void
+number_blocks (fn)
+ tree fn;
+{
+ int i;
+ int n_blocks;
+ tree *block_vector;
+
+ /* For SDB and XCOFF debugging output, we start numbering the blocks
+ from 1 within each function, rather than keeping a running
+ count. */
+#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
+ if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
+ next_block_index = 1;
+#endif
+
+ block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
+
+ /* The top-level BLOCK isn't numbered at all. */
+ for (i = 1; i < n_blocks; ++i)
+ /* We number the blocks from two. */
+ BLOCK_NUMBER (block_vector[i]) = next_block_index++;
+
+ /* get_block_vector's result is heap-allocated; release it. */
+ free (block_vector);
+
+ return;
+}
+
\f
/* Allocate a function structure and reset its contents to the defaults. */
static void
prepare_function_start ()
{
- current_function = (struct function *) xcalloc (1, sizeof (struct function));
- current_function->can_garbage_collect = 0;
+ cfun = (struct function *) xcalloc (1, sizeof (struct function));
init_stmt_for_function ();
+ init_eh_for_function ();
cse_not_expected = ! optimize;
/* We haven't done register allocation yet. */
reg_renumber = 0;
- init_varasm_status (current_function);
+ init_varasm_status (cfun);
/* Clear out data used for inlining. */
- current_function->inlinable = 0;
- current_function->original_decl_initial = 0;
- current_function->original_arg_vector = 0;
+ cfun->inlinable = 0;
+ cfun->original_decl_initial = 0;
+ cfun->original_arg_vector = 0;
+
+#ifdef STACK_BOUNDARY
+ cfun->stack_alignment_needed = STACK_BOUNDARY;
+ cfun->preferred_stack_boundary = STACK_BOUNDARY;
+#else
+ cfun->stack_alignment_needed = 0;
+ cfun->preferred_stack_boundary = 0;
+#endif
/* Set if a call to setjmp is seen. */
current_function_calls_setjmp = 0;
current_function_calls_alloca = 0;
current_function_contains_functions = 0;
current_function_is_leaf = 0;
+ current_function_nothrow = 0;
current_function_sp_is_unchanging = 0;
current_function_uses_only_leaf_regs = 0;
current_function_has_computed_jump = 0;
current_function_outgoing_args_size = 0;
if (init_lang_status)
- (*init_lang_status) (current_function);
+ (*init_lang_status) (cfun);
if (init_machine_status)
- (*init_machine_status) (current_function);
+ (*init_machine_status) (cfun);
}
/* Initialize the rtl expansion mechanism so that we can do simple things
prepare_function_start ();
/* Remember this function for later. */
- current_function->next_global = all_functions;
- all_functions = current_function;
+ cfun->next_global = all_functions;
+ all_functions = cfun;
current_function_name = (*decl_printable_name) (subr, 2);
- current_function->decl = subr;
+ cfun->decl = subr;
/* Nonzero if this is a nested function that uses a static chain. */
init_function_for_compilation ()
{
reg_renumber = 0;
+
/* No prologue/epilogue insns yet. */
- prologue = epilogue = 0;
+ VARRAY_GROW (prologue, 0);
+ VARRAY_GROW (epilogue, 0);
+ VARRAY_GROW (sibcall_epilogue, 0);
}
/* Indicate that the current function uses extra args
tree subr;
int parms_have_cleanups;
{
- register int i;
tree tem;
rtx last_ptr = NULL_RTX;
= (flag_instrument_function_entry_exit
&& ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
+ current_function_limit_stack
+ = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
+
/* If function gets a static chain arg, store it in the stack frame.
Do this first, so it gets the first stack slot offset. */
if (current_function_needs_context)
as opposed to parm setup. */
emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
- /* If doing stupid allocation, mark parms as born here. */
-
if (GET_CODE (get_last_insn ()) != NOTE)
emit_note (NULL_PTR, NOTE_INSN_DELETED);
parm_birth_insn = get_last_insn ();
- if (obey_regdecls)
- {
- for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
- use_variable (regno_reg_rtx[i]);
-
- if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
- use_variable (current_function_internal_arg_pointer);
- }
-
context_display = 0;
if (current_function_needs_context)
{
/* Fetch static chain values for containing functions. */
tem = decl_function_context (current_function_decl);
- /* If not doing stupid register allocation copy the static chain
- pointer into a pseudo. If we have small register classes, copy
- the value from memory if static_chain_incoming_rtx is a REG. If
- we do stupid register allocation, we use the stack address
- generated above. */
- if (tem && ! obey_regdecls)
+ /* Copy the static chain pointer into a pseudo. If we have
+ small register classes, copy the value from memory if
+ static_chain_incoming_rtx is a REG. */
+ if (tem)
{
/* If the static chain originally came in a register, put it back
there, then move it out in the next insn. The reason for
/* Outside function body, can't compute type's actual size
until next function's body starts. */
- current_function = 0;
+
+ free_after_parsing (cfun);
+ free_after_compilation (cfun);
+ free (cfun);
+ cfun = 0;
+}
+
+/* Call DOIT for each hard register used as a return value from
+ the current function. ARG is passed through to DOIT unchanged.
+ Does nothing if the function returns no value in a register. */
+
+void
+diddle_return_value (doit, arg)
+ void (*doit) PARAMS ((rtx, void *));
+ void *arg;
+{
+ rtx outgoing = current_function_return_rtx;
+
+ if (! outgoing)
+ return;
+
+ /* If the return rtx is a pseudo, recompute the real outgoing hard
+ register from the function's declared return type via the target
+ macros. */
+ if (GET_CODE (outgoing) == REG
+ && REGNO (outgoing) >= FIRST_PSEUDO_REGISTER)
+ {
+ tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
+#ifdef FUNCTION_OUTGOING_VALUE
+ outgoing = FUNCTION_OUTGOING_VALUE (type, current_function_decl);
+#else
+ outgoing = FUNCTION_VALUE (type, current_function_decl);
+#endif
+ /* If this is a BLKmode structure being returned in registers, then use
+ the mode computed in expand_return. */
+ if (GET_MODE (outgoing) == BLKmode)
+ PUT_MODE (outgoing,
+ GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
+ }
+
+ /* Apply DOIT to the single return register, or to each hard-register
+ element of a PARALLEL return (multi-register return values). */
+ if (GET_CODE (outgoing) == REG)
+ (*doit) (outgoing, arg);
+ else if (GET_CODE (outgoing) == PARALLEL)
+ {
+ int i;
+
+ for (i = 0; i < XVECLEN (outgoing, 0); i++)
+ {
+ rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
+
+ if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
+ (*doit) (x, arg);
+ }
+ }
+}
+
+/* Callback for diddle_return_value: emit a CLOBBER of return register
+ REG. ARG is unused. */
+static void
+do_clobber_return_reg (reg, arg)
+ rtx reg;
+ void *arg ATTRIBUTE_UNUSED;
+{
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
+}
+
+/* Emit CLOBBERs for all hard registers that hold the current
+ function's return value, so they are not seen as live before any
+ value is stored into them. */
+void
+clobber_return_register ()
+{
+ diddle_return_value (do_clobber_return_reg, NULL);
+}
+
+/* Callback for diddle_return_value: emit a USE of return register
+ REG. ARG is unused. */
+static void
+do_use_return_reg (reg, arg)
+ rtx reg;
+ void *arg ATTRIBUTE_UNUSED;
+{
+ emit_insn (gen_rtx_USE (VOIDmode, reg));
+}
+
+/* Emit USEs for all hard registers that hold the current function's
+ return value, keeping them live through the end of the function. */
+void
+use_return_register ()
+{
+ diddle_return_value (do_use_return_reg, NULL);
+}
/* Generate RTL for the end of the current function.
int line;
int end_bindings;
{
- register int i;
tree link;
#ifdef TRAMPOLINE_TEMPLATE
for (link = trampoline_list; link; link = TREE_CHAIN (link))
{
tree function = TREE_PURPOSE (link);
- rtx context = lookup_static_chain (function);
+ rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
#ifdef TRAMPOLINE_TEMPLATE
rtx blktramp;
blktramp = change_address (initial_trampoline, BLKmode, tramp);
emit_block_move (blktramp, initial_trampoline,
GEN_INT (TRAMPOLINE_SIZE),
- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
+ TRAMPOLINE_ALIGNMENT);
#endif
INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
seq = get_insns ();
until next function's body starts. */
immediate_size_expand--;
- /* If doing stupid register allocation,
- mark register parms as dying here. */
-
- if (obey_regdecls)
- {
- rtx tem;
- for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
- use_variable (regno_reg_rtx[i]);
-
- /* Likewise for the regs of all the SAVE_EXPRs in the function. */
-
- for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
- {
- use_variable (XEXP (tem, 0));
- use_variable_after (XEXP (tem, 0), parm_birth_insn);
- }
-
- if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
- use_variable (current_function_internal_arg_pointer);
- }
-
clear_pending_stack_adjust ();
do_pending_stack_adjust ();
structure returning. */
if (return_label)
- emit_label (return_label);
+ {
+ /* Before the return label, clobber the return registers so that
+ they are not propagated live to the rest of the function. This
+ can only happen with functions that drop through; if there had
+ been a return statement, there would have either been a return
+ rtx, or a jump to the return label. */
+ clobber_return_register ();
+
+ emit_label (return_label);
+ }
/* C++ uses this. */
if (end_bindings)
expand_leftover_cleanups ();
+ /* If there are any catch_clauses remaining, output them now. */
+ emit_insns (catch_clauses);
+ catch_clauses = catch_clauses_last = NULL_RTX;
/* If the above emitted any code, may sure we jump around it. */
if (last != get_last_insn ())
{
GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
emit_move_insn (real_decl_result,
DECL_RTL (DECL_RESULT (current_function_decl)));
- emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
/* The delay slot scheduler assumes that current_function_return_rtx
holds the hard register containing the return value, not a temporary
REG_FUNCTION_VALUE_P (outgoing) = 1;
emit_move_insn (outgoing, value_address);
- use_variable (outgoing);
}
+ /* ??? This should no longer be necessary since stupid is no longer with
+ us, but there are some parts of the compiler (eg reload_combine, and
+ sh mach_dep_reorg) that still try and compute their own lifetime info
+ instead of using the general framework. */
+ use_return_register ();
+
/* If this is an implementation of __throw, do what's necessary to
communicate between __builtin_eh_return and the epilogue. */
expand_eh_return ();
expand_fixups (get_insns ());
}
\f
-/* Create an array that records the INSN_UIDs of INSNS (either a sequence
- or a single insn). */
+/* Extend a vector that records the INSN_UIDs of INSNS (either a
+ sequence or a single insn). */
-#if defined (HAVE_prologue) || defined (HAVE_epilogue)
-static int *
-record_insns (insns)
+static void
+record_insns (insns, vecp)
rtx insns;
+ varray_type *vecp;
{
- int *vec;
-
if (GET_CODE (insns) == SEQUENCE)
{
int len = XVECLEN (insns, 0);
- vec = (int *) oballoc ((len + 1) * sizeof (int));
- vec[len] = 0;
+ int i = VARRAY_SIZE (*vecp);
+
+ VARRAY_GROW (*vecp, i + len);
while (--len >= 0)
- vec[len] = INSN_UID (XVECEXP (insns, 0, len));
+ {
+ VARRAY_INT (*vecp, i) = INSN_UID (XVECEXP (insns, 0, len));
+ ++i;
+ }
}
else
{
- vec = (int *) oballoc (2 * sizeof (int));
- vec[0] = INSN_UID (insns);
- vec[1] = 0;
+ int i = VARRAY_SIZE (*vecp);
+ VARRAY_GROW (*vecp, i + 1);
+ VARRAY_INT (*vecp, i) = INSN_UID (insns);
}
- return vec;
}
/* Determine how many INSN_UIDs in VEC are part of INSN. */
static int
contains (insn, vec)
rtx insn;
- int *vec;
+ varray_type vec;
{
register int i, j;
{
int count = 0;
for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
- for (j = 0; vec[j]; j++)
- if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
+ for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
+ if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
count++;
return count;
}
else
{
- for (j = 0; vec[j]; j++)
- if (INSN_UID (insn) == vec[j])
+ for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
+ if (INSN_UID (insn) == VARRAY_INT (vec, j))
return 1;
}
return 0;
prologue_epilogue_contains (insn)
rtx insn;
{
- if (prologue && contains (insn, prologue))
+ if (contains (insn, prologue))
return 1;
- if (epilogue && contains (insn, epilogue))
+ if (contains (insn, epilogue))
return 1;
return 0;
}
-#endif /* HAVE_prologue || HAVE_epilogue */
+
+/* Return nonzero if INSN is one of the insns recorded as belonging to
+ a sibling-call epilogue; zero if no sibcall epilogue was emitted. */
+int
+sibcall_epilogue_contains (insn)
+ rtx insn;
+{
+ if (sibcall_epilogue)
+ return contains (insn, sibcall_epilogue);
+ return 0;
+}
+
+#ifdef HAVE_return
+/* Insert gen_return at the end of block BB. This also means updating
+ block_for_insn appropriately. */
+
+static void
+emit_return_into_block (bb)
+ basic_block bb;
+{
+ rtx p, end;
+
+ end = emit_jump_insn_after (gen_return (), bb->end);
+ /* The emitted return (possibly more than one insn) now follows the
+ old block end; walk forward and record BB as the containing block
+ for each new insn, then extend the block to cover them. */
+ p = NEXT_INSN (bb->end);
+ while (1)
+ {
+ set_block_for_insn (p, bb);
+ if (p == end)
+ break;
+ p = NEXT_INSN (p);
+ }
+ bb->end = end;
+}
+#endif /* HAVE_return */
/* Generate the prologue and epilogue RTL if the machine supports it. Thread
this into place with notes indicating where the prologue ends and where
rtx f ATTRIBUTE_UNUSED;
{
int insertted = 0;
+ edge e;
+ rtx seq;
#ifdef HAVE_prologue
if (HAVE_prologue)
{
- rtx seq;
+ rtx insn;
start_sequence ();
seq = gen_prologue();
/* Retain a map of the prologue insns. */
if (GET_CODE (seq) != SEQUENCE)
seq = get_insns ();
- prologue = record_insns (seq);
-
+ record_insns (seq, &prologue);
emit_note (NULL, NOTE_INSN_PROLOGUE_END);
+
+ /* GDB handles `break f' by setting a breakpoint on the first
+ line note *after* the prologue. That means that we should
+ insert a line note here; otherwise, if the next line note
+ comes part way into the next block, GDB will skip all the way
+ to that point. */
+ insn = next_nonnote_insn (f);
+ while (insn)
+ {
+ if (GET_CODE (insn) == NOTE
+ && NOTE_LINE_NUMBER (insn) >= 0)
+ {
+ emit_line_note_force (NOTE_SOURCE_FILE (insn),
+ NOTE_LINE_NUMBER (insn));
+ break;
+ }
+
+ insn = PREV_INSN (insn);
+ }
+
seq = gen_sequence ();
end_sequence ();
}
#endif
-#ifdef HAVE_epilogue
- if (HAVE_epilogue)
- {
- edge e;
- basic_block bb = 0;
- rtx tail = get_last_insn ();
+ /* If the exit block has no non-fake predecessors, we don't need
+ an epilogue. */
+ for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
+ if ((e->flags & EDGE_FAKE) == 0)
+ break;
+ if (e == NULL)
+ goto epilogue_done;
- /* ??? This is gastly. If function returns were not done via uses,
- but via mark_regs_live_at_end, we could use insert_insn_on_edge
- and all of this uglyness would go away. */
+#ifdef HAVE_return
+ if (optimize && HAVE_return)
+ {
+ /* If we're allowed to generate a simple return instruction,
+ then by definition we don't need a full epilogue. Examine
+ the block that falls through to EXIT. If it does not
+ contain any code, examine its predecessors and try to
+ emit (conditional) return instructions. */
+
+ basic_block last;
+ edge e_next;
+ rtx label;
+
+ for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
+ if (e->flags & EDGE_FALLTHRU)
+ break;
+ if (e == NULL)
+ goto epilogue_done;
+ last = e->src;
- switch (optimize)
+ /* Verify that there are no active instructions in the last block. */
+ label = last->end;
+ while (label && GET_CODE (label) != CODE_LABEL)
{
- default:
- /* If the exit block has no non-fake predecessors, we don't
- need an epilogue. Furthermore, only pay attention to the
- fallthru predecessors; if (conditional) return insns were
- generated, by definition we do not need to emit epilogue
- insns. */
-
- for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
- if ((e->flags & EDGE_FAKE) == 0
- && (e->flags & EDGE_FALLTHRU) != 0)
- break;
- if (e == NULL)
+ if (active_insn_p (label))
break;
+ label = PREV_INSN (label);
+ }
- /* We can't handle multiple epilogues -- if one is needed,
- we won't be able to place it multiple times.
+ if (last->head == label && GET_CODE (label) == CODE_LABEL)
+ {
+ for (e = last->pred; e ; e = e_next)
+ {
+ basic_block bb = e->src;
+ rtx jump;
- ??? Fix epilogue expanders to not assume they are the
- last thing done compiling the function. Either that
- or copy_rtx each insn.
+ e_next = e->pred_next;
+ if (bb == ENTRY_BLOCK_PTR)
+ continue;
- ??? Blah, it's not a simple expression to assert that
- we've exactly one fallthru exit edge. */
+ jump = bb->end;
+ if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
+ continue;
- bb = e->src;
- tail = bb->end;
+ /* If we have an unconditional jump, we can replace that
+ with a simple return instruction. */
+ if (simplejump_p (jump))
+ {
+ emit_return_into_block (bb);
+ flow_delete_insn (jump);
+ }
- /* ??? If the last insn of the basic block is a jump, then we
- are creating a new basic block. Wimp out and leave these
- insns outside any block. */
- if (GET_CODE (tail) == JUMP_INSN)
- bb = 0;
+ /* If we have a conditional jump, we can try to replace
+ that with a conditional return instruction. */
+ else if (condjump_p (jump))
+ {
+ rtx ret, *loc;
- /* FALLTHRU */
- case 0:
- {
- rtx prev, seq, first_use;
-
- /* Move the USE insns at the end of a function onto a list. */
- prev = tail;
- if (GET_CODE (prev) == BARRIER
- || GET_CODE (prev) == NOTE)
- prev = prev_nonnote_insn (prev);
-
- first_use = 0;
- if (prev
- && GET_CODE (prev) == INSN
- && GET_CODE (PATTERN (prev)) == USE)
- {
- /* If the end of the block is the use, grab hold of something
- else so that we emit barriers etc in the right place. */
- if (prev == tail)
- {
- do
- tail = PREV_INSN (tail);
- while (GET_CODE (tail) == INSN
- && GET_CODE (PATTERN (tail)) == USE);
- }
+ ret = SET_SRC (PATTERN (jump));
+ if (GET_CODE (XEXP (ret, 1)) == LABEL_REF)
+ loc = &XEXP (ret, 1);
+ else
+ loc = &XEXP (ret, 2);
+ ret = gen_rtx_RETURN (VOIDmode);
+
+ if (! validate_change (jump, loc, ret, 0))
+ continue;
+ if (JUMP_LABEL (jump))
+ LABEL_NUSES (JUMP_LABEL (jump))--;
+
+ /* If this block has only one successor, it both jumps
+ and falls through to the fallthru block, so we can't
+ delete the edge. */
+ if (bb->succ->succ_next == NULL)
+ continue;
+ }
+ else
+ continue;
- do
- {
- rtx use = prev;
- prev = prev_nonnote_insn (prev);
+ /* Fix up the CFG for the successful change we just made. */
+ remove_edge (e);
+ make_edge (NULL, bb, EXIT_BLOCK_PTR, 0);
+ }
- remove_insn (use);
- if (first_use)
- {
- NEXT_INSN (use) = first_use;
- PREV_INSN (first_use) = use;
- }
- else
- NEXT_INSN (use) = NULL_RTX;
- first_use = use;
- }
- while (prev
- && GET_CODE (prev) == INSN
- && GET_CODE (PATTERN (prev)) == USE);
- }
+ /* Emit a return insn for the exit fallthru block. Whether
+ this is still reachable will be determined later. */
- /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
- epilogue insns, the USE insns at the end of a function,
- the jump insn that returns, and then a BARRIER. */
+ emit_barrier_after (last->end);
+ emit_return_into_block (last);
+ }
+ else
+ {
+ /* The exit block wasn't empty.  Since the block that falls through
+    to EXIT may also branch elsewhere, we must emit the epilogue on
+    the fallthru edge itself rather than appending it to the block. */
+ start_sequence ();
+ emit_jump_insn (gen_return ());
+ seq = gen_sequence ();
+ end_sequence ();
+ insert_insn_on_edge (seq, e);
+ insertted = 1;
+ }
+ goto epilogue_done;
+ }
+#endif
+#ifdef HAVE_epilogue
+ if (HAVE_epilogue)
+ {
+ /* Find the edge that falls through to EXIT. Other edges may exist
+ due to RETURN instructions, but those don't need epilogues.
+ There really shouldn't be a mixture -- either all should have
+ been converted or none, however... */
- if (GET_CODE (tail) != BARRIER)
- {
- prev = next_nonnote_insn (tail);
- if (!prev || GET_CODE (prev) != BARRIER)
- emit_barrier_after (tail);
- }
+ for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
+ if (e->flags & EDGE_FALLTHRU)
+ break;
+ if (e == NULL)
+ goto epilogue_done;
- seq = gen_epilogue ();
- prev = tail;
- tail = emit_jump_insn_after (seq, tail);
+ start_sequence ();
+ emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);
- /* Insert the USE insns immediately before the return insn, which
- must be the last instruction emitted in the sequence. */
- if (first_use)
- emit_insns_before (first_use, tail);
- emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
+ seq = gen_epilogue ();
+ emit_jump_insn (seq);
- /* Update the tail of the basic block. */
- if (bb)
- bb->end = tail;
+ /* Retain a map of the epilogue insns. */
+ if (GET_CODE (seq) != SEQUENCE)
+ seq = get_insns ();
+ record_insns (seq, &epilogue);
- /* Retain a map of the epilogue insns. */
- epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
- }
- }
+ seq = gen_sequence ();
+ end_sequence();
+
+ insert_insn_on_edge (seq, e);
+ insertted = 1;
}
#endif
+epilogue_done:
if (insertted)
commit_edge_insertions ();
+
+#ifdef HAVE_sibcall_epilogue
+ /* Emit sibling epilogues before any sibling call sites. */
+ for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
+ {
+ basic_block bb = e->src;
+ rtx insn = bb->end;
+ rtx i;
+
+ if (GET_CODE (insn) != CALL_INSN
+ || ! SIBLING_CALL_P (insn))
+ continue;
+
+ start_sequence ();
+ seq = gen_sibcall_epilogue ();
+ end_sequence ();
+
+ i = PREV_INSN (insn);
+ emit_insn_before (seq, insn);
+
+ /* Update the UID to basic block map. */
+ for (i = NEXT_INSN (i); i != insn; i = NEXT_INSN (i))
+ set_block_for_insn (i, bb);
+
+ /* Retain a map of the epilogue insns. Used in life analysis to
+ avoid getting rid of sibcall epilogue insns. */
+ record_insns (seq, &sibcall_epilogue);
+ }
+#endif
}
/* Reposition the prologue-end and epilogue-begin notes after instruction
rtx f ATTRIBUTE_UNUSED;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
- /* Reposition the prologue and epilogue notes. */
- if (n_basic_blocks)
+ int len;
+
+ if ((len = VARRAY_SIZE (prologue)) > 0)
{
- int len;
+ register rtx insn, note = 0;
- if (prologue)
+ /* Scan from the beginning until we reach the last prologue insn.
+ We apparently can't depend on basic_block_{head,end} after
+ reorg has run. */
+ for (insn = f; len && insn; insn = NEXT_INSN (insn))
{
- register rtx insn, note = 0;
-
- /* Scan from the beginning until we reach the last prologue insn.
- We apparently can't depend on basic_block_{head,end} after
- reorg has run. */
- for (len = 0; prologue[len]; len++)
- ;
- for (insn = f; len && insn; insn = NEXT_INSN (insn))
+ if (GET_CODE (insn) == NOTE)
+ {
+ if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
+ note = insn;
+ }
+ else if ((len -= contains (insn, prologue)) == 0)
{
- if (GET_CODE (insn) == NOTE)
+ rtx next;
+ /* Find the prologue-end note if we haven't already, and
+ move it to just after the last prologue insn. */
+ if (note == 0)
{
- if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
- note = insn;
+ for (note = insn; (note = NEXT_INSN (note));)
+ if (GET_CODE (note) == NOTE
+ && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
+ break;
}
- else if ((len -= contains (insn, prologue)) == 0)
- {
- rtx next;
- /* Find the prologue-end note if we haven't already, and
- move it to just after the last prologue insn. */
- if (note == 0)
- {
- for (note = insn; (note = NEXT_INSN (note));)
- if (GET_CODE (note) == NOTE
- && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
- break;
- }
- next = NEXT_INSN (note);
+ next = NEXT_INSN (note);
- /* Whether or not we can depend on BLOCK_HEAD,
- attempt to keep it up-to-date. */
- if (BLOCK_HEAD (0) == note)
- BLOCK_HEAD (0) = next;
+ /* Whether or not we can depend on BLOCK_HEAD,
+ attempt to keep it up-to-date. */
+ if (BLOCK_HEAD (0) == note)
+ BLOCK_HEAD (0) = next;
- remove_insn (note);
- add_insn_after (note, insn);
- }
+ remove_insn (note);
+ add_insn_after (note, insn);
}
}
+ }
+
+ if ((len = VARRAY_SIZE (epilogue)) > 0)
+ {
+ register rtx insn, note = 0;
- if (epilogue)
+ /* Scan from the end until we reach the first epilogue insn.
+ We apparently can't depend on basic_block_{head,end} after
+ reorg has run. */
+ for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
{
- register rtx insn, note = 0;
-
- /* Scan from the end until we reach the first epilogue insn.
- We apparently can't depend on basic_block_{head,end} after
- reorg has run. */
- for (len = 0; epilogue[len]; len++)
- ;
- for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
+ if (GET_CODE (insn) == NOTE)
{
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
+ note = insn;
+ }
+ else if ((len -= contains (insn, epilogue)) == 0)
+ {
+ /* Find the epilogue-begin note if we haven't already, and
+ move it to just before the first epilogue insn. */
+ if (note == 0)
{
- if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
- note = insn;
+ for (note = insn; (note = PREV_INSN (note));)
+ if (GET_CODE (note) == NOTE
+ && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
+ break;
}
- else if ((len -= contains (insn, epilogue)) == 0)
- {
- /* Find the epilogue-begin note if we haven't already, and
- move it to just before the first epilogue insn. */
- if (note == 0)
- {
- for (note = insn; (note = PREV_INSN (note));)
- if (GET_CODE (note) == NOTE
- && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
- break;
- }
- /* Whether or not we can depend on BLOCK_HEAD,
- attempt to keep it up-to-date. */
- if (n_basic_blocks
- && BLOCK_HEAD (n_basic_blocks-1) == insn)
- BLOCK_HEAD (n_basic_blocks-1) = note;
+ /* Whether or not we can depend on BLOCK_HEAD,
+ attempt to keep it up-to-date. */
+ if (n_basic_blocks
+ && BLOCK_HEAD (n_basic_blocks-1) == insn)
+ BLOCK_HEAD (n_basic_blocks-1) = note;
- remove_insn (note);
- add_insn_before (note, insn);
- }
+ remove_insn (note);
+ add_insn_before (note, insn);
}
}
}
/* Mark P for GC. */
static void
-mark_function_state (p)
+mark_function_status (p)
struct function *p;
{
int i;
for (; f; f = f->next_global)
{
- if (f->can_garbage_collect)
- continue;
-
ggc_mark_tree (f->decl);
- mark_function_state (f);
- mark_stmt_state (f->stmt);
- mark_eh_state (f->eh);
- mark_emit_state (f->emit);
- mark_varasm_state (f->varasm);
-
- ggc_mark_rtx (f->expr->x_saveregs_value);
- ggc_mark_rtx (f->expr->x_apply_args_value);
- ggc_mark_rtx (f->expr->x_forced_labels);
+ mark_function_status (f);
+ mark_eh_status (f->eh);
+ mark_stmt_status (f->stmt);
+ mark_expr_status (f->expr);
+ mark_emit_status (f->emit);
+ mark_varasm_status (f->varasm);
if (mark_machine_status)
(*mark_machine_status) (f);
{
ggc_add_root (&all_functions, 1, sizeof all_functions,
mark_function_chain);
+
+ VARRAY_INT_INIT (prologue, 0, "prologue");
+ VARRAY_INT_INIT (epilogue, 0, "epilogue");
+ VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
}