Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
1998, 1999, 2000, 2001 Free Software Foundation, Inc.
-This file is part of GNU CC.
+This file is part of GCC.
-GNU CC is free software; you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
-any later version.
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 2, or (at your option) any later
+version.
-GNU CC is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+for more details.
You should have received a copy of the GNU General Public License
-along with GNU CC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+along with GCC; see the file COPYING. If not, write to the Free
+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
+02111-1307, USA. */
/* This file handles the generation of rtl code from tree structure
at the level of the function as a whole.
#include "except.h"
#include "function.h"
#include "expr.h"
+#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif
-#if !defined (PREFERRED_STACK_BOUNDARY) && defined (STACK_BOUNDARY)
-#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
-#endif
-
/* Some systems use __main in a way incompatible with its use in gcc, in these
cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
give the same symbol without quotes for an alternative entry point. You
/* The currently compiled function. */
struct function *cfun = 0;
-/* Global list of all compiled functions. */
-struct function *all_functions = 0;
-
/* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
static varray_type prologue;
static varray_type epilogue;
struct fixup_replacement *next;
};
-struct insns_for_mem_entry {
+struct insns_for_mem_entry
+{
/* The KEY in HE will be a MEM. */
struct hash_entry he;
/* These are the INSNS which reference the MEM. */
static rtx round_trampoline_addr PARAMS ((rtx));
static rtx adjust_trampoline_addr PARAMS ((rtx));
static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
-static void reorder_blocks_0 PARAMS ((rtx));
+static void reorder_blocks_0 PARAMS ((tree));
static void reorder_blocks_1 PARAMS ((rtx, tree, varray_type *));
+static void reorder_fix_fragments PARAMS ((tree));
static tree blocks_nreverse PARAMS ((tree));
static int all_blocks PARAMS ((tree, tree *));
static tree *get_block_vector PARAMS ((tree, int *));
static bool insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
static int insns_for_mem_walk PARAMS ((rtx *, void *));
static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
-static void mark_temp_slot PARAMS ((struct temp_slot *));
static void mark_function_status PARAMS ((struct function *));
-static void mark_function_chain PARAMS ((void *));
+static void maybe_mark_struct_function PARAMS ((void *));
static void prepare_function_start PARAMS ((void));
static void do_clobber_return_reg PARAMS ((rtx, void *));
static void do_use_return_reg PARAMS ((rtx, void *));
\f
/* Pointer to chain of `struct function' for containing functions. */
-struct function *outer_function_chain;
+static struct function *outer_function_chain;
/* Given a function decl for a containing function,
return the `struct function' for it. */
{
struct function *p;
- for (p = outer_function_chain; p; p = p->next)
+ for (p = outer_function_chain; p; p = p->outer)
if (p->decl == decl)
return p;
push_function_context_to (context)
tree context;
{
- struct function *p, *context_data;
+ struct function *p;
if (context)
{
- context_data = (context == current_function_decl
- ? cfun
- : find_function_data (context));
- context_data->contains_functions = 1;
+ if (context == current_function_decl)
+ cfun->contains_functions = 1;
+ else
+ {
+ struct function *containing = find_function_data (context);
+ containing->contains_functions = 1;
+ }
}
if (cfun == 0)
init_dummy_function_start ();
p = cfun;
- p->next = outer_function_chain;
+ p->outer = outer_function_chain;
outer_function_chain = p;
p->fixup_var_refs_queue = 0;
{
struct function *p = outer_function_chain;
struct var_refs_queue *queue;
- struct var_refs_queue *next;
cfun = p;
- outer_function_chain = p->next;
+ outer_function_chain = p->outer;
current_function_decl = p->decl;
reg_renumber = 0;
restore_emit_status (p);
+ restore_varasm_status (p);
if (restore_lang_status)
(*restore_lang_status) (p);
/* Finish doing put_var_into_stack for any of our variables
which became addressable during the nested function. */
- for (queue = p->fixup_var_refs_queue; queue; queue = next)
- {
- next = queue->next;
- fixup_var_refs (queue->modified, queue->promoted_mode,
- queue->unsignedp, 0);
- free (queue);
- }
+ for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
+ fixup_var_refs (queue->modified, queue->promoted_mode,
+ queue->unsignedp, 0);
+
p->fixup_var_refs_queue = 0;
/* Reset variables that have known state during rtx generation. */
free_after_compilation (f)
struct function *f;
{
- struct temp_slot *ts;
- struct temp_slot *next;
-
free_eh_status (f);
free_expr_status (f);
free_emit_status (f);
if (f->x_parm_reg_stack_loc)
free (f->x_parm_reg_stack_loc);
- for (ts = f->x_temp_slots; ts; ts = next)
- {
- next = ts->next;
- free (ts);
- }
f->x_temp_slots = NULL;
-
f->arg_offset_rtx = NULL;
f->return_rtx = NULL;
f->internal_arg_pointer = NULL;
int align;
struct function *function;
{
- register rtx x, addr;
+ rtx x, addr;
int bigend_correction = 0;
int alignment;
if (best_p->size - rounded_size >= alignment)
{
- p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
+ p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
p->in_use = p->addr_taken = 0;
p->size = best_p->size - rounded_size;
p->base_offset = best_p->base_offset + rounded_size;
{
HOST_WIDE_INT frame_offset_old = frame_offset;
- p = (struct temp_slot *) xmalloc (sizeof (struct temp_slot));
+ p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
/* We are passing an explicit alignment request to assign_stack_local.
One side effect of that is assign_stack_local will not round SIZE
/* If we know the alias set for the memory that will be used, use
it. If there's no TYPE, then we don't know anything about the
alias set for the memory. */
- if (type)
- MEM_ALIAS_SET (p->slot) = get_alias_set (type);
- else
- MEM_ALIAS_SET (p->slot) = 0;
+ set_mem_alias_set (p->slot, type ? get_alias_set (type) : 0);
- /* If a type is specified, set the relevant flags. */
+ /* If a type is specified, set the relevant flags. */
if (type != 0)
{
RTX_UNCHANGING_P (p->slot) = TYPE_READONLY (type);
}
/* Either delete Q or advance past it. */
if (delete_q)
- {
- prev_q->next = q->next;
- free (q);
- }
+ prev_q->next = q->next;
else
prev_q = q;
}
/* If this slot is below the current TEMP_SLOT_LEVEL, then it
needs to be preserved. This can happen if a temporary in
the RTL_EXPR was addressed; preserve_temp_slots will move
- the temporary into a higher level. */
+ the temporary into a higher level. */
if (temp_slot_level <= p->level)
p->in_use = 0;
else
put_var_into_stack (decl)
tree decl;
{
- register rtx reg;
+ rtx reg;
enum machine_mode promoted_mode, decl_mode;
struct function *function = 0;
tree context;
/* Get the mode it's actually stored in. */
promoted_mode = GET_MODE (reg);
- /* If this variable comes from an outer function,
- find that function's saved context. */
+ /* If this variable comes from an outer function, find that
+ function's saved context. Don't use find_function_data here,
+ because it might not be in any active function.
+ FIXME: Is that really supposed to happen?
+ It does in ObjC at least. */
if (context != current_function_decl && context != inline_function_decl)
- for (function = outer_function_chain; function; function = function->next)
+ for (function = outer_function_chain; function; function = function->outer)
if (function->decl == context)
break;
/* Change the CONCAT into a combined MEM for both parts. */
PUT_CODE (reg, MEM);
+ MEM_ATTRS (reg) = 0;
+
+  /* set_mem_attributes uses DECL_RTL to avoid re-generating
+     already-computed alias sets.  Here we want to re-generate.  */
+ if (DECL_P (decl))
+ SET_DECL_RTL (decl, NULL);
set_mem_attributes (reg, decl, 1);
+ if (DECL_P (decl))
+ SET_DECL_RTL (decl, reg);
/* The two parts are in memory order already.
Use the lower parts address as ours. */
PUT_CODE (reg, MEM);
PUT_MODE (reg, decl_mode);
XEXP (reg, 0) = XEXP (new, 0);
+ MEM_ATTRS (reg) = 0;
/* `volatil' bit means one thing for MEMs, another entirely for REGs. */
MEM_VOLATILE_P (reg) = volatile_p;
{
MEM_SET_IN_STRUCT_P (reg,
AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
- MEM_ALIAS_SET (reg) = get_alias_set (type);
+ set_mem_alias_set (reg, get_alias_set (type));
}
+
if (used_p)
schedule_fixup_var_refs (function, reg, type, promoted_mode, ht);
}
struct var_refs_queue *temp;
temp
- = (struct var_refs_queue *) xmalloc (sizeof (struct var_refs_queue));
+ = (struct var_refs_queue *) ggc_alloc (sizeof (struct var_refs_queue));
temp->modified = reg;
temp->promoted_mode = promoted_mode;
temp->unsignedp = unsigned_p;
N.B. No need for special processing of CALL_PLACEHOLDERs here,
because the hash table will point straight to the interesting insn
(inside the CALL_PLACEHOLDER). */
+
static void
fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp)
struct hash_table *ht;
references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
TOPLEVEL is nonzero if this is the main insn chain for this
function. */
+
static void
fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel)
rtx insn;
find_reg_note (XEXP (note, 0), REG_RETVAL,
NULL_RTX));
- /* In unoptimized compilation, we shouldn't call delete_insn
- except in jump.c doing warnings. */
- PUT_CODE (insn, NOTE);
- NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
- NOTE_SOURCE_FILE (insn) = 0;
+ delete_insn (insn);
}
/* The insn to load VAR from a home in the arglist
&& SET_DEST (prev_set) == SET_SRC (set)
&& rtx_equal_p (SET_SRC (prev_set), var))))
{
- /* In unoptimized compilation, we shouldn't call delete_insn
- except in jump.c doing warnings. */
- PUT_CODE (insn, NOTE);
- NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
- NOTE_SOURCE_FILE (insn) = 0;
+ delete_insn (insn);
}
else
{
static void
fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
- register rtx var;
+ rtx var;
enum machine_mode promoted_mode;
- register rtx *loc;
+ rtx *loc;
rtx insn;
struct fixup_replacement **replacements;
{
- register int i;
- register rtx x = *loc;
+ int i;
+ rtx x = *loc;
RTX_CODE code = GET_CODE (x);
- register const char *fmt;
- register rtx tem, tem1;
+ const char *fmt;
+ rtx tem, tem1;
struct fixup_replacement *replacement;
switch (code)
enum machine_mode is_mode = GET_MODE (tem);
HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
-#ifdef HAVE_extzv
if (GET_CODE (x) == ZERO_EXTRACT)
{
- wanted_mode
- = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
- if (wanted_mode == VOIDmode)
- wanted_mode = word_mode;
+ enum machine_mode new_mode
+ = mode_for_extraction (EP_extzv, 1);
+ if (new_mode != MAX_MACHINE_MODE)
+ wanted_mode = new_mode;
}
-#endif
-#ifdef HAVE_extv
- if (GET_CODE (x) == SIGN_EXTRACT)
+ else if (GET_CODE (x) == SIGN_EXTRACT)
{
- wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
- if (wanted_mode == VOIDmode)
- wanted_mode = word_mode;
+ enum machine_mode new_mode
+ = mode_for_extraction (EP_extv, 1);
+ if (new_mode != MAX_MACHINE_MODE)
+ wanted_mode = new_mode;
}
-#endif
+
/* If we have a narrower mode, we can do something. */
if (wanted_mode != VOIDmode
&& GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
pos %= GET_MODE_BITSIZE (wanted_mode);
- newmem = gen_rtx_MEM (wanted_mode,
- plus_constant (XEXP (tem, 0), offset));
- MEM_COPY_ATTRIBUTES (newmem, tem);
+ newmem = adjust_address_nv (tem, wanted_mode, offset);
/* Make the change and see if the insn remains valid. */
INSN_CODE (insn) = -1;
{
rtx dest = SET_DEST (x);
rtx src = SET_SRC (x);
-#ifdef HAVE_insv
rtx outerdest = dest;
-#endif
while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
|| GET_CODE (dest) == SIGN_EXTRACT
/* We will need to rerecognize this insn. */
INSN_CODE (insn) = -1;
-#ifdef HAVE_insv
- if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
+ if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var
+ && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
{
/* Since this case will return, ensure we fixup all the
operands here. */
enum machine_mode is_mode = GET_MODE (tem);
HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
- wanted_mode = insn_data[(int) CODE_FOR_insv].operand[0].mode;
- if (wanted_mode == VOIDmode)
- wanted_mode = word_mode;
+ wanted_mode = mode_for_extraction (EP_insv, 0);
/* If we have a narrower mode, we can do something. */
if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
pos %= GET_MODE_BITSIZE (wanted_mode);
- newmem = gen_rtx_MEM (wanted_mode,
- plus_constant (XEXP (tem, 0),
- offset));
- MEM_COPY_ATTRIBUTES (newmem, tem);
+ newmem = adjust_address_nv (tem, wanted_mode, offset);
/* Make the change and see if the insn remains valid. */
INSN_CODE (insn) = -1;
XEXP (outerdest, 0) = tem1;
return;
}
-#endif
/* STRICT_LOW_PART is a no-op on memory references
and it can cause combinations to be unrecognizable,
REG_NOTES (insn) = REG_NOTES (last);
PATTERN (insn) = PATTERN (last);
- PUT_CODE (last, NOTE);
- NOTE_LINE_NUMBER (last) = NOTE_INSN_DELETED;
- NOTE_SOURCE_FILE (last) = 0;
+ delete_insn (last);
}
else
PATTERN (insn) = pat;
REG_NOTES (insn) = REG_NOTES (last);
PATTERN (insn) = PATTERN (last);
- PUT_CODE (last, NOTE);
- NOTE_LINE_NUMBER (last) = NOTE_INSN_DELETED;
- NOTE_SOURCE_FILE (last) = 0;
+ delete_insn (last);
}
else
PATTERN (insn) = pat;
fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
else if (fmt[i] == 'E')
{
- register int j;
+ int j;
for (j = 0; j < XVECLEN (x, i); j++)
fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
insn, replacements);
&& ! uncritical)
abort ();
- addr = plus_constant (addr, offset);
- if (!flag_force_addr && memory_address_p (mode, addr))
+ if (!flag_force_addr
+ && memory_address_p (mode, plus_constant (addr, offset)))
/* Shortcut if no insns need be emitted. */
- return change_address (SUBREG_REG (x), mode, addr);
+ return adjust_address (SUBREG_REG (x), mode, offset);
+
start_sequence ();
- result = change_address (SUBREG_REG (x), mode, addr);
+ result = adjust_address (SUBREG_REG (x), mode, offset);
emit_insn_before (gen_sequence (), insn);
end_sequence ();
return result;
static rtx
walk_fixup_memory_subreg (x, insn, uncritical)
- register rtx x;
+ rtx x;
rtx insn;
int uncritical;
{
- register enum rtx_code code;
- register const char *fmt;
- register int i;
+ enum rtx_code code;
+ const char *fmt;
+ int i;
if (x == 0)
return 0;
XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
else if (fmt[i] == 'E')
{
- register int j;
+ int j;
for (j = 0; j < XVECLEN (x, i); j++)
XVECEXP (x, i, j)
= walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
rtx x;
rtx insn;
{
- register int i;
- register RTX_CODE code = GET_CODE (x);
- register const char *fmt;
+ int i;
+ RTX_CODE code = GET_CODE (x);
+ const char *fmt;
if (code == MEM)
{
- register rtx ad = XEXP (x, 0);
+ rtx ad = XEXP (x, 0);
/* If we have address of a stack slot but it's not valid
(displacement is too large), compute the sum in a register. */
if (GET_CODE (ad) == PLUS
seq = gen_sequence ();
end_sequence ();
emit_insn_before (seq, insn);
- return change_address (x, VOIDmode, temp);
+ return replace_equiv_address (x, temp);
}
return x;
}
XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
else if (fmt[i] == 'E')
{
- register int j;
+ int j;
for (j = 0; j < XVECLEN (x, i); j++)
XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
}
rtx insn;
rtx *equiv_mem;
{
- register rtx bitfield;
+ rtx bitfield;
int destflag;
rtx seq = 0;
enum machine_mode mode;
!= BLKmode)
&& INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
{
- register rtx memref = 0;
+ rtx memref = 0;
/* Now check that the containing word is memory, not a register,
and that it is safe to change the machine mode. */
}
start_sequence ();
- memref = change_address (memref, mode,
- plus_constant (XEXP (memref, 0), offset));
+ memref = adjust_address (memref, mode, offset);
insns = get_insns ();
end_sequence ();
emit_insns_before (insns, insn);
rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
REGNO (reg), decl);
+ /* Calculate this before we start messing with decl's RTL. */
+ HOST_WIDE_INT set = decl ? get_alias_set (decl) : 0;
+
/* If the original REG was a user-variable, then so is the REG whose
address is being taken. Likewise for unchanging. */
REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);
PUT_CODE (reg, MEM);
+ MEM_ATTRS (reg) = 0;
XEXP (reg, 0) = r;
+
if (decl)
{
tree type = TREE_TYPE (decl);
enum machine_mode decl_mode
= (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
: DECL_MODE (decl));
+ rtx decl_rtl = decl ? DECL_RTL_IF_SET (decl) : 0;
PUT_MODE (reg, decl_mode);
- MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
- MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
- MEM_ALIAS_SET (reg) = get_alias_set (decl);
+
+ /* Clear DECL_RTL momentarily so functions below will work
+ properly, then set it again. */
+ if (decl_rtl == reg)
+ SET_DECL_RTL (decl, 0);
+
+ set_mem_attributes (reg, decl, 1);
+ set_mem_alias_set (reg, set);
+
+ if (decl_rtl == reg)
+ SET_DECL_RTL (decl, reg);
if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), 0);
}
else
- {
- /* We have no alias information about this newly created MEM. */
- MEM_ALIAS_SET (reg) = 0;
-
- fixup_var_refs (reg, GET_MODE (reg), 0, 0);
- }
+ fixup_var_refs (reg, GET_MODE (reg), 0, 0);
return reg;
}
else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
{
rtx sub = XEXP (XEXP (x, 0), 0);
- rtx sub2;
if (GET_CODE (sub) == MEM)
- {
- sub2 = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
- MEM_COPY_ATTRIBUTES (sub2, sub);
- sub = sub2;
- }
+ sub = adjust_address_nv (sub, GET_MODE (x), 0);
else if (GET_CODE (sub) == REG
&& (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
;
return k1 == k2;
}
-struct insns_for_mem_walk_info {
+struct insns_for_mem_walk_info
+{
/* The hash table that we are using to record which INSNs use which
MEMs. */
struct hash_table *ht;
for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
{
HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
+ HOST_WIDE_INT size_rtl;
instantiate_decl (DECL_RTL (decl), size, valid_only);
/* If the parameter was promoted, then the incoming RTL mode may be
larger than the declared type size. We must use the larger of
the two sizes. */
- size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
+ size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
+ size = MAX (size_rtl, size);
instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
}
|| (nonlocal_goto_stack_level != 0
&& reg_mentioned_p (nonlocal_goto_stack_level,
PATTERN (insn))))
- delete_insn (insn);
+ delete_related_insns (insn);
}
}
}
rtx
get_first_block_beg ()
{
- register rtx searcher;
- register rtx insn = get_first_nonparm_insn ();
+ rtx searcher;
+ rtx insn = get_first_nonparm_insn ();
for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
if (GET_CODE (searcher) == NOTE
assign_parms (fndecl)
tree fndecl;
{
- register tree parm;
- register rtx entry_parm = 0;
- register rtx stack_parm = 0;
+ tree parm;
+ rtx entry_parm = 0;
+ rtx stack_parm = 0;
CUMULATIVE_ARGS args_so_far;
enum machine_mode promoted_mode, passed_mode;
enum machine_mode nominal_mode, promoted_nominal_mode;
/* Store the parm in a pseudoregister during the function, but we
may need to do it in a wider mode. */
- register rtx parmreg;
+ rtx parmreg;
unsigned int regno, regnoi = 0, regnor = 0;
unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
appropriately. */
if (passed_pointer)
{
- SET_DECL_RTL (parm,
- gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
- parmreg));
- set_mem_attributes (DECL_RTL (parm), parm, 1);
+ rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
+ parmreg);
+ set_mem_attributes (x, parm, 1);
+ SET_DECL_RTL (parm, x);
}
else
{
if (promoted_mode != nominal_mode)
{
- /* Conversion is required. */
+ /* Conversion is required. */
rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
emit_move_insn (tempreg, validize_mem (entry_parm));
entry_parm = convert_to_mode (nominal_mode, tempreg,
TREE_UNSIGNED (TREE_TYPE (parm)));
if (stack_parm)
- {
- /* ??? This may need a big-endian conversion on sparc64. */
- stack_parm = change_address (stack_parm, nominal_mode,
- NULL_RTX);
- }
+ /* ??? This may need a big-endian conversion on sparc64. */
+ stack_parm = adjust_address (stack_parm, nominal_mode, 0);
+
conversion_insns = get_insns ();
did_conversion = 1;
end_sequence ();
if (parm == function_result_decl)
{
tree result = DECL_RESULT (fndecl);
+ rtx x = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
- SET_DECL_RTL (result,
- gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm)));
+ set_mem_attributes (x, result, 1);
+ SET_DECL_RTL (result, x);
+ }
- set_mem_attributes (DECL_RTL (result), result, 1);
+ if (GET_CODE (DECL_RTL (parm)) == REG)
+ REGNO_DECL (REGNO (DECL_RTL (parm))) = parm;
+ else if (GET_CODE (DECL_RTL (parm)) == CONCAT)
+ {
+ REGNO_DECL (REGNO (XEXP (DECL_RTL (parm), 0))) = parm;
+ REGNO_DECL (REGNO (XEXP (DECL_RTL (parm), 1))) = parm;
}
+
}
/* Output all parameter conversion instructions (possibly including calls)
#endif
#endif
-#ifdef STACK_BOUNDARY
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
current_function_args_size
= ((current_function_args_size + STACK_BYTES - 1)
/ STACK_BYTES) * STACK_BYTES;
-#endif
#ifdef ARGS_GROW_DOWNWARD
current_function_arg_offset_rtx
uninitialized_vars_warning (block)
tree block;
{
- register tree decl, sub;
+ tree decl, sub;
for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
{
if (warn_uninitialized
void
setjmp_args_warning ()
{
- register tree decl;
+ tree decl;
for (decl = DECL_ARGUMENTS (current_function_decl);
decl; decl = TREE_CHAIN (decl))
if (DECL_RTL (decl) != 0
setjmp_protect (block)
tree block;
{
- register tree decl, sub;
+ tree decl, sub;
for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
if ((TREE_CODE (decl) == VAR_DECL
|| TREE_CODE (decl) == PARM_DECL)
void
setjmp_protect_args ()
{
- register tree decl;
+ tree decl;
for (decl = DECL_ARGUMENTS (current_function_decl);
decl; decl = TREE_CHAIN (decl))
if ((TREE_CODE (decl) == VAR_DECL
if (context == current_function_decl || context == inline_function_decl)
return addr;
- for (fp = outer_function_chain; fp; fp = fp->next)
- if (fp->decl == context)
- break;
-
- if (fp == 0)
- abort ();
+ fp = find_function_data (context);
if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
addr = XEXP (XEXP (addr, 0), 0);
#ifdef NEED_SEPARATE_AP
rtx addr;
- if (fp->x_arg_pointer_save_area == 0)
- fp->x_arg_pointer_save_area
- = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
-
- addr = fix_lexical_addr (XEXP (fp->x_arg_pointer_save_area, 0), var);
+ addr = get_arg_pointer_save_area (fp);
+ addr = fix_lexical_addr (XEXP (addr, 0), var);
addr = memory_address (Pmode, addr);
base = gen_rtx_MEM (Pmode, addr);
- MEM_ALIAS_SET (base) = get_frame_alias_set ();
+ set_mem_alias_set (base, get_frame_alias_set ());
base = copy_to_reg (base);
#else
displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
return
adjust_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
- for (fp = outer_function_chain; fp; fp = fp->next)
+ for (fp = outer_function_chain; fp; fp = fp->outer)
for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
if (TREE_PURPOSE (link) == function)
{
fn_context = decl_function_context (function);
if (fn_context != current_function_decl
&& fn_context != inline_function_decl)
- for (fp = outer_function_chain; fp; fp = fp->next)
- if (fp->decl == fn_context)
- break;
+ fp = find_function_data (fn_context);
/* Allocate run-time space for this trampoline
(usually in the defining function's stack frame). */
#ifdef TRAMPOLINE_ALIGNMENT
/* Round address up to desired boundary. */
rtx temp = gen_reg_rtx (Pmode);
- temp = expand_binop (Pmode, add_optab, tramp,
- GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
- temp, 0, OPTAB_LIB_WIDEN);
- tramp = expand_binop (Pmode, and_optab, temp,
- GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
- temp, 0, OPTAB_LIB_WIDEN);
+ rtx addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
+ rtx mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
+
+ temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
+ temp, 0, OPTAB_LIB_WIDEN);
+ tramp = expand_simple_binop (Pmode, AND, temp, mask,
+ temp, 0, OPTAB_LIB_WIDEN);
#endif
return tramp;
}
/* Given a trampoline address, round it then apply any
platform-specific adjustments so that the result can be used for a
- function call . */
+   function call.  */
static rtx
adjust_trampoline_addr (tramp)
return block_vector;
}
-/* Identify BLOCKs referenced by more than one
- NOTE_INSN_BLOCK_{BEG,END}, and create duplicate blocks. */
+/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
+ and create duplicate blocks. */
+/* ??? Need an option to either create block fragments or to create
+ abstract origin duplicates of a source block. It really depends
+ on what optimization has been performed. */
void
reorder_blocks ()
VARRAY_TREE_INIT (block_stack, 10, "block_stack");
+ /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
+ reorder_blocks_0 (block);
+
/* Prune the old trees away, so that they don't get in the way. */
BLOCK_SUBBLOCKS (block) = NULL_TREE;
BLOCK_CHAIN (block) = NULL_TREE;
- reorder_blocks_0 (get_insns ());
+ /* Recreate the block tree from the note nesting. */
reorder_blocks_1 (get_insns (), block, &block_stack);
-
BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
+ /* Remove deleted blocks from the block fragment chains. */
+ reorder_fix_fragments (block);
+
VARRAY_FREE (block_stack);
}
-/* Helper function for reorder_blocks. Process the insn chain beginning
- at INSNS. Recurse for CALL_PLACEHOLDER insns. */
+/* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
static void
-reorder_blocks_0 (insns)
- rtx insns;
+reorder_blocks_0 (block)
+ tree block;
{
- rtx insn;
-
- for (insn = insns; insn; insn = NEXT_INSN (insn))
+ while (block)
{
- if (GET_CODE (insn) == NOTE)
- {
- if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
- {
- tree block = NOTE_BLOCK (insn);
- TREE_ASM_WRITTEN (block) = 0;
- }
- }
- else if (GET_CODE (insn) == CALL_INSN
- && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
- {
- rtx cp = PATTERN (insn);
- reorder_blocks_0 (XEXP (cp, 0));
- if (XEXP (cp, 1))
- reorder_blocks_0 (XEXP (cp, 1));
- if (XEXP (cp, 2))
- reorder_blocks_0 (XEXP (cp, 2));
- }
+ TREE_ASM_WRITTEN (block) = 0;
+ reorder_blocks_0 (BLOCK_SUBBLOCKS (block));
+ block = BLOCK_CHAIN (block);
}
}
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
{
tree block = NOTE_BLOCK (insn);
- /* If we have seen this block before, copy it. */
+
+ /* If we have seen this block before, that means it now
+ spans multiple address regions. Create a new fragment. */
if (TREE_ASM_WRITTEN (block))
{
- block = copy_node (block);
- NOTE_BLOCK (insn) = block;
+ tree new_block = copy_node (block);
+ tree origin;
+
+ origin = (BLOCK_FRAGMENT_ORIGIN (block)
+ ? BLOCK_FRAGMENT_ORIGIN (block)
+ : block);
+ BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
+ BLOCK_FRAGMENT_CHAIN (new_block)
+ = BLOCK_FRAGMENT_CHAIN (origin);
+ BLOCK_FRAGMENT_CHAIN (origin) = new_block;
+
+ NOTE_BLOCK (insn) = new_block;
+ block = new_block;
}
+
BLOCK_SUBBLOCKS (block) = 0;
TREE_ASM_WRITTEN (block) = 1;
BLOCK_SUPERCONTEXT (block) = current_block;
}
}
+/* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
+ appears in the block tree, select one of the fragments to become
+ the new origin block. */
+
+static void
+reorder_fix_fragments (block)
+ tree block;
+{
+ while (block)
+ {
+ tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
+ tree new_origin = NULL_TREE;
+
+ if (dup_origin)
+ {
+ if (! TREE_ASM_WRITTEN (dup_origin))
+ {
+ new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
+
+ /* Find the first of the remaining fragments. There must
+ be at least one -- the current block. */
+ while (! TREE_ASM_WRITTEN (new_origin))
+ new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
+ BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
+ }
+ }
+ else if (! dup_origin)
+ new_origin = block;
+
+ /* Re-root the rest of the fragments to the new origin. In the
+ case that DUP_ORIGIN was null, that means BLOCK was the origin
+ of a chain of fragments and we want to remove those fragments
+ that didn't make it to the output. */
+ if (new_origin)
+ {
+ tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
+ tree chain = *pp;
+
+ while (chain)
+ {
+ if (TREE_ASM_WRITTEN (chain))
+ {
+ BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
+ *pp = chain;
+ pp = &BLOCK_FRAGMENT_CHAIN (chain);
+ }
+ chain = BLOCK_FRAGMENT_CHAIN (chain);
+ }
+ *pp = NULL_TREE;
+ }
+
+ reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
+ block = BLOCK_CHAIN (block);
+ }
+}
+
/* Reverse the order of elements in the chain T of blocks,
and return the new head of the chain (old last element). */
blocks_nreverse (t)
tree t;
{
- register tree prev = 0, decl, next;
+ tree prev = 0, decl, next;
for (decl = t; decl; decl = next)
{
next = BLOCK_CHAIN (decl);
}
\f
/* Allocate a function structure and reset its contents to the defaults. */
+
static void
prepare_function_start ()
{
- cfun = (struct function *) xcalloc (1, sizeof (struct function));
+ cfun = (struct function *) ggc_alloc_cleared (sizeof (struct function));
init_stmt_for_function ();
init_eh_for_function ();
cfun->original_decl_initial = 0;
cfun->original_arg_vector = 0;
-#ifdef STACK_BOUNDARY
cfun->stack_alignment_needed = STACK_BOUNDARY;
cfun->preferred_stack_boundary = STACK_BOUNDARY;
-#else
- cfun->stack_alignment_needed = 0;
- cfun->preferred_stack_boundary = 0;
-#endif
/* Set if a call to setjmp is seen. */
current_function_calls_setjmp = 0;
{
prepare_function_start ();
- /* Remember this function for later. */
- cfun->next_global = all_functions;
- all_functions = cfun;
-
current_function_name = (*decl_printable_name) (subr, 2);
cfun->decl = subr;
void
expand_main_function ()
{
-#if !defined (HAS_INIT_SECTION)
+#ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
+ if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
+ {
+ int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
+ rtx tmp;
+
+ /* Forcibly align the stack. */
+#ifdef STACK_GROWS_DOWNWARD
+ tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT(-align),
+ stack_pointer_rtx, 1, OPTAB_WIDEN);
+#else
+ tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
+ GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
+ tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
+ stack_pointer_rtx, 1, OPTAB_WIDEN);
+#endif
+ if (tmp != stack_pointer_rtx)
+ emit_move_insn (stack_pointer_rtx, tmp);
+
+ /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
+ tmp = force_reg (Pmode, const0_rtx);
+ allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
+ }
+#endif
+
+#ifndef HAS_INIT_SECTION
emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
VOIDmode, 0);
-#endif /* not HAS_INIT_SECTION */
+#endif
}
\f
extern struct obstack permanent_obstack;
if (aggregate_value_p (DECL_RESULT (subr)))
{
/* Returning something that won't go in a register. */
- register rtx value_address = 0;
+ rtx value_address = 0;
#ifdef PCC_STATIC_STRUCT_RETURN
if (current_function_returns_pcc_struct)
}
if (value_address)
{
- SET_DECL_RTL (DECL_RESULT (subr),
- gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)),
- value_address));
- set_mem_attributes (DECL_RTL (DECL_RESULT (subr)),
- DECL_RESULT (subr), 1);
+ rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
+ set_mem_attributes (x, DECL_RESULT (subr), 1);
+ SET_DECL_RTL (DECL_RESULT (subr), x);
}
}
else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
-(HOST_WIDE_INT) GET_MODE_SIZE (Pmode));
#endif
last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
- MEM_ALIAS_SET (last_ptr) = get_frame_alias_set ();
+ set_mem_alias_set (last_ptr, get_frame_alias_set ());
last_ptr = copy_to_reg (last_ptr);
/* If we are not optimizing, ensure that we know that this
free_after_parsing (cfun);
free_after_compilation (cfun);
- free (cfun);
cfun = 0;
}
finish_expr_for_function ();
+ /* If arg_pointer_save_area was referenced only from a nested
+ function, we will not have initialized it yet. Do that now. */
+ if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
+ get_arg_pointer_save_area (cfun);
+
#ifdef NON_SAVING_SETJMP
/* Don't put any variables in registers if we call setjmp
on a machine that fails to restore the registers. */
}
#endif
- /* Save the argument pointer if a save area was made for it. */
- if (arg_pointer_save_area)
- {
- /* arg_pointer_save_area may not be a valid memory address, so we
- have to check it and fix it if necessary. */
- rtx seq;
- start_sequence ();
- emit_move_insn (validize_mem (arg_pointer_save_area),
- virtual_incoming_args_rtx);
- seq = gen_sequence ();
- end_sequence ();
- emit_insn_before (seq, tail_recursion_reentry);
- }
-
/* Initialize any trampolines required by this function. */
for (link = trampoline_list; link; link = TREE_CHAIN (link))
{
instead of using the general framework. */
use_return_register ();
- /* Output a return insn if we are using one.
- Otherwise, let the rtl chain end here, to drop through
- into the epilogue. */
-
-#ifdef HAVE_return
- if (HAVE_return)
- {
- emit_jump_insn (gen_return ());
- emit_barrier ();
- }
-#endif
-
/* Fix up any gotos that jumped out to the outermost
binding level of the function.
Must follow emitting RETURN_LABEL. */
then you will lose. */
expand_fixups (get_insns ());
}
+
+/* Return F's arg-pointer save area, allocating the stack slot on
+ first use.  If F is the current function, also arrange for insns at
+ the very start of the function that copy the incoming arg pointer
+ into that slot.  */
+
+rtx
+get_arg_pointer_save_area (f)
+ struct function *f;
+{
+ rtx ret = f->x_arg_pointer_save_area;
+
+ if (! ret)
+ {
+ ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
+ f->x_arg_pointer_save_area = ret;
+ }
+
+ if (f == cfun && ! f->arg_pointer_save_area_init)
+ {
+ rtx seq;
+
+ /* Save the arg pointer at the beginning of the function. The
+ generated stack slot may not be a valid memory address, so we
+ have to check it and fix it if necessary. */
+ start_sequence ();
+ emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
+ seq = gen_sequence ();
+ end_sequence ();
+
+ /* NOTE(review): arg_pointer_save_area_init is tested above but not
+ set anywhere in this function; confirm a caller sets it, otherwise
+ this save sequence could be emitted more than once. */
+ push_topmost_sequence ();
+ emit_insn_after (seq, get_insns ());
+ pop_topmost_sequence ();
+ }
+
+ return ret;
+}
\f
/* Extend a vector that records the INSN_UIDs of INSNS (either a
sequence or a single insn). */
rtx insn;
varray_type vec;
{
- register int i, j;
+ int i, j;
if (GET_CODE (insn) == INSN
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
end = emit_jump_insn_after (gen_return (), bb->end);
if (line_note)
emit_line_note_after (NOTE_SOURCE_FILE (line_note),
- NOTE_LINE_NUMBER (line_note), bb->end);
-
- while (1)
- {
- set_block_for_insn (p, bb);
- if (p == bb->end)
- break;
- p = PREV_INSN (p);
- }
- bb->end = end;
+ NOTE_LINE_NUMBER (line_note), PREV_INSN (bb->end));
}
#endif /* HAVE_return */
else
sp_modified_unknown = 1;
- /* Don't allow the SP modification to happen. */
+ /* Don't allow the SP modification to happen. We don't call
+ delete_insn here since INSN isn't in any chain. */
PUT_CODE (insn, NOTE);
NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
NOTE_SOURCE_FILE (insn) = 0;
{
int inserted = 0;
edge e;
+#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
rtx seq;
+#endif
#ifdef HAVE_prologue
rtx prologue_end = NULL_RTX;
#endif
seq = gen_sequence ();
end_sequence ();
- /* If optimization is off, and perhaps in an empty function,
- the entry block will have no successors. */
- if (ENTRY_BLOCK_PTR->succ)
- {
- /* Can't deal with multiple successsors of the entry block. */
- if (ENTRY_BLOCK_PTR->succ->succ_next)
- abort ();
+ /* Can't deal with multiple successors of the entry block
+ at the moment. The function should always have at least one
+ entry point. */
+ if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
+ abort ();
- insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
- inserted = 1;
- }
- else
- emit_insn_after (seq, f);
+ insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
+ inserted = 1;
}
#endif
if (simplejump_p (jump))
{
emit_return_into_block (bb, epilogue_line_note);
- flow_delete_insn (jump);
+ delete_insn (jump);
}
/* If we have a conditional jump, we can try to replace
emit_barrier_after (last->end);
emit_return_into_block (last, epilogue_line_note);
epilogue_end = last->end;
+ last->succ->flags &= ~EDGE_FALLTHRU;
goto epilogue_done;
}
}
i = PREV_INSN (insn);
newinsn = emit_insn_before (seq, insn);
- /* Update the UID to basic block map. */
- for (i = NEXT_INSN (i); i != insn; i = NEXT_INSN (i))
- set_block_for_insn (i, bb);
-
/* Retain a map of the epilogue insns. Used in life analysis to
avoid getting rid of sibcall epilogue insns. */
record_insns (GET_CODE (seq) == SEQUENCE
if ((len = VARRAY_SIZE (prologue)) > 0)
{
- register rtx insn, note = 0;
+ rtx insn, note = 0;
/* Scan from the beginning until we reach the last prologue insn.
We apparently can't depend on basic_block_{head,end} after
if ((len = VARRAY_SIZE (epilogue)) > 0)
{
- register rtx insn, note = 0;
+ rtx insn, note = 0;
/* Scan from the end until we reach the first epilogue insn.
We apparently can't depend on basic_block_{head,end} after
#endif /* HAVE_prologue or HAVE_epilogue */
}
-/* Mark T for GC. */
-
-static void
-mark_temp_slot (t)
- struct temp_slot *t;
-{
- while (t)
- {
- ggc_mark_rtx (t->slot);
- ggc_mark_rtx (t->address);
- ggc_mark_tree (t->rtl_expr);
- ggc_mark_tree (t->type);
-
- t = t->next;
- }
-}
-
/* Mark P for GC. */
static void
mark_function_status (p)
struct function *p;
{
+ struct var_refs_queue *q;
+ struct temp_slot *t;
int i;
rtx *r;
ggc_mark_rtx (p->epilogue_delay_list);
ggc_mark_rtx (p->x_clobber_return_insn);
- mark_temp_slot (p->x_temp_slots);
+ for (t = p->x_temp_slots; t != 0; t = t->next)
+ {
+ ggc_mark (t);
+ ggc_mark_rtx (t->slot);
+ ggc_mark_rtx (t->address);
+ ggc_mark_tree (t->rtl_expr);
+ ggc_mark_tree (t->type);
+ }
- {
- struct var_refs_queue *q = p->fixup_var_refs_queue;
- while (q)
- {
- ggc_mark_rtx (q->modified);
- q = q->next;
+ for (q = p->fixup_var_refs_queue; q != 0; q = q->next)
+ {
+ ggc_mark (q);
+ ggc_mark_rtx (q->modified);
}
- }
ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
mark_hard_reg_initial_vals (p);
}
-/* Mark the function chain ARG (which is really a struct function **)
- for GC. */
+/* Mark the struct function pointed to by *ARG for GC, if it is not
+ NULL. This is used to mark the current function and the outer
+ function chain. */
static void
-mark_function_chain (arg)
+maybe_mark_struct_function (arg)
void *arg;
{
struct function *f = *(struct function **) arg;
- for (; f; f = f->next_global)
- {
- ggc_mark_tree (f->decl);
-
- mark_function_status (f);
- mark_eh_status (f->eh);
- mark_stmt_status (f->stmt);
- mark_expr_status (f->expr);
- mark_emit_status (f->emit);
- mark_varasm_status (f->varasm);
-
- if (mark_machine_status)
- (*mark_machine_status) (f);
- if (mark_lang_status)
- (*mark_lang_status) (f);
-
- if (f->original_arg_vector)
- ggc_mark_rtvec ((rtvec) f->original_arg_vector);
- if (f->original_decl_initial)
- ggc_mark_tree (f->original_decl_initial);
- }
+ if (f == 0)
+ return;
+
+ /* This also marks the chain of enclosing functions via f->outer. */
+ ggc_mark_struct_function (f);
+}
+
+/* Mark a struct function * for GC. This is called from ggc-common.c. */
+
+void
+ggc_mark_struct_function (f)
+ struct function *f;
+{
+ ggc_mark (f);
+ ggc_mark_tree (f->decl);
+
+ /* Mark each of F's per-pass substructures. */
+ mark_function_status (f);
+ mark_eh_status (f->eh);
+ mark_stmt_status (f->stmt);
+ mark_expr_status (f->expr);
+ mark_emit_status (f->emit);
+ mark_varasm_status (f->varasm);
+
+ /* Target- and language-specific hooks, when registered. */
+ if (mark_machine_status)
+ (*mark_machine_status) (f);
+ if (mark_lang_status)
+ (*mark_lang_status) (f);
+
+ if (f->original_arg_vector)
+ ggc_mark_rtvec ((rtvec) f->original_arg_vector);
+ if (f->original_decl_initial)
+ ggc_mark_tree (f->original_decl_initial);
+ /* Recurse so the whole chain of enclosing functions is marked. */
+ if (f->outer)
+ ggc_mark_struct_function (f->outer);
}
/* Called once, at initialization, to initialize function.c. */
void
init_function_once ()
{
- ggc_add_root (&all_functions, 1, sizeof all_functions,
- mark_function_chain);
+ ggc_add_root (&cfun, 1, sizeof cfun, maybe_mark_struct_function);
+ ggc_add_root (&outer_function_chain, 1, sizeof outer_function_chain,
+ maybe_mark_struct_function);
VARRAY_INT_INIT (prologue, 0, "prologue");
VARRAY_INT_INIT (epilogue, 0, "epilogue");