/* Data flow analysis for GNU compiler.
- Copyright (C) 1987, 1988, 1992 Free Software Foundation, Inc.
+ Copyright (C) 1987, 88, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.
This file is part of GNU CC.
You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING. If not, write to
-the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
+the Free Software Foundation, 59 Temple Place - Suite 330,
+Boston, MA 02111-1307, USA. */
/* This file contains the data flow analysis pass of the compiler.
This is set up by find_basic_blocks and used there and in life_analysis,
and then freed. */
-static short *uid_block_number;
+static int *uid_block_number;
/* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile. */
This information remains valid for the rest of the compilation
of the current function; it is used to control register allocation. */
-short *reg_basic_block;
+int *reg_basic_block;
/* Indexed by n, gives number of times (REG n) is used or set, each
weighted by its loop-depth.
int *reg_n_refs;
+/* Indexed by N; says whether a pseudo register N was ever used
+ within a SUBREG that changes the size of the reg. Some machines prohibit
+ such objects to be in certain (usually floating-point) registers. */
+
+char *reg_changes_size;
+
/* Indexed by N, gives number of places register N dies.
This information remains valid for the rest of the compilation
of the current function; it is used to control register allocation. */
-1 is used to mark a pseudo reg which has a constant or memory equivalent
and is used infrequently enough that it should not get a hard register.
-2 is used to mark a pseudo reg for a parameter, when a frame pointer
- is not required. global-alloc.c makes an allocno for this but does
+ is not required. global.c makes an allocno for this but does
not try to assign a hard register to it. */
int *reg_live_length;
static HARD_REG_SET elim_reg_set;
/* Forward declarations */
-static void find_basic_blocks ();
-static void life_analysis ();
-static void mark_label_ref ();
-void allocate_for_life_analysis (); /* Used also in stupid_life_analysis */
-static void init_regset_vector ();
-static void propagate_block ();
-static void mark_set_regs ();
-static void mark_used_regs ();
-static int insn_dead_p ();
-static int libcall_dead_p ();
-static int try_pre_increment ();
-static int try_pre_increment_1 ();
-static rtx find_use_as_address ();
-void dump_flow_info ();
+static void find_basic_blocks PROTO((rtx, rtx));
+static int jmp_uses_reg_or_mem PROTO((rtx));
+static void mark_label_ref PROTO((rtx, rtx, int));
+static void life_analysis PROTO((rtx, int));
+void allocate_for_life_analysis PROTO((void));
+static void init_regset_vector PROTO((regset *, regset, int, int));
+static void propagate_block PROTO((regset, rtx, rtx, int,
+ regset, int));
+static rtx flow_delete_insn PROTO((rtx));
+static int insn_dead_p PROTO((rtx, regset, int));
+static int libcall_dead_p PROTO((rtx, regset, rtx, rtx));
+static void mark_set_regs PROTO((regset, regset, rtx,
+ rtx, regset));
+static void mark_set_1 PROTO((regset, regset, rtx,
+ rtx, regset));
+static void find_auto_inc PROTO((regset, rtx, rtx));
+static void mark_used_regs PROTO((regset, regset, rtx, int, rtx));
+static int try_pre_increment_1 PROTO((rtx));
+static int try_pre_increment PROTO((rtx, rtx, HOST_WIDE_INT));
+static rtx find_use_as_address PROTO((rtx, rtx, HOST_WIDE_INT));
+void dump_flow_info PROTO((FILE *));
\f
/* Find basic blocks of the current function and perform data flow analysis.
F is the first insn of the function and NREGS the number of register numbers
basic_block_drops_in = (char *) alloca (n_basic_blocks);
basic_block_loop_depth = (short *) alloca (n_basic_blocks * sizeof (short));
uid_block_number
- = (short *) alloca ((max_uid_for_flow + 1) * sizeof (short));
+ = (int *) alloca ((max_uid_for_flow + 1) * sizeof (int));
uid_volatile = (char *) alloca (max_uid_for_flow + 1);
bzero (uid_volatile, max_uid_for_flow + 1);
register char *block_marked = (char *) alloca (n_basic_blocks);
/* List of label_refs to all labels whose addresses are taken
and used as data. */
- rtx label_value_list = 0;
+ rtx label_value_list;
+ rtx x, note;
+ enum rtx_code prev_code, code;
+ int depth, pass;
+ pass = 1;
+ restart:
+
+ label_value_list = 0;
block_live_static = block_live;
bzero (block_live, n_basic_blocks);
bzero (block_marked, n_basic_blocks);
if (n_basic_blocks > 0)
block_live[0] = 1;
- /* Initialize the ref chain of each label to 0. */
- /* Record where all the blocks start and end and their depth in loops. */
- /* For each insn, record the block it is in. */
- /* Also mark as reachable any blocks headed by labels that
- must not be deleted. */
+ /* Initialize the ref chain of each label to 0. Record where all the
+ blocks start and end and their depth in loops. For each insn, record
+ the block it is in. Also mark as reachable any blocks headed by labels
+ that must not be deleted. */
- {
- register RTX_CODE prev_code = JUMP_INSN;
- register RTX_CODE code;
- int depth = 1;
+ for (insn = f, i = -1, prev_code = JUMP_INSN, depth = 1;
+ insn; insn = NEXT_INSN (insn))
+ {
+ code = GET_CODE (insn);
+ if (code == NOTE)
+ {
+ if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
+ depth++;
+ else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
+ depth--;
+ }
- for (insn = f, i = -1; insn; insn = NEXT_INSN (insn))
- {
- code = GET_CODE (insn);
- if (code == NOTE)
- {
- if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
- depth++;
- else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
- depth--;
- }
- /* A basic block starts at label, or after something that can jump. */
- else if (code == CODE_LABEL
- || (GET_RTX_CLASS (code) == 'i'
- && (prev_code == JUMP_INSN
- || (prev_code == CALL_INSN
- && nonlocal_label_list != 0)
- || prev_code == BARRIER)))
- {
- basic_block_head[++i] = insn;
- basic_block_end[i] = insn;
- basic_block_loop_depth[i] = depth;
- if (code == CODE_LABEL)
- {
+ /* A basic block starts at label, or after something that can jump. */
+ else if (code == CODE_LABEL
+ || (GET_RTX_CLASS (code) == 'i'
+ && (prev_code == JUMP_INSN
+ || (prev_code == CALL_INSN
+ && nonlocal_label_list != 0)
+ || prev_code == BARRIER)))
+ {
+ basic_block_head[++i] = insn;
+ basic_block_end[i] = insn;
+ basic_block_loop_depth[i] = depth;
+
+ if (code == CODE_LABEL)
+ {
LABEL_REFS (insn) = insn;
/* Any label that cannot be deleted
is considered to start a reachable block. */
if (LABEL_PRESERVE_P (insn))
block_live[i] = 1;
}
- }
- else if (GET_RTX_CLASS (code) == 'i')
- {
- basic_block_end[i] = insn;
- basic_block_loop_depth[i] = depth;
- }
+ }
- /* Make a list of all labels referred to other than by jumps. */
- if (code == INSN || code == CALL_INSN)
- {
- rtx note = find_reg_note (insn, REG_LABEL, NULL_RTX);
- if (note != 0)
+ else if (GET_RTX_CLASS (code) == 'i')
+ {
+ basic_block_end[i] = insn;
+ basic_block_loop_depth[i] = depth;
+ }
+
+ if (GET_RTX_CLASS (code) == 'i')
+ {
+ /* Make a list of all labels referred to other than by jumps. */
+ for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
+ if (REG_NOTE_KIND (note) == REG_LABEL)
label_value_list = gen_rtx (EXPR_LIST, VOIDmode, XEXP (note, 0),
label_value_list);
- }
+ }
- BLOCK_NUM (insn) = i;
+ BLOCK_NUM (insn) = i;
- /* Don't separate a CALL_INSN from following CLOBBER insns. This is
- a kludge that will go away when each CALL_INSN records its
- USE and CLOBBERs. */
+ if (code != NOTE)
+ prev_code = code;
+ }
- if (code != NOTE
- && ! (prev_code == CALL_INSN && code == INSN
- && GET_CODE (PATTERN (insn)) == CLOBBER))
- prev_code = code;
- }
- if (i + 1 != n_basic_blocks)
- abort ();
- }
+ /* During the second pass, `n_basic_blocks' is only an upper bound.
+ Only perform the sanity check for the first pass, and on the second
+ pass ensure `n_basic_blocks' is set to the correct value. */
+ if (pass == 1 && i + 1 != n_basic_blocks)
+ abort ();
+ n_basic_blocks = i + 1;
- /* Don't delete the labels that are referenced by non-jump instructions. */
- {
- register rtx x;
- for (x = label_value_list; x; x = XEXP (x, 1))
+ /* Don't delete the labels (in this function)
+ that are referenced by non-jump instructions. */
+
+ for (x = label_value_list; x; x = XEXP (x, 1))
+ if (! LABEL_REF_NONLOCAL_P (x))
+ block_live[BLOCK_NUM (XEXP (x, 0))] = 1;
+
+ for (x = forced_labels; x; x = XEXP (x, 1))
+ if (! LABEL_REF_NONLOCAL_P (x))
block_live[BLOCK_NUM (XEXP (x, 0))] = 1;
- }
/* Record which basic blocks control can drop in to. */
- {
- register int i;
- for (i = 0; i < n_basic_blocks; i++)
- {
- register rtx insn = PREV_INSN (basic_block_head[i]);
- /* TEMP1 is used to avoid a bug in Sequent's compiler. */
- register int temp1;
- while (insn && GET_CODE (insn) == NOTE)
- insn = PREV_INSN (insn);
- temp1 = insn && GET_CODE (insn) != BARRIER;
- basic_block_drops_in[i] = temp1;
- }
- }
+ for (i = 0; i < n_basic_blocks; i++)
+ {
+ for (insn = PREV_INSN (basic_block_head[i]);
+ insn && GET_CODE (insn) == NOTE; insn = PREV_INSN (insn))
+ ;
+
+ basic_block_drops_in[i] = insn && GET_CODE (insn) != BARRIER;
+ }
/* Now find which basic blocks can actually be reached
and put all jump insns' LABEL_REFS onto the ref-chains
if (n_basic_blocks > 0)
{
int something_marked = 1;
+ int deleted;
- /* Find all indirect jump insns and mark them as possibly jumping
- to all the labels whose addresses are explicitly used.
- This is because, when there are computed gotos,
- we can't tell which labels they jump to, of all the possibilities. */
+ /* Find all indirect jump insns and mark them as possibly jumping to all
+ the labels whose addresses are explicitly used. This is because,
+ when there are computed gotos, we can't tell which labels they jump
+ to, of all the possibilities.
+
+ Tablejumps and casesi insns are OK and we can recognize them by
+ a (use (label_ref)). */
for (insn = f; insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == JUMP_INSN
- && GET_CODE (PATTERN (insn)) == SET
- && SET_DEST (PATTERN (insn)) == pc_rtx
- && (GET_CODE (SET_SRC (PATTERN (insn))) == REG
- || GET_CODE (SET_SRC (PATTERN (insn))) == MEM))
+ if (GET_CODE (insn) == JUMP_INSN)
{
- rtx x;
- for (x = label_value_list; x; x = XEXP (x, 1))
- mark_label_ref (gen_rtx (LABEL_REF, VOIDmode, XEXP (x, 0)),
- insn, 0);
- for (x = forced_labels; x; x = XEXP (x, 1))
- mark_label_ref (gen_rtx (LABEL_REF, VOIDmode, XEXP (x, 0)),
+ rtx pat = PATTERN (insn);
+ int computed_jump = 0;
+
+ if (GET_CODE (pat) == PARALLEL)
+ {
+ int len = XVECLEN (pat, 0);
+ int has_use_labelref = 0;
+
+ for (i = len - 1; i >= 0; i--)
+ if (GET_CODE (XVECEXP (pat, 0, i)) == USE
+ && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
+ == LABEL_REF))
+ has_use_labelref = 1;
+
+ if (! has_use_labelref)
+ for (i = len - 1; i >= 0; i--)
+ if (GET_CODE (XVECEXP (pat, 0, i)) == SET
+ && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
+ && jmp_uses_reg_or_mem (SET_SRC (XVECEXP (pat, 0, i))))
+ computed_jump = 1;
+ }
+ else if (GET_CODE (pat) == SET
+ && SET_DEST (pat) == pc_rtx
+ && jmp_uses_reg_or_mem (SET_SRC (pat)))
+ computed_jump = 1;
+
+ if (computed_jump)
+ {
+ for (x = label_value_list; x; x = XEXP (x, 1))
+ mark_label_ref (gen_rtx (LABEL_REF, VOIDmode, XEXP (x, 0)),
+ insn, 0);
+
+ for (x = forced_labels; x; x = XEXP (x, 1))
+ mark_label_ref (gen_rtx (LABEL_REF, VOIDmode, XEXP (x, 0)),
insn, 0);
+ }
}
/* Find all call insns and mark them as possibly jumping
for (insn = f; insn; insn = NEXT_INSN (insn))
if (GET_CODE (insn) == CALL_INSN)
{
- rtx x;
for (x = nonlocal_label_list; x; x = XEXP (x, 1))
mark_label_ref (gen_rtx (LABEL_REF, VOIDmode, XEXP (x, 0)),
insn, 0);
+
/* ??? This could be made smarter:
in some cases it's possible to tell that certain
calls will not do a nonlocal goto.
}
}
+ /* ??? See if we have a "live" basic block that is not reachable.
+ This can happen if it is headed by a label that is preserved or
+ in one of the label lists, but no call or computed jump is in
+ the loop. It's not clear if we can delete the block or not,
+ but don't for now. However, we will mess up register status if
+ it remains unreachable, so add a fake reachability from the
+ previous block. */
+
+ for (i = 1; i < n_basic_blocks; i++)
+ if (block_live[i] && ! basic_block_drops_in[i]
+ && GET_CODE (basic_block_head[i]) == CODE_LABEL
+ && LABEL_REFS (basic_block_head[i]) == basic_block_head[i])
+ basic_block_drops_in[i] = 1;
+
/* Now delete the code for any basic blocks that can't be reached.
They can occur because jump_optimize does not recognize
unreachable loops as unreachable. */
+ deleted = 0;
for (i = 0; i < n_basic_blocks; i++)
if (!block_live[i])
{
+ deleted++;
+
+ /* Delete the insns in a (non-live) block. We physically delete
+ every non-note insn except the start and end (so
+ basic_block_head/end needn't be updated), we turn the latter
+ into NOTE_INSN_DELETED notes.
+ We used to "delete" the insns by turning them into notes, but
+ we may be deleting lots of insns that subsequent passes would
+ otherwise have to process. Secondly, lots of deleted blocks in
+ a row can really slow down propagate_block since it will
+ otherwise process insn-turned-notes multiple times when it
+ looks for loop begin/end notes. */
+ if (basic_block_head[i] != basic_block_end[i])
+ {
+ /* It would be quicker to delete all of these with a single
+ unchaining, rather than one at a time, but we need to keep
+ the NOTE's. */
+ insn = NEXT_INSN (basic_block_head[i]);
+ while (insn != basic_block_end[i])
+ {
+ if (GET_CODE (insn) == BARRIER)
+ abort ();
+ else if (GET_CODE (insn) != NOTE)
+ insn = flow_delete_insn (insn);
+ else
+ insn = NEXT_INSN (insn);
+ }
+ }
insn = basic_block_head[i];
- while (1)
+ if (GET_CODE (insn) != NOTE)
{
+ /* Turn the head into a deleted insn note. */
if (GET_CODE (insn) == BARRIER)
abort ();
- if (GET_CODE (insn) != NOTE)
- {
- PUT_CODE (insn, NOTE);
- NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
- NOTE_SOURCE_FILE (insn) = 0;
- }
- if (insn == basic_block_end[i])
- {
- /* BARRIERs are between basic blocks, not part of one.
- Delete a BARRIER if the preceding jump is deleted.
- We cannot alter a BARRIER into a NOTE
- because it is too short; but we can really delete
- it because it is not part of a basic block. */
- if (NEXT_INSN (insn) != 0
- && GET_CODE (NEXT_INSN (insn)) == BARRIER)
- delete_insn (NEXT_INSN (insn));
- break;
- }
- insn = NEXT_INSN (insn);
+ PUT_CODE (insn, NOTE);
+ NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
+ NOTE_SOURCE_FILE (insn) = 0;
+ }
+ insn = basic_block_end[i];
+ if (GET_CODE (insn) != NOTE)
+ {
+ /* Turn the tail into a deleted insn note. */
+ if (GET_CODE (insn) == BARRIER)
+ abort ();
+ PUT_CODE (insn, NOTE);
+ NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
+ NOTE_SOURCE_FILE (insn) = 0;
}
+ /* BARRIERs are between basic blocks, not part of one.
+ Delete a BARRIER if the preceding jump is deleted.
+ We cannot alter a BARRIER into a NOTE
+ because it is too short; but we can really delete
+ it because it is not part of a basic block. */
+ if (NEXT_INSN (insn) != 0
+ && GET_CODE (NEXT_INSN (insn)) == BARRIER)
+ delete_insn (NEXT_INSN (insn));
+
/* Each time we delete some basic blocks,
see if there is a jump around them that is
being turned into a no-op. If so, delete it. */
if (block_live[i - 1])
{
register int j;
- for (j = i; j < n_basic_blocks; j++)
+ for (j = i + 1; j < n_basic_blocks; j++)
if (block_live[j])
{
rtx label;
}
}
}
+
+ /* There are pathological cases where one function calling hundreds of
+ nested inline functions can generate lots and lots of unreachable
+ blocks that jump can't delete. Since we don't use sparse matrices
+ a lot of memory will be needed to compile such functions.
+ Implementing sparse matrices is a fair bit of work and it is not
+ clear that they win more than they lose (we don't want to
+ unnecessarily slow down compilation of normal code). By making
+ another pass for the pathological case, we can greatly speed up
+ their compilation without hurting normal code. This works because
+ all the insns in the unreachable blocks have either been deleted or
+ turned into notes.
+ Note that we're talking about reducing memory usage by 10's of
+ megabytes and reducing compilation time by several minutes. */
+ /* ??? The choice of when to make another pass is a bit arbitrary,
+ and was derived from empirical data. */
+ if (pass == 1
+ && deleted > 200)
+ {
+ pass++;
+ n_basic_blocks -= deleted;
+ /* `n_basic_blocks' may not be correct at this point: two previously
+ separate blocks may now be merged. That's ok though as we
+ recalculate it during the second pass. It certainly can't be
+ any larger than the current value. */
+ goto restart;
+ }
}
}
\f
+/* Subroutines of find_basic_blocks. */
+
+/* Return 1 if X, the SET_SRC of a SET of (pc), contains a REG or MEM that is
+   not in the constant pool and not in the condition of an IF_THEN_ELSE. */
+
+static int
+jmp_uses_reg_or_mem (x)
+ rtx x;
+{
+ enum rtx_code code = GET_CODE (x);
+ int i, j;
+ char *fmt;
+
+ switch (code)
+ {
+ case CONST:
+ case LABEL_REF:
+ case PC:
+ return 0;
+
+ case REG:
+ return 1;
+
+ case MEM:
+ return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
+ && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
+
+ case IF_THEN_ELSE:
+ return (jmp_uses_reg_or_mem (XEXP (x, 1))
+ || jmp_uses_reg_or_mem (XEXP (x, 2)));
+
+ case PLUS: case MINUS: case MULT:
+ return (jmp_uses_reg_or_mem (XEXP (x, 0))
+ || jmp_uses_reg_or_mem (XEXP (x, 1)));
+ }
+
+ fmt = GET_RTX_FORMAT (code);
+ for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
+ {
+ if (fmt[i] == 'e'
+ && jmp_uses_reg_or_mem (XEXP (x, i)))
+ return 1;
+
+ if (fmt[i] == 'E')
+ for (j = 0; j < XVECLEN (x, i); j++)
+ if (jmp_uses_reg_or_mem (XVECEXP (x, i, j)))
+ return 1;
+ }
+
+ return 0;
+}
+
/* Check expression X for label references;
if one is found, add INSN to the label's chain of references.
}
}
}
+
+/* Delete INSN by patching it out.
+ Return the next insn. */
+
+static rtx
+flow_delete_insn (insn)
+ rtx insn;
+{
+ /* ??? For the moment we assume we don't have to watch for NULLs here
+ since the start/end of basic blocks aren't deleted like this. */
+ NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
+ PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
+ return NEXT_INSN (insn);
+}
\f
/* Determine which registers are live at the start of each
basic block of the function whose first insn is F.
allocate_for_life_analysis ();
reg_next_use = (rtx *) alloca (nregs * sizeof (rtx));
- bzero (reg_next_use, nregs * sizeof (rtx));
+ bzero ((char *) reg_next_use, nregs * sizeof (rtx));
/* Set up several regset-vectors used internally within this function.
Their meanings are documented above, with their declarations. */
- basic_block_live_at_end = (regset *) alloca (n_basic_blocks * sizeof (regset));
+ basic_block_live_at_end
+ = (regset *) alloca (n_basic_blocks * sizeof (regset));
+
/* Don't use alloca since that leads to a crash rather than an error message
if there isn't enough space.
Don't use oballoc since we may need to allocate other things during
this function on the temporary obstack. */
tem = (regset) obstack_alloc (&flow_obstack, n_basic_blocks * regset_bytes);
- bzero (tem, n_basic_blocks * regset_bytes);
- init_regset_vector (basic_block_live_at_end, tem, n_basic_blocks, regset_bytes);
+ bzero ((char *) tem, n_basic_blocks * regset_bytes);
+ init_regset_vector (basic_block_live_at_end, tem,
+ n_basic_blocks, regset_bytes);
- basic_block_new_live_at_end = (regset *) alloca (n_basic_blocks * sizeof (regset));
+ basic_block_new_live_at_end
+ = (regset *) alloca (n_basic_blocks * sizeof (regset));
tem = (regset) obstack_alloc (&flow_obstack, n_basic_blocks * regset_bytes);
- bzero (tem, n_basic_blocks * regset_bytes);
- init_regset_vector (basic_block_new_live_at_end, tem, n_basic_blocks, regset_bytes);
+ bzero ((char *) tem, n_basic_blocks * regset_bytes);
+ init_regset_vector (basic_block_new_live_at_end, tem,
+ n_basic_blocks, regset_bytes);
- basic_block_significant = (regset *) alloca (n_basic_blocks * sizeof (regset));
+ basic_block_significant
+ = (regset *) alloca (n_basic_blocks * sizeof (regset));
tem = (regset) obstack_alloc (&flow_obstack, n_basic_blocks * regset_bytes);
- bzero (tem, n_basic_blocks * regset_bytes);
- init_regset_vector (basic_block_significant, tem, n_basic_blocks, regset_bytes);
+ bzero ((char *) tem, n_basic_blocks * regset_bytes);
+ init_regset_vector (basic_block_significant, tem,
+ n_basic_blocks, regset_bytes);
/* Record which insns refer to any volatile memory
or for any reason can't be deleted just because they are dead stores.
consider the stack pointer live at the end of the function. */
basic_block_live_at_end[n_basic_blocks - 1]
[STACK_POINTER_REGNUM / REGSET_ELT_BITS]
- |= 1 << (STACK_POINTER_REGNUM % REGSET_ELT_BITS);
+ |= (REGSET_ELT_TYPE) 1 << (STACK_POINTER_REGNUM % REGSET_ELT_BITS);
basic_block_new_live_at_end[n_basic_blocks - 1]
[STACK_POINTER_REGNUM / REGSET_ELT_BITS]
- |= 1 << (STACK_POINTER_REGNUM % REGSET_ELT_BITS);
+ |= (REGSET_ELT_TYPE) 1 << (STACK_POINTER_REGNUM % REGSET_ELT_BITS);
+ }
+
+ /* Mark the frame pointer as needed at the end of the function. If
+ we end up eliminating it, it will be removed from the live list
+ of each basic block by reload. */
+
+ if (n_basic_blocks > 0)
+ {
+ basic_block_live_at_end[n_basic_blocks - 1]
+ [FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
+ |= (REGSET_ELT_TYPE) 1 << (FRAME_POINTER_REGNUM % REGSET_ELT_BITS);
+ basic_block_new_live_at_end[n_basic_blocks - 1]
+ [FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
+ |= (REGSET_ELT_TYPE) 1 << (FRAME_POINTER_REGNUM % REGSET_ELT_BITS);
+#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
+ /* If they are different, also mark the hard frame pointer as live */
+ basic_block_live_at_end[n_basic_blocks - 1]
+ [HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
+ |= (REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
+ % REGSET_ELT_BITS);
+ basic_block_new_live_at_end[n_basic_blocks - 1]
+ [HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
+ |= (REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
+ % REGSET_ELT_BITS);
+#endif
}
/* Mark all global registers as being live at the end of the function
if (global_regs[i])
{
basic_block_live_at_end[n_basic_blocks - 1]
- [i / REGSET_ELT_BITS] |= 1 << (i % REGSET_ELT_BITS);
+ [i / REGSET_ELT_BITS]
+ |= (REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS);
basic_block_new_live_at_end[n_basic_blocks - 1]
- [i / REGSET_ELT_BITS] |= 1 << (i % REGSET_ELT_BITS);
+ [i / REGSET_ELT_BITS]
+ |= (REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS);
}
/* Propagate life info through the basic blocks
{
/* Update the basic_block_live_at_start
by propagation backwards through the block. */
- bcopy (basic_block_new_live_at_end[i],
- basic_block_live_at_end[i], regset_bytes);
- bcopy (basic_block_live_at_end[i],
- basic_block_live_at_start[i], regset_bytes);
+ bcopy ((char *) basic_block_new_live_at_end[i],
+ (char *) basic_block_live_at_end[i], regset_bytes);
+ bcopy ((char *) basic_block_live_at_end[i],
+ (char *) basic_block_live_at_start[i], regset_bytes);
propagate_block (basic_block_live_at_start[i],
basic_block_head[i], basic_block_end[i], 0,
first_pass ? basic_block_significant[i]
{
register rtx jump, head;
+
/* Update the basic_block_new_live_at_end's of the block
that falls through into this one (if any). */
head = basic_block_head[i];
- jump = PREV_INSN (head);
if (basic_block_drops_in[i])
{
- register int from_block = BLOCK_NUM (jump);
register int j;
for (j = 0; j < regset_size; j++)
- basic_block_new_live_at_end[from_block][j]
+ basic_block_new_live_at_end[i-1][j]
|= basic_block_live_at_start[i][j];
}
+
/* Update the basic_block_new_live_at_end's of
all the blocks that jump to this one. */
if (GET_CODE (head) == CODE_LABEL)
regset_bytes = regset_size * sizeof (*(regset)0);
reg_n_refs = (int *) oballoc (max_regno * sizeof (int));
- bzero (reg_n_refs, max_regno * sizeof (int));
+ bzero ((char *) reg_n_refs, max_regno * sizeof (int));
reg_n_sets = (short *) oballoc (max_regno * sizeof (short));
- bzero (reg_n_sets, max_regno * sizeof (short));
+ bzero ((char *) reg_n_sets, max_regno * sizeof (short));
reg_n_deaths = (short *) oballoc (max_regno * sizeof (short));
- bzero (reg_n_deaths, max_regno * sizeof (short));
+ bzero ((char *) reg_n_deaths, max_regno * sizeof (short));
+
+ reg_changes_size = (char *) oballoc (max_regno * sizeof (char));
+ bzero (reg_changes_size, max_regno * sizeof (char));;
reg_live_length = (int *) oballoc (max_regno * sizeof (int));
- bzero (reg_live_length, max_regno * sizeof (int));
+ bzero ((char *) reg_live_length, max_regno * sizeof (int));
reg_n_calls_crossed = (int *) oballoc (max_regno * sizeof (int));
- bzero (reg_n_calls_crossed, max_regno * sizeof (int));
+ bzero ((char *) reg_n_calls_crossed, max_regno * sizeof (int));
- reg_basic_block = (short *) oballoc (max_regno * sizeof (short));
+ reg_basic_block = (int *) oballoc (max_regno * sizeof (int));
for (i = 0; i < max_regno; i++)
reg_basic_block[i] = REG_BLOCK_UNKNOWN;
- basic_block_live_at_start = (regset *) oballoc (n_basic_blocks * sizeof (regset));
+ basic_block_live_at_start
+ = (regset *) oballoc (n_basic_blocks * sizeof (regset));
tem = (regset) oballoc (n_basic_blocks * regset_bytes);
- bzero (tem, n_basic_blocks * regset_bytes);
- init_regset_vector (basic_block_live_at_start, tem, n_basic_blocks, regset_bytes);
+ bzero ((char *) tem, n_basic_blocks * regset_bytes);
+ init_regset_vector (basic_block_live_at_start, tem,
+ n_basic_blocks, regset_bytes);
regs_live_at_setjmp = (regset) oballoc (regset_bytes);
- bzero (regs_live_at_setjmp, regset_bytes);
+ bzero ((char *) regs_live_at_setjmp, regset_bytes);
}
/* Make each element of VECTOR point at a regset,
p += bytes_per_elt / sizeof (*p);
}
}
-\f
+
/* Compute the registers live at the beginning of a basic block
from those live at the end.
num_scratch = 0;
maxlive = (regset) alloca (regset_bytes);
- bcopy (old, maxlive, regset_bytes);
+ bcopy ((char *) old, (char *) maxlive, regset_bytes);
regs_sometimes_live
= (struct sometimes *) alloca (max_regno * sizeof (struct sometimes));
{
prev = PREV_INSN (insn);
- /* Look for loop boundaries, remembering that we are going backwards. */
- if (GET_CODE (insn) == NOTE
- && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
- loop_depth++;
- else if (GET_CODE (insn) == NOTE
- && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
- loop_depth--;
-
- /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
- Abort now rather than setting register status incorrectly. */
- if (loop_depth == 0)
- abort ();
-
- /* If this is a call to `setjmp' et al,
- warn if any non-volatile datum is live. */
-
- if (final && GET_CODE (insn) == NOTE
- && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
+ if (GET_CODE (insn) == NOTE)
{
- int i;
- for (i = 0; i < regset_size; i++)
- regs_live_at_setjmp[i] |= old[i];
+ /* Look for loop boundaries, remembering that we are going
+ backwards. */
+ if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
+ loop_depth++;
+ else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
+ loop_depth--;
+
+ /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
+ Abort now rather than setting register status incorrectly. */
+ if (loop_depth == 0)
+ abort ();
+
+ /* If this is a call to `setjmp' et al,
+ warn if any non-volatile datum is live. */
+
+ if (final && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
+ {
+ int i;
+ for (i = 0; i < regset_size; i++)
+ regs_live_at_setjmp[i] |= old[i];
+ }
}
/* Update the life-status of regs for this insn.
are those live after, with DEAD regs turned off,
and then LIVE regs turned on. */
- if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
+ else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
{
register int i;
rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
{
register int i;
+ rtx note;
+
+ for (note = CALL_INSN_FUNCTION_USAGE (insn);
+ note;
+ note = XEXP (note, 1))
+ if (GET_CODE (XEXP (note, 0)) == USE)
+ mark_used_regs (old, live, SET_DEST (XEXP (note, 0)),
+ final, insn);
+
/* Each call clobbers all call-clobbered regs that are not
- global. Note that the function-value reg is a
+ global or fixed. Note that the function-value reg is a
call-clobbered reg, and mark_set_regs has already had
a chance to handle it. */
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- if (call_used_regs[i] && ! global_regs[i])
+ if (call_used_regs[i] && ! global_regs[i]
+ && ! fixed_regs[i])
dead[i / REGSET_ELT_BITS]
|= ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS));
/* Calls may also reference any of the global registers,
so they are made live. */
-
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (global_regs[i])
- live[i / REGSET_ELT_BITS]
- |= ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS));
+ mark_used_regs (old, live,
+ gen_rtx (REG, reg_raw_mode[i], i),
+ final, insn);
/* Calls also clobber memory. */
last_mem_set = 0;
register struct sometimes *p = regs_sometimes_live;
for (i = 0; i < sometimes_max; i++, p++)
- if (old[p->offset] & (1 << p->bit))
+ if (old[p->offset] & ((REGSET_ELT_TYPE) 1 << p->bit))
reg_n_calls_crossed[p->offset * REGSET_ELT_BITS + p->bit]+= 1;
}
}
register REGSET_ELT_TYPE bit
= (REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS);
+ /* Don't delete insns to set global regs. */
if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
/* Make sure insns to set frame pointer aren't deleted. */
|| regno == FRAME_POINTER_REGNUM
+#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
+ || regno == HARD_FRAME_POINTER_REGNUM
+#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
/* Make sure insns to set arg pointer are never deleted
(if the arg pointer isn't fixed, there will be a USE for
&& GET_CODE (SET_SRC (XVECEXP (call, 0, i))) == CALL)
break;
+ /* This may be a library call that is returning a value
+ via invisible pointer. Do nothing special, since
+ ordinary death handling can understand these insns. */
if (i < 0)
- abort ();
+ return 0;
call = XVECEXP (call, 0, i);
}
/* Return 1 if register REGNO was used before it was set.
In other words, if it is live at function entry.
- Don't count global regster variables, though. */
+ Don't count global register variables, though. */
int
regno_uninitialized (regno)
int regno;
{
- if (n_basic_blocks == 0 || global_regs[regno])
+ if (n_basic_blocks == 0
+ || (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
return 0;
return (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
in propagate_block. In this case, various info about register
usage is stored, LOG_LINKS fields of insns are set up. */
-static void mark_set_1 ();
-
static void
mark_set_regs (needed, dead, x, insn, significant)
regset needed;
if (GET_CODE (reg) == REG
&& (regno = REGNO (reg), regno != FRAME_POINTER_REGNUM)
+#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
+ && regno != HARD_FRAME_POINTER_REGNUM
+#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
&& ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
register int offset = regno / REGSET_ELT_BITS;
register REGSET_ELT_TYPE bit
= (REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS);
- REGSET_ELT_TYPE all_needed = (needed[offset] & bit);
REGSET_ELT_TYPE some_needed = (needed[offset] & bit);
+ REGSET_ELT_TYPE some_not_needed = (~ needed[offset]) & bit;
/* Mark it as a significant register for this basic block. */
if (significant)
n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
while (--n > 0)
{
+ REGSET_ELT_TYPE n_bit
+ = (REGSET_ELT_TYPE) 1 << ((regno + n) % REGSET_ELT_BITS);
+
if (significant)
- significant[(regno + n) / REGSET_ELT_BITS]
- |= (REGSET_ELT_TYPE) 1 << ((regno + n) % REGSET_ELT_BITS);
- dead[(regno + n) / REGSET_ELT_BITS]
- |= (REGSET_ELT_TYPE) 1 << ((regno + n) % REGSET_ELT_BITS);
+ significant[(regno + n) / REGSET_ELT_BITS] |= n_bit;
+
+ dead[(regno + n) / REGSET_ELT_BITS] |= n_bit;
some_needed
- |= (needed[(regno + n) / REGSET_ELT_BITS]
- & (REGSET_ELT_TYPE) 1 << ((regno + n) % REGSET_ELT_BITS));
- all_needed
- &= (needed[(regno + n) / REGSET_ELT_BITS]
- & (REGSET_ELT_TYPE) 1 << ((regno + n) % REGSET_ELT_BITS));
+ |= (needed[(regno + n) / REGSET_ELT_BITS] & n_bit);
+ some_not_needed
+ |= ((~ needed[(regno + n) / REGSET_ELT_BITS]) & n_bit);
}
}
/* Additional data to record if this is the final pass. */
for (i = regno; i < endregno; i++)
{
+ /* The next use is no longer "next", since a store
+ intervenes. */
+ reg_next_use[i] = 0;
+
regs_ever_live[i] = 1;
reg_n_sets[i]++;
}
}
else
{
+ /* The next use is no longer "next", since a store
+ intervenes. */
+ reg_next_use[regno] = 0;
+
/* Keep track of which basic blocks each reg appears in. */
if (reg_basic_block[regno] == REG_BLOCK_UNKNOWN)
reg_live_length[regno]++;
}
- /* The next use is no longer "next", since a store intervenes. */
- reg_next_use[regno] = 0;
-
- if (all_needed)
+ if (! some_not_needed)
{
/* Make a logical link from the next following insn
that uses this register, back to this insn.
<< ((regno + i) % REGSET_ELT_BITS))) == 0)
REG_NOTES (insn)
= gen_rtx (EXPR_LIST, REG_UNUSED,
- gen_rtx (REG, word_mode, regno + i),
+ gen_rtx (REG, reg_raw_mode[regno + i],
+ regno + i),
REG_NOTES (insn));
}
}
}
+ else if (GET_CODE (reg) == REG)
+ reg_next_use[regno] = 0;
/* If this is the last pass and this is a SCRATCH, show it will be dying
here and count it. */
rtx insn;
{
rtx addr = XEXP (x, 0);
- int offset = 0;
+ HOST_WIDE_INT offset = 0;
+ rtx set;
/* Here we detect use of an index register which might be good for
postincrement, postdecrement, preincrement, or predecrement. */
int regno = REGNO (addr);
/* Is the next use an increment that might make auto-increment? */
- incr = reg_next_use[regno];
- if (incr && GET_CODE (PATTERN (incr)) == SET
+ if ((incr = reg_next_use[regno]) != 0
+ && (set = single_set (incr)) != 0
+ && GET_CODE (set) == SET
&& BLOCK_NUM (incr) == BLOCK_NUM (insn)
/* Can't add side effects to jumps; if reg is spilled and
reloaded, there's no way to store back the altered value. */
&& GET_CODE (insn) != JUMP_INSN
- && (y = SET_SRC (PATTERN (incr)), GET_CODE (y) == PLUS)
+ && (y = SET_SRC (set), GET_CODE (y) == PLUS)
&& XEXP (y, 0) == addr
&& GET_CODE (XEXP (y, 1)) == CONST_INT
&& (0
&& (use = find_use_as_address (PATTERN (insn), addr, offset),
use != 0 && use != (rtx) 1))
{
- int win = 0;
- rtx q = SET_DEST (PATTERN (incr));
+ rtx q = SET_DEST (set);
+ enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
+ ? (offset ? PRE_INC : POST_INC)
+ : (offset ? PRE_DEC : POST_DEC));
if (dead_or_set_p (incr, addr))
- win = 1;
- else if (GET_CODE (q) == REG && ! reg_used_between_p (q, insn, incr))
{
- /* We have *p followed by q = p+size.
+ /* This is the simple case. Try to make the auto-inc. If
+ we can't, we are done. Otherwise, we will do any
+ needed updates below. */
+ if (! validate_change (insn, &XEXP (x, 0),
+ gen_rtx (inc_code, Pmode, addr),
+ 0))
+ return;
+ }
+ else if (GET_CODE (q) == REG
+ /* PREV_INSN used here to check the semi-open interval
+ [insn,incr). */
+ && ! reg_used_between_p (q, PREV_INSN (insn), incr)
+ /* We must also check for sets of q as q may be
+ a call clobbered hard register and there may
+ be a call between PREV_INSN (insn) and incr. */
+ && ! reg_set_between_p (q, PREV_INSN (insn), incr))
+ {
+ /* We have *p followed sometime later by q = p+size.
Both p and q must be live afterward,
- and q must be dead before.
+ and q is not used between INSN and its assignment.
Change it to q = p, ...*q..., q = q+size.
Then fall into the usual case. */
rtx insns, temp;
BLOCK_NUM (temp) = BLOCK_NUM (insn);
}
+ /* If we can't make the auto-inc, or can't make the
+ replacement into Y, exit. There's no point in making
+ the change below if we can't do the auto-inc and doing
+ so is not correct in the pre-inc case. */
+
+ validate_change (insn, &XEXP (x, 0),
+ gen_rtx (inc_code, Pmode, q),
+ 1);
+ validate_change (incr, &XEXP (y, 0), q, 1);
+ if (! apply_change_group ())
+ return;
+
+ /* We now know we'll be doing this change, so emit the
+ new insn(s) and do the updates. */
emit_insns_before (insns, insn);
if (basic_block_head[BLOCK_NUM (insn)] == insn)
basic_block_head[BLOCK_NUM (insn)] = insns;
- XEXP (x, 0) = q;
- XEXP (y, 0) = q;
-
/* INCR will become a NOTE and INSN won't contain a
use of ADDR. If a use of ADDR was just placed in
the insn before INSN, make that the next use.
addr = q;
regno = REGNO (q);
- win = 1;
/* REGNO is now used in INCR which is below INSN, but
it previously wasn't live here. If we don't mark
if (GET_CODE (temp) == CALL_INSN)
reg_n_calls_crossed[regno]++;
}
+ else
+ return;
+
+ /* If we haven't returned, it means we were able to make the
+ auto-inc, so update the status. First, record that this insn
+ has an implicit side effect. */
+
+ REG_NOTES (insn)
+ = gen_rtx (EXPR_LIST, REG_INC, addr, REG_NOTES (insn));
+
+ /* Modify the old increment-insn to simply copy
+ the already-incremented value of our register. */
+ if (! validate_change (incr, &SET_SRC (set), addr, 0))
+ abort ();
- if (win)
+ /* If that makes it a no-op (copying the register into itself) delete
+ it so it won't appear to be a "use" and a "set" of this
+ register. */
+ if (SET_DEST (set) == addr)
{
- /* We have found a suitable auto-increment: do POST_INC around
- the register here, and patch out the increment instruction
- that follows. */
- XEXP (x, 0) = gen_rtx ((INTVAL (XEXP (y, 1)) == size
- ? (offset ? PRE_INC : POST_INC)
- : (offset ? PRE_DEC : POST_DEC)),
- Pmode, addr);
-
- /* Record that this insn has an implicit side effect. */
- REG_NOTES (insn)
- = gen_rtx (EXPR_LIST, REG_INC, addr, REG_NOTES (insn));
-
- /* Modify the old increment-insn to simply copy
- the already-incremented value of our register. */
- SET_SRC (PATTERN (incr)) = addr;
- /* Indicate insn must be re-recognized. */
- INSN_CODE (incr) = -1;
-
- /* If that makes it a no-op (copying the register into itself)
- then delete it so it won't appear to be a "use" and a "set"
- of this register. */
- if (SET_DEST (PATTERN (incr)) == addr)
- {
- PUT_CODE (incr, NOTE);
- NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
- NOTE_SOURCE_FILE (incr) = 0;
- }
+ PUT_CODE (incr, NOTE);
+ NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
+ NOTE_SOURCE_FILE (incr) = 0;
+ }
- if (regno >= FIRST_PSEUDO_REGISTER)
- {
- /* Count an extra reference to the reg. When a reg is
- incremented, spilling it is worse, so we want to make
- that less likely. */
- reg_n_refs[regno] += loop_depth;
- /* Count the increment as a setting of the register,
- even though it isn't a SET in rtl. */
- reg_n_sets[regno]++;
- }
+ if (regno >= FIRST_PSEUDO_REGISTER)
+ {
+ /* Count an extra reference to the reg. When a reg is
+ incremented, spilling it is worse, so we want to make
+ that less likely. */
+ reg_n_refs[regno] += loop_depth;
+
+ /* Count the increment as a setting of the register,
+ even though it isn't a SET in rtl. */
+ reg_n_sets[regno]++;
}
}
}
regset needed;
regset live;
rtx x;
- rtx insn;
int final;
+ rtx insn;
{
register RTX_CODE code;
register int regno;
case CONST:
case CONST_DOUBLE:
case PC:
- case CLOBBER:
case ADDR_VEC:
case ADDR_DIFF_VEC:
case ASM_INPUT:
return;
#endif
+ case CLOBBER:
+ /* If we are clobbering a MEM, mark any registers inside the address
+ as being used. */
+ if (GET_CODE (XEXP (x, 0)) == MEM)
+ mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), final, insn);
+ return;
+
case MEM:
/* Invalidate the data for the last MEM stored. We could do this only
if the addresses conflict, but this doesn't seem worthwhile. */
#endif
break;
+ case SUBREG:
+ if (GET_CODE (SUBREG_REG (x)) == REG
+ && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
+ && (GET_MODE_SIZE (GET_MODE (x))
+ != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
+ reg_changes_size[REGNO (SUBREG_REG (x))] = 1;
+
+ /* While we're here, optimize this case. */
+ x = SUBREG_REG (x);
+
+ /* In case the SUBREG is not of a register, don't optimize. */
+ if (GET_CODE (x) != REG)
+ {
+ mark_used_regs (needed, live, x, final, insn);
+ return;
+ }
+
+ /* ... fall through ... */
+
case REG:
/* See a register other than being set
=> mark it as needed. */
register int offset = regno / REGSET_ELT_BITS;
register REGSET_ELT_TYPE bit
= (REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS);
- int all_needed = (needed[offset] & bit) != 0;
- int some_needed = (needed[offset] & bit) != 0;
+ REGSET_ELT_TYPE some_needed = needed[offset] & bit;
+ REGSET_ELT_TYPE some_not_needed = (~ needed[offset]) & bit;
live[offset] |= bit;
+
/* A hard reg in a wide mode may really be multiple registers.
If so, mark all of them just like the first. */
if (regno < FIRST_PSEUDO_REGISTER)
/* For stack ptr or fixed arg pointer,
nothing below can be necessary, so waste no more time. */
if (regno == STACK_POINTER_REGNUM
+#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
+ || regno == HARD_FRAME_POINTER_REGNUM
+#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
|| (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
/* No death notes for global register variables;
their values are live after this function exits. */
if (global_regs[regno])
- return;
+ {
+ if (final)
+ reg_next_use[regno] = insn;
+ return;
+ }
n = HARD_REGNO_NREGS (regno, GET_MODE (x));
while (--n > 0)
{
- live[(regno + n) / REGSET_ELT_BITS]
- |= (REGSET_ELT_TYPE) 1 << ((regno + n) % REGSET_ELT_BITS);
- some_needed
- |= (needed[(regno + n) / REGSET_ELT_BITS]
- & (REGSET_ELT_TYPE) 1 << ((regno + n) % REGSET_ELT_BITS));
- all_needed
- &= (needed[(regno + n) / REGSET_ELT_BITS]
- & (REGSET_ELT_TYPE) 1 << ((regno + n) % REGSET_ELT_BITS));
+ REGSET_ELT_TYPE n_bit
+ = (REGSET_ELT_TYPE) 1 << ((regno + n) % REGSET_ELT_BITS);
+
+ live[(regno + n) / REGSET_ELT_BITS] |= n_bit;
+ some_needed |= (needed[(regno + n) / REGSET_ELT_BITS] & n_bit);
+ some_not_needed
+ |= ((~ needed[(regno + n) / REGSET_ELT_BITS]) & n_bit);
}
}
if (final)
we do not make a REG_DEAD note; likewise if we already
made such a note. */
- if (! all_needed
+ if (some_not_needed
&& ! dead_or_set_p (insn, x)
#if 0
&& (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
#endif
)
{
+ /* Check for the case where the register dying partially
+ overlaps the register set by this insn. */
+ if (regno < FIRST_PSEUDO_REGISTER
+ && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
+ {
+ int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
+ while (--n >= 0)
+ some_needed |= dead_or_set_regno_p (insn, regno + n);
+ }
+
/* If none of the words in X is needed, make a REG_DEAD
note. Otherwise, we must make partial REG_DEAD notes. */
if (! some_needed)
&& ! dead_or_set_regno_p (insn, regno + i))
REG_NOTES (insn)
= gen_rtx (EXPR_LIST, REG_DEAD,
- gen_rtx (REG, word_mode, regno + i),
+ gen_rtx (REG, reg_raw_mode[regno + i],
+ regno + i),
REG_NOTES (insn));
}
}
|| GET_CODE (testreg) == SIGN_EXTRACT
|| GET_CODE (testreg) == SUBREG)
{
+ if (GET_CODE (testreg) == SUBREG
+ && GET_CODE (SUBREG_REG (testreg)) == REG
+ && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
+ && (GET_MODE_SIZE (GET_MODE (testreg))
+ != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
+ reg_changes_size[REGNO (SUBREG_REG (testreg))] = 1;
+
/* Modifying a single register in an alternate mode
does not use any of the old value. But these other
ways of storing in a register do use the old value. */
if (GET_CODE (testreg) == REG
&& (regno = REGNO (testreg), regno != FRAME_POINTER_REGNUM)
+#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
+ && regno != HARD_FRAME_POINTER_REGNUM
+#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
&& ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
- && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
+ )
+ /* We used to exclude global_regs here, but that seems wrong.
+ Storing in them is like storing in mem. */
{
mark_used_regs (needed, live, SET_SRC (x), final, insn);
if (mark_dest)
rtx y = reg_next_use[regno];
if (y != 0
&& BLOCK_NUM (y) == BLOCK_NUM (insn)
+ /* Don't do this if the reg dies, or gets set in y; a standard addressing
+ mode would be better. */
+ && ! dead_or_set_p (y, SET_DEST (x))
&& try_pre_increment (y, SET_DEST (PATTERN (insn)),
amount))
{
if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
return 0;
- XEXP (use, 0) = gen_rtx (amount > 0
- ? (do_post ? POST_INC : PRE_INC)
- : (do_post ? POST_DEC : PRE_DEC),
- Pmode, reg);
+ /* See if this combination of instruction and addressing mode exists. */
+ if (! validate_change (insn, &XEXP (use, 0),
+ gen_rtx (amount > 0
+ ? (do_post ? POST_INC : PRE_INC)
+ : (do_post ? POST_DEC : PRE_DEC),
+ Pmode, reg), 0))
+ return 0;
/* Record that this insn now has an implicit side effect on X. */
REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_INC, reg, REG_NOTES (insn));
find_use_as_address (x, reg, plusconst)
register rtx x;
rtx reg;
- int plusconst;
+ HOST_WIDE_INT plusconst;
{
enum rtx_code code = GET_CODE (x);
char *fmt = GET_RTX_FORMAT (code);
for (regno = 0; regno < max_regno; regno++)
{
register int offset = regno / REGSET_ELT_BITS;
- register int bit = 1 << (regno % REGSET_ELT_BITS);
+ register REGSET_ELT_TYPE bit
+ = (REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS);
if (basic_block_live_at_start[i][offset] & bit)
fprintf (file, " %d", regno);
}