#define EPILOGUE_USES(REGNO) 0
#endif
-/* Not in basic-block.h, since it is private to this file. When set, it
- causes us to keep REG_N_SETS uptodate for original pseudo registers. */
-#define PROP_POSTRELOAD 64
-
/* The obstack on which the flow graph components are allocated. */
struct obstack flow_obstack;
regset reg_cond_reg;
#endif
+ /* The length of mem_set_list. */
+ int mem_set_list_len;
+
/* Non-zero if the value of CC0 is live. */
int cc0_live;
int flags;
};
+/* Maximum length of pbi->mem_set_list before we start dropping
+ new elements on the floor. */
+#define MAX_MEM_SET_LIST_LEN 100
+
/* Store the data structures necessary for depth-first search. */
struct depth_first_search_dsS {
/* stack for backtracking during the algorithm */
bb = BASIC_BLOCK (i);
}
}
+
+/* Add fake edges to the function exit for any non-constant calls in
+   the bitmap of blocks specified by BLOCKS, or to the whole CFG if
+   BLOCKS is zero.  Return the number of blocks that were split.  */
+
+int
+flow_call_edges_add (blocks)
+     sbitmap blocks;
+{
+  int i;
+  int blocks_split = 0;
+  int bb_num = 0;
+  basic_block *bbs;
+
+  /* Map bb indices into basic block pointers up front, since split_block
+     will renumber the basic blocks as we go.  */
+
+  bbs = xmalloc (n_basic_blocks * sizeof (*bbs));
+
+  if (! blocks)
+    {
+      for (i = 0; i < n_basic_blocks; i++)
+	bbs[bb_num++] = BASIC_BLOCK (i);
+    }
+  else
+    {
+      EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
+      {
+	bbs[bb_num++] = BASIC_BLOCK (i);
+      });
+    }
+
+
+  /* Now add fake edges to the function exit for any non-constant
+     calls, since there is no way to determine whether or not they
+     will return.  */
+
+  for (i = 0; i < bb_num; i++)
+    {
+      basic_block bb = bbs[i];
+      rtx insn;
+      rtx prev_insn;
+
+      /* Scan the block backwards from its last insn to its head.  */
+      for (insn = bb->end; ; insn = prev_insn)
+	{
+	  prev_insn = PREV_INSN (insn);
+	  if (GET_CODE (insn) == CALL_INSN && ! CONST_CALL_P (insn))
+	    {
+	      edge e;
+
+	      /* Note that the following may create a new basic block
+		 and renumber the existing basic blocks.  */
+	      e = split_block (bb, insn);
+	      if (e)
+		blocks_split++;
+
+	      make_edge (NULL, bb, EXIT_BLOCK_PTR, EDGE_FAKE);
+	    }
+	  if (insn == bb->head)
+	    break;
+	}
+    }
+
+  if (blocks_split)
+    verify_flow_info ();
+
+  free (bbs);
+  return blocks_split;
+}
\f
/* Delete all unreachable basic blocks. */
NOTE_SOURCE_FILE (q) = 0;
}
else
- q = PREV_INSN (q);
+ {
+ q = PREV_INSN (q);
+
+ /* We don't want a block to end on a line-number note since that has
+ the potential of changing the code between -g and not -g. */
+ while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
+ q = PREV_INSN (q);
+ }
b->end = q;
}
tmp = INITIALIZE_REG_SET (tmp_head);
- if (reload_completed)
- prop_flags |= PROP_POSTRELOAD;
-
/* For a global update, we go through the relaxation process again. */
if (extent != UPDATE_LIFE_LOCAL)
{
#endif
}
-#ifdef PIC_OFFSET_TABLE_REGNUM
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
/* Many architectures have a GP register even without flag_pic.
Assume the pic register is not in use, or will be handled by
other means, if it is not fixed. */
- if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
+ if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
+ && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
#endif
-#endif
/* Mark all global registers, and all registers used by the epilogue
as being live at the end of the function since they may be
SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
#endif
-#ifdef PIC_OFFSET_TABLE_REGNUM
/* Any constant, or pseudo with constant equivalences, may
require reloading from memory using the pic register. */
- if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
+ if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
+ && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
-#endif
}
/* Regs used in phi nodes are not included in
note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
if (flags & PROP_SCAN_DEAD_CODE)
{
- insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0,
- REG_NOTES (insn));
+ insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
libcall_is_dead = (insn_is_dead && note != 0
&& libcall_dead_p (pbi, note, insn));
}
- /* We almost certainly don't want to delete prologue or epilogue
- instructions. Warn about probable compiler losage. */
- if (insn_is_dead
- && reload_completed
- && (((HAVE_epilogue || HAVE_prologue)
- && prologue_epilogue_contains (insn))
- || (HAVE_sibcall_epilogue
- && sibcall_epilogue_contains (insn)))
- && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
- {
- if (flags & PROP_KILL_DEAD_CODE)
- {
- warning ("ICE: would have deleted prologue/epilogue insn");
- if (!inhibit_warnings)
- debug_rtx (insn);
- }
- libcall_is_dead = insn_is_dead = 0;
- }
-
/* If an instruction consists of just dead store(s) on final pass,
delete it. */
if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
{
+ /* If we're trying to delete a prologue or epilogue instruction
+ that isn't flagged as possibly being dead, something is wrong.
+ But if we are keeping the stack pointer depressed, we might well
+ be deleting insns that are used to compute the amount to update
+ it by, so they are fine. */
+ if (reload_completed
+ && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
+ && (TYPE_RETURNS_STACK_DEPRESSED
+ (TREE_TYPE (current_function_decl))))
+ && (((HAVE_epilogue || HAVE_prologue)
+ && prologue_epilogue_contains (insn))
+ || (HAVE_sibcall_epilogue
+ && sibcall_epilogue_contains (insn)))
+ && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
+ abort ();
+
/* Record sets. Do this even for dead instructions, since they
would have killed the values if they hadn't been deleted. */
mark_set_regs (pbi, PATTERN (insn), insn);
/* Non-constant calls clobber memory. */
if (! CONST_CALL_P (insn))
- free_EXPR_LIST_list (&pbi->mem_set_list);
+ {
+ free_EXPR_LIST_list (&pbi->mem_set_list);
+ pbi->mem_set_list_len = 0;
+ }
/* There may be extra registers to be clobbered. */
for (note = CALL_INSN_FUNCTION_USAGE (insn);
pbi->bb = bb;
pbi->reg_live = live;
pbi->mem_set_list = NULL_RTX;
+ pbi->mem_set_list_len = 0;
pbi->local_set = local_set;
pbi->cond_local_set = cond_local_set;
pbi->cc0_live = 0;
{
rtx mem = SET_DEST (PATTERN (insn));
+ /* This optimization is performed by faking a store to the
+ memory at the end of the block. This doesn't work for
+ unchanging memories because multiple stores to unchanging
+ memory is illegal and alias analysis doesn't consider it. */
+ if (RTX_UNCHANGING_P (mem))
+ continue;
+
if (XEXP (mem, 0) == frame_pointer_rtx
|| (GET_CODE (XEXP (mem, 0)) == PLUS
&& XEXP (XEXP (mem, 0), 0) == frame_pointer_rtx
mem = shallow_copy_rtx (mem);
#endif
pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
+ if (++pbi->mem_set_list_len >= MAX_MEM_SET_LIST_LEN)
+ break;
}
}
}
else
pbi->mem_set_list = next;
free_EXPR_LIST_node (temp);
+ pbi->mem_set_list_len--;
}
else
prev = temp;
else
pbi->mem_set_list = next;
free_EXPR_LIST_node (temp);
+ pbi->mem_set_list_len--;
}
else
prev = temp;
rtx reg, cond, insn;
int flags;
{
- int orig_regno = -1;
int regno_first = -1, regno_last = -1;
int not_dead = 0;
int i;
- /* Some targets place small structures in registers for
- return values of functions. We have to detect this
- case specially here to get correct flow information. */
- if (GET_CODE (reg) == PARALLEL
- && GET_MODE (reg) == BLKmode)
- {
- for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
- mark_set_1 (pbi, code, XVECEXP (reg, 0, i), cond, insn, flags);
- return;
- }
-
/* Modifying just one hardware register of a multi-reg value or just a
byte field of a register does not mean the value from before this insn
is now dead. Of course, if it was dead after it's unused now. */
switch (GET_CODE (reg))
{
+ case PARALLEL:
+ /* Some targets place small structures in registers for return values of
+ functions. We have to detect this case specially here to get correct
+ flow information. */
+ for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
+ if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
+ mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
+ flags);
+ return;
+
case ZERO_EXTRACT:
case SIGN_EXTRACT:
case STRICT_LOW_PART:
/* Fall through. */
case REG:
- orig_regno = ORIGINAL_REGNO (reg);
regno_last = regno_first = REGNO (reg);
if (regno_first < FIRST_PSEUDO_REGISTER)
regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
if (insn && GET_CODE (reg) == MEM)
invalidate_mems_from_autoinc (pbi, insn);
- if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
+ if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN
+ && GET_CODE (reg) == MEM && ! side_effects_p (reg)
/* ??? With more effort we could track conditional memory life. */
&& ! cond
/* We do not know the size of a BLKmode store, so we do not track
reg = shallow_copy_rtx (reg);
#endif
pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
+ pbi->mem_set_list_len++;
}
}
/* Additional data to record if this is the final pass. */
if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
- | PROP_DEATH_NOTES | PROP_AUTOINC | PROP_POSTRELOAD))
+ | PROP_DEATH_NOTES | PROP_AUTOINC))
{
register rtx y;
register int blocknum = pbi->bb->index;
pbi->reg_next_use[i] = 0;
}
- /* After reload has completed, try to keep REG_N_SETS uptodate for
- the original pseudos. */
- if ((flags & PROP_POSTRELOAD) && orig_regno >= FIRST_PSEUDO_REGISTER)
- REG_N_SETS (orig_regno) += 1;
-
if (flags & PROP_REG_INFO)
{
for (i = regno_first; i <= regno_last; ++i)
else
pbi->mem_set_list = next;
free_EXPR_LIST_node (temp);
+ pbi->mem_set_list_len--;
}
else
prev = temp;
testreg = XEXP (testreg, 0);
}
- /* If this is a store into a register, recursively scan the
- value being stored. */
+ /* If this is a store into a register or group of registers,
+ recursively scan the value being stored. */
if ((GET_CODE (testreg) == PARALLEL
&& GET_MODE (testreg) == BLKmode)
So for now, just clear the memory set list and mark any regs
we can find in ASM_OPERANDS as used. */
if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
- free_EXPR_LIST_list (&pbi->mem_set_list);
+ {
+ free_EXPR_LIST_list (&pbi->mem_set_list);
+ pbi->mem_set_list_len = 0;
+ }
/* For all ASM_OPERANDS, we must traverse the vector of input operands.
We can not just fall through here since then we would be confused
basic_block bb = NOTE_BASIC_BLOCK (x);
num_bb_notes++;
if (bb->index != last_bb_num_seen + 1)
- fatal ("Basic blocks not numbered consecutively");
+ /* Basic blocks not numbered consecutively. */
+ abort ();
+
last_bb_num_seen = bb->index;
}
}
if (num_bb_notes != n_basic_blocks)
- fatal ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
- num_bb_notes, n_basic_blocks);
+ internal_error
+ ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
+ num_bb_notes, n_basic_blocks);
if (err)
abort ();