case CONST_DOUBLE:
case CONST_VECTOR:
case CALL:
+ case CONSTANT_P_RTX:
return 0;
default:
&& REGNO (src) >= FIRST_PSEUDO_REGISTER
&& can_copy_p [GET_MODE (dest)]
&& REGNO (src) != regno)
- || CONSTANT_P (src))
+ || (CONSTANT_P (src)
+ && GET_CODE (src) != CONSTANT_P_RTX))
/* A copy is not available if its src or dest is subsequently
modified. Here we want to search from INSN+1 on, but
oprs_available_p searches from INSN on. */
conditional branch instructions first. */
if (alter_jumps
&& (sset = single_set (insn)) != NULL
+ && NEXT_INSN (insn)
&& any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
{
rtx dest = SET_DEST (sset);
if (l->in_libcall)
continue;
- if (CONSTANT_P (this_rtx))
+ if (CONSTANT_P (this_rtx)
+ && GET_CODE (this_rtx) != CONSTANT_P_RTX)
newcnst = this_rtx;
if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
/* Don't copy propagate if it has attached REG_EQUIV note.
rtx insn;
struct reg_use *reg_used;
rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
+ bool changed = false;
cselib_init ();
libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
reg_used++, reg_use_count--)
if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
libcall_sp))
- break;
+ {
+ changed = true;
+ break;
+ }
}
while (reg_use_count);
}
cselib_process_insn (insn);
}
cselib_finish ();
+ /* Global analysis may get into infinite loops for unreachable blocks. */
+ if (changed && alter_jumps)
+ {
+ delete_unreachable_blocks ();
+ free_reg_set_mem ();
+ alloc_reg_set_mem (max_reg_num ());
+ compute_sets (get_insns ());
+ }
}
/* Forward propagate copies. This includes copies and constants. Return
fprintf (gcse_file, "%d const props, %d copy props\n\n",
const_prop_count, copy_prop_count);
}
+ /* Global analysis may get into infinite loops for unreachable blocks. */
+ if (changed && cprop_jumps)
+ delete_unreachable_blocks ();
return changed;
}
\f
/* Bypass conditional jumps. */
+/* The value of last_basic_block at the beginning of the jump_bypass
+ pass. The use of redirect_edge_and_branch_force may introduce new
+ basic blocks, but the data flow analysis is only valid for basic
+ block indices less than bypass_last_basic_block. */
+
+static int bypass_last_basic_block;
+
/* Find a set of REGNO to a constant that is available at the end of basic
block BB. Returns NULL if no such set is found. Based heavily upon
find_avail_set. */
for (e = bb->pred; e; e = enext)
{
enext = e->pred_next;
+ if (e->flags & EDGE_COMPLEX)
+ continue;
+
+ /* We can't redirect edges from new basic blocks. */
+ if (e->src->index >= bypass_last_basic_block)
+ continue;
+
for (i = 0; i < reg_use_count; i++)
{
	  struct reg_use *reg_used = &reg_use_table[i];
else
dest = NULL;
- /* Once basic block indices are stable, we should be able
- to use redirect_edge_and_branch_force instead. */
old_dest = e->dest;
- if (dest != NULL && dest != old_dest
- && redirect_edge_and_branch (e, dest))
- {
+ if (dest != NULL
+ && dest != old_dest
+ && dest != EXIT_BLOCK_PTR)
+ {
+ redirect_edge_and_branch_force (e, dest);
+
/* Copy the register setter to the redirected edge.
Don't copy CC0 setters, as CC0 is dead after jump. */
if (setcc)
if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
return 0;
+ bypass_last_basic_block = last_basic_block;
+
changed = 0;
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
EXIT_BLOCK_PTR, next_bb)