#include "obstack.h"
#include "hashtab.h"
#include "params.h"
+#include "target.h"
/* The following code implements gcse after reload, the purpose of this
pass is to cleanup redundant loads generated by reload and other
return exp->hash;
}
-/* Callbach for hashtab.
+/* Callback for hashtab.
Return nonzero if exp1 is equivalent to exp2. */
static int
struct expr *exp1 = (struct expr *) exp1p;
struct expr *exp2 = (struct expr *) exp2p;
int equiv_p = exp_equiv_p (exp1->expr, exp2->expr, 0, true);
- if (equiv_p
- && exp1->hash != exp2->hash)
- abort ();
+
+ gcc_assert (!equiv_p || exp1->hash == exp2->hash);
return equiv_p;
}
\f
fprintf (file, "expr: ");
print_rtl (file, expr->expr);
fprintf (file,"\nhashcode: %u\n", expr->hash);
- fprintf (file,"list of occurences:\n");
+ fprintf (file,"list of occurrences:\n");
occr = expr->avail_occr;
while (occr)
{
switch (code)
{
case REG:
-#ifdef ENABLE_CHECKING
/* We are called after register allocation. */
- if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
- abort ();
-#endif
+ gcc_assert (REGNO (x) < FIRST_PSEUDO_REGISTER);
if (after_insn)
/* If the last CUID setting the insn is less than the CUID of
INSN, then reg X is not changed in or after INSN. */
if (JUMP_P (insn) || set_noop_p (pat))
return;
-#ifdef ENABLE_CHEKCING
- /* We shouldn't have any EH_REGION notes post reload. */
- if (find_reg_note (insn, REG_EH_REGION, NULL_RTX))
- abort ();
-#endif
-
if (REG_P (dest))
{
if (/* Don't CSE something if we can't do a reg/reg copy. */
{
rtx insn;
-#ifdef ENABLE_CHECKING
/* We are called after register allocation. */
- if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
- abort ();
-#endif
+ gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
if (from_insn == to_insn)
return NULL_RTX;
{
rtx insn;
-#ifdef ENABLE_CHECKING
/* We are called after register allocation. */
- if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
- abort ();
-#endif
+ gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
if (from_insn == to_insn)
return NULL_RTX;
static rtx
get_avail_load_store_reg (rtx insn)
{
- if (REG_P (SET_DEST (PATTERN (insn)))) /* A load. */
+ if (REG_P (SET_DEST (PATTERN (insn))))
+ /* A load. */
return SET_DEST(PATTERN(insn));
- if (REG_P (SET_SRC (PATTERN (insn)))) /* A store. */
- return SET_SRC (PATTERN (insn));
- abort ();
+ else
+ {
+ /* A store. */
+ gcc_assert (REG_P (SET_SRC (PATTERN (insn))));
+ return SET_SRC (PATTERN (insn));
+ }
}
/* Return nonzero if the predecessors of BB are "well behaved". */
gcov_type ok_count = 0; /* Redundant load execution count. */
gcov_type critical_count = 0; /* Execution count of critical edges. */
edge_iterator ei;
+ bool critical_edge_split = false;
/* The execution count of the loads to be added to make the
load fully redundant. */
rtx next_pred_bb_end;
avail_insn = NULL_RTX;
+ avail_reg = NULL_RTX;
pred_bb = pred->src;
next_pred_bb_end = NEXT_INSN (BB_END (pred_bb));
for (a_occr = get_bb_avail_insn (pred_bb, expr->avail_occr); a_occr;
{
/* Check if the loaded register is not used. */
avail_insn = a_occr->insn;
- if (! (avail_reg = get_avail_load_store_reg (avail_insn)))
- abort ();
+ avail_reg = get_avail_load_store_reg (avail_insn);
+ gcc_assert (avail_reg);
+
/* Make sure we can generate a move from register avail_reg to
dest. */
extract_insn (gen_move_insn (copy_rtx (dest),
{
npred_ok++;
ok_count += pred->count;
+ if (! set_noop_p (PATTERN (gen_move_insn (copy_rtx (dest),
+ copy_rtx (avail_reg)))))
+ {
+ /* Check if there is going to be a split. */
+ if (EDGE_CRITICAL_P (pred))
+ critical_edge_split = true;
+ }
+    else /* It's a dead move; no need to generate. */
+ continue;
occr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
sizeof (struct occr));
occr->insn = avail_insn;
}
else
{
+	  /* Adding a load on a critical edge will cause a split. */
+ if (EDGE_CRITICAL_P (pred))
+ critical_edge_split = true;
not_ok_count += pred->count;
unoccr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
sizeof (struct unoccr));
if (/* No load can be replaced by copy. */
npred_ok == 0
/* Prevent exploding the code. */
- || (optimize_size && npred_ok > 1))
+ || (optimize_size && npred_ok > 1)
+ /* If we don't have profile information we cannot tell if splitting
+ a critical edge is profitable or not so don't do it. */
+ || ((! profile_info || ! flag_branch_probabilities
+ || targetm.cannot_modify_jumps_p ())
+ && critical_edge_split))
goto cleanup;
/* Check if it's worth applying the partial redundancy elimination. */
/* Set avail_reg to be the register having the value of the
memory. */
avail_reg = get_avail_load_store_reg (avail_insn);
- if (! avail_reg)
- abort ();
+ gcc_assert (avail_reg);
insert_insn_on_edge (gen_move_insn (copy_rtx (dest),
copy_rtx (avail_reg)),
a_occr = get_bb_avail_insn (bb, a_occr->next));
if (!a_occr)
- delete_insn (insn);
+ {
+ stats.insns_deleted++;
+
+ if (dump_file)
+ {
+ fprintf (dump_file, "deleting insn:\n");
+ print_rtl_single (dump_file, insn);
+ fprintf (dump_file, "\n");
+ }
+ delete_insn (insn);
+ }
else
a_occr->deleted_p = 1;
void
gcse_after_reload_main (rtx f ATTRIBUTE_UNUSED)
{
+
memset (&stats, 0, sizeof (stats));
/* Allocate ememory for this pass.