/* Post reload partially redundant load elimination
- Copyright (C) 2004
+ Copyright (C) 2004, 2005, 2006, 2007, 2008, 2010
Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
-Software Foundation; either version 2, or (at your option) any later
+Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
-#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "obstack.h"
#include "hashtab.h"
#include "params.h"
+#include "target.h"
+#include "timevar.h"
+#include "tree-pass.h"
+#include "dbgcnt.h"
/* The following code implements gcse after reload, the purpose of this
pass is to cleanup redundant loads generated by reload and other
/* Array where each element is the CUID if the insn that last set the hard
register with the number of the element, since the start of the current
- basic block. */
+ basic block.
+
+ This array is used during the building of the hash table (step 1) to
+ determine if a reg is killed before the end of a basic block.
+
+ It is also used when eliminating partial redundancies (step 2) to see
+ if a reg was modified since the start of a basic block. */
static int *reg_avail_info;
/* A list of insns that may modify memory within the current basic block. */
/* Support for hash table construction and transformations. */
static bool oprs_unchanged_p (rtx, rtx, bool);
-static void record_last_reg_set_info (rtx, int);
+static void record_last_reg_set_info (rtx, rtx);
+static void record_last_reg_set_info_regno (rtx, int);
static void record_last_mem_set_info (rtx);
-static void record_last_set_info (rtx, rtx, void *);
-static void mark_call (rtx);
-static void mark_set (rtx, rtx);
-static void mark_clobber (rtx, rtx);
-static void mark_oprs_set (rtx);
+static void record_last_set_info (rtx, const_rtx, void *);
+static void record_opr_changes (rtx);
-static void find_mem_conflicts (rtx, rtx, void *);
+static void find_mem_conflicts (rtx, const_rtx, void *);
static int load_killed_in_block_p (int, rtx, bool);
static void reset_opr_set_tables (void);
static bool reg_killed_on_edge (rtx, edge);
static bool reg_used_on_edge (rtx, edge);
-static rtx reg_set_between_after_reload_p (rtx, rtx, rtx);
-static rtx reg_used_between_after_reload_p (rtx, rtx, rtx);
static rtx get_avail_load_store_reg (rtx);
static bool bb_has_well_behaved_predecessors (basic_block);
rtx insn;
/* Find the largest UID and create a mapping from UIDs to CUIDs. */
- uid_cuid = xcalloc (get_max_uid () + 1, sizeof (int));
- i = 0;
+ uid_cuid = XCNEWVEC (int, get_max_uid () + 1);
+ i = 1;
FOR_EACH_BB (bb)
FOR_BB_INSNS (bb, insn)
{
static hashval_t
hash_expr_for_htab (const void *expp)
{
- struct expr *exp = (struct expr *) expp;
+ const struct expr *const exp = (const struct expr *) expp;
return exp->hash;
}
-/* Callbach for hashtab.
+/* Callback for hashtab.
Return nonzero if exp1 is equivalent to exp2. */
static int
expr_equiv_p (const void *exp1p, const void *exp2p)
{
- struct expr *exp1 = (struct expr *) exp1p;
- struct expr *exp2 = (struct expr *) exp2p;
+ const struct expr *const exp1 = (const struct expr *) exp1p;
+ const struct expr *const exp2 = (const struct expr *) exp2p;
int equiv_p = exp_equiv_p (exp1->expr, exp2->expr, 0, true);
- if (equiv_p
- && exp1->hash != exp2->hash)
- abort ();
+
+ gcc_assert (!equiv_p || exp1->hash == exp2->hash);
return equiv_p;
}
\f
slot = (struct expr **) htab_find_slot_with_hash (expr_table, cur_expr,
hash, INSERT);
-
+
if (! (*slot))
/* The expression isn't found, so insert it. */
*slot = cur_expr;
/* Search for another occurrence in the same basic block. */
avail_occr = cur_expr->avail_occr;
- while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
+ while (avail_occr
+ && BLOCK_FOR_INSN (avail_occr->insn) != BLOCK_FOR_INSN (insn))
{
/* If an occurrence isn't found, save a pointer to the end of
the list. */
fprintf (file, "expr: ");
print_rtl (file, expr->expr);
fprintf (file,"\nhashcode: %u\n", expr->hash);
- fprintf (file,"list of occurences:\n");
+ fprintf (file,"list of occurrences:\n");
occr = expr->avail_occr;
while (occr)
{
fprintf (file, "\n");
}
\f
+/* Return true if register X is recorded as being set by an instruction
+ whose CUID is greater than the one given. */
-/* Return nonzero if the operands of expression X are unchanged from the
- start of INSN's basic block up to but not including INSN if AFTER_INSN
- is false, or from INSN to the end of INSN's basic block if AFTER_INSN
- is true. */
+static bool
+reg_changed_after_insn_p (rtx x, int cuid)
+{
+ unsigned int regno, end_regno;
+
+ regno = REGNO (x);
+ end_regno = END_HARD_REGNO (x);
+ do
+ if (reg_avail_info[regno] > cuid)
+ return true;
+ while (++regno < end_regno);
+ return false;
+}
+
+/* Return nonzero if the operands of expression X are unchanged
+ 1) from the start of INSN's basic block up to but not including INSN
+ if AFTER_INSN is false, or
+ 2) from INSN to the end of INSN's basic block if AFTER_INSN is true. */
static bool
oprs_unchanged_p (rtx x, rtx insn, bool after_insn)
switch (code)
{
case REG:
-#ifdef ENABLE_CHECKING
/* We are called after register allocation. */
- if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
- abort ();
-#endif
+ gcc_assert (REGNO (x) < FIRST_PSEUDO_REGISTER);
if (after_insn)
- /* If the last CUID setting the insn is less than the CUID of
- INSN, then reg X is not changed in or after INSN. */
- return reg_avail_info[REGNO (x)] < INSN_CUID (insn);
+ return !reg_changed_after_insn_p (x, INSN_CUID (insn) - 1);
else
- /* Reg X is not set before INSN in the current basic block if
- we have not yet recorded the CUID of an insn that touches
- the reg. */
- return reg_avail_info[REGNO (x)] == 0;
+ return !reg_changed_after_insn_p (x, 0);
case MEM:
if (load_killed_in_block_p (INSN_CUID (insn), x, after_insn))
case CONST:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case SYMBOL_REF:
case LABEL_REF:
to a nonzero value. */
static void
-find_mem_conflicts (rtx dest, rtx setter ATTRIBUTE_UNUSED,
+find_mem_conflicts (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
void *data)
{
rtx mem_op = (rtx) data;
while (GET_CODE (dest) == SUBREG
|| GET_CODE (dest) == ZERO_EXTRACT
- || GET_CODE (dest) == SIGN_EXTRACT
|| GET_CODE (dest) == STRICT_LOW_PART)
dest = XEXP (dest, 0);
\f
/* Return nonzero if the expression in X (a memory reference) is killed
- in block BB before if (AFTER_INSN is false) or after (if AFTER_INSN
- is true) the insn with the CUID in UID_LIMIT. */
+ in the current basic block before (if AFTER_INSN is false) or after
+ (if AFTER_INSN is true) the insn with the CUID in UID_LIMIT.
+
+ This function assumes that the modifies_mem table is flushed when
+ the hash table construction or redundancy elimination phases start
+ processing a new basic block. */
static int
load_killed_in_block_p (int uid_limit, rtx x, bool after_insn)
/* Record register first/last/block set information for REGNO in INSN. */
-static void
-record_last_reg_set_info (rtx insn, int regno)
+static inline void
+record_last_reg_set_info (rtx insn, rtx reg)
+{
+ unsigned int regno, end_regno;
+
+ regno = REGNO (reg);
+ end_regno = END_HARD_REGNO (reg);
+ do
+ reg_avail_info[regno] = INSN_CUID (insn);
+ while (++regno < end_regno);
+}
+
+static inline void
+record_last_reg_set_info_regno (rtx insn, int regno)
{
reg_avail_info[regno] = INSN_CUID (insn);
}
the SET is taking place. */
static void
-record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
+record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
{
rtx last_set_insn = (rtx) data;
dest = SUBREG_REG (dest);
if (REG_P (dest))
- record_last_reg_set_info (last_set_insn, REGNO (dest));
- else if (MEM_P (dest)
- /* Ignore pushes, they clobber nothing. */
- && ! push_operand (dest, GET_MODE (dest)))
- record_last_mem_set_info (last_set_insn);
+ record_last_reg_set_info (last_set_insn, dest);
+ else if (MEM_P (dest))
+ {
+ /* Ignore pushes, they don't clobber memory. They may still
+ clobber the stack pointer though. Some targets do argument
+ pushes without adding REG_INC notes. See e.g. PR25196,
+ where a pushsi2 on i386 doesn't have REG_INC notes. Note
+ such changes here too. */
+ if (! push_operand (dest, GET_MODE (dest)))
+ record_last_mem_set_info (last_set_insn);
+ else
+ record_last_reg_set_info_regno (last_set_insn, STACK_POINTER_REGNUM);
+ }
}
-\f
+
/* Reset tables used to keep track of what's still available since the
start of the block. */
obstack_free (&modifies_mem_obstack, modifies_mem_obstack_bottom);
modifies_mem_list = NULL;
}
-
-/* Mark things set by a CALL. */
-
-static void
-mark_call (rtx insn)
-{
- if (! CONST_OR_PURE_CALL_P (insn))
- record_last_mem_set_info (insn);
-}
-
-/* Mark things set by a SET. */
-
-static void
-mark_set (rtx pat, rtx insn)
-{
- rtx dest = SET_DEST (pat);
-
- while (GET_CODE (dest) == SUBREG
- || GET_CODE (dest) == ZERO_EXTRACT
- || GET_CODE (dest) == SIGN_EXTRACT
- || GET_CODE (dest) == STRICT_LOW_PART)
- dest = XEXP (dest, 0);
-
- if (REG_P (dest))
- record_last_reg_set_info (insn, REGNO (dest));
- else if (MEM_P (dest))
- record_last_mem_set_info (insn);
-
- if (GET_CODE (SET_SRC (pat)) == CALL)
- mark_call (insn);
-}
-
-/* Record things set by a CLOBBER. */
-
-static void
-mark_clobber (rtx pat, rtx insn)
-{
- rtx clob = XEXP (pat, 0);
-
- while (GET_CODE (clob) == SUBREG
- || GET_CODE (clob) == STRICT_LOW_PART)
- clob = XEXP (clob, 0);
-
- if (REG_P (clob))
- record_last_reg_set_info (insn, REGNO (clob));
- else
- record_last_mem_set_info (insn);
-}
+\f
/* Record things set by INSN.
This data is used by oprs_unchanged_p. */
static void
-mark_oprs_set (rtx insn)
+record_opr_changes (rtx insn)
{
- rtx pat = PATTERN (insn);
- int i;
+ rtx note;
- if (GET_CODE (pat) == SET)
- mark_set (pat, insn);
+ /* Find all stores and record them. */
+ note_stores (PATTERN (insn), record_last_set_info, insn);
- else if (GET_CODE (pat) == PARALLEL)
- for (i = 0; i < XVECLEN (pat, 0); i++)
- {
- rtx x = XVECEXP (pat, 0, i);
-
- if (GET_CODE (x) == SET)
- mark_set (x, insn);
- else if (GET_CODE (x) == CLOBBER)
- mark_clobber (x, insn);
- else if (GET_CODE (x) == CALL)
- mark_call (insn);
- }
+ /* Also record autoincremented REGs for this insn as changed. */
+ for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
+ if (REG_NOTE_KIND (note) == REG_INC)
+ record_last_reg_set_info (insn, XEXP (note, 0));
- else if (GET_CODE (pat) == CLOBBER)
- mark_clobber (pat, insn);
+ /* Finally, if this is a call, record all call clobbers. */
+ if (CALL_P (insn))
+ {
+ unsigned int regno;
+ rtx link, x;
+
+ for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+ if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
+ record_last_reg_set_info_regno (insn, regno);
+
+ for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
+ if (GET_CODE (XEXP (link, 0)) == CLOBBER)
+ {
+ x = XEXP (XEXP (link, 0), 0);
+ if (REG_P (x))
+ {
+ gcc_assert (HARD_REGISTER_P (x));
+ record_last_reg_set_info (insn, x);
+ }
+ }
- else if (GET_CODE (pat) == CALL)
- mark_call (insn);
+ if (! RTL_CONST_OR_PURE_CALL_P (insn))
+ record_last_mem_set_info (insn);
+ }
}
\f
if (JUMP_P (insn) || set_noop_p (pat))
return;
-#ifdef ENABLE_CHEKCING
- /* We shouldn't have any EH_REGION notes post reload. */
- if (find_reg_note (insn, REG_EH_REGION, NULL_RTX))
- abort ();
-#endif
-
if (REG_P (dest))
{
- if (/* Don't GCSE something if we can't do a reg/reg copy. */
+ if (/* Don't CSE something if we can't do a reg/reg copy. */
can_copy_p (GET_MODE (dest))
/* Is SET_SRC something we want to gcse? */
&& general_operand (src, GET_MODE (src))
+#ifdef STACK_REGS
+ /* Never consider insns touching the register stack. It may
+ create situations that reg-stack cannot handle (e.g. a stack
+ register live across an abnormal edge). */
+ && (REGNO (dest) < FIRST_STACK_REG || REGNO (dest) > LAST_STACK_REG)
+#endif
/* An expression is not available if its operands are
subsequently modified, including this insn. */
&& oprs_unchanged_p (src, insn, true))
else if (REG_P (src))
{
/* Only record sets of pseudo-regs in the hash table. */
- if (/* Don't GCSE something if we can't do a reg/reg copy. */
+ if (/* Don't CSE something if we can't do a reg/reg copy. */
can_copy_p (GET_MODE (src))
/* Is SET_DEST something we want to gcse? */
&& general_operand (dest, GET_MODE (dest))
+#ifdef STACK_REGS
+ /* As above for STACK_REGS. */
+ && (REGNO (src) < FIRST_STACK_REG || REGNO (src) > LAST_STACK_REG)
+#endif
&& ! (flag_float_store && FLOAT_MODE_P (GET_MODE (dest)))
/* Check if the memory expression is killed after insn. */
&& ! load_killed_in_block_p (INSN_CUID (insn) + 1, dest, true)
}
}
\f
+
/* Create hash table of memory expressions available at end of basic
- blocks. */
+ blocks. Basically you should think of this hash table as the
+ representation of AVAIL_OUT. This is the set of expressions that
+ is generated in a basic block and not killed before the end of the
+ same basic block. Notice that this is really a local computation. */
static void
compute_hash_table (void)
FOR_EACH_BB (bb)
{
rtx insn;
- unsigned int regno;
-
- reset_opr_set_tables ();
/* First pass over the instructions records information used to
- determine when registers and memory are first and last set. */
+ determine when registers and memory are last set.
+ Since we compute a "local" AVAIL_OUT, reset the tables that
+ help us keep track of what has been modified since the start
+ of the block. */
+ reset_opr_set_tables ();
FOR_BB_INSNS (bb, insn)
{
- if (! INSN_P (insn))
- continue;
-
- if (CALL_P (insn))
- {
- bool clobbers_all = false;
-
-#ifdef NON_SAVING_SETJMP
- if (NON_SAVING_SETJMP
- && find_reg_note (insn, REG_SETJMP, NULL_RTX))
- clobbers_all = true;
-#endif
-
- for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
- if (clobbers_all
- || TEST_HARD_REG_BIT (regs_invalidated_by_call,
- regno))
- record_last_reg_set_info (insn, regno);
-
- if (! CONST_OR_PURE_CALL_P (insn))
- record_last_mem_set_info (insn);
- }
-
- note_stores (PATTERN (insn), record_last_set_info, insn);
-
- if (GET_CODE (PATTERN (insn)) == SET)
- {
- rtx src, dest;
-
- src = SET_SRC (PATTERN (insn));
- dest = SET_DEST (PATTERN (insn));
- if (MEM_P (src) && auto_inc_p (XEXP (src, 0)))
- {
- regno = REGNO (XEXP (XEXP (src, 0), 0));
- record_last_reg_set_info (insn, regno);
- }
- if (MEM_P (dest) && auto_inc_p (XEXP (dest, 0)))
- {
- regno = REGNO (XEXP (XEXP (dest, 0), 0));
- record_last_reg_set_info (insn, regno);
- }
- }
- }
+ if (INSN_P (insn))
+ record_opr_changes (insn);
+ }
- /* The next pass builds the hash table. */
+ /* The next pass actually builds the hash table. */
FOR_BB_INSNS (bb, insn)
if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SET)
hash_scan_set (insn);
return false;
}
\f
-
-/* Return the insn that sets register REG or clobbers it in between
- FROM_INSN and TO_INSN (exclusive of those two).
- Just like reg_set_between but for hard registers and not pseudos. */
-
-static rtx
-reg_set_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
-{
- rtx insn;
- int regno;
-
-#ifdef ENABLE_CHECKING
- /* We are called after register allocation. */
- if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
- abort ();
-#endif
-
- if (from_insn == to_insn)
- return NULL_RTX;
-
- regno = REGNO (reg);
- for (insn = NEXT_INSN (from_insn);
- insn != to_insn;
- insn = NEXT_INSN (insn))
- {
- if (INSN_P (insn))
- {
- if (FIND_REG_INC_NOTE (insn, reg)
- || (CALL_P (insn)
- && call_used_regs[regno])
- || find_reg_fusage (insn, CLOBBER, reg))
- return insn;
- }
- if (set_of (reg, insn) != NULL_RTX)
- return insn;
- }
-
- return NULL_RTX;
-}
-
-/* Return the insn that uses register REG in between FROM_INSN and TO_INSN
- (exclusive of those two). Similar to reg_used_between but for hard
- registers and not pseudos. */
-
-static rtx
-reg_used_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
-{
- rtx insn;
- int regno;
-
-#ifdef ENABLE_CHECKING
- /* We are called after register allocation. */
- if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
- abort ();
-#endif
-
- if (from_insn == to_insn)
- return NULL_RTX;
-
- regno = REGNO (reg);
- for (insn = NEXT_INSN (from_insn);
- insn != to_insn;
- insn = NEXT_INSN (insn))
- if (INSN_P (insn)
- && (reg_overlap_mentioned_p (reg, PATTERN (insn))
- || (CALL_P (insn)
- && call_used_regs[regno])
- || find_reg_fusage (insn, USE, reg)
- || find_reg_fusage (insn, CLOBBER, reg)))
- return insn;
-
- return NULL_RTX;
-}
-
-/* Return true if REG is used, set, or killed between the beginning of
- basic block BB and UP_TO_INSN. Caches the result in reg_avail_info. */
-
-static bool
-reg_set_or_used_since_bb_start (rtx reg, basic_block bb, rtx up_to_insn)
-{
- rtx insn, start = PREV_INSN (BB_HEAD (bb));
-
- if (reg_avail_info[REGNO (reg)] != 0)
- return true;
-
- insn = reg_used_between_after_reload_p (reg, start, up_to_insn);
- if (! insn)
- insn = reg_set_between_after_reload_p (reg, start, up_to_insn);
-
- if (insn)
- reg_avail_info[REGNO (reg)] = INSN_CUID (insn);
-
- return insn != NULL_RTX;
-}
-
/* Return the loaded/stored register of a load/store instruction. */
static rtx
get_avail_load_store_reg (rtx insn)
{
- if (REG_P (SET_DEST (PATTERN (insn)))) /* A load. */
+ if (REG_P (SET_DEST (PATTERN (insn))))
+ /* A load. */
return SET_DEST(PATTERN(insn));
- if (REG_P (SET_SRC (PATTERN (insn)))) /* A store. */
- return SET_SRC (PATTERN (insn));
- abort ();
+ else
+ {
+ /* A store. */
+ gcc_assert (REG_P (SET_SRC (PATTERN (insn))));
+ return SET_SRC (PATTERN (insn));
+ }
}
/* Return nonzero if the predecessors of BB are "well behaved". */
bb_has_well_behaved_predecessors (basic_block bb)
{
edge pred;
+ edge_iterator ei;
- if (! bb->pred)
+ if (EDGE_COUNT (bb->preds) == 0)
return false;
- for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
+ FOR_EACH_EDGE (pred, ei, bb->preds)
{
if ((pred->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (pred))
return false;
/* This handles the case where several stores feed a partially redundant
load. It checks if the redundancy elimination is possible and if it's
- worth it. */
+ worth it.
+
+ Redundancy elimination is possible if,
+ 1) None of the operands of an insn have been modified since the start
+ of the current basic block.
+ 2) In any predecessor of the current basic block, the same expression
+ is generated.
+
+ See the function body for the heuristics that determine if eliminating
+ a redundancy is also worth doing, assuming it is possible. */
static void
eliminate_partially_redundant_load (basic_block bb, rtx insn,
int npred_ok = 0;
gcov_type ok_count = 0; /* Redundant load execution count. */
gcov_type critical_count = 0; /* Execution count of critical edges. */
+ edge_iterator ei;
+ bool critical_edge_split = false;
/* The execution count of the loads to be added to make the
load fully redundant. */
/* Check that the loaded register is not used, set, or killed from the
beginning of the block. */
- if (reg_set_or_used_since_bb_start (dest, bb, insn))
+ if (reg_changed_after_insn_p (dest, 0)
+ || reg_used_between_p (dest, PREV_INSN (BB_HEAD (bb)), insn))
return;
/* Check potential for replacing load with copy for predecessors. */
- for (pred = bb->pred; pred; pred = pred->pred_next)
+ FOR_EACH_EDGE (pred, ei, bb->preds)
{
rtx next_pred_bb_end;
avail_insn = NULL_RTX;
+ avail_reg = NULL_RTX;
pred_bb = pred->src;
next_pred_bb_end = NEXT_INSN (BB_END (pred_bb));
for (a_occr = get_bb_avail_insn (pred_bb, expr->avail_occr); a_occr;
{
/* Check if the loaded register is not used. */
avail_insn = a_occr->insn;
- if (! (avail_reg = get_avail_load_store_reg (avail_insn)))
- abort ();
+ avail_reg = get_avail_load_store_reg (avail_insn);
+ gcc_assert (avail_reg);
+
/* Make sure we can generate a move from register avail_reg to
dest. */
extract_insn (gen_move_insn (copy_rtx (dest),
avail_insn = NULL;
continue;
}
- if (! reg_set_between_after_reload_p (avail_reg, avail_insn,
- next_pred_bb_end))
+ if (!reg_set_between_p (avail_reg, avail_insn, next_pred_bb_end))
/* AVAIL_INSN remains non-null. */
break;
else
{
npred_ok++;
ok_count += pred->count;
+ if (! set_noop_p (PATTERN (gen_move_insn (copy_rtx (dest),
+ copy_rtx (avail_reg)))))
+ {
+ /* Check if there is going to be a split. */
+ if (EDGE_CRITICAL_P (pred))
+ critical_edge_split = true;
+ }
+	    else /* It's a dead move, no need to generate. */
+ continue;
occr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
- sizeof (struct occr));
+ sizeof (struct unoccr));
occr->insn = avail_insn;
occr->pred = pred;
occr->next = avail_occrs;
}
else
{
+ /* Adding a load on a critical edge will cause a split. */
+ if (EDGE_CRITICAL_P (pred))
+ critical_edge_split = true;
not_ok_count += pred->count;
unoccr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
sizeof (struct unoccr));
if (/* No load can be replaced by copy. */
npred_ok == 0
- /* Prevent exploding the code. */
- || (optimize_size && npred_ok > 1))
+ /* Prevent exploding the code. */
+ || (optimize_bb_for_size_p (bb) && npred_ok > 1)
+ /* If we don't have profile information we cannot tell if splitting
+ a critical edge is profitable or not so don't do it. */
+ || ((! profile_info || ! flag_branch_probabilities
+ || targetm.cannot_modify_jumps_p ())
+ && critical_edge_split))
goto cleanup;
/* Check if it's worth applying the partial redundancy elimination. */
/* Set avail_reg to be the register having the value of the
memory. */
avail_reg = get_avail_load_store_reg (avail_insn);
- if (! avail_reg)
- abort ();
+ gcc_assert (avail_reg);
insert_insn_on_edge (gen_move_insn (copy_rtx (dest),
copy_rtx (avail_reg)),
a_occr = get_bb_avail_insn (bb, a_occr->next));
if (!a_occr)
- delete_insn (insn);
+ {
+ stats.insns_deleted++;
+
+ if (dump_file)
+ {
+ fprintf (dump_file, "deleting insn:\n");
+ print_rtl_single (dump_file, insn);
+ fprintf (dump_file, "\n");
+ }
+ delete_insn (insn);
+ }
else
a_occr->deleted_p = 1;
EXIT_BLOCK_PTR,
next_bb)
{
+ /* Don't try anything on basic blocks with strange predecessors. */
if (! bb_has_well_behaved_predecessors (bb))
continue;
- /* Do not try this optimization on cold basic blocks. */
- if (probably_cold_bb_p (bb))
+ /* Do not try anything on cold basic blocks. */
+ if (optimize_bb_for_size_p (bb))
continue;
+ /* Reset the table of things changed since the start of the current
+ basic block. */
reset_opr_set_tables ();
+ /* Look at all insns in the current basic block and see if there are
+ any loads in it that we can record. */
FOR_BB_INSNS (bb, insn)
{
/* Is it a load - of the form (set (reg) (mem))? */
}
}
- /* Keep track of everything modified by this insn. */
+ /* Keep track of everything modified by this insn, so that we
+ know what has been modified since the start of the current
+ basic block. */
if (INSN_P (insn))
- mark_oprs_set (insn);
+ record_opr_changes (insn);
}
}
for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
{
- if (occr->deleted_p)
+ if (occr->deleted_p && dbg_cnt (gcse2_delete))
{
delete_insn (occr->insn);
stats.insns_deleted++;
/* Main entry point of the GCSE after reload - clean some redundant loads
due to spilling. */
-void
+static void
gcse_after_reload_main (rtx f ATTRIBUTE_UNUSED)
{
+
memset (&stats, 0, sizeof (stats));
- /* Allocate ememory for this pass.
+ /* Allocate memory for this pass.
Also computes and initializes the insns' CUIDs. */
alloc_mem ();
fprintf (dump_file, "\n\n");
}
}
-
+
/* We are finished with alias. */
end_alias_analysis ();
free_mem ();
}
+\f
+static bool
+gate_handle_gcse2 (void)
+{
+ return (optimize > 0 && flag_gcse_after_reload
+ && optimize_function_for_speed_p (cfun));
+}
+
+
+static unsigned int
+rest_of_handle_gcse2 (void)
+{
+ gcse_after_reload_main (get_insns ());
+ rebuild_jump_labels (get_insns ());
+ return 0;
+}
+
+struct rtl_opt_pass pass_gcse2 =
+{
+ {
+ RTL_PASS,
+ "gcse2", /* name */
+ gate_handle_gcse2, /* gate */
+ rest_of_handle_gcse2, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_GCSE_AFTER_RELOAD, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func | TODO_verify_rtl_sharing
+ | TODO_verify_flow | TODO_ggc_collect /* todo_flags_finish */
+ }
+};
+