You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA. */
/* TODO
- reordering of memory allocation and freeing to be more space efficient
#include "intl.h"
#include "obstack.h"
#include "timevar.h"
+#include "tree-pass.h"
+#include "hashtab.h"
/* Propagate flow information through back edges and thus enable PRE's
moving loop invariant calculations out of loops.
\f
/* GCSE global vars. */
-/* -dG dump file. */
-static FILE *gcse_file;
-
/* Note whether or not we should run jump optimization after gcse. We
want to do this for two cases.
* If we added any labels via edge splitting. */
static int run_jump_opt_after_gcse;
-/* Bitmaps are normally not included in debugging dumps.
- However it's useful to be able to print them from GDB.
- We could create special functions for this, but it's simpler to
- just allow passing stderr to the dump_foo fns. Since stderr can
- be a macro, we store a copy here. */
-static FILE *debug_stderr;
-
/* An obstack for our working variables. */
static struct obstack gcse_obstack;
/* Head of the list of load/store memory refs. */
static struct ls_expr * pre_ldst_mems = NULL;
+/* Hashtable for the load/store memory refs. */
+static htab_t pre_ldst_table = NULL;
+
/* Bitmap containing one bit for each register in the program.
Used when performing GCSE to track which registers have been set since
the start of the basic block. */
static int gcse_create_count;
/* Number of local constants propagated. */
static int local_const_prop_count;
-/* Number of local copys propagated. */
+/* Number of local copies propagated. */
static int local_copy_prop_count;
/* Number of global constants propagated. */
static int global_const_prop_count;
-/* Number of global copys propagated. */
+/* Number of global copies propagated. */
static int global_copy_prop_count;
\f
/* For available exprs */
static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
static void *grealloc (void *, size_t);
static void *gcse_alloc (unsigned long);
-static void alloc_gcse_mem (rtx);
+static void alloc_gcse_mem (void);
static void free_gcse_mem (void);
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static void record_one_set (int, rtx);
static void record_set_info (rtx, rtx, void *);
-static void compute_sets (rtx);
+static void compute_sets (void);
static void hash_scan_insn (rtx, struct hash_table *, int);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
-static int one_cprop_pass (int, int, int);
-static bool constprop_register (rtx, rtx, rtx, int);
+static int one_cprop_pass (int, bool, bool);
+static bool constprop_register (rtx, rtx, rtx, bool);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (rtx, edge);
static int bypass_block (basic_block, rtx, rtx);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
-static bool do_local_cprop (rtx, rtx, int, rtx*);
+static bool do_local_cprop (rtx, rtx, bool, rtx*);
static bool adjust_libcall_notes (rtx, rtx, rtx, rtx*);
-static void local_cprop_pass (int);
+static void local_cprop_pass (bool);
static bool is_too_expensive (const char *);
\f
F is the first instruction in the function. Return nonzero if a
change is made. */
-int
-gcse_main (rtx f, FILE *file)
+static int
+gcse_main (rtx f ATTRIBUTE_UNUSED)
{
int changed, pass;
/* Bytes used at start of pass. */
/* Assume that we do not need to run jump optimizations after gcse. */
run_jump_opt_after_gcse = 0;
- /* For calling dump_foo fns from gdb. */
- debug_stderr = stderr;
- gcse_file = file;
-
/* Identify the basic block information for this function, including
successors and predecessors. */
max_gcse_regno = max_reg_num ();
- if (file)
- dump_flow_info (file);
+ if (dump_file)
+ dump_flow_info (dump_file, dump_flags);
/* Return if there's nothing to do, or it is too expensive. */
- if (n_basic_blocks <= 1 || is_too_expensive (_("GCSE disabled")))
+ if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1
+ || is_too_expensive (_("GCSE disabled")))
return 0;
gcc_obstack_init (&gcse_obstack);
information about memory sets when we build the hash tables. */
alloc_reg_set_mem (max_gcse_regno);
- compute_sets (f);
+ compute_sets ();
pass = 0;
initial_bytes_used = bytes_used;
while (changed && pass < MAX_GCSE_PASSES)
{
changed = 0;
- if (file)
- fprintf (file, "GCSE pass %d\n\n", pass + 1);
+ if (dump_file)
+ fprintf (dump_file, "GCSE pass %d\n\n", pass + 1);
/* Initialize bytes_used to the space for the pred/succ lists,
and the reg_set_table data. */
/* Each pass may create new registers, so recalculate each time. */
max_gcse_regno = max_reg_num ();
- alloc_gcse_mem (f);
+ alloc_gcse_mem ();
/* Don't allow constant propagation to modify jumps
during this pass. */
timevar_push (TV_CPROP1);
- changed = one_cprop_pass (pass + 1, 0, 0);
+ changed = one_cprop_pass (pass + 1, false, false);
timevar_pop (TV_CPROP1);
if (optimize_size)
}
free_reg_set_mem ();
alloc_reg_set_mem (max_reg_num ());
- compute_sets (f);
+ compute_sets ();
run_jump_opt_after_gcse = 1;
timevar_pop (TV_PRE);
}
{
timevar_push (TV_HOIST);
max_gcse_regno = max_reg_num ();
- alloc_gcse_mem (f);
+ alloc_gcse_mem ();
changed |= one_code_hoisting_pass ();
free_gcse_mem ();
timevar_pop (TV_HOIST);
}
- if (file)
+ if (dump_file)
{
- fprintf (file, "\n");
- fflush (file);
+ fprintf (dump_file, "\n");
+ fflush (dump_file);
}
obstack_free (&gcse_obstack, gcse_obstack_bottom);
conditional jumps. */
max_gcse_regno = max_reg_num ();
- alloc_gcse_mem (f);
+ alloc_gcse_mem ();
/* This time, go ahead and allow cprop to alter jumps. */
timevar_push (TV_CPROP2);
- one_cprop_pass (pass + 1, 1, 0);
+ one_cprop_pass (pass + 1, true, false);
timevar_pop (TV_CPROP2);
free_gcse_mem ();
- if (file)
+ if (dump_file)
{
- fprintf (file, "GCSE of %s: %d basic blocks, ",
+ fprintf (dump_file, "GCSE of %s: %d basic blocks, ",
current_function_name (), n_basic_blocks);
- fprintf (file, "%d pass%s, %d bytes\n\n",
+ fprintf (dump_file, "%d pass%s, %d bytes\n\n",
pass, pass > 1 ? "es" : "", max_pass_bytes);
}
This is called at the start of each pass. */
static void
-alloc_gcse_mem (rtx f)
+alloc_gcse_mem (void)
{
int i;
+ basic_block bb;
rtx insn;
/* Find the largest UID and create a mapping from UIDs to CUIDs.
CUIDs are like UIDs except they increase monotonically, have no gaps,
- and only apply to real insns. */
+ and only apply to real insns.
+ (Actually, there are gaps, for insns that are not inside a basic block,
+ but we should never see those anyway, so this is OK.) */
max_uid = get_max_uid ();
uid_cuid = gcalloc (max_uid + 1, sizeof (int));
- for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
- {
- if (INSN_P (insn))
- uid_cuid[INSN_UID (insn)] = i++;
- else
- uid_cuid[INSN_UID (insn)] = i;
- }
+ i = 0;
+ FOR_EACH_BB (bb)
+ FOR_BB_INSNS (bb, insn)
+ {
+ if (INSN_P (insn))
+ uid_cuid[INSN_UID (insn)] = i++;
+ else
+ uid_cuid[INSN_UID (insn)] = i;
+ }
/* Create a table mapping cuids to insns. */
max_cuid = i;
cuid_insn = gcalloc (max_cuid + 1, sizeof (rtx));
- for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
- if (INSN_P (insn))
- CUID_INSN (i++) = insn;
+ i = 0;
+ FOR_EACH_BB (bb)
+ FOR_BB_INSNS (bb, insn)
+ if (INSN_P (insn))
+ CUID_INSN (i++) = insn;
/* Allocate vars to track sets of regs. */
reg_set_bitmap = BITMAP_ALLOC (NULL);
`reg_set_table' for further documentation. */
static void
-compute_sets (rtx f)
+compute_sets (void)
{
+ basic_block bb;
rtx insn;
- for (insn = f; insn != 0; insn = NEXT_INSN (insn))
- if (INSN_P (insn))
- note_stores (PATTERN (insn), record_set_info, insn);
+ FOR_EACH_BB (bb)
+ FOR_BB_INSNS (bb, insn)
+ if (INSN_P (insn))
+ note_stores (PATTERN (insn), record_set_info, insn);
}
\f
/* Hash table support. */
static int
want_to_gcse_p (rtx x)
{
+#ifdef STACK_REGS
+ /* On register stack architectures, don't GCSE constants from the
+ constant pool, as the benefits are often swamped by the overhead
+ of shuffling the register stack between basic blocks. */
+ if (IS_STACK_MODE (GET_MODE (x)))
+ x = avoid_constant_pool_reference (x);
+#endif
+
switch (GET_CODE (x))
{
case REG:
load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
{
rtx list_entry = modify_mem_list[bb->index];
+
+ /* If this MEM is read-only, then we aren't going to be changing it. */
+ if (MEM_READONLY_P (x))
+ return 0;
+
while (list_entry)
{
rtx setter;
unsigned int regno = REGNO (dest);
rtx tmp;
- /* If this is a single set and we are doing constant propagation,
- see if a REG_NOTE shows this equivalent to a constant. */
- if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
- && gcse_constant_p (XEXP (note, 0)))
+ /* See if a REG_NOTE shows this equivalent to a simpler expression.
+ This allows us to do a single GCSE pass and still eliminate
+ redundant constants, addresses or other expressions that are
+ constructed with multiple instructions. */
+ note = find_reg_equal_equiv_note (insn);
+ if (note != 0
+ && (table->set_p
+ ? gcse_constant_p (XEXP (note, 0))
+ : want_to_gcse_p (XEXP (note, 0))))
src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
/* Only record sets of pseudo-regs in the hash table. */
REG_EQUIV notes and if the argument slot is used somewhere
explicitly, it means address of parameter has been taken,
so we should not extend the lifetime of the pseudo. */
- && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
- || ! MEM_P (XEXP (note, 0))))
+ && (note == NULL_RTX || ! MEM_P (XEXP (note, 0))))
{
/* An expression is not anticipatable if its operands are
modified before this insn or if this is not the only SET in
??? hard-reg reg_set_in_block computation
could be moved to compute_sets since they currently don't change. */
- for (insn = BB_HEAD (current_bb);
- insn && insn != NEXT_INSN (BB_END (current_bb));
- insn = NEXT_INSN (insn))
+ FOR_BB_INSNS (current_bb, insn)
{
if (! INSN_P (insn))
continue;
BB_HEAD (current_bb), table);
/* The next pass builds the hash table. */
-
- for (insn = BB_HEAD (current_bb), in_libcall_block = 0;
- insn && insn != NEXT_INSN (BB_END (current_bb));
- insn = NEXT_INSN (insn))
+ in_libcall_block = 0;
+ FOR_BB_INSNS (current_bb, insn)
if (INSN_P (insn))
{
if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
return;
case MEM:
- {
- bitmap_iterator bi;
- unsigned bb_index;
-
- /* First handle all the blocks with calls. We don't need to
- do any list walking for them. */
- EXECUTE_IF_SET_IN_BITMAP (blocks_with_calls, 0, bb_index, bi)
- {
- if (set_p)
- SET_BIT (bmap[bb_index], indx);
- else
- RESET_BIT (bmap[bb_index], indx);
- }
+ if (! MEM_READONLY_P (x))
+ {
+ bitmap_iterator bi;
+ unsigned bb_index;
- /* Now iterate over the blocks which have memory modifications
- but which do not have any calls. */
- EXECUTE_IF_AND_COMPL_IN_BITMAP (modify_mem_list_set, blocks_with_calls,
- 0, bb_index, bi)
- {
- rtx list_entry = canon_modify_mem_list[bb_index];
+ /* First handle all the blocks with calls. We don't need to
+ do any list walking for them. */
+ EXECUTE_IF_SET_IN_BITMAP (blocks_with_calls, 0, bb_index, bi)
+ {
+ if (set_p)
+ SET_BIT (bmap[bb_index], indx);
+ else
+ RESET_BIT (bmap[bb_index], indx);
+ }
- while (list_entry)
+ /* Now iterate over the blocks which have memory modifications
+ but which do not have any calls. */
+ EXECUTE_IF_AND_COMPL_IN_BITMAP (modify_mem_list_set,
+ blocks_with_calls,
+ 0, bb_index, bi)
{
- rtx dest, dest_addr;
+ rtx list_entry = canon_modify_mem_list[bb_index];
- /* LIST_ENTRY must be an INSN of some kind that sets memory.
- Examine each hunk of memory that is modified. */
+ while (list_entry)
+ {
+ rtx dest, dest_addr;
- dest = XEXP (list_entry, 0);
- list_entry = XEXP (list_entry, 1);
- dest_addr = XEXP (list_entry, 0);
+ /* LIST_ENTRY must be an INSN of some kind that sets memory.
+ Examine each hunk of memory that is modified. */
- if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
- x, rtx_addr_varies_p))
- {
- if (set_p)
- SET_BIT (bmap[bb_index], indx);
- else
- RESET_BIT (bmap[bb_index], indx);
- break;
- }
- list_entry = XEXP (list_entry, 1);
+ dest = XEXP (list_entry, 0);
+ list_entry = XEXP (list_entry, 1);
+ dest_addr = XEXP (list_entry, 0);
+
+ if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
+ x, rtx_addr_varies_p))
+ {
+ if (set_p)
+ SET_BIT (bmap[bb_index], indx);
+ else
+ RESET_BIT (bmap[bb_index], indx);
+ break;
+ }
+ list_entry = XEXP (list_entry, 1);
+ }
}
- }
- }
+ }
x = XEXP (x, 0);
goto repeat;
int success = 0;
rtx set = single_set (insn);
+ /* Usually we substitute easy stuff, so we won't copy everything.
+ We however need to take care to not duplicate non-trivial CONST
+ expressions. */
+ to = copy_rtx (to);
+
validate_replace_src_group (from, to, insn);
if (num_changes_pending () && apply_change_group ())
success = 1;
validate_change (insn, &SET_SRC (set), src, 0);
}
- /* If there is already a NOTE, update the expression in it with our
- replacement. */
- if (note != 0)
+ /* If there is already a REG_EQUAL note, update the expression in it
+ with our replacement. */
+ if (note != 0 && REG_NOTE_KIND (note) == REG_EQUAL)
XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
have a note, and have no special SET, add a REG_EQUAL note to not
lose information. */
if (!success && note == 0 && set != 0
- && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT)
+ && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
+ && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
}
We don't allow that. Remove that note. This code ought
not to happen, because previous code ought to synthesize
reg-reg move, but be on the safe side. */
- if (note && REG_P (XEXP (note, 0)))
+ if (note && REG_NOTE_KIND (note) == REG_EQUAL && REG_P (XEXP (note, 0)))
remove_note (insn, note);
return success;
run_jump_opt_after_gcse = 1;
global_const_prop_count++;
- if (gcse_file != NULL)
+ if (dump_file != NULL)
{
- fprintf (gcse_file,
+ fprintf (dump_file,
"GLOBAL CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
REGNO (from), INSN_UID (jump));
- print_rtl (gcse_file, src);
- fprintf (gcse_file, "\n");
+ print_rtl (dump_file, src);
+ fprintf (dump_file, "\n");
}
purge_dead_edges (bb);
}
static bool
-constprop_register (rtx insn, rtx from, rtx to, int alter_jumps)
+constprop_register (rtx insn, rtx from, rtx to, bool alter_jumps)
{
rtx sset;
{
changed = 1;
global_const_prop_count++;
- if (gcse_file != NULL)
+ if (dump_file != NULL)
{
- fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
- fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
- print_rtl (gcse_file, src);
- fprintf (gcse_file, "\n");
+ fprintf (dump_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
+ fprintf (dump_file, "insn %d with constant ", INSN_UID (insn));
+ print_rtl (dump_file, src);
+ fprintf (dump_file, "\n");
}
if (INSN_DELETED_P (insn))
return 1;
{
changed = 1;
global_copy_prop_count++;
- if (gcse_file != NULL)
+ if (dump_file != NULL)
{
- fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
+ fprintf (dump_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
regno, INSN_UID (insn));
- fprintf (gcse_file, " with reg %d\n", REGNO (src));
+ fprintf (dump_file, " with reg %d\n", REGNO (src));
}
/* The original insn setting reg_used may or may not now be
their REG_EQUAL notes need updating. */
static bool
-do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp)
+do_local_cprop (rtx x, rtx insn, bool alter_jumps, rtx *libcall_sp)
{
rtx newreg = NULL, newcnst = NULL;
adjusted = adjust_libcall_notes (x, newcnst, insn, libcall_sp);
gcc_assert (adjusted);
- if (gcse_file != NULL)
+ if (dump_file != NULL)
{
- fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
+ fprintf (dump_file, "LOCAL CONST-PROP: Replacing reg %d in ",
REGNO (x));
- fprintf (gcse_file, "insn %d with constant ",
+ fprintf (dump_file, "insn %d with constant ",
INSN_UID (insn));
- print_rtl (gcse_file, newcnst);
- fprintf (gcse_file, "\n");
+ print_rtl (dump_file, newcnst);
+ fprintf (dump_file, "\n");
}
local_const_prop_count++;
return true;
else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
{
adjust_libcall_notes (x, newreg, insn, libcall_sp);
- if (gcse_file != NULL)
+ if (dump_file != NULL)
{
- fprintf (gcse_file,
+ fprintf (dump_file,
"LOCAL COPY-PROP: Replacing reg %d in insn %d",
REGNO (x), INSN_UID (insn));
- fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
+ fprintf (dump_file, " with reg %d\n", REGNO (newreg));
}
local_copy_prop_count++;
return true;
#define MAX_NESTED_LIBCALLS 9
+/* Do local const/copy propagation (i.e. within each basic block).
+ If ALTER_JUMPS is true, allow propagating into jump insns, which
+ could modify the CFG. */
+
static void
-local_cprop_pass (int alter_jumps)
+local_cprop_pass (bool alter_jumps)
{
+ basic_block bb;
rtx insn;
struct reg_use *reg_used;
rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
cselib_init (false);
libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
*libcall_sp = 0;
- for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
+ FOR_EACH_BB (bb)
{
- if (INSN_P (insn))
+ FOR_BB_INSNS (bb, insn)
{
- rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
-
- if (note)
- {
- gcc_assert (libcall_sp != libcall_stack);
- *--libcall_sp = XEXP (note, 0);
- }
- note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
- if (note)
- libcall_sp++;
- note = find_reg_equal_equiv_note (insn);
- do
+ if (INSN_P (insn))
{
- reg_use_count = 0;
- note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
- if (note)
- local_cprop_find_used_regs (&XEXP (note, 0), NULL);
+ rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
- for (reg_used = ®_use_table[0]; reg_use_count > 0;
- reg_used++, reg_use_count--)
- if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
- libcall_sp))
- {
- changed = true;
+ if (note)
+ {
+ gcc_assert (libcall_sp != libcall_stack);
+ *--libcall_sp = XEXP (note, 0);
+ }
+ note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
+ if (note)
+ libcall_sp++;
+ note = find_reg_equal_equiv_note (insn);
+ do
+ {
+ reg_use_count = 0;
+ note_uses (&PATTERN (insn), local_cprop_find_used_regs,
+ NULL);
+ if (note)
+ local_cprop_find_used_regs (&XEXP (note, 0), NULL);
+
+ for (reg_used = ®_use_table[0]; reg_use_count > 0;
+ reg_used++, reg_use_count--)
+ if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
+ libcall_sp))
+ {
+ changed = true;
+ break;
+ }
+ if (INSN_DELETED_P (insn))
break;
- }
- if (INSN_DELETED_P (insn))
- break;
+ }
+ while (reg_use_count);
}
- while (reg_use_count);
+ cselib_process_insn (insn);
}
- cselib_process_insn (insn);
+
+ /* Forget everything at the end of a basic block. Make sure we are
+ not inside a libcall, they should never cross basic blocks. */
+ cselib_clear_table ();
+ gcc_assert (libcall_sp == &libcall_stack[MAX_NESTED_LIBCALLS]);
}
+
cselib_finish ();
+
/* Global analysis may get into infinite loops for unreachable blocks. */
if (changed && alter_jumps)
{
delete_unreachable_blocks ();
free_reg_set_mem ();
alloc_reg_set_mem (max_reg_num ());
- compute_sets (get_insns ());
+ compute_sets ();
}
}
/* Note we start at block 1. */
if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
{
- if (gcse_file != NULL)
- fprintf (gcse_file, "\n");
+ if (dump_file != NULL)
+ fprintf (dump_file, "\n");
return 0;
}
start of the block]. */
reset_opr_set_tables ();
- for (insn = BB_HEAD (bb);
- insn != NULL && insn != NEXT_INSN (BB_END (bb));
- insn = NEXT_INSN (insn))
+ FOR_BB_INSNS (bb, insn)
if (INSN_P (insn))
{
changed |= cprop_insn (insn, alter_jumps);
}
}
- if (gcse_file != NULL)
- fprintf (gcse_file, "\n");
+ if (dump_file != NULL)
+ fprintf (dump_file, "\n");
return changed;
}
settle for the condition variable in the jump instruction being integral.
We prefer to be able to record the value of a user variable, rather than
the value of a temporary used in a condition. This could be solved by
- recording the value of *every* register scaned by canonicalize_condition,
+ recording the value of *every* register scanned by canonicalize_condition,
but this would require some code reorganization. */
rtx
dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
: FALLTHRU_EDGE (bb)->dest;
- if (dest && EDGE_COUNT (dest->preds) == 1
+ if (dest && single_pred_p (dest)
&& dest != EXIT_BLOCK_PTR)
{
new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
XEXP (cond, 1));
implicit_sets[dest->index] = new;
- if (gcse_file)
+ if (dump_file)
{
- fprintf(gcse_file, "Implicit set of reg %d in ",
+ fprintf(dump_file, "Implicit set of reg %d in ",
REGNO (XEXP (cond, 0)));
- fprintf(gcse_file, "basic block %d\n", dest->index);
+ fprintf(dump_file, "basic block %d\n", dest->index);
}
count++;
}
}
}
- if (gcse_file)
- fprintf (gcse_file, "Found %d implicit sets\n", count);
+ if (dump_file)
+ fprintf (dump_file, "Found %d implicit sets\n", count);
}
/* Perform one copy/constant propagation pass.
perform conditional jump bypassing optimizations. */
static int
-one_cprop_pass (int pass, int cprop_jumps, int bypass_jumps)
+one_cprop_pass (int pass, bool cprop_jumps, bool bypass_jumps)
{
int changed = 0;
global_const_prop_count = local_const_prop_count = 0;
global_copy_prop_count = local_copy_prop_count = 0;
- local_cprop_pass (cprop_jumps);
+ if (cprop_jumps)
+ local_cprop_pass (cprop_jumps);
/* Determine implicit sets. */
- implicit_sets = xcalloc (last_basic_block, sizeof (rtx));
+ implicit_sets = XCNEWVEC (rtx, last_basic_block);
find_implicit_sets ();
alloc_hash_table (max_cuid, &set_hash_table, 1);
free (implicit_sets);
implicit_sets = NULL;
- if (gcse_file)
- dump_hash_table (gcse_file, "SET", &set_hash_table);
+ if (dump_file)
+ dump_hash_table (dump_file, "SET", &set_hash_table);
if (set_hash_table.n_elems > 0)
{
alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
free_hash_table (&set_hash_table);
- if (gcse_file)
+ if (dump_file)
{
- fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
+ fprintf (dump_file, "CPROP of %s, pass %d: %d bytes needed, ",
current_function_name (), pass, bytes_used);
- fprintf (gcse_file, "%d local const props, %d local copy props\n\n",
+ fprintf (dump_file, "%d local const props, %d local copy props, ",
local_const_prop_count, local_copy_prop_count);
- fprintf (gcse_file, "%d global const props, %d global copy props\n\n",
+ fprintf (dump_file, "%d global const props, %d global copy props\n\n",
global_const_prop_count, global_copy_prop_count);
}
/* Global analysis may get into infinite loops for unreachable blocks. */
}
else if (GET_CODE (new) == LABEL_REF)
{
- edge_iterator ei2;
-
dest = BLOCK_FOR_INSN (XEXP (new, 0));
/* Don't bypass edges containing instructions. */
- FOR_EACH_EDGE (edest, ei2, bb->succs)
- if (edest->dest == dest && edest->insns.r)
- {
- dest = NULL;
- break;
- }
+ edest = find_edge (bb, dest);
+ if (edest && edest->insns.r)
+ dest = NULL;
}
else
dest = NULL;
branch. We would end up emitting the instruction on "both"
edges. */
- if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc))))
- {
- edge e2;
- edge_iterator ei2;
-
- FOR_EACH_EDGE (e2, ei2, e->src->succs)
- if (e2->dest == dest)
- {
- dest = NULL;
- break;
- }
- }
+ if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc)))
+ && find_edge (e->src, dest))
+ dest = NULL;
old_dest = e->dest;
if (dest != NULL
insert_insn_on_edge (copy_insn (pat), e);
}
- if (gcse_file != NULL)
+ if (dump_file != NULL)
{
- fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d "
+ fprintf (dump_file, "JUMP-BYPASS: Proved reg %d "
"in jump_insn %d equals constant ",
regno, INSN_UID (jump));
- print_rtl (gcse_file, SET_SRC (set->expr));
- fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
+ print_rtl (dump_file, SET_SRC (set->expr));
+ fprintf (dump_file, "\nBypass edge from %d->%d to %d\n",
e->src->index, old_dest->index, dest->index);
}
change = 1;
EXIT_BLOCK_PTR, next_bb)
{
/* Check for more than one predecessor. */
- if (EDGE_COUNT (bb->preds) > 1)
+ if (!single_pred_p (bb))
{
setcc = NULL_RTX;
- for (insn = BB_HEAD (bb);
- insn != NULL && insn != NEXT_INSN (BB_END (bb));
- insn = NEXT_INSN (insn))
+ FOR_BB_INSNS (bb, insn)
if (NONJUMP_INSN_P (insn))
{
if (setcc)
sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
}
- edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
+ edge_list = pre_edge_lcm (expr_hash_table.n_elems, transp, comp, antloc,
ae_kill, &pre_insert_map, &pre_delete_map);
sbitmap_vector_free (antloc);
antloc = NULL;
pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
{
int rval;
- char *visited = xcalloc (last_basic_block, 1);
+ char *visited = XCNEWVEC (char, last_basic_block);
rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
if (JUMP_P (insn)
|| (NONJUMP_INSN_P (insn)
- && (EDGE_COUNT (bb->succs) > 1
- || EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL)))
+ && (!single_succ_p (bb)
+ || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
{
#ifdef HAVE_cc0
rtx note;
/* Likewise if the last insn is a call, as will happen in the presence
of exception handling. */
else if (CALL_P (insn)
- && (EDGE_COUNT (bb->succs) > 1 || EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL))
+ && (!single_succ_p (bb)
+ || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
{
/* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
we search backward and place the instructions before the first
gcse_create_count++;
- if (gcse_file)
+ if (dump_file)
{
- fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
+ fprintf (dump_file, "PRE/HOIST: end of bb %d, insn %d, ",
bb->index, INSN_UID (new_insn));
- fprintf (gcse_file, "copying expression %d to reg %d\n",
+ fprintf (dump_file, "copying expression %d to reg %d\n",
expr->bitmap_index, regno);
}
}
if (! occr->deleted_p)
continue;
- /* Insert this expression on this edge if if it would
+ /* Insert this expression on this edge if it would
reach the deleted occurrence in BB. */
if (!TEST_BIT (inserted[e], j))
{
insert_insn_on_edge (insn, eg);
}
- if (gcse_file)
+ if (dump_file)
{
- fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
+ fprintf (dump_file, "PRE/HOIST: edge (%d,%d), ",
bb->index,
INDEX_EDGE_SUCC_BB (edge_list, e)->index);
- fprintf (gcse_file, "copy expression %d\n",
+ fprintf (dump_file, "copy expression %d\n",
expr->bitmap_index);
}
int regno = REGNO (reg);
int indx = expr->bitmap_index;
rtx pat = PATTERN (insn);
- rtx set, new_insn;
+ rtx set, first_set, new_insn;
rtx old_reg;
int i;
case PARALLEL:
/* Search through the parallel looking for the set whose
source was the expression that we're interested in. */
+ first_set = NULL_RTX;
set = NULL_RTX;
for (i = 0; i < XVECLEN (pat, 0); i++)
{
rtx x = XVECEXP (pat, 0, i);
- if (GET_CODE (x) == SET
- && expr_equiv_p (SET_SRC (x), expr->expr))
+ if (GET_CODE (x) == SET)
{
- set = x;
- break;
+ /* If the source was a REG_EQUAL or REG_EQUIV note, we
+ may not find an equivalent expression, but in this
+ case the PARALLEL will have a single set. */
+ if (first_set == NULL_RTX)
+ first_set = x;
+ if (expr_equiv_p (SET_SRC (x), expr->expr))
+ {
+ set = x;
+ break;
+ }
}
}
+
+ gcc_assert (first_set);
+ if (set == NULL_RTX)
+ set = first_set;
break;
default:
gcse_create_count++;
- if (gcse_file)
- fprintf (gcse_file,
+ if (dump_file)
+ fprintf (dump_file,
"PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
BLOCK_NUM (insn), INSN_UID (new_insn), indx,
INSN_UID (insn), regno);
changed = 1;
gcse_subst_count++;
- if (gcse_file)
+ if (dump_file)
{
- fprintf (gcse_file,
+ fprintf (dump_file,
"PRE: redundant insn %d (expression %d) in ",
INSN_UID (insn), indx);
- fprintf (gcse_file, "bb %d, reaching reg is %d\n",
+ fprintf (dump_file, "bb %d, reaching reg is %d\n",
bb->index, REGNO (expr->reaching_reg));
}
}
/* Compute a mapping from expression number (`bitmap_index') to
hash table entry. */
- index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
+ index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
for (i = 0; i < expr_hash_table.size; i++)
for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
index_map[expr->bitmap_index] = expr;
compute_hash_table (&expr_hash_table);
trim_ld_motion_mems ();
- if (gcse_file)
- dump_hash_table (gcse_file, "Expression", &expr_hash_table);
+ if (dump_file)
+ dump_hash_table (dump_file, "Expression", &expr_hash_table);
if (expr_hash_table.n_elems > 0)
{
remove_fake_exit_edges ();
free_hash_table (&expr_hash_table);
- if (gcse_file)
+ if (dump_file)
{
- fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
+ fprintf (dump_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
current_function_name (), pass, bytes_used);
- fprintf (gcse_file, "%d substs, %d insns created\n",
+ fprintf (dump_file, "%d substs, %d insns created\n",
gcse_subst_count, gcse_create_count);
}
LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
because the following loop optimization pass requires them. */
-/* ??? This is very similar to the loop.c add_label_notes function. We
- could probably share code here. */
-
/* ??? If there was a jump optimization pass after gcse and before loop,
then we would not need to do this here, because jump would add the
necessary REG_LABEL notes. */
passes++;
}
- if (gcse_file)
- fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
+ if (dump_file)
+ fprintf (dump_file, "hoisting vbeinout computation: %d passes\n", passes);
}
/* Top level routine to do the dataflow analysis needed by code hoisting. */
compute_transpout ();
compute_code_hoist_vbeinout ();
calculate_dominance_info (CDI_DOMINATORS);
- if (gcse_file)
- fprintf (gcse_file, "\n");
+ if (dump_file)
+ fprintf (dump_file, "\n");
}
/* Determine if the expression identified by EXPR_INDEX would
if (visited == NULL)
{
visited_allocated_locally = 1;
- visited = xcalloc (last_basic_block, 1);
+ visited = XCNEWVEC (char, last_basic_block);
}
FOR_EACH_EDGE (pred, ei, bb->preds)
/* Compute a mapping from expression number (`bitmap_index') to
hash table entry. */
- index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
+ index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
for (i = 0; i < expr_hash_table.size; i++)
for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
index_map[expr->bitmap_index] = expr;
insn_inserted_p = 0;
/* These tests should be the same as the tests above. */
- if (TEST_BIT (hoist_vbeout[bb->index], i))
+ if (TEST_BIT (hoist_exprs[bb->index], i))
{
/* We've found a potentially hoistable expression, now
we look at every block BB dominates to see if it
alloc_hash_table (max_cuid, &expr_hash_table, 0);
compute_hash_table (&expr_hash_table);
- if (gcse_file)
- dump_hash_table (gcse_file, "Code Hosting Expressions", &expr_hash_table);
+ if (dump_file)
+ dump_hash_table (dump_file, "Code Hosting Expressions", &expr_hash_table);
if (expr_hash_table.n_elems > 0)
{
load towards the exit, and we end up with no loads or stores of 'i'
in the loop. */
+/* Hash function for the load/store memory ref hash table: hash the
+   RTL pattern of the ls_expr pointed to by P.  The do_not_record_p
+   flag required by hash_rtx is discarded, since every entry in the
+   table must receive some hash value.  */
+static hashval_t
+pre_ldst_expr_hash (const void *p)
+{
+ int do_not_record_p = 0;
+ const struct ls_expr *x = p;
+ return hash_rtx (x->pattern, GET_MODE (x->pattern), &do_not_record_p, NULL, false);
+}
+
+
+/* Equality predicate for the load/store memory ref hash table: two
+   entries P1 and P2 are equal iff their RTL patterns are equivalent
+   expressions according to expr_equiv_p.  */
+static int
+pre_ldst_expr_eq (const void *p1, const void *p2)
+{
+ const struct ls_expr *ptr1 = p1, *ptr2 = p2;
+ return expr_equiv_p (ptr1->pattern, ptr2->pattern);
+}
+
/* This will search the ldst list for a matching expression. If it
doesn't find one, we create one and initialize it. */
int do_not_record_p = 0;
struct ls_expr * ptr;
unsigned int hash;
+ void **slot;
+ struct ls_expr e;
hash = hash_rtx (x, GET_MODE (x), &do_not_record_p,
NULL, /*have_reg_qty=*/false);
- for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
- if (ptr->hash_index == hash && expr_equiv_p (ptr->pattern, x))
- return ptr;
+ e.pattern = x;
+ slot = htab_find_slot_with_hash (pre_ldst_table, &e, hash, INSERT);
+ if (*slot)
+ return (struct ls_expr *)*slot;
- ptr = xmalloc (sizeof (struct ls_expr));
+ ptr = XNEW (struct ls_expr);
ptr->next = pre_ldst_mems;
ptr->expr = NULL;
ptr->index = 0;
ptr->hash_index = hash;
pre_ldst_mems = ptr;
+ *slot = ptr;
return ptr;
}
static void
free_ldst_mems (void)
{
+ if (pre_ldst_table)
+ htab_delete (pre_ldst_table);
+ pre_ldst_table = NULL;
+
while (pre_ldst_mems)
{
struct ls_expr * tmp = pre_ldst_mems;
static struct ls_expr *
find_rtx_in_ldst (rtx x)
{
- struct ls_expr * ptr;
-
- for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
- if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
- return ptr;
-
- return NULL;
+ struct ls_expr e;
+ void **slot;
+ if (!pre_ldst_table)
+ return NULL;
+ e.pattern = x;
+ slot = htab_find_slot (pre_ldst_table, &e, NO_INSERT);
+ if (!slot || ((struct ls_expr *)*slot)->invalid)
+ return NULL;
+ return *slot;
}
/* Assign each element of the list of mems a monotonically increasing value. */
rtx insn;
pre_ldst_mems = NULL;
+ pre_ldst_table = htab_create (13, pre_ldst_expr_hash,
+ pre_ldst_expr_eq, NULL);
FOR_EACH_BB (bb)
{
- for (insn = BB_HEAD (bb);
- insn && insn != NEXT_INSN (BB_END (bb));
- insn = NEXT_INSN (insn))
+ FOR_BB_INSNS (bb, insn)
{
if (INSN_P (insn))
{
else
{
*last = ptr->next;
+ htab_remove_elt_with_hash (pre_ldst_table, ptr, ptr->hash_index);
free_ldst_entry (ptr);
ptr = * last;
}
}
/* Show the world what we've found. */
- if (gcse_file && pre_ldst_mems != NULL)
- print_ldst_list (gcse_file);
+ if (dump_file && pre_ldst_mems != NULL)
+ print_ldst_list (dump_file);
}
/* This routine will take an expression which we are replacing with
if (expr->reaching_reg == src)
continue;
- if (gcse_file)
+ if (dump_file)
{
- fprintf (gcse_file, "PRE: store updated with reaching reg ");
- print_rtl (gcse_file, expr->reaching_reg);
- fprintf (gcse_file, ":\n ");
- print_inline_rtx (gcse_file, insn, 8);
- fprintf (gcse_file, "\n");
+ fprintf (dump_file, "PRE: store updated with reaching reg ");
+ print_rtl (dump_file, expr->reaching_reg);
+ fprintf (dump_file, ":\n ");
+ print_inline_rtx (dump_file, insn, 8);
+ fprintf (dump_file, "\n");
}
copy = gen_move_insn ( reg, copy_rtx (SET_SRC (pat)));
if (find_reg_note (insn, REG_EH_REGION, NULL_RTX))
return;
+ /* Make sure that the SET_SRC of this store insn can be assigned to
+ a register, or we will fail later on in replace_store_insn, which
+ assumes that we can do this. But sometimes the target machine has
+ oddities like MEM read-modify-write instructions. See for example
+ PR24257. */
+ if (!can_assign_to_reg_p (SET_SRC (set)))
+ return;
+
ptr = ldst_entry (dest);
if (!ptr->pattern_regs)
ptr->pattern_regs = extract_mentioned_regs (dest);
max_gcse_regno);
sbitmap_vector_zero (reg_set_in_block, last_basic_block);
pre_ldst_mems = 0;
- last_set_in = xcalloc (max_gcse_regno, sizeof (int));
- already_set = xmalloc (sizeof (int) * max_gcse_regno);
+ pre_ldst_table = htab_create (13, pre_ldst_expr_hash,
+ pre_ldst_expr_eq, NULL);
+ last_set_in = XCNEWVEC (int, max_gcse_regno);
+ already_set = XNEWVEC (int, max_gcse_regno);
/* Find all the stores we care about. */
FOR_EACH_BB (bb)
/* First compute the registers set in this block. */
regvec = last_set_in;
- for (insn = BB_HEAD (bb);
- insn != NEXT_INSN (BB_END (bb));
- insn = NEXT_INSN (insn))
+ FOR_BB_INSNS (bb, insn)
{
if (! INSN_P (insn))
continue;
/* Now find the stores. */
memset (already_set, 0, sizeof (int) * max_gcse_regno);
regvec = already_set;
- for (insn = BB_HEAD (bb);
- insn != NEXT_INSN (BB_END (bb));
- insn = NEXT_INSN (insn))
+ FOR_BB_INSNS (bb, insn)
{
if (! INSN_P (insn))
continue;
if (!AVAIL_STORE_LIST (ptr))
{
*prev_next_ptr_ptr = ptr->next;
+ htab_remove_elt_with_hash (pre_ldst_table, ptr, ptr->hash_index);
free_ldst_entry (ptr);
}
else
ret = enumerate_ldsts ();
- if (gcse_file)
+ if (dump_file)
{
- fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
- print_ldst_list (gcse_file);
+ fprintf (dump_file, "ST_avail and ST_antic (shown under loads..)\n");
+ print_ldst_list (dump_file);
}
free (last_set_in);
/* Check if INSN kills the store pattern X (is aliased with it).
AFTER is true if we are checking the case when store X occurs
- after the insn. Return true if it it does. */
+ after the insn. Return true if it does. */
static bool
store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
if (TEST_BIT (ae_gen[bb->index], ptr->index))
{
rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
- if (gcse_file)
- fprintf (gcse_file, "Removing redundant store:\n");
+ if (dump_file)
+ fprintf (dump_file, "Removing redundant store:\n");
replace_store_insn (r, XEXP (st, 0), bb, ptr);
continue;
}
transp = sbitmap_vector_alloc (last_basic_block, num_stores);
sbitmap_vector_zero (transp, last_basic_block);
- regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);
+ regs_set_in_block = XNEWVEC (int, max_gcse_regno);
FOR_EACH_BB (bb)
{
free (regs_set_in_block);
- if (gcse_file)
+ if (dump_file)
{
- dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
- dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
- dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
- dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
+ dump_sbitmap_vector (dump_file, "st_antloc", "", st_antloc, last_basic_block);
+ dump_sbitmap_vector (dump_file, "st_kill", "", ae_kill, last_basic_block);
+ dump_sbitmap_vector (dump_file, "Transpt", "", transp, last_basic_block);
+ dump_sbitmap_vector (dump_file, "st_avloc", "", ae_gen, last_basic_block);
}
}
insn = emit_insn_after_noloc (insn, prev);
- if (gcse_file)
+ if (dump_file)
{
- fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
+ fprintf (dump_file, "STORE_MOTION insert store at start of BB %d:\n",
bb->index);
- print_inline_rtx (gcse_file, insn, 6);
- fprintf (gcse_file, "\n");
+ print_inline_rtx (dump_file, insn, 6);
+ fprintf (dump_file, "\n");
}
}
insert_insn_on_edge (insn, e);
- if (gcse_file)
+ if (dump_file)
{
- fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
+ fprintf (dump_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
e->src->index, e->dest->index);
- print_inline_rtx (gcse_file, insn, 6);
- fprintf (gcse_file, "\n");
+ print_inline_rtx (dump_file, insn, 6);
+ fprintf (dump_file, "\n");
}
return 1;
rtx last, insn, note;
rtx mem = smexpr->pattern;
- stack = xmalloc (sizeof (edge_iterator) * n_basic_blocks);
+ stack = XNEWVEC (edge_iterator, n_basic_blocks);
sp = 0;
ei = ei_start (bb->succs);
if (!note || !expr_equiv_p (XEXP (note, 0), mem))
continue;
- if (gcse_file)
- fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
+ if (dump_file)
+ fprintf (dump_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
INSN_UID (insn));
remove_note (insn, note);
}
insn = gen_move_insn (reg, SET_SRC (single_set (del)));
insn = emit_insn_after (insn, del);
- if (gcse_file)
+ if (dump_file)
{
- fprintf (gcse_file,
+ fprintf (dump_file,
"STORE_MOTION delete insn in BB %d:\n ", bb->index);
- print_inline_rtx (gcse_file, del, 6);
- fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
- print_inline_rtx (gcse_file, insn, 6);
- fprintf (gcse_file, "\n");
+ print_inline_rtx (dump_file, del, 6);
+ fprintf (dump_file, "\nSTORE MOTION replaced with insn:\n ");
+ print_inline_rtx (dump_file, insn, 6);
+ fprintf (dump_file, "\n");
}
for (ptr = ANTIC_STORE_LIST (smexpr); ptr; ptr = XEXP (ptr, 1))
if (!note || !expr_equiv_p (XEXP (note, 0), mem))
continue;
- if (gcse_file)
- fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
+ if (dump_file)
+ fprintf (dump_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
INSN_UID (insn));
remove_note (insn, note);
}
struct ls_expr * ptr;
int update_flow = 0;
- if (gcse_file)
+ if (dump_file)
{
- fprintf (gcse_file, "before store motion\n");
- print_rtl (gcse_file, get_insns ());
+ fprintf (dump_file, "before store motion\n");
+ print_rtl (dump_file, get_insns ());
}
init_alias_analysis ();
num_stores = compute_store_table ();
if (num_stores == 0)
{
+ htab_delete (pre_ldst_table);
+ pre_ldst_table = NULL;
sbitmap_vector_free (reg_set_in_block);
end_alias_analysis ();
return;
add_noreturn_fake_exit_edges ();
connect_infinite_loops_to_exit ();
- edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
+ edge_list = pre_edge_rev_lcm (num_stores, transp, ae_gen,
st_antloc, ae_kill, &pre_insert_map,
&pre_delete_map);
if (x >= 0)
{
- if (gcse_file != NULL)
- fprintf (gcse_file,
+ if (dump_file != NULL)
+ fprintf (dump_file,
"Can't replace store %d: abnormal edge from %d to %d\n",
ptr->index, INDEX_EDGE (edge_list, x)->src->index,
INDEX_EDGE (edge_list, x)->dest->index);
\f
/* Entry point for jump bypassing optimization pass. */
-int
-bypass_jumps (FILE *file)
+static int
+bypass_jumps (void)
{
int changed;
if (current_function_calls_setjmp)
return 0;
- /* For calling dump_foo fns from gdb. */
- debug_stderr = stderr;
- gcse_file = file;
-
/* Identify the basic block information for this function, including
successors and predecessors. */
max_gcse_regno = max_reg_num ();
- if (file)
- dump_flow_info (file);
+ if (dump_file)
+ dump_flow_info (dump_file, dump_flags);
/* Return if there's nothing to do, or it is too expensive. */
- if (n_basic_blocks <= 1 || is_too_expensive (_ ("jump bypassing disabled")))
+ if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1
+ || is_too_expensive (_ ("jump bypassing disabled")))
return 0;
gcc_obstack_init (&gcse_obstack);
information about memory sets when we build the hash tables. */
alloc_reg_set_mem (max_gcse_regno);
- compute_sets (get_insns ());
+ compute_sets ();
max_gcse_regno = max_reg_num ();
- alloc_gcse_mem (get_insns ());
- changed = one_cprop_pass (MAX_GCSE_PASSES + 2, 1, 1);
+ alloc_gcse_mem ();
+ changed = one_cprop_pass (MAX_GCSE_PASSES + 2, true, true);
free_gcse_mem ();
- if (file)
+ if (dump_file)
{
- fprintf (file, "BYPASS of %s: %d basic blocks, ",
+ fprintf (dump_file, "BYPASS of %s: %d basic blocks, ",
current_function_name (), n_basic_blocks);
- fprintf (file, "%d bytes\n\n", bytes_used);
+ fprintf (dump_file, "%d bytes\n\n", bytes_used);
}
obstack_free (&gcse_obstack, NULL);
graceful degradation. */
if (n_edges > 20000 + n_basic_blocks * 4)
{
- if (warn_disabled_optimization)
- warning ("%s: %d basic blocks and %d edges/basic block",
- pass, n_basic_blocks, n_edges / n_basic_blocks);
+ warning (OPT_Wdisabled_optimization,
+ "%s: %d basic blocks and %d edges/basic block",
+ pass, n_basic_blocks, n_edges / n_basic_blocks);
return true;
}
* SBITMAP_SET_SIZE (max_reg_num ())
* sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
{
- if (warn_disabled_optimization)
- warning ("%s: %d basic blocks and %d registers",
- pass, n_basic_blocks, max_reg_num ());
+ warning (OPT_Wdisabled_optimization,
+ "%s: %d basic blocks and %d registers",
+ pass, n_basic_blocks, max_reg_num ());
return true;
}
return false;
}
+\f
+/* Gate for the jump bypassing pass: run only when optimizing and
+   GCSE is enabled.  */
+static bool
+gate_handle_jump_bypass (void)
+{
+ return optimize > 0 && flag_gcse;
+}
+
+/* Perform jump bypassing and control flow optimizations. */
+static unsigned int
+rest_of_handle_jump_bypass (void)
+{
+ /* Clean up the CFG and refresh register usage information before
+ running the optimization proper.  */
+ cleanup_cfg (CLEANUP_EXPENSIVE);
+ reg_scan (get_insns (), max_reg_num ());
+
+ if (bypass_jumps ())
+ {
+ /* Jumps were changed: rebuild the label usage info, re-clean the
+ CFG, and remove any insns the bypassing made dead.  */
+ rebuild_jump_labels (get_insns ());
+ cleanup_cfg (CLEANUP_EXPENSIVE);
+ delete_trivially_dead_insns (get_insns (), max_reg_num ());
+ }
+ return 0;
+}
+
+/* Pass descriptor for the jump bypassing pass.  */
+struct tree_opt_pass pass_jump_bypass =
+{
+ "bypass", /* name */
+ gate_handle_jump_bypass, /* gate */
+ rest_of_handle_jump_bypass, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_BYPASS, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func |
+ TODO_ggc_collect | TODO_verify_flow, /* todo_flags_finish */
+ 'G' /* letter */
+};
+
+
+/* Gate for the GCSE pass: run only when optimizing and GCSE is
+   enabled.  */
+static bool
+gate_handle_gcse (void)
+{
+ return optimize > 0 && flag_gcse;
+}
+
+
+/* Run global common subexpression elimination, optionally followed by
+   a CSE cleanup pass and a jump cleanup if either pass changed any
+   jumps.  */
+static unsigned int
+rest_of_handle_gcse (void)
+{
+ int save_csb, save_cfj;
+ int tem2 = 0, tem;
+
+ tem = gcse_main (get_insns ());
+ rebuild_jump_labels (get_insns ());
+ delete_trivially_dead_insns (get_insns (), max_reg_num ());
+
+ /* Temporarily disable the cross-block CSE heuristics for the cleanup
+ CSE run below; they are restored before returning.  */
+ save_csb = flag_cse_skip_blocks;
+ save_cfj = flag_cse_follow_jumps;
+ flag_cse_skip_blocks = flag_cse_follow_jumps = 0;
+
+ /* If -fexpensive-optimizations, re-run CSE to clean up things done
+ by gcse. */
+ if (flag_expensive_optimizations)
+ {
+ timevar_push (TV_CSE);
+ reg_scan (get_insns (), max_reg_num ());
+ tem2 = cse_main (get_insns (), max_reg_num ());
+ purge_all_dead_edges ();
+ delete_trivially_dead_insns (get_insns (), max_reg_num ());
+ timevar_pop (TV_CSE);
+ cse_not_expected = !flag_rerun_cse_after_loop;
+ }
+
+ /* If gcse or cse altered any jumps, rerun jump optimizations to clean
+ things up. */
+ if (tem || tem2)
+ {
+ timevar_push (TV_JUMP);
+ rebuild_jump_labels (get_insns ());
+ delete_dead_jumptables ();
+ cleanup_cfg (CLEANUP_EXPENSIVE);
+ timevar_pop (TV_JUMP);
+ }
+
+ flag_cse_skip_blocks = save_csb;
+ flag_cse_follow_jumps = save_cfj;
+ return 0;
+}
+
+/* Pass descriptor for the GCSE pass.  */
+struct tree_opt_pass pass_gcse =
+{
+ "gcse1", /* name */
+ gate_handle_gcse, /* gate */
+ rest_of_handle_gcse, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_GCSE, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func |
+ TODO_verify_flow | TODO_ggc_collect, /* todo_flags_finish */
+ 'G' /* letter */
+};
+
#include "gt-gcse.h"