/* Global common subexpression elimination/Partial redundancy elimination
and global constant/copy propagation for GNU compiler.
Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
- 2006, 2007 Free Software Foundation, Inc.
+ 2006, 2007, 2008 Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
-Software Foundation; either version 2, or (at your option) any later
+Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
-02110-1301, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
/* TODO
- reordering of memory allocation and freeing to be more space efficient
/* Number of cuids. */
static int max_cuid;
-/* Mapping of cuids to insns. */
-static rtx *cuid_insn;
-
-/* Get insn from cuid. */
-#define CUID_INSN(CUID) (cuid_insn[CUID])
-
/* Maximum register number in function prior to doing gcse + 1.
Registers created during this pass have regno >= max_gcse_regno.
This is named with "gcse" to not collide with global of same name. */
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static void record_one_set (int, rtx);
-static void record_set_info (rtx, rtx, void *);
+static void record_set_info (rtx, const_rtx, void *);
static void compute_sets (void);
-static void hash_scan_insn (rtx, struct hash_table *, int);
+static void hash_scan_insn (rtx, struct hash_table *);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool can_assign_to_reg_p (rtx);
-static bool gcse_constant_p (rtx);
-static int oprs_unchanged_p (rtx, rtx, int);
-static int oprs_anticipatable_p (rtx, rtx);
-static int oprs_available_p (rtx, rtx);
+static bool gcse_constant_p (const_rtx);
+static int oprs_unchanged_p (const_rtx, const_rtx, int);
+static int oprs_anticipatable_p (const_rtx, const_rtx);
+static int oprs_available_p (const_rtx, const_rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
-static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
+static unsigned int hash_expr (const_rtx, enum machine_mode, int *, int);
static unsigned int hash_set (int, int);
-static int expr_equiv_p (rtx, rtx);
+static int expr_equiv_p (const_rtx, const_rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
-static void record_last_set_info (rtx, rtx, void *);
+static void record_last_set_info (rtx, const_rtx, void *);
static void compute_hash_table (struct hash_table *);
static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
-static int oprs_not_set_p (rtx, rtx);
+static int oprs_not_set_p (const_rtx, const_rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
-static void compute_transp (rtx, int, sbitmap *, int);
+static void compute_transp (const_rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
struct hash_table *);
static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
-static void mems_conflict_for_gcse_p (rtx, rtx, void *);
-static int load_killed_in_block_p (basic_block, int, rtx, int);
-static void canon_list_insert (rtx, rtx, void *);
+static void mems_conflict_for_gcse_p (rtx, const_rtx, void *);
+static int load_killed_in_block_p (const_basic_block, int, const_rtx, int);
+static void canon_list_insert (rtx, const_rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
static int one_cprop_pass (int, bool, bool);
static bool constprop_register (rtx, rtx, rtx, bool);
static struct expr *find_bypass_set (int, int);
-static bool reg_killed_on_edge (rtx, edge);
+static bool reg_killed_on_edge (const_rtx, const_edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
static int enumerate_ldsts (void);
static inline struct ls_expr * first_ls_expr (void);
static inline struct ls_expr * next_ls_expr (struct ls_expr *);
-static int simple_mem (rtx);
+static int simple_mem (const_rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
-static void reg_set_info (rtx, rtx, void *);
-static void reg_clear_last_set (rtx, rtx, void *);
-static bool store_ops_ok (rtx, int *);
+static void reg_set_info (rtx, const_rtx, void *);
+static void reg_clear_last_set (rtx, const_rtx, void *);
+static bool store_ops_ok (const_rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
-static bool load_kills_store (rtx, rtx, int);
-static bool find_loads (rtx, rtx, int);
-static bool store_killed_in_insn (rtx, rtx, rtx, int);
-static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
-static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
+static bool load_kills_store (const_rtx, const_rtx, int);
+static bool find_loads (const_rtx, const_rtx, int);
+static bool store_killed_in_insn (const_rtx, const_rtx, const_rtx, int);
+static bool store_killed_after (const_rtx, const_rtx, const_rtx, const_basic_block, int *, rtx *);
+static bool store_killed_before (const_rtx, const_rtx, const_rtx, const_basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_basic_block (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
-static bool do_local_cprop (rtx, rtx, bool, rtx*);
-static bool adjust_libcall_notes (rtx, rtx, rtx, rtx*);
+static bool do_local_cprop (rtx, rtx, bool);
static void local_cprop_pass (bool);
static bool is_too_expensive (const char *);
+
+#define GNEW(T) ((T *) gmalloc (sizeof (T)))
+#define GCNEW(T) ((T *) gcalloc (1, sizeof (T)))
+
+#define GNEWVEC(T, N) ((T *) gmalloc (sizeof (T) * (N)))
+#define GCNEWVEC(T, N) ((T *) gcalloc ((N), sizeof (T)))
+#define GRESIZEVEC(T, P, N) ((T *) grealloc ((void *) (P), sizeof (T) * (N)))
+
+#define GNEWVAR(T, S) ((T *) gmalloc ((S)))
+#define GCNEWVAR(T, S) ((T *) gcalloc (1, (S)))
+#define GRESIZEVAR(T, P, S) ((T *) grealloc ((P), (S)))
+
+#define GOBNEW(T) ((T *) gcse_alloc (sizeof (T)))
+#define GOBNEWVAR(T, S) ((T *) gcse_alloc ((S)))
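+
+/* For illustration, a use such as
+
+     modify_mem_list = GCNEWVEC (rtx, last_basic_block);
+
+   expands to
+
+     modify_mem_list = ((rtx *) gcalloc ((last_basic_block), sizeof (rtx)));
+
+   so every allocation site gets the cast and the sizeof computation
+   from the macro instead of spelling them out by hand.  */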
\f
/* Entry point for global common subexpression elimination.
/* We do not construct an accurate cfg in functions which call
setjmp, so just punt to be safe. */
- if (current_function_calls_setjmp)
+ if (cfun->calls_setjmp)
return 0;
/* Assume that we do not need to run jump optimizations after gcse. */
pass = 0;
initial_bytes_used = bytes_used;
max_pass_bytes = 0;
- gcse_obstack_bottom = gcse_alloc (1);
+ gcse_obstack_bottom = GOBNEWVAR (char, 1);
changed = 1;
while (changed && pass < MAX_GCSE_PASSES)
{
/* Don't allow constant propagation to modify jumps
during this pass. */
- timevar_push (TV_CPROP1);
- changed = one_cprop_pass (pass + 1, false, false);
- timevar_pop (TV_CPROP1);
+ if (dbg_cnt (cprop1))
+ {
+ timevar_push (TV_CPROP1);
+ changed = one_cprop_pass (pass + 1, false, false);
+ timevar_pop (TV_CPROP1);
+ }
if (optimize_size)
/* Do nothing. */ ;
if (changed)
{
free_modify_mem_tables ();
- modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
- canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
+ modify_mem_list = GCNEWVEC (rtx, last_basic_block);
+ canon_modify_mem_list = GCNEWVEC (rtx, last_basic_block);
}
free_reg_set_mem ();
alloc_reg_set_mem (max_reg_num ());
/* Do one last pass of copy propagation, including cprop into
conditional jumps. */
- max_gcse_regno = max_reg_num ();
- alloc_gcse_mem ();
- /* This time, go ahead and allow cprop to alter jumps. */
- timevar_push (TV_CPROP2);
- one_cprop_pass (pass + 1, true, true);
- timevar_pop (TV_CPROP2);
- free_gcse_mem ();
+ if (dbg_cnt (cprop2))
+ {
+ max_gcse_regno = max_reg_num ();
+ alloc_gcse_mem ();
+
+ /* This time, go ahead and allow cprop to alter jumps. */
+ timevar_push (TV_CPROP2);
+ one_cprop_pass (pass + 1, true, true);
+ timevar_pop (TV_CPROP2);
+ free_gcse_mem ();
+ }
if (dump_file)
{
but we should never see those anyway, so this is OK.) */
max_uid = get_max_uid ();
- uid_cuid = gcalloc (max_uid + 1, sizeof (int));
+ uid_cuid = GCNEWVEC (int, max_uid + 1);
i = 0;
FOR_EACH_BB (bb)
FOR_BB_INSNS (bb, insn)
uid_cuid[INSN_UID (insn)] = i;
}
- /* Create a table mapping cuids to insns. */
-
max_cuid = i;
- cuid_insn = gcalloc (max_cuid + 1, sizeof (rtx));
- i = 0;
- FOR_EACH_BB (bb)
- FOR_BB_INSNS (bb, insn)
- if (INSN_P (insn))
- CUID_INSN (i++) = insn;
/* Allocate vars to track sets of regs. */
reg_set_bitmap = BITMAP_ALLOC (NULL);
reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
/* Allocate array to keep a list of insns which modify memory in each
basic block. */
- modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
- canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
+ modify_mem_list = GCNEWVEC (rtx, last_basic_block);
+ canon_modify_mem_list = GCNEWVEC (rtx, last_basic_block);
modify_mem_list_set = BITMAP_ALLOC (NULL);
blocks_with_calls = BITMAP_ALLOC (NULL);
}
free_gcse_mem (void)
{
free (uid_cuid);
- free (cuid_insn);
BITMAP_FREE (reg_set_bitmap);
alloc_reg_set_mem (int n_regs)
{
reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
- reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *));
+ reg_set_table = GCNEWVEC (struct reg_set *, reg_set_table_size);
gcc_obstack_init (&reg_set_obstack);
}
{
int new_size = regno + REG_SET_TABLE_SLOP;
- reg_set_table = grealloc (reg_set_table,
- new_size * sizeof (struct reg_set *));
+ reg_set_table = GRESIZEVEC (struct reg_set *, reg_set_table, new_size);
memset (reg_set_table + reg_set_table_size, 0,
(new_size - reg_set_table_size) * sizeof (struct reg_set *));
reg_set_table_size = new_size;
}
- new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
+ new_reg_info = XOBNEW (&reg_set_obstack, struct reg_set);
bytes_used += sizeof (struct reg_set);
new_reg_info->bb_index = BLOCK_NUM (insn);
new_reg_info->next = reg_set_table[regno];
occurring. */
static void
-record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
+record_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
{
rtx record_set_insn = (rtx) data;
case SUBREG:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case CALL:
return 0;
or from INSN to the end of INSN's basic block (if AVAIL_P != 0). */
static int
-oprs_unchanged_p (rtx x, rtx insn, int avail_p)
+oprs_unchanged_p (const_rtx x, const_rtx insn, int avail_p)
{
int i, j;
enum rtx_code code;
case CONST:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case SYMBOL_REF:
case LABEL_REF:
load_killed_in_block_p. A memory reference for a load instruction,
mems_conflict_for_gcse_p will see if a memory store conflicts with
this memory load. */
-static rtx gcse_mem_operand;
+static const_rtx gcse_mem_operand;
/* DEST is the output of an instruction. If it is a memory reference, and
possibly conflicts with the load found in gcse_mem_operand, then set
gcse_mems_conflict_p to a nonzero value. */
static void
-mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
+mems_conflict_for_gcse_p (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
void *data ATTRIBUTE_UNUSED)
{
while (GET_CODE (dest) == SUBREG
AVAIL_P to 0. */
static int
-load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
+load_killed_in_block_p (const_basic_block bb, int uid_limit, const_rtx x, int avail_p)
{
rtx list_entry = modify_mem_list[bb->index];
the start of INSN's basic block up to but not including INSN. */
static int
-oprs_anticipatable_p (rtx x, rtx insn)
+oprs_anticipatable_p (const_rtx x, const_rtx insn)
{
return oprs_unchanged_p (x, insn, 0);
}
INSN to the end of INSN's basic block. */
static int
-oprs_available_p (rtx x, rtx insn)
+oprs_available_p (const_rtx x, const_rtx insn)
{
return oprs_unchanged_p (x, insn, 1);
}
the current size of the hash table to be probed. */
static unsigned int
-hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p,
+hash_expr (const_rtx x, enum machine_mode mode, int *do_not_record_p,
int hash_table_size)
{
unsigned int hash;
/* Return nonzero if exp1 is equivalent to exp2. */
static int
-expr_equiv_p (rtx x, rtx y)
+expr_equiv_p (const_rtx x, const_rtx y)
{
return exp_equiv_p (x, y, 0, true);
}
if (! found)
{
- cur_expr = gcse_alloc (sizeof (struct expr));
+ cur_expr = GOBNEW (struct expr);
bytes_used += sizeof (struct expr);
if (table->table[hash] == NULL)
/* This is the first pattern that hashed to this index. */
else
{
/* First occurrence of this expression in this basic block. */
- antic_occr = gcse_alloc (sizeof (struct occr));
+ antic_occr = GOBNEW (struct occr);
bytes_used += sizeof (struct occr);
antic_occr->insn = insn;
antic_occr->next = cur_expr->antic_occr;
else
{
/* First occurrence of this expression in this basic block. */
- avail_occr = gcse_alloc (sizeof (struct occr));
+ avail_occr = GOBNEW (struct occr);
bytes_used += sizeof (struct occr);
avail_occr->insn = insn;
avail_occr->next = cur_expr->avail_occr;
if (! found)
{
- cur_expr = gcse_alloc (sizeof (struct expr));
+ cur_expr = GOBNEW (struct expr);
bytes_used += sizeof (struct expr);
if (table->table[hash] == NULL)
/* This is the first pattern that hashed to this index. */
else
{
/* First occurrence of this expression in this basic block. */
- cur_occr = gcse_alloc (sizeof (struct occr));
+ cur_occr = GOBNEW (struct occr);
bytes_used += sizeof (struct occr);
cur_occr->insn = insn;
the purposes of GCSE's constant propagation. */
static bool
-gcse_constant_p (rtx x)
+gcse_constant_p (const_rtx x)
{
/* Consider a COMPARE of two integers constant. */
if (GET_CODE (x) == COMPARE
unsigned int regno = REGNO (dest);
rtx tmp;
- /* See if a REG_NOTE shows this equivalent to a simpler expression.
+ /* See if a REG_EQUAL note shows this equivalent to a simpler expression.
+
This allows us to do a single GCSE pass and still eliminate
redundant constants, addresses or other expressions that are
- constructed with multiple instructions. */
+ constructed with multiple instructions.
+
+ However, keep the original SRC if INSN is a simple reg-reg move. In
+ this case, there will almost always be a REG_EQUAL note on the
+ insn that sets SRC. By recording the REG_EQUAL value here as SRC
+ for INSN, we miss copy propagation opportunities and we perform the
+ same PRE GCSE operation repeatedly on the same REG_EQUAL value if we
+ do more than one PRE GCSE pass.
+
+ Note that this does not impede profitable constant propagations. We
+ "look through" reg-reg sets in lookup_avail_set. */
note = find_reg_equal_equiv_note (insn);
if (note != 0
+ && REG_NOTE_KIND (note) == REG_EQUAL
+ && !REG_P (src)
&& (table->set_p
? gcse_constant_p (XEXP (note, 0))
: want_to_gcse_p (XEXP (note, 0))))
modified. Here we want to search from INSN+1 on, but
oprs_available_p searches from INSN on. */
&& (insn == BB_END (BLOCK_FOR_INSN (insn))
- || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
- && oprs_available_p (pat, tmp))))
+ || (tmp = next_nonnote_insn (insn)) == NULL_RTX
+ || BLOCK_FOR_INSN (tmp) != BLOCK_FOR_INSN (insn)
+ || oprs_available_p (pat, tmp)))
insert_set_in_table (pat, insn, table);
}
/* In case of store we want to consider the memory value as available in
are also in the PARALLEL. Later.
If SET_P is nonzero, this is for the assignment hash table,
- otherwise it is for the expression hash table.
- If IN_LIBCALL_BLOCK nonzero, we are in a libcall block, and should
- not record any expressions. */
+ otherwise it is for the expression hash table. */
static void
-hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
+hash_scan_insn (rtx insn, struct hash_table *table)
{
rtx pat = PATTERN (insn);
int i;
- if (in_libcall_block)
- return;
-
/* Pick out the sets of INSN and for other forms of instructions record
what's been modified. */
unsigned int *hash_val;
struct expr *expr;
- flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
- hash_val = xmalloc (table->n_elems * sizeof (unsigned int));
+ flat_table = XCNEWVEC (struct expr *, table->n_elems);
+ hash_val = XNEWVEC (unsigned int, table->n_elems);
for (i = 0; i < (int) table->size; i++)
for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
taken off pairwise. */
static void
-canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
+canon_list_insert (rtx dest ATTRIBUTE_UNUSED, const_rtx unused1 ATTRIBUTE_UNUSED,
void * v_insn)
{
rtx dest_addr, insn;
the SET is taking place. */
static void
-record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
+record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
{
rtx last_set_insn = (rtx) data;
/* re-Cache any INSN_LIST nodes we have allocated. */
clear_modify_mem_tables ();
/* Some working arrays used to track first and last set in each block. */
- reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
+ reg_avail_info = GNEWVEC (struct reg_avail_info, max_gcse_regno);
for (i = 0; i < max_gcse_regno; ++i)
reg_avail_info[i].last_bb = NULL;
{
rtx insn;
unsigned int regno;
- int in_libcall_block;
/* First pass over the instructions records information used to
determine when registers and memory are first and last set.
BB_HEAD (current_bb), table);
/* The next pass builds the hash table. */
- in_libcall_block = 0;
FOR_BB_INSNS (current_bb, insn)
if (INSN_P (insn))
- {
- if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
- in_libcall_block = 1;
- else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
- in_libcall_block = 0;
- hash_scan_insn (insn, table, in_libcall_block);
- if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
- in_libcall_block = 0;
- }
+ hash_scan_insn (insn, table);
}
free (reg_avail_info);
??? Later take some measurements. */
table->size |= 1;
n = table->size * sizeof (struct expr *);
- table->table = gmalloc (n);
+ table->table = GNEWVAR (struct expr *, n);
table->set_p = set_p;
}
INSN's basic block. */
static int
-oprs_not_set_p (rtx x, rtx insn)
+oprs_not_set_p (const_rtx x, const_rtx insn)
{
int i, j;
enum rtx_code code;
case CONST:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case SYMBOL_REF:
case LABEL_REF:
static void
mark_call (rtx insn)
{
- if (! CONST_OR_PURE_CALL_P (insn))
+ if (! RTL_CONST_OR_PURE_CALL_P (insn))
record_last_mem_set_info (insn);
}
bit in BMAP. */
static void
-compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
+compute_transp (const_rtx x, int indx, sbitmap *bmap, int set_p)
{
int i, j;
basic_block bb;
case CONST:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case SYMBOL_REF:
case LABEL_REF:
with our replacement. */
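+ /* Pass a copy of TO below: simplify_replace_rtx may splice its third
+ argument directly into the result, and an uncopied TO could leave
+ the REG_EQUAL note sharing RTL with the insn stream (this pass now
+ runs with TODO_verify_rtl_sharing). */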
if (note != 0 && REG_NOTE_KIND (note) == REG_EQUAL)
set_unique_reg_note (insn, REG_EQUAL,
- simplify_replace_rtx (XEXP (note, 0), from, to));
+ simplify_replace_rtx (XEXP (note, 0), from,
+ copy_rtx (to)));
if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
{
/* If above failed and this is a single set, try to simplify the source of
to one computed by setcc. */
if (setcc && modified_in_p (new, setcc))
return 0;
- if (! validate_change (jump, &SET_SRC (set), new, 0))
+ if (! validate_unshare_change (jump, &SET_SRC (set), new, 0))
{
/* When (some) constants are not valid in a comparison, and there
are two registers to be replaced by constants before the entire
}
purge_dead_edges (bb);
+ /* If a conditional jump has been changed into unconditional jump, remove
+ the jump and make the edge fallthru - this is always called in
+ cfglayout mode. */
+ if (new != pc_rtx && simplejump_p (jump))
+ {
+ edge e;
+ edge_iterator ei;
+
+ for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ei_next (&ei))
+ if (e->dest != EXIT_BLOCK_PTR
+ && BB_HEAD (e->dest) == JUMP_LABEL (jump))
+ {
+ e->flags |= EDGE_FALLTHRU;
+ break;
+ }
+ delete_insn (jump);
+ }
+
return 1;
}
find_used_regs (xptr, data);
}
-/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
- their REG_EQUAL notes need updating. */
+/* Try to perform local const/copy propagation on X in INSN.
+ If ALTER_JUMPS is false, changing jump insns is not allowed. */
static bool
-do_local_cprop (rtx x, rtx insn, bool alter_jumps, rtx *libcall_sp)
+do_local_cprop (rtx x, rtx insn, bool alter_jumps)
{
rtx newreg = NULL, newcnst = NULL;
rtx this_rtx = l->loc;
rtx note;
- /* Don't CSE non-constant values out of libcall blocks. */
- if (l->in_libcall && ! CONSTANT_P (this_rtx))
- continue;
-
if (gcse_constant_p (this_rtx))
newcnst = this_rtx;
if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
}
if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
{
- /* If we find a case where we can't fix the retval REG_EQUAL notes
- match the new register, we either have to abandon this replacement
- or fix delete_trivially_dead_insns to preserve the setting insn,
- or make it delete the REG_EQUAL note, and fix up all passes that
- require the REG_EQUAL note there. */
- bool adjusted;
-
- adjusted = adjust_libcall_notes (x, newcnst, insn, libcall_sp);
- gcc_assert (adjusted);
-
if (dump_file != NULL)
{
fprintf (dump_file, "LOCAL CONST-PROP: Replacing reg %d in ",
}
else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
{
- adjust_libcall_notes (x, newreg, insn, libcall_sp);
if (dump_file != NULL)
{
fprintf (dump_file,
return false;
}
-/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
- their REG_EQUAL notes need updating to reflect that OLDREG has been
- replaced with NEWVAL in INSN. Return true if all substitutions could
- be made. */
-static bool
-adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
-{
- rtx end;
-
- while ((end = *libcall_sp++))
- {
- rtx note = find_reg_equal_equiv_note (end);
-
- if (! note)
- continue;
-
- if (REG_P (newval))
- {
- if (reg_set_between_p (newval, PREV_INSN (insn), end))
- {
- do
- {
- note = find_reg_equal_equiv_note (end);
- if (! note)
- continue;
- if (reg_mentioned_p (newval, XEXP (note, 0)))
- return false;
- }
- while ((end = *libcall_sp++));
- return true;
- }
- }
- XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), oldreg, newval);
- df_notes_rescan (end);
- insn = end;
- }
- return true;
-}
-
-#define MAX_NESTED_LIBCALLS 9
-
/* Do local const/copy propagation (i.e. within each basic block).
If ALTER_JUMPS is true, allow propagating into jump insns, which
could modify the CFG. */
basic_block bb;
rtx insn;
struct reg_use *reg_used;
- rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
bool changed = false;
cselib_init (false);
- libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
- *libcall_sp = 0;
FOR_EACH_BB (bb)
{
FOR_BB_INSNS (bb, insn)
{
if (INSN_P (insn))
{
- rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
-
- if (note)
- {
- gcc_assert (libcall_sp != libcall_stack);
- *--libcall_sp = XEXP (note, 0);
- }
- note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
- if (note)
- libcall_sp++;
- note = find_reg_equal_equiv_note (insn);
+ rtx note = find_reg_equal_equiv_note (insn);
do
{
reg_use_count = 0;
for (reg_used = &reg_use_table[0]; reg_use_count > 0;
reg_used++, reg_use_count--)
{
- if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
- libcall_sp))
+ if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps))
{
changed = true;
break;
cselib_process_insn (insn);
}
- /* Forget everything at the end of a basic block. Make sure we are
- not inside a libcall, they should never cross basic blocks. */
+ /* Forget everything at the end of a basic block. */
cselib_clear_table ();
- gcc_assert (libcall_sp == &libcall_stack[MAX_NESTED_LIBCALLS]);
}
cselib_finish ();
it. COND is either an EQ or NE comparison. */
static bool
-implicit_set_cond_p (rtx cond)
+implicit_set_cond_p (const_rtx cond)
{
- enum machine_mode mode = GET_MODE (XEXP (cond, 0));
- rtx cst = XEXP (cond, 1);
+ const enum machine_mode mode = GET_MODE (XEXP (cond, 0));
+ const_rtx cst = XEXP (cond, 1);
/* We can't perform this optimization if either operand might be or might
contain a signed zero. */
valid prior to commit_edge_insertions. */
static bool
-reg_killed_on_edge (rtx reg, edge e)
+reg_killed_on_edge (const_rtx reg, const_edge e)
{
rtx insn;
expressions into. Get the mode for the new pseudo from
the mode of the original destination pseudo. */
if (expr->reaching_reg == NULL)
- expr->reaching_reg
- = gen_reg_rtx (GET_MODE (SET_DEST (set)));
+ expr->reaching_reg = gen_reg_rtx_and_attrs (SET_DEST (set));
gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
delete_insn (insn);
return changed;
}
\f
-/* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
- If notes are added to an insn which references a CODE_LABEL, the
- LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
- because the following loop optimization pass requires them. */
+/* If X contains any LABEL_REF's, add REG_LABEL_OPERAND notes for them
+ to INSN. If such notes are added to an insn which references a
+ CODE_LABEL, the LABEL_NUSES count is incremented. We have to add
+ these notes, because the following loop optimization pass requires
+ them. */
/* ??? If there was a jump optimization pass after gcse and before loop,
then we would not need to do this here, because jump would add the
- necessary REG_LABEL notes. */
+ necessary REG_LABEL_OPERAND and REG_LABEL_TARGET notes. */
static void
add_label_notes (rtx x, rtx insn)
We no longer ignore such label references (see LABEL_REF handling in
mark_jump_label for additional information). */
- REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
- REG_NOTES (insn));
+ /* There's no reason for current users to emit jump-insns with
+ such a LABEL_REF, so we don't have to handle REG_LABEL_TARGET
+ notes. */
+ gcc_assert (!JUMP_P (insn));
+ add_reg_note (insn, REG_LABEL_OPERAND, XEXP (x, 0));
+
if (LABEL_P (XEXP (x, 0)))
LABEL_NUSES (XEXP (x, 0))++;
+
return;
}
FOR_EACH_BB (bb)
{
- /* Note that flow inserted a nop a the end of basic blocks that
+ /* Note that flow inserted a nop at the end of basic blocks that
end in call instructions for reasons other than abnormal
control flow. */
if (! CALL_P (BB_END (bb)))
the convergence. */
FOR_EACH_BB_REVERSE (bb)
{
- changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
- hoist_vbeout[bb->index], transp[bb->index]);
if (bb->next_bb != EXIT_BLOCK_PTR)
- sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
+ sbitmap_intersection_of_succs (hoist_vbeout[bb->index],
+ hoist_vbein, bb->index);
+
+ changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index],
+ antloc[bb->index],
+ hoist_vbeout[bb->index],
+ transp[bb->index]);
}
passes++;
from the mode of the original destination pseudo. */
if (expr->reaching_reg == NULL)
expr->reaching_reg
- = gen_reg_rtx (GET_MODE (SET_DEST (set)));
+ = gen_reg_rtx_and_attrs (SET_DEST (set));
gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
delete_insn (insn);
pre_ldst_expr_hash (const void *p)
{
int do_not_record_p = 0;
- const struct ls_expr *x = p;
+ const struct ls_expr *const x = (const struct ls_expr *) p;
return hash_rtx (x->pattern, GET_MODE (x->pattern), &do_not_record_p, NULL, false);
}
static int
pre_ldst_expr_eq (const void *p1, const void *p2)
{
- const struct ls_expr *ptr1 = p1, *ptr2 = p2;
+ const struct ls_expr *const ptr1 = (const struct ls_expr *) p1,
+ *const ptr2 = (const struct ls_expr *) p2;
return expr_equiv_p (ptr1->pattern, ptr2->pattern);
}
slot = htab_find_slot (pre_ldst_table, &e, NO_INSERT);
if (!slot || ((struct ls_expr *)*slot)->invalid)
return NULL;
- return *slot;
+ return (struct ls_expr *) *slot;
}
/* Assign each element of the list of mems a monotonically increasing value. */
ld_motion list, otherwise we let the usual aliasing take care of it. */
static int
-simple_mem (rtx x)
+simple_mem (const_rtx x)
{
if (! MEM_P (x))
return 0;
note_stores. */
static void
-reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED,
+reg_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
void *data)
{
- sbitmap bb_reg = data;
+ sbitmap bb_reg = (sbitmap) data;
if (GET_CODE (dest) == SUBREG)
dest = SUBREG_REG (dest);
note_stores. */
static void
-reg_clear_last_set (rtx dest, rtx setter ATTRIBUTE_UNUSED,
+reg_clear_last_set (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
void *data)
{
- int *dead_vec = data;
+ int *dead_vec = (int *) data;
if (GET_CODE (dest) == SUBREG)
dest = SUBREG_REG (dest);
due to set of registers in bitmap REGS_SET. */
static bool
-store_ops_ok (rtx x, int *regs_set)
+store_ops_ok (const_rtx x, int *regs_set)
{
- rtx reg;
+ const_rtx reg;
for (; x; x = XEXP (x, 1))
{
case CONST:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case SYMBOL_REF:
case LABEL_REF:
after the X. */
static bool
-load_kills_store (rtx x, rtx store_pattern, int after)
+load_kills_store (const_rtx x, const_rtx store_pattern, int after)
{
if (after)
return anti_dependence (x, store_pattern);
after the insn X. */
static bool
-find_loads (rtx x, rtx store_pattern, int after)
+find_loads (const_rtx x, const_rtx store_pattern, int after)
{
const char * fmt;
int i, j;
}
static inline bool
-store_killed_in_pat (rtx x, rtx pat, int after)
+store_killed_in_pat (const_rtx x, const_rtx pat, int after)
{
if (GET_CODE (pat) == SET)
{
after the insn. Return true if it does. */
static bool
-store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
+store_killed_in_insn (const_rtx x, const_rtx x_regs, const_rtx insn, int after)
{
- rtx reg, base, note, pat;
+ const_rtx reg, base, note, pat;
if (!INSN_P (insn))
return false;
{
/* A normal or pure call might read from pattern,
but a const call will not. */
- if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
+ if (!RTL_CONST_CALL_P (insn))
return true;
/* But even a const call reads its parameters. Check whether the
is killed, return in FAIL_INSN the last insn in which that occurs. */
static bool
-store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
+store_killed_after (const_rtx x, const_rtx x_regs, const_rtx insn, const_basic_block bb,
int *regs_set_after, rtx *fail_insn)
{
rtx last = BB_END (bb), act;
within basic block BB. X_REGS is list of registers mentioned in X.
REGS_SET_BEFORE is bitmap of registers set before or in this insn. */
static bool
-store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb,
+store_killed_before (const_rtx x, const_rtx x_regs, const_rtx insn, const_basic_block bb,
int *regs_set_before)
{
rtx first = BB_HEAD (bb);
are any side effects. */
if (TEST_BIT (ae_gen[bb->index], ptr->index))
{
- rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
+ rtx r = gen_reg_rtx_and_attrs (ptr->pattern);
if (dump_file)
fprintf (dump_file, "Removing redundant store:\n");
replace_store_insn (r, XEXP (st, 0), bb, ptr);
static void
replace_store_insn (rtx reg, rtx del, basic_block bb, struct ls_expr *smexpr)
{
- rtx insn, mem, note, set, ptr, pair;
+ rtx insn, mem, note, set, ptr;
mem = smexpr->pattern;
insn = gen_move_insn (reg, SET_SRC (single_set (del)));
break;
}
- /* Move the notes from the deleted insn to its replacement, and patch
- up the LIBCALL notes. */
+ /* Move the notes from the deleted insn to its replacement. */
REG_NOTES (insn) = REG_NOTES (del);
- note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
- if (note)
- {
- pair = XEXP (note, 0);
- note = find_reg_note (pair, REG_LIBCALL, NULL_RTX);
- XEXP (note, 0) = insn;
- }
- note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
- if (note)
- {
- pair = XEXP (note, 0);
- note = find_reg_note (pair, REG_RETVAL, NULL_RTX);
- XEXP (note, 0) = insn;
- }
-
/* Emit the insn AFTER all the notes are transferred.
This is cheaper since we avoid df rescanning for the note change. */
insn = emit_insn_after (insn, del);
rtx reg, i, del;
if (expr->reaching_reg == NULL_RTX)
- expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
+ expr->reaching_reg = gen_reg_rtx_and_attrs (expr->pattern);
reg = expr->reaching_reg;
/* We do not construct an accurate cfg in functions which call
setjmp, so just punt to be safe. */
- if (current_function_calls_setjmp)
+ if (cfun->calls_setjmp)
return 0;
/* Identify the basic block information for this function, including
static bool
gate_handle_jump_bypass (void)
{
- return optimize > 0 && flag_gcse;
+ return optimize > 0 && flag_gcse
+ && dbg_cnt (jump_bypass);
}
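+
+/* A sketch of how the new debug counters can be used, assuming they
+ are registered in dbg-cnt.def: compiling with -fdbg-cnt=jump_bypass:0
+ makes the dbg_cnt call above return false, so the pass can be
+ switched off when bisecting a miscompilation. */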
/* Perform jump bypassing and control flow optimizations. */
return 0;
}
-struct tree_opt_pass pass_jump_bypass =
+struct rtl_opt_pass pass_jump_bypass =
{
+ {
+ RTL_PASS,
"bypass", /* name */
gate_handle_jump_bypass, /* gate */
rest_of_handle_jump_bypass, /* execute */
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_dump_func |
- TODO_ggc_collect | TODO_verify_flow, /* todo_flags_finish */
- 'G' /* letter */
+ TODO_ggc_collect | TODO_verify_flow /* todo_flags_finish */
+ }
};
static bool
gate_handle_gcse (void)
{
- return optimize > 0 && flag_gcse;
+ return optimize > 0 && flag_gcse
+ && dbg_cnt (gcse);
}
{
timevar_push (TV_CSE);
tem2 = cse_main (get_insns (), max_reg_num ());
- df_finish_pass ();
+ df_finish_pass (false);
purge_all_dead_edges ();
delete_trivially_dead_insns (get_insns (), max_reg_num ());
timevar_pop (TV_CSE);
/* If gcse or cse altered any jumps, rerun jump optimizations to clean
things up. */
- if (tem || tem2)
+ if (tem || tem2 == 2)
{
timevar_push (TV_JUMP);
rebuild_jump_labels (get_insns ());
cleanup_cfg (0);
timevar_pop (TV_JUMP);
}
+ else if (tem2 == 1)
+ cleanup_cfg (0);
flag_cse_skip_blocks = save_csb;
flag_cse_follow_jumps = save_cfj;
return 0;
}
-struct tree_opt_pass pass_gcse =
+struct rtl_opt_pass pass_gcse =
{
+ {
+ RTL_PASS,
"gcse1", /* name */
gate_handle_gcse, /* gate */
rest_of_handle_gcse, /* execute */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_df_finish |
+ TODO_df_finish | TODO_verify_rtl_sharing |
TODO_dump_func |
- TODO_verify_flow | TODO_ggc_collect, /* todo_flags_finish */
- 'G' /* letter */
+ TODO_verify_flow | TODO_ggc_collect /* todo_flags_finish */
+ }
};