You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA. */
/* TODO
- reordering of memory allocation and freeing to be more space efficient
#include "intl.h"
#include "obstack.h"
#include "timevar.h"
+#include "tree-pass.h"
+#include "hashtab.h"
/* Propagate flow information through back edges and thus enable PRE's
moving loop invariant calculations out of loops.
/* Head of the list of load/store memory refs. */
static struct ls_expr * pre_ldst_mems = NULL;
+/* Hashtable for the load/store memory refs. */
+static htab_t pre_ldst_table = NULL;
+
/* Bitmap containing one bit for each register in the program.
Used when performing GCSE to track which registers have been set since
the start of the basic block. */
static int gcse_create_count;
/* Number of local constants propagated. */
static int local_const_prop_count;
-/* Number of local copys propagated. */
+/* Number of local copies propagated. */
static int local_copy_prop_count;
/* Number of global constants propagated. */
static int global_const_prop_count;
-/* Number of global copys propagated. */
+/* Number of global copies propagated. */
static int global_copy_prop_count;
\f
/* For available exprs */
load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
{
rtx list_entry = modify_mem_list[bb->index];
+
+ /* If this is a read-only memory reference, then we aren't going to be changing it. */
+ if (MEM_READONLY_P (x))
+ return 0;
+
while (list_entry)
{
rtx setter;
return;
case MEM:
- {
- bitmap_iterator bi;
- unsigned bb_index;
-
- /* First handle all the blocks with calls. We don't need to
- do any list walking for them. */
- EXECUTE_IF_SET_IN_BITMAP (blocks_with_calls, 0, bb_index, bi)
- {
- if (set_p)
- SET_BIT (bmap[bb_index], indx);
- else
- RESET_BIT (bmap[bb_index], indx);
- }
+ if (! MEM_READONLY_P (x))
+ {
+ bitmap_iterator bi;
+ unsigned bb_index;
- /* Now iterate over the blocks which have memory modifications
- but which do not have any calls. */
- EXECUTE_IF_AND_COMPL_IN_BITMAP (modify_mem_list_set, blocks_with_calls,
- 0, bb_index, bi)
- {
- rtx list_entry = canon_modify_mem_list[bb_index];
+ /* First handle all the blocks with calls. We don't need to
+ do any list walking for them. */
+ EXECUTE_IF_SET_IN_BITMAP (blocks_with_calls, 0, bb_index, bi)
+ {
+ if (set_p)
+ SET_BIT (bmap[bb_index], indx);
+ else
+ RESET_BIT (bmap[bb_index], indx);
+ }
- while (list_entry)
+ /* Now iterate over the blocks which have memory modifications
+ but which do not have any calls. */
+ EXECUTE_IF_AND_COMPL_IN_BITMAP (modify_mem_list_set,
+ blocks_with_calls,
+ 0, bb_index, bi)
{
- rtx dest, dest_addr;
+ rtx list_entry = canon_modify_mem_list[bb_index];
- /* LIST_ENTRY must be an INSN of some kind that sets memory.
- Examine each hunk of memory that is modified. */
+ while (list_entry)
+ {
+ rtx dest, dest_addr;
- dest = XEXP (list_entry, 0);
- list_entry = XEXP (list_entry, 1);
- dest_addr = XEXP (list_entry, 0);
+ /* LIST_ENTRY must be an INSN of some kind that sets memory.
+ Examine each hunk of memory that is modified. */
- if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
- x, rtx_addr_varies_p))
- {
- if (set_p)
- SET_BIT (bmap[bb_index], indx);
- else
- RESET_BIT (bmap[bb_index], indx);
- break;
- }
- list_entry = XEXP (list_entry, 1);
+ dest = XEXP (list_entry, 0);
+ list_entry = XEXP (list_entry, 1);
+ dest_addr = XEXP (list_entry, 0);
+
+ if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
+ x, rtx_addr_varies_p))
+ {
+ if (set_p)
+ SET_BIT (bmap[bb_index], indx);
+ else
+ RESET_BIT (bmap[bb_index], indx);
+ break;
+ }
+ list_entry = XEXP (list_entry, 1);
+ }
}
- }
- }
+ }
x = XEXP (x, 0);
goto repeat;
have a note, and have no special SET, add a REG_EQUAL note to not
lose information. */
if (!success && note == 0 && set != 0
- && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT)
+ && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
+ && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
}
insn_inserted_p = 0;
/* These tests should be the same as the tests above. */
- if (TEST_BIT (hoist_vbeout[bb->index], i))
+ if (TEST_BIT (hoist_exprs[bb->index], i))
{
/* We've found a potentially hoistable expression, now
we look at every block BB dominates to see if it
load towards the exit, and we end up with no loads or stores of 'i'
in the loop. */
+/* Hash function for the load/store memory-reference hash table
+   (pre_ldst_table): hash the entry's recorded memory PATTERN with
+   hash_rtx.  DO_NOT_RECORD_P is required by hash_rtx's interface but
+   its result is discarded here.  */
+static hashval_t
+pre_ldst_expr_hash (const void *p)
+{
+ int do_not_record_p = 0;
+ const struct ls_expr *x = p;
+ return hash_rtx (x->pattern, GET_MODE (x->pattern), &do_not_record_p, NULL, false);
+}
+
+/* Equality function for the load/store memory-reference hash table:
+   two entries match when their memory patterns are equivalent
+   according to expr_equiv_p.  Must agree with pre_ldst_expr_hash.  */
+static int
+pre_ldst_expr_eq (const void *p1, const void *p2)
+{
+ const struct ls_expr *ptr1 = p1, *ptr2 = p2;
+ return expr_equiv_p (ptr1->pattern, ptr2->pattern);
+}
+
/* This will search the ldst list for a matching expression. If it
doesn't find one, we create one and initialize it. */
int do_not_record_p = 0;
struct ls_expr * ptr;
unsigned int hash;
+ void **slot;
+ struct ls_expr e;
hash = hash_rtx (x, GET_MODE (x), &do_not_record_p,
NULL, /*have_reg_qty=*/false);
- for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
- if (ptr->hash_index == hash && expr_equiv_p (ptr->pattern, x))
- return ptr;
+ e.pattern = x;
+ slot = htab_find_slot_with_hash (pre_ldst_table, &e, hash, INSERT);
+ if (*slot)
+ return (struct ls_expr *)*slot;
ptr = xmalloc (sizeof (struct ls_expr));
ptr->index = 0;
ptr->hash_index = hash;
pre_ldst_mems = ptr;
+ *slot = ptr;
return ptr;
}
static void
free_ldst_mems (void)
{
+ if (pre_ldst_table)
+ htab_delete (pre_ldst_table);
+ pre_ldst_table = NULL;
+
while (pre_ldst_mems)
{
struct ls_expr * tmp = pre_ldst_mems;
static struct ls_expr *
find_rtx_in_ldst (rtx x)
{
- struct ls_expr * ptr;
-
- for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
- if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
- return ptr;
-
- return NULL;
+ struct ls_expr e;
+ void **slot;
+ /* The hash table only exists while the ldst list is live; with no
+    table there is nothing to look up.  */
+ if (!pre_ldst_table)
+ return NULL;
+ e.pattern = x;
+ /* NO_INSERT: probe only; a NULL slot means X was never recorded.  */
+ slot = htab_find_slot (pre_ldst_table, &e, NO_INSERT);
+ /* Entries marked invalid are treated as absent, matching the old
+    list walk's "! ptr->invalid" test.  */
+ if (!slot || ((struct ls_expr *)*slot)->invalid)
+ return NULL;
+ return *slot;
}
/* Assign each element of the list of mems a monotonically increasing value. */
rtx insn;
pre_ldst_mems = NULL;
+ pre_ldst_table = htab_create (13, pre_ldst_expr_hash,
+ pre_ldst_expr_eq, NULL);
FOR_EACH_BB (bb)
{
else
{
*last = ptr->next;
+ htab_remove_elt_with_hash (pre_ldst_table, ptr, ptr->hash_index);
free_ldst_entry (ptr);
ptr = * last;
}
max_gcse_regno);
sbitmap_vector_zero (reg_set_in_block, last_basic_block);
pre_ldst_mems = 0;
+ pre_ldst_table = htab_create (13, pre_ldst_expr_hash,
+ pre_ldst_expr_eq, NULL);
last_set_in = xcalloc (max_gcse_regno, sizeof (int));
already_set = xmalloc (sizeof (int) * max_gcse_regno);
if (!AVAIL_STORE_LIST (ptr))
{
*prev_next_ptr_ptr = ptr->next;
+ htab_remove_elt_with_hash (pre_ldst_table, ptr, ptr->hash_index);
free_ldst_entry (ptr);
}
else
num_stores = compute_store_table ();
if (num_stores == 0)
{
+ htab_delete (pre_ldst_table);
+ pre_ldst_table = NULL;
sbitmap_vector_free (reg_set_in_block);
end_alias_analysis ();
return;
return false;
}
+\f
+/* Gate for the jump-bypass pass: run only when optimizing and GCSE
+   (-fgcse) is enabled.  */
+static bool
+gate_handle_jump_bypass (void)
+{
+ return optimize > 0 && flag_gcse;
+}
+
+/* Perform jump bypassing and control flow optimizations. */
+static void
+rest_of_handle_jump_bypass (void)
+{
+ cleanup_cfg (CLEANUP_EXPENSIVE);
+ reg_scan (get_insns (), max_reg_num ());
+
+ if (bypass_jumps (dump_file))
+ {
+ /* Bypassing changed some jumps: rebuild the label usage info,
+    re-clean the CFG, and remove any insns that became dead.  */
+ rebuild_jump_labels (get_insns ());
+ cleanup_cfg (CLEANUP_EXPENSIVE);
+ delete_trivially_dead_insns (get_insns (), max_reg_num ());
+ }
+}
+
+/* Pass descriptor for the jump-bypass RTL pass.  */
+struct tree_opt_pass pass_jump_bypass =
+{
+ "bypass", /* name */
+ gate_handle_jump_bypass, /* gate */
+ rest_of_handle_jump_bypass, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_BYPASS, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func |
+ TODO_ggc_collect | TODO_verify_flow, /* todo_flags_finish */
+ 'G' /* letter */
+};
+
+
+/* Gate for the GCSE pass: run only when optimizing and GCSE
+   (-fgcse) is enabled.  */
+static bool
+gate_handle_gcse (void)
+{
+ return optimize > 0 && flag_gcse;
+}
+
+
+/* Execute function for the GCSE pass: run gcse_main, then optionally
+   re-run CSE and jump cleanup to tidy up after it.  */
+static void
+rest_of_handle_gcse (void)
+{
+ int save_csb, save_cfj;
+ int tem2 = 0, tem;
+
+ tem = gcse_main (get_insns (), dump_file);
+ rebuild_jump_labels (get_insns ());
+ delete_trivially_dead_insns (get_insns (), max_reg_num ());
+
+ /* Temporarily disable these two CSE features; the saved values are
+    restored at the end of this function.  */
+ save_csb = flag_cse_skip_blocks;
+ save_cfj = flag_cse_follow_jumps;
+ flag_cse_skip_blocks = flag_cse_follow_jumps = 0;
+
+ /* If -fexpensive-optimizations, re-run CSE to clean up things done
+    by gcse. */
+ if (flag_expensive_optimizations)
+ {
+ timevar_push (TV_CSE);
+ reg_scan (get_insns (), max_reg_num ());
+ tem2 = cse_main (get_insns (), max_reg_num (), dump_file);
+ purge_all_dead_edges ();
+ delete_trivially_dead_insns (get_insns (), max_reg_num ());
+ timevar_pop (TV_CSE);
+ cse_not_expected = !flag_rerun_cse_after_loop;
+ }
+
+ /* If gcse or cse altered any jumps, rerun jump optimizations to clean
+    things up. */
+ if (tem || tem2)
+ {
+ timevar_push (TV_JUMP);
+ rebuild_jump_labels (get_insns ());
+ delete_dead_jumptables ();
+ cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
+ timevar_pop (TV_JUMP);
+ }
+
+ flag_cse_skip_blocks = save_csb;
+ flag_cse_follow_jumps = save_cfj;
+}
+
+/* Pass descriptor for the first GCSE RTL pass.  */
+struct tree_opt_pass pass_gcse =
+{
+ "gcse1", /* name */
+ gate_handle_gcse, /* gate */
+ rest_of_handle_gcse, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_GCSE, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func |
+ TODO_verify_flow | TODO_ggc_collect, /* todo_flags_finish */
+ 'G' /* letter */
+};
+
#include "gt-gcse.h"