/* Global common subexpression elimination/Partial redundancy elimination
and global constant/copy propagation for GNU compiler.
- Copyright (C) 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
+ Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002
+ Free Software Foundation, Inc.
This file is part of GCC.
#include "output.h"
#include "function.h"
#include "expr.h"
+#include "except.h"
#include "ggc.h"
#include "params.h"
static int classic_gcse PARAMS ((void));
static int one_classic_gcse_pass PARAMS ((int));
static void invalidate_nonnull_info PARAMS ((rtx, rtx, void *));
-static void delete_null_pointer_checks_1 PARAMS ((varray_type *, unsigned int *,
+static void delete_null_pointer_checks_1 PARAMS ((unsigned int *,
sbitmap *, sbitmap *,
struct null_pointer_info *));
static rtx process_insert_insn PARAMS ((struct expr *));
basic_block));
static void free_store_memory PARAMS ((void));
static void store_motion PARAMS ((void));
+static void free_insn_expr_list_list PARAMS ((rtx *));
static void clear_modify_mem_tables PARAMS ((void));
static void free_modify_mem_tables PARAMS ((void));
\f
if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
{
if (warn_disabled_optimization)
- warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
- n_basic_blocks, n_edges / n_basic_blocks);
+ warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
+ n_basic_blocks, n_edges / n_basic_blocks);
return 0;
}
{
free_modify_mem_tables ();
modify_mem_list
- = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx *));
+ = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx));
canon_modify_mem_list
- = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx *));
- memset ((char *) modify_mem_list, 0, n_basic_blocks * sizeof (rtx *));
- memset ((char *) canon_modify_mem_list, 0, n_basic_blocks * sizeof (rtx *));
+ = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx));
+ memset ((char *) modify_mem_list, 0, n_basic_blocks * sizeof (rtx));
+ memset ((char *) canon_modify_mem_list, 0, n_basic_blocks * sizeof (rtx));
orig_bb_count = n_basic_blocks;
}
free_reg_set_mem ();
{
int i;
#ifndef AVOID_CCMODE_COPIES
- rtx reg,insn;
+ rtx reg, insn;
#endif
memset (can_copy_p, 0, NUM_MACHINE_MODES);
alloc_gcse_mem (f)
rtx f;
{
- int i,n;
+ int i, n;
rtx insn;
/* Find the largest UID and create a mapping from UIDs to CUIDs.
max_gcse_regno);
/* Allocate array to keep a list of insns which modify memory in each
basic block. */
- modify_mem_list = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx *));
- canon_modify_mem_list = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx *));
- memset ((char *) modify_mem_list, 0, n_basic_blocks * sizeof (rtx *));
- memset ((char *) canon_modify_mem_list, 0, n_basic_blocks * sizeof (rtx *));
+ modify_mem_list = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx));
+ canon_modify_mem_list = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx));
+ memset ((char *) modify_mem_list, 0, n_basic_blocks * sizeof (rtx));
+ memset ((char *) canon_modify_mem_list, 0, n_basic_blocks * sizeof (rtx));
modify_mem_list_set = BITMAP_XMALLOC ();
canon_modify_mem_list_set = BITMAP_XMALLOC ();
}
= (struct reg_set **) grealloc ((char *) reg_set_table,
new_size * sizeof (struct reg_set *));
memset ((char *) (reg_set_table + reg_set_table_size), 0,
- (new_size - reg_set_table_size) * sizeof (struct reg_set *));
+ (new_size - reg_set_table_size) * sizeof (struct reg_set *));
reg_set_table_size = new_size;
}
case SUBREG:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_VECTOR:
case CALL:
return 0;
case CONST:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_VECTOR:
case SYMBOL_REF:
case LABEL_REF:
case ADDR_VEC:
const char *ps;
{
unsigned hash = 0;
- const unsigned char *p = (const unsigned char *)ps;
+ const unsigned char *p = (const unsigned char *) ps;
if (p)
while (*p)
+ (unsigned int) CONST_DOUBLE_HIGH (x));
return hash;
+ case CONST_VECTOR:
+ {
+ int units;
+ rtx elt;
+
+ units = CONST_VECTOR_NUNITS (x);
+
+ for (i = 0; i < units; ++i)
+ {
+ elt = CONST_VECTOR_ELT (x, i);
+ hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
+ }
+
+ return hash;
+ }
+
/* Assume there is only one rtx object for any given label. */
case LABEL_REF:
/* We don't hash on the address of the CODE_LABEL to avoid bootstrap
default:
abort ();
}
- }
+ }
return 1;
}
&& regno >= FIRST_PSEUDO_REGISTER
/* Don't GCSE something if we can't do a reg/reg copy. */
&& can_copy_p [GET_MODE (dest)]
+ /* GCSE commonly inserts instructions after the insn.  We can't
+ do that easily for EH_REGION notes so disable GCSE on these
+ for now. */
+ && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
/* Is SET_SRC something we want to gcse? */
&& want_to_gcse_p (src)
/* Don't CSE a nop. */
/* Don't GCSE if it has attached REG_EQUIV note.
At this point only function parameters should have
REG_EQUIV notes and if the argument slot is used somewhere
- explicitely, it means address of parameter has been taken,
+ explicitly, it means address of parameter has been taken,
so we should not extend the lifetime of the pseudo. */
&& ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
|| GET_CODE (XEXP (note, 0)) != MEM))
&& REGNO (src) >= FIRST_PSEUDO_REGISTER
&& can_copy_p [GET_MODE (dest)]
&& REGNO (src) != regno)
- || GET_CODE (src) == CONST_INT
- || GET_CODE (src) == SYMBOL_REF
- || GET_CODE (src) == CONST_DOUBLE)
+ || CONSTANT_P (src))
/* A copy is not available if its src or dest is subsequently
modified. Here we want to search from INSN+1 on, but
oprs_available_p searches from INSN on. */
void * v_insn;
{
rtx dest_addr, insn;
+ int bb;
while (GET_CODE (dest) == SUBREG
|| GET_CODE (dest) == ZERO_EXTRACT
dest_addr = get_addr (XEXP (dest, 0));
dest_addr = canon_rtx (dest_addr);
insn = (rtx) v_insn;
+ bb = BLOCK_NUM (insn);
- canon_modify_mem_list[BLOCK_NUM (insn)] =
- alloc_INSN_LIST (dest_addr, canon_modify_mem_list[BLOCK_NUM (insn)]);
- canon_modify_mem_list[BLOCK_NUM (insn)] =
- alloc_INSN_LIST (dest, canon_modify_mem_list[BLOCK_NUM (insn)]);
- bitmap_set_bit (canon_modify_mem_list_set, BLOCK_NUM (insn));
+ canon_modify_mem_list[bb] =
+ alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
+ canon_modify_mem_list[bb] =
+ alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
+ bitmap_set_bit (canon_modify_mem_list_set, bb);
}
/* Record memory modification information for INSN. We do not actually care
record_last_mem_set_info (insn)
rtx insn;
{
+ int bb = BLOCK_NUM (insn);
+
/* load_killed_in_block_p will handle the case of calls clobbering
everything. */
- modify_mem_list[BLOCK_NUM (insn)] =
- alloc_INSN_LIST (insn, modify_mem_list[BLOCK_NUM (insn)]);
- bitmap_set_bit (modify_mem_list_set, BLOCK_NUM (insn));
+ modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
+ bitmap_set_bit (modify_mem_list_set, bb);
if (GET_CODE (insn) == CALL_INSN)
{
/* Note that traversals of this loop (other than for free-ing)
will break after encountering a CALL_INSN. So, there's no
need to insert a pair of items, as canon_list_insert does. */
- canon_modify_mem_list[BLOCK_NUM (insn)] =
- alloc_INSN_LIST (insn, canon_modify_mem_list[BLOCK_NUM (insn)]);
- bitmap_set_bit (canon_modify_mem_list_set, BLOCK_NUM (insn));
+ canon_modify_mem_list[bb] =
+ alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
+ bitmap_set_bit (canon_modify_mem_list_set, bb);
}
else
- note_stores (PATTERN (insn), canon_list_insert, (void*)insn );
+ note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
}
/* Called from compute_hash_table via note_stores to handle one
hash_scan_insn (insn, set_p, in_libcall_block);
if (!set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
in_libcall_block = 0;
- }
+ }
}
free (reg_avail_info);
/* Initialize count of number of entries in hash table. */
n_sets = 0;
memset ((char *) set_hash_table, 0,
- set_hash_table_size * sizeof (struct expr *));
+ set_hash_table_size * sizeof (struct expr *));
compute_hash_table (1);
}
/* Initialize count of number of entries in hash table. */
n_exprs = 0;
memset ((char *) expr_hash_table, 0,
- expr_hash_table_size * sizeof (struct expr *));
+ expr_hash_table_size * sizeof (struct expr *));
compute_hash_table (0);
}
return expr;
}
+/* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
+   types may be mixed.  Frees every node on the list rooted at *LISTP,
+   choosing the deallocator that matches each node's rtx code, and
+   leaves *LISTP empty (NULL) for the caller.  */
+
+static void
+free_insn_expr_list_list (listp)
+     rtx *listp;
+{
+  rtx list, next;
+
+  for (list = *listp; list ; list = next)
+    {
+      /* Grab the successor before this node is returned to its pool.  */
+      next = XEXP (list, 1);
+      /* Dispatch on the node's code so each node goes back to the
+	 matching free list (EXPR_LIST vs. INSN_LIST).  */
+      if (GET_CODE (list) == EXPR_LIST)
+	free_EXPR_LIST_node (list);
+      else
+	free_INSN_LIST_node (list);
+    }
+
+  /* The list is now empty; reflect that at the caller's root.  */
+  *listp = NULL;
+}
+
/* Clear canon_modify_mem_list and modify_mem_list tables. */
static void
clear_modify_mem_tables ()
int i;
EXECUTE_IF_SET_IN_BITMAP
- (canon_modify_mem_list_set, 0, i,
- free_INSN_LIST_list (modify_mem_list + i));
- bitmap_clear (canon_modify_mem_list_set);
+ (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
+ bitmap_clear (modify_mem_list_set);
EXECUTE_IF_SET_IN_BITMAP
(canon_modify_mem_list_set, 0, i,
- free_INSN_LIST_list (canon_modify_mem_list + i));
- bitmap_clear (modify_mem_list_set);
+ free_insn_expr_list_list (canon_modify_mem_list + i));
+ bitmap_clear (canon_modify_mem_list_set);
}
/* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
case CONST:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_VECTOR:
case SYMBOL_REF:
case LABEL_REF:
case ADDR_VEC:
for (bb = 0; bb < n_basic_blocks; bb++)
{
sbitmap_union_of_preds (reaching_defs[bb], rd_out, bb);
- changed |= sbitmap_union_of_diff (rd_out[bb], rd_gen[bb],
- reaching_defs[bb], rd_kill[bb]);
+ changed |= sbitmap_union_of_diff_cg (rd_out[bb], rd_gen[bb],
+ reaching_defs[bb], rd_kill[bb]);
}
passes++;
}
case CONST:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_VECTOR:
case SYMBOL_REF:
case LABEL_REF:
case ADDR_VEC:
|| (((this_reg = reg_set_table[regnum_for_replacing]),
this_reg->next == NULL)
|| can_disregard_other_sets (&this_reg, insn, 0)))
- {
- use_src = 1;
- found_setting = 1;
- }
+ {
+ use_src = 1;
+ found_setting = 1;
+ }
}
if (!found_setting)
case CONST:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_VECTOR:
case SYMBOL_REF:
case LABEL_REF:
case ADDR_VEC:
This can not happen since the set of (reg Y) would have killed the
set of (reg X) making it unavailable at the start of this block. */
while (1)
- {
+ {
rtx src;
struct expr *set = lookup_set (regno, NULL_RTX);
/* Follow the copy chain, ie start another iteration of the loop
and see if we have an available copy into SRC. */
regno = REGNO (src);
- }
+ }
/* SET1 holds the last set that was available and anticipatable at
INSN. */
if (rtx_equal_p (new, SET_SRC (set)))
return 0;
- /* If this is now a no-op leave it that way, but update LABEL_NUSED if
- necessary. */
+ /* If this is now a no-op delete it, otherwise this must be a valid insn. */
if (new == pc_rtx)
+ delete_insn (insn);
+ else
{
- SET_SRC (set) = new;
-
- if (JUMP_LABEL (insn) != 0)
- {
- --LABEL_NUSES (JUMP_LABEL (insn));
- JUMP_LABEL (insn) = NULL_RTX;
- }
- }
-
- /* Otherwise, this must be a valid instruction. */
- else if (! validate_change (insn, &SET_SRC (set), new, 0))
- return 0;
+ if (! validate_change (insn, &SET_SRC (set), new, 0))
+ return 0;
- /* If this has turned into an unconditional jump,
- then put a barrier after it so that the unreachable
- code will be deleted. */
- if (GET_CODE (SET_SRC (set)) == LABEL_REF)
- emit_barrier_after (insn);
+ /* If this has turned into an unconditional jump,
+ then put a barrier after it so that the unreachable
+ code will be deleted. */
+ if (GET_CODE (SET_SRC (set)) == LABEL_REF)
+ emit_barrier_after (insn);
+ }
run_jump_opt_after_gcse = 1;
delete_insn (insn);
return 1;
- }
+}
#endif
/* Perform constant and copy propagation on INSN.
src = SET_SRC (pat);
/* Constant propagation. */
- if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE
- || GET_CODE (src) == SYMBOL_REF)
+ if (CONSTANT_P (src))
{
/* Handle normal insns first. */
if (GET_CODE (insn) == INSN
call mark_oprs_set if we turned the insn into a NOTE. */
if (GET_CODE (insn) != NOTE)
mark_oprs_set (insn);
- }
+ }
}
if (gcse_file != NULL)
antloc = NULL;
sbitmap_vector_free (ae_kill);
ae_kill = NULL;
- free (trapping_expr);
+ sbitmap_free (trapping_expr);
}
\f
/* PRE utilities */
int rval;
char *visited = (char *) xcalloc (n_basic_blocks, 1);
- rval = pre_expr_reaches_here_p_work(occr_bb, expr, bb, visited);
+ rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
free (visited);
return rval;
pat = process_insert_insn (expr);
/* If the last insn is a jump, insert EXPR in front [taking care to
- handle cc0, etc. properly]. */
+ handle cc0, etc. properly].  Similarly we need to take care of trapping
+ instructions in the presence of non-call exceptions.  */
- if (GET_CODE (insn) == JUMP_INSN)
+ if (GET_CODE (insn) == JUMP_INSN
+ || (GET_CODE (insn) == INSN
+ && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
{
#ifdef HAVE_cc0
rtx note;
#endif
+ /* It should always be the case that we can put these instructions
+ anywhere in the basic block when performing PRE optimizations.
+ Check this. */
+ if (GET_CODE (insn) == INSN && pre
+ && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
+ && !TEST_BIT (transp[bb->index], expr->bitmap_index))
+ abort ();
/* If this is a jump table, then we can't insert stuff here. Since
we know the previous real insn must be the tablejump, we insert
/* Likewise if the last insn is a call, as will happen in the presence
of exception handling. */
- else if (GET_CODE (insn) == CALL_INSN)
+ else if (GET_CODE (insn) == CALL_INSN
+ && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
{
/* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
we search backward and place the instructions before the first
}
free (index_map);
- free (pre_redundant_insns);
+ sbitmap_free (pre_redundant_insns);
return changed;
}
they are not our responsibility to free. */
static void
-delete_null_pointer_checks_1 (delete_list, block_reg, nonnull_avin,
+delete_null_pointer_checks_1 (block_reg, nonnull_avin,
nonnull_avout, npi)
- varray_type *delete_list;
unsigned int *block_reg;
sbitmap *nonnull_avin;
sbitmap *nonnull_avout;
{
rtx new_jump;
- new_jump = emit_jump_insn_before (gen_jump (JUMP_LABEL (last_insn)),
- last_insn);
+ new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
+ last_insn);
JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
LABEL_NUSES (JUMP_LABEL (new_jump))++;
emit_barrier_after (new_jump);
}
- if (!*delete_list)
- VARRAY_RTX_INIT (*delete_list, 10, "delete_list");
- VARRAY_PUSH_RTX (*delete_list, last_insn);
+ delete_insn (last_insn);
if (compare_and_branch == 2)
- VARRAY_PUSH_RTX (*delete_list, earliest);
+ delete_insn (earliest);
+ purge_dead_edges (BASIC_BLOCK (bb));
/* Don't check this block again. (Note that BLOCK_END is
invalid here; we deleted the last instruction in the
{
sbitmap *nonnull_avin, *nonnull_avout;
unsigned int *block_reg;
- varray_type delete_list = NULL;
int bb;
int reg;
int regs_per_pass;
int max_reg;
- unsigned int i;
struct null_pointer_info npi;
/* If we have only a single block, then there's nothing to do. */
{
npi.min_reg = reg;
npi.max_reg = MIN (reg + regs_per_pass, max_reg);
- delete_null_pointer_checks_1 (&delete_list, block_reg, nonnull_avin,
+ delete_null_pointer_checks_1 (block_reg, nonnull_avin,
nonnull_avout, &npi);
}
- /* Now delete the instructions all at once. This breaks the CFG. */
- if (delete_list)
- {
- for (i = 0; i < VARRAY_ACTIVE_SIZE (delete_list); i++)
- delete_related_insns (VARRAY_RTX (delete_list, i));
- VARRAY_FREE (delete_list);
- }
-
/* Free the table of registers compared at the end of every block. */
free (block_reg);
the convergence. */
for (bb = n_basic_blocks - 1; bb >= 0; bb--)
{
- changed |= sbitmap_a_or_b_and_c (hoist_vbein[bb], antloc[bb],
- hoist_vbeout[bb], transp[bb]);
+ changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb], antloc[bb],
+ hoist_vbeout[bb], transp[bb]);
if (bb != n_basic_blocks - 1)
sbitmap_intersection_of_succs (hoist_vbeout[bb], hoist_vbein, bb);
}
if (visited == NULL)
{
- visited_allocated_locally = 1;
- visited = xcalloc (n_basic_blocks, 1);
+ visited_allocated_locally = 1;
+ visited = xcalloc (n_basic_blocks, 1);
}
for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
}
}
- free (index_map);
+ free (index_map);
}
/* Top level routine to perform one code hoisting (aka unification) pass
rtx x;
{
const char * fmt;
- int i,j;
+ int i, j;
struct ls_expr * ptr;
/* Invalidate it in the list. */
case CONST:
case CONST_INT:
case CONST_DOUBLE:
+ case CONST_VECTOR:
case SYMBOL_REF:
case LABEL_REF:
case ADDR_VEC:
rtx x, store_pattern;
{
const char * fmt;
- int i,j;
+ int i, j;
int ret = 0;
if (!x)
if (GET_CODE (insn) == CALL_INSN)
{
- if (CONST_OR_PURE_CALL_P (insn))
- return 0;
- else
- return 1;
+ /* A normal or pure call might read from pattern,
+ but a const call will not. */
+ return ! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn);
}
if (GET_CODE (PATTERN (insn)) == SET)
rtx x, insn;
basic_block bb;
{
- rtx last = bb->end;
+ rtx last = bb->end;
- if (insn == last)
- return 0;
+ if (insn == last)
+ return 0;
/* Check if the register operands of the store are OK in this block.
Note that if registers are changed ANYWHERE in the block, we'll
if (!store_ops_ok (XEXP (x, 0), bb))
return 1;
- for ( ; insn && insn != NEXT_INSN (last); insn = NEXT_INSN (insn))
- if (store_killed_in_insn (x, insn))
- return 1;
+ for ( ; insn && insn != NEXT_INSN (last); insn = NEXT_INSN (insn))
+ if (store_killed_in_insn (x, insn))
+ return 1;
return 0;
}
rtx x, insn;
basic_block bb;
{
- rtx first = bb->head;
+ rtx first = bb->head;
- if (insn == first)
- return store_killed_in_insn (x, insn);
+ if (insn == first)
+ return store_killed_in_insn (x, insn);
/* Check if the register operands of the store are OK in this block.
Note that if registers are changed ANYWHERE in the block, we'll
if (!store_ops_ok (XEXP (x, 0), bb))
return 1;
- for ( ; insn && insn != PREV_INSN (first); insn = PREV_INSN (insn))
- if (store_killed_in_insn (x, insn))
- return 1;
+ for ( ; insn && insn != PREV_INSN (first); insn = PREV_INSN (insn))
+ if (store_killed_in_insn (x, insn))
+ return 1;
- return 0;
+ return 0;
}
#define ANTIC_STORE_LIST(x) ((x)->loads)
{
rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
if (gcse_file)
- fprintf(gcse_file, "Removing redundant store:\n");
+ fprintf (gcse_file, "Removing redundant store:\n");
replace_store_insn (r, XEXP (st, 0), bb);
XEXP (st, 0) = insn;
continue;
fprintf (gcse_file,
"STORE_MOTION delete insn in BB %d:\n ", bb->index);
print_inline_rtx (gcse_file, del, 6);
- fprintf(gcse_file, "\nSTORE MOTION replaced with insn:\n ");
+ fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
print_inline_rtx (gcse_file, insn, 6);
- fprintf(gcse_file, "\n");
+ fprintf (gcse_file, "\n");
}
delete_insn (del);