/* Global common subexpression elimination/Partial redundancy elimination
and global constant/copy propagation for GNU compiler.
- Copyright (C) 1997, 1998, 1999, 2000 Free Software Foundation, Inc.
+ Copyright (C) 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
This file is part of GNU CC.
static struct expr **expr_hash_table;
/* Total size of the copy propagation hash table, in elements. */
-static int set_hash_table_size;
+static unsigned int set_hash_table_size;
/* The table itself.
This is an array of `set_hash_table_size' elements. */
static void insert_set_in_table PARAMS ((rtx, rtx));
static unsigned int hash_expr PARAMS ((rtx, enum machine_mode, int *, int));
static unsigned int hash_expr_1 PARAMS ((rtx, enum machine_mode, int *));
+static unsigned int hash_string_1 PARAMS ((const char *));
static unsigned int hash_set PARAMS ((int, int));
static int expr_equiv_p PARAMS ((rtx, rtx));
static void record_last_reg_set_info PARAMS ((rtx, int));
a couple switch statements. So we require a relatively large number
of basic blocks and the ratio of edges to blocks to be high. */
if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
- return 0;
+ {
+ if (warn_disabled_optimization)
+ warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
+ n_basic_blocks, n_edges / n_basic_blocks);
+ return 0;
+ }
/* See what modes support reg/reg copy operations. */
if (! can_copy_init_p)
#ifndef AVOID_CCMODE_COPIES
rtx reg,insn;
#endif
- char *free_point = (char *) oballoc (1);
-
- bzero (can_copy_p, NUM_MACHINE_MODES);
+ memset (can_copy_p, 0, NUM_MACHINE_MODES);
start_sequence ();
for (i = 0; i < NUM_MACHINE_MODES; i++)
can_copy_p[i] = 1;
end_sequence ();
-
- /* Free the objects we just allocated. */
- obfree (free_point);
}
\f
/* Cover function to xmalloc to record bytes allocated. */
max_uid = get_max_uid ();
n = (max_uid + 1) * sizeof (int);
uid_cuid = (int *) gmalloc (n);
- bzero ((char *) uid_cuid, n);
+ memset ((char *) uid_cuid, 0, n);
for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
{
if (INSN_P (insn))
max_cuid = i;
n = (max_cuid + 1) * sizeof (rtx);
cuid_insn = (rtx *) gmalloc (n);
- bzero ((char *) cuid_insn, n);
+ memset ((char *) cuid_insn, 0, n);
for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
if (INSN_P (insn))
CUID_INSN (i++) = insn;
reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
n = reg_set_table_size * sizeof (struct reg_set *);
reg_set_table = (struct reg_set **) gmalloc (n);
- bzero ((char *) reg_set_table, n);
+ memset ((char *) reg_set_table, 0, n);
gcc_obstack_init (&reg_set_obstack);
}
reg_set_table
= (struct reg_set **) grealloc ((char *) reg_set_table,
new_size * sizeof (struct reg_set *));
- bzero ((char *) (reg_set_table + reg_set_table_size),
+ memset ((char *) (reg_set_table + reg_set_table_size), 0,
(new_size - reg_set_table_size) * sizeof (struct reg_set *));
reg_set_table_size = new_size;
}
hash = hash_expr_1 (x, mode, do_not_record_p);
return hash % hash_table_size;
}
+/* Hash a string.  Just add its bytes up.
+   PS may be NULL, in which case the result is 0.  Helper for
+   hash_expr_1, so string operands (e.g. XSTR fields and asm
+   templates/constraints) contribute to an expression's hash by
+   content rather than by address.  */
+static inline unsigned
+hash_string_1 (ps)
+ const char *ps;
+{
+ unsigned hash = 0;
+ /* Walk the bytes as unsigned char so the sum is well defined even
+    where plain char is signed.  */
+ const unsigned char *p = (const unsigned char *)ps;
+
+ if (p)
+ while (*p)
+ hash += *p++;
+
+ return hash;
+}
/* Subroutine of hash_expr to do the actual work. */
*do_not_record_p = 1;
return 0;
}
+ else
+ {
+ /* We don't want to take the filename and line into account. */
+ hash += (unsigned) code + (unsigned) GET_MODE (x)
+ + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
+ + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
+ + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
+
+ if (ASM_OPERANDS_INPUT_LENGTH (x))
+ {
+ for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
+ {
+ hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
+ GET_MODE (ASM_OPERANDS_INPUT (x, i)),
+ do_not_record_p)
+ + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
+ (x, i)));
+ }
+
+ hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
+ x = ASM_OPERANDS_INPUT (x, 0);
+ mode = GET_MODE (x);
+ goto repeat;
+ }
+ return hash;
+ }
default:
break;
}
else if (fmt[i] == 's')
- {
- register const unsigned char *p =
- (const unsigned char *) XSTR (x, i);
-
- if (p)
- while (*p)
- hash += *p++;
- }
+ hash += hash_string_1 (XSTR (x, i));
else if (fmt[i] == 'i')
hash += (unsigned int) XINT (x, i);
else
|| (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
&& expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));
+ case ASM_OPERANDS:
+ /* We don't use the generic code below because we want to
+ disregard filename and line numbers. */
+
+ /* A volatile asm isn't equivalent to any other. */
+ if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
+ return 0;
+
+ if (GET_MODE (x) != GET_MODE (y)
+ || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
+ || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
+ ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
+ || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
+ || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
+ return 0;
+
+ if (ASM_OPERANDS_INPUT_LENGTH (x))
+ {
+ for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
+ if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
+ ASM_OPERANDS_INPUT (y, i))
+ || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
+ ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
+ return 0;
+ }
+
+ return 1;
+
default:
break;
}
??? This isn't needed during const/copy propagation, but it's cheap to
compute. Later. */
sbitmap_vector_zero (reg_set_in_block, n_basic_blocks);
- bzero ((char *) mem_set_in_block, n_basic_blocks);
+ memset ((char *) mem_set_in_block, 0, n_basic_blocks);
/* Some working arrays used to track first and last set in each block. */
/* ??? One could use alloca here, but at some size a threshold is crossed
{
/* Initialize count of number of entries in hash table. */
n_sets = 0;
- bzero ((char *) set_hash_table,
+ memset ((char *) set_hash_table, 0,
set_hash_table_size * sizeof (struct expr *));
compute_hash_table (1);
{
/* Initialize count of number of entries in hash table. */
n_exprs = 0;
- bzero ((char *) expr_hash_table,
+ memset ((char *) expr_hash_table, 0,
expr_hash_table_size * sizeof (struct expr *));
compute_hash_table (0);
&& GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
&& condjump_p (NEXT_INSN (insn))
&& ! simplejump_p (NEXT_INSN (insn)))
- changed |= cprop_cc0_jump (insn, reg_used, src);
+ {
+ if (cprop_cc0_jump (insn, reg_used, src))
+ {
+ changed = 1;
+ break;
+ }
+ }
#endif
}
else if (GET_CODE (src) == REG
static void
compute_pre_data ()
{
+ sbitmap trapping_expr;
int i;
+ unsigned int ui;
compute_local_properties (transp, comp, antloc, 0);
sbitmap_vector_zero (ae_kill, n_basic_blocks);
+ /* Collect expressions which might trap. */
+ trapping_expr = sbitmap_alloc (n_exprs);
+ sbitmap_zero (trapping_expr);
+ for (ui = 0; ui < expr_hash_table_size; ui++)
+ {
+ struct expr *e;
+ for (e = expr_hash_table[ui]; e != NULL; e = e->next_same_hash)
+ if (may_trap_p (e->expr))
+ SET_BIT (trapping_expr, e->bitmap_index);
+ }
+
/* Compute ae_kill for each basic block using:
~(TRANSP | COMP)
for (i = 0; i < n_basic_blocks; i++)
{
+ edge e;
+
+ /* If the current block is the destination of an abnormal edge, we
+ kill all trapping expressions because we won't be able to properly
+ place the instruction on the edge. So make them neither
+ anticipatable nor transparent. This is fairly conservative. */
+ for (e = BASIC_BLOCK (i)->pred; e ; e = e->pred_next)
+ if (e->flags & EDGE_ABNORMAL)
+ {
+ sbitmap_difference (antloc[i], antloc[i], trapping_expr);
+ sbitmap_difference (transp[i], transp[i], trapping_expr);
+ break;
+ }
+
sbitmap_a_or_b (ae_kill[i], transp[i], comp[i]);
sbitmap_not (ae_kill[i], ae_kill[i]);
}
antloc = NULL;
free (ae_kill);
ae_kill = NULL;
+ free (trapping_expr);
}
\f
/* PRE utilities */
}
\f
/* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
- We have to add REG_LABEL notes, because the following loop optimization
- pass requires them. */
+ If notes are added to an insn which references a CODE_LABEL, the
+ LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
+ because the following loop optimization pass requires them. */
/* ??? This is very similar to the loop.c add_label_notes function. We
could probably share code here. */
REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, XEXP (x, 0),
REG_NOTES (insn));
+ if (LABEL_P (XEXP (x, 0)))
+ LABEL_NUSES (XEXP (x, 0))++;
return;
}
compute_local_properties (transp, comp, antloc, 0);
compute_transpout ();
compute_code_hoist_vbeinout ();
- compute_flow_dominators (dominators, NULL);
+ calculate_dominance_info (NULL, dominators, CDI_DOMINATORS);
if (gcse_file)
fprintf (gcse_file, "\n");
}
visited = xcalloc (n_basic_blocks, 1);
}
- visited[expr_bb] = 1;
for (pred = BASIC_BLOCK (bb)->pred; pred != NULL; pred = pred->pred_next)
{
int pred_bb = pred->src->index;