/* SSA-PRE for trees.
- Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
+ Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
Free Software Foundation, Inc.
Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
<stevenb@suse.de>
#include "system.h"
#include "coretypes.h"
#include "tm.h"
-#include "ggc.h"
#include "tree.h"
#include "basic-block.h"
-#include "diagnostic.h"
+#include "tree-pretty-print.h"
+#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
-#include "real.h"
#include "alloc-pool.h"
#include "obstack.h"
#include "tree-pass.h"
expressions. */
typedef struct bitmap_set
{
- bitmap expressions;
- bitmap values;
+ bitmap_head expressions;
+ bitmap_head values;
} *bitmap_set_t;
#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
- EXECUTE_IF_SET_IN_BITMAP((set)->expressions, 0, (id), (bi))
+ EXECUTE_IF_SET_IN_BITMAP(&(set)->expressions, 0, (id), (bi))
#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
- EXECUTE_IF_SET_IN_BITMAP((set)->values, 0, (id), (bi))
+ EXECUTE_IF_SET_IN_BITMAP(&(set)->values, 0, (id), (bi))
/* Mapping from value id to expressions with that value_id. */
DEF_VEC_P (bitmap_set_t);
static tree storetemp;
static tree prephitemp;
-/* Set of blocks with statements that have had its EH information
- cleaned up. */
+/* Set of blocks with statements that have had their EH properties changed. */
static bitmap need_eh_cleanup;
+/* Set of blocks with statements that have had their AB properties changed. */
+static bitmap need_ab_cleanup;
+
/* The phi_translate_table caches phi translations for a given
expression and predecessor. */
slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
new_pair->hashcode, INSERT);
- if (*slot)
- free (*slot);
+ free (*slot);
*slot = (void *) new_pair;
}
bitmap_set_new (void)
{
bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
- ret->expressions = BITMAP_ALLOC (&grand_bitmap_obstack);
- ret->values = BITMAP_ALLOC (&grand_bitmap_obstack);
+ bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
+ bitmap_initialize (&ret->values, &grand_bitmap_obstack);
return ret;
}
unsigned int val = get_expr_value_id (expr);
if (!value_id_constant_p (val))
{
- bitmap_clear_bit (set->values, val);
- bitmap_clear_bit (set->expressions, get_expression_id (expr));
+ bitmap_clear_bit (&set->values, val);
+ bitmap_clear_bit (&set->expressions, get_expression_id (expr));
}
}
{
/* We specifically expect this and only this function to be able to
insert constants into a set. */
- bitmap_set_bit (set->values, val);
- bitmap_set_bit (set->expressions, get_or_alloc_expression_id (expr));
+ bitmap_set_bit (&set->values, val);
+ bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
}
}
static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
- bitmap_copy (dest->expressions, orig->expressions);
- bitmap_copy (dest->values, orig->values);
+ bitmap_copy (&dest->expressions, &orig->expressions);
+ bitmap_copy (&dest->values, &orig->values);
}
static void
bitmap_set_free (bitmap_set_t set)
{
- BITMAP_FREE (set->expressions);
- BITMAP_FREE (set->values);
+ bitmap_clear (&set->expressions);
+ bitmap_clear (&set->values);
}
VEC(pre_expr, heap) *result;
/* Pre-allocate roughly enough space for the array. */
- result = VEC_alloc (pre_expr, heap, bitmap_count_bits (set->values));
+ result = VEC_alloc (pre_expr, heap, bitmap_count_bits (&set->values));
FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
{
bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, i);
FOR_EACH_EXPR_ID_IN_SET (exprset, j, bj)
{
- if (bitmap_bit_p (set->expressions, j))
+ if (bitmap_bit_p (&set->expressions, j))
VEC_safe_push (pre_expr, heap, result, expression_for_id (j));
}
}
if (dest != orig)
{
- bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);
+ bitmap_head temp;
+ bitmap_initialize (&temp, &grand_bitmap_obstack);
- bitmap_and_into (dest->values, orig->values);
- bitmap_copy (temp, dest->expressions);
- EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
+ bitmap_and_into (&dest->values, &orig->values);
+ bitmap_copy (&temp, &dest->expressions);
+ EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
{
pre_expr expr = expression_for_id (i);
unsigned int value_id = get_expr_value_id (expr);
- if (!bitmap_bit_p (dest->values, value_id))
- bitmap_clear_bit (dest->expressions, i);
+ if (!bitmap_bit_p (&dest->values, value_id))
+ bitmap_clear_bit (&dest->expressions, i);
}
- BITMAP_FREE (temp);
+ bitmap_clear (&temp);
}
}
bitmap_iterator bi;
unsigned int i;
- bitmap_and_compl (result->expressions, dest->expressions,
- orig->expressions);
+ bitmap_and_compl (&result->expressions, &dest->expressions,
+ &orig->expressions);
FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
{
pre_expr expr = expression_for_id (i);
unsigned int value_id = get_expr_value_id (expr);
- bitmap_set_bit (result->values, value_id);
+ bitmap_set_bit (&result->values, value_id);
}
return result;
{
unsigned int i;
bitmap_iterator bi;
- bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);
+ bitmap_head temp;
+
+ bitmap_initialize (&temp, &grand_bitmap_obstack);
- bitmap_copy (temp, a->expressions);
- EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
+ bitmap_copy (&temp, &a->expressions);
+ EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
{
pre_expr expr = expression_for_id (i);
if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
bitmap_remove_from_set (a, expr);
}
- BITMAP_FREE (temp);
+ bitmap_clear (&temp);
}
if (value_id_constant_p (value_id))
return true;
- if (!set || bitmap_empty_p (set->expressions))
+ if (!set || bitmap_empty_p (&set->expressions))
return false;
- return bitmap_bit_p (set->values, value_id);
+ return bitmap_bit_p (&set->values, value_id);
}
static inline bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
- return bitmap_bit_p (set->expressions, get_expression_id (expr));
+ return bitmap_bit_p (&set->expressions, get_expression_id (expr));
}
/* Replace an instance of value LOOKFOR with expression EXPR in SET. */
exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
{
- if (bitmap_bit_p (set->expressions, i))
+ if (bitmap_clear_bit (&set->expressions, i))
{
- bitmap_clear_bit (set->expressions, i);
- bitmap_set_bit (set->expressions, get_expression_id (expr));
+ bitmap_set_bit (&set->expressions, get_expression_id (expr));
return;
}
}
static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
- return bitmap_equal_p (a->values, b->values);
+ return bitmap_equal_p (&a->values, &b->values);
}
/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
{
unsigned int val = get_expr_value_id (expr);
-#ifdef ENABLE_CHECKING
- gcc_assert (expr->id == get_or_alloc_expression_id (expr));
-#endif
+ gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));
/* Constant values are always considered to be part of the set. */
if (value_id_constant_p (val))
return;
/* If the value membership changed, add the expression. */
- if (bitmap_set_bit (set->values, val))
- bitmap_set_bit (set->expressions, expr->id);
+ if (bitmap_set_bit (&set->values, val))
+ bitmap_set_bit (&set->expressions, expr->id);
}
/* Print out EXPR to outfile. */
void debug_pre_expr (pre_expr);
/* Like print_pre_expr but always prints to stderr. */
-void
+DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
print_pre_expr (stderr, e);
void debug_bitmap_set (bitmap_set_t);
-void
+DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
print_bitmap_set (stderr, set, "debug", 0);
}
-void
+DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
print_value_expressions (stderr, val);
case REFERENCE:
{
vn_reference_t ref = PRE_EXPR_REFERENCE (e);
- VEC (vn_reference_op_s, heap) *operands = ref->operands;
- vn_reference_op_t op;
-
- /* Try to simplify the translated expression if it is
- a call to a builtin function with at most two arguments. */
- op = VEC_index (vn_reference_op_s, operands, 0);
- if (op->opcode == CALL_EXPR
- && TREE_CODE (op->op0) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
- && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
- && VEC_length (vn_reference_op_s, operands) >= 2
- && VEC_length (vn_reference_op_s, operands) <= 3)
- {
- vn_reference_op_t arg0, arg1 = NULL;
- bool anyconst = false;
- arg0 = VEC_index (vn_reference_op_s, operands, 1);
- if (VEC_length (vn_reference_op_s, operands) > 2)
- arg1 = VEC_index (vn_reference_op_s, operands, 2);
- if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
- || (arg0->opcode == ADDR_EXPR
- && is_gimple_min_invariant (arg0->op0)))
- anyconst = true;
- if (arg1
- && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
- || (arg1->opcode == ADDR_EXPR
- && is_gimple_min_invariant (arg1->op0))))
- anyconst = true;
- if (anyconst)
- {
- tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
- arg1 ? 2 : 1,
- arg0->op0,
- arg1 ? arg1->op0 : NULL);
- if (folded
- && TREE_CODE (folded) == NOP_EXPR)
- folded = TREE_OPERAND (folded, 0);
- if (folded
- && is_gimple_min_invariant (folded))
- return get_or_alloc_expr_for_constant (folded);
- }
- }
- return e;
- }
+ tree folded;
+ if ((folded = fully_constant_vn_reference_p (ref)))
+ return get_or_alloc_expr_for_constant (folded);
+ return e;
+ }
default:
return e;
}
that we will return. */
if (!pretemp || exprtype != TREE_TYPE (pretemp))
{
- pretemp = create_tmp_var (exprtype, "pretmp");
+ pretemp = create_tmp_reg (exprtype, "pretmp");
get_var_ann (pretemp);
}
+static pre_expr
+phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
+ basic_block pred, basic_block phiblock);
/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
the phis in PRED. Return NULL if we can't find a leader for each part
of the translated expression. */
static pre_expr
-phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
- basic_block pred, basic_block phiblock)
+phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
+ basic_block pred, basic_block phiblock)
{
- pre_expr oldexpr = expr;
- pre_expr phitrans;
-
- if (!expr)
- return NULL;
-
- /* Constants contain no values that need translation. */
- if (expr->kind == CONSTANT)
- return expr;
-
- if (value_id_constant_p (get_expr_value_id (expr)))
- return expr;
-
- phitrans = phi_trans_lookup (expr, pred);
- if (phitrans)
- return phitrans;
-
switch (expr->kind)
{
case NARY:
}
add_to_value (new_val_id, expr);
}
- phi_trans_add (oldexpr, expr, pred);
return expr;
}
break;
newop.op0 = op0;
newop.op1 = op1;
newop.op2 = op2;
+ /* If it transforms a non-constant ARRAY_REF into a constant
+ one, adjust the constant offset. */
+ if (newop.opcode == ARRAY_REF
+ && newop.off == -1
+ && TREE_CODE (op0) == INTEGER_CST
+ && TREE_CODE (op1) == INTEGER_CST
+ && TREE_CODE (op2) == INTEGER_CST)
+ {
+ double_int off = tree_to_double_int (op0);
+ off = double_int_add (off,
+ double_int_neg
+ (tree_to_double_int (op1)));
+ off = double_int_mul (off, tree_to_double_int (op2));
+ if (double_int_fits_in_shwi_p (off))
+ newop.off = off.low;
+ }
VEC_replace (vn_reference_op_s, newoperands, j, &newop);
/* If it transforms from an SSA_NAME to an address, fold with
a preceding indirect reference. */
if (j > 0 && op0 && TREE_CODE (op0) == ADDR_EXPR
&& VEC_index (vn_reference_op_s,
- newoperands, j - 1)->opcode == INDIRECT_REF)
+ newoperands, j - 1)->opcode == MEM_REF)
vn_reference_fold_indirect (&newoperands, &j);
}
if (i != VEC_length (vn_reference_op_s, operands))
{
unsigned int new_val_id;
pre_expr constant;
+ bool converted = false;
tree result = vn_reference_lookup_pieces (newvuse, ref->set,
ref->type,
newoperands,
- &newref, true);
- if (newref)
+ &newref, VN_WALK);
+ if (result)
VEC_free (vn_reference_op_s, heap, newoperands);
+ if (result
+ && !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
+ {
+ result = fold_build1 (VIEW_CONVERT_EXPR, ref->type, result);
+ converted = true;
+ }
+ else if (!result && newref
+ && !useless_type_conversion_p (ref->type, newref->type))
+ {
+ VEC_free (vn_reference_op_s, heap, newoperands);
+ return NULL;
+ }
+
if (result && is_gimple_min_invariant (result))
{
gcc_assert (!newoperands);
expr->kind = REFERENCE;
expr->id = 0;
- if (newref)
+ if (converted)
+ {
+ vn_nary_op_t nary;
+ tree nresult;
+
+ gcc_assert (CONVERT_EXPR_P (result)
+ || TREE_CODE (result) == VIEW_CONVERT_EXPR);
+
+ nresult = vn_nary_op_lookup_pieces (1, TREE_CODE (result),
+ TREE_TYPE (result),
+ TREE_OPERAND (result, 0),
+ NULL_TREE, NULL_TREE,
+ NULL_TREE,
+ &nary);
+ if (nresult && is_gimple_min_invariant (nresult))
+ return get_or_alloc_expr_for_constant (nresult);
+
+ expr->kind = NARY;
+ if (nary)
+ {
+ PRE_EXPR_NARY (expr) = nary;
+ constant = fully_constant_expression (expr);
+ if (constant != expr)
+ return constant;
+
+ new_val_id = nary->value_id;
+ get_or_alloc_expression_id (expr);
+ }
+ else
+ {
+ new_val_id = get_next_value_id ();
+ VEC_safe_grow_cleared (bitmap_set_t, heap,
+ value_expressions,
+ get_max_value_id() + 1);
+ nary = vn_nary_op_insert_pieces (1, TREE_CODE (result),
+ TREE_TYPE (result),
+ TREE_OPERAND (result, 0),
+ NULL_TREE, NULL_TREE,
+ NULL_TREE, NULL_TREE,
+ new_val_id);
+ PRE_EXPR_NARY (expr) = nary;
+ constant = fully_constant_expression (expr);
+ if (constant != expr)
+ return constant;
+ get_or_alloc_expression_id (expr);
+ }
+ }
+ else if (newref)
{
PRE_EXPR_REFERENCE (expr) = newref;
constant = fully_constant_expression (expr);
add_to_value (new_val_id, expr);
}
VEC_free (vn_reference_op_s, heap, newoperands);
- phi_trans_add (oldexpr, expr, pred);
return expr;
}
break;
}
}
+/* Wrapper around phi_translate_1 providing caching functionality. */
+
+static pre_expr
+phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
+ basic_block pred, basic_block phiblock)
+{
+ pre_expr phitrans;
+
+ if (!expr)
+ return NULL;
+
+ /* Constants contain no values that need translation. */
+ if (expr->kind == CONSTANT)
+ return expr;
+
+ if (value_id_constant_p (get_expr_value_id (expr)))
+ return expr;
+
+ if (expr->kind != NAME)
+ {
+ phitrans = phi_trans_lookup (expr, pred);
+ if (phitrans)
+ return phitrans;
+ }
+
+ /* Translate. */
+ phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);
+
+  /* Don't add empty translations to the cache, nor translations
+     of NAMEs as those are cheap to translate.  */
+ if (phitrans
+ && expr->kind != NAME)
+ phi_trans_add (expr, phitrans, pred);
+
+ return phitrans;
+}
+
+
/* For each expression in SET, translate the values through phi nodes
in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
expressions in DEST. */
pre_expr expr;
int i;
- if (!phi_nodes (phiblock))
+ if (gimple_seq_empty_p (phi_nodes (phiblock)))
{
bitmap_set_copy (dest, set);
return;
}
exprs = sorted_array_from_bitmap_set (set);
- for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
+ FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
{
pre_expr translated;
translated = phi_translate (expr, set, NULL, pred, phiblock);
+ if (!translated)
+ continue;
- /* Don't add empty translations to the cache */
- if (translated)
- phi_trans_add (expr, translated, pred);
-
- if (translated != NULL)
+ /* We might end up with multiple expressions from SET being
+ translated to the same value. In this case we do not want
+ to retain the NARY or REFERENCE expression but prefer a NAME
+ which would be the leader. */
+ if (translated->kind == NAME)
+ bitmap_value_replace_in_set (dest, translated);
+ else
bitmap_value_insert_into_set (dest, translated);
}
VEC_free (pre_expr, heap, exprs);
bitmap_iterator bi;
bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);
- EXECUTE_IF_AND_IN_BITMAP (exprset->expressions,
- set->expressions, 0, i, bi)
+ EXECUTE_IF_AND_IN_BITMAP (&exprset->expressions,
+ &set->expressions, 0, i, bi)
{
pre_expr val = expression_for_id (i);
/* At the point where stmt is not null, there should always
gimple def_stmt = SSA_NAME_DEF_STMT (PRE_EXPR_NAME (val));
if (gimple_code (def_stmt) != GIMPLE_PHI
&& gimple_bb (def_stmt) == gimple_bb (stmt)
- && gimple_uid (def_stmt) >= gimple_uid (stmt))
+ /* PRE insertions are at the end of the basic-block
+ and have UID 0. */
+ && (gimple_uid (def_stmt) == 0
+ || gimple_uid (def_stmt) >= gimple_uid (stmt)))
continue;
}
return val;
vn_reference_op_t vro;
unsigned int i;
- for (i = 0; VEC_iterate (vn_reference_op_s, ref->operands, i, vro); i++)
+ FOR_EACH_VEC_ELT (vn_reference_op_s, ref->operands, i, vro)
{
if (!vro_valid_in_sets (set1, set2, vro))
return false;
pre_expr expr;
int i;
- for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
+ FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
{
if (!valid_in_sets (set1, set2, expr, block))
bitmap_remove_from_set (set1, expr);
pre_expr expr;
int i;
- for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
+ FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
{
if (!valid_in_sets (set, NULL, expr, block))
bitmap_remove_from_set (set, expr);
goto maybe_dump_sets;
}
- if (phi_nodes (first))
+ if (!gimple_seq_empty_p (phi_nodes (first)))
phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
else
bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));
- for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
+ FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime)
{
- if (phi_nodes (bprime))
+ if (!gimple_seq_empty_p (phi_nodes (bprime)))
{
bitmap_set_t tmp = bitmap_set_new ();
phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
clean (ANTIC_IN (block), block);
- /* !old->expressions can happen when we deferred a block. */
- if (!old->expressions || !bitmap_set_equal (old, ANTIC_IN (block)))
+ if (!bitmap_set_equal (old, ANTIC_IN (block)))
{
changed = true;
SET_BIT (changed_blocks, block->index);
before the translation starts. */
if (max_pa
&& single_succ_p (block)
- && bitmap_count_bits (PA_IN (single_succ (block))->values) > max_pa)
+ && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
goto maybe_dump_sets;
old_PA_IN = PA_IN (block);
}
if (VEC_length (basic_block, worklist) > 0)
{
- for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
+ FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime)
{
unsigned int i;
bitmap_iterator bi;
FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
bitmap_value_insert_into_set (PA_OUT,
expression_for_id (i));
- if (phi_nodes (bprime))
+ if (!gimple_seq_empty_p (phi_nodes (bprime)))
{
bitmap_set_t pa_in = bitmap_set_new ();
phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
/* For partial antic, we want to put back in the phi results, since
we will properly avoid making them partially antic over backedges. */
- bitmap_ior_into (PA_IN (block)->values, PHI_GEN (block)->values);
- bitmap_ior_into (PA_IN (block)->expressions, PHI_GEN (block)->expressions);
+ bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
+ bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
/* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Starting iteration %d\n", num_iterations);
+ /* ??? We need to clear our PHI translation cache here as the
+ ANTIC sets shrink and we restrict valid translations to
+ those having operands with leaders in ANTIC. Same below
+ for PA ANTIC computation. */
num_iterations++;
changed = false;
- for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
+ for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
{
if (TEST_BIT (changed_blocks, postorder[i]))
{
block->index));
}
}
-#ifdef ENABLE_CHECKING
/* Theoretically possible, but *highly* unlikely. */
- gcc_assert (num_iterations < 500);
-#endif
+ gcc_checking_assert (num_iterations < 500);
}
statistics_histogram_event (cfun, "compute_antic iterations",
fprintf (dump_file, "Starting iteration %d\n", num_iterations);
num_iterations++;
changed = false;
- for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
+ for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1 ; i >= 0; i--)
{
if (TEST_BIT (changed_blocks, postorder[i]))
{
block->index));
}
}
-#ifdef ENABLE_CHECKING
/* Theoretically possible, but *highly* unlikely. */
- gcc_assert (num_iterations < 500);
-#endif
+ gcc_checking_assert (num_iterations < 500);
}
statistics_histogram_event (cfun, "compute_partial_antic iterations",
num_iterations);
}
/* Return true if we can value number the call in STMT. This is true
- if we have a pure or constant call. */
+ if we have a pure or constant call to a real function. */
static bool
can_value_number_call (gimple stmt)
{
+ if (gimple_call_internal_p (stmt))
+ return false;
if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
return true;
return false;
return UNARY_CLASS_P (op)
|| BINARY_CLASS_P (op)
|| COMPARISON_CLASS_P (op)
- || TREE_CODE (op) == INDIRECT_REF
+ || TREE_CODE (op) == MEM_REF
|| TREE_CODE (op) == COMPONENT_REF
|| TREE_CODE (op) == VIEW_CONVERT_EXPR
|| TREE_CODE (op) == CALL_EXPR
/* Inserted expressions are placed onto this worklist, which is used
for performing quick dead code elimination of insertions we made
that didn't turn out to be necessary. */
-static VEC(gimple,heap) *inserted_exprs;
-static bitmap inserted_phi_names;
+static bitmap inserted_exprs;
/* Pool allocated fake store expressions are placed onto this
worklist, which, after performing dead code elimination, is walked
{
case CALL_EXPR:
{
- tree folded, sc = currop->op1;
+ tree folded, sc = NULL_TREE;
unsigned int nargs = 0;
- tree *args = XNEWVEC (tree, VEC_length (vn_reference_op_s,
- ref->operands) - 1);
+ tree fn, *args;
+ if (TREE_CODE (currop->op0) == FUNCTION_DECL)
+ fn = currop->op0;
+ else
+ {
+ pre_expr op0 = get_or_alloc_expr_for (currop->op0);
+ fn = find_or_generate_expression (block, op0, stmts, domstmt);
+ if (!fn)
+ return NULL_TREE;
+ }
+ if (currop->op1)
+ {
+ pre_expr scexpr = get_or_alloc_expr_for (currop->op1);
+ sc = find_or_generate_expression (block, scexpr, stmts, domstmt);
+ if (!sc)
+ return NULL_TREE;
+ }
+ args = XNEWVEC (tree, VEC_length (vn_reference_op_s,
+ ref->operands) - 1);
while (*operand < VEC_length (vn_reference_op_s, ref->operands))
{
args[nargs] = create_component_ref_by_pieces_1 (block, ref,
operand, stmts,
domstmt);
+ if (!args[nargs])
+ {
+ free (args);
+ return NULL_TREE;
+ }
nargs++;
}
folded = build_call_array (currop->type,
- TREE_CODE (currop->op0) == FUNCTION_DECL
- ? build_fold_addr_expr (currop->op0)
- : currop->op0,
+ (TREE_CODE (fn) == FUNCTION_DECL
+ ? build_fold_addr_expr (fn) : fn),
nargs, args);
free (args);
if (sc)
+ CALL_EXPR_STATIC_CHAIN (folded) = sc;
+ return folded;
+ }
+ break;
+ case MEM_REF:
+ {
+ tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
+ stmts, domstmt);
+ tree offset = currop->op0;
+ if (!baseop)
+ return NULL_TREE;
+ if (TREE_CODE (baseop) == ADDR_EXPR
+ && handled_component_p (TREE_OPERAND (baseop, 0)))
{
- pre_expr scexpr = get_or_alloc_expr_for (sc);
- sc = find_or_generate_expression (block, scexpr, stmts, domstmt);
- if (!sc)
- return NULL_TREE;
- CALL_EXPR_STATIC_CHAIN (folded) = sc;
+ HOST_WIDE_INT off;
+ tree base;
+ base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
+ &off);
+ gcc_assert (base);
+ offset = int_const_binop (PLUS_EXPR, offset,
+ build_int_cst (TREE_TYPE (offset),
+ off));
+ baseop = build_fold_addr_expr (base);
}
- return folded;
+ return fold_build2 (MEM_REF, currop->type, baseop, offset);
}
break;
case TARGET_MEM_REF:
{
+ pre_expr op0expr, op1expr;
+ tree genop0 = NULL_TREE, genop1 = NULL_TREE;
vn_reference_op_t nextop = VEC_index (vn_reference_op_s, ref->operands,
- *operand);
- pre_expr op0expr;
- tree genop0 = NULL_TREE;
+ ++*operand);
tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
stmts, domstmt);
if (!baseop)
if (!genop0)
return NULL_TREE;
}
- if (DECL_P (baseop))
- return build6 (TARGET_MEM_REF, currop->type,
- baseop, NULL_TREE,
- genop0, currop->op1, currop->op2,
- unshare_expr (nextop->op1));
- else
- return build6 (TARGET_MEM_REF, currop->type,
- NULL_TREE, baseop,
- genop0, currop->op1, currop->op2,
- unshare_expr (nextop->op1));
+ if (nextop->op0)
+ {
+ op1expr = get_or_alloc_expr_for (nextop->op0);
+ genop1 = find_or_generate_expression (block, op1expr,
+ stmts, domstmt);
+ if (!genop1)
+ return NULL_TREE;
+ }
+ return build5 (TARGET_MEM_REF, currop->type,
+ baseop, currop->op2, genop0, currop->op1, genop1);
}
break;
case ADDR_EXPR:
return folded;
}
break;
- case ALIGN_INDIRECT_REF:
- case MISALIGNED_INDIRECT_REF:
- case INDIRECT_REF:
- {
- tree folded;
- tree genop1 = create_component_ref_by_pieces_1 (block, ref,
- operand,
- stmts, domstmt);
- if (!genop1)
- return NULL_TREE;
- genop1 = fold_convert (build_pointer_type (currop->type),
- genop1);
-
- if (currop->opcode == MISALIGNED_INDIRECT_REF)
- folded = fold_build2 (currop->opcode, currop->type,
- genop1, currop->op1);
- else
- folded = fold_build1 (currop->opcode, currop->type,
- genop1);
- return folded;
- }
- break;
case BIT_FIELD_REF:
{
tree folded;
return NULL_TREE;
if (genop2)
{
- op2expr = get_or_alloc_expr_for (genop2);
- genop2 = find_or_generate_expression (block, op2expr, stmts,
- domstmt);
- if (!genop2)
- return NULL_TREE;
+ tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
+ /* Drop zero minimum index if redundant. */
+ if (integer_zerop (genop2)
+ && (!domain_type
+ || integer_zerop (TYPE_MIN_VALUE (domain_type))))
+ genop2 = NULL_TREE;
+ else
+ {
+ op2expr = get_or_alloc_expr_for (genop2);
+ genop2 = find_or_generate_expression (block, op2expr, stmts,
+ domstmt);
+ if (!genop2)
+ return NULL_TREE;
+ }
}
if (genop3)
{
tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
- genop3 = size_binop (EXACT_DIV_EXPR, genop3,
- size_int (TYPE_ALIGN_UNIT (elmt_type)));
- op3expr = get_or_alloc_expr_for (genop3);
- genop3 = find_or_generate_expression (block, op3expr, stmts,
- domstmt);
- if (!genop3)
- return NULL_TREE;
+ /* We can't always put a size in units of the element alignment
+ here as the element alignment may be not visible. See
+ PR43783. Simply drop the element size for constant
+ sizes. */
+ if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
+ genop3 = NULL_TREE;
+ else
+ {
+ genop3 = size_binop (EXACT_DIV_EXPR, genop3,
+ size_int (TYPE_ALIGN_UNIT (elmt_type)));
+ op3expr = get_or_alloc_expr_for (genop3);
+ genop3 = find_or_generate_expression (block, op3expr, stmts,
+ domstmt);
+ if (!genop3)
+ return NULL_TREE;
+ }
}
return build4 (currop->opcode, currop->type, genop0, genop1,
genop2, genop3);
}
/* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
- COMPONENT_REF or INDIRECT_REF or ARRAY_REF portion, because we'd end up with
+ COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up with
trying to rename aggregates into ssa form directly, which is a no no.
Thus, this routine doesn't create temporaries, it just builds a
}
/* If it's still NULL, it must be a complex expression, so generate
- it recursively. Not so for FRE though. */
+ it recursively. Not so if inserting expressions for values generated
+ by SCCVN. */
if (genop == NULL
- && !in_fre)
+ && !domstmt)
{
bitmap_set_t exprset;
unsigned int lookfor = get_expr_value_id (expr);
stmts, domstmt);
if (!genop1 || !genop2)
return NULL_TREE;
- genop1 = fold_convert (TREE_TYPE (nary->op[0]),
- genop1);
/* Ensure op2 is a sizetype for POINTER_PLUS_EXPR. It
may be a constant with the wrong type. */
if (nary->opcode == POINTER_PLUS_EXPR)
- genop2 = fold_convert (sizetype, genop2);
+ {
+ genop1 = fold_convert (nary->type, genop1);
+ genop2 = fold_convert (sizetype, genop2);
+ }
else
- genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2);
+ {
+ genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
+ genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2);
+ }
folded = fold_build2 (nary->opcode, nary->type,
genop1, genop2);
tree forcedname = gimple_get_lhs (stmt);
pre_expr nameexpr;
- VEC_safe_push (gimple, heap, inserted_exprs, stmt);
if (TREE_CODE (forcedname) == SSA_NAME)
{
+ bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
VN_INFO_GET (forcedname)->valnum = forcedname;
VN_INFO (forcedname)->value_id = get_next_value_id ();
nameexpr = get_or_alloc_expr_for_name (forcedname);
that we will return. */
if (!pretemp || exprtype != TREE_TYPE (pretemp))
{
- pretemp = create_tmp_var (exprtype, "pretmp");
+ pretemp = create_tmp_reg (exprtype, "pretmp");
get_var_ann (pretemp);
}
temp = pretemp;
add_referenced_var (temp);
- if (TREE_CODE (exprtype) == COMPLEX_TYPE
- || TREE_CODE (exprtype) == VECTOR_TYPE)
- DECL_GIMPLE_REG_P (temp) = 1;
-
newstmt = gimple_build_assign (temp, folded);
name = make_ssa_name (temp, newstmt);
gimple_assign_set_lhs (newstmt, name);
gimple_set_plf (newstmt, NECESSARY, false);
gimple_seq_add_stmt (stmts, newstmt);
- VEC_safe_push (gimple, heap, inserted_exprs, newstmt);
+ bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));
/* All the symbols in NEWEXPR should be put into SSA form. */
mark_symbols_for_renaming (newstmt);
VN_INFO (name)->value_id = value_id;
nameexpr = get_or_alloc_expr_for_name (name);
add_to_value (value_id, nameexpr);
- if (!in_fre)
+ if (NEW_SETS (block))
bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
memory reference is a simple induction variable. In other
cases the vectorizer won't do anything anyway (either it's
loop invariant or a complicated expression). */
- for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
+ FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
{
switch (op->opcode)
{
for (; !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
- VEC_safe_push (gimple, heap, inserted_exprs, stmt);
+ tree lhs = gimple_get_lhs (stmt);
+ if (TREE_CODE (lhs) == SSA_NAME)
+ bitmap_set_bit (inserted_exprs,
+ SSA_NAME_VERSION (lhs));
gimple_set_plf (stmt, NECESSARY, false);
}
gsi_insert_seq_on_edge (pred, stmts);
avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
}
}
+ else
+ avail[bprime->index] = get_or_alloc_expr_for_constant (builtexpr);
}
}
else if (eprime->kind == NAME)
for (; !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
- VEC_safe_push (gimple, heap, inserted_exprs, stmt);
+ tree lhs = gimple_get_lhs (stmt);
+ if (TREE_CODE (lhs) == SSA_NAME)
+ bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
gimple_set_plf (stmt, NECESSARY, false);
}
gsi_insert_seq_on_edge (pred, stmts);
gimple_set_plf (phi, NECESSARY, false);
VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi);
VN_INFO (gimple_phi_result (phi))->value_id = val;
- VEC_safe_push (gimple, heap, inserted_exprs, phi);
- bitmap_set_bit (inserted_phi_names,
- SSA_NAME_VERSION (gimple_phi_result (phi)));
+ bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (gimple_phi_result (phi)));
FOR_EACH_EDGE (pred, ei, block->preds)
{
pre_expr ae = avail[pred->src->index];
pre_expr expr;
int i;
- for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
+ FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
{
if (expr->kind != NAME)
{
already existing along every predecessor, and
it's defined by some predecessor, it is
partially redundant. */
- if (!cant_insert && !all_same && by_some && do_insertion
- && dbg_cnt (treepre_insert))
+ if (!cant_insert && !all_same && by_some)
{
- if (insert_into_preds_of_block (block, get_expression_id (expr),
- avail))
+ if (!do_insertion)
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Skipping partial redundancy for "
+ "expression ");
+ print_pre_expr (dump_file, expr);
+ fprintf (dump_file, " (%04d), no redundancy on to be "
+ "optimized for speed edge\n", val);
+ }
+ }
+ else if (dbg_cnt (treepre_insert)
+ && insert_into_preds_of_block (block,
+ get_expression_id (expr),
+ avail))
new_stuff = true;
}
/* If all edges produce the same value and that value is
pre_expr expr;
int i;
- for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
+ FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
{
if (expr->kind != NAME)
{
copy_reference_ops_from_call (stmt, &ops);
vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
gimple_expr_type (stmt),
- ops, &ref, false);
+ ops, &ref, VN_NOWALK);
VEC_free (vn_reference_op_s, heap, ops);
if (!ref)
continue;
vn_reference_lookup (gimple_assign_rhs1 (stmt),
gimple_vuse (stmt),
- true, &ref);
+ VN_WALK, &ref);
if (!ref)
continue;
eliminate (void)
{
VEC (gimple, heap) *to_remove = NULL;
+ VEC (gimple, heap) *to_update = NULL;
basic_block b;
unsigned int todo = 0;
gimple_stmt_iterator gsi;
|| TREE_CODE (rhs) != SSA_NAME
|| may_propagate_copy (rhs, sprime)))
{
+ bool can_make_abnormal_goto
+ = is_gimple_call (stmt)
+ && stmt_can_make_abnormal_goto (stmt);
+
gcc_assert (sprime != rhs);
if (dump_file && (dump_flags & TDF_DETAILS))
stmt = gsi_stmt (gsi);
update_stmt (stmt);
- /* If we removed EH side effects from the statement, clean
+ /* If we removed EH side-effects from the statement, clean
its EH information. */
if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
{
bitmap_set_bit (need_eh_cleanup,
gimple_bb (stmt)->index);
if (dump_file && (dump_flags & TDF_DETAILS))
- fprintf (dump_file, " Removed EH side effects.\n");
+ fprintf (dump_file, " Removed EH side-effects.\n");
+ }
+
+ /* Likewise for AB side-effects. */
+ if (can_make_abnormal_goto
+ && !stmt_can_make_abnormal_goto (stmt))
+ {
+ bitmap_set_bit (need_ab_cleanup,
+ gimple_bb (stmt)->index);
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, " Removed AB side-effects.\n");
}
}
}
tree rhs = gimple_assign_rhs1 (stmt);
tree val;
val = vn_reference_lookup (gimple_assign_lhs (stmt),
- gimple_vuse (stmt), true, NULL);
+ gimple_vuse (stmt), VN_WALK, NULL);
if (TREE_CODE (rhs) == SSA_NAME)
rhs = VN_INFO (rhs)->valnum;
if (val
}
/* Visit indirect calls and turn them into direct calls if
possible. */
- if (gimple_code (stmt) == GIMPLE_CALL
- && TREE_CODE (gimple_call_fn (stmt)) == SSA_NAME)
+ if (is_gimple_call (stmt))
{
- tree fn = VN_INFO (gimple_call_fn (stmt))->valnum;
- if (TREE_CODE (fn) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
+ tree orig_fn = gimple_call_fn (stmt);
+ tree fn;
+ if (!orig_fn)
+ continue;
+ if (TREE_CODE (orig_fn) == SSA_NAME)
+ fn = VN_INFO (orig_fn)->valnum;
+ else if (TREE_CODE (orig_fn) == OBJ_TYPE_REF
+ && TREE_CODE (OBJ_TYPE_REF_EXPR (orig_fn)) == SSA_NAME)
+ fn = VN_INFO (OBJ_TYPE_REF_EXPR (orig_fn))->valnum;
+ else
+ continue;
+ if (gimple_call_addr_fndecl (fn) != NULL_TREE
+ && useless_type_conversion_p (TREE_TYPE (orig_fn),
+ TREE_TYPE (fn)))
{
+ bool can_make_abnormal_goto
+ = stmt_can_make_abnormal_goto (stmt);
+ bool was_noreturn = gimple_call_noreturn_p (stmt);
+
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Replacing call target with ");
}
gimple_call_set_fn (stmt, fn);
- update_stmt (stmt);
+ VEC_safe_push (gimple, heap, to_update, stmt);
+
+ /* When changing a call into a noreturn call, cfg cleanup
+ is needed to fix up the noreturn call. */
+ if (!was_noreturn && gimple_call_noreturn_p (stmt))
+ todo |= TODO_cleanup_cfg;
+
+ /* If we removed EH side-effects from the statement, clean
+ its EH information. */
if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
{
bitmap_set_bit (need_eh_cleanup,
gimple_bb (stmt)->index);
if (dump_file && (dump_flags & TDF_DETAILS))
- fprintf (dump_file, " Removed EH side effects.\n");
+ fprintf (dump_file, " Removed EH side-effects.\n");
+ }
+
+ /* Likewise for AB side-effects. */
+ if (can_make_abnormal_goto
+ && !stmt_can_make_abnormal_goto (stmt))
+ {
+ bitmap_set_bit (need_ab_cleanup,
+ gimple_bb (stmt)->index);
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, " Removed AB side-effects.\n");
}
/* Changing an indirect call to a direct call may
replacing the PHI with a single copy if possible.
Do not touch inserted, single-argument or virtual PHIs. */
if (gimple_phi_num_args (phi) == 1
- || !is_gimple_reg (res)
- || bitmap_bit_p (inserted_phi_names, SSA_NAME_VERSION (res)))
+ || !is_gimple_reg (res))
{
gsi_next (&gsi);
continue;
else
gcc_unreachable ();
}
- if (!sprimeexpr
+ if (!sprime && is_gimple_min_invariant (VN_INFO (res)->valnum))
+ {
+ sprime = VN_INFO (res)->valnum;
+ if (!useless_type_conversion_p (TREE_TYPE (res),
+ TREE_TYPE (sprime)))
+ sprime = fold_convert (TREE_TYPE (res), sprime);
+ }
+ if (!sprime
|| sprime == res)
{
gsi_next (&gsi);
remove_phi_node (&gsi, false);
+ if (!bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res))
+ && TREE_CODE (sprime) == SSA_NAME)
+ gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
+
if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
sprime = fold_convert (TREE_TYPE (res), sprime);
stmt = gimple_build_assign (res, sprime);
SSA_NAME_DEF_STMT (res) = stmt;
- if (TREE_CODE (sprime) == SSA_NAME)
- gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
- NECESSARY, true);
+ gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));
+
gsi2 = gsi_after_labels (b);
gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
/* Queue the copy for eventual removal. */
VEC_safe_push (gimple, heap, to_remove, stmt);
- pre_stats.eliminations++;
+ /* If we inserted this PHI node ourself, it's not an elimination. */
+ if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
+ pre_stats.phis--;
+ else
+ pre_stats.eliminations++;
}
}
/* We cannot remove stmts during BB walk, especially not release SSA
names there as this confuses the VN machinery. The stmts ending
up in to_remove are either stores or simple copies. */
- for (i = 0; VEC_iterate (gimple, to_remove, i, stmt); ++i)
+ FOR_EACH_VEC_ELT (gimple, to_remove, i, stmt)
{
tree lhs = gimple_assign_lhs (stmt);
+ tree rhs = gimple_assign_rhs1 (stmt);
use_operand_p use_p;
gimple use_stmt;
/* If there is a single use only, propagate the equivalency
instead of keeping the copy. */
if (TREE_CODE (lhs) == SSA_NAME
+ && TREE_CODE (rhs) == SSA_NAME
&& single_imm_use (lhs, &use_p, &use_stmt)
- && may_propagate_copy (USE_FROM_PTR (use_p),
- gimple_assign_rhs1 (stmt)))
+ && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
{
- SET_USE (use_p, gimple_assign_rhs1 (stmt));
+ SET_USE (use_p, rhs);
update_stmt (use_stmt);
+ if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (lhs))
+ && TREE_CODE (rhs) == SSA_NAME)
+ gimple_set_plf (SSA_NAME_DEF_STMT (rhs), NECESSARY, true);
}
/* If this is a store or a now unused copy, remove it. */
if (TREE_CODE (lhs) != SSA_NAME
|| has_zero_uses (lhs))
{
+ basic_block bb = gimple_bb (stmt);
gsi = gsi_for_stmt (stmt);
unlink_stmt_vdef (stmt);
gsi_remove (&gsi, true);
+ if (gimple_purge_dead_eh_edges (bb))
+ todo |= TODO_cleanup_cfg;
+ if (TREE_CODE (lhs) == SSA_NAME)
+ bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
release_defs (stmt);
}
}
VEC_free (gimple, heap, to_remove);
+ /* We cannot update call statements with virtual operands during
+ SSA walk. This might remove them which in turn makes our
+ VN lattice invalid. */
+ FOR_EACH_VEC_ELT (gimple, to_update, i, stmt)
+ update_stmt (stmt);
+ VEC_free (gimple, heap, to_update);
+
return todo;
}
static void
remove_dead_inserted_code (void)
{
- VEC(gimple,heap) *worklist = NULL;
- int i;
+ bitmap worklist;
+ unsigned i;
+ bitmap_iterator bi;
gimple t;
- worklist = VEC_alloc (gimple, heap, VEC_length (gimple, inserted_exprs));
- for (i = 0; VEC_iterate (gimple, inserted_exprs, i, t); i++)
+ worklist = BITMAP_ALLOC (NULL);
+ EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
{
+ t = SSA_NAME_DEF_STMT (ssa_name (i));
if (gimple_plf (t, NECESSARY))
- VEC_quick_push (gimple, worklist, t);
+ bitmap_set_bit (worklist, i);
}
- while (VEC_length (gimple, worklist) > 0)
+ while (!bitmap_empty_p (worklist))
{
- t = VEC_pop (gimple, worklist);
+ i = bitmap_first_set_bit (worklist);
+ bitmap_clear_bit (worklist, i);
+ t = SSA_NAME_DEF_STMT (ssa_name (i));
/* PHI nodes are somewhat special in that each PHI alternative has
data and control dependencies. All the statements feeding the
{
unsigned k;
- VEC_reserve (gimple, heap, worklist, gimple_phi_num_args (t));
for (k = 0; k < gimple_phi_num_args (t); k++)
{
tree arg = PHI_ARG_DEF (t, k);
{
gimple n = mark_operand_necessary (arg);
if (n)
- VEC_quick_push (gimple, worklist, n);
+ bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
}
}
}
{
gimple n = mark_operand_necessary (use);
if (n)
- VEC_safe_push (gimple, heap, worklist, n);
+ bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
}
}
}
- for (i = 0; VEC_iterate (gimple, inserted_exprs, i, t); i++)
+ EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
{
+ t = SSA_NAME_DEF_STMT (ssa_name (i));
if (!gimple_plf (t, NECESSARY))
{
gimple_stmt_iterator gsi;
}
}
}
- VEC_free (gimple, heap, worklist);
+ BITMAP_FREE (worklist);
}
+/* Compute a reverse post-order in *POST_ORDER, walking the CFG backwards
+ from EXIT along predecessor edges. If INCLUDE_ENTRY_EXIT is
+ true, then ENTRY_BLOCK and EXIT_BLOCK are included. Returns
+ the number of visited blocks. */
+
+static int
+my_rev_post_order_compute (int *post_order, bool include_entry_exit)
+{
+ edge_iterator *stack;
+ int sp;
+ int post_order_num = 0;
+ sbitmap visited;
+
+ if (include_entry_exit)
+ post_order[post_order_num++] = EXIT_BLOCK;
+
+ /* Allocate stack for back-tracking up CFG. */
+ stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
+ sp = 0;
+
+ /* Allocate bitmap to track nodes that have been visited. */
+ visited = sbitmap_alloc (last_basic_block);
+
+ /* None of the nodes in the CFG have been visited yet. */
+ sbitmap_zero (visited);
+
+ /* Push the predecessor edges of EXIT on to the stack; the walk
+ proceeds backwards from there. */
+ stack[sp++] = ei_start (EXIT_BLOCK_PTR->preds);
+
+ while (sp)
+ {
+ edge_iterator ei;
+ basic_block src;
+ basic_block dest;
+
+ /* Look at the edge on the top of the stack. */
+ ei = stack[sp - 1];
+ src = ei_edge (ei)->src;
+ dest = ei_edge (ei)->dest;
+
+ /* Check if the edge source has been visited yet; we walk edges
+ backwards, so SRC plays the role DEST has in a forward walk. */
+ if (src != ENTRY_BLOCK_PTR && ! TEST_BIT (visited, src->index))
+ {
+ /* Mark that we have visited the source. */
+ SET_BIT (visited, src->index);
+
+ if (EDGE_COUNT (src->preds) > 0)
+ /* Since the SRC node has been visited for the first
+ time, check its predecessors. */
+ stack[sp++] = ei_start (src->preds);
+ else
+ post_order[post_order_num++] = src->index;
+ }
+ else
+ {
+ if (ei_one_before_end_p (ei) && dest != EXIT_BLOCK_PTR)
+ post_order[post_order_num++] = dest->index;
+
+ if (!ei_one_before_end_p (ei))
+ ei_next (&stack[sp - 1]);
+ else
+ sp--;
+ }
+ }
+
+ if (include_entry_exit)
+ post_order[post_order_num++] = ENTRY_BLOCK;
+
+ free (stack);
+ sbitmap_free (visited);
+ return post_order_num;
+}
+
+
/* Initialize data structures used by PRE. */
static void
in_fre = do_fre;
- inserted_exprs = NULL;
+ inserted_exprs = BITMAP_ALLOC (NULL);
need_creation = NULL;
pretemp = NULL_TREE;
storetemp = NULL_TREE;
postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
- post_order_compute (postorder, false, false);
+ my_rev_post_order_compute (postorder, false);
- FOR_ALL_BB (bb)
- bb->aux = XCNEWVEC (struct bb_bitmap_sets, 1);
+ alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));
calculate_dominance_info (CDI_POST_DOMINATORS);
calculate_dominance_info (CDI_DOMINATORS);
bitmap_obstack_initialize (&grand_bitmap_obstack);
- inserted_phi_names = BITMAP_ALLOC (&grand_bitmap_obstack);
phi_translate_table = htab_create (5110, expr_pred_trans_hash,
expr_pred_trans_eq, free);
expression_to_id = htab_create (num_ssa_names * 3,
}
need_eh_cleanup = BITMAP_ALLOC (NULL);
+ need_ab_cleanup = BITMAP_ALLOC (NULL);
}
static void
fini_pre (bool do_fre)
{
- basic_block bb;
-
free (postorder);
VEC_free (bitmap_set_t, heap, value_expressions);
- VEC_free (gimple, heap, inserted_exprs);
+ BITMAP_FREE (inserted_exprs);
VEC_free (gimple, heap, need_creation);
bitmap_obstack_release (&grand_bitmap_obstack);
free_alloc_pool (bitmap_set_pool);
htab_delete (expression_to_id);
VEC_free (unsigned, heap, name_to_id);
- FOR_ALL_BB (bb)
- {
- free (bb->aux);
- bb->aux = NULL;
- }
+ free_aux_for_blocks ();
free_dominance_info (CDI_POST_DOMINATORS);
BITMAP_FREE (need_eh_cleanup);
+ if (!bitmap_empty_p (need_ab_cleanup))
+ {
+ gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
+ cleanup_tree_cfg ();
+ }
+
+ BITMAP_FREE (need_ab_cleanup);
+
if (!do_fre)
loop_optimizer_finalize ();
}
if (!do_fre)
loop_optimizer_init (LOOPS_NORMAL);
- if (!run_scc_vn (do_fre))
+ if (!run_scc_vn (do_fre ? VN_WALKREWRITE : VN_WALK))
{
if (!do_fre)
- {
- remove_dead_inserted_code ();
- loop_optimizer_finalize ();
- }
+ loop_optimizer_finalize ();
return 0;
}
+
init_pre (do_fre);
scev_initialize ();
-
/* Collect and value number expressions computed in each basic block. */
compute_avail ();
insert ();
}
+ /* Make sure to remove fake edges before committing our inserts.
+ This makes sure we don't end up with extra critical edges that
+ we would need to split. */
+ remove_fake_exit_edges ();
+ gsi_commit_edge_inserts ();
+
/* Remove all the redundant expressions. */
todo |= eliminate ();
statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
statistics_counter_event (cfun, "Constified", pre_stats.constified);
- /* Make sure to remove fake edges before committing our inserts.
- This makes sure we don't end up with extra critical edges that
- we would need to split. */
- remove_fake_exit_edges ();
- gsi_commit_edge_inserts ();
-
clear_expression_ids ();
free_scc_vn ();
if (!do_fre)
- remove_dead_inserted_code ();
+ {
+ remove_dead_inserted_code ();
+ todo |= TODO_verify_flow;
+ }
scev_finalize ();
fini_pre (do_fre);