/* Conditional constant propagation pass for the GNU compiler.
- Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
+ Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006
Free Software Foundation, Inc.
Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
/* The regular is_gimple_min_invariant does a shallow test of the object.
It assumes that full gimplification has happened, or will happen on the
object. For a value coming from DECL_INITIAL, this is not true, so we
- have to be more strict outselves. */
+ have to be more strict ourselves. */
static bool
ccp_decl_initial_min_invariant (tree t)
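The hunk shows only the new predicate's signature. As a rough sketch of what a stricter test has to do, assuming the usual tree helpers handled_component_p and DECL_P (the body actually committed may differ):

/* Hedged sketch, not the verbatim patch body: an address is only
   invariant here if its base, after stripping component and array
   references, is a bare declaration.  */
static bool
ccp_decl_initial_min_invariant_sketch (tree t)
{
  if (!is_gimple_min_invariant (t))
    return false;
  if (TREE_CODE (t) == ADDR_EXPR)
    {
      /* Strip COMPONENT_REFs and ARRAY_REFs down to the base object.  */
      tree base = TREE_OPERAND (t, 0);
      while (handled_component_p (base))
        base = TREE_OPERAND (base, 0);
      if (!DECL_P (base))
        return false;
    }
  return true;
}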
/* If we are not doing store-ccp, statements with loads
and/or stores will never fold into a constant. */
if (!do_store_ccp
- && (ann->makes_aliased_stores
- || ann->makes_aliased_loads
- || !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS)))
+ && !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
return VARYING;
{
basic_block bb;
- const_val = xmalloc (num_ssa_names * sizeof (*const_val));
+ const_val = XNEWVEC (prop_value_t, num_ssa_names);
memset (const_val, 0, num_ssa_names * sizeof (*const_val));
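XNEWVEC is libiberty's typed allocation macro; the replacement is behavior-preserving and adds the cast needed when the sources are compiled as C++. In effect the new line expands to:

/* Equivalent expansion of XNEWVEC (prop_value_t, num_ssa_names).  */
const_val = (prop_value_t *) xmalloc (num_ssa_names * sizeof (prop_value_t));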
/* Initialize simulation flags for PHI nodes and statements. */
/* If the RHS is a memory load, see if the VUSEs associated with
it are a valid constant for that memory load. */
prop_value_t *val = get_value_loaded_by (stmt, const_val);
- if (val && val->mem_ref
- && operand_equal_p (val->mem_ref, rhs, 0))
- return val->value;
- else
- return NULL_TREE;
+ if (val && val->mem_ref)
+ {
+ if (operand_equal_p (val->mem_ref, rhs, 0))
+ return val->value;
+
+ /* If RHS extracts the real or imaginary part (REALPART_EXPR or
+ IMAGPART_EXPR) of a complex object whose constant value is
+ known, fold the extraction and return the result. */
+ if ((TREE_CODE (rhs) == REALPART_EXPR
+ || TREE_CODE (rhs) == IMAGPART_EXPR)
+ && operand_equal_p (val->mem_ref, TREE_OPERAND (rhs, 0), 0))
+ return fold_build1 (TREE_CODE (rhs), TREE_TYPE (rhs), val->value);
+ }
+ return NULL_TREE;
}
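A hypothetical illustration (not taken from the patch) of a load this new case lets store-CCP fold: the value recorded for the stored memory satisfies operand_equal_p against the operand of the REALPART_EXPR, so the extraction folds to a constant.

/* The __real__ load below can now be replaced by the constant 1.0,
   because the store to 'c' carries a known constant complex value.  */
double
f (void)
{
  _Complex double c;
  c = 1.0 + 2.0i;      /* store with a known constant value */
  return __real__ c;   /* REALPART_EXPR over the same memory */
}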
/* Unary operators. Note that we know the single operand must
use_operand_p var_p;
/* Preserve the original values of every operand. */
- orig = xmalloc (sizeof (tree) * NUM_SSA_OPERANDS (stmt, SSA_OP_USE));
+ orig = XNEWVEC (tree, NUM_SSA_OPERANDS (stmt, SSA_OP_USE));
FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
orig[i++] = var;
if (TREE_CODE (orig_lhs) == VIEW_CONVERT_EXPR
&& val.lattice_val == CONSTANT)
{
- tree w = fold_build1 (VIEW_CONVERT_EXPR,
- TREE_TYPE (TREE_OPERAND (orig_lhs, 0)),
- val.value);
+ tree w = fold_unary (VIEW_CONVERT_EXPR,
+ TREE_TYPE (TREE_OPERAND (orig_lhs, 0)),
+ val.value);
orig_lhs = TREE_OPERAND (orig_lhs, 0);
if (w && is_gimple_min_invariant (w))
}
-static void
+static unsigned int
do_ssa_ccp (void)
{
execute_ssa_ccp (false);
+ return 0;
}
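The void to unsigned int change follows the pass-manager convention in which an execute callback returns a set of additional TODO_* flags, with 0 requesting nothing beyond todo_flags_finish. A minimal sketch of the convention, with a hypothetical pass body:

/* Hypothetical execute callback: a nonzero return asks the pass
   manager for extra work, e.g. rebuilding SSA form.  */
static unsigned int
execute_example_pass (void)
{
  bool changed = transform_function ();  /* hypothetical helper */
  return changed ? TODO_update_ssa : 0;
}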
TV_TREE_CCP, /* tv_id */
PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
0, /* properties_provided */
- 0, /* properties_destroyed */
+ PROP_smt_usage, /* properties_destroyed */
0, /* todo_flags_start */
TODO_cleanup_cfg | TODO_dump_func | TODO_update_ssa
| TODO_ggc_collect | TODO_verify_ssa
- | TODO_verify_stmts, /* todo_flags_finish */
+ | TODO_verify_stmts | TODO_update_smt_usage, /* todo_flags_finish */
0 /* letter */
};
-static void
+static unsigned int
do_ssa_store_ccp (void)
{
/* If STORE-CCP is not enabled, we just run regular CCP. */
execute_ssa_ccp (flag_tree_store_ccp != 0);
+ return 0;
}
static bool
TV_TREE_STORE_CCP, /* tv_id */
PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
0, /* properties_provided */
- 0, /* properties_destroyed */
+ PROP_smt_usage, /* properties_destroyed */
0, /* todo_flags_start */
TODO_dump_func | TODO_update_ssa
| TODO_ggc_collect | TODO_verify_ssa
| TODO_cleanup_cfg
- | TODO_verify_stmts, /* todo_flags_finish */
+ | TODO_verify_stmts | TODO_update_smt_usage, /* todo_flags_finish */
0 /* letter */
};
if (TREE_CODE (min_idx) != INTEGER_CST)
break;
- array_idx = convert (TREE_TYPE (min_idx), array_idx);
+ array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
if (!integer_zerop (min_idx))
array_idx = int_const_binop (MINUS_EXPR, array_idx,
min_idx, 0);
}
/* Convert the index to a byte offset. */
- array_idx = convert (sizetype, array_idx);
+ array_idx = fold_convert (sizetype, array_idx);
array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
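The index arithmetic itself is straightforward; a standalone, hedged illustration of what the two int_const_binop calls compute for an array whose domain has a non-zero lower bound (values made up for the example):

#include <stdio.h>

int
main (void)
{
  long min_idx = 2;    /* lower bound of the array domain */
  long array_idx = 5;  /* index being folded */
  long elt_size = 4;   /* size of one element in bytes */

  array_idx = array_idx - min_idx;       /* MINUS_EXPR: bias to zero base */
  long byte_off = array_idx * elt_size;  /* MULT_EXPR: scale to bytes */

  printf ("byte offset = %ld\n", byte_off);  /* prints 12 */
  return 0;
}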
/* Update the operands for the next round, or for folding. */
{
if (TYPE_UNSIGNED (TREE_TYPE (op1)))
return NULL;
- op1 = fold_build1 (NEGATE_EXPR, TREE_TYPE (op1), op1);
+ op1 = fold_unary (NEGATE_EXPR, TREE_TYPE (op1), op1);
/* ??? In theory fold should always produce another integer. */
- if (TREE_CODE (op1) != INTEGER_CST)
+ if (op1 == NULL || TREE_CODE (op1) != INTEGER_CST)
return NULL;
}
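The added NULL test is required because fold_unary, unlike fold_build1, returns NULL_TREE when no simplification applies instead of unconditionally building a fresh NEGATE_EXPR node. The defensive pattern, in isolation:

/* Sketch: fold_unary's result must be checked before use.  */
tree folded = fold_unary (NEGATE_EXPR, TREE_TYPE (op1), op1);
if (folded == NULL_TREE || TREE_CODE (folded) != INTEGER_CST)
  return NULL;
op1 = folded;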
static tree
fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
{
- struct fold_stmt_r_data *fold_stmt_r_data = data;
+ struct fold_stmt_r_data *fold_stmt_r_data = (struct fold_stmt_r_data *) data;
bool *inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
bool *changed_p = fold_stmt_r_data->changed_p;
tree expr = *expr_p, t;
/* ??? Should perhaps do this in fold proper. However, doing it
there requires that we create a new CALL_EXPR, and that requires
copying EH region info to the new node. Easier to just do it
- here where we can just smash the call operand. */
+ here where we can just smash the call operand. Also
+ CALL_EXPR_RETURN_SLOT_OPT needs to be handled correctly and
+ copied; fold_ternary does not have that information. */
callee = TREE_OPERAND (rhs, 0);
if (TREE_CODE (callee) == OBJ_TYPE_REF
&& lang_hooks.fold_obj_type_ref
/* A simple pass that attempts to fold all builtin functions. This pass
is run after we've propagated as many constants as we can. */
-static void
+static unsigned int
execute_fold_all_builtins (void)
{
bool cfg_changed = false;
/* Delete unreachable blocks. */
if (cfg_changed)
cleanup_tree_cfg ();
+ return 0;
}