doing the store). */
static prop_value_t *const_val;
-/* True if we are also propagating constants in stores and loads. */
-static bool do_store_ccp;
-
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
static void
get_default_value (tree var)
{
tree sym = SSA_NAME_VAR (var);
- prop_value_t val = { UNINITIALIZED, NULL_TREE, NULL_TREE };
+ prop_value_t val = { UNINITIALIZED, NULL_TREE };
tree cst_val;
- if (!do_store_ccp && !is_gimple_reg (var))
+ if (!is_gimple_reg (var))
{
/* Short circuit for regular CCP.  We are not interested in any
non-register.  */
initial value. */
val.lattice_val = CONSTANT;
val.value = cst_val;
- val.mem_ref = sym;
}
else
{
val->lattice_val = VARYING;
val->value = NULL_TREE;
- val->mem_ref = NULL_TREE;
}
/* For float types, modify the value of VAL to make ccp work correctly
{
val->lattice_val = UNDEFINED;
val->value = NULL;
- val->mem_ref = NULL;
return;
}
}
gcc_assert (old_val->lattice_val < new_val.lattice_val
|| (old_val->lattice_val == new_val.lattice_val
&& ((!old_val->value && !new_val.value)
- || operand_equal_p (old_val->value, new_val.value, 0))
- && old_val->mem_ref == new_val.mem_ref));
+ || operand_equal_p (old_val->value, new_val.value, 0))));
if (old_val->lattice_val != new_val.lattice_val)
{
tree use;
ssa_op_iter iter;
- enum tree_code code = gimple_code (stmt);
+ enum gimple_code code = gimple_code (stmt);
/* This function appears to be called only for assignments, calls,
conditionals, and switches, due to the logic in visit_stmt. */
/* Statements with loads and/or stores will never fold
into a constant.  */
- if (!do_store_ccp
- && !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
+ if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
return VARYING;
/* Note that only a GIMPLE_SINGLE_RHS assignment can satisfy
return true;
if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
- {
- if (!do_store_ccp)
- return true;
-
- /* We can only handle simple loads and stores. */
- if (!stmt_makes_single_load (stmt)
- && !stmt_makes_single_store (stmt))
- return true;
- }
+ return true;
/* If it is a call and does not return a value or is not a
builtin and not an indirect call, it is varying. */
{
gimple phi = gsi_stmt (i);
- if (!do_store_ccp && !is_gimple_reg (gimple_phi_result (phi)))
+ if (!is_gimple_reg (gimple_phi_result (phi)))
prop_set_simulate_again (phi, false);
else
prop_set_simulate_again (phi, true);
/* any M VARYING = VARYING. */
val1->lattice_val = VARYING;
val1->value = NULL_TREE;
- val1->mem_ref = NULL_TREE;
}
else if (val1->lattice_val == CONSTANT
&& val2->lattice_val == CONSTANT
- && simple_cst_equal (val1->value, val2->value) == 1
- && (!do_store_ccp
- || (val1->mem_ref && val2->mem_ref
- && operand_equal_p (val1->mem_ref, val2->mem_ref, 0))))
+ && simple_cst_equal (val1->value, val2->value) == 1)
{
Ci M Cj = Ci if (i == j)
Ci M Cj = VARYING if (i != j).  */
val1->lattice_val = CONSTANT;
val1->value = val1->value;
- val1->mem_ref = val1->mem_ref;
}
else
{
/* Any other combination is VARYING. */
val1->lattice_val = VARYING;
val1->value = NULL_TREE;
- val1->mem_ref = NULL_TREE;
}
}
case UNDEFINED:
new_val.lattice_val = UNDEFINED;
new_val.value = NULL_TREE;
- new_val.mem_ref = NULL_TREE;
break;
default:
{
arg_val.lattice_val = CONSTANT;
arg_val.value = arg;
- arg_val.mem_ref = NULL_TREE;
}
else
arg_val = *(get_value (arg));
}
}
- else if (do_store_ccp && stmt_makes_single_load (stmt))
- {
- /* If the RHS is a memory load, see if the VUSEs associated with
- it are a valid constant for that memory load. */
- prop_value_t *val = get_value_loaded_by (stmt, const_val);
- if (val && val->mem_ref)
- {
- if (operand_equal_p (val->mem_ref, rhs, 0))
- return val->value;
-
- /* If RHS is extracting REALPART_EXPR or IMAGPART_EXPR of a
- complex type with a known constant value, return it. */
- if ((TREE_CODE (rhs) == REALPART_EXPR
- || TREE_CODE (rhs) == IMAGPART_EXPR)
- && operand_equal_p (val->mem_ref, TREE_OPERAND (rhs, 0), 0))
- return fold_build1 (TREE_CODE (rhs), TREE_TYPE (rhs), val->value);
- }
- }
-
if (kind == tcc_reference)
- return fold_const_aggregate_ref (rhs);
+ {
+ if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR
+ && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
+ {
+ prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
+ if (val->lattice_val == CONSTANT)
+ return fold_unary (VIEW_CONVERT_EXPR,
+ TREE_TYPE (rhs), val->value);
+ }
+ return fold_const_aggregate_ref (rhs);
+ }
else if (kind == tcc_declaration)
return get_symbol_constant_value (rhs);
return rhs;
so this should almost always return a simplified RHS. */
tree lhs = gimple_assign_lhs (stmt);
tree op0 = gimple_assign_rhs1 (stmt);
+ tree res;
/* Simplify the operand down to a constant. */
if (TREE_CODE (op0) == SSA_NAME)
useless_type_conversion_p places for pointer type conversions
do not apply here. Substitution later will only substitute to
allowed places. */
- if (IS_CONVERT_EXPR_CODE_P (subcode)
+ if (CONVERT_EXPR_CODE_P (subcode)
&& POINTER_TYPE_P (TREE_TYPE (lhs))
&& POINTER_TYPE_P (TREE_TYPE (op0))
/* Do not allow differences in volatile qualification
return op0;
}
- return fold_unary (subcode, gimple_expr_type (stmt), op0);
- }
+ res = fold_unary (subcode, gimple_expr_type (stmt), op0);
+
+ /* If the operation was a conversion do _not_ mark a
+ resulting constant with TREE_OVERFLOW if the original
+ constant was not. These conversions have implementation
+ defined behavior and retaining the TREE_OVERFLOW flag
+ here would confuse later passes such as VRP. */
+ if (res
+ && TREE_CODE (res) == INTEGER_CST
+ && TREE_CODE (op0) == INTEGER_CST
+ && CONVERT_EXPR_CODE_P (subcode))
+ TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
+
+ return res;
+ }
case GIMPLE_BINARY_RHS:
{
fn = val->value;
}
if (TREE_CODE (fn) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
&& DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
{
tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
ccp_lattice_t likelyvalue = likely_value (stmt);
bool is_constant;
- val.mem_ref = NULL_TREE;
-
fold_defer_overflow_warnings ();
/* If the statement is likely to have a CONSTANT result, then try
bother folding the statement. */
else if (likelyvalue == VARYING)
{
- enum tree_code code = gimple_code (stmt);
+ enum gimple_code code = gimple_code (stmt);
if (code == GIMPLE_ASSIGN)
{
enum tree_code subcode = gimple_assign_rhs_code (stmt);
prop_value_t *nval = get_value (rhs);
val = *nval;
}
- else if (do_store_ccp && stmt_makes_single_load (stmt))
- {
- /* Same as above, but the RHS is not a gimple register and yet
- has a known VUSE. If STMT is loading from the same memory
- location that created the SSA_NAMEs for the virtual operands,
- we can propagate the value on the RHS. */
- prop_value_t *nval = get_value_loaded_by (stmt, const_val);
-
- if (nval
- && nval->mem_ref
- && operand_equal_p (nval->mem_ref, rhs, 0))
- val = *nval;
- else
- val = evaluate_stmt (stmt);
- }
else
val = evaluate_stmt (stmt);
}
retval = SSA_PROP_INTERESTING;
}
}
- else if (do_store_ccp && stmt_makes_single_store (stmt))
- {
- /* Otherwise, set the names in VDEF operands to the new
- constant value and mark the LHS as the memory reference
- associated with VAL. */
- ssa_op_iter i;
- tree vdef;
- bool changed;
-
- /* Mark VAL as stored in the LHS of this assignment. */
- if (val.lattice_val == CONSTANT)
- val.mem_ref = lhs;
-
- /* Set the value of every VDEF to VAL. */
- changed = false;
- FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, i, SSA_OP_VIRTUAL_DEFS)
- {
- /* See PR 29801. We may have VDEFs for read-only variables
- (see the handling of unmodifiable variables in
- add_virtual_operand); do not attempt to change their value. */
- if (get_symbol_constant_value (SSA_NAME_VAR (vdef)) != NULL_TREE)
- continue;
-
- changed |= set_lattice_value (vdef, val);
- }
-
- /* Note that for propagation purposes, we are only interested in
- visiting statements that load the exact same memory reference
- stored here. Those statements will have the exact same list
- of virtual uses, so it is enough to set the output of this
- statement to be its first virtual definition. */
- *output_p = first_vdef (stmt);
- if (changed)
- {
- if (val.lattice_val == VARYING)
- retval = SSA_PROP_VARYING;
- else
- retval = SSA_PROP_INTERESTING;
- }
- }
return retval;
}
Mark them VARYING. */
FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
{
- prop_value_t v = { VARYING, NULL_TREE, NULL_TREE };
+ prop_value_t v = { VARYING, NULL_TREE };
set_lattice_value (def, v);
}
/* Main entry point for SSA Conditional Constant Propagation. */
static unsigned int
-execute_ssa_ccp (bool store_ccp)
+do_ssa_ccp (void)
{
- do_store_ccp = store_ccp;
ccp_initialize ();
ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
if (ccp_finalize ())
}
-static unsigned int
-do_ssa_ccp (void)
-{
- return execute_ssa_ccp (false);
-}
-
-
static bool
gate_ccp (void)
{
};
-static unsigned int
-do_ssa_store_ccp (void)
-{
- /* If STORE-CCP is not enabled, we just run regular CCP. */
- return execute_ssa_ccp (flag_tree_store_ccp != 0);
-}
-
-static bool
-gate_store_ccp (void)
-{
- /* STORE-CCP is enabled only with -ftree-store-ccp, but when
- -fno-tree-store-ccp is specified, we should run regular CCP.
- That's why the pass is enabled with either flag. */
- return flag_tree_store_ccp != 0 || flag_tree_ccp != 0;
-}
-
-
-struct gimple_opt_pass pass_store_ccp =
-{
- {
- GIMPLE_PASS,
- "store_ccp", /* name */
- gate_store_ccp, /* gate */
- do_ssa_store_ccp, /* execute */
- NULL, /* sub */
- NULL, /* next */
- 0, /* static_pass_number */
- TV_TREE_STORE_CCP, /* tv_id */
- PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
- 0, /* properties_provided */
- 0, /* properties_destroyed */
- 0, /* todo_flags_start */
- TODO_dump_func | TODO_verify_ssa
- | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
- }
-};
-
/* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X].
BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
is the desired result type. */
&size, &maxsize);
gcc_assert (newbase);
if (size == maxsize
+ && size != -1
&& !(sub_offset & (BITS_PER_UNIT - 1)))
{
base = newbase;
TREE_TYPE (expr));
if (t)
{
- TREE_THIS_VOLATILE (t) = volatile_p;
+ /* Preserve volatileness of the original expression.
+ We can end up with a plain decl here which is shared
+ and we shouldn't mess with its flags. */
+ if (!SSA_VAR_P (t))
+ TREE_THIS_VOLATILE (t) = volatile_p;
return t;
}
}
*walk_subtrees = 0;
if (POINTER_TYPE_P (TREE_TYPE (expr))
+ && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (expr)))
&& POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
&& (t = maybe_fold_offset_to_address (TREE_OPERAND (expr, 0),
integer_zero_node,
if (t)
{
- /* Preserve volatileness of the original expression. */
- TREE_THIS_VOLATILE (t) = volatile_p;
+ /* Preserve volatileness of the original expression.
+ We can end up with a plain decl here which is shared
+ and we shouldn't mess with its flags. */
+ if (!SSA_VAR_P (t))
+ TREE_THIS_VOLATILE (t) = volatile_p;
*expr_p = t;
*changed_p = true;
}
break;
case GIMPLE_UNARY_RHS:
- result = fold_unary (subcode,
- gimple_expr_type (stmt),
- gimple_assign_rhs1 (stmt));
+ {
+ tree rhs = gimple_assign_rhs1 (stmt);
- if (result)
- {
- STRIP_USELESS_TYPE_CONVERSION (result);
- if (valid_gimple_rhs_p (result))
- return result;
- }
- else if ((gimple_assign_rhs_code (stmt) == NOP_EXPR
- || gimple_assign_rhs_code (stmt) == CONVERT_EXPR)
- && POINTER_TYPE_P (gimple_expr_type (stmt))
- && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
- {
- tree type = gimple_expr_type (stmt);
- tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
- integer_zero_node, type);
- if (t)
- return t;
- }
+ result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
+ if (result)
+ {
+ /* If the operation was a conversion do _not_ mark a
+ resulting constant with TREE_OVERFLOW if the original
+ constant was not. These conversions have implementation
+ defined behavior and retaining the TREE_OVERFLOW flag
+ here would confuse later passes such as VRP. */
+ if (CONVERT_EXPR_CODE_P (subcode)
+ && TREE_CODE (result) == INTEGER_CST
+ && TREE_CODE (rhs) == INTEGER_CST)
+ TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
+
+ STRIP_USELESS_TYPE_CONVERSION (result);
+ if (valid_gimple_rhs_p (result))
+ return result;
+ }
+ else if (CONVERT_EXPR_CODE_P (subcode)
+ && POINTER_TYPE_P (gimple_expr_type (stmt))
+ && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
+ {
+ tree type = gimple_expr_type (stmt);
+ tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
+ integer_zero_node, type);
+ if (t)
+ return t;
+ }
+ }
break;
case GIMPLE_BINARY_RHS: