/* Conditional constant propagation pass for the GNU compiler.
Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
- 2010, 2011 Free Software Foundation, Inc.
+ 2010, 2011, 2012, 2013 Free Software Foundation, Inc.
Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
/* Now both lattice values are CONSTANT. */
- /* Allow transitioning from &x to &x & ~3. */
+ /* Allow transitioning from PHI <&x, not executable> == &x
+ to PHI <&x, &y> == common alignment. */
if (TREE_CODE (old_val.value) != INTEGER_CST
&& TREE_CODE (new_val.value) == INTEGER_CST)
return true;
the undefined operand may be promoted. */
return UNDEFINED;
+ case ADDR_EXPR:
+ /* If any part of an address is UNDEFINED, like the index
+ of an ARRAY_EXPR, then treat the result as UNDEFINED. */
+ return UNDEFINED;
+
default:
;
}
}
/* If there was an UNDEFINED operand but the result may be not UNDEFINED
- fall back to VARYING even if there were CONSTANT operands. */
+ fall back to CONSTANT. During iteration UNDEFINED may still drop
+ to CONSTANT. */
if (has_undefined_operand)
- return VARYING;
+ return CONSTANT;
/* We do not consider virtual operands here -- load from read-only
memory may have only VARYING virtual operands, but still be
prop_value_t rval = get_value_for_expr (rhs, true);
double_int value, mask;
prop_value_t val;
+
+ if (rval.lattice_val == UNDEFINED)
+ return rval;
+
gcc_assert ((rval.lattice_val == CONSTANT
&& TREE_CODE (rval.value) == INTEGER_CST)
|| double_int_minus_one_p (rval.mask));
prop_value_t r2val = get_value_for_expr (rhs2, true);
double_int value, mask;
prop_value_t val;
+
+ if (r1val.lattice_val == UNDEFINED
+ || r2val.lattice_val == UNDEFINED)
+ {
+ val.lattice_val = VARYING;
+ val.value = NULL_TREE;
+ val.mask = double_int_minus_one;
+ return val;
+ }
+
gcc_assert ((r1val.lattice_val == CONSTANT
&& TREE_CODE (r1val.value) == INTEGER_CST)
|| double_int_minus_one_p (r1val.mask));
tree simplified = NULL_TREE;
ccp_lattice_t likelyvalue = likely_value (stmt);
bool is_constant = false;
+ unsigned int align;
if (dump_file && (dump_flags & TDF_DETAILS))
{
&& !is_constant)
{
enum gimple_code code = gimple_code (stmt);
- tree fndecl;
val.lattice_val = VARYING;
val.value = NULL_TREE;
val.mask = double_int_minus_one;
|| POINTER_TYPE_P (TREE_TYPE (rhs1)))
val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
}
- else if (code == GIMPLE_CALL
- && (fndecl = gimple_call_fndecl (stmt))
- && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
+ else if (gimple_call_builtin_class_p (stmt, BUILT_IN_NORMAL))
{
+ tree fndecl = gimple_call_fndecl (stmt);
switch (DECL_FUNCTION_CODE (fndecl))
{
case BUILT_IN_MALLOC:
break;
case BUILT_IN_ALLOCA:
+ case BUILT_IN_ALLOCA_WITH_ALIGN:
+ align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
+ ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
+ : BIGGEST_ALIGNMENT);
val.lattice_val = CONSTANT;
val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
val.mask = shwi_to_double_int
- (~(((HOST_WIDE_INT) BIGGEST_ALIGNMENT)
+ (~(((HOST_WIDE_INT) align)
/ BITS_PER_UNIT - 1));
break;
return val;
}
-/* Detects a vla-related alloca with a constant argument. Declares fixed-size
- array and return the address, if found, otherwise returns NULL_TREE. */
+/* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
+ each matching BUILT_IN_STACK_RESTORE. Mark visited phis in VISITED. */
+
+static void
+insert_clobber_before_stack_restore (tree saved_val, tree var, htab_t *visited)
+{
+ gimple stmt, clobber_stmt;
+ tree clobber;
+ imm_use_iterator iter;
+ gimple_stmt_iterator i;
+ gimple *slot;
+
+ /* Walk every immediate use of SAVED_VAL, chasing SSA copies and PHIs,
+ so that each BUILT_IN_STACK_RESTORE reachable from the stack_save
+ gets a clobber of VAR inserted in front of it. */
+ FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
+ if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
+ {
+ /* A volatile empty CONSTRUCTOR assignment is GIMPLE's
+ representation of a variable clobber. */
+ clobber = build_constructor (TREE_TYPE (var), NULL);
+ TREE_THIS_VOLATILE (clobber) = 1;
+ clobber_stmt = gimple_build_assign (var, clobber);
+
+ i = gsi_for_stmt (stmt);
+ gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
+ }
+ else if (gimple_code (stmt) == GIMPLE_PHI)
+ {
+ /* PHIs can form cycles; VISITED (created lazily, pointer-keyed)
+ guards against recursing into the same PHI twice. */
+ if (*visited == NULL)
+ *visited = htab_create (10, htab_hash_pointer, htab_eq_pointer, NULL);
+
+ slot = (gimple *)htab_find_slot (*visited, stmt, INSERT);
+ if (*slot != NULL)
+ continue;
+
+ *slot = stmt;
+ /* Follow the merged value: restores may use the PHI result. */
+ insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
+ visited);
+ }
+ else if (gimple_assign_ssa_name_copy_p (stmt))
+ /* Plain SSA copy of the saved stack pointer: follow the copy. */
+ insert_clobber_before_stack_restore (gimple_assign_lhs (stmt), var,
+ visited);
+ else
+ /* Any other use of a stack_save value is expected to be debug-only. */
+ gcc_assert (is_gimple_debug (stmt));
+}
+
+/* Advance the iterator to the previous non-debug gimple statement in the same
+ or dominating basic block. */
+
+static inline void
+gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
+{
+ basic_block dom;
+
+ gsi_prev_nondebug (i);
+ /* Ran off the start of the current block: resume from the end of the
+ immediate dominator, skipping any empty dominating blocks. */
+ while (gsi_end_p (*i))
+ {
+ dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
+ /* NOTE(review): when we hit the entry block the iterator is left in
+ the gsi_end_p state — callers must test gsi_end_p to terminate. */
+ if (dom == NULL || dom == ENTRY_BLOCK_PTR)
+ return;
+
+ *i = gsi_last_bb (dom);
+ }
+}
+
+/* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
+ a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.
+
+ It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator when a
+ previous pass (such as DOM) duplicated it along multiple paths to a BB. In
+ that case the function gives up without inserting the clobbers. */
+
+static void
+insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
+{
+ gimple stmt;
+ tree saved_val;
+ htab_t visited = NULL;
+
+ /* Scan backwards from I through dominating statements looking for the
+ BUILT_IN_STACK_SAVE whose restores should clobber VAR. If none is
+ found the loop simply runs out and no clobbers are inserted. */
+ for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
+ {
+ stmt = gsi_stmt (i);
+
+ if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
+ continue;
+
+ /* A stack_save whose result is unused cannot feed a restore. */
+ saved_val = gimple_call_lhs (stmt);
+ if (saved_val == NULL_TREE)
+ continue;
+
+ insert_clobber_before_stack_restore (saved_val, var, &visited);
+ break;
+ }
+
+ /* VISITED is allocated lazily by the callee; free it if it was used. */
+ if (visited != NULL)
+ htab_delete (visited);
+}
+
+/* Detects a __builtin_alloca_with_align with constant size argument. Declares
+ fixed-size array and returns the address, if found, otherwise returns
+ NULL_TREE. */
static tree
-fold_builtin_alloca_for_var (gimple stmt)
+fold_builtin_alloca_with_align (gimple stmt)
{
unsigned HOST_WIDE_INT size, threshold, n_elem;
tree lhs, arg, block, var, elem_type, array_type;
- unsigned int align;
/* Get lhs. */
lhs = gimple_call_lhs (stmt);
/* Detect constant argument. */
arg = get_constant_value (gimple_call_arg (stmt, 0));
- if (arg == NULL_TREE || TREE_CODE (arg) != INTEGER_CST
+ if (arg == NULL_TREE
+ || TREE_CODE (arg) != INTEGER_CST
|| !host_integerp (arg, 1))
return NULL_TREE;
+
size = TREE_INT_CST_LOW (arg);
- /* Heuristic: don't fold large vlas. */
+ /* Heuristic: don't fold large allocas. */
threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
- /* In case a vla is declared at function scope, it has the same lifetime as a
- declared array, so we allow a larger size. */
+ /* In case the alloca is located at function entry, it has the same lifetime
+ as a declared array, so we allow a larger size. */
block = gimple_block (stmt);
if (!(cfun->after_inlining
&& TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
/* Declare array. */
elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
n_elem = size * 8 / BITS_PER_UNIT;
- align = MIN (size * 8, BIGGEST_ALIGNMENT);
array_type = build_array_type_nelts (elem_type, n_elem);
var = create_tmp_var (array_type, NULL);
- DECL_ALIGN (var) = align;
+ DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
+ {
+ struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
+ if (pi != NULL && !pi->pt.anything)
+ {
+ bool singleton_p;
+ unsigned uid;
+ singleton_p = pt_solution_singleton_p (&pi->pt, &uid);
+ gcc_assert (singleton_p);
+ SET_DECL_PT_UID (var, uid);
+ }
+ }
/* Fold alloca to the address of the array. */
return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
case GIMPLE_CALL:
{
tree lhs = gimple_call_lhs (stmt);
+ int flags = gimple_call_flags (stmt);
tree val;
tree argt;
bool changed = false;
type issues. */
if (lhs
&& TREE_CODE (lhs) == SSA_NAME
- && (val = get_constant_value (lhs)))
+ && (val = get_constant_value (lhs))
+ /* Don't optimize away calls that have side-effects. */
+ && (flags & (ECF_CONST|ECF_PURE)) != 0
+ && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
{
tree new_rhs = unshare_expr (val);
bool res;
if (gimple_call_internal_p (stmt))
return false;
- /* The heuristic of fold_builtin_alloca_for_var differs before and after
- inlining, so we don't require the arg to be changed into a constant
- for folding, but just to be constant. */
- if (gimple_call_alloca_for_var_p (stmt))
+ /* The heuristic of fold_builtin_alloca_with_align differs before and
+ after inlining, so we don't require the arg to be changed into a
+ constant for folding, but just to be constant. */
+ if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
{
- tree new_rhs = fold_builtin_alloca_for_var (stmt);
- bool res;
- if (new_rhs == NULL_TREE)
- return false;
- res = update_call_from_tree (gsi, new_rhs);
- gcc_assert (res);
- return true;
+ tree new_rhs = fold_builtin_alloca_with_align (stmt);
+ if (new_rhs)
+ {
+ bool res = update_call_from_tree (gsi, new_rhs);
+ tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0),0);
+ gcc_assert (res);
+ insert_clobbers_for_var (*gsi, var);
+ return true;
+ }
}
/* Propagate into the call arguments. Compared to replace_uses_in
static unsigned int
do_ssa_ccp (void)
{
+ unsigned int todo = 0;
+ /* Dominance info is needed because CCP folding of alloca_with_align
+ calls insert_clobbers_for_var, which walks immediate dominators. */
+ calculate_dominance_info (CDI_DOMINATORS);
ccp_initialize ();
ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
if (ccp_finalize ())
- return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
- else
- return 0;
+ todo = (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
+ free_dominance_info (CDI_DOMINATORS);
+ return todo;
}
if (!callee
|| DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
/* All regular builtins are ok, just obviously not alloca. */
- || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
+ || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
+ || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
return NULL_TREE;
if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
case BUILT_IN_VA_START:
if (!va_list_simple_ptr
|| targetm.expand_builtin_va_start != NULL
- || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
+ || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
return NULL_TREE;
if (gimple_call_num_args (call) != 2)
return NULL_TREE;
lhs = build_fold_indirect_ref_loc (loc, lhs);
- rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
+ rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
1, integer_zero_node);
rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);