/* Conditional constant propagation pass for the GNU compiler.
- Copyright (C) 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
+ Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
+ Free Software Foundation, Inc.
Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
print_generic_expr (outf, val.const_val, dump_flags);
break;
default:
- abort ();
+ gcc_unreachable ();
}
}
sym = SSA_NAME_VAR (var);
else
{
-#ifdef ENABLE_CHECKING
- if (!DECL_P (var))
- abort ();
-#endif
+ gcc_assert (DECL_P (var));
sym = var;
}
val.lattice_val = UNDEFINED;
val.const_val = NULL_TREE;
- if (TREE_CODE (sym) == PARM_DECL || TREE_THIS_VOLATILE (sym))
+ if (TREE_CODE (var) == SSA_NAME
+ && SSA_NAME_VALUE (var)
+ && is_gimple_min_invariant (SSA_NAME_VALUE (var)))
+ {
+ val.lattice_val = CONSTANT;
+ val.const_val = SSA_NAME_VALUE (var);
+ }
+ else if (TREE_CODE (sym) == PARM_DECL || TREE_THIS_VOLATILE (sym))
{
/* Function arguments and volatile variables are considered VARYING. */
val.lattice_val = VARYING;
return val;
}
-
/* Get the constant value associated with variable VAR. */
static value *
{
value *val;
-#if defined ENABLE_CHECKING
- if (TREE_CODE (var) != SSA_NAME)
- abort ();
-#endif
+ gcc_assert (TREE_CODE (var) == SSA_NAME);
val = &value_vector[SSA_NAME_VERSION (var)];
if (val->lattice_val == UNINITIALIZED)
{
value *old = get_value (var);
-#ifdef ENABLE_CHECKING
if (val.lattice_val == UNDEFINED)
{
/* CONSTANT->UNDEFINED is never a valid state transition. */
- if (old->lattice_val == CONSTANT)
- abort ();
+ gcc_assert (old->lattice_val != CONSTANT);
/* UNKNOWN_VAL->UNDEFINED is never a valid state transition. */
- if (old->lattice_val == UNKNOWN_VAL)
- abort ();
+ gcc_assert (old->lattice_val != UNKNOWN_VAL);
/* VARYING->UNDEFINED is generally not a valid state transition,
except for values which are initialized to VARYING. */
- if (old->lattice_val == VARYING
- && get_default_value (var).lattice_val != VARYING)
- abort ();
+ gcc_assert (old->lattice_val != VARYING
+ || get_default_value (var).lattice_val == VARYING);
}
else if (val.lattice_val == CONSTANT)
- {
- /* VARYING -> CONSTANT is an invalid state transition, except
- for objects which start off in a VARYING state. */
- if (old->lattice_val == VARYING
- && get_default_value (var).lattice_val != VARYING)
- abort ();
- }
-#endif
+ /* VARYING -> CONSTANT is an invalid state transition, except
+ for objects which start off in a VARYING state. */
+ gcc_assert (old->lattice_val != VARYING
+ || get_default_value (var).lattice_val == VARYING);
/* If the constant for VAR has changed, then this VAR is really varying. */
if (old->lattice_val == CONSTANT
if (val->lattice_val == UNKNOWN_VAL)
return UNKNOWN_VAL;
-#ifdef ENABLE_CHECKING
- /* There should be no VUSE operands that are UNDEFINED. */
- if (val->lattice_val == UNDEFINED)
- abort ();
-#endif
+ /* There should be no VUSE operands that are UNDEFINED. */
+ gcc_assert (val->lattice_val != UNDEFINED);
if (val->lattice_val == CONSTANT)
found_constant = 1;
}
-/* Function indicating whether we ought to include information for VAR
- when calculating immediate uses. */
-
-static bool
-need_imm_uses_for (tree var)
-{
- return get_value (var)->lattice_val != VARYING;
-}
-
-
/* Initialize local data structures for CCP. */
static void
}
sbitmap_free (is_may_def);
-
- /* Compute immediate uses for variables we care about. */
- compute_immediate_uses (TDFA_USE_OPS | TDFA_USE_VOPS, need_imm_uses_for);
}
FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
{
- value *val = get_value (USE_FROM_PTR (use));
+ tree tuse = USE_FROM_PTR (use);
+ value *val = get_value (tuse);
- if (val->lattice_val == CONSTANT)
- {
- SET_USE (use, val->const_val);
- replaced = true;
- if (POINTER_TYPE_P (TREE_TYPE (USE_FROM_PTR (use)))
- && replaced_addresses_p)
- *replaced_addresses_p = true;
- }
+ if (val->lattice_val != CONSTANT)
+ continue;
+
+ if (TREE_CODE (stmt) == ASM_EXPR
+ && !may_propagate_copy_into_asm (tuse))
+ continue;
+
+ SET_USE (use, val->const_val);
+
+ replaced = true;
+ if (POINTER_TYPE_P (TREE_TYPE (tuse)) && replaced_addresses_p)
+ *replaced_addresses_p = true;
}
return replaced;
substitute_and_fold (void)
{
basic_block bb;
+ unsigned int i;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file,
{
bool changed = fold_stmt (bsi_stmt_ptr (i));
stmt = bsi_stmt(i);
+
/* If we folded a builtin function, we'll likely
need to rename VDEFs. */
if (replaced_address || changed)
- {
- mark_new_vars_to_rename (stmt, vars_to_rename);
- if (maybe_clean_eh_stmt (stmt))
- tree_purge_dead_eh_edges (bb);
- }
- else
- modify_stmt (stmt);
+ mark_new_vars_to_rename (stmt, vars_to_rename);
+
+ /* If we cleaned up EH information from the statement,
+ remove EH edges. */
+ if (maybe_clean_eh_stmt (stmt))
+ tree_purge_dead_eh_edges (bb);
+
+ update_stmt (stmt);
}
if (dump_file && (dump_flags & TDF_DETAILS))
}
}
}
+
+ /* And transfer what we learned from VALUE_VECTOR into the
+ SSA_NAMEs themselves. This probably isn't terribly important
+ since we probably constant propagated the values to their
+ use sites above. */
+ for (i = 0; i < num_ssa_names; i++)
+ {
+ tree name = ssa_name (i);
+ value *value;
+
+ if (!name)
+ continue;
+
+ value = get_value (name);
+ if (value->lattice_val == CONSTANT
+ && is_gimple_reg (name)
+ && is_gimple_min_invariant (value->const_val))
+ SSA_NAME_VALUE (name) = value->const_val;
+ }
}
/* Perform substitutions based on the known constant values. */
substitute_and_fold ();
- /* Now cleanup any unreachable code. */
- cleanup_tree_cfg ();
-
free (value_vector);
}
break;
default:
- abort ();
+ gcc_unreachable ();
}
for (i = 0; i < PHI_NUM_ARGS (phi); i++)
fprintf (dump_file, "\n\n");
}
- /* Check for an invalid change from UNKNOWN_VAL to UNDEFINED. */
+ /* Check for an invalid change from UNKNOWN_VAL to UNDEFINED. */
if (old_val->lattice_val == UNKNOWN_VAL
&& new_val.lattice_val == UNDEFINED)
return SSA_PROP_NOT_INTERESTING;
{
tree rhs = get_rhs (stmt);
enum tree_code code = TREE_CODE (rhs);
- int kind = TREE_CODE_CLASS (code);
+ enum tree_code_class kind = TREE_CODE_CLASS (code);
tree retval = NULL_TREE;
vuse_optype vuses;
/* Unary operators. Note that we know the single operand must
be a constant. So this should almost always return a
simplified RHS. */
- if (kind == '1')
+ if (kind == tcc_unary)
{
/* Handle unary operators which can appear in GIMPLE form. */
tree op0 = TREE_OPERAND (rhs, 0);
op0 = get_value (op0)->const_val;
}
- retval = nondestructive_fold_unary_to_constant (code,
- TREE_TYPE (rhs),
- op0);
+ retval = fold_unary_to_constant (code, TREE_TYPE (rhs), op0);
/* If we folded, but did not create an invariant, then we can not
use this expression. */
/* Binary and comparison operators. We know one or both of the
operands are constants. */
- else if (kind == '2'
- || kind == '<'
+ else if (kind == tcc_binary
+ || kind == tcc_comparison
|| code == TRUTH_AND_EXPR
|| code == TRUTH_OR_EXPR
|| code == TRUTH_XOR_EXPR)
op1 = val->const_val;
}
- retval = nondestructive_fold_binary_to_constant (code,
- TREE_TYPE (rhs),
- op0, op1);
+ retval = fold_binary_to_constant (code, TREE_TYPE (rhs), op0, op1);
/* If we folded, but did not create an invariant, then we can not
use this expression. */
if (NUM_USES (uses) != 0)
{
tree *orig;
+ tree fndecl, arglist;
size_t i;
/* Preserve the original values of every operand. */
/* Substitute operands with their values and try to fold. */
replace_uses_in (stmt, NULL);
- retval = fold_builtin (rhs, false);
+ fndecl = get_callee_fndecl (rhs);
+ arglist = TREE_OPERAND (rhs, 1);
+ retval = fold_builtin (fndecl, arglist, false);
/* Restore operands to their original form. */
for (i = 0; i < NUM_USES (uses); i++)
vuses = STMT_VUSE_OPS (stmt);
v_must_defs = STMT_V_MUST_DEF_OPS (stmt);
-#if defined ENABLE_CHECKING
- if (NUM_V_MAY_DEFS (STMT_V_MAY_DEF_OPS (stmt)) > 0
- || (NUM_V_MUST_DEFS (v_must_defs) != 1
- && TREE_CODE (lhs) != SSA_NAME))
- abort ();
-#endif
+ gcc_assert (NUM_V_MAY_DEFS (STMT_V_MAY_DEF_OPS (stmt)) == 0);
+ gcc_assert (NUM_V_MUST_DEFS (v_must_defs) == 1
+ || TREE_CODE (lhs) == SSA_NAME);
/* We require the SSA version number of the lhs for the value_vector.
Make sure we have it. */
{
/* If we make it here, then stmt only has one definition:
a V_MUST_DEF. */
- lhs = V_MUST_DEF_OP (v_must_defs, 0);
+ lhs = V_MUST_DEF_RESULT (v_must_defs, 0);
}
if (TREE_CODE (rhs) == SSA_NAME)
val = *nval;
}
else
- {
- /* Evaluate the statement. */
+ /* Evaluate the statement. */
val = evaluate_stmt (stmt);
- }
- /* FIXME: Hack. If this was a definition of a bitfield, we need to widen
+ /* If the original LHS was a VIEW_CONVERT_EXPR, modify the constant
+ value to be a VIEW_CONVERT_EXPR of the old constant value.
+
+ ??? Also, if this was a definition of a bitfield, we need to widen
the constant value into the type of the destination variable. This
should not be necessary if GCC represented bitfields properly. */
{
- tree lhs = TREE_OPERAND (stmt, 0);
+ tree orig_lhs = TREE_OPERAND (stmt, 0);
+
+ if (TREE_CODE (orig_lhs) == VIEW_CONVERT_EXPR
+ && val.lattice_val == CONSTANT)
+ {
+ tree w = fold (build1 (VIEW_CONVERT_EXPR,
+ TREE_TYPE (TREE_OPERAND (orig_lhs, 0)),
+ val.const_val));
+
+ orig_lhs = TREE_OPERAND (orig_lhs, 1);
+ if (w && is_gimple_min_invariant (w))
+ val.const_val = w;
+ else
+ {
+ val.lattice_val = VARYING;
+ val.const_val = NULL;
+ }
+ }
+
if (val.lattice_val == CONSTANT
- && TREE_CODE (lhs) == COMPONENT_REF
- && DECL_BIT_FIELD (TREE_OPERAND (lhs, 1)))
+ && TREE_CODE (orig_lhs) == COMPONENT_REF
+ && DECL_BIT_FIELD (TREE_OPERAND (orig_lhs, 1)))
{
- tree w = widen_bitfield (val.const_val, TREE_OPERAND (lhs, 1), lhs);
+ tree w = widen_bitfield (val.const_val, TREE_OPERAND (orig_lhs, 1),
+ orig_lhs);
if (w && is_gimple_min_invariant (w))
val.const_val = w;
}
/* If LHS is not a gimple register, then it cannot take on an
- UNDEFINED value. */
+ UNDEFINED value. */
if (!is_gimple_reg (SSA_NAME_VAR (lhs))
&& val.lattice_val == UNDEFINED)
val.lattice_val = UNKNOWN_VAL;
to the worklist. If no single edge can be determined statically,
return SSA_PROP_VARYING to feed all the outgoing edges to the
propagation engine. */
- *taken_edge_p = find_taken_edge (block, val.const_val);
+ *taken_edge_p = val.const_val ? find_taken_edge (block, val.const_val) : 0;
if (*taken_edge_p)
return SSA_PROP_INTERESTING;
else
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_rename_vars
+ TODO_cleanup_cfg | TODO_dump_func | TODO_rename_vars
| TODO_ggc_collect | TODO_verify_ssa
- | TODO_verify_stmts /* todo_flags_finish */
+ | TODO_verify_stmts, /* todo_flags_finish */
+ 0 /* letter */
};
if (field_size > HOST_BITS_PER_WIDE_INT || var_size > HOST_BITS_PER_WIDE_INT)
return NULL_TREE;
-#if defined ENABLE_CHECKING
- if (var_size < field_size)
- abort ();
-#endif
+ gcc_assert (var_size >= field_size);
/* If the sign bit of the value is not set or the field's type is unsigned,
just mask off the high order bits of the value. */
for (i = 0, mask = 0; i < field_size; i++)
mask |= ((HOST_WIDE_INT) 1) << i;
- wide_val = build (BIT_AND_EXPR, TREE_TYPE (var), val,
- fold_convert (TREE_TYPE (var),
- build_int_cst (NULL_TREE, mask)));
+ wide_val = build2 (BIT_AND_EXPR, TREE_TYPE (var), val,
+ build_int_cst (TREE_TYPE (var), mask));
}
else
{
for (i = 0, mask = 0; i < (var_size - field_size); i++)
mask |= ((HOST_WIDE_INT) 1) << (var_size - i - 1);
- wide_val = build (BIT_IOR_EXPR, TREE_TYPE (var), val,
- fold_convert (TREE_TYPE (var),
- build_int_cst (NULL_TREE, mask)));
+ wide_val = build2 (BIT_IOR_EXPR, TREE_TYPE (var), val,
+ build_int_cst (TREE_TYPE (var), mask));
}
return fold (wide_val);
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
tree orig_type, bool base_is_ptr)
{
- tree f, t, field_type, tail_array_field;
+ tree f, t, field_type, tail_array_field, field_offset;
if (TREE_CODE (record_type) != RECORD_TYPE
&& TREE_CODE (record_type) != UNION_TYPE
continue;
if (DECL_BIT_FIELD (f))
continue;
- if (TREE_CODE (DECL_FIELD_OFFSET (f)) != INTEGER_CST)
+
+ field_offset = byte_position (f);
+ if (TREE_CODE (field_offset) != INTEGER_CST)
continue;
/* ??? Java creates "interesting" fields for representing base classes.
tail_array_field = NULL_TREE;
/* Check to see if this offset overlaps with the field. */
- cmp = tree_int_cst_compare (DECL_FIELD_OFFSET (f), offset);
+ cmp = tree_int_cst_compare (field_offset, offset);
if (cmp > 0)
continue;
field_type = TREE_TYPE (f);
- if (cmp < 0)
- {
- /* Don't care about offsets into the middle of scalars. */
- if (!AGGREGATE_TYPE_P (field_type))
- continue;
-
- /* Check for array at the end of the struct. This is often
- used as for flexible array members. We should be able to
- turn this into an array access anyway. */
- if (TREE_CODE (field_type) == ARRAY_TYPE)
- tail_array_field = f;
-
- /* Check the end of the field against the offset. */
- if (!DECL_SIZE_UNIT (f)
- || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
- continue;
- t = int_const_binop (MINUS_EXPR, offset, DECL_FIELD_OFFSET (f), 1);
- if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
- continue;
-
- /* If we matched, then set offset to the displacement into
- this field. */
- offset = t;
- }
/* Here we exactly match the offset being checked. If the types match,
then we can return that field. */
- else if (lang_hooks.types_compatible_p (orig_type, field_type))
+ if (cmp == 0
+ && lang_hooks.types_compatible_p (orig_type, field_type))
{
if (base_is_ptr)
base = build1 (INDIRECT_REF, record_type, base);
t = build (COMPONENT_REF, field_type, base, f, NULL_TREE);
return t;
}
+
+ /* Don't care about offsets into the middle of scalars. */
+ if (!AGGREGATE_TYPE_P (field_type))
+ continue;
- /* Don't care about type-punning of scalars. */
- else if (!AGGREGATE_TYPE_P (field_type))
- return NULL_TREE;
+ /* Check for array at the end of the struct. This is often
+ used for flexible array members. We should be able to
+ turn this into an array access anyway. */
+ if (TREE_CODE (field_type) == ARRAY_TYPE)
+ tail_array_field = f;
+ /* Check the end of the field against the offset. */
+ if (!DECL_SIZE_UNIT (f)
+ || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
+ continue;
+ t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
+ if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
+ continue;
+
+ /* If we matched, then set offset to the displacement into
+ this field. */
+ offset = t;
goto found;
}
f = tail_array_field;
field_type = TREE_TYPE (f);
+ offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
found:
/* If we get here, we've got an aggregate field, and a possibly
/* We can get here for out-of-range string constant accesses,
such as "_"[3]. Bail out of the entire substitution search
and arrange for the entire statement to be replaced by a
- call to __builtin_trap. In all likelyhood this will all be
+ call to __builtin_trap. In all likelihood this will all be
constant-folded away, but in the meantime we can't leave with
something that get_expr_operands can't understand. */
/* First try the generic builtin folder. If that succeeds, return the
result directly. */
- result = fold_builtin (fn, ignore);
+ callee = get_callee_fndecl (fn);
+ arglist = TREE_OPERAND (fn, 1);
+ result = fold_builtin (callee, arglist, ignore);
if (result)
{
if (ignore)
}
/* Ignore MD builtins. */
- callee = get_callee_fndecl (fn);
if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
return NULL_TREE;
/* If the builtin could not be folded, and it has no argument list,
we're done. */
- arglist = TREE_OPERAND (fn, 1);
if (!arglist)
return NULL_TREE;
}
/* Try to use the dataflow information gathered by the CCP process. */
- visited = BITMAP_XMALLOC ();
+ visited = BITMAP_ALLOC (NULL);
memset (strlen_val, 0, sizeof (strlen_val));
for (i = 0, a = arglist;
strlen_val[i] = NULL_TREE;
}
- BITMAP_XFREE (visited);
+ BITMAP_FREE (visited);
result = NULL_TREE;
switch (DECL_FUNCTION_CODE (callee))
case BUILT_IN_STRCPY:
if (strlen_val[1] && is_gimple_val (strlen_val[1]))
- result = fold_builtin_strcpy (fn, strlen_val[1]);
+ {
+ tree fndecl = get_callee_fndecl (fn);
+ tree arglist = TREE_OPERAND (fn, 1);
+ result = fold_builtin_strcpy (fndecl, arglist, strlen_val[1]);
+ }
break;
case BUILT_IN_STRNCPY:
if (strlen_val[1] && is_gimple_val (strlen_val[1]))
- result = fold_builtin_strncpy (fn, strlen_val[1]);
+ {
+ tree fndecl = get_callee_fndecl (fn);
+ tree arglist = TREE_OPERAND (fn, 1);
+ result = fold_builtin_strncpy (fndecl, arglist, strlen_val[1]);
+ }
break;
case BUILT_IN_FPUTS:
break;
default:
- abort ();
+ gcc_unreachable ();
}
if (result && ignore)
if (TREE_CODE (callee) == OBJ_TYPE_REF
&& lang_hooks.fold_obj_type_ref
&& TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
- && DECL_P (TREE_OPERAND (OBJ_TYPE_REF_OBJECT (callee), 0)))
+ && DECL_P (TREE_OPERAND
+ (OBJ_TYPE_REF_OBJECT (callee), 0)))
{
tree t;
}
\f
+/* Convert EXPR into a GIMPLE value suitable for substitution on the
+ RHS of an assignment. Insert the necessary statements before
+ iterator *SI_P. */
+
+static tree
+convert_to_gimple_builtin (block_stmt_iterator *si_p, tree expr)
+{
+ tree_stmt_iterator ti;
+ tree stmt = bsi_stmt (*si_p);
+ tree tmp, stmts = NULL;
+
+ push_gimplify_context ();
+ tmp = get_initialized_tmp_var (expr, &stmts, NULL);
+ pop_gimplify_context (NULL);
+
+ /* The replacement can expose previously unreferenced variables. */
+ for (ti = tsi_start (stmts); !tsi_end_p (ti); tsi_next (&ti))
+ {
+ find_new_referenced_vars (tsi_stmt_ptr (ti));
+ mark_new_vars_to_rename (tsi_stmt (ti), vars_to_rename);
+ }
+
+ if (EXPR_HAS_LOCATION (stmt))
+ annotate_all_with_locus (&stmts, EXPR_LOCATION (stmt));
+
+ bsi_insert_before (si_p, stmts, BSI_SAME_STMT);
+
+ return tmp;
+}
+
+
/* A simple pass that attempts to fold all builtin functions. This pass
is run after we've propagated as many constants as we can. */
static void
execute_fold_all_builtins (void)
{
+ bool cfg_changed = false;
basic_block bb;
FOR_EACH_BB (bb)
{
print_generic_stmt (dump_file, *stmtp, dump_flags);
}
- if (set_rhs (stmtp, result))
- modify_stmt (*stmtp);
+ if (!set_rhs (stmtp, result))
+ {
+ result = convert_to_gimple_builtin (&i, result);
+ if (result)
+ {
+ bool ok = set_rhs (stmtp, result);
+
+ gcc_assert (ok);
+ }
+ }
+ update_stmt (*stmtp);
+ if (maybe_clean_eh_stmt (*stmtp)
+ && tree_purge_dead_eh_edges (bb))
+ cfg_changed = true;
if (dump_file && (dump_flags & TDF_DETAILS))
{
}
}
}
+
+ /* Delete unreachable blocks. */
+ if (cfg_changed)
+ cleanup_tree_cfg ();
}
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_verify_ssa /* todo_flags_finish */
+ TODO_dump_func
+ | TODO_verify_ssa
+ | TODO_rename_vars, /* todo_flags_finish */
+ 0 /* letter */
};