val.lattice_val = UNDEFINED;
val.const_val = NULL_TREE;
- if (TREE_CODE (sym) == PARM_DECL || TREE_THIS_VOLATILE (sym))
+ if (TREE_CODE (var) == SSA_NAME
+ && SSA_NAME_VALUE (var)
+ && is_gimple_min_invariant (SSA_NAME_VALUE (var)))
+ {
+ val.lattice_val = CONSTANT;
+ val.const_val = SSA_NAME_VALUE (var);
+ }
+ else if (TREE_CODE (sym) == PARM_DECL || TREE_THIS_VOLATILE (sym))
{
/* Function arguments and volatile variables are considered VARYING. */
val.lattice_val = VARYING;
FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
{
- value *val = get_value (USE_FROM_PTR (use));
+ tree tuse = USE_FROM_PTR (use);
+ value *val = get_value (tuse);
- if (val->lattice_val == CONSTANT)
- {
- SET_USE (use, val->const_val);
- replaced = true;
- if (POINTER_TYPE_P (TREE_TYPE (USE_FROM_PTR (use)))
- && replaced_addresses_p)
- *replaced_addresses_p = true;
- }
+ if (val->lattice_val != CONSTANT)
+ continue;
+
+ if (TREE_CODE (stmt) == ASM_EXPR
+ && !may_propagate_copy_into_asm (tuse))
+ continue;
+
+ SET_USE (use, val->const_val);
+
+ replaced = true;
+ if (POINTER_TYPE_P (TREE_TYPE (tuse)) && replaced_addresses_p)
+ *replaced_addresses_p = true;
}
return replaced;
substitute_and_fold (void)
{
basic_block bb;
+ unsigned int i;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file,
{
bool changed = fold_stmt (bsi_stmt_ptr (i));
stmt = bsi_stmt(i);
+
/* If we folded a builtin function, we'll likely
need to rename VDEFs. */
if (replaced_address || changed)
- {
- mark_new_vars_to_rename (stmt, vars_to_rename);
- if (maybe_clean_eh_stmt (stmt))
- tree_purge_dead_eh_edges (bb);
- }
- else
- modify_stmt (stmt);
+ mark_new_vars_to_rename (stmt, vars_to_rename);
+
+ /* If we cleaned up EH information from the statement,
+ remove EH edges. */
+ if (maybe_clean_eh_stmt (stmt))
+ tree_purge_dead_eh_edges (bb);
+
+ modify_stmt (stmt);
}
if (dump_file && (dump_flags & TDF_DETAILS))
}
}
}
+
+ /* And transfer what we learned from VALUE_VECTOR into the
+ SSA_NAMEs themselves. This probably isn't terribly important
+ since we probably constant propagated the values to their
+ use sites above. */
+ for (i = 0; i < num_ssa_names; i++)
+ {
+ tree name = ssa_name (i);
+ value *value;
+
+ if (!name)
+ continue;
+
+ value = get_value (name);
+ if (value->lattice_val == CONSTANT
+ && is_gimple_reg (name)
+ && is_gimple_min_invariant (value->const_val))
+ SSA_NAME_VALUE (name) = value->const_val;
+ }
}
/* Perform substitutions based on the known constant values. */
substitute_and_fold ();
- /* Now cleanup any unreachable code. */
- cleanup_tree_cfg ();
-
free (value_vector);
}
op0 = get_value (op0)->const_val;
}
- retval = nondestructive_fold_unary_to_constant (code,
- TREE_TYPE (rhs),
- op0);
+ retval = fold_unary_to_constant (code, TREE_TYPE (rhs), op0);
/* If we folded, but did not create an invariant, then we can not
use this expression. */
op1 = val->const_val;
}
- retval = nondestructive_fold_binary_to_constant (code,
- TREE_TYPE (rhs),
- op0, op1);
+ retval = fold_binary_to_constant (code, TREE_TYPE (rhs), op0, op1);
/* If we folded, but did not create an invariant, then we can not
use this expression. */
{
/* If we make it here, then stmt only has one definition:
a V_MUST_DEF. */
- lhs = V_MUST_DEF_OP (v_must_defs, 0);
+ lhs = V_MUST_DEF_RESULT (v_must_defs, 0);
}
if (TREE_CODE (rhs) == SSA_NAME)
val = *nval;
}
else
- {
- /* Evaluate the statement. */
+ /* Evaluate the statement. */
val = evaluate_stmt (stmt);
- }
- /* FIXME: Hack. If this was a definition of a bitfield, we need to widen
+ /* If the original LHS was a VIEW_CONVERT_EXPR, modify the constant
+ value to be a VIEW_CONVERT_EXPR of the old constant value. This is
+ valid because a VIEW_CONVERT_EXPR is valid everywhere an operand of
+ aggregate type is valid.
+
+ ??? Also, if this was a definition of a bitfield, we need to widen
the constant value into the type of the destination variable. This
should not be necessary if GCC represented bitfields properly. */
{
- tree lhs = TREE_OPERAND (stmt, 0);
+ tree orig_lhs = TREE_OPERAND (stmt, 0);
+
+ if (TREE_CODE (orig_lhs) == VIEW_CONVERT_EXPR
+ && val.lattice_val == CONSTANT)
+ {
+ val.const_val = build1 (VIEW_CONVERT_EXPR,
+ TREE_TYPE (TREE_OPERAND (orig_lhs, 0)),
+ val.const_val);
+ orig_lhs = TREE_OPERAND (orig_lhs, 1);
+ }
+
if (val.lattice_val == CONSTANT
- && TREE_CODE (lhs) == COMPONENT_REF
- && DECL_BIT_FIELD (TREE_OPERAND (lhs, 1)))
+ && TREE_CODE (orig_lhs) == COMPONENT_REF
+ && DECL_BIT_FIELD (TREE_OPERAND (orig_lhs, 1)))
{
- tree w = widen_bitfield (val.const_val, TREE_OPERAND (lhs, 1), lhs);
+ tree w = widen_bitfield (val.const_val, TREE_OPERAND (orig_lhs, 1),
+ orig_lhs);
if (w && is_gimple_min_invariant (w))
val.const_val = w;
to the worklist. If no single edge can be determined statically,
return SSA_PROP_VARYING to feed all the outgoing edges to the
propagation engine. */
- *taken_edge_p = find_taken_edge (block, val.const_val);
+ *taken_edge_p = val.const_val ? find_taken_edge (block, val.const_val) : 0;
if (*taken_edge_p)
return SSA_PROP_INTERESTING;
else
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_rename_vars
+ TODO_cleanup_cfg | TODO_dump_func | TODO_rename_vars
| TODO_ggc_collect | TODO_verify_ssa
| TODO_verify_stmts, /* todo_flags_finish */
0 /* letter */
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
tree orig_type, bool base_is_ptr)
{
- tree f, t, field_type, tail_array_field;
+ tree f, t, field_type, tail_array_field, field_offset;
if (TREE_CODE (record_type) != RECORD_TYPE
&& TREE_CODE (record_type) != UNION_TYPE
continue;
if (DECL_BIT_FIELD (f))
continue;
- if (TREE_CODE (DECL_FIELD_OFFSET (f)) != INTEGER_CST)
+
+ field_offset = byte_position (f);
+ if (TREE_CODE (field_offset) != INTEGER_CST)
continue;
/* ??? Java creates "interesting" fields for representing base classes.
tail_array_field = NULL_TREE;
/* Check to see if this offset overlaps with the field. */
- cmp = tree_int_cst_compare (DECL_FIELD_OFFSET (f), offset);
+ cmp = tree_int_cst_compare (field_offset, offset);
if (cmp > 0)
continue;
}
\f
+/* Convert EXPR into a GIMPLE value suitable for substitution on the
+ RHS of an assignment. Insert the necessary statements before
+ iterator *SI_P. */
+
+static tree
+convert_to_gimple_builtin (block_stmt_iterator *si_p, tree expr)
+{
+ tree_stmt_iterator ti;
+ tree stmt = bsi_stmt (*si_p);
+ tree tmp, stmts = NULL;
+
+ /* Gimplify EXPR into the statement list STMTS; TMP is a fresh
+ temporary holding the final value of EXPR. */
+ push_gimplify_context ();
+ tmp = get_initialized_tmp_var (expr, &stmts, NULL);
+ pop_gimplify_context (NULL);
+
+ /* The replacement can expose previously unreferenced variables. */
+ for (ti = tsi_start (stmts); !tsi_end_p (ti); tsi_next (&ti))
+ {
+ find_new_referenced_vars (tsi_stmt_ptr (ti));
+ mark_new_vars_to_rename (tsi_stmt (ti), vars_to_rename);
+ }
+
+ /* Carry the source location of the statement being replaced over to
+ the new statements so debug info stays accurate. */
+ if (EXPR_HAS_LOCATION (stmt))
+ annotate_all_with_locus (&stmts, EXPR_LOCATION (stmt));
+
+ /* Emit the gimplified statements ahead of the original statement;
+ the caller substitutes TMP into it afterwards. */
+ bsi_insert_before (si_p, stmts, BSI_SAME_STMT);
+
+ return tmp;
+}
+
+
/* A simple pass that attempts to fold all builtin functions. This pass
is run after we've propagated as many constants as we can. */
static void
execute_fold_all_builtins (void)
{
+ bool cfg_changed = false;
basic_block bb;
FOR_EACH_BB (bb)
{
print_generic_stmt (dump_file, *stmtp, dump_flags);
}
- if (set_rhs (stmtp, result))
- modify_stmt (*stmtp);
+ if (!set_rhs (stmtp, result))
+ {
+ result = convert_to_gimple_builtin (&i, result);
+ if (result && !set_rhs (stmtp, result))
+ abort ();
+ }
+ modify_stmt (*stmtp);
+ if (maybe_clean_eh_stmt (*stmtp)
+ && tree_purge_dead_eh_edges (bb))
+ cfg_changed = true;
if (dump_file && (dump_flags & TDF_DETAILS))
{
}
}
}
+
+ /* Delete unreachable blocks. */
+ if (cfg_changed)
+ cleanup_tree_cfg ();
}
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_verify_ssa, /* todo_flags_finish */
+ TODO_dump_func
+ | TODO_verify_ssa
+ | TODO_rename_vars, /* todo_flags_finish */
0 /* letter */
};