/* Conditional constant propagation pass for the GNU compiler.
- Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
+ Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006
Free Software Foundation, Inc.
Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
/* If the RHS is a memory load, see if the VUSEs associated with
it are a valid constant for that memory load. */
prop_value_t *val = get_value_loaded_by (stmt, const_val);
- if (val && val->mem_ref
- && operand_equal_p (val->mem_ref, rhs, 0))
- return val->value;
- else
- return NULL_TREE;
+ if (val && val->mem_ref)
+ {
+ if (operand_equal_p (val->mem_ref, rhs, 0))
+ return val->value;
+
+ /* If RHS is extracting REALPART_EXPR or IMAGPART_EXPR of a
+ complex type with a known constant value, return it. */
+ if ((TREE_CODE (rhs) == REALPART_EXPR
+ || TREE_CODE (rhs) == IMAGPART_EXPR)
+ && operand_equal_p (val->mem_ref, TREE_OPERAND (rhs, 0), 0))
+ return fold_build1 (TREE_CODE (rhs), TREE_TYPE (rhs), val->value);
+ }
+ return NULL_TREE;
}
/* Unary operators. Note that we know the single operand must
}
if (ctor == NULL_TREE
- || TREE_CODE (ctor) != CONSTRUCTOR
+ || (TREE_CODE (ctor) != CONSTRUCTOR
+ && TREE_CODE (ctor) != STRING_CST)
|| !TREE_STATIC (ctor))
return NULL_TREE;
return NULL_TREE;
}
+ /* Fold read from constant string. */
+ if (TREE_CODE (ctor) == STRING_CST)
+ {
+ if ((TYPE_MODE (TREE_TYPE (t))
+ == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
+ && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
+ == MODE_INT)
+ && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
+ && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
+ return build_int_cst (TREE_TYPE (t), (TREE_STRING_POINTER (ctor)
+ [TREE_INT_CST_LOW (idx)]));
+ return NULL_TREE;
+ }
+
/* Whoo-hoo! I'll fold ya baby. Yeah! */
FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
if (tree_int_cst_equal (cfield, idx))
evaluate_stmt (tree stmt)
{
prop_value_t val;
- tree simplified;
+ tree simplified = NULL_TREE;
ccp_lattice_t likelyvalue = likely_value (stmt);
val.mem_ref = NULL_TREE;
simplified = ccp_fold (stmt);
/* If the statement is likely to have a VARYING result, then do not
bother folding the statement. */
- else if (likelyvalue == VARYING)
+ if (likelyvalue == VARYING)
simplified = get_rhs (stmt);
/* If the statement is an ARRAY_REF or COMPONENT_REF into constant
aggregates, extract the referenced constant. Otherwise the
statement is likely to have an UNDEFINED value, and there will be
nothing to do. Note that fold_const_aggregate_ref returns
NULL_TREE if the first case does not match. */
- else
+ else if (!simplified)
simplified = fold_const_aggregate_ref (get_rhs (stmt));
if (simplified && is_gimple_min_invariant (simplified))
}
-static void
+static unsigned int
do_ssa_ccp (void)
{
execute_ssa_ccp (false);
+ return 0;
}
TV_TREE_CCP, /* tv_id */
PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
0, /* properties_provided */
- 0, /* properties_destroyed */
+ PROP_smt_usage, /* properties_destroyed */
0, /* todo_flags_start */
TODO_cleanup_cfg | TODO_dump_func | TODO_update_ssa
| TODO_ggc_collect | TODO_verify_ssa
- | TODO_verify_stmts, /* todo_flags_finish */
+ | TODO_verify_stmts | TODO_update_smt_usage, /* todo_flags_finish */
0 /* letter */
};
-static void
+static unsigned int
do_ssa_store_ccp (void)
{
/* If STORE-CCP is not enabled, we just run regular CCP. */
execute_ssa_ccp (flag_tree_store_ccp != 0);
+ return 0;
}
static bool
TV_TREE_STORE_CCP, /* tv_id */
PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
0, /* properties_provided */
- 0, /* properties_destroyed */
+ PROP_smt_usage, /* properties_destroyed */
0, /* todo_flags_start */
TODO_dump_func | TODO_update_ssa
| TODO_ggc_collect | TODO_verify_ssa
| TODO_cleanup_cfg
- | TODO_verify_stmts, /* todo_flags_finish */
+ | TODO_verify_stmts | TODO_update_smt_usage, /* todo_flags_finish */
0 /* letter */
};
if (TREE_CODE (min_idx) != INTEGER_CST)
break;
- array_idx = convert (TREE_TYPE (min_idx), array_idx);
+ array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
if (!integer_zerop (min_idx))
array_idx = int_const_binop (MINUS_EXPR, array_idx,
min_idx, 0);
}
/* Convert the index to a byte offset. */
- array_idx = convert (sizetype, array_idx);
+ array_idx = fold_convert (sizetype, array_idx);
array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
/* Update the operands for the next round, or for folding. */
/* ??? Should perhaps do this in fold proper. However, doing it
there requires that we create a new CALL_EXPR, and that requires
copying EH region info to the new node. Easier to just do it
- here where we can just smash the call operand. */
+ here where we can just smash the call operand. Also
+ CALL_EXPR_RETURN_SLOT_OPT needs to be handled correctly and
+ copied, fold_ternary does not have that information. */
callee = TREE_OPERAND (rhs, 0);
if (TREE_CODE (callee) == OBJ_TYPE_REF
&& lang_hooks.fold_obj_type_ref
/* A simple pass that attempts to fold all builtin functions. This pass
is run after we've propagated as many constants as we can. */
-static void
+static unsigned int
execute_fold_all_builtins (void)
{
bool cfg_changed = false;
/* Delete unreachable blocks. */
if (cfg_changed)
cleanup_tree_cfg ();
+ return 0;
}