/* We may be able to fold away calls to builtin functions if their
arguments are constants. */
else if (code == CALL_EXPR
- && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
- && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0))
- == FUNCTION_DECL)
- && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)))
+ && TREE_CODE (CALL_EXPR_FN (rhs)) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (rhs), 0)) == FUNCTION_DECL
+ && DECL_BUILT_IN (TREE_OPERAND (CALL_EXPR_FN (rhs), 0)))
{
if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
{
tree *orig, var;
- tree fndecl, arglist;
size_t i = 0;
ssa_op_iter iter;
use_operand_p var_p;
/* Substitute operands with their values and try to fold. */
replace_uses_in (stmt, NULL, const_val);
- fndecl = get_callee_fndecl (rhs);
- arglist = TREE_OPERAND (rhs, 1);
- retval = fold_builtin (fndecl, arglist, false);
+ retval = fold_call_expr (rhs, false);
/* Restore operands to their original form. */
i = 0;
ccp_initialize ();
ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
if (ccp_finalize ())
- return (TODO_cleanup_cfg | TODO_update_ssa | TODO_update_smt_usage
- | TODO_remove_unused_locals);
+ return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
else
return 0;
}
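
For readers following the conversion in the hunk above that recognizes builtin calls: the old code reached the callee through TREE_OPERAND (rhs, 0) and the argument TREE_LIST through TREE_OPERAND (rhs, 1); the new accessors hide the operand positions. A minimal sketch of the new idiom, assuming `rhs' is a CALL_EXPR (the helper name is invented for illustration):

    /* Sketch: recognize a direct call to a builtin under the new
       accessors.  */
    static bool
    call_is_builtin_sketch (tree rhs)
    {
      tree fn = CALL_EXPR_FN (rhs);   /* the callee expression  */
      return (TREE_CODE (fn) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
              && DECL_BUILT_IN (TREE_OPERAND (fn, 0)));
    }

fold_call_expr likewise subsumes the old get_callee_fndecl/fold_builtin pair: it takes the whole CALL_EXPR and extracts the callee and arguments itself.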
if (!integer_zerop (elt_offset))
idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
- return build4 (ARRAY_REF, orig_type, base, idx, min_idx,
- size_int (tree_low_cst (elt_size, 1)
- / (TYPE_ALIGN_UNIT (elt_type))));
+ return build4 (ARRAY_REF, orig_type, base, idx, NULL_TREE, NULL_TREE);
}
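
The two NULL_TREE operands are not lost information: for ARRAY_REF, operands 2 and 3 default to the array domain's minimum index and the element size, and consumers recompute them on demand. A minimal sketch, reusing the names from the hunk above:

    /* Operands 2 and 3 left NULL_TREE; the defaults are derived
       from the array type when needed.  */
    tree ref  = build4 (ARRAY_REF, orig_type, base, idx,
                        NULL_TREE, NULL_TREE);
    tree low  = array_ref_low_bound (ref);     /* TYPE_MIN_VALUE of domain  */
    tree size = array_ref_element_size (ref);  /* TYPE_SIZE_UNIT of element */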
is the desired result type. */
/* ??? This doesn't handle class inheritance. */
-static tree
+tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
tree orig_type, bool base_is_ptr)
{
tree f, t, field_type, tail_array_field, field_offset;
+ tree ret;
+ tree new_base;
if (TREE_CODE (record_type) != RECORD_TYPE
&& TREE_CODE (record_type) != UNION_TYPE
/* If we matched, then set offset to the displacement into
this field. */
- offset = t;
- goto found;
+ if (base_is_ptr)
+ new_base = build1 (INDIRECT_REF, record_type, base);
+ else
+ new_base = base;
+ new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);
+
+ /* Recurse to possibly find the match. */
+ ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type);
+ if (ret)
+ return ret;
+ ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
+ orig_type, false);
+ if (ret)
+ return ret;
}
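
With the `found:' label gone (see the removal below), the control flow is now explicit: build the COMPONENT_REF for the matching field, then recurse on the remaining displacement, trying an array fold before a nested component fold. A hedged worked example, with an assumed struct layout:

    /* Assume: struct T { int x, y; };
               struct S { int a[2]; struct T t; };  (offset of t = 8)
       Folding an offset of 8 from p, with p of type struct S *:
         new_base = *p            -- INDIRECT_REF, since base_is_ptr
         new_base = (*p).t        -- COMPONENT_REF for field t
         recurse with offset 0    -- yields (*p).t.x when the desired
                                     type is int  */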
if (!tail_array_field)
field_type = TREE_TYPE (f);
offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
- found:
/* If we get here, we've got an aggregate field, and a possibly
nonzero offset into them. Recurse and hope for a valid match. */
if (base_is_ptr)
ccp_fold_builtin (tree stmt, tree fn)
{
tree result, val[3];
- tree callee, arglist, a;
+ tree callee, a;
int arg_mask, i, type;
bitmap visited;
bool ignore;
+ call_expr_arg_iterator iter;
+ int nargs;
ignore = TREE_CODE (stmt) != GIMPLE_MODIFY_STMT;
/* First try the generic builtin folder. If that succeeds, return the
result directly. */
- callee = get_callee_fndecl (fn);
- arglist = TREE_OPERAND (fn, 1);
- result = fold_builtin (callee, arglist, ignore);
+ result = fold_call_expr (fn, ignore);
if (result)
{
if (ignore)
}
/* Ignore MD builtins. */
+ callee = get_callee_fndecl (fn);
if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
return NULL_TREE;
/* If the builtin could not be folded, and it has no argument list,
we're done. */
- if (!arglist)
+ nargs = call_expr_nargs (fn);
+ if (nargs == 0)
return NULL_TREE;
/* Limit the work only for builtins we know how to simplify. */
visited = BITMAP_ALLOC (NULL);
memset (val, 0, sizeof (val));
- for (i = 0, a = arglist;
- arg_mask;
- i++, arg_mask >>= 1, a = TREE_CHAIN (a))
- if (arg_mask & 1)
- {
- bitmap_clear (visited);
- if (!get_maxval_strlen (TREE_VALUE (a), &val[i], visited, type))
- val[i] = NULL_TREE;
- }
+ init_call_expr_arg_iterator (fn, &iter);
+ for (i = 0; arg_mask; i++, arg_mask >>= 1)
+ {
+ a = next_call_expr_arg (&iter);
+ if (arg_mask & 1)
+ {
+ bitmap_clear (visited);
+ if (!get_maxval_strlen (a, &val[i], visited, type))
+ val[i] = NULL_TREE;
+ }
+ }
BITMAP_FREE (visited);
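
The loop above advances the iterator in lockstep with arg_mask, which is why it calls init_call_expr_arg_iterator and next_call_expr_arg by hand. Where no such pairing is needed, the usual idiom is the wrapper macro from tree.h; a minimal sketch:

    /* Sketch: visit every argument of CALL_EXPR `fn'.  */
    call_expr_arg_iterator iter;
    tree arg;
    FOR_EACH_CALL_EXPR_ARG (arg, iter, fn)
      if (TREE_SIDE_EFFECTS (arg))
        break;   /* e.g. bail out on arguments with side effects  */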
break;
case BUILT_IN_STRCPY:
- if (val[1] && is_gimple_val (val[1]))
- result = fold_builtin_strcpy (callee, arglist, val[1]);
+ if (val[1] && is_gimple_val (val[1]) && nargs == 2)
+ result = fold_builtin_strcpy (callee,
+ CALL_EXPR_ARG (fn, 0),
+ CALL_EXPR_ARG (fn, 1),
+ val[1]);
break;
case BUILT_IN_STRNCPY:
- if (val[1] && is_gimple_val (val[1]))
- result = fold_builtin_strncpy (callee, arglist, val[1]);
+ if (val[1] && is_gimple_val (val[1]) && nargs == 3)
+ result = fold_builtin_strncpy (callee,
+ CALL_EXPR_ARG (fn, 0),
+ CALL_EXPR_ARG (fn, 1),
+ CALL_EXPR_ARG (fn, 2),
+ val[1]);
break;
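
The new `nargs == 2' / `nargs == 3' guards are not cosmetic: a TREE_LIST walk simply ran out of TREE_CHAIN links on a short (malformed) argument list, whereas CALL_EXPR_ARG indexes operands directly and must never be applied past call_expr_nargs. The defensive pattern, sketched with a hypothetical folder name:

    /* Sketch: only touch positional arguments that provably exist.
       fold_two_arg_sketch is hypothetical.  */
    if (call_expr_nargs (fn) == 2)
      result = fold_two_arg_sketch (callee,
                                    CALL_EXPR_ARG (fn, 0),
                                    CALL_EXPR_ARG (fn, 1));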
case BUILT_IN_FPUTS:
- result = fold_builtin_fputs (arglist,
+ result = fold_builtin_fputs (CALL_EXPR_ARG (fn, 0),
+ CALL_EXPR_ARG (fn, 1),
TREE_CODE (stmt) != GIMPLE_MODIFY_STMT, 0,
val[0]);
break;
case BUILT_IN_FPUTS_UNLOCKED:
- result = fold_builtin_fputs (arglist,
+ result = fold_builtin_fputs (CALL_EXPR_ARG (fn, 0),
+ CALL_EXPR_ARG (fn, 1),
TREE_CODE (stmt) != GIMPLE_MODIFY_STMT, 1,
val[0]);
break;
case BUILT_IN_MEMMOVE_CHK:
case BUILT_IN_MEMSET_CHK:
if (val[2] && is_gimple_val (val[2]))
- result = fold_builtin_memory_chk (callee, arglist, val[2], ignore,
+ result = fold_builtin_memory_chk (callee,
+ CALL_EXPR_ARG (fn, 0),
+ CALL_EXPR_ARG (fn, 1),
+ CALL_EXPR_ARG (fn, 2),
+ CALL_EXPR_ARG (fn, 3),
+ val[2], ignore,
DECL_FUNCTION_CODE (callee));
break;
case BUILT_IN_STRCPY_CHK:
case BUILT_IN_STPCPY_CHK:
if (val[1] && is_gimple_val (val[1]))
- result = fold_builtin_stxcpy_chk (callee, arglist, val[1], ignore,
+ result = fold_builtin_stxcpy_chk (callee,
+ CALL_EXPR_ARG (fn, 0),
+ CALL_EXPR_ARG (fn, 1),
+ CALL_EXPR_ARG (fn, 2),
+ val[1], ignore,
DECL_FUNCTION_CODE (callee));
break;
case BUILT_IN_STRNCPY_CHK:
if (val[2] && is_gimple_val (val[2]))
- result = fold_builtin_strncpy_chk (arglist, val[2]);
+ result = fold_builtin_strncpy_chk (CALL_EXPR_ARG (fn, 0),
+ CALL_EXPR_ARG (fn, 1),
+ CALL_EXPR_ARG (fn, 2),
+ CALL_EXPR_ARG (fn, 3),
+ val[2]);
break;
case BUILT_IN_SNPRINTF_CHK:
case BUILT_IN_VSNPRINTF_CHK:
if (val[1] && is_gimple_val (val[1]))
- result = fold_builtin_snprintf_chk (arglist, val[1],
+ result = fold_builtin_snprintf_chk (fn, val[1],
DECL_FUNCTION_CODE (callee));
break;
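
Note the asymmetry with the cases above: fold_builtin_snprintf_chk now receives the whole CALL_EXPR rather than unpacked arguments, since the *printf_chk builtins are variadic and have no fixed arity to unpack. Inside such a folder, fixed parameters are read positionally and the variadic tail is only counted; a minimal sketch (the positions are illustrative, not the actual snprintf_chk signature):

    int nargs = call_expr_nargs (fn);   /* total argument count      */
    tree dest = CALL_EXPR_ARG (fn, 0);  /* a fixed leading parameter */
    /* First variadic argument, if any, for a builtin assumed to
       have 4 fixed parameters.  */
    tree tail = nargs > 4 ? CALL_EXPR_ARG (fn, 4) : NULL_TREE;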
then we may need to fold instances of *&VAR into VAR, etc. */
if (walk_tree (stmt_p, fold_stmt_r, &fold_stmt_r_data, NULL))
{
- *stmt_p
- = build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP],
- NULL);
+ *stmt_p = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
return true;
}
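
build_call_expr is the variadic replacement for build_function_call_expr: it takes the callee FUNCTION_DECL, an argument count, and then the arguments themselves, so no TREE_LIST has to be assembled. A sketch of both shapes (`s1' and `s2' are assumed to be existing trees):

    /* Zero-argument call, as in the trap above.  */
    tree trap = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
    /* Two-argument call for comparison.  */
    tree cmp  = build_call_expr (built_in_decls[BUILT_IN_STRCMP], 2, s1, s2);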
copying EH region info to the new node. Easier to just do it
here where we can just smash the call operand. Also
CALL_EXPR_RETURN_SLOT_OPT needs to be handled correctly and
- copied, fold_ternary does not have not information. */
- callee = TREE_OPERAND (rhs, 0);
+ copied, fold_call_expr does not have that information. */
+ callee = CALL_EXPR_FN (rhs);
if (TREE_CODE (callee) == OBJ_TYPE_REF
&& lang_hooks.fold_obj_type_ref
&& TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
t = lang_hooks.fold_obj_type_ref (callee, t);
if (t)
{
- TREE_OPERAND (rhs, 0) = t;
+ CALL_EXPR_FN (rhs) = t;
changed = true;
}
}
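
Smashing the callee in place works because CALL_EXPR_FN is a plain operand accessor and hence a valid lvalue. For orientation, the operand layout under the variadic representation (as declared in tree.h of this era; treat the exact positions as a reference note):

    /* operand 0:   operand count (VL_EXP bookkeeping)
       operand 1:   the callee        -> CALL_EXPR_FN
       operand 2:   the static chain  -> CALL_EXPR_STATIC_CHAIN
       operand 3+N: argument N        -> CALL_EXPR_ARG (t, N)  */
    CALL_EXPR_FN (rhs) = t;   /* i.e. TREE_OPERAND (rhs, 1) = t  */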