+2009-07-17 Aldy Hernandez <aldyh@redhat.com>
+ Manuel López-Ibáñez <manu@gcc.gnu.org>
+
+ PR 40435
+ * tree-complex.c, tree-loop-distribution.c, tree.c, tree.h,
+ builtins.c, fold-const.c, omp-low.c, cgraphunit.c, tree-ssa-ccp.c,
+ tree-ssa-dom.c, gimple-low.c, expr.c, tree-ssa-ifcombine.c,
+ c-decl.c, stor-layout.c, tree-if-conv.c, c-typeck.c, gimplify.c,
+ calls.c, tree-sra.c, tree-mudflap.c, tree-ssa-copy.c,
+ tree-ssa-forwprop.c, c-convert.c, c-omp.c, varasm.c,
+ tree-inline.c, c-common.c, c-common.h, gimple.c,
+ tree-switch-conversion.c, gimple.h, tree-cfg.c, c-parser.c,
+ convert.c: Add location argument to fold_{unary,binary,ternary},
+ fold_build[123], build_call_expr, build_size_arg,
+ build_fold_addr_expr, build_call_array, non_lvalue, size_diffop,
+ fold_build1_initializer, fold_build2_initializer,
+ fold_build3_initializer, fold_build_call_array,
+ fold_build_call_array_initializer, fold_single_bit_test,
+ omit_one_operand, omit_two_operands, invert_truthvalue,
+ fold_truth_not_expr, build_fold_indirect_ref, fold_indirect_ref,
+ combine_comparisons, fold_builtin_*, fold_call_expr,
+ build_range_check, maybe_fold_offset_to_address, round_up,
+ round_down.
+
2009-07-16 Jason Merrill <jason@redhat.com>
PR libstdc++/37907
static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
-static tree stabilize_va_list (tree, int);
+static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
-static tree fold_builtin_expect (tree, tree);
+static tree fold_builtin_expect (location_t, tree, tree);
static tree fold_builtin_classify_type (tree);
-static tree fold_builtin_strlen (tree);
-static tree fold_builtin_inf (tree, int);
+static tree fold_builtin_strlen (location_t, tree);
+static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
-static tree rewrite_call_expr (tree, int, tree, int, ...);
+static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
-static tree fold_trunc_transparent_mathfn (tree, tree);
+static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
-static tree fold_builtin_sqrt (tree, tree);
-static tree fold_builtin_cbrt (tree, tree);
-static tree fold_builtin_pow (tree, tree, tree, tree);
-static tree fold_builtin_powi (tree, tree, tree, tree);
-static tree fold_builtin_cos (tree, tree, tree);
-static tree fold_builtin_cosh (tree, tree, tree);
+static tree fold_builtin_sqrt (location_t, tree, tree);
+static tree fold_builtin_cbrt (location_t, tree, tree);
+static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
+static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
+static tree fold_builtin_cos (location_t, tree, tree, tree);
+static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
-static tree fold_builtin_trunc (tree, tree);
-static tree fold_builtin_floor (tree, tree);
-static tree fold_builtin_ceil (tree, tree);
-static tree fold_builtin_round (tree, tree);
-static tree fold_builtin_int_roundingfn (tree, tree);
+static tree fold_builtin_trunc (location_t, tree, tree);
+static tree fold_builtin_floor (location_t, tree, tree);
+static tree fold_builtin_ceil (location_t, tree, tree);
+static tree fold_builtin_round (location_t, tree, tree);
+static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
-static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
-static tree fold_builtin_strchr (tree, tree, tree);
-static tree fold_builtin_memchr (tree, tree, tree, tree);
-static tree fold_builtin_memcmp (tree, tree, tree);
-static tree fold_builtin_strcmp (tree, tree);
-static tree fold_builtin_strncmp (tree, tree, tree);
-static tree fold_builtin_signbit (tree, tree);
-static tree fold_builtin_copysign (tree, tree, tree, tree);
-static tree fold_builtin_isascii (tree);
-static tree fold_builtin_toascii (tree);
-static tree fold_builtin_isdigit (tree);
-static tree fold_builtin_fabs (tree, tree);
-static tree fold_builtin_abs (tree, tree);
-static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
+static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
+static tree fold_builtin_strchr (location_t, tree, tree, tree);
+static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
+static tree fold_builtin_memcmp (location_t, tree, tree, tree);
+static tree fold_builtin_strcmp (location_t, tree, tree);
+static tree fold_builtin_strncmp (location_t, tree, tree, tree);
+static tree fold_builtin_signbit (location_t, tree, tree);
+static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
+static tree fold_builtin_isascii (location_t, tree);
+static tree fold_builtin_toascii (location_t, tree);
+static tree fold_builtin_isdigit (location_t, tree);
+static tree fold_builtin_fabs (location_t, tree, tree);
+static tree fold_builtin_abs (location_t, tree, tree);
+static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
enum tree_code);
-static tree fold_builtin_n (tree, tree *, int, bool);
-static tree fold_builtin_0 (tree, bool);
-static tree fold_builtin_1 (tree, tree, bool);
-static tree fold_builtin_2 (tree, tree, tree, bool);
-static tree fold_builtin_3 (tree, tree, tree, tree, bool);
-static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
-static tree fold_builtin_varargs (tree, tree, bool);
-
-static tree fold_builtin_strpbrk (tree, tree, tree);
-static tree fold_builtin_strstr (tree, tree, tree);
-static tree fold_builtin_strrchr (tree, tree, tree);
-static tree fold_builtin_strcat (tree, tree);
-static tree fold_builtin_strncat (tree, tree, tree);
-static tree fold_builtin_strspn (tree, tree);
-static tree fold_builtin_strcspn (tree, tree);
-static tree fold_builtin_sprintf (tree, tree, tree, int);
+static tree fold_builtin_n (location_t, tree, tree *, int, bool);
+static tree fold_builtin_0 (location_t, tree, bool);
+static tree fold_builtin_1 (location_t, tree, tree, bool);
+static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
+static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
+static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
+static tree fold_builtin_varargs (location_t, tree, tree, bool);
+
+static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
+static tree fold_builtin_strstr (location_t, tree, tree, tree);
+static tree fold_builtin_strrchr (location_t, tree, tree, tree);
+static tree fold_builtin_strcat (location_t, tree, tree);
+static tree fold_builtin_strncat (location_t, tree, tree, tree);
+static tree fold_builtin_strspn (location_t, tree, tree);
+static tree fold_builtin_strcspn (location_t, tree, tree);
+static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
-static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
-static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
-static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
-static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
-static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
+static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
+static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
+static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
+static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
+static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
enum built_in_function);
static bool init_target_chars (void);
and return that. This would perhaps not be valid if we were dealing
with named arrays in addition to literal string constants. */
- return size_diffop (size_int (max), offset_node);
+ return size_diffop_loc (input_location, size_int (max), offset_node);
}
/* We have a known offset into the string. Start searching there for
enum machine_mode mode;
bool errno_set = false;
tree arg;
+ location_t loc = EXPR_LOCATION (exp);
if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
return NULL_RTX;
get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
real_from_string (&r, buf);
result = build_call_expr (isgr_fn, 2,
- fold_build1 (ABS_EXPR, type, arg),
+ fold_build1_loc (loc, ABS_EXPR, type, arg),
build_real (type, r));
return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
}
get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
real_from_string (&r, buf);
result = build_call_expr (isle_fn, 2,
- fold_build1 (ABS_EXPR, type, arg),
+ fold_build1_loc (loc, ABS_EXPR, type, arg),
build_real (type, r));
return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
}
real_from_string (&rmax, buf);
sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
real_from_string (&rmin, buf);
- arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
+ arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
result = build_call_expr (isle_fn, 2, arg,
build_real (type, rmax));
result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
enum machine_mode mode;
tree arg, sinp, cosp;
int result;
+ location_t loc = EXPR_LOCATION (exp);
if (!validate_arglist (exp, REAL_TYPE,
POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
target2 = gen_reg_rtx (mode);
op0 = expand_normal (arg);
- op1 = expand_normal (build_fold_indirect_ref (sinp));
- op2 = expand_normal (build_fold_indirect_ref (cosp));
+ op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
+ op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
/* Compute into target1 and target2.
Set TARGET to wherever the result comes back. */
tree arg, type;
enum machine_mode mode;
rtx op0, op1, op2;
+ location_t loc = EXPR_LOCATION (exp);
if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
return NULL_RTX;
fn = build_fn_decl (name, fntype);
}
- narg = fold_build2 (COMPLEX_EXPR, ctype,
+ narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
build_real (type, dconst0), arg);
/* Make sure not to fold the cexp call again. */
if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
{
tree type = TREE_TYPE (exp);
- tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
+ tree result = fold_builtin_strstr (EXPR_LOCATION (exp),
+ CALL_EXPR_ARG (exp, 0),
CALL_EXPR_ARG (exp, 1), type);
if (result)
return expand_expr (result, target, mode, EXPAND_NORMAL);
if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
{
tree type = TREE_TYPE (exp);
- tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
+ tree result = fold_builtin_strchr (EXPR_LOCATION (exp),
+ CALL_EXPR_ARG (exp, 0),
CALL_EXPR_ARG (exp, 1), type);
if (result)
return expand_expr (result, target, mode, EXPAND_NORMAL);
if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
{
tree type = TREE_TYPE (exp);
- tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
+ tree result = fold_builtin_strrchr (EXPR_LOCATION (exp),
+ CALL_EXPR_ARG (exp, 0),
CALL_EXPR_ARG (exp, 1), type);
if (result)
return expand_expr (result, target, mode, EXPAND_NORMAL);
if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
{
tree type = TREE_TYPE (exp);
- tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
+ tree result = fold_builtin_strpbrk (EXPR_LOCATION (exp),
+ CALL_EXPR_ARG (exp, 0),
CALL_EXPR_ARG (exp, 1), type);
if (result)
return expand_expr (result, target, mode, EXPAND_NORMAL);
unsigned int dest_align
= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
rtx dest_mem, src_mem, dest_addr, len_rtx;
- tree result = fold_builtin_memory_op (dest, src, len,
+ tree result = fold_builtin_memory_op (EXPR_LOCATION (exp),
+ dest, src, len,
TREE_TYPE (TREE_TYPE (fndecl)),
false, /*endp=*/0);
HOST_WIDE_INT expected_size = -1;
unsigned int dest_align
= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
rtx dest_mem, src_mem, len_rtx;
- tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
+ tree result = fold_builtin_memory_op (UNKNOWN_LOCATION,
+ dest, src, len, type, false, endp);
if (result)
{
tree type, rtx target, enum machine_mode mode,
int ignore)
{
- tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
+ tree result = fold_builtin_memory_op (UNKNOWN_LOCATION,
+ dest, src, len, type, ignore, /*endp=*/3);
if (result)
{
{
tree type = TREE_TYPE (exp);
tree src, dest, size;
+ location_t loc = EXPR_LOCATION (exp);
if (!validate_arglist (exp,
POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
This is done this way so that if it isn't expanded inline, we fall
back to calling bcopy instead of memmove. */
return expand_builtin_memmove_args (dest, src,
- fold_convert (sizetype, size),
+ fold_convert_loc (loc, sizetype, size),
type, const0_rtx, VOIDmode,
ignore);
}
expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
rtx target, enum machine_mode mode)
{
- tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
+ tree result = fold_builtin_strcpy (UNKNOWN_LOCATION,
+ fndecl, dest, src, 0);
if (result)
return expand_expr (result, target, mode, EXPAND_NORMAL);
return expand_movstr (dest, src, target, /*endp=*/0);
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
tree dst, src;
+ location_t loc = EXPR_LOCATION (exp);
if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
return NULL_RTX;
if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
return expand_movstr (dst, src, target, /*endp=*/2);
- lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
+ lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
target, mode, /*endp=*/2);
expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
{
tree fndecl = get_callee_fndecl (exp);
+ location_t loc = EXPR_LOCATION (exp);
if (validate_arglist (exp,
POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
tree src = CALL_EXPR_ARG (exp, 1);
tree len = CALL_EXPR_ARG (exp, 2);
tree slen = c_strlen (src, 1);
- tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
+ tree result = fold_builtin_strncpy (EXPR_LOCATION (exp),
+ fndecl, dest, src, len, slen);
if (result)
{
if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
return NULL_RTX;
- slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
+ slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
/* We're required to pad with trailing zeros if the requested
len is greater than strlen(s2)+1. In that case try to
expand_builtin_bzero (tree exp)
{
tree dest, size;
+ location_t loc = EXPR_LOCATION (exp);
if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
return NULL_RTX;
calling bzero instead of memset. */
return expand_builtin_memset_args (dest, integer_zero_node,
- fold_convert (sizetype, size),
+ fold_convert_loc (loc, sizetype, size),
const0_rtx, VOIDmode, exp);
}
INTEGER_TYPE, VOID_TYPE))
{
tree type = TREE_TYPE (exp);
- tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
+ tree result = fold_builtin_memchr (EXPR_LOCATION (exp),
+ CALL_EXPR_ARG (exp, 0),
CALL_EXPR_ARG (exp, 1),
CALL_EXPR_ARG (exp, 2), type);
if (result)
static rtx
expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
{
+ location_t loc = EXPR_LOCATION (exp);
+
if (!validate_arglist (exp,
POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
return NULL_RTX;
else
{
- tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
+ tree result = fold_builtin_memcmp (EXPR_LOCATION (exp),
+ CALL_EXPR_ARG (exp, 0),
CALL_EXPR_ARG (exp, 1),
CALL_EXPR_ARG (exp, 2));
if (result)
arg1_rtx = get_memory_rtx (arg1, len);
arg2_rtx = get_memory_rtx (arg2, len);
- arg3_rtx = expand_normal (fold_convert (sizetype, len));
+ arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
/* Set MEM_SIZE as appropriate. */
if (CONST_INT_P (arg3_rtx))
static rtx
expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
{
+ location_t loc = EXPR_LOCATION (exp);
+
if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
return NULL_RTX;
else
{
- tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
+ tree result = fold_builtin_strcmp (loc,
+ CALL_EXPR_ARG (exp, 0),
CALL_EXPR_ARG (exp, 1));
if (result)
return expand_expr (result, target, mode, EXPAND_NORMAL);
static rtx
expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
{
+ location_t loc = EXPR_LOCATION (exp);
+
if (!validate_arglist (exp,
POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
return NULL_RTX;
else
{
- tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
+ tree result = fold_builtin_strncmp (EXPR_LOCATION (exp),
+ CALL_EXPR_ARG (exp, 0),
CALL_EXPR_ARG (exp, 1),
CALL_EXPR_ARG (exp, 2));
if (result)
len2 = c_strlen (arg2, 1);
if (len1)
- len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
+ len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
if (len2)
- len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
+ len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
/* If we don't have a constant length for the first, use the length
of the second, if we know it. We don't require a constant for
return NULL_RTX;
/* The actual new length parameter is MIN(len,arg3). */
- len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
- fold_convert (TREE_TYPE (len), arg3));
+ len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
+ fold_convert_loc (loc, TREE_TYPE (len), arg3));
/* If we don't have POINTER_TYPE, call the function. */
if (arg1_align == 0 || arg2_align == 0)
static rtx
expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
{
+ location_t loc = EXPR_LOCATION (exp);
+
if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
return NULL_RTX;
else
newdst = build_call_expr (strlen_fn, 1, dst);
/* Create (dst p+ strlen (dst)). */
- newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
+ newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
+ TREE_TYPE (dst), dst, newdst);
newdst = builtin_save_expr (newdst);
if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
if (validate_arglist (exp,
POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
{
- tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
+ tree result = fold_builtin_strncat (EXPR_LOCATION (exp),
+ CALL_EXPR_ARG (exp, 0),
CALL_EXPR_ARG (exp, 1),
CALL_EXPR_ARG (exp, 2));
if (result)
{
if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
{
- tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
+ tree result = fold_builtin_strspn (EXPR_LOCATION (exp),
+ CALL_EXPR_ARG (exp, 0),
CALL_EXPR_ARG (exp, 1));
if (result)
return expand_expr (result, target, mode, EXPAND_NORMAL);
{
if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
{
- tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
+ tree result = fold_builtin_strcspn (EXPR_LOCATION (exp),
+ CALL_EXPR_ARG (exp, 0),
CALL_EXPR_ARG (exp, 1));
if (result)
return expand_expr (result, target, mode, EXPAND_NORMAL);
from multiple evaluations. */
static tree
-stabilize_va_list (tree valist, int needs_lvalue)
+stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
{
tree p1 = build_pointer_type (TREE_TYPE (vatype));
- valist = build_fold_addr_expr_with_type (valist, p1);
+ valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
}
}
else
return valist;
pt = build_pointer_type (vatype);
- valist = fold_build1 (ADDR_EXPR, pt, valist);
+ valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
TREE_SIDE_EFFECTS (valist) = 1;
}
if (TREE_SIDE_EFFECTS (valist))
valist = save_expr (valist);
- valist = build_fold_indirect_ref (valist);
+ valist = build_fold_indirect_ref_loc (loc, valist);
}
return valist;
{
rtx nextarg;
tree valist;
+ location_t loc = EXPR_LOCATION (exp);
if (call_expr_nargs (exp) < 2)
{
- error ("too few arguments to function %<va_start%>");
+ error_at (loc, "too few arguments to function %<va_start%>");
return const0_rtx;
}
return const0_rtx;
nextarg = expand_builtin_next_arg ();
- valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
+ valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
if (targetm.expand_builtin_va_start)
targetm.expand_builtin_va_start (valist, nextarg);
&& !integer_zerop (TYPE_SIZE (type)))
{
t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
- fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
+ fold_build2 (POINTER_PLUS_EXPR,
+ TREE_TYPE (valist),
valist_tmp, size_int (boundary - 1)));
gimplify_and_add (t, pre_p);
if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
{
/* Small args are padded downward. */
- t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
+ t = fold_build2_loc (input_location, GT_EXPR, sizetype,
+ rounded_size, size_int (align));
t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
size_binop (MINUS_EXPR, rounded_size, type_size));
- addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
+ addr = fold_build2 (POINTER_PLUS_EXPR,
+ TREE_TYPE (addr), addr, t);
}
/* Compute new value for AP. */
tree
build_va_arg_indirect_ref (tree addr)
{
- addr = build_fold_indirect_ref (addr);
+ addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
mf_mark (addr);
tree valist = TREE_OPERAND (*expr_p, 0);
tree type = TREE_TYPE (*expr_p);
tree t;
- location_t loc = EXPR_HAS_LOCATION (*expr_p) ? EXPR_LOCATION (*expr_p) :
- UNKNOWN_LOCATION;
+ location_t loc = EXPR_LOCATION (*expr_p);
/* Verify that valist is of the proper type. */
have_va_type = TREE_TYPE (valist);
/* Before the abort, allow the evaluation of the va_list
expression to exit or longjmp. */
gimplify_and_add (valist, pre_p);
- t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
+ t = build_call_expr_loc (loc,
+ implicit_built_in_decls[BUILT_IN_TRAP], 0);
gimplify_and_add (t, pre_p);
/* This is dead code, but go ahead and finish so that the
if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
{
tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
- valist = fold_convert (p1, build_fold_addr_expr (valist));
+ valist = fold_convert_loc (loc, p1,
+ build_fold_addr_expr_loc (loc, valist));
}
gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
return GS_ALL_DONE;
*expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
+ SET_EXPR_LOCATION (*expr_p, loc);
return GS_OK;
}
}
expand_builtin_va_copy (tree exp)
{
tree dst, src, t;
+ location_t loc = EXPR_LOCATION (exp);
dst = CALL_EXPR_ARG (exp, 0);
src = CALL_EXPR_ARG (exp, 1);
- dst = stabilize_va_list (dst, 1);
- src = stabilize_va_list (src, 0);
+ dst = stabilize_va_list_loc (loc, dst, 1);
+ src = stabilize_va_list_loc (loc, src, 0);
gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
/* Verify the arguments in the original call. */
if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
{
- tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
+ tree result = fold_builtin_fputs (EXPR_LOCATION (exp),
+ CALL_EXPR_ARG (exp, 0),
CALL_EXPR_ARG (exp, 1),
(target == const0_rtx),
unlocked, NULL_TREE);
int word, bitpos;
enum insn_code icode;
rtx temp;
+ location_t loc = EXPR_LOCATION (exp);
if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
return NULL_RTX;
if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
return NULL_RTX;
- arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
+ arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
build_real (TREE_TYPE (arg), dconst0));
return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
}
TREE_NOTHROW (decl) = 1;
DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
DECL_VISIBILITY_SPECIFIED (decl) = 1;
- call = rewrite_call_expr (exp, 0, decl, 0);
+ call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
return expand_call (call, target, ignore);
}
return it as a truthvalue. */
static tree
-build_builtin_expect_predicate (tree pred, tree expected)
+build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
{
tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
pred_type = TREE_VALUE (arg_types);
expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
- pred = fold_convert (pred_type, pred);
- expected = fold_convert (expected_type, expected);
- call_expr = build_call_expr (fn, 2, pred, expected);
+ pred = fold_convert_loc (loc, pred_type, pred);
+ expected = fold_convert_loc (loc, expected_type, expected);
+ call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
build_int_cst (ret_type, 0));
NULL_TREE if no simplification is possible. */
static tree
-fold_builtin_expect (tree arg0, tree arg1)
+fold_builtin_expect (location_t loc, tree arg0, tree arg1)
{
tree inner, fndecl;
enum tree_code code;
tree op0 = TREE_OPERAND (inner, 0);
tree op1 = TREE_OPERAND (inner, 1);
- op0 = build_builtin_expect_predicate (op0, arg1);
- op1 = build_builtin_expect_predicate (op1, arg1);
+ op0 = build_builtin_expect_predicate (loc, op0, arg1);
+ op1 = build_builtin_expect_predicate (loc, op1, arg1);
inner = build2 (code, TREE_TYPE (inner), op0, op1);
- return fold_convert (TREE_TYPE (arg0), inner);
+ return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
}
/* If the argument isn't invariant then there's nothing else we can do. */
/* Fold a call to __builtin_strlen with argument ARG. */
static tree
-fold_builtin_strlen (tree arg)
+fold_builtin_strlen (location_t loc, tree arg)
{
if (!validate_arg (arg, POINTER_TYPE))
return NULL_TREE;
{
/* Convert from the internal "sizetype" type to "size_t". */
if (size_type_node)
- len = fold_convert (size_type_node, len);
+ len = fold_convert_loc (loc, size_type_node, len);
return len;
}
/* Fold a call to __builtin_inf or __builtin_huge_val. */
static tree
-fold_builtin_inf (tree type, int warn)
+fold_builtin_inf (location_t loc, tree type, int warn)
{
REAL_VALUE_TYPE real;
Thus we pedwarn to ensure this constraint violation is
diagnosed. */
if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
- pedwarn (input_location, 0, "target format does not support infinity");
+ pedwarn (loc, 0, "target format does not support infinity");
real_inf (&real);
return build_real (type, real);
Do the transformation for a call with argument ARG. */
static tree
-fold_trunc_transparent_mathfn (tree fndecl, tree arg)
+fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
&& (decl = mathfn_built_in (newtype, fcode)))
- return fold_convert (ftype,
- build_call_expr (decl, 1,
- fold_convert (newtype, arg0)));
+ return fold_convert_loc (loc, ftype,
+ build_call_expr_loc (loc, decl, 1,
+ fold_convert_loc (loc,
+ newtype,
+ arg0)));
}
return NULL_TREE;
}
Do the transformation for a call with argument ARG. */
static tree
-fold_fixed_mathfn (tree fndecl, tree arg)
+fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* If argument is already integer valued, and we don't need to worry
about setting errno, there's no need to perform rounding. */
if (! flag_errno_math && integer_valued_real_p (arg))
- return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
+ return fold_build1_loc (loc, FIX_TRUNC_EXPR,
+ TREE_TYPE (TREE_TYPE (fndecl)), arg);
if (optimize)
{
if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
&& (decl = mathfn_built_in (newtype, fcode)))
- return build_call_expr (decl, 1, fold_convert (newtype, arg0));
+ return build_call_expr_loc (loc, decl, 1,
+ fold_convert_loc (loc, newtype, arg0));
}
/* Canonicalize llround (x) to lround (x) on LP64 targets where
if (newfn)
{
- tree newcall = build_call_expr(newfn, 1, arg);
- return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
+ tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
+ return fold_convert_loc (loc,
+ TREE_TYPE (TREE_TYPE (fndecl)), newcall);
}
}
return type. Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_cabs (tree arg, tree type, tree fndecl)
+fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
tree res;
/* If either part is zero, cabs is fabs of the other. */
if (real_zerop (real))
- return fold_build1 (ABS_EXPR, type, imag);
+ return fold_build1_loc (loc, ABS_EXPR, type, imag);
if (real_zerop (imag))
- return fold_build1 (ABS_EXPR, type, real);
+ return fold_build1_loc (loc, ABS_EXPR, type, real);
/* cabs(x+xi) -> fabs(x)*sqrt(2). */
if (flag_unsafe_math_optimizations
const REAL_VALUE_TYPE sqrt2_trunc
= real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
STRIP_NOPS (real);
- return fold_build2 (MULT_EXPR, type,
- fold_build1 (ABS_EXPR, type, real),
+ return fold_build2_loc (loc, MULT_EXPR, type,
+ fold_build1_loc (loc, ABS_EXPR, type, real),
build_real (type, sqrt2_trunc));
}
}
/* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
if (TREE_CODE (arg) == NEGATE_EXPR
|| TREE_CODE (arg) == CONJ_EXPR)
- return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
+ return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
/* Don't do this when optimizing for size. */
if (flag_unsafe_math_optimizations
arg = builtin_save_expr (arg);
- rpart = fold_build1 (REALPART_EXPR, type, arg);
- ipart = fold_build1 (IMAGPART_EXPR, type, arg);
+ rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
+ ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
rpart = builtin_save_expr (rpart);
ipart = builtin_save_expr (ipart);
- result = fold_build2 (PLUS_EXPR, type,
- fold_build2 (MULT_EXPR, type,
+ result = fold_build2_loc (loc, PLUS_EXPR, type,
+ fold_build2_loc (loc, MULT_EXPR, type,
rpart, rpart),
- fold_build2 (MULT_EXPR, type,
+ fold_build2_loc (loc, MULT_EXPR, type,
ipart, ipart));
- return build_call_expr (sqrtfn, 1, result);
+ return build_call_expr_loc (loc, sqrtfn, 1, result);
}
}
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_sqrt (tree arg, tree type)
+fold_builtin_sqrt (location_t loc, tree arg, tree type)
{
enum built_in_function fcode;
if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
{
tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
- arg = fold_build2 (MULT_EXPR, type,
+ arg = fold_build2_loc (loc, MULT_EXPR, type,
CALL_EXPR_ARG (arg, 0),
build_real (type, dconsthalf));
- return build_call_expr (expfn, 1, arg);
+ return build_call_expr_loc (loc, expfn, 1, arg);
}
/* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
tree_root = build_real (type, dconstroot);
- return build_call_expr (powfn, 2, arg0, tree_root);
+ return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
}
}
tree narg1;
if (!tree_expr_nonnegative_p (arg0))
arg0 = build1 (ABS_EXPR, type, arg0);
- narg1 = fold_build2 (MULT_EXPR, type, arg1,
+ narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
build_real (type, dconsthalf));
- return build_call_expr (powfn, 2, arg0, narg1);
+ return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
}
return NULL_TREE;
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_cbrt (tree arg, tree type)
+fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
const enum built_in_function fcode = builtin_mathfn_code (arg);
tree res;
tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
const REAL_VALUE_TYPE third_trunc =
real_value_truncate (TYPE_MODE (type), dconst_third ());
- arg = fold_build2 (MULT_EXPR, type,
+ arg = fold_build2_loc (loc, MULT_EXPR, type,
CALL_EXPR_ARG (arg, 0),
build_real (type, third_trunc));
- return build_call_expr (expfn, 1, arg);
+ return build_call_expr_loc (loc, expfn, 1, arg);
}
/* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
tree_root = build_real (type, dconstroot);
- return build_call_expr (powfn, 2, arg0, tree_root);
+ return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
}
}
dconst_third_ptr (), dconst_third_ptr ());
dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
tree_root = build_real (type, dconstroot);
- return build_call_expr (powfn, 2, arg0, tree_root);
+ return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
}
}
}
tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
const REAL_VALUE_TYPE dconstroot
= real_value_truncate (TYPE_MODE (type), dconst_third ());
- tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
+ tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
build_real (type, dconstroot));
- return build_call_expr (powfn, 2, arg00, narg01);
+ return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
}
}
}
simplification can be made. */
static tree
-fold_builtin_cos (tree arg, tree type, tree fndecl)
+fold_builtin_cos (location_t loc,
+ tree arg, tree type, tree fndecl)
{
tree res, narg;
/* Optimize cos(-x) into cos (x). */
if ((narg = fold_strip_sign_ops (arg)))
- return build_call_expr (fndecl, 1, narg);
+ return build_call_expr_loc (loc, fndecl, 1, narg);
return NULL_TREE;
}
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_cosh (tree arg, tree type, tree fndecl)
+fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
{
if (validate_arg (arg, REAL_TYPE))
{
/* Optimize cosh(-x) into cosh (x). */
if ((narg = fold_strip_sign_ops (arg)))
- return build_call_expr (fndecl, 1, narg);
+ return build_call_expr_loc (loc, fndecl, 1, narg);
}
return NULL_TREE;
NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_ccos (tree arg, tree type ATTRIBUTE_UNUSED, tree fndecl,
+fold_builtin_ccos (location_t loc,
+ tree arg, tree type ATTRIBUTE_UNUSED, tree fndecl,
bool hyper ATTRIBUTE_UNUSED)
{
if (validate_arg (arg, COMPLEX_TYPE)
/* Optimize fn(-x) into fn(x). */
if ((tmp = fold_strip_sign_ops (arg)))
- return build_call_expr (fndecl, 1, tmp);
+ return build_call_expr_loc (loc, fndecl, 1, tmp);
}
return NULL_TREE;
NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
+fold_builtin_sincos (location_t loc,
+ tree arg0, tree arg1, tree arg2)
{
tree type;
tree res, fn, call;
if (!fn)
return NULL_TREE;
- call = build_call_expr (fn, 1, arg0);
+ call = build_call_expr_loc (loc, fn, 1, arg0);
call = builtin_save_expr (call);
return build2 (COMPOUND_EXPR, void_type_node,
build2 (MODIFY_EXPR, void_type_node,
- build_fold_indirect_ref (arg1),
+ build_fold_indirect_ref_loc (loc, arg1),
build1 (IMAGPART_EXPR, type, call)),
build2 (MODIFY_EXPR, void_type_node,
- build_fold_indirect_ref (arg2),
+ build_fold_indirect_ref_loc (loc, arg2),
build1 (REALPART_EXPR, type, call)));
}
NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_cexp (tree arg0, tree type)
+fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
tree rtype;
tree realp, imagp, ifn;
if (!ifn)
return NULL_TREE;
- if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
+ if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
&& real_zerop (realp))
{
- tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
- return build_call_expr (ifn, 1, narg);
+ tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
+ return build_call_expr_loc (loc, ifn, 1, narg);
}
/* In case we can easily decompose real and imaginary parts split cexp
if (!rfn)
return NULL_TREE;
- imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
+ imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
if (!imagp)
return NULL_TREE;
- icall = build_call_expr (ifn, 1, imagp);
+ icall = build_call_expr_loc (loc, ifn, 1, imagp);
icall = builtin_save_expr (icall);
- rcall = build_call_expr (rfn, 1, realp);
+ rcall = build_call_expr_loc (loc, rfn, 1, realp);
rcall = builtin_save_expr (rcall);
- return fold_build2 (COMPLEX_EXPR, type,
- fold_build2 (MULT_EXPR, rtype,
+ return fold_build2_loc (loc, COMPLEX_EXPR, type,
+ fold_build2_loc (loc, MULT_EXPR, rtype,
rcall,
- fold_build1 (REALPART_EXPR, rtype, icall)),
- fold_build2 (MULT_EXPR, rtype,
+ fold_build1_loc (loc, REALPART_EXPR,
+ rtype, icall)),
+ fold_build2_loc (loc, MULT_EXPR, rtype,
rcall,
- fold_build1 (IMAGPART_EXPR, rtype, icall)));
+ fold_build1_loc (loc, IMAGPART_EXPR,
+ rtype, icall)));
}
return NULL_TREE;
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_trunc (tree fndecl, tree arg)
+fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
{
if (!validate_arg (arg, REAL_TYPE))
return NULL_TREE;
return build_real (type, r);
}
- return fold_trunc_transparent_mathfn (fndecl, arg);
+ return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin floor, floorf or floorl with argument ARG.
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_floor (tree fndecl, tree arg)
+fold_builtin_floor (location_t loc, tree fndecl, tree arg)
{
if (!validate_arg (arg, REAL_TYPE))
return NULL_TREE;
{
tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
if (truncfn)
- return build_call_expr (truncfn, 1, arg);
+ return build_call_expr_loc (loc, truncfn, 1, arg);
}
- return fold_trunc_transparent_mathfn (fndecl, arg);
+ return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_ceil (tree fndecl, tree arg)
+fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
{
if (!validate_arg (arg, REAL_TYPE))
return NULL_TREE;
}
}
- return fold_trunc_transparent_mathfn (fndecl, arg);
+ return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin round, roundf or roundl with argument ARG.
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_round (tree fndecl, tree arg)
+fold_builtin_round (location_t loc, tree fndecl, tree arg)
{
if (!validate_arg (arg, REAL_TYPE))
return NULL_TREE;
}
}
- return fold_trunc_transparent_mathfn (fndecl, arg);
+ return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin lround, lroundf or lroundl (or the
can be made. */
static tree
-fold_builtin_int_roundingfn (tree fndecl, tree arg)
+fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
if (!validate_arg (arg, REAL_TYPE))
return NULL_TREE;
CASE_FLT_FN (BUILT_IN_LLFLOOR):
/* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
if (tree_expr_nonnegative_p (arg))
- return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
- arg);
+ return fold_build1_loc (loc, FIX_TRUNC_EXPR,
+ TREE_TYPE (TREE_TYPE (fndecl)), arg);
break;
default:;
}
- return fold_fixed_mathfn (fndecl, arg);
+ return fold_fixed_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
FUNC is the corresponding MPFR logarithm function. */
static tree
-fold_builtin_logarithm (tree fndecl, tree arg,
+fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
if (validate_arg (arg, REAL_TYPE))
|| fcode == BUILT_IN_EXP2F
|| fcode == BUILT_IN_EXP2L))
|| (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
- return fold_convert (type, CALL_EXPR_ARG (arg, 0));
+ return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
/* Optimize logN(func()) for various exponential functions. We
want to determine the value "x" and the power "exponent" in
/* Now perform the optimization. */
if (x && exponent)
{
- tree logfn = build_call_expr (fndecl, 1, x);
- return fold_build2 (MULT_EXPR, type, exponent, logfn);
+ tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
+ return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
}
}
}
NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
+fold_builtin_hypot (location_t loc, tree fndecl,
+ tree arg0, tree arg1, tree type)
{
tree res, narg0, narg1;
narg1 = fold_strip_sign_ops (arg1);
if (narg0 || narg1)
{
- return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
+ return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
narg1 ? narg1 : arg1);
}
/* If either argument is zero, hypot is fabs of the other. */
if (real_zerop (arg0))
- return fold_build1 (ABS_EXPR, type, arg1);
+ return fold_build1_loc (loc, ABS_EXPR, type, arg1);
else if (real_zerop (arg1))
- return fold_build1 (ABS_EXPR, type, arg0);
+ return fold_build1_loc (loc, ABS_EXPR, type, arg0);
/* hypot(x,x) -> fabs(x)*sqrt(2). */
if (flag_unsafe_math_optimizations
{
const REAL_VALUE_TYPE sqrt2_trunc
= real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
- return fold_build2 (MULT_EXPR, type,
- fold_build1 (ABS_EXPR, type, arg0),
+ return fold_build2_loc (loc, MULT_EXPR, type,
+ fold_build1_loc (loc, ABS_EXPR, type, arg0),
build_real (type, sqrt2_trunc));
}
/* Fold a builtin function call to pow, powf, or powl. Return
NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
+fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
tree res;
/* Optimize pow(1.0,y) = 1.0. */
if (real_onep (arg0))
- return omit_one_operand (type, build_real (type, dconst1), arg1);
+ return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
if (TREE_CODE (arg1) == REAL_CST
&& !TREE_OVERFLOW (arg1))
/* Optimize pow(x,0.0) = 1.0. */
if (REAL_VALUES_EQUAL (c, dconst0))
- return omit_one_operand (type, build_real (type, dconst1),
+ return omit_one_operand_loc (loc, type, build_real (type, dconst1),
arg0);
/* Optimize pow(x,1.0) = x. */
/* Optimize pow(x,-1.0) = 1.0/x. */
if (REAL_VALUES_EQUAL (c, dconstm1))
- return fold_build2 (RDIV_EXPR, type,
+ return fold_build2_loc (loc, RDIV_EXPR, type,
build_real (type, dconst1), arg0);
/* Optimize pow(x,0.5) = sqrt(x). */
tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
if (sqrtfn != NULL_TREE)
- return build_call_expr (sqrtfn, 1, arg0);
+ return build_call_expr_loc (loc, sqrtfn, 1, arg0);
}
/* Optimize pow(x,1.0/3.0) = cbrt(x). */
{
tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
if (cbrtfn != NULL_TREE)
- return build_call_expr (cbrtfn, 1, arg0);
+ return build_call_expr_loc (loc, cbrtfn, 1, arg0);
}
}
{
tree narg0 = fold_strip_sign_ops (arg0);
if (narg0)
- return build_call_expr (fndecl, 2, narg0, arg1);
+ return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
}
}
}
{
tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
tree arg = CALL_EXPR_ARG (arg0, 0);
- arg = fold_build2 (MULT_EXPR, type, arg, arg1);
- return build_call_expr (expfn, 1, arg);
+ arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
+ return build_call_expr_loc (loc, expfn, 1, arg);
}
/* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
if (BUILTIN_SQRT_P (fcode))
{
tree narg0 = CALL_EXPR_ARG (arg0, 0);
- tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
+ tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
build_real (type, dconsthalf));
- return build_call_expr (fndecl, 2, narg0, narg1);
+ return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
}
/* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
{
const REAL_VALUE_TYPE dconstroot
= real_value_truncate (TYPE_MODE (type), dconst_third ());
- tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
+ tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
build_real (type, dconstroot));
- return build_call_expr (fndecl, 2, arg, narg1);
+ return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
}
}
{
tree arg00 = CALL_EXPR_ARG (arg0, 0);
tree arg01 = CALL_EXPR_ARG (arg0, 1);
- tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
- return build_call_expr (fndecl, 2, arg00, narg1);
+ tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
+ return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
}
}
/* Fold a builtin function call to powi, powif, or powil with argument ARG.
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
+fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
tree arg0, tree arg1, tree type)
{
if (!validate_arg (arg0, REAL_TYPE)
/* Optimize pow(1.0,y) = 1.0. */
if (real_onep (arg0))
- return omit_one_operand (type, build_real (type, dconst1), arg1);
+ return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
if (host_integerp (arg1, 0))
{
/* Optimize pow(x,0) = 1.0. */
if (c == 0)
- return omit_one_operand (type, build_real (type, dconst1),
+ return omit_one_operand_loc (loc, type, build_real (type, dconst1),
arg0);
/* Optimize pow(x,1) = x. */
/* Optimize pow(x,-1) = 1.0/x. */
if (c == -1)
- return fold_build2 (RDIV_EXPR, type,
+ return fold_build2_loc (loc, RDIV_EXPR, type,
build_real (type, dconst1), arg0);
}
FUNC is the corresponding MPFR exponent function. */
static tree
-fold_builtin_exponent (tree fndecl, tree arg,
+fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
if (validate_arg (arg, REAL_TYPE))
&& (fcode == BUILT_IN_LOG10
|| fcode == BUILT_IN_LOG10F
|| fcode == BUILT_IN_LOG10L)))
- return fold_convert (type, CALL_EXPR_ARG (arg, 0));
+ return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
}
}
NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
+fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
+ tree type, bool ignore)
{
tree var, ret, etype;
unsigned HOST_WIDE_INT length, cval;
/* If the LEN parameter is zero, return DEST. */
if (integer_zerop (len))
- return omit_one_operand (type, dest, c);
+ return omit_one_operand_loc (loc, type, dest, c);
if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
return NULL_TREE;
}
ret = build_int_cst_type (etype, cval);
- var = build_fold_indirect_ref (fold_convert (build_pointer_type (etype),
- dest));
+ var = build_fold_indirect_ref_loc (loc,
+ fold_convert_loc (loc,
+ build_pointer_type (etype),
+ dest));
ret = build2 (MODIFY_EXPR, etype, var, ret);
if (ignore)
return ret;
- return omit_one_operand (type, dest, ret);
+ return omit_one_operand_loc (loc, type, dest, ret);
}
/* Fold function call to builtin memset. Return
NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_bzero (tree dest, tree size, bool ignore)
+fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
{
if (! validate_arg (dest, POINTER_TYPE)
|| ! validate_arg (size, INTEGER_TYPE))
so that if it isn't expanded inline, we fallback to
calling bzero instead of memset. */
- return fold_builtin_memset (dest, integer_zero_node,
- fold_convert (sizetype, size),
+ return fold_builtin_memset (loc, dest, integer_zero_node,
+ fold_convert_loc (loc, sizetype, size),
void_type_node, ignore);
}
(memmove). */
static tree
-fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
+fold_builtin_memory_op (location_t loc, tree dest, tree src,
+ tree len, tree type, bool ignore, int endp)
{
tree destvar, srcvar, expr;
/* If the LEN parameter is zero, return DEST. */
if (integer_zerop (len))
- return omit_one_operand (type, dest, src);
+ return omit_one_operand_loc (loc, type, dest, src);
/* If SRC and DEST are the same (and not volatile), return
DEST{,+LEN,+LEN-1}. */
tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
if (!fn)
return NULL_TREE;
- return build_call_expr (fn, 3, dest, src, len);
+ return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
/* If *src and *dest can't overlap, optimize into memcpy as well. */
- srcvar = build_fold_indirect_ref (src);
- destvar = build_fold_indirect_ref (dest);
+ srcvar = build_fold_indirect_ref_loc (loc, src);
+ destvar = build_fold_indirect_ref_loc (loc, dest);
if (srcvar
&& !TREE_THIS_VOLATILE (srcvar)
&& destvar
fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
if (!fn)
return NULL_TREE;
- return build_call_expr (fn, 3, dest, src, len);
+ return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
return NULL_TREE;
}
srcvar = NULL_TREE;
if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
{
- srcvar = build_fold_indirect_ref (src);
+ srcvar = build_fold_indirect_ref_loc (loc, src);
if (TREE_THIS_VOLATILE (srcvar))
return NULL_TREE;
else if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
destvar = NULL_TREE;
if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
{
- destvar = build_fold_indirect_ref (dest);
+ destvar = build_fold_indirect_ref_loc (loc, dest);
if (TREE_THIS_VOLATILE (destvar))
return NULL_TREE;
else if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
TYPE_PACKED (srctype) = 1;
}
srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
- src = fold_convert (srcptype, src);
- srcvar = build_fold_indirect_ref (src);
+ src = fold_convert_loc (loc, srcptype, src);
+ srcvar = build_fold_indirect_ref_loc (loc, src);
}
else if (destvar == NULL_TREE)
{
TYPE_PACKED (desttype) = 1;
}
destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
- dest = fold_convert (destptype, dest);
- destvar = build_fold_indirect_ref (dest);
+ dest = fold_convert_loc (loc, destptype, dest);
+ destvar = build_fold_indirect_ref_loc (loc, dest);
}
if (srctype == desttype
|| POINTER_TYPE_P (TREE_TYPE (srcvar)))
&& (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
|| POINTER_TYPE_P (TREE_TYPE (destvar))))
- expr = fold_convert (TREE_TYPE (destvar), srcvar);
+ expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
else
- expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
+ expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
+ TREE_TYPE (destvar), srcvar);
expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
}
return expr;
if (endp == 0 || endp == 3)
- return omit_one_operand (type, dest, expr);
+ return omit_one_operand_loc (loc, type, dest, expr);
if (expr == len)
expr = NULL_TREE;
if (endp == 2)
- len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
+ len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
ssize_int (1));
- len = fold_convert (sizetype, len);
- dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
- dest = fold_convert (type, dest);
+ len = fold_convert_loc (loc, sizetype, len);
+ dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
+ dest = fold_convert_loc (loc, type, dest);
if (expr)
- dest = omit_one_operand (type, dest, expr);
+ dest = omit_one_operand_loc (loc, type, dest, expr);
return dest;
}
copied. Return NULL_TREE if no simplification can be made. */
tree
-fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
+fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
{
tree fn;
/* If SRC and DEST are the same (and not volatile), return DEST. */
if (operand_equal_p (src, dest, 0))
- return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
+ return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
if (optimize_function_for_size_p (cfun))
return NULL_TREE;
return NULL_TREE;
}
- len = size_binop (PLUS_EXPR, len, ssize_int (1));
- return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
- build_call_expr (fn, 3, dest, src, len));
+ len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
+ return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
+ build_call_expr_loc (loc, fn, 3, dest, src, len));
}
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
Return NULL_TREE if no simplification can be made. */
tree
-fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
+fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
+ tree src, tree len, tree slen)
{
tree fn;
/* If the LEN parameter is zero, return DEST. */
if (integer_zerop (len))
- return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
+ return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
/* We can't compare slen with len as constants below if len is not a
constant. */
if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
return NULL_TREE;
- slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
+ slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
/* We do not support simplification of this case, though we do
support it when expanding trees into RTL. */
fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
if (!fn)
return NULL_TREE;
- return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
- build_call_expr (fn, 3, dest, src, len));
+ return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
+ build_call_expr_loc (loc, fn, 3, dest, src, len));
}
/* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
+fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
{
if (!validate_arg (arg1, POINTER_TYPE)
|| !validate_arg (arg2, INTEGER_TYPE)
if (r == NULL)
return build_int_cst (TREE_TYPE (arg1), 0);
- tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
+ tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
size_int (r - p1));
- return fold_convert (type, tem);
+ return fold_convert_loc (loc, type, tem);
}
return NULL_TREE;
}
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_memcmp (tree arg1, tree arg2, tree len)
+fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
const char *p1, *p2;
/* If the LEN parameter is zero, return zero. */
if (integer_zerop (len))
- return omit_two_operands (integer_type_node, integer_zero_node,
+ return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
arg1, arg2);
/* If ARG1 and ARG2 are the same (and not volatile), return zero. */
if (operand_equal_p (arg1, arg2, 0))
- return omit_one_operand (integer_type_node, integer_zero_node, len);
+ return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
p1 = c_getstr (arg1);
p2 = c_getstr (arg2);
tree cst_uchar_ptr_node
= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
- tree ind1 = fold_convert (integer_type_node,
- build1 (INDIRECT_REF, cst_uchar_node,
- fold_convert (cst_uchar_ptr_node,
+ tree ind1
+ = fold_convert_loc (loc, integer_type_node,
+ build1 (INDIRECT_REF, cst_uchar_node,
+ fold_convert_loc (loc,
+ cst_uchar_ptr_node,
arg1)));
- tree ind2 = fold_convert (integer_type_node,
- build1 (INDIRECT_REF, cst_uchar_node,
- fold_convert (cst_uchar_ptr_node,
+ tree ind2
+ = fold_convert_loc (loc, integer_type_node,
+ build1 (INDIRECT_REF, cst_uchar_node,
+ fold_convert_loc (loc,
+ cst_uchar_ptr_node,
arg2)));
- return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
+ return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
}
return NULL_TREE;
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_strcmp (tree arg1, tree arg2)
+fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
const char *p1, *p2;
tree cst_uchar_ptr_node
= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
- return fold_convert (integer_type_node,
- build1 (INDIRECT_REF, cst_uchar_node,
- fold_convert (cst_uchar_ptr_node,
- arg1)));
+ return fold_convert_loc (loc, integer_type_node,
+ build1 (INDIRECT_REF, cst_uchar_node,
+ fold_convert_loc (loc,
+ cst_uchar_ptr_node,
+ arg1)));
}
/* If the first arg is "", return -*(const unsigned char*)arg2. */
tree cst_uchar_ptr_node
= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
- tree temp = fold_convert (integer_type_node,
- build1 (INDIRECT_REF, cst_uchar_node,
- fold_convert (cst_uchar_ptr_node,
+ tree temp
+ = fold_convert_loc (loc, integer_type_node,
+ build1 (INDIRECT_REF, cst_uchar_node,
+ fold_convert_loc (loc,
+ cst_uchar_ptr_node,
arg2)));
- return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
+ return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
}
return NULL_TREE;
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_strncmp (tree arg1, tree arg2, tree len)
+fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
const char *p1, *p2;
/* If the LEN parameter is zero, return zero. */
if (integer_zerop (len))
- return omit_two_operands (integer_type_node, integer_zero_node,
+ return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
arg1, arg2);
/* If ARG1 and ARG2 are the same (and not volatile), return zero. */
if (operand_equal_p (arg1, arg2, 0))
- return omit_one_operand (integer_type_node, integer_zero_node, len);
+ return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
p1 = c_getstr (arg1);
p2 = c_getstr (arg2);
tree cst_uchar_ptr_node
= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
- return fold_convert (integer_type_node,
- build1 (INDIRECT_REF, cst_uchar_node,
- fold_convert (cst_uchar_ptr_node,
- arg1)));
+ return fold_convert_loc (loc, integer_type_node,
+ build1 (INDIRECT_REF, cst_uchar_node,
+ fold_convert_loc (loc,
+ cst_uchar_ptr_node,
+ arg1)));
}
/* If the first arg is "", and the length is greater than zero,
tree cst_uchar_ptr_node
= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
- tree temp = fold_convert (integer_type_node,
- build1 (INDIRECT_REF, cst_uchar_node,
- fold_convert (cst_uchar_ptr_node,
- arg2)));
- return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
+ tree temp = fold_convert_loc (loc, integer_type_node,
+ build1 (INDIRECT_REF, cst_uchar_node,
+ fold_convert_loc (loc,
+ cst_uchar_ptr_node,
+ arg2)));
+ return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
}
/* If len parameter is one, return an expression corresponding to
tree cst_uchar_ptr_node
= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
- tree ind1 = fold_convert (integer_type_node,
- build1 (INDIRECT_REF, cst_uchar_node,
- fold_convert (cst_uchar_ptr_node,
- arg1)));
- tree ind2 = fold_convert (integer_type_node,
- build1 (INDIRECT_REF, cst_uchar_node,
- fold_convert (cst_uchar_ptr_node,
- arg2)));
- return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
+ tree ind1 = fold_convert_loc (loc, integer_type_node,
+ build1 (INDIRECT_REF, cst_uchar_node,
+ fold_convert_loc (loc,
+ cst_uchar_ptr_node,
+ arg1)));
+ tree ind2 = fold_convert_loc (loc, integer_type_node,
+ build1 (INDIRECT_REF, cst_uchar_node,
+ fold_convert_loc (loc,
+ cst_uchar_ptr_node,
+ arg2)));
+ return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
}
return NULL_TREE;
ARG. Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_signbit (tree arg, tree type)
+fold_builtin_signbit (location_t loc, tree arg, tree type)
{
tree temp;
c = TREE_REAL_CST (arg);
temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
- return fold_convert (type, temp);
+ return fold_convert_loc (loc, type, temp);
}
/* If ARG is non-negative, the result is always zero. */
if (tree_expr_nonnegative_p (arg))
- return omit_one_operand (type, integer_zero_node, arg);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg);
/* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
- return fold_build2 (LT_EXPR, type, arg,
+ return fold_build2_loc (loc, LT_EXPR, type, arg,
build_real (TREE_TYPE (arg), dconst0));
return NULL_TREE;
be made. */
static tree
-fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
+fold_builtin_copysign (location_t loc, tree fndecl,
+ tree arg1, tree arg2, tree type)
{
tree tem;
/* copysign(X,X) is X. */
if (operand_equal_p (arg1, arg2, 0))
- return fold_convert (type, arg1);
+ return fold_convert_loc (loc, type, arg1);
/* If ARG1 and ARG2 are compile-time constants, determine the result. */
if (TREE_CODE (arg1) == REAL_CST
/* copysign(X, Y) is fabs(X) when Y is always non-negative.
Remember to evaluate Y for side-effects. */
if (tree_expr_nonnegative_p (arg2))
- return omit_one_operand (type,
- fold_build1 (ABS_EXPR, type, arg1),
+ return omit_one_operand_loc (loc, type,
+ fold_build1_loc (loc, ABS_EXPR, type, arg1),
arg2);
/* Strip sign changing operations for the first argument. */
tem = fold_strip_sign_ops (arg1);
if (tem)
- return build_call_expr (fndecl, 2, tem, arg2);
+ return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
return NULL_TREE;
}
/* Fold a call to builtin isascii with argument ARG. */
static tree
-fold_builtin_isascii (tree arg)
+fold_builtin_isascii (location_t loc, tree arg)
{
if (!validate_arg (arg, INTEGER_TYPE))
return NULL_TREE;
arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
build_int_cst (NULL_TREE,
~ (unsigned HOST_WIDE_INT) 0x7f));
- return fold_build2 (EQ_EXPR, integer_type_node,
+ return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
arg, integer_zero_node);
}
}
/* Fold a call to builtin toascii with argument ARG. */
static tree
-fold_builtin_toascii (tree arg)
+fold_builtin_toascii (location_t loc, tree arg)
{
if (!validate_arg (arg, INTEGER_TYPE))
return NULL_TREE;
/* Transform toascii(c) -> (c & 0x7f). */
- return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
+ return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
build_int_cst (NULL_TREE, 0x7f));
}
/* Fold a call to builtin isdigit with argument ARG. */
static tree
-fold_builtin_isdigit (tree arg)
+fold_builtin_isdigit (location_t loc, tree arg)
{
if (!validate_arg (arg, INTEGER_TYPE))
return NULL_TREE;
if (target_digit0 == 0)
return NULL_TREE;
- arg = fold_convert (unsigned_type_node, arg);
+ arg = fold_convert_loc (loc, unsigned_type_node, arg);
arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
build_int_cst (unsigned_type_node, target_digit0));
- return fold_build2 (LE_EXPR, integer_type_node, arg,
+ return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
build_int_cst (unsigned_type_node, 9));
}
}
/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
static tree
-fold_builtin_fabs (tree arg, tree type)
+fold_builtin_fabs (location_t loc, tree arg, tree type)
{
if (!validate_arg (arg, REAL_TYPE))
return NULL_TREE;
- arg = fold_convert (type, arg);
+ arg = fold_convert_loc (loc, type, arg);
if (TREE_CODE (arg) == REAL_CST)
return fold_abs_const (arg, type);
- return fold_build1 (ABS_EXPR, type, arg);
+ return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
static tree
-fold_builtin_abs (tree arg, tree type)
+fold_builtin_abs (location_t loc, tree arg, tree type)
{
if (!validate_arg (arg, INTEGER_TYPE))
return NULL_TREE;
- arg = fold_convert (type, arg);
+ arg = fold_convert_loc (loc, type, arg);
if (TREE_CODE (arg) == INTEGER_CST)
return fold_abs_const (arg, type);
- return fold_build1 (ABS_EXPR, type, arg);
+ return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a call to builtin fmin or fmax. */
static tree
-fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
+fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
+ tree type, bool max)
{
if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
{
&& real_isnan (&TREE_REAL_CST (arg0))
&& (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
|| ! TREE_REAL_CST (arg0).signalling))
- return omit_one_operand (type, arg1, arg0);
+ return omit_one_operand_loc (loc, type, arg1, arg0);
if (TREE_CODE (arg1) == REAL_CST
&& real_isnan (&TREE_REAL_CST (arg1))
&& (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
|| ! TREE_REAL_CST (arg1).signalling))
- return omit_one_operand (type, arg0, arg1);
+ return omit_one_operand_loc (loc, type, arg0, arg1);
/* Transform fmin/fmax(x,x) -> x. */
if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
- return omit_one_operand (type, arg0, arg1);
+ return omit_one_operand_loc (loc, type, arg0, arg1);
/* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
functions to return the numeric arg if the other one is NaN.
-ffinite-math-only is set. C99 doesn't require -0.0 to be
handled, so we don't have to worry about it either. */
if (flag_finite_math_only)
- return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
- fold_convert (type, arg0),
- fold_convert (type, arg1));
+ return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
+ fold_convert_loc (loc, type, arg0),
+ fold_convert_loc (loc, type, arg1));
}
return NULL_TREE;
}
/* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
static tree
-fold_builtin_carg (tree arg, tree type)
+fold_builtin_carg (location_t loc, tree arg, tree type)
{
if (validate_arg (arg, COMPLEX_TYPE)
&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
if (atan2_fn)
{
tree new_arg = builtin_save_expr (arg);
- tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
- tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
- return build_call_expr (atan2_fn, 2, i_arg, r_arg);
+ tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
+ tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
+ return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
}
}
/* Fold a call to builtin logb/ilogb. */
static tree
-fold_builtin_logb (tree arg, tree rettype)
+fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
if (! validate_arg (arg, REAL_TYPE))
return NULL_TREE;
case rvc_inf:
/* If arg is Inf or NaN and we're logb, return it. */
if (TREE_CODE (rettype) == REAL_TYPE)
- return fold_convert (rettype, arg);
+ return fold_convert_loc (loc, rettype, arg);
/* Fall through... */
case rvc_zero:
/* Zero may set errno and/or raise an exception for logb, also
want the exponent as if they were [1.0, 2.0) so get the
exponent and subtract 1. */
if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
- return fold_convert (rettype, build_int_cst (NULL_TREE,
- REAL_EXP (value)-1));
+ return fold_convert_loc (loc, rettype,
+ build_int_cst (NULL_TREE,
+ REAL_EXP (value)-1));
break;
}
}
/* Fold a call to builtin significand, if radix == 2. */
static tree
-fold_builtin_significand (tree arg, tree rettype)
+fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
if (! validate_arg (arg, REAL_TYPE))
return NULL_TREE;
case rvc_nan:
case rvc_inf:
/* If arg is +-0, +-Inf or +-NaN, then return it. */
- return fold_convert (rettype, arg);
+ return fold_convert_loc (loc, rettype, arg);
case rvc_normal:
/* For normal numbers, proceed iff radix == 2. */
if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
/* Fold a call to builtin frexp, we can assume the base is 2. */
static tree
-fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
+fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
return NULL_TREE;
if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
return NULL_TREE;
- arg1 = build_fold_indirect_ref (arg1);
+ arg1 = build_fold_indirect_ref_loc (loc, arg1);
/* Proceed if a valid pointer type was passed in. */
if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
case rvc_nan:
case rvc_inf:
/* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
- return omit_one_operand (rettype, arg0, arg1);
+ return omit_one_operand_loc (loc, rettype, arg0, arg1);
case rvc_normal:
{
/* Since the frexp function always expects base 2, and in
}
/* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
- arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
+ arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
TREE_SIDE_EFFECTS (arg1) = 1;
- return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
+ return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
}
return NULL_TREE;
check the mode of the TYPE parameter in certain cases. */
static tree
-fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
+fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
+ tree type, bool ldexp)
{
if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
{
if (real_zerop (arg0) || integer_zerop (arg1)
|| (TREE_CODE (arg0) == REAL_CST
&& !real_isfinite (&TREE_REAL_CST (arg0))))
- return omit_one_operand (type, arg0, arg1);
+ return omit_one_operand_loc (loc, type, arg0, arg1);
/* If both arguments are constant, then try to evaluate it. */
if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
/* Fold a call to builtin modf. */
static tree
-fold_builtin_modf (tree arg0, tree arg1, tree rettype)
+fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
return NULL_TREE;
if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
return NULL_TREE;
- arg1 = build_fold_indirect_ref (arg1);
+ arg1 = build_fold_indirect_ref_loc (loc, arg1);
/* Proceed if a valid pointer type was passed in. */
if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
}
/* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
- arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
+ arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
build_real (rettype, trunc));
TREE_SIDE_EFFECTS (arg1) = 1;
- return fold_build2 (COMPOUND_EXPR, rettype, arg1,
+ return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
build_real (rettype, frac));
}
ARG is the argument for the call. */
static tree
-fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
+fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
tree type = TREE_TYPE (TREE_TYPE (fndecl));
REAL_VALUE_TYPE r;
{
case BUILT_IN_ISINF:
if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
- return omit_one_operand (type, integer_zero_node, arg);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg);
if (TREE_CODE (arg) == REAL_CST)
{
if (signbit_fn && isinf_fn)
{
- tree signbit_call = build_call_expr (signbit_fn, 1, arg);
- tree isinf_call = build_call_expr (isinf_fn, 1, arg);
+ tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
+ tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
- signbit_call = fold_build2 (NE_EXPR, integer_type_node,
+ signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
signbit_call, integer_zero_node);
- isinf_call = fold_build2 (NE_EXPR, integer_type_node,
+ isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
isinf_call, integer_zero_node);
- tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
+ tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
integer_minus_one_node, integer_one_node);
- tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
+ tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
+ isinf_call, tmp,
integer_zero_node);
}
case BUILT_IN_ISFINITE:
if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
&& !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
- return omit_one_operand (type, integer_one_node, arg);
+ return omit_one_operand_loc (loc, type, integer_one_node, arg);
if (TREE_CODE (arg) == REAL_CST)
{
case BUILT_IN_ISNAN:
if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
- return omit_one_operand (type, integer_zero_node, arg);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg);
if (TREE_CODE (arg) == REAL_CST)
{
}
arg = builtin_save_expr (arg);
- return fold_build2 (UNORDERED_EXPR, type, arg, arg);
+ return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
default:
gcc_unreachable ();
one floating point argument which is "type generic". */
static tree
-fold_builtin_fpclassify (tree exp)
+fold_builtin_fpclassify (location_t loc, tree exp)
{
tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
arg, type, res, tmp;
arg = CALL_EXPR_ARG (exp, 5);
type = TREE_TYPE (arg);
mode = TYPE_MODE (type);
- arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
+ arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
/* fpclassify(x) ->
isnan(x) ? FP_NAN :
(fabs(x) >= DBL_MIN ? FP_NORMAL :
(x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
- tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
+ tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
build_real (type, dconst0));
- res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
+ res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
+ tmp, fp_zero, fp_subnormal);
sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
real_from_string (&r, buf);
- tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
- res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
+ tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
+ arg, build_real (type, r));
+ res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
if (HONOR_INFINITIES (mode))
{
real_inf (&r);
- tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
+ tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
build_real (type, r));
- res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
+ res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
+ fp_infinite, res);
}
if (HONOR_NANS (mode))
{
- tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
- res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
+ tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
+ res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
}
return res;
the rest. */
static tree
-fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
+fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
enum tree_code unordered_code,
enum tree_code ordered_code)
{
else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
cmp_type = type1;
- arg0 = fold_convert (cmp_type, arg0);
- arg1 = fold_convert (cmp_type, arg1);
+ arg0 = fold_convert_loc (loc, cmp_type, arg0);
+ arg1 = fold_convert_loc (loc, cmp_type, arg1);
if (unordered_code == UNORDERED_EXPR)
{
if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
- return omit_two_operands (type, integer_zero_node, arg0, arg1);
- return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
+ return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
+ return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
}
code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
: ordered_code;
- return fold_build1 (TRUTH_NOT_EXPR, type,
- fold_build2 (code, type, arg0, arg1));
+ return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
+ fold_build2_loc (loc, code, type, arg0, arg1));
}
/* Fold a call to built-in function FNDECL with 0 arguments.
function returns NULL_TREE if no simplification was possible. */
static tree
-fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
+fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
{
tree type = TREE_TYPE (TREE_TYPE (fndecl));
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
case BUILT_IN_INFD32:
case BUILT_IN_INFD64:
case BUILT_IN_INFD128:
- return fold_builtin_inf (type, true);
+ return fold_builtin_inf (loc, type, true);
CASE_FLT_FN (BUILT_IN_HUGE_VAL):
- return fold_builtin_inf (type, false);
+ return fold_builtin_inf (loc, type, false);
case BUILT_IN_CLASSIFY_TYPE:
return fold_builtin_classify_type (NULL_TREE);
function returns NULL_TREE if no simplification was possible. */
static tree
-fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
+fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
{
tree type = TREE_TYPE (TREE_TYPE (fndecl));
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
return fold_builtin_classify_type (arg0);
case BUILT_IN_STRLEN:
- return fold_builtin_strlen (arg0);
+ return fold_builtin_strlen (loc, arg0);
CASE_FLT_FN (BUILT_IN_FABS):
- return fold_builtin_fabs (arg0, type);
+ return fold_builtin_fabs (loc, arg0, type);
case BUILT_IN_ABS:
case BUILT_IN_LABS:
case BUILT_IN_LLABS:
case BUILT_IN_IMAXABS:
- return fold_builtin_abs (arg0, type);
+ return fold_builtin_abs (loc, arg0, type);
CASE_FLT_FN (BUILT_IN_CONJ):
if (validate_arg (arg0, COMPLEX_TYPE)
&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
- return fold_build1 (CONJ_EXPR, type, arg0);
+ return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
break;
CASE_FLT_FN (BUILT_IN_CREAL):
if (validate_arg (arg0, COMPLEX_TYPE)
&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
- return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
+ return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
break;
CASE_FLT_FN (BUILT_IN_CIMAG):
if (validate_arg (arg0, COMPLEX_TYPE))
- return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
+ return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
break;
CASE_FLT_FN (BUILT_IN_CCOS):
- return fold_builtin_ccos(arg0, type, fndecl, /*hyper=*/ false);
+ return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
CASE_FLT_FN (BUILT_IN_CCOSH):
- return fold_builtin_ccos(arg0, type, fndecl, /*hyper=*/ true);
+ return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
#ifdef HAVE_mpc
CASE_FLT_FN (BUILT_IN_CSIN):
#endif
CASE_FLT_FN (BUILT_IN_CABS):
- return fold_builtin_cabs (arg0, type, fndecl);
+ return fold_builtin_cabs (loc, arg0, type, fndecl);
CASE_FLT_FN (BUILT_IN_CARG):
- return fold_builtin_carg (arg0, type);
+ return fold_builtin_carg (loc, arg0, type);
CASE_FLT_FN (BUILT_IN_SQRT):
- return fold_builtin_sqrt (arg0, type);
+ return fold_builtin_sqrt (loc, arg0, type);
CASE_FLT_FN (BUILT_IN_CBRT):
- return fold_builtin_cbrt (arg0, type);
+ return fold_builtin_cbrt (loc, arg0, type);
CASE_FLT_FN (BUILT_IN_ASIN):
if (validate_arg (arg0, REAL_TYPE))
break;
CASE_FLT_FN (BUILT_IN_COS):
- return fold_builtin_cos (arg0, type, fndecl);
+ return fold_builtin_cos (loc, arg0, type, fndecl);
CASE_FLT_FN (BUILT_IN_TAN):
return fold_builtin_tan (arg0, type);
CASE_FLT_FN (BUILT_IN_CEXP):
- return fold_builtin_cexp (arg0, type);
+ return fold_builtin_cexp (loc, arg0, type);
CASE_FLT_FN (BUILT_IN_CEXPI):
if (validate_arg (arg0, REAL_TYPE))
break;
CASE_FLT_FN (BUILT_IN_COSH):
- return fold_builtin_cosh (arg0, type, fndecl);
+ return fold_builtin_cosh (loc, arg0, type, fndecl);
CASE_FLT_FN (BUILT_IN_TANH):
if (validate_arg (arg0, REAL_TYPE))
break;
CASE_FLT_FN (BUILT_IN_EXP):
- return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
+ return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
CASE_FLT_FN (BUILT_IN_EXP2):
- return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
+ return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
CASE_FLT_FN (BUILT_IN_EXP10):
CASE_FLT_FN (BUILT_IN_POW10):
- return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
+ return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
CASE_FLT_FN (BUILT_IN_EXPM1):
if (validate_arg (arg0, REAL_TYPE))
break;
CASE_FLT_FN (BUILT_IN_LOG):
- return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
+ return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
CASE_FLT_FN (BUILT_IN_LOG2):
- return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
+ return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
CASE_FLT_FN (BUILT_IN_LOG10):
- return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
+ return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
CASE_FLT_FN (BUILT_IN_LOG1P):
if (validate_arg (arg0, REAL_TYPE))
return fold_builtin_nan (arg0, type, false);
CASE_FLT_FN (BUILT_IN_FLOOR):
- return fold_builtin_floor (fndecl, arg0);
+ return fold_builtin_floor (loc, fndecl, arg0);
CASE_FLT_FN (BUILT_IN_CEIL):
- return fold_builtin_ceil (fndecl, arg0);
+ return fold_builtin_ceil (loc, fndecl, arg0);
CASE_FLT_FN (BUILT_IN_TRUNC):
- return fold_builtin_trunc (fndecl, arg0);
+ return fold_builtin_trunc (loc, fndecl, arg0);
CASE_FLT_FN (BUILT_IN_ROUND):
- return fold_builtin_round (fndecl, arg0);
+ return fold_builtin_round (loc, fndecl, arg0);
CASE_FLT_FN (BUILT_IN_NEARBYINT):
CASE_FLT_FN (BUILT_IN_RINT):
- return fold_trunc_transparent_mathfn (fndecl, arg0);
+ return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
CASE_FLT_FN (BUILT_IN_LCEIL):
CASE_FLT_FN (BUILT_IN_LLCEIL):
CASE_FLT_FN (BUILT_IN_LLFLOOR):
CASE_FLT_FN (BUILT_IN_LROUND):
CASE_FLT_FN (BUILT_IN_LLROUND):
- return fold_builtin_int_roundingfn (fndecl, arg0);
+ return fold_builtin_int_roundingfn (loc, fndecl, arg0);
CASE_FLT_FN (BUILT_IN_LRINT):
CASE_FLT_FN (BUILT_IN_LLRINT):
- return fold_fixed_mathfn (fndecl, arg0);
+ return fold_fixed_mathfn (loc, fndecl, arg0);
case BUILT_IN_BSWAP32:
case BUILT_IN_BSWAP64:
return fold_builtin_bitop (fndecl, arg0);
CASE_FLT_FN (BUILT_IN_SIGNBIT):
- return fold_builtin_signbit (arg0, type);
+ return fold_builtin_signbit (loc, arg0, type);
CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
- return fold_builtin_significand (arg0, type);
+ return fold_builtin_significand (loc, arg0, type);
CASE_FLT_FN (BUILT_IN_ILOGB):
CASE_FLT_FN (BUILT_IN_LOGB):
- return fold_builtin_logb (arg0, type);
+ return fold_builtin_logb (loc, arg0, type);
case BUILT_IN_ISASCII:
- return fold_builtin_isascii (arg0);
+ return fold_builtin_isascii (loc, arg0);
case BUILT_IN_TOASCII:
- return fold_builtin_toascii (arg0);
+ return fold_builtin_toascii (loc, arg0);
case BUILT_IN_ISDIGIT:
- return fold_builtin_isdigit (arg0);
+ return fold_builtin_isdigit (loc, arg0);
CASE_FLT_FN (BUILT_IN_FINITE):
case BUILT_IN_FINITED32:
case BUILT_IN_FINITED64:
case BUILT_IN_FINITED128:
case BUILT_IN_ISFINITE:
- return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
+ return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
CASE_FLT_FN (BUILT_IN_ISINF):
case BUILT_IN_ISINFD32:
case BUILT_IN_ISINFD64:
case BUILT_IN_ISINFD128:
- return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
+ return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
case BUILT_IN_ISINF_SIGN:
- return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
+ return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
CASE_FLT_FN (BUILT_IN_ISNAN):
case BUILT_IN_ISNAND32:
case BUILT_IN_ISNAND64:
case BUILT_IN_ISNAND128:
- return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
+ return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
case BUILT_IN_PRINTF:
case BUILT_IN_PRINTF_UNLOCKED:
case BUILT_IN_VPRINTF:
- return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
+ return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
default:
break;
function returns NULL_TREE if no simplification was possible. */
static tree
-fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
+fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
{
tree type = TREE_TYPE (TREE_TYPE (fndecl));
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
break;
CASE_FLT_FN (BUILT_IN_HYPOT):
- return fold_builtin_hypot (fndecl, arg0, arg1, type);
+ return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
#ifdef HAVE_mpc_pow
CASE_FLT_FN (BUILT_IN_CPOW):
#endif
CASE_FLT_FN (BUILT_IN_LDEXP):
- return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
+ return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
CASE_FLT_FN (BUILT_IN_SCALBN):
CASE_FLT_FN (BUILT_IN_SCALBLN):
- return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
+ return fold_builtin_load_exponent (loc, arg0, arg1,
+ type, /*ldexp=*/false);
CASE_FLT_FN (BUILT_IN_FREXP):
- return fold_builtin_frexp (arg0, arg1, type);
+ return fold_builtin_frexp (loc, arg0, arg1, type);
CASE_FLT_FN (BUILT_IN_MODF):
- return fold_builtin_modf (arg0, arg1, type);
+ return fold_builtin_modf (loc, arg0, arg1, type);
case BUILT_IN_BZERO:
- return fold_builtin_bzero (arg0, arg1, ignore);
+ return fold_builtin_bzero (loc, arg0, arg1, ignore);
case BUILT_IN_FPUTS:
- return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
+ return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
case BUILT_IN_FPUTS_UNLOCKED:
- return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
+ return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
case BUILT_IN_STRSTR:
- return fold_builtin_strstr (arg0, arg1, type);
+ return fold_builtin_strstr (loc, arg0, arg1, type);
case BUILT_IN_STRCAT:
- return fold_builtin_strcat (arg0, arg1);
+ return fold_builtin_strcat (loc, arg0, arg1);
case BUILT_IN_STRSPN:
- return fold_builtin_strspn (arg0, arg1);
+ return fold_builtin_strspn (loc, arg0, arg1);
case BUILT_IN_STRCSPN:
- return fold_builtin_strcspn (arg0, arg1);
+ return fold_builtin_strcspn (loc, arg0, arg1);
case BUILT_IN_STRCHR:
case BUILT_IN_INDEX:
- return fold_builtin_strchr (arg0, arg1, type);
+ return fold_builtin_strchr (loc, arg0, arg1, type);
case BUILT_IN_STRRCHR:
case BUILT_IN_RINDEX:
- return fold_builtin_strrchr (arg0, arg1, type);
+ return fold_builtin_strrchr (loc, arg0, arg1, type);
case BUILT_IN_STRCPY:
- return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
+ return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
case BUILT_IN_STPCPY:
if (ignore)
if (!fn)
break;
- return build_call_expr (fn, 2, arg0, arg1);
+ return build_call_expr_loc (loc, fn, 2, arg0, arg1);
}
break;
case BUILT_IN_STRCMP:
- return fold_builtin_strcmp (arg0, arg1);
+ return fold_builtin_strcmp (loc, arg0, arg1);
case BUILT_IN_STRPBRK:
- return fold_builtin_strpbrk (arg0, arg1, type);
+ return fold_builtin_strpbrk (loc, arg0, arg1, type);
case BUILT_IN_EXPECT:
- return fold_builtin_expect (arg0, arg1);
+ return fold_builtin_expect (loc, arg0, arg1);
CASE_FLT_FN (BUILT_IN_POW):
- return fold_builtin_pow (fndecl, arg0, arg1, type);
+ return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
CASE_FLT_FN (BUILT_IN_POWI):
- return fold_builtin_powi (fndecl, arg0, arg1, type);
+ return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
CASE_FLT_FN (BUILT_IN_COPYSIGN):
- return fold_builtin_copysign (fndecl, arg0, arg1, type);
+ return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
CASE_FLT_FN (BUILT_IN_FMIN):
- return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
+ return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
CASE_FLT_FN (BUILT_IN_FMAX):
- return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
+ return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
case BUILT_IN_ISGREATER:
- return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
+ return fold_builtin_unordered_cmp (loc, fndecl,
+ arg0, arg1, UNLE_EXPR, LE_EXPR);
case BUILT_IN_ISGREATEREQUAL:
- return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
+ return fold_builtin_unordered_cmp (loc, fndecl,
+ arg0, arg1, UNLT_EXPR, LT_EXPR);
case BUILT_IN_ISLESS:
- return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
+ return fold_builtin_unordered_cmp (loc, fndecl,
+ arg0, arg1, UNGE_EXPR, GE_EXPR);
case BUILT_IN_ISLESSEQUAL:
- return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
+ return fold_builtin_unordered_cmp (loc, fndecl,
+ arg0, arg1, UNGT_EXPR, GT_EXPR);
case BUILT_IN_ISLESSGREATER:
- return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
+ return fold_builtin_unordered_cmp (loc, fndecl,
+ arg0, arg1, UNEQ_EXPR, EQ_EXPR);
case BUILT_IN_ISUNORDERED:
- return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
+ return fold_builtin_unordered_cmp (loc, fndecl,
+ arg0, arg1, UNORDERED_EXPR,
NOP_EXPR);
/* We do the folding for va_start in the expander. */
break;
case BUILT_IN_SPRINTF:
- return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
+ return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
case BUILT_IN_OBJECT_SIZE:
return fold_builtin_object_size (arg0, arg1);
case BUILT_IN_PRINTF:
case BUILT_IN_PRINTF_UNLOCKED:
case BUILT_IN_VPRINTF:
- return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
+ return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
case BUILT_IN_PRINTF_CHK:
case BUILT_IN_VPRINTF_CHK:
|| TREE_SIDE_EFFECTS (arg0))
return NULL_TREE;
else
- return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
+ return fold_builtin_printf (loc, fndecl,
+ arg1, NULL_TREE, ignore, fcode);
break;
case BUILT_IN_FPRINTF:
case BUILT_IN_FPRINTF_UNLOCKED:
case BUILT_IN_VFPRINTF:
- return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
+ return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
ignore, fcode);
default:
This function returns NULL_TREE if no simplification was possible. */
static tree
-fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
+fold_builtin_3 (location_t loc, tree fndecl,
+ tree arg0, tree arg1, tree arg2, bool ignore)
{
tree type = TREE_TYPE (TREE_TYPE (fndecl));
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
{
CASE_FLT_FN (BUILT_IN_SINCOS):
- return fold_builtin_sincos (arg0, arg1, arg2);
+ return fold_builtin_sincos (loc, arg0, arg1, arg2);
CASE_FLT_FN (BUILT_IN_FMA):
if (validate_arg (arg0, REAL_TYPE)
break;
case BUILT_IN_MEMSET:
- return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
+ return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
case BUILT_IN_BCOPY:
- return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
+ return fold_builtin_memory_op (loc, arg1, arg0, arg2,
+ void_type_node, true, /*endp=*/3);
case BUILT_IN_MEMCPY:
- return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
+ return fold_builtin_memory_op (loc, arg0, arg1, arg2,
+ type, ignore, /*endp=*/0);
case BUILT_IN_MEMPCPY:
- return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
+ return fold_builtin_memory_op (loc, arg0, arg1, arg2,
+ type, ignore, /*endp=*/1);
case BUILT_IN_MEMMOVE:
- return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
+ return fold_builtin_memory_op (loc, arg0, arg1, arg2,
+ type, ignore, /*endp=*/3);
case BUILT_IN_STRNCAT:
- return fold_builtin_strncat (arg0, arg1, arg2);
+ return fold_builtin_strncat (loc, arg0, arg1, arg2);
case BUILT_IN_STRNCPY:
- return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
+ return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
case BUILT_IN_STRNCMP:
- return fold_builtin_strncmp (arg0, arg1, arg2);
+ return fold_builtin_strncmp (loc, arg0, arg1, arg2);
case BUILT_IN_MEMCHR:
- return fold_builtin_memchr (arg0, arg1, arg2, type);
+ return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
case BUILT_IN_BCMP:
case BUILT_IN_MEMCMP:
- return fold_builtin_memcmp (arg0, arg1, arg2);;
+ return fold_builtin_memcmp (loc, arg0, arg1, arg2);
case BUILT_IN_SPRINTF:
- return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
+ return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
case BUILT_IN_STRCPY_CHK:
case BUILT_IN_STPCPY_CHK:
- return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
+ return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
ignore, fcode);
case BUILT_IN_STRCAT_CHK:
- return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
+ return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
case BUILT_IN_PRINTF_CHK:
case BUILT_IN_VPRINTF_CHK:
|| TREE_SIDE_EFFECTS (arg0))
return NULL_TREE;
else
- return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
+ return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
break;
case BUILT_IN_FPRINTF:
case BUILT_IN_FPRINTF_UNLOCKED:
case BUILT_IN_VFPRINTF:
- return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
+ return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
+ ignore, fcode);
case BUILT_IN_FPRINTF_CHK:
case BUILT_IN_VFPRINTF_CHK:
|| TREE_SIDE_EFFECTS (arg1))
return NULL_TREE;
else
- return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
+ return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
ignore, fcode);
default:
possible. */
static tree
-fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
- bool ignore)
+fold_builtin_4 (location_t loc, tree fndecl,
+ tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
{
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
case BUILT_IN_MEMPCPY_CHK:
case BUILT_IN_MEMMOVE_CHK:
case BUILT_IN_MEMSET_CHK:
- return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
+ return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
NULL_TREE, ignore,
DECL_FUNCTION_CODE (fndecl));
case BUILT_IN_STRNCPY_CHK:
- return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
+ return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
case BUILT_IN_STRNCAT_CHK:
- return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
+ return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
case BUILT_IN_FPRINTF_CHK:
case BUILT_IN_VFPRINTF_CHK:
|| TREE_SIDE_EFFECTS (arg1))
return NULL_TREE;
else
- return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
+ return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
ignore, fcode);
break;
#define MAX_ARGS_TO_FOLD_BUILTIN 4
static tree
-fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
+fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
{
tree ret = NULL_TREE;
switch (nargs)
{
case 0:
- ret = fold_builtin_0 (fndecl, ignore);
+ ret = fold_builtin_0 (loc, fndecl, ignore);
break;
case 1:
- ret = fold_builtin_1 (fndecl, args[0], ignore);
+ ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
break;
case 2:
- ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
+ ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
break;
case 3:
- ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
+ ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
break;
case 4:
- ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
+ ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
ignore);
break;
default:
if (ret)
{
ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
+ SET_EXPR_LOCATION (ret, loc);
TREE_NO_WARNING (ret) = 1;
return ret;
}
result of the function call is ignored. */
static tree
-fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
+fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
+ bool ignore ATTRIBUTE_UNUSED)
{
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
tree ret = NULL_TREE;
{
case BUILT_IN_SPRINTF_CHK:
case BUILT_IN_VSPRINTF_CHK:
- ret = fold_builtin_sprintf_chk (exp, fcode);
+ ret = fold_builtin_sprintf_chk (loc, exp, fcode);
break;
case BUILT_IN_SNPRINTF_CHK:
case BUILT_IN_VSNPRINTF_CHK:
- ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
+ ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
break;
case BUILT_IN_FPCLASSIFY:
- ret = fold_builtin_fpclassify (exp);
+ ret = fold_builtin_fpclassify (loc, exp);
break;
default:
if (ret)
{
ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
+ SET_EXPR_LOCATION (ret, loc);
TREE_NO_WARNING (ret) = 1;
return ret;
}
call node earlier than the warning is generated. */
tree
-fold_call_expr (tree exp, bool ignore)
+fold_call_expr (location_t loc, tree exp, bool ignore)
{
tree ret = NULL_TREE;
tree fndecl = get_callee_fndecl (exp);
if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
{
tree *args = CALL_EXPR_ARGP (exp);
- ret = fold_builtin_n (fndecl, args, nargs, ignore);
+ ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
}
if (!ret)
- ret = fold_builtin_varargs (fndecl, exp, ignore);
+ ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
if (ret)
- {
- /* Propagate location information from original call to
- expansion of builtin. Otherwise things like
- maybe_emit_chk_warning, that operate on the expansion
- of a builtin, will use the wrong location information. */
- if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
- {
- tree realret = ret;
- if (TREE_CODE (ret) == NOP_EXPR)
- realret = TREE_OPERAND (ret, 0);
- if (CAN_HAVE_LOCATION_P (realret)
- && !EXPR_HAS_LOCATION (realret))
- SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
- }
- return ret;
- }
+ return ret;
}
}
return NULL_TREE;
function to be called and ARGLIST is a TREE_LIST of arguments. */
tree
-build_function_call_expr (tree fndecl, tree arglist)
+build_function_call_expr (location_t loc, tree fndecl, tree arglist)
{
tree fntype = TREE_TYPE (fndecl);
tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
int n = list_length (arglist);
tree *argarray = (tree *) alloca (n * sizeof (tree));
int i;
-
+
for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
argarray[i] = TREE_VALUE (arglist);
- return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
+ return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
}
/* Conveniently construct a function call expression. FNDECL names the
parameters are the argument expressions. */
tree
-build_call_expr (tree fndecl, int n, ...)
+build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
{
va_list ap;
tree fntype = TREE_TYPE (fndecl);
for (i = 0; i < n; i++)
argarray[i] = va_arg (ap, tree);
va_end (ap);
- return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
+ return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
}
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
N arguments are passed in the array ARGARRAY. */
tree
-fold_builtin_call_array (tree type,
+fold_builtin_call_array (location_t loc, tree type,
tree fn,
int n,
tree *argarray)
&& TREE_CODE (fndecl2) == FUNCTION_DECL
&& DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
&& DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
- return build_call_array (type, fn, n, argarray);
+ return build_call_array_loc (loc, type, fn, n, argarray);
}
if (avoid_folding_inline_builtin (fndecl))
- return build_call_array (type, fn, n, argarray);
+ return build_call_array_loc (loc, type, fn, n, argarray);
if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
{
tree arglist = NULL_TREE;
ret = targetm.fold_builtin (fndecl, arglist, false);
if (ret)
return ret;
- return build_call_array (type, fn, n, argarray);
+ return build_call_array_loc (loc, type, fn, n, argarray);
}
else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
{
/* First try the transformations that don't require consing up
an exp. */
- ret = fold_builtin_n (fndecl, argarray, n, false);
+ ret = fold_builtin_n (loc, fndecl, argarray, n, false);
if (ret)
return ret;
}
/* If we got this far, we need to build an exp. */
- exp = build_call_array (type, fn, n, argarray);
- ret = fold_builtin_varargs (fndecl, exp, false);
+ exp = build_call_array_loc (loc, type, fn, n, argarray);
+ ret = fold_builtin_varargs (loc, fndecl, exp, false);
return ret ? ret : exp;
}
}
- return build_call_array (type, fn, n, argarray);
+ return build_call_array_loc (loc, type, fn, n, argarray);
}
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
to do varargs-to-varargs transformations. */
static tree
-rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
+rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
int oldnargs = call_expr_nargs (exp);
int nargs = oldnargs - skip + n;
else
buffer = CALL_EXPR_ARGP (exp) + skip;
- return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
+ return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
}
/* Validate a single argument ARG against a tree code CODE representing
form of the builtin function call. */
static tree
-fold_builtin_strstr (tree s1, tree s2, tree type)
+fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
{
if (!validate_arg (s1, POINTER_TYPE)
|| !validate_arg (s2, POINTER_TYPE))
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
+ tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
s1, size_int (r - p1));
- return fold_convert (type, tem);
+ return fold_convert_loc (loc, type, tem);
}
/* The argument is const char *, and the result is char *, so we need
a type conversion here to avoid a warning. */
if (p2[0] == '\0')
- return fold_convert (type, s1);
+ return fold_convert_loc (loc, type, s1);
if (p2[1] != '\0')
return NULL_TREE;
/* New argument list transforming strstr(s1, s2) to
strchr(s1, s2[0]). */
- return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
+ return build_call_expr_loc (loc, fn, 2, s1,
+ build_int_cst (NULL_TREE, p2[0]));
}
}
form of the builtin function call. */
static tree
-fold_builtin_strchr (tree s1, tree s2, tree type)
+fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
{
if (!validate_arg (s1, POINTER_TYPE)
|| !validate_arg (s2, INTEGER_TYPE))
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
+ tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
s1, size_int (r - p1));
- return fold_convert (type, tem);
+ return fold_convert_loc (loc, type, tem);
}
return NULL_TREE;
}
form of the builtin function call. */
static tree
-fold_builtin_strrchr (tree s1, tree s2, tree type)
+fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
if (!validate_arg (s1, POINTER_TYPE)
|| !validate_arg (s2, INTEGER_TYPE))
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
+ tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
s1, size_int (r - p1));
- return fold_convert (type, tem);
+ return fold_convert_loc (loc, type, tem);
}
if (! integer_zerop (s2))
return NULL_TREE;
/* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
- return build_call_expr (fn, 2, s1, s2);
+ return build_call_expr_loc (loc, fn, 2, s1, s2);
}
}
form of the builtin function call. */
static tree
-fold_builtin_strpbrk (tree s1, tree s2, tree type)
+fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
if (!validate_arg (s1, POINTER_TYPE)
|| !validate_arg (s2, POINTER_TYPE))
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
+ tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
s1, size_int (r - p1));
- return fold_convert (type, tem);
+ return fold_convert_loc (loc, type, tem);
}
if (p2[0] == '\0')
/* strpbrk(x, "") == NULL.
Evaluate and ignore s1 in case it had side-effects. */
- return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
+ return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
if (p2[1] != '\0')
return NULL_TREE; /* Really call strpbrk. */
/* New argument list transforming strpbrk(s1, s2) to
strchr(s1, s2[0]). */
- return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
+ return build_call_expr_loc (loc, fn, 2, s1,
+ build_int_cst (NULL_TREE, p2[0]));
}
}
form of the builtin function call. */
static tree
-fold_builtin_strcat (tree dst, tree src)
+fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
{
if (!validate_arg (dst, POINTER_TYPE)
|| !validate_arg (src, POINTER_TYPE))
form of the builtin function call. */
static tree
-fold_builtin_strncat (tree dst, tree src, tree len)
+fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
{
if (!validate_arg (dst, POINTER_TYPE)
|| !validate_arg (src, POINTER_TYPE)
/* If the requested length is zero, or the src parameter string
length is zero, return the dst parameter. */
if (integer_zerop (len) || (p && *p == '\0'))
- return omit_two_operands (TREE_TYPE (dst), dst, src, len);
+ return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
/* If the requested len is greater than or equal to the string
length, call strcat. */
if (!fn)
return NULL_TREE;
- return build_call_expr (fn, 2, dst, src);
+ return build_call_expr_loc (loc, fn, 2, dst, src);
}
return NULL_TREE;
}
form of the builtin function call. */
static tree
-fold_builtin_strspn (tree s1, tree s2)
+fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
if (!validate_arg (s1, POINTER_TYPE)
|| !validate_arg (s2, POINTER_TYPE))
if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
/* Evaluate and ignore both arguments in case either one has
side-effects. */
- return omit_two_operands (size_type_node, size_zero_node,
+ return omit_two_operands_loc (loc, size_type_node, size_zero_node,
s1, s2);
return NULL_TREE;
}
form of the builtin function call. */
static tree
-fold_builtin_strcspn (tree s1, tree s2)
+fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
if (!validate_arg (s1, POINTER_TYPE)
|| !validate_arg (s2, POINTER_TYPE))
{
/* Evaluate and ignore argument s2 in case it has
side-effects. */
- return omit_one_operand (size_type_node,
+ return omit_one_operand_loc (loc, size_type_node,
size_zero_node, s2);
}
if (!fn)
return NULL_TREE;
- return build_call_expr (fn, 1, s1);
+ return build_call_expr_loc (loc, fn, 1, s1);
}
return NULL_TREE;
}
was possible. */
tree
-fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
+fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
+ bool ignore, bool unlocked, tree len)
{
/* If we're using an unlocked function, assume the other unlocked
functions exist explicitly. */
switch (compare_tree_int (len, 1))
{
case -1: /* length is 0, delete the call entirely . */
- return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
+ return omit_one_operand_loc (loc, integer_type_node,
+ integer_zero_node, arg1);
case 0: /* length is 1, call fputc. */
{
if (p != NULL)
{
if (fn_fputc)
- return build_call_expr (fn_fputc, 2,
+ return build_call_expr_loc (loc, fn_fputc, 2,
build_int_cst (NULL_TREE, p[0]), arg1);
else
return NULL_TREE;
/* New argument list transforming fputs(string, stream) to
fwrite(string, 1, len, stream). */
if (fn_fwrite)
- return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
+ return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
+ size_one_node, len, arg1);
else
return NULL_TREE;
}
the caller does not use the returned value of the function. */
static tree
-fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
+fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
+ tree orig, int ignored)
{
tree call, retval;
const char *fmt_str = NULL;
/* Convert sprintf (str, fmt) into strcpy (str, fmt) when
'format' is known to contain no % formats. */
- call = build_call_expr (fn, 2, dest, fmt);
+ call = build_call_expr_loc (loc, fn, 2, dest, fmt);
if (!ignored)
retval = build_int_cst (NULL_TREE, strlen (fmt_str));
}
if (!retval || TREE_CODE (retval) != INTEGER_CST)
return NULL_TREE;
}
- call = build_call_expr (fn, 2, dest, orig);
+ call = build_call_expr_loc (loc, fn, 2, dest, orig);
}
if (call && retval)
{
- retval = fold_convert
- (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
+ retval = fold_convert_loc
+ (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
retval);
return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
}
passed as third argument. */
tree
-fold_builtin_memory_chk (tree fndecl,
+fold_builtin_memory_chk (location_t loc, tree fndecl,
tree dest, tree src, tree len, tree size,
tree maxlen, bool ignore,
enum built_in_function fcode)
if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
{
if (fcode != BUILT_IN_MEMPCPY_CHK)
- return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
+ return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
+ dest, len);
else
{
- tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
- return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
+ tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
+ dest, len);
+ return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
}
}
if (!fn)
return NULL_TREE;
- return build_call_expr (fn, 4, dest, src, len, size);
+ return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
}
return NULL_TREE;
}
if (!fn)
return NULL_TREE;
- return build_call_expr (fn, 3, dest, src, len);
+ return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
/* Fold a call to the __st[rp]cpy_chk builtin.
strings passed as second argument. */
tree
-fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
+fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
+ tree src, tree size,
tree maxlen, bool ignore,
enum built_in_function fcode)
{
/* If SRC and DEST are the same (and not volatile), return DEST. */
if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
- return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
+ return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
if (! host_integerp (size, 1))
return NULL_TREE;
if (!fn)
return NULL_TREE;
- return build_call_expr (fn, 3, dest, src, size);
+ return build_call_expr_loc (loc, fn, 3, dest, src, size);
}
if (! len || TREE_SIDE_EFFECTS (len))
if (!fn)
return NULL_TREE;
- len = size_binop (PLUS_EXPR, len, ssize_int (1));
- return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
- build_call_expr (fn, 4,
- dest, src, len, size));
+ len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
+ return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
+ build_call_expr_loc (loc, fn, 4,
+ dest, src, len, size));
}
}
else
if (!fn)
return NULL_TREE;
- return build_call_expr (fn, 2, dest, src);
+ return build_call_expr_loc (loc, fn, 2, dest, src);
}
/* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
length passed as third argument. */
tree
-fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
- tree maxlen)
+fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
+ tree len, tree size, tree maxlen)
{
tree fn;
if (!fn)
return NULL_TREE;
- return build_call_expr (fn, 3, dest, src, len);
+ return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
are the arguments to the call. */
static tree
-fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
+fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
+ tree src, tree size)
{
tree fn;
const char *p;
p = c_getstr (src);
/* If the SRC parameter is "", return DEST. */
if (p && *p == '\0')
- return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
+ return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
+ dest, src);
if (! host_integerp (size, 1) || ! integer_all_onesp (size))
return NULL_TREE;
if (!fn)
return NULL_TREE;
- return build_call_expr (fn, 2, dest, src);
+ return build_call_expr_loc (loc, fn, 2, dest, src);
}
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
LEN, and SIZE. */
static tree
-fold_builtin_strncat_chk (tree fndecl,
+fold_builtin_strncat_chk (location_t loc, tree fndecl,
tree dest, tree src, tree len, tree size)
{
tree fn;
p = c_getstr (src);
/* If the SRC parameter is "" or if LEN is 0, return DEST. */
if (p && *p == '\0')
- return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
+ return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
+ dest, len);
else if (integer_zerop (len))
- return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
+ return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
+ dest, src);
if (! host_integerp (size, 1))
return NULL_TREE;
if (!fn)
return NULL_TREE;
- return build_call_expr (fn, 3, dest, src, size);
+ return build_call_expr_loc (loc, fn, 3, dest, src, size);
}
return NULL_TREE;
}
if (!fn)
return NULL_TREE;
- return build_call_expr (fn, 3, dest, src, len);
+ return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
/* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
static tree
-fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
+fold_builtin_sprintf_chk (location_t loc, tree exp,
+ enum built_in_function fcode)
{
tree dest, size, len, fn, fmt, flag;
const char *fmt_str;
if (!fn)
return NULL_TREE;
- return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
+ return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
}
/* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
passed as second argument. */
tree
-fold_builtin_snprintf_chk (tree exp, tree maxlen,
+fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
enum built_in_function fcode)
{
tree dest, size, len, fn, fmt, flag;
if (!fn)
return NULL_TREE;
- return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
+ return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
}
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
code of the function to be simplified. */
static tree
-fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
+fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
+ tree arg, bool ignore,
enum built_in_function fcode)
{
tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
function. */
newarg = build_int_cst (NULL_TREE, str[0]);
if (fn_putchar)
- call = build_call_expr (fn_putchar, 1, newarg);
+ call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
}
else
{
newarg = build_string_literal (len, newstr);
if (fn_puts)
- call = build_call_expr (fn_puts, 1, newarg);
+ call = build_call_expr_loc (loc, fn_puts, 1, newarg);
}
else
/* We'd like to arrange to call fputs(string,stdout) here,
if (!arg || !validate_arg (arg, POINTER_TYPE))
return NULL_TREE;
if (fn_puts)
- call = build_call_expr (fn_puts, 1, arg);
+ call = build_call_expr_loc (loc, fn_puts, 1, arg);
}
/* If the format specifier was "%c", call __builtin_putchar(arg). */
if (!arg || !validate_arg (arg, INTEGER_TYPE))
return NULL_TREE;
if (fn_putchar)
- call = build_call_expr (fn_putchar, 1, arg);
+ call = build_call_expr_loc (loc, fn_putchar, 1, arg);
}
if (!call)
return NULL_TREE;
- return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
+ return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
code of the function to be simplified. */
static tree
-fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
+fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
+ tree fmt, tree arg, bool ignore,
enum built_in_function fcode)
{
tree fn_fputc, fn_fputs, call = NULL_TREE;
fprintf (fp, string) with fputs (string, fp). The fputs
builtin will take care of special cases like length == 1. */
if (fn_fputs)
- call = build_call_expr (fn_fputs, 2, fmt, fp);
+ call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
}
/* The other optimizations can be done only on the non-va_list variants. */
if (!arg || !validate_arg (arg, POINTER_TYPE))
return NULL_TREE;
if (fn_fputs)
- call = build_call_expr (fn_fputs, 2, arg, fp);
+ call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
}
/* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
if (!arg || !validate_arg (arg, INTEGER_TYPE))
return NULL_TREE;
if (fn_fputc)
- call = build_call_expr (fn_fputc, 2, arg, fp);
+ call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
}
if (!call)
return NULL_TREE;
- return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
+ return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
/* Initialize format string characters in the target charset. */
tree *buffer;
int i, j;
va_list ap;
+ location_t loc = gimple_location (stmt);
buffer = XALLOCAVEC (tree, nargs);
va_start (ap, n);
for (j = skip; j < oldnargs; j++, i++)
buffer[i] = gimple_call_arg (stmt, j);
- return fold (build_call_array (TREE_TYPE (fntype), fn, nargs, buffer));
+ return fold (build_call_array_loc (loc, TREE_TYPE (fntype),
+ fn, nargs, buffer));
}
/* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
result of the function call is ignored. */
static tree
-gimple_fold_builtin_varargs (tree fndecl, gimple stmt, bool ignore ATTRIBUTE_UNUSED)
+gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
+ bool ignore ATTRIBUTE_UNUSED)
{
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
tree ret = NULL_TREE;
{
tree ret = NULL_TREE;
tree fndecl = gimple_call_fndecl (stmt);
+ location_t loc = gimple_location (stmt);
if (fndecl
&& TREE_CODE (fndecl) == FUNCTION_DECL
&& DECL_BUILT_IN (fndecl)
int i;
for (i = 0; i < nargs; i++)
args[i] = gimple_call_arg (stmt, i);
- ret = fold_builtin_n (fndecl, args, nargs, ignore);
+ ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
}
if (!ret)
ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
realret = TREE_OPERAND (ret, 0);
if (CAN_HAVE_LOCATION_P (realret)
&& !EXPR_HAS_LOCATION (realret))
- SET_EXPR_LOCATION (realret, gimple_location (stmt));
+ SET_EXPR_LOCATION (realret, loc);
return realret;
}
return ret;
tree eptype = NULL_TREE;
bool dummy = true;
bool maybe_const_itself = true;
+ location_t loc = EXPR_LOCATION (expr);
/* This function is not relevant to C++ because C++ folds while
parsing, and may need changes to be correct for C++ when C++
ret = c_fully_fold_internal (expr, in_init, maybe_const,
&maybe_const_itself);
if (eptype)
- ret = fold_convert (eptype, ret);
+ ret = fold_convert_loc (loc, eptype, ret);
*maybe_const &= maybe_const_itself;
return ret;
}
op1 = decl_constant_value_for_optimization (op1);
if (op0 != orig_op0 || op1 != orig_op1 || in_init)
ret = in_init
- ? fold_build2_initializer (code, TREE_TYPE (expr), op0, op1)
- : fold_build2 (code, TREE_TYPE (expr), op0, op1);
+ ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
+ : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
else
ret = fold (expr);
goto out;
op0 = decl_constant_value_for_optimization (op0);
if (op0 != orig_op0 || in_init)
ret = in_init
- ? fold_build1_initializer (code, TREE_TYPE (expr), op0)
- : fold_build1 (code, TREE_TYPE (expr), op0);
+ ? fold_build1_initializer_loc (loc, code, TREE_TYPE (expr), op0)
+ : fold_build1_loc (loc, code, TREE_TYPE (expr), op0);
else
ret = fold (expr);
if (code == INDIRECT_REF
op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self);
if (op0 != orig_op0 || op1 != orig_op1 || in_init)
ret = in_init
- ? fold_build2_initializer (code, TREE_TYPE (expr), op0, op1)
- : fold_build2 (code, TREE_TYPE (expr), op0, op1);
+ ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
+ : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
else
ret = fold (expr);
*maybe_const_operands &= op0_const;
op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self);
op2 = c_fully_fold_internal (op2, in_init, &op2_const, &op2_const_self);
if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
- ret = fold_build3 (code, TREE_TYPE (expr), op0, op1, op2);
+ ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
else
ret = fold (expr);
*maybe_const_operands &= op0_const;
if (lhs && rhs && operand_equal_p (lhs, rhs, 0)
&& merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
in1_p, low1, high1)
- && 0 != (tem = build_range_check (type, lhs, in_p, low, high)))
+ && 0 != (tem = build_range_check (UNKNOWN_LOCATION,
+ type, lhs, in_p, low, high)))
{
if (TREE_CODE (tem) != INTEGER_CST)
return;
of pointer PTROP and integer INTOP. */
tree
-pointer_int_sum (enum tree_code resultcode, tree ptrop, tree intop)
+pointer_int_sum (location_t loc, enum tree_code resultcode,
+ tree ptrop, tree intop)
{
tree size_exp, ret;
if (TREE_CODE (TREE_TYPE (result_type)) == VOID_TYPE)
{
- pedwarn (input_location, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
+ pedwarn (loc, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
"pointer of type %<void *%> used in arithmetic");
size_exp = integer_one_node;
}
else if (TREE_CODE (TREE_TYPE (result_type)) == FUNCTION_TYPE)
{
- pedwarn (input_location, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
+ pedwarn (loc, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
"pointer to a function used in arithmetic");
size_exp = integer_one_node;
}
else if (TREE_CODE (TREE_TYPE (result_type)) == METHOD_TYPE)
{
- pedwarn (input_location, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
+ pedwarn (loc, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
"pointer to member function used in arithmetic");
size_exp = integer_one_node;
}
Do this multiplication as signed, then convert to the appropriate
type for the pointer operation. */
intop = convert (sizetype,
- build_binary_op (EXPR_LOCATION (intop),
+ build_binary_op (loc,
MULT_EXPR, intop,
convert (TREE_TYPE (intop), size_exp), 1));
/* Create the sum or difference. */
if (resultcode == MINUS_EXPR)
- intop = fold_build1 (NEGATE_EXPR, sizetype, intop);
+ intop = fold_build1_loc (loc, NEGATE_EXPR, sizetype, intop);
- ret = fold_build2 (POINTER_PLUS_EXPR, result_type, ptrop, intop);
+ ret = fold_build2_loc (loc, POINTER_PLUS_EXPR, result_type, ptrop, intop);
fold_undefer_and_ignore_overflow_warnings ();
/* Distribute the conversion into the arms of a COND_EXPR. */
if (c_dialect_cxx ())
{
- expr = fold_build3 (COND_EXPR, truthvalue_type_node,
+ expr = fold_build3_loc (location, COND_EXPR, truthvalue_type_node,
TREE_OPERAND (expr, 0),
c_common_truthvalue_conversion (location,
TREE_OPERAND (expr,
{
if (is_sizeof)
/* Convert in case a char is more than one unit. */
- value = size_binop (CEIL_DIV_EXPR, TYPE_SIZE_UNIT (type),
- size_int (TYPE_PRECISION (char_type_node)
- / BITS_PER_UNIT));
+ value = size_binop_loc (loc, CEIL_DIV_EXPR, TYPE_SIZE_UNIT (type),
+ size_int (TYPE_PRECISION (char_type_node)
+ / BITS_PER_UNIT));
else
value = size_int (TYPE_ALIGN_UNIT (type));
}
TYPE_IS_SIZETYPE means that certain things (like overflow) will
never happen. However, this node should really have type
`size_t', which is just a typedef for an ordinary integer type. */
- value = fold_convert (size_type_node, value);
+ value = fold_convert_loc (loc, size_type_node, value);
gcc_assert (!TYPE_IS_SIZETYPE (TREE_TYPE (value)));
return value;
else
return c_alignof (loc, TREE_TYPE (expr));
- return fold_convert (size_type_node, t);
+ return fold_convert_loc (loc, size_type_node, t);
}
\f
/* Handle C and C++ default attributes. */
break;
case PREDECREMENT_EXPR:
val = build2 (MODIFY_EXPR, TREE_TYPE (arg), arg,
- invert_truthvalue (arg));
+ invert_truthvalue_loc (input_location, arg));
break;
case POSTDECREMENT_EXPR:
val = build2 (MODIFY_EXPR, TREE_TYPE (arg), arg,
- invert_truthvalue (arg));
+ invert_truthvalue_loc (input_location, arg));
arg = save_expr (arg);
val = build2 (COMPOUND_EXPR, TREE_TYPE (arg), val, arg);
val = build2 (COMPOUND_EXPR, TREE_TYPE (arg), arg, val);
"member %qD", t);
return error_mark_node;
}
- off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
- size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)
- / BITS_PER_UNIT));
+ off = size_binop_loc (input_location, PLUS_EXPR, DECL_FIELD_OFFSET (t),
+ size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t),
+ 1)
+ / BITS_PER_UNIT));
break;
case ARRAY_REF:
if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
{
code = MINUS_EXPR;
- t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
+ t = fold_build1_loc (input_location, NEGATE_EXPR, TREE_TYPE (t), t);
}
t = convert (sizetype, t);
off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
bool fold_p = false;
if (VEC_index (constructor_elt, v, 0)->index)
- maxindex = fold_convert (sizetype,
- VEC_index (constructor_elt,
- v, 0)->index);
+ maxindex = fold_convert_loc (input_location, sizetype,
+ VEC_index (constructor_elt,
+ v, 0)->index);
curindex = maxindex;
for (cnt = 1;
{
if (fold_p)
curindex = fold_convert (sizetype, curindex);
- curindex = size_binop (PLUS_EXPR, curindex, size_one_node);
+ curindex = size_binop (PLUS_EXPR, curindex,
+ size_one_node);
}
if (tree_int_cst_lt (maxindex, curindex))
maxindex = curindex, fold_p = curfold_p;
and, if so, perhaps change them both back to their original type. */
extern tree shorten_compare (tree *, tree *, tree *, enum tree_code *);
-extern tree pointer_int_sum (enum tree_code, tree, tree);
+extern tree pointer_int_sum (location_t, enum tree_code, tree, tree);
/* Add qualifiers to a type, in the fashion for C. */
extern tree c_build_qualified_type (tree, int);
enum tree_code code = TREE_CODE (type);
const char *invalid_conv_diag;
tree ret;
+ location_t loc = EXPR_LOCATION (expr);
if (type == error_mark_node
|| expr == error_mark_node
STRIP_TYPE_NOPS (e);
if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (expr)))
- return fold_convert (type, expr);
+ return fold_convert_loc (loc, type, expr);
if (TREE_CODE (TREE_TYPE (expr)) == ERROR_MARK)
return error_mark_node;
if (TREE_CODE (TREE_TYPE (expr)) == VOID_TYPE)
switch (code)
{
case VOID_TYPE:
- return fold_convert (type, e);
+ return fold_convert_loc (loc, type, e);
case INTEGER_TYPE:
case ENUMERAL_TYPE:
goto maybe_fold;
case BOOLEAN_TYPE:
- return fold_convert
- (type, c_objc_common_truthvalue_conversion (input_location, expr));
+ return fold_convert_loc
+ (loc, type, c_objc_common_truthvalue_conversion (input_location, expr));
case POINTER_TYPE:
case REFERENCE_TYPE:
- 1. Do the calculation in index_type, so that
if it is a variable the computations will be
done in the proper mode. */
- itype = fold_build2 (MINUS_EXPR, index_type,
- convert (index_type, size),
- convert (index_type,
- size_one_node));
+ itype = fold_build2_loc (loc, MINUS_EXPR, index_type,
+ convert (index_type, size),
+ convert (index_type,
+ size_one_node));
/* If that overflowed, the array is too big. ???
While a size of INT_MAX+1 technically shouldn't
tree x;
x = built_in_decls[BUILT_IN_GOMP_BARRIER];
- x = build_call_expr (x, 0);
- SET_EXPR_LOCATION (x, loc);
+ x = build_call_expr_loc (loc, x, 0);
add_stmt (x);
}
tree x;
x = built_in_decls[BUILT_IN_GOMP_TASKWAIT];
- x = build_call_expr (x, 0);
- SET_EXPR_LOCATION (x, loc);
+ x = build_call_expr_loc (loc, x, 0);
add_stmt (x);
}
tree x;
x = built_in_decls[BUILT_IN_SYNCHRONIZE];
- x = build_call_expr (x, 0);
- SET_EXPR_LOCATION (x, loc);
+ x = build_call_expr_loc (loc, x, 0);
add_stmt (x);
}
Helper function for c_finish_omp_for. */
static tree
-check_omp_for_incr_expr (tree exp, tree decl)
+check_omp_for_incr_expr (location_t loc, tree exp, tree decl)
{
tree t;
switch (TREE_CODE (exp))
{
CASE_CONVERT:
- t = check_omp_for_incr_expr (TREE_OPERAND (exp, 0), decl);
+ t = check_omp_for_incr_expr (loc, TREE_OPERAND (exp, 0), decl);
if (t != error_mark_node)
- return fold_convert (TREE_TYPE (exp), t);
+ return fold_convert_loc (loc, TREE_TYPE (exp), t);
break;
case MINUS_EXPR:
- t = check_omp_for_incr_expr (TREE_OPERAND (exp, 0), decl);
+ t = check_omp_for_incr_expr (loc, TREE_OPERAND (exp, 0), decl);
if (t != error_mark_node)
- return fold_build2 (MINUS_EXPR, TREE_TYPE (exp), t, TREE_OPERAND (exp, 1));
+ return fold_build2_loc (loc, MINUS_EXPR,
+ TREE_TYPE (exp), t, TREE_OPERAND (exp, 1));
break;
case PLUS_EXPR:
- t = check_omp_for_incr_expr (TREE_OPERAND (exp, 0), decl);
+ t = check_omp_for_incr_expr (loc, TREE_OPERAND (exp, 0), decl);
if (t != error_mark_node)
- return fold_build2 (PLUS_EXPR, TREE_TYPE (exp), t, TREE_OPERAND (exp, 1));
- t = check_omp_for_incr_expr (TREE_OPERAND (exp, 1), decl);
+ return fold_build2_loc (loc, PLUS_EXPR,
+ TREE_TYPE (exp), t, TREE_OPERAND (exp, 1));
+ t = check_omp_for_incr_expr (loc, TREE_OPERAND (exp, 1), decl);
if (t != error_mark_node)
- return fold_build2 (PLUS_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0), t);
+ return fold_build2_loc (loc, PLUS_EXPR,
+ TREE_TYPE (exp), TREE_OPERAND (exp, 0), t);
break;
default:
break;
{
TREE_OPERAND (cond, 0) = TREE_OPERAND (op0, 0);
TREE_OPERAND (cond, 1)
- = fold_build1 (NOP_EXPR, TREE_TYPE (decl),
+ = fold_build1_loc (elocus, NOP_EXPR, TREE_TYPE (decl),
TREE_OPERAND (cond, 1));
}
else if (TREE_CODE (op1) == NOP_EXPR
{
TREE_OPERAND (cond, 1) = TREE_OPERAND (op1, 0);
TREE_OPERAND (cond, 0)
- = fold_build1 (NOP_EXPR, TREE_TYPE (decl),
+ = fold_build1_loc (elocus, NOP_EXPR, TREE_TYPE (decl),
TREE_OPERAND (cond, 0));
}
if (POINTER_TYPE_P (TREE_TYPE (decl))
&& TREE_OPERAND (incr, 1))
{
- tree t = fold_convert (sizetype, TREE_OPERAND (incr, 1));
+ tree t = fold_convert_loc (elocus,
+ sizetype, TREE_OPERAND (incr, 1));
if (TREE_CODE (incr) == POSTDECREMENT_EXPR
|| TREE_CODE (incr) == PREDECREMENT_EXPR)
- t = fold_build1 (NEGATE_EXPR, sizetype, t);
+ t = fold_build1_loc (elocus, NEGATE_EXPR, sizetype, t);
t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (decl), decl, t);
incr = build2 (MODIFY_EXPR, void_type_node, decl, t);
}
incr_ok = true;
else
{
- tree t = check_omp_for_incr_expr (TREE_OPERAND (incr, 1),
+ tree t = check_omp_for_incr_expr (elocus,
+ TREE_OPERAND (incr, 1),
decl);
if (t != error_mark_node)
{
}
/* Attempt to statically determine when the number isn't positive. */
- c = fold_build2 (LE_EXPR, boolean_type_node, t,
+ c = fold_build2_loc (expr_loc, LE_EXPR, boolean_type_node, t,
build_int_cst (TREE_TYPE (t), 0));
if (CAN_HAVE_LOCATION_P (c))
SET_EXPR_LOCATION (c, expr_loc);
static tree lookup_field (tree, tree);
static int convert_arguments (tree, VEC(tree,gc) *, VEC(tree,gc) *, tree,
tree);
-static tree pointer_diff (tree, tree);
+static tree pointer_diff (location_t, tree, tree);
static tree convert_for_assignment (location_t, tree, tree, tree,
enum impl_conv, bool, tree, tree, int);
static tree valid_compound_expr_initializer (tree, tree);
}
/* Convert in case a char is more than one unit. */
- return size_binop (CEIL_DIV_EXPR, TYPE_SIZE_UNIT (type),
- size_int (TYPE_PRECISION (char_type_node)
- / BITS_PER_UNIT));
+ return size_binop_loc (input_location, CEIL_DIV_EXPR, TYPE_SIZE_UNIT (type),
+ size_int (TYPE_PRECISION (char_type_node)
+ / BITS_PER_UNIT));
}
\f
/* Return either DECL or its known constant value (if it has one). */
if (VOID_TYPE_P (return_type))
{
if (TYPE_QUALS (return_type) != TYPE_UNQUALIFIED)
- pedwarn (input_location, 0,
+ pedwarn (loc, 0,
"function with qualified void return type called");
return trap;
}
build_constructor (return_type, 0),
false);
else
- rhs = fold_convert (return_type, integer_zero_node);
+ rhs = fold_convert_loc (loc, return_type, integer_zero_node);
return require_complete_type (build2 (COMPOUND_EXPR, return_type,
trap, rhs));
&& !strncmp (IDENTIFIER_POINTER (name), "__builtin_", 10))
{
if (require_constant_value)
- result = fold_build_call_array_initializer (TREE_TYPE (fntype),
- function, nargs, argarray);
+ result =
+ fold_build_call_array_initializer_loc (loc, TREE_TYPE (fntype),
+ function, nargs, argarray);
else
- result = fold_build_call_array (TREE_TYPE (fntype),
- function, nargs, argarray);
+ result = fold_build_call_array_loc (loc, TREE_TYPE (fntype),
+ function, nargs, argarray);
if (TREE_CODE (result) == NOP_EXPR
&& TREE_CODE (TREE_OPERAND (result, 0)) == INTEGER_CST)
STRIP_TYPE_NOPS (result);
}
else
- result = build_call_array (TREE_TYPE (fntype),
- function, nargs, argarray);
+ result = build_call_array_loc (loc, TREE_TYPE (fntype),
+ function, nargs, argarray);
if (VOID_TYPE_P (TREE_TYPE (result)))
{
if (TYPE_QUALS (TREE_TYPE (result)) != TYPE_UNQUALIFIED)
- pedwarn (input_location, 0,
+ pedwarn (loc, 0,
"function with qualified void return type called");
return result;
}
The resulting tree has type int. */
static tree
-pointer_diff (tree op0, tree op1)
+pointer_diff (location_t loc, tree op0, tree op1)
{
tree restype = ptrdiff_type_node;
tree orig_op1 = op1;
if (TREE_CODE (target_type) == VOID_TYPE)
- pedwarn (input_location, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
+ pedwarn (loc, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
"pointer of type %<void *%> used in subtraction");
if (TREE_CODE (target_type) == FUNCTION_TYPE)
- pedwarn (input_location, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
+ pedwarn (loc, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
"pointer to a function used in subtraction");
/* If the conversion to ptrdiff_type does anything like widening or
Do not do default conversions on the minus operator
in case restype is a short type. */
- op0 = build_binary_op (input_location,
+ op0 = build_binary_op (loc,
MINUS_EXPR, convert (restype, op0),
convert (restype, op1), 0);
/* This generates an error if op1 is pointer to incomplete type. */
if (!COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (TREE_TYPE (orig_op1))))
- error ("arithmetic on pointer to an incomplete type");
+ error_at (loc, "arithmetic on pointer to an incomplete type");
/* This generates an error if op0 is pointer to incomplete type. */
op1 = c_size_in_bytes (target_type);
/* Divide by the size, in easiest possible way. */
- return fold_build2 (EXACT_DIV_EXPR, restype, op0, convert (restype, op1));
+ return fold_build2_loc (loc, EXACT_DIV_EXPR, restype,
+ op0, convert (restype, op1));
}
\f
/* Construct and perhaps optimize a tree representation
}
else if (!noconvert)
arg = default_conversion (arg);
- arg = non_lvalue (arg);
+ arg = non_lvalue_loc (location, arg);
break;
case NEGATE_EXPR:
return error_mark_node;
}
arg = c_objc_common_truthvalue_conversion (location, arg);
- ret = invert_truthvalue (arg);
+ ret = invert_truthvalue_loc (location, arg);
/* If the TRUTH_NOT_EXPR has been folded, reset the location. */
if (EXPR_P (ret) && EXPR_HAS_LOCATION (ret))
location = EXPR_LOCATION (ret);
if (TREE_CODE (arg) == COMPLEX_CST)
ret = TREE_REALPART (arg);
else if (TREE_CODE (TREE_TYPE (arg)) == COMPLEX_TYPE)
- ret = fold_build1 (REALPART_EXPR, TREE_TYPE (TREE_TYPE (arg)), arg);
+ ret = fold_build1_loc (location,
+ REALPART_EXPR, TREE_TYPE (TREE_TYPE (arg)), arg);
else
ret = arg;
if (eptype && TREE_CODE (eptype) == COMPLEX_TYPE)
if (TREE_CODE (arg) == COMPLEX_CST)
ret = TREE_IMAGPART (arg);
else if (TREE_CODE (TREE_TYPE (arg)) == COMPLEX_TYPE)
- ret = fold_build1 (IMAGPART_EXPR, TREE_TYPE (TREE_TYPE (arg)), arg);
+ ret = fold_build1_loc (location,
+ IMAGPART_EXPR, TREE_TYPE (TREE_TYPE (arg)), arg);
else
- ret = omit_one_operand (TREE_TYPE (arg), integer_zero_node, arg);
+ ret = omit_one_operand_loc (location, TREE_TYPE (arg),
+ integer_zero_node, arg);
if (eptype && TREE_CODE (eptype) == COMPLEX_TYPE)
eptype = TREE_TYPE (eptype);
goto return_build_unary_op;
}
inc = c_size_in_bytes (TREE_TYPE (argtype));
- inc = fold_convert (sizetype, inc);
+ inc = fold_convert_loc (location, sizetype, inc);
}
else if (FRACT_MODE_P (TYPE_MODE (argtype)))
{
{
/* Don't let this be an lvalue. */
if (lvalue_p (TREE_OPERAND (arg, 0)))
- return non_lvalue (TREE_OPERAND (arg, 0));
+ return non_lvalue_loc (location, TREE_OPERAND (arg, 0));
ret = TREE_OPERAND (arg, 0);
goto return_build_unary_op;
}
if (val && TREE_CODE (val) == INDIRECT_REF
&& TREE_CONSTANT (TREE_OPERAND (val, 0)))
{
- tree op0 = fold_convert (sizetype, fold_offsetof (arg, val)), op1;
+ tree op0 = fold_convert_loc (location, sizetype,
+ fold_offsetof (arg, val)), op1;
- op1 = fold_convert (argtype, TREE_OPERAND (val, 0));
- ret = fold_build2 (POINTER_PLUS_EXPR, argtype, op1, op0);
+ op1 = fold_convert_loc (location, argtype, TREE_OPERAND (val, 0));
+ ret = fold_build2_loc (location, POINTER_PLUS_EXPR, argtype, op1, op0);
goto return_build_unary_op;
}
argtype = TREE_TYPE (arg);
if (TREE_CODE (arg) == INTEGER_CST)
ret = (require_constant_value
- ? fold_build1_initializer (code, argtype, arg)
- : fold_build1 (code, argtype, arg));
+ ? fold_build1_initializer_loc (location, code, argtype, arg)
+ : fold_build1_loc (location, code, argtype, arg));
else
ret = build1 (code, argtype, arg);
return_build_unary_op:
&& !TREE_OVERFLOW (orig_op2)));
}
if (int_const || (ifexp_bcp && TREE_CODE (ifexp) == INTEGER_CST))
- ret = fold_build3 (COND_EXPR, result_type, ifexp, op1, op2);
+ ret = fold_build3_loc (colon_loc, COND_EXPR, result_type, ifexp, op1, op2);
else
{
ret = build3 (COND_EXPR, result_type, ifexp, op1, op2);
/* Don't let a cast be an lvalue. */
if (value == expr)
- value = non_lvalue (value);
+ value = non_lvalue_loc (loc, value);
/* Don't allow the results of casting to floating-point or complex
types be confused with actual constants, or casts involving
pedwarn (location, OPT_pedantic,
"ISO C prohibits argument conversion to union type");
- rhs = fold_convert (TREE_TYPE (memb), rhs);
+ rhs = fold_convert_loc (location, TREE_TYPE (memb), rhs);
return build_constructor_single (type, memb, rhs);
}
}
/* Advance the variable that indicates sequential elements output. */
if (TREE_CODE (constructor_type) == ARRAY_TYPE)
constructor_unfilled_index
- = size_binop (PLUS_EXPR, constructor_unfilled_index,
- bitsize_one_node);
+ = size_binop_loc (input_location, PLUS_EXPR, constructor_unfilled_index,
+ bitsize_one_node);
else if (TREE_CODE (constructor_type) == RECORD_TYPE)
{
constructor_unfilled_fields
/* For a record, keep track of end position of last field. */
if (DECL_SIZE (constructor_fields))
constructor_bit_index
- = size_binop (PLUS_EXPR,
- bit_position (constructor_fields),
- DECL_SIZE (constructor_fields));
+ = size_binop_loc (input_location, PLUS_EXPR,
+ bit_position (constructor_fields),
+ DECL_SIZE (constructor_fields));
/* If the current field was the first one not yet written out,
it isn't now, so update. */
}
constructor_index
- = size_binop (PLUS_EXPR, constructor_index, bitsize_one_node);
+ = size_binop_loc (input_location, PLUS_EXPR,
+ constructor_index, bitsize_one_node);
if (!value.value)
/* If we are doing the bookkeeping for an element that was
}
constructor_index
- = size_binop (PLUS_EXPR, constructor_index, bitsize_one_node);
+ = size_binop_loc (input_location,
+ PLUS_EXPR, constructor_index, bitsize_one_node);
if (!value.value)
/* If we are doing the bookkeeping for an element that was
process_init_element (pop_init_level (1), true);
}
- p->index = size_binop (PLUS_EXPR, p->index, bitsize_one_node);
+ p->index = size_binop_loc (input_location,
+ PLUS_EXPR, p->index, bitsize_one_node);
if (tree_int_cst_equal (p->index, p->range_end) && !p->prev)
finish = 1;
}
t = build_and_jump (&blab);
- exit = fold_build3 (COND_EXPR, void_type_node, cond, exit, t);
if (cond_is_first)
- SET_EXPR_LOCATION (exit, start_locus);
+ exit = fold_build3_loc (start_locus,
+ COND_EXPR, void_type_node, cond, exit, t);
else
- SET_EXPR_LOCATION (exit, input_location);
+ exit = fold_build3_loc (input_location,
+ COND_EXPR, void_type_node, cond, exit, t);
}
add_stmt (top);
/* Handle the pointer + int case. */
if (code0 == POINTER_TYPE && code1 == INTEGER_TYPE)
{
- ret = pointer_int_sum (PLUS_EXPR, op0, op1);
+ ret = pointer_int_sum (location, PLUS_EXPR, op0, op1);
goto return_build_binary_op;
}
else if (code1 == POINTER_TYPE && code0 == INTEGER_TYPE)
{
- ret = pointer_int_sum (PLUS_EXPR, op1, op0);
+ ret = pointer_int_sum (location, PLUS_EXPR, op1, op0);
goto return_build_binary_op;
}
else
if (code0 == POINTER_TYPE && code1 == POINTER_TYPE
&& comp_target_types (location, type0, type1))
{
- ret = pointer_diff (op0, op1);
+ ret = pointer_diff (location, op0, op1);
goto return_build_binary_op;
}
/* Handle pointer minus int. Just like pointer plus int. */
else if (code0 == POINTER_TYPE && code1 == INTEGER_TYPE)
{
- ret = pointer_int_sum (MINUS_EXPR, op0, op1);
+ ret = pointer_int_sum (location, MINUS_EXPR, op0, op1);
goto return_build_binary_op;
}
else
/* Treat expressions in initializers specially as they can't trap. */
if (int_const_or_overflow)
ret = (require_constant_value
- ? fold_build2_initializer (resultcode, build_type, op0, op1)
- : fold_build2 (resultcode, build_type, op0, op1));
+ ? fold_build2_initializer_loc (location, resultcode, build_type,
+ op0, op1)
+ : fold_build2_loc (location, resultcode, build_type, op0, op1));
else
ret = build2 (resultcode, build_type, op0, op1);
if (final_type != 0)
int *must_preallocate, int *ecf_flags,
bool *may_tailcall, bool call_from_thunk_p)
{
+ location_t loc = EXPR_LOCATION (exp);
/* 1 if scanning parms front to back, -1 if scanning back to front. */
int inc;
if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
*may_tailcall = false;
- args[i].tree_value = build_fold_addr_expr (args[i].tree_value);
+ args[i].tree_value = build_fold_addr_expr_loc (loc,
+ args[i].tree_value);
type = TREE_TYPE (args[i].tree_value);
if (*ecf_flags & ECF_CONST)
*ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
args[i].tree_value
- = build_fold_addr_expr (make_tree (type, copy));
+ = build_fold_addr_expr_loc (loc, make_tree (type, copy));
type = TREE_TYPE (args[i].tree_value);
*may_tailcall = false;
}
priority = p;
else if (p != priority)
break;
- append_to_statement_list (build_function_call_expr (fn, 0),
+ append_to_statement_list (build_function_call_expr (UNKNOWN_LOCATION,
+ fn, 0),
&body);
++i;
}
tree
convert_to_pointer (tree type, tree expr)
{
+ location_t loc = EXPR_LOCATION (expr);
if (TREE_TYPE (expr) == type)
return expr;
{
case POINTER_TYPE:
case REFERENCE_TYPE:
- return fold_build1 (NOP_EXPR, type, expr);
+ return fold_build1_loc (loc, NOP_EXPR, type, expr);
case INTEGER_TYPE:
case ENUMERAL_TYPE:
case BOOLEAN_TYPE:
if (TYPE_PRECISION (TREE_TYPE (expr)) != POINTER_SIZE)
- expr = fold_build1 (NOP_EXPR,
+ expr = fold_build1_loc (loc, NOP_EXPR,
lang_hooks.types.type_for_size (POINTER_SIZE, 0),
expr);
- return fold_build1 (CONVERT_EXPR, type, expr);
+ return fold_build1_loc (loc, CONVERT_EXPR, type, expr);
default:
+2009-07-17 Aldy Hernandez <aldyh@redhat.com>
+ Manuel López-Ibáñez <manu@gcc.gnu.org>
+
+ PR 40435
+ * typeck.c, init.c, class.c, method.c, rtti.c, except.c, error.c,
+ tree.c, cp-gimplify.c, cxx-pretty-print.c, pt.c, semantics.c,
+ call.c, cvt.c, mangle.c: Add location argument to
+ fold_{unary,binary,ternary}, fold_build[123], build_call_expr,
+ build_size_arg, build_fold_addr_expr, build_call_array,
+ non_lvalue, size_diffop, fold_build1_initializer,
+ fold_build2_initializer, fold_build3_initializer,
+ fold_build_call_array, fold_build_call_array_initializer,
+ fold_single_bit_test, omit_one_operand, omit_two_operands,
+ invert_truthvalue, fold_truth_not_expr, build_fold_indirect_ref,
+ fold_indirect_ref, combine_comparisons, fold_builtin_*,
+ fold_call_expr, build_range_check, maybe_fold_offset_to_address,
+ round_up, round_down.
+
2009-07-16 Jason Merrill <jason@redhat.com>
PR libstdc++/37907
argarray[i], t);
}
- function = build_call_array (result_type, function, n, argarray);
+ function = build_call_array_loc (input_location,
+ result_type, function, n, argarray);
TREE_HAS_CONSTRUCTOR (function) = is_constructor;
TREE_NOTHROW (function) = nothrow;
alcarray[ix + 1] = arg;
argarray = alcarray;
}
- expr = build_call_array (return_type, build_addr_func (fn), nargs,
- argarray);
+ expr = build_call_array_loc (input_location,
+ return_type, build_addr_func (fn), nargs,
+ argarray);
if (TREE_THIS_VOLATILE (fn) && cfun)
current_function_returns_abnormally = 1;
if (!VOID_TYPE_P (return_type))
if (null_test)
{
tree zero = cp_convert (TREE_TYPE (expr), integer_zero_node);
- null_test = fold_build2 (NE_EXPR, boolean_type_node,
+ null_test = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
expr, zero);
}
TREE_CONSTANT (v_offset) = 1;
offset = convert_to_integer (ptrdiff_type_node,
- size_diffop (offset,
+ size_diffop_loc (input_location, offset,
BINFO_OFFSET (v_binfo)));
if (!integer_zerop (offset))
{
offset = fold_convert (sizetype, offset);
if (code == MINUS_EXPR)
- offset = fold_build1 (NEGATE_EXPR, sizetype, offset);
+ offset = fold_build1_loc (input_location, NEGATE_EXPR, sizetype, offset);
expr = build2 (POINTER_PLUS_EXPR, ptr_target_type, expr, offset);
}
else
out:
if (null_test)
- expr = fold_build3 (COND_EXPR, target_type, null_test, expr,
- fold_build1 (NOP_EXPR, target_type,
+ expr = fold_build3_loc (input_location, COND_EXPR, target_type, null_test, expr,
+ fold_build1_loc (input_location, NOP_EXPR, target_type,
integer_zero_node));
return expr;
expr = cp_build_unary_op (ADDR_EXPR, expr, /*noconvert=*/1,
tf_warning_or_error);
if (!integer_zerop (BINFO_OFFSET (base)))
- expr = fold_build2 (POINTER_PLUS_EXPR, pointer_type, expr,
+ expr = fold_build2_loc (input_location,
+ POINTER_PLUS_EXPR, pointer_type, expr,
fold_convert (sizetype, BINFO_OFFSET (base)));
expr = fold_convert (build_pointer_type (BINFO_TYPE (base)), expr);
- expr = build_fold_indirect_ref (expr);
+ expr = build_fold_indirect_ref_loc (input_location, expr);
}
return expr;
/* A virtual binfo might have been copied from within
another hierarchy. As we're about to use it as a
primary base, make sure the offsets match. */
- delta = size_diffop (convert (ssizetype,
+ delta = size_diffop_loc (input_location,
+ convert (ssizetype,
BINFO_OFFSET (base_binfo)),
convert (ssizetype,
BINFO_OFFSET (this_primary)));
/* A virtual binfo might have been copied from within
another hierarchy. As we're about to use it as a primary
base, make sure the offsets match. */
- delta = size_diffop (ssize_int (0),
+ delta = size_diffop_loc (input_location, ssize_int (0),
convert (ssizetype, BINFO_OFFSET (primary)));
propagate_binfo_offsets (primary, delta);
{
/* We convert via virtual base. Adjust the fixed
offset to be from there. */
- offset = size_diffop
- (offset, convert
- (ssizetype, BINFO_OFFSET (virtual_offset)));
+ offset =
+ size_diffop (offset,
+ convert (ssizetype,
+ BINFO_OFFSET (virtual_offset)));
}
if (fixed_offset)
/* There was an existing fixed offset, this must be
if (virtual_base)
/* The `this' pointer needs to be adjusted from the declaration to
the nearest virtual base. */
- delta = size_diffop (convert (ssizetype, BINFO_OFFSET (virtual_base)),
+ delta = size_diffop_loc (input_location,
+ convert (ssizetype, BINFO_OFFSET (virtual_base)),
convert (ssizetype, BINFO_OFFSET (first_defn)));
else if (lost)
/* If the nearest definition is in a lost primary, we don't need an
BINFO to pointing at the base where the final overrider
appears. */
virtual_covariant:
- delta = size_diffop (convert (ssizetype,
+ delta = size_diffop_loc (input_location,
+ convert (ssizetype,
BINFO_OFFSET (TREE_VALUE (overrider))),
convert (ssizetype, BINFO_OFFSET (binfo)));
hierarchy. Therefore, we may not need to add the entire
OFFSET. */
propagate_binfo_offsets (binfo,
- size_diffop (convert (ssizetype, offset),
+ size_diffop_loc (input_location,
+ convert (ssizetype, offset),
convert (ssizetype,
BINFO_OFFSET (binfo))));
}
{
if (abi_version_at_least (2))
propagate_binfo_offsets
- (binfo, size_diffop (size_zero_node, BINFO_OFFSET (binfo)));
+ (binfo, size_diffop_loc (input_location,
+ size_zero_node, BINFO_OFFSET (binfo)));
else
warning (OPT_Wabi,
"offset of empty base %qT may not be ABI-compliant and may"
/* On some platforms (ARM), even empty classes will not be
byte-aligned. */
- eoc = round_up (rli_size_unit_so_far (rli),
+ eoc = round_up_loc (input_location,
+ rli_size_unit_so_far (rli),
CLASSTYPE_ALIGN_UNIT (basetype));
atend = layout_empty_base (rli, binfo, eoc, offsets);
/* A nearly-empty class "has no proper base class that is empty,
&& first_vbase
&& (tree_int_cst_lt
(size_binop (CEIL_DIV_EXPR,
- round_up (CLASSTYPE_SIZE (t),
+ round_up_loc (input_location,
+ CLASSTYPE_SIZE (t),
CLASSTYPE_ALIGN (basetype)),
bitsize_unit_node),
BINFO_OFFSET (vbase))))
/* Make sure that we are on a byte boundary so that the size of
the class without virtual bases will always be a round number
of bytes. */
- rli->bitpos = round_up (rli->bitpos, BITS_PER_UNIT);
+ rli->bitpos = round_up_loc (input_location, rli->bitpos, BITS_PER_UNIT);
normalize_rli (rli);
}
The vbase offsets go in reverse inheritance-graph order, and
we are walking in inheritance graph order so these end up in
the right order. */
- delta = size_diffop (BINFO_OFFSET (b), BINFO_OFFSET (non_primary_binfo));
+ delta = size_diffop_loc (input_location,
+ BINFO_OFFSET (b), BINFO_OFFSET (non_primary_binfo));
*vid->last_init
= build_tree_list (NULL_TREE,
- fold_build1 (NOP_EXPR,
+ fold_build1_loc (input_location, NOP_EXPR,
vtable_entry_type,
delta));
vid->last_init = &TREE_CHAIN (*vid->last_init);
vid->binfo. But it might be a lost primary, so its
BINFO_OFFSET might be wrong, so we just use the
BINFO_OFFSET from vid->binfo. */
- vcall_offset = size_diffop (BINFO_OFFSET (base),
+ vcall_offset = size_diffop_loc (input_location,
+ BINFO_OFFSET (base),
BINFO_OFFSET (vid->binfo));
- vcall_offset = fold_build1 (NOP_EXPR, vtable_entry_type,
+ vcall_offset = fold_build1_loc (input_location,
+ NOP_EXPR, vtable_entry_type,
vcall_offset);
}
/* Add the initializer to the vtable. */
&& BINFO_INHERITANCE_CHAIN (primary_base) == b);
b = primary_base;
}
- offset = size_diffop (BINFO_OFFSET (vid->rtti_binfo), BINFO_OFFSET (b));
+ offset = size_diffop_loc (input_location,
+ BINFO_OFFSET (vid->rtti_binfo), BINFO_OFFSET (b));
/* The second entry is the address of the typeinfo object. */
if (flag_rtti)
size_zero_node, NULL, NULL);
}
while (TREE_CODE (inner_type) == ARRAY_TYPE);
- start1 = build_fold_addr_expr (start1);
+ start1 = build_fold_addr_expr_loc (input_location, start1);
if (arg2)
- start2 = build_fold_addr_expr (start2);
+ start2 = build_fold_addr_expr_loc (input_location, start2);
end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
end1 = build2 (POINTER_PLUS_EXPR, TREE_TYPE (start1), start1, end1);
}
else
{
- argarray[i++] = build_fold_addr_expr (arg1);
+ argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
if (arg2)
- argarray[i++] = build_fold_addr_expr (arg2);
+ argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
/* Handle default arguments. */
for (parm = defparm; parm && parm != void_list_node;
parm = TREE_CHAIN (parm), i++)
&& !AGGR_INIT_VIA_CTOR_P (init))
{
tree fn = AGGR_INIT_EXPR_FN (init);
- expr = build_call_array (TREE_TYPE (TREE_TYPE (TREE_TYPE (fn))),
- fn,
- aggr_init_expr_nargs (init),
- AGGR_INIT_EXPR_ARGP (init));
+ expr = build_call_array_loc (input_location,
+ TREE_TYPE (TREE_TYPE (TREE_TYPE (fn))),
+ fn,
+ aggr_init_expr_nargs (init),
+ AGGR_INIT_EXPR_ARGP (init));
}
}
break;
if (TREE_CODE (type) == ARRAY_REF)
type = build_cplus_array_type
(TREE_OPERAND (type, 0),
- build_index_type (fold_build2 (MINUS_EXPR, integer_type_node,
+ build_index_type (fold_build2_loc (input_location,
+ MINUS_EXPR, integer_type_node,
TREE_OPERAND (type, 1),
integer_one_node)));
pp_cxx_type_id (pp, type);
dump_expr (TREE_OPERAND (max, 0),
flags & ~TFF_EXPR_IN_PARENS);
else
- dump_expr (fold_build2 (PLUS_EXPR, dtype, max,
+ dump_expr (fold_build2_loc (input_location,
+ PLUS_EXPR, dtype, max,
build_int_cst (dtype, 1)),
flags & ~TFF_EXPR_IN_PARENS);
}
exp = build_exc_ptr ();
exp = build1 (NOP_EXPR, build_pointer_type (type), exp);
exp = build2 (POINTER_PLUS_EXPR, TREE_TYPE (exp), exp,
- fold_build1 (NEGATE_EXPR, sizetype,
+ fold_build1_loc (input_location,
+ NEGATE_EXPR, sizetype,
TYPE_SIZE_UNIT (TREE_TYPE (exp))));
exp = cp_build_indirect_ref (exp, NULL, tf_warning_or_error);
initialize_handler_parm (decl, exp);
/* Iterate over the array elements, building initializations. */
if (nelts)
- max_index = fold_build2 (MINUS_EXPR, TREE_TYPE (nelts),
+ max_index = fold_build2_loc (input_location,
+ MINUS_EXPR, TREE_TYPE (nelts),
nelts, integer_one_node);
else
max_index = array_type_nelts (type);
LOOKUP_NORMAL | LOOKUP_NONVIRTUAL,
tf_warning_or_error);
if (flag)
- expr = fold_build3 (COND_EXPR, void_type_node,
+ expr = fold_build3_loc (input_location,
+ COND_EXPR, void_type_node,
c_common_truthvalue_conversion (input_location, flag),
expr, integer_zero_node);
many elements to destroy later. We use the last sizeof
(size_t) bytes to store the number of elements. */
cookie_ptr = size_binop (MINUS_EXPR, cookie_size, size_in_bytes (sizetype));
- cookie_ptr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (alloc_node),
+ cookie_ptr = fold_build2_loc (input_location,
+ POINTER_PLUS_EXPR, TREE_TYPE (alloc_node),
alloc_node, cookie_ptr);
size_ptr_type = build_pointer_type (sizetype);
cookie_ptr = fold_convert (size_ptr_type, cookie_ptr);
{
/* Also store the element size. */
cookie_ptr = build2 (POINTER_PLUS_EXPR, size_ptr_type, cookie_ptr,
- fold_build1 (NEGATE_EXPR, sizetype,
+ fold_build1_loc (input_location,
+ NEGATE_EXPR, sizetype,
size_in_bytes (sizetype)));
cookie = cp_build_indirect_ref (cookie_ptr, NULL, complain);
tbase = create_temporary_var (ptype);
tbase_init = cp_build_modify_expr (tbase, NOP_EXPR,
- fold_build2 (POINTER_PLUS_EXPR, ptype,
+ fold_build2_loc (input_location,
+ POINTER_PLUS_EXPR, ptype,
fold_convert (ptype, base),
virtual_size),
tf_warning_or_error);
body = build1 (EXIT_EXPR, void_type_node,
build2 (EQ_EXPR, boolean_type_node, tbase,
fold_convert (ptype, base)));
- tmp = fold_build1 (NEGATE_EXPR, sizetype, size_exp);
+ tmp = fold_build1_loc (input_location, NEGATE_EXPR, sizetype, size_exp);
body = build_compound_expr
(input_location,
body, cp_build_modify_expr (tbase, NOP_EXPR,
body = integer_zero_node;
/* Outermost wrapper: If pointer is null, punt. */
- body = fold_build3 (COND_EXPR, void_type_node,
- fold_build2 (NE_EXPR, boolean_type_node, base,
+ body = fold_build3_loc (input_location, COND_EXPR, void_type_node,
+ fold_build2_loc (input_location,
+ NE_EXPR, boolean_type_node, base,
convert (TREE_TYPE (base),
integer_zero_node)),
body, integer_zero_node);
base = TARGET_EXPR_SLOT (base_init);
}
type = strip_array_types (TREE_TYPE (type));
- cookie_addr = fold_build1 (NEGATE_EXPR, sizetype, TYPE_SIZE_UNIT (sizetype));
+ cookie_addr = fold_build1_loc (input_location, NEGATE_EXPR,
+ sizetype, TYPE_SIZE_UNIT (sizetype));
cookie_addr = build2 (POINTER_PLUS_EXPR,
size_ptr_type,
fold_convert (size_ptr_type, base),
if (sign < 0)
{
write_char ('n');
- n = fold_build1 (NEGATE_EXPR, type, n);
+ n = fold_build1_loc (input_location, NEGATE_EXPR, type, n);
}
do
{
- tree d = fold_build2 (FLOOR_DIV_EXPR, type, n, base);
- tree tmp = fold_build2 (MULT_EXPR, type, d, base);
+ tree d = fold_build2_loc (input_location, FLOOR_DIV_EXPR, type, n, base);
+ tree tmp = fold_build2_loc (input_location, MULT_EXPR, type, d, base);
unsigned c;
done = integer_zerop (d);
- tmp = fold_build2 (MINUS_EXPR, type, n, tmp);
+ tmp = fold_build2_loc (input_location, MINUS_EXPR, type, n, tmp);
c = hwint_to_ascii (TREE_INT_CST_LOW (tmp), 10, ptr,
done ? 1 : chunk_digits);
ptr -= c;
{
if (this_adjusting)
/* Adjust the pointer by the constant. */
- ptr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
+ ptr = fold_build2_loc (input_location,
+ POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
size_int (fixed_offset));
/* If there's a virtual offset, look up that value in the vtable and
/* Form the vtable address. */
vtable = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (vtable)), vtable);
/* Find the entry with the vcall offset. */
- vtable = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (vtable), vtable,
- fold_convert (sizetype, virtual_offset));
+ vtable = fold_build2_loc (input_location,
+ POINTER_PLUS_EXPR, TREE_TYPE (vtable), vtable,
+ fold_convert (sizetype, virtual_offset));
/* Get the offset itself. */
vtable = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (vtable)), vtable);
/* Adjust the `this' pointer. */
- ptr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
+ ptr = fold_build2_loc (input_location,
+ POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
fold_convert (sizetype, vtable));
}
if (!this_adjusting)
/* Adjust the pointer by the constant. */
- ptr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
+ ptr = fold_build2_loc (input_location,
+ POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
size_int (fixed_offset));
return ptr;
if (e1 == error_mark_node || e2 == error_mark_node)
return error_mark_node;
- return fold_build2 (TREE_CODE (t), TREE_TYPE (t), e1, e2);
+ return fold_build2_loc (input_location,
+ TREE_CODE (t), TREE_TYPE (t), e1, e2);
}
case NEGATE_EXPR:
if (e == error_mark_node)
return error_mark_node;
- return fold_build1 (TREE_CODE (t), TREE_TYPE (t), e);
+ return fold_build1_loc (input_location, TREE_CODE (t), TREE_TYPE (t), e);
}
case TYPENAME_TYPE:
/* If only one of the bounds used a MINUS_EXPR, compensate
by adding one to the other bound. */
if (parm_cst && !arg_cst)
- parm_max = fold_build2 (PLUS_EXPR,
+ parm_max = fold_build2_loc (input_location, PLUS_EXPR,
integer_type_node,
parm_max,
integer_one_node);
else if (arg_cst && !parm_cst)
- arg_max = fold_build2 (PLUS_EXPR,
+ arg_max = fold_build2_loc (input_location, PLUS_EXPR,
integer_type_node,
arg_max,
integer_one_node);
/* Combine offset and flags into one field. */
offset = fold_convert (offset_type, offset);
- offset = fold_build2 (LSHIFT_EXPR, offset_type, offset,
+ offset = fold_build2_loc (input_location,
+ LSHIFT_EXPR, offset_type, offset,
build_int_cst (offset_type, 8));
- offset = fold_build2 (BIT_IOR_EXPR, offset_type, offset,
+ offset = fold_build2_loc (input_location,
+ BIT_IOR_EXPR, offset_type, offset,
build_int_cst (offset_type, flags));
base_init = tree_cons (NULL_TREE, offset, base_init);
base_init = tree_cons (NULL_TREE, tinfo, base_init);
style = arg;
}
- call_expr = build_call_array (TREE_TYPE (TREE_TYPE (TREE_TYPE (fn))),
- fn,
- aggr_init_expr_nargs (aggr_init_expr),
- AGGR_INIT_EXPR_ARGP (aggr_init_expr));
+ call_expr = build_call_array_loc (input_location,
+ TREE_TYPE (TREE_TYPE (TREE_TYPE (fn))),
+ fn,
+ aggr_init_expr_nargs (aggr_init_expr),
+ AGGR_INIT_EXPR_ARGP (aggr_init_expr));
if (style == ctor)
{
tree
array_type_nelts_top (tree type)
{
- return fold_build2 (PLUS_EXPR, sizetype,
+ return fold_build2_loc (input_location,
+ PLUS_EXPR, sizetype,
array_type_nelts (type),
size_one_node);
}
while (TREE_CODE (type) == ARRAY_TYPE)
{
tree n = array_type_nelts_top (type);
- sz = fold_build2 (MULT_EXPR, sizetype, sz, n);
+ sz = fold_build2_loc (input_location,
+ MULT_EXPR, sizetype, sz, n);
type = TREE_TYPE (type);
}
return sz;
/*want_type=*/false);
member_type = cp_build_qualified_type (TREE_TYPE (member),
cp_type_quals (ptrmem_type));
- return fold_build3 (COMPONENT_REF, member_type,
+ return fold_build3_loc (input_location,
+ COMPONENT_REF, member_type,
ptrmem, member, NULL_TREE);
}
TREE_NO_WARNING (vtbl) = 1;
/* Finally, extract the function pointer from the vtable. */
- e2 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (vtbl), vtbl,
+ e2 = fold_build2_loc (input_location,
+ POINTER_PLUS_EXPR, TREE_TYPE (vtbl), vtbl,
fold_convert (sizetype, idx));
e2 = cp_build_indirect_ref (e2, NULL, tf_warning_or_error);
TREE_CONSTANT (e2) = 1;
pointer_int_sum() anyway. */
complete_type (TREE_TYPE (res_type));
- return pointer_int_sum (resultcode, ptrop,
+ return pointer_int_sum (input_location, resultcode, ptrop,
fold_if_not_in_template (intop));
}
case TRUTH_NOT_EXPR:
arg = perform_implicit_conversion (boolean_type_node, arg,
complain);
- val = invert_truthvalue (arg);
+ val = invert_truthvalue_loc (input_location, arg);
if (arg != error_mark_node)
return val;
errstring = "in argument to unary !";
PLUS_EXPR, op1, delta,
tf_warning_or_error);
- expr = fold_build3 (COND_EXPR, ptrdiff_type_node, cond, op1, op2);
+ expr = fold_build3_loc (input_location,
+ COND_EXPR, ptrdiff_type_node, cond, op1, op2);
}
result = get_delta_difference_1 (to, from, c_cast_p);
if (result)
- result = size_diffop (size_zero_node, result);
+ result = size_diffop_loc (input_location,
+ size_zero_node, result);
else
{
error_not_base_type (from, to);
rtx temp;
rtx alt_rtl = NULL_RTX;
int dont_return_target = 0;
+ location_t loc = EXPR_LOCATION (exp);
if (VOID_TYPE_P (TREE_TYPE (exp)))
{
(TYPE_MODE (TREE_TYPE (exp)),
SUBREG_PROMOTED_UNSIGNED_P (target));
- exp = fold_convert (ntype, exp);
+ exp = fold_convert_loc (loc, ntype, exp);
}
- exp = fold_convert (lang_hooks.types.type_for_mode
- (GET_MODE (SUBREG_REG (target)),
- SUBREG_PROMOTED_UNSIGNED_P (target)),
- exp);
+ exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
+ (GET_MODE (SUBREG_REG (target)),
+ SUBREG_PROMOTED_UNSIGNED_P (target)),
+ exp);
inner_target = SUBREG_REG (target);
}
{
/* Compute the size of the data to copy from the string. */
tree copy_size
- = size_binop (MIN_EXPR,
- make_tree (sizetype, size),
- size_int (TREE_STRING_LENGTH (exp)));
+ = size_binop_loc (loc, MIN_EXPR,
+ make_tree (sizetype, size),
+ size_int (TREE_STRING_LENGTH (exp)));
rtx copy_size_rtx
= expand_expr (copy_size, NULL_RTX, VOIDmode,
(call_param_p
{
tree aligned_size = TREE_OPERAND (exp, 3);
tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
+ location_t loc = EXPR_LOCATION (exp);
/* If a size was specified in the ARRAY_REF, it's the size measured
in alignment units of the element type. So multiply by that value. */
/* ??? tree_ssa_useless_type_conversion will eliminate casts to
sizetype from another type of the same width and signedness. */
if (TREE_TYPE (aligned_size) != sizetype)
- aligned_size = fold_convert (sizetype, aligned_size);
- return size_binop (MULT_EXPR, aligned_size,
- size_int (TYPE_ALIGN_UNIT (elmt_type)));
+ aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
+ return size_binop_loc (loc, MULT_EXPR, aligned_size,
+ size_int (TYPE_ALIGN_UNIT (elmt_type)));
}
/* Otherwise, take the size from that of the element type. Substitute
{
tree aligned_offset = TREE_OPERAND (exp, 2);
tree field = TREE_OPERAND (exp, 1);
+ location_t loc = EXPR_LOCATION (exp);
/* If an offset was specified in the COMPONENT_REF, it's the offset measured
in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
/* ??? tree_ssa_useless_type_conversion will eliminate casts to
sizetype from another type of the same width and signedness. */
if (TREE_TYPE (aligned_offset) != sizetype)
- aligned_offset = fold_convert (sizetype, aligned_offset);
- return size_binop (MULT_EXPR, aligned_offset,
- size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
+ aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
+ return size_binop_loc (loc, MULT_EXPR, aligned_offset,
+ size_int (DECL_OFFSET_ALIGN (field)
+ / BITS_PER_UNIT));
}
/* Otherwise, take the offset from that of the field. Substitute
tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
tree arglist = build_tree_list (NULL_TREE, arg);
- tree call = build_function_call_expr (fn, arglist);
+ tree call = build_function_call_expr (UNKNOWN_LOCATION, fn, arglist);
return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
}
\f
bool reduce_bit_field;
gimple subexp0_def, subexp1_def;
tree top0, top1;
+ location_t loc = EXPR_LOCATION (exp);
#define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
? reduce_to_bit_field_precision ((expr), \
target, \
&& TREE_CODE (exp) == VAR_DECL
&& DECL_THREAD_LOCAL_P (exp))
{
- exp = build_fold_indirect_ref (emutls_var_address (exp));
+ exp = build_fold_indirect_ref_loc (loc, emutls_var_address (exp));
return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
}
{
tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
if (type_for_mode)
- tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
+ tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
}
if (!tmp)
tmp = build_constructor_from_list (type,
{
tree index1 = index;
tree low_bound = array_ref_low_bound (exp);
- index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
+ index1 = fold_convert_loc (loc, sizetype,
+ TREE_OPERAND (exp, 1));
/* Optimize the special-case of a zero lower bound.
+INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
if (! integer_zerop (low_bound))
- index1 = size_diffop (index1, fold_convert (sizetype,
- low_bound));
+ index1 = size_diffop_loc (loc, index1,
+ fold_convert_loc (loc, sizetype,
+ low_bound));
if (0 > compare_tree_int (index1,
TREE_STRING_LENGTH (init)))
/* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
if sizetype precision is smaller than pointer precision. */
if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
- exp = build2 (PLUS_EXPR, type,
- TREE_OPERAND (exp, 0),
- fold_convert (type,
- fold_convert (ssizetype,
- TREE_OPERAND (exp, 1))));
+ exp
+ = build2 (PLUS_EXPR, type,
+ TREE_OPERAND (exp, 0),
+ fold_convert_loc (loc, type,
+ fold_convert_loc (loc, ssizetype,
+ TREE_OPERAND (exp, 1))));
case PLUS_EXPR:
/* Check if this is a case for multiplication and addition. */
int unsignedp;
rtx op0, op1;
rtx subtarget = target;
+ location_t loc = EXPR_LOCATION (exp);
arg0 = TREE_OPERAND (exp, 0);
arg1 = TREE_OPERAND (exp, 1);
&& integer_pow2p (TREE_OPERAND (arg0, 1)))
{
tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
- return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
+ return expand_expr (fold_single_bit_test (loc,
+ code == NE ? NE_EXPR : EQ_EXPR,
arg0, arg1, type),
target, VOIDmode, EXPAND_NORMAL);
}
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
-static tree associate_trees (tree, tree, enum tree_code, tree);
+static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
-static tree eval_subst (tree, tree, tree, tree, tree);
-static tree pedantic_omit_one_operand (tree, tree, tree);
-static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
-static tree make_bit_field_ref (tree, tree, HOST_WIDE_INT, HOST_WIDE_INT, int);
-static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
-static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
+static tree eval_subst (location_t, tree, tree, tree, tree, tree);
+static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
+static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
+static tree make_bit_field_ref (location_t, tree, tree,
+ HOST_WIDE_INT, HOST_WIDE_INT, int);
+static tree optimize_bit_field_compare (location_t, enum tree_code,
+ tree, tree, tree);
+static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
+ HOST_WIDE_INT *,
enum machine_mode *, int *, int *,
tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
-extern tree build_range_check (tree, tree, int, tree, tree);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
tree, tree);
-static tree fold_range_test (enum tree_code, tree, tree, tree);
-static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
+static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
+static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
-static tree fold_truthop (enum tree_code, tree, tree, tree);
-static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
+static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
+static tree optimize_minmax_comparison (location_t, enum tree_code,
+ tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
-static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
+static tree fold_binary_op_with_conditional_arg (location_t,
+ enum tree_code, tree,
tree, tree,
tree, tree, int);
-static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
+static tree fold_mathfn_compare (location_t,
+ enum built_in_function, enum tree_code,
tree, tree, tree);
-static tree fold_inf_compare (enum tree_code, tree, tree, tree);
-static tree fold_div_compare (enum tree_code, tree, tree, tree);
+static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
+static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
returned. */
static tree
-fold_negate_expr (tree t)
+fold_negate_expr (location_t loc, tree t)
{
tree type = TREE_TYPE (t);
tree tem;
/* Convert - (~A) to A + 1. */
case BIT_NOT_EXPR:
if (INTEGRAL_TYPE_P (type))
- return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
+ return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
build_int_cst (type, 1));
break;
case COMPLEX_EXPR:
if (negate_expr_p (t))
- return fold_build2 (COMPLEX_EXPR, type,
- fold_negate_expr (TREE_OPERAND (t, 0)),
- fold_negate_expr (TREE_OPERAND (t, 1)));
+ return fold_build2_loc (loc, COMPLEX_EXPR, type,
+ fold_negate_expr (loc, TREE_OPERAND (t, 0)),
+ fold_negate_expr (loc, TREE_OPERAND (t, 1)));
break;
case CONJ_EXPR:
if (negate_expr_p (t))
- return fold_build1 (CONJ_EXPR, type,
- fold_negate_expr (TREE_OPERAND (t, 0)));
+ return fold_build1_loc (loc, CONJ_EXPR, type,
+ fold_negate_expr (loc, TREE_OPERAND (t, 0)));
break;
case NEGATE_EXPR:
TREE_OPERAND (t, 1)))
{
tem = negate_expr (TREE_OPERAND (t, 1));
- return fold_build2 (MINUS_EXPR, type,
+ return fold_build2_loc (loc, MINUS_EXPR, type,
tem, TREE_OPERAND (t, 0));
}
if (negate_expr_p (TREE_OPERAND (t, 0)))
{
tem = negate_expr (TREE_OPERAND (t, 0));
- return fold_build2 (MINUS_EXPR, type,
+ return fold_build2_loc (loc, MINUS_EXPR, type,
tem, TREE_OPERAND (t, 1));
}
}
if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
&& !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
&& reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
- return fold_build2 (MINUS_EXPR, type,
+ return fold_build2_loc (loc, MINUS_EXPR, type,
TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
break;
{
tem = TREE_OPERAND (t, 1);
if (negate_expr_p (tem))
- return fold_build2 (TREE_CODE (t), type,
+ return fold_build2_loc (loc, TREE_CODE (t), type,
TREE_OPERAND (t, 0), negate_expr (tem));
tem = TREE_OPERAND (t, 0);
if (negate_expr_p (tem))
- return fold_build2 (TREE_CODE (t), type,
+ return fold_build2_loc (loc, TREE_CODE (t), type,
negate_expr (tem), TREE_OPERAND (t, 1));
}
break;
&& (TREE_CODE (tem) != INTEGER_CST
|| integer_onep (tem)))
fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
- return fold_build2 (TREE_CODE (t), type,
+ return fold_build2_loc (loc, TREE_CODE (t), type,
TREE_OPERAND (t, 0), negate_expr (tem));
}
tem = TREE_OPERAND (t, 0);
&& (TREE_CODE (tem) != INTEGER_CST
|| tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
- return fold_build2 (TREE_CODE (t), type,
+ return fold_build2_loc (loc, TREE_CODE (t), type,
negate_expr (tem), TREE_OPERAND (t, 1));
}
}
{
tem = strip_float_extensions (t);
if (tem != t && negate_expr_p (tem))
- return fold_convert (type, negate_expr (tem));
+ return fold_convert_loc (loc, type, negate_expr (tem));
}
break;
fndecl = get_callee_fndecl (t);
arg = negate_expr (CALL_EXPR_ARG (t, 0));
- return build_call_expr (fndecl, 1, arg);
+ return build_call_expr_loc (loc, fndecl, 1, arg);
}
break;
tree ntype = TYPE_UNSIGNED (type)
? signed_type_for (type)
: unsigned_type_for (type);
- tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
- temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
- return fold_convert (type, temp);
+ tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
+ temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
+ return fold_convert_loc (loc, type, temp);
}
}
break;
negate_expr (tree t)
{
tree type, tem;
+ location_t loc;
if (t == NULL_TREE)
return NULL_TREE;
+ loc = EXPR_LOCATION (t);
type = TREE_TYPE (t);
STRIP_SIGN_NOPS (t);
- tem = fold_negate_expr (t);
+ tem = fold_negate_expr (loc, t);
if (!tem)
- tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
- return fold_convert (type, tem);
+ {
+ tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
+ SET_EXPR_LOCATION (tem, loc);
+ }
+ return fold_convert_loc (loc, type, tem);
}
\f
/* Split a tree IN into a constant, literal and variable parts that could be
return var;
}
-/* Re-associate trees split by the above function. T1 and T2 are either
- expressions to associate or null. Return the new expression, if any. If
+/* Re-associate trees split by the above function. T1 and T2 are
+ either expressions to associate or null. Return the new
+ expression, if any. LOC is the location of the new expression. If
we build an operation, do it in TYPE and with CODE. */
static tree
-associate_trees (tree t1, tree t2, enum tree_code code, tree type)
+associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
+ tree tem;
+
if (t1 == 0)
return t2;
else if (t2 == 0)
if (code == PLUS_EXPR)
{
if (TREE_CODE (t1) == NEGATE_EXPR)
- return build2 (MINUS_EXPR, type, fold_convert (type, t2),
- fold_convert (type, TREE_OPERAND (t1, 0)));
+ tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
+ fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
else if (TREE_CODE (t2) == NEGATE_EXPR)
- return build2 (MINUS_EXPR, type, fold_convert (type, t1),
- fold_convert (type, TREE_OPERAND (t2, 0)));
+ tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
+ fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
else if (integer_zerop (t2))
- return fold_convert (type, t1);
+ return fold_convert_loc (loc, type, t1);
}
else if (code == MINUS_EXPR)
{
if (integer_zerop (t2))
- return fold_convert (type, t1);
+ return fold_convert_loc (loc, type, t1);
}
- return build2 (code, type, fold_convert (type, t1),
- fold_convert (type, t2));
+ tem = build2 (code, type, fold_convert_loc (loc, type, t1),
+ fold_convert_loc (loc, type, t2));
+ goto associate_trees_exit;
}
- return fold_build2 (code, type, fold_convert (type, t1),
- fold_convert (type, t2));
+ return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
+ fold_convert_loc (loc, type, t2));
+ associate_trees_exit:
+ protected_set_expr_location (tem, loc);
+ return tem;
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
If the operands are constant, so is the result. */
tree
-size_binop (enum tree_code code, tree arg0, tree arg1)
+size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
tree type = TREE_TYPE (arg0);
return int_const_binop (code, arg0, arg1, 0);
}
- return fold_build2 (code, type, arg0, arg1);
+ return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
in signed type corresponding to the type of the operands. */
tree
-size_diffop (tree arg0, tree arg1)
+size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
tree type = TREE_TYPE (arg0);
tree ctype;
/* If the type is already signed, just do the simple thing. */
if (!TYPE_UNSIGNED (type))
- return size_binop (MINUS_EXPR, arg0, arg1);
+ return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
if (type == sizetype)
ctype = ssizetype;
type and subtract. The hardware will do the right thing with any
overflow in the subtraction. */
if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
- return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
- fold_convert (ctype, arg1));
+ return size_binop_loc (loc, MINUS_EXPR,
+ fold_convert_loc (loc, ctype, arg0),
+ fold_convert_loc (loc, ctype, arg1));
/* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
Otherwise, subtract the other way, convert to CTYPE (we know that can't
if (tree_int_cst_equal (arg0, arg1))
return build_int_cst (ctype, 0);
else if (tree_int_cst_lt (arg1, arg0))
- return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
+ return fold_convert_loc (loc, ctype,
+ size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
else
- return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
- fold_convert (ctype, size_binop (MINUS_EXPR,
- arg1, arg0)));
+ return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
+ fold_convert_loc (loc, ctype,
+ size_binop_loc (loc,
+ MINUS_EXPR,
+ arg1, arg0)));
}
\f
/* A subroutine of fold_convert_const handling conversions of an
simple conversions in preference to calling the front-end's convert. */
tree
-fold_convert (tree type, tree arg)
+fold_convert_loc (location_t loc, tree type, tree arg)
{
tree orig = TREE_TYPE (arg);
tree tem;
return error_mark_node;
if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
- return fold_build1 (NOP_EXPR, type, arg);
+ return fold_build1_loc (loc, NOP_EXPR, type, arg);
switch (TREE_CODE (type))
{
}
if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
|| TREE_CODE (orig) == OFFSET_TYPE)
- return fold_build1 (NOP_EXPR, type, arg);
+ return fold_build1_loc (loc, NOP_EXPR, type, arg);
if (TREE_CODE (orig) == COMPLEX_TYPE)
- {
- tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
- return fold_convert (type, tem);
- }
+ return fold_convert_loc (loc, type,
+ fold_build1_loc (loc, REALPART_EXPR,
+ TREE_TYPE (orig), arg));
gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
&& tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
- return fold_build1 (NOP_EXPR, type, arg);
+ return fold_build1_loc (loc, NOP_EXPR, type, arg);
case REAL_TYPE:
if (TREE_CODE (arg) == INTEGER_CST)
case INTEGER_TYPE:
case BOOLEAN_TYPE: case ENUMERAL_TYPE:
case POINTER_TYPE: case REFERENCE_TYPE:
- return fold_build1 (FLOAT_EXPR, type, arg);
+ return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
case REAL_TYPE:
- return fold_build1 (NOP_EXPR, type, arg);
+ return fold_build1_loc (loc, NOP_EXPR, type, arg);
case FIXED_POINT_TYPE:
- return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
+ return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
case COMPLEX_TYPE:
- tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
- return fold_convert (type, tem);
+ tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
+ return fold_convert_loc (loc, type, tem);
default:
gcc_unreachable ();
{
tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
if (tem != NULL_TREE)
- return tem;
+ goto fold_convert_exit;
}
switch (TREE_CODE (orig))
case ENUMERAL_TYPE:
case BOOLEAN_TYPE:
case REAL_TYPE:
- return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
+ return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
case COMPLEX_TYPE:
- tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
- return fold_convert (type, tem);
+ tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
+ return fold_convert_loc (loc, type, tem);
default:
gcc_unreachable ();
case POINTER_TYPE: case REFERENCE_TYPE:
case REAL_TYPE:
case FIXED_POINT_TYPE:
- return fold_build2 (COMPLEX_EXPR, type,
- fold_convert (TREE_TYPE (type), arg),
- fold_convert (TREE_TYPE (type),
+ return fold_build2_loc (loc, COMPLEX_EXPR, type,
+ fold_convert_loc (loc, TREE_TYPE (type), arg),
+ fold_convert_loc (loc, TREE_TYPE (type),
integer_zero_node));
case COMPLEX_TYPE:
{
if (TREE_CODE (arg) == COMPLEX_EXPR)
{
- rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
- ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
- return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
+ rpart = fold_convert_loc (loc, TREE_TYPE (type),
+ TREE_OPERAND (arg, 0));
+ ipart = fold_convert_loc (loc, TREE_TYPE (type),
+ TREE_OPERAND (arg, 1));
+ return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
}
arg = save_expr (arg);
- rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
- ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
- rpart = fold_convert (TREE_TYPE (type), rpart);
- ipart = fold_convert (TREE_TYPE (type), ipart);
- return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
+ rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
+ ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
+ rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
+ ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
+ return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
}
default:
gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
|| TREE_CODE (orig) == VECTOR_TYPE);
- return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
+ return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
case VOID_TYPE:
tem = fold_ignored_result (arg);
if (TREE_CODE (tem) == MODIFY_EXPR)
- return tem;
- return fold_build1 (NOP_EXPR, type, tem);
+ goto fold_convert_exit;
+ return fold_build1_loc (loc, NOP_EXPR, type, tem);
default:
gcc_unreachable ();
}
+ fold_convert_exit:
+ protected_set_expr_location (tem, loc);
+ return tem;
}
\f
/* Return false if expr can be assumed not to be an lvalue, true
/* Return an expr equal to X but certainly not valid as an lvalue. */
tree
-non_lvalue (tree x)
+non_lvalue_loc (location_t loc, tree x)
{
/* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
us. */
if (! maybe_lvalue_p (x))
return x;
- return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
+ x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
+ SET_EXPR_LOCATION (x, loc);
+ return x;
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
pedantic lvalue. Otherwise, return X. */
static tree
-pedantic_non_lvalue (tree x)
+pedantic_non_lvalue_loc (location_t loc, tree x)
{
if (pedantic_lvalues)
- return non_lvalue (x);
- else
- return x;
+ return non_lvalue_loc (loc, x);
+ protected_set_expr_location (x, loc);
+ return x;
}
\f
/* Given a tree comparison code, return the code that is the logical inverse
if this makes the transformation invalid. */
tree
-combine_comparisons (enum tree_code code, enum tree_code lcode,
+combine_comparisons (location_t loc,
+ enum tree_code code, enum tree_code lcode,
enum tree_code rcode, tree truth_type,
tree ll_arg, tree lr_arg)
{
enum tree_code tcode;
tcode = compcode_to_comparison ((enum comparison_code) compcode);
- return fold_build2 (tcode, truth_type, ll_arg, lr_arg);
+ return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
}
}
\f
NEW1 and OLD1. */
static tree
-eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
+eval_subst (location_t loc, tree arg, tree old0, tree new0,
+ tree old1, tree new1)
{
tree type = TREE_TYPE (arg);
enum tree_code code = TREE_CODE (arg);
switch (tclass)
{
case tcc_unary:
- return fold_build1 (code, type,
- eval_subst (TREE_OPERAND (arg, 0),
+ return fold_build1_loc (loc, code, type,
+ eval_subst (loc, TREE_OPERAND (arg, 0),
old0, new0, old1, new1));
case tcc_binary:
- return fold_build2 (code, type,
- eval_subst (TREE_OPERAND (arg, 0),
+ return fold_build2_loc (loc, code, type,
+ eval_subst (loc, TREE_OPERAND (arg, 0),
old0, new0, old1, new1),
- eval_subst (TREE_OPERAND (arg, 1),
+ eval_subst (loc, TREE_OPERAND (arg, 1),
old0, new0, old1, new1));
case tcc_expression:
switch (code)
{
case SAVE_EXPR:
- return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
+ return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
+ old1, new1);
case COMPOUND_EXPR:
- return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
+ return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
+ old1, new1);
case COND_EXPR:
- return fold_build3 (code, type,
- eval_subst (TREE_OPERAND (arg, 0),
+ return fold_build3_loc (loc, code, type,
+ eval_subst (loc, TREE_OPERAND (arg, 0),
old0, new0, old1, new1),
- eval_subst (TREE_OPERAND (arg, 1),
+ eval_subst (loc, TREE_OPERAND (arg, 1),
old0, new0, old1, new1),
- eval_subst (TREE_OPERAND (arg, 2),
+ eval_subst (loc, TREE_OPERAND (arg, 2),
old0, new0, old1, new1));
default:
break;
else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
arg1 = new1;
- return fold_build2 (code, type, arg0, arg1);
+ return fold_build2_loc (loc, code, type, arg0, arg1);
}
default:
the conversion of RESULT to TYPE. */
tree
-omit_one_operand (tree type, tree result, tree omitted)
+omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
- tree t = fold_convert (type, result);
+ tree t = fold_convert_loc (loc, type, result);
/* If the resulting operand is an empty statement, just return the omitted
statement casted to void. */
if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
- return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
+ {
+ t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
+ goto omit_one_operand_exit;
+ }
if (TREE_SIDE_EFFECTS (omitted))
- return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
+ {
+ t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
+ goto omit_one_operand_exit;
+ }
- return non_lvalue (t);
+ return non_lvalue_loc (loc, t);
+
+ omit_one_operand_exit:
+ protected_set_expr_location (t, loc);
+ return t;
}
/* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
static tree
-pedantic_omit_one_operand (tree type, tree result, tree omitted)
+pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
+ tree omitted)
{
- tree t = fold_convert (type, result);
+ tree t = fold_convert_loc (loc, type, result);
/* If the resulting operand is an empty statement, just return the omitted
statement casted to void. */
if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
- return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
+ {
+ t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
+ goto pedantic_omit_one_operand_exit;
+ }
if (TREE_SIDE_EFFECTS (omitted))
- return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
+ {
+ t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
+ goto pedantic_omit_one_operand_exit;
+ }
- return pedantic_non_lvalue (t);
+ return pedantic_non_lvalue_loc (loc, t);
+
+ pedantic_omit_one_operand_exit:
+ protected_set_expr_location (t, loc);
+ return t;
}
/* Return a tree for the case when the result of an expression is RESULT
just do the conversion of RESULT to TYPE. */
tree
-omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
+omit_two_operands_loc (location_t loc, tree type, tree result,
+ tree omitted1, tree omitted2)
{
- tree t = fold_convert (type, result);
+ tree t = fold_convert_loc (loc, type, result);
if (TREE_SIDE_EFFECTS (omitted2))
- t = build2 (COMPOUND_EXPR, type, omitted2, t);
+ {
+ t = build2 (COMPOUND_EXPR, type, omitted2, t);
+ SET_EXPR_LOCATION (t, loc);
+ }
if (TREE_SIDE_EFFECTS (omitted1))
- t = build2 (COMPOUND_EXPR, type, omitted1, t);
+ {
+ t = build2 (COMPOUND_EXPR, type, omitted1, t);
+ SET_EXPR_LOCATION (t, loc);
+ }
- return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
+ return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
\f
problems with the dominator optimizer. */
tree
-fold_truth_not_expr (tree arg)
+fold_truth_not_expr (location_t loc, tree arg)
{
tree t, type = TREE_TYPE (arg);
enum tree_code code = TREE_CODE (arg);
+ location_t loc1, loc2;
/* If this is a comparison, we can simply invert it, except for
floating-point non-equality comparisons, in which case we just
return NULL_TREE;
t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
- if (EXPR_HAS_LOCATION (arg))
- SET_EXPR_LOCATION (t, EXPR_LOCATION (arg));
+ SET_EXPR_LOCATION (t, loc);
return t;
}
return constant_boolean_node (integer_zerop (arg), type);
case TRUTH_AND_EXPR:
+ loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
+ loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
+ if (loc1 == UNKNOWN_LOCATION)
+ loc1 = loc;
+ if (loc2 == UNKNOWN_LOCATION)
+ loc2 = loc;
t = build2 (TRUTH_OR_EXPR, type,
- invert_truthvalue (TREE_OPERAND (arg, 0)),
- invert_truthvalue (TREE_OPERAND (arg, 1)));
+ invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
+ invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
break;
case TRUTH_OR_EXPR:
+ loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
+ loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
+ if (loc1 == UNKNOWN_LOCATION)
+ loc1 = loc;
+ if (loc2 == UNKNOWN_LOCATION)
+ loc2 = loc;
t = build2 (TRUTH_AND_EXPR, type,
- invert_truthvalue (TREE_OPERAND (arg, 0)),
- invert_truthvalue (TREE_OPERAND (arg, 1)));
+ invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
+ invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
break;
case TRUTH_XOR_EXPR:
TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
else
t = build2 (TRUTH_XOR_EXPR, type,
- invert_truthvalue (TREE_OPERAND (arg, 0)),
+ invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
TREE_OPERAND (arg, 1));
break;
case TRUTH_ANDIF_EXPR:
+ loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
+ loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
+ if (loc1 == UNKNOWN_LOCATION)
+ loc1 = loc;
+ if (loc2 == UNKNOWN_LOCATION)
+ loc2 = loc;
t = build2 (TRUTH_ORIF_EXPR, type,
- invert_truthvalue (TREE_OPERAND (arg, 0)),
- invert_truthvalue (TREE_OPERAND (arg, 1)));
+ invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
+ invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
break;
case TRUTH_ORIF_EXPR:
+ loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
+ loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
+ if (loc1 == UNKNOWN_LOCATION)
+ loc1 = loc;
+ if (loc2 == UNKNOWN_LOCATION)
+ loc2 = loc;
t = build2 (TRUTH_ANDIF_EXPR, type,
- invert_truthvalue (TREE_OPERAND (arg, 0)),
- invert_truthvalue (TREE_OPERAND (arg, 1)));
+ invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
+ invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
break;
case TRUTH_NOT_EXPR:
{
tree arg1 = TREE_OPERAND (arg, 1);
tree arg2 = TREE_OPERAND (arg, 2);
+
+ loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
+ loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
+ if (loc1 == UNKNOWN_LOCATION)
+ loc1 = loc;
+ if (loc2 == UNKNOWN_LOCATION)
+ loc2 = loc;
+
/* A COND_EXPR may have a throw as one operand, which
then has void type. Just leave void operands
as they are. */
t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
VOID_TYPE_P (TREE_TYPE (arg1))
- ? arg1 : invert_truthvalue (arg1),
+ ? arg1 : invert_truthvalue_loc (loc1, arg1),
VOID_TYPE_P (TREE_TYPE (arg2))
- ? arg2 : invert_truthvalue (arg2));
+ ? arg2 : invert_truthvalue_loc (loc2, arg2));
break;
}
case COMPOUND_EXPR:
- t = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
- invert_truthvalue (TREE_OPERAND (arg, 1)));
+ loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
+ if (loc1 == UNKNOWN_LOCATION)
+ loc1 = loc;
+ t = build2 (COMPOUND_EXPR, type,
+ TREE_OPERAND (arg, 0),
+ invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
break;
case NON_LVALUE_EXPR:
- return invert_truthvalue (TREE_OPERAND (arg, 0));
+ loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
+ if (loc1 == UNKNOWN_LOCATION)
+ loc1 = loc;
+ return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
CASE_CONVERT:
if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
/* ... fall through ... */
case FLOAT_EXPR:
+ loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
+ if (loc1 == UNKNOWN_LOCATION)
+ loc1 = loc;
t = build1 (TREE_CODE (arg), type,
- invert_truthvalue (TREE_OPERAND (arg, 0)));
+ invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
break;
case BIT_AND_EXPR:
break;
case CLEANUP_POINT_EXPR:
+ loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
+ if (loc1 == UNKNOWN_LOCATION)
+ loc1 = loc;
t = build1 (CLEANUP_POINT_EXPR, type,
- invert_truthvalue (TREE_OPERAND (arg, 0)));
+ invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
break;
default:
break;
}
- if (t && EXPR_HAS_LOCATION (arg))
- SET_EXPR_LOCATION (t, EXPR_LOCATION (arg));
+ if (t)
+ SET_EXPR_LOCATION (t, loc);
return t;
}
problems with the dominator optimizer. */
tree
-invert_truthvalue (tree arg)
+invert_truthvalue_loc (location_t loc, tree arg)
{
tree tem;
if (TREE_CODE (arg) == ERROR_MARK)
return arg;
- tem = fold_truth_not_expr (arg);
+ tem = fold_truth_not_expr (loc, arg);
if (!tem)
- tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
+ {
+ tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
+ SET_EXPR_LOCATION (tem, loc);
+ }
return tem;
}
If this optimization cannot be done, 0 will be returned. */
static tree
-distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
+distribute_bit_expr (location_t loc, enum tree_code code, tree type,
+ tree arg0, tree arg1)
{
tree common;
tree left, right;
else
return 0;
- common = fold_convert (type, common);
- left = fold_convert (type, left);
- right = fold_convert (type, right);
- return fold_build2 (TREE_CODE (arg0), type, common,
- fold_build2 (code, type, left, right));
+ common = fold_convert_loc (loc, type, common);
+ left = fold_convert_loc (loc, type, left);
+ right = fold_convert_loc (loc, type, right);
+ return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
+ fold_build2_loc (loc, code, type, left, right));
}
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
with code CODE. This optimization is unsafe. */
static tree
-distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
+distribute_real_division (location_t loc, enum tree_code code, tree type,
+ tree arg0, tree arg1)
{
bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
if (mul0 == mul1
&& operand_equal_p (TREE_OPERAND (arg0, 1),
TREE_OPERAND (arg1, 1), 0))
- return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
- fold_build2 (code, type,
+ return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
+ fold_build2_loc (loc, code, type,
TREE_OPERAND (arg0, 0),
TREE_OPERAND (arg1, 0)),
TREE_OPERAND (arg0, 1));
if (!mul1)
real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
real_arithmetic (&r0, code, &r0, &r1);
- return fold_build2 (MULT_EXPR, type,
+ return fold_build2_loc (loc, MULT_EXPR, type,
TREE_OPERAND (arg0, 0),
build_real (type, r0));
}
starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
static tree
-make_bit_field_ref (tree inner, tree type, HOST_WIDE_INT bitsize,
- HOST_WIDE_INT bitpos, int unsignedp)
+make_bit_field_ref (location_t loc, tree inner, tree type,
+ HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
tree result, bftype;
|| POINTER_TYPE_P (TREE_TYPE (inner)))
&& host_integerp (size, 0)
&& tree_low_cst (size, 0) == bitsize)
- return fold_convert (type, inner);
+ return fold_convert_loc (loc, type, inner);
}
bftype = type;
result = build3 (BIT_FIELD_REF, bftype, inner,
size_int (bitsize), bitsize_int (bitpos));
+ SET_EXPR_LOCATION (result, loc);
if (bftype != type)
- result = fold_convert (type, result);
+ result = fold_convert_loc (loc, type, result);
return result;
}
tree. Otherwise we return zero. */
static tree
-optimize_bit_field_compare (enum tree_code code, tree compare_type,
- tree lhs, tree rhs)
+optimize_bit_field_compare (location_t loc, enum tree_code code,
+ tree compare_type, tree lhs, tree rhs)
{
HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
tree type = TREE_TYPE (lhs);
if (! const_p)
/* If not comparing with constant, just rework the comparison
and return. */
- return fold_build2 (code, compare_type,
- fold_build2 (BIT_AND_EXPR, unsigned_type,
- make_bit_field_ref (linner,
+ return fold_build2_loc (loc, code, compare_type,
+ fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
+ make_bit_field_ref (loc, linner,
unsigned_type,
nbitsize, nbitpos,
1),
mask),
- fold_build2 (BIT_AND_EXPR, unsigned_type,
- make_bit_field_ref (rinner,
+ fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
+ make_bit_field_ref (loc, rinner,
unsigned_type,
nbitsize, nbitpos,
1),
if (lunsignedp)
{
if (! integer_zerop (const_binop (RSHIFT_EXPR,
- fold_convert (unsigned_type, rhs),
+ fold_convert_loc (loc,
+ unsigned_type, rhs),
size_int (lbitsize), 0)))
{
warning (0, "comparison is always %d due to width of bit-field",
}
else
{
- tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
+ tree tem = const_binop (RSHIFT_EXPR,
+ fold_convert_loc (loc, signed_type, rhs),
size_int (lbitsize - 1), 0);
if (! integer_zerop (tem) && ! integer_all_onesp (tem))
{
/* Make a new bitfield reference, shift the constant over the
appropriate number of bits and mask it with the computed mask
(in case this was a signed field). If we changed it, make a new one. */
- lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
+ lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
if (lvolatilep)
{
TREE_SIDE_EFFECTS (lhs) = 1;
rhs = const_binop (BIT_AND_EXPR,
const_binop (LSHIFT_EXPR,
- fold_convert (unsigned_type, rhs),
+ fold_convert_loc (loc, unsigned_type, rhs),
size_int (lbitpos), 0),
mask, 0);
- return build2 (code, compare_type,
- build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
- rhs);
+ lhs = build2 (code, compare_type,
+ build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
+ rhs);
+ SET_EXPR_LOCATION (lhs, loc);
+ return lhs;
}
\f
/* Subroutine for fold_truthop: decode a field reference.
do anything with. */
static tree
-decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
+decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
int *punsignedp, int *pvolatilep,
tree *pmask, tree *pand_mask)
/* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
if (and_mask != 0)
- mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
- fold_convert (unsigned_type, and_mask), mask);
+ mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
+ fold_convert_loc (loc, unsigned_type, and_mask), mask);
*pmask = mask;
*pand_mask = and_mask;
tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
int in_p, n_in_p;
tree low, high, n_low, n_high;
+ location_t loc = EXPR_LOCATION (exp);
/* Start with simply saying "EXP != 0" and then look at the code of EXP
and see if we can refine the range. Some of the cases below may not
/* ~ X -> -X - 1 */
exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
build_int_cst (exp_type, 1));
+ SET_EXPR_LOCATION (exp, loc);
continue;
case PLUS_EXPR: case MINUS_EXPR:
n_low = low, n_high = high;
if (n_low != 0)
- n_low = fold_convert (arg0_type, n_low);
+ n_low = fold_convert_loc (loc, arg0_type, n_low);
if (n_high != 0)
- n_high = fold_convert (arg0_type, n_high);
+ n_high = fold_convert_loc (loc, arg0_type, n_high);
/* If we're converting arg0 from an unsigned type, to exp,
: TYPE_MAX_VALUE (arg0_type);
if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
- high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
- fold_convert (arg0_type,
- high_positive),
+ high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
+ fold_convert_loc (loc, arg0_type,
+ high_positive),
build_int_cst (arg0_type, 1));
/* If the low bound is specified, "and" the range with the
{
if (! merge_ranges (&n_in_p, &n_low, &n_high,
1, n_low, n_high, 1,
- fold_convert (arg0_type,
- integer_zero_node),
+ fold_convert_loc (loc, arg0_type,
+ integer_zero_node),
high_positive))
break;
that will be interpreted as negative. */
if (! merge_ranges (&n_in_p, &n_low, &n_high,
0, n_low, n_high, 1,
- fold_convert (arg0_type,
- integer_zero_node),
+ fold_convert_loc (loc, arg0_type,
+ integer_zero_node),
high_positive))
break;
on IN_P) the range. Return 0 if the test couldn't be created. */
tree
-build_range_check (tree type, tree exp, int in_p, tree low, tree high)
+build_range_check (location_t loc, tree type, tree exp, int in_p,
+ tree low, tree high)
{
tree etype = TREE_TYPE (exp), value;
if (! in_p)
{
- value = build_range_check (type, exp, 1, low, high);
+ value = build_range_check (loc, type, exp, 1, low, high);
if (value != 0)
- return invert_truthvalue (value);
+ return invert_truthvalue_loc (loc, value);
return 0;
}
return build_int_cst (type, 1);
if (low == 0)
- return fold_build2 (LE_EXPR, type, exp,
- fold_convert (etype, high));
+ return fold_build2_loc (loc, LE_EXPR, type, exp,
+ fold_convert_loc (loc, etype, high));
if (high == 0)
- return fold_build2 (GE_EXPR, type, exp,
- fold_convert (etype, low));
+ return fold_build2_loc (loc, GE_EXPR, type, exp,
+ fold_convert_loc (loc, etype, low));
if (operand_equal_p (low, high, 0))
- return fold_build2 (EQ_EXPR, type, exp,
- fold_convert (etype, low));
+ return fold_build2_loc (loc, EQ_EXPR, type, exp,
+ fold_convert_loc (loc, etype, low));
if (integer_zerop (low))
{
if (! TYPE_UNSIGNED (etype))
{
etype = unsigned_type_for (etype);
- high = fold_convert (etype, high);
- exp = fold_convert (etype, exp);
+ high = fold_convert_loc (loc, etype, high);
+ exp = fold_convert_loc (loc, etype, exp);
}
- return build_range_check (type, exp, 1, 0, high);
+ return build_range_check (loc, type, exp, 1, 0, high);
}
/* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
= build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
else
etype = signed_etype;
- exp = fold_convert (etype, exp);
+ exp = fold_convert_loc (loc, etype, exp);
}
- return fold_build2 (GT_EXPR, type, exp,
+ return fold_build2_loc (loc, GT_EXPR, type, exp,
build_int_cst (etype, 0));
}
}
/* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
for the type in question, as we rely on this here. */
utype = unsigned_type_for (etype);
- maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
+ maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
integer_one_node, 1);
- minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
+ minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
minv, 1, maxv, 1)))
return 0;
}
- high = fold_convert (etype, high);
- low = fold_convert (etype, low);
- exp = fold_convert (etype, exp);
+ high = fold_convert_loc (loc, etype, high);
+ low = fold_convert_loc (loc, etype, low);
+ exp = fold_convert_loc (loc, etype, exp);
value = const_binop (MINUS_EXPR, high, low, 0);
{
if (value != 0 && !TREE_OVERFLOW (value))
{
- low = fold_convert (sizetype, low);
- low = fold_build1 (NEGATE_EXPR, sizetype, low);
- return build_range_check (type,
- fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
+ low = fold_convert_loc (loc, sizetype, low);
+ low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
+ return build_range_check (loc, type,
+ fold_build2_loc (loc, POINTER_PLUS_EXPR,
+ etype, exp, low),
1, build_int_cst (etype, 0), value);
}
return 0;
}
if (value != 0 && !TREE_OVERFLOW (value))
- return build_range_check (type,
- fold_build2 (MINUS_EXPR, etype, exp, low),
+ return build_range_check (loc, type,
+ fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
1, build_int_cst (etype, 0), value);
return 0;
anymore, or NULL_TREE if no folding opportunity is found. */
static tree
-fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
+fold_cond_expr_with_comparison (location_t loc, tree type,
+ tree arg0, tree arg1, tree arg2)
{
enum tree_code comp_code = TREE_CODE (arg0);
tree arg00 = TREE_OPERAND (arg0, 0);
{
case EQ_EXPR:
case UNEQ_EXPR:
- tem = fold_convert (arg1_type, arg1);
- return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
+ tem = fold_convert_loc (loc, arg1_type, arg1);
+ return pedantic_non_lvalue_loc (loc,
+ fold_convert_loc (loc, type,
+ negate_expr (tem)));
case NE_EXPR:
case LTGT_EXPR:
- return pedantic_non_lvalue (fold_convert (type, arg1));
+ return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
case UNGE_EXPR:
case UNGT_EXPR:
if (flag_trapping_math)
case GE_EXPR:
case GT_EXPR:
if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
- arg1 = fold_convert (signed_type_for
+ arg1 = fold_convert_loc (loc, signed_type_for
(TREE_TYPE (arg1)), arg1);
- tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
- return pedantic_non_lvalue (fold_convert (type, tem));
+ tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
+ return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
case UNLE_EXPR:
case UNLT_EXPR:
if (flag_trapping_math)
case LE_EXPR:
case LT_EXPR:
if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
- arg1 = fold_convert (signed_type_for
+ arg1 = fold_convert_loc (loc, signed_type_for
(TREE_TYPE (arg1)), arg1);
- tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
- return negate_expr (fold_convert (type, tem));
+ tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
+ return negate_expr (fold_convert_loc (loc, type, tem));
default:
gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
break;
&& integer_zerop (arg01) && integer_zerop (arg2))
{
if (comp_code == NE_EXPR)
- return pedantic_non_lvalue (fold_convert (type, arg1));
+ return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
else if (comp_code == EQ_EXPR)
return build_int_cst (type, 0);
}
switch (comp_code)
{
case EQ_EXPR:
- return pedantic_non_lvalue (fold_convert (type, arg2));
+ return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
case NE_EXPR:
- return pedantic_non_lvalue (fold_convert (type, arg1));
+ return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
case LE_EXPR:
case LT_EXPR:
case UNLE_EXPR:
corresponding COND_EXPR. */
if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
{
- comp_op0 = fold_convert (comp_type, comp_op0);
- comp_op1 = fold_convert (comp_type, comp_op1);
+ comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
+ comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
- ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
- : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
- return pedantic_non_lvalue (fold_convert (type, tem));
+ ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
+ : fold_build2_loc (loc, MIN_EXPR, comp_type,
+ comp_op1, comp_op0);
+ return pedantic_non_lvalue_loc (loc,
+ fold_convert_loc (loc, type, tem));
}
break;
case GE_EXPR:
case UNGT_EXPR:
if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
{
- comp_op0 = fold_convert (comp_type, comp_op0);
- comp_op1 = fold_convert (comp_type, comp_op1);
+ comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
+ comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
- ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
- : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
- return pedantic_non_lvalue (fold_convert (type, tem));
+ ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
+ : fold_build2_loc (loc, MAX_EXPR, comp_type,
+ comp_op1, comp_op0);
+ return pedantic_non_lvalue_loc (loc,
+ fold_convert_loc (loc, type, tem));
}
break;
case UNEQ_EXPR:
if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
- return pedantic_non_lvalue (fold_convert (type, arg2));
+ return pedantic_non_lvalue_loc (loc,
+ fold_convert_loc (loc, type, arg2));
break;
case LTGT_EXPR:
if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
- return pedantic_non_lvalue (fold_convert (type, arg1));
+ return pedantic_non_lvalue_loc (loc,
+ fold_convert_loc (loc, type, arg1));
break;
default:
gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
if (TREE_CODE (arg1) == INTEGER_CST)
break;
/* We can replace A with C1 in this case. */
- arg1 = fold_convert (type, arg01);
- return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
+ arg1 = fold_convert_loc (loc, type, arg01);
+ return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
case LT_EXPR:
/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
build_int_cst (type, 1), 0),
OEP_ONLY_CONST))
{
- tem = fold_build2 (MIN_EXPR, TREE_TYPE (arg00), arg00,
- fold_convert (TREE_TYPE (arg00), arg2));
- return pedantic_non_lvalue (fold_convert (type, tem));
+ tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
+ fold_convert_loc (loc, TREE_TYPE (arg00),
+ arg2));
+ return pedantic_non_lvalue_loc (loc,
+ fold_convert_loc (loc, type, tem));
}
break;
build_int_cst (type, 1), 0),
OEP_ONLY_CONST))
{
- tem = fold_build2 (MIN_EXPR, TREE_TYPE (arg00), arg00,
- fold_convert (TREE_TYPE (arg00), arg2));
- return pedantic_non_lvalue (fold_convert (type, tem));
+ tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
+ fold_convert_loc (loc, TREE_TYPE (arg00),
+ arg2));
+ return pedantic_non_lvalue_loc (loc,
+ fold_convert_loc (loc, type, tem));
}
break;
build_int_cst (type, 1), 0),
OEP_ONLY_CONST))
{
- tem = fold_build2 (MAX_EXPR, TREE_TYPE (arg00), arg00,
- fold_convert (TREE_TYPE (arg00), arg2));
- return pedantic_non_lvalue (fold_convert (type, tem));
+ tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
+ fold_convert_loc (loc, TREE_TYPE (arg00),
+ arg2));
+ return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
}
break;
build_int_cst (type, 1), 0),
OEP_ONLY_CONST))
{
- tem = fold_build2 (MAX_EXPR, TREE_TYPE (arg00), arg00,
- fold_convert (TREE_TYPE (arg00), arg2));
- return pedantic_non_lvalue (fold_convert (type, tem));
+ tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
+ fold_convert_loc (loc, TREE_TYPE (arg00),
+ arg2));
+ return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
}
break;
case NE_EXPR:
merge it into some range test. Return the new tree if so. */
static tree
-fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
+fold_range_test (location_t loc, enum tree_code code, tree type,
+ tree op0, tree op1)
{
int or_op = (code == TRUTH_ORIF_EXPR
|| code == TRUTH_OR_EXPR);
if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
&& merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
in1_p, low1, high1)
- && 0 != (tem = (build_range_check (type,
+ && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
lhs != 0 ? lhs
: rhs != 0 ? rhs : integer_zero_node,
in_p, low, high))))
{
if (strict_overflow_p)
fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
- return or_op ? invert_truthvalue (tem) : tem;
+ return or_op ? invert_truthvalue_loc (loc, tem) : tem;
}
/* On machines where the branch cost is expensive, if this is a
unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
which cases we can't do this. */
if (simple_operand_p (lhs))
- return build2 (code == TRUTH_ANDIF_EXPR
- ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
- type, op0, op1);
+ {
+ tem = build2 (code == TRUTH_ANDIF_EXPR
+ ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
+ type, op0, op1);
+ SET_EXPR_LOCATION (tem, loc);
+ return tem;
+ }
else if (lang_hooks.decls.global_bindings_p () == 0
&& ! CONTAINS_PLACEHOLDER_P (lhs))
{
tree common = save_expr (lhs);
- if (0 != (lhs = build_range_check (type, common,
+ if (0 != (lhs = build_range_check (loc, type, common,
or_op ? ! in0_p : in0_p,
low0, high0))
- && (0 != (rhs = build_range_check (type, common,
+ && (0 != (rhs = build_range_check (loc, type, common,
or_op ? ! in1_p : in1_p,
low1, high1))))
{
if (strict_overflow_p)
fold_overflow_warning (warnmsg,
WARN_STRICT_OVERFLOW_COMPARISON);
- return build2 (code == TRUTH_ANDIF_EXPR
- ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
- type, lhs, rhs);
+ tem = build2 (code == TRUTH_ANDIF_EXPR
+ ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
+ type, lhs, rhs);
+ SET_EXPR_LOCATION (tem, loc);
+ return tem;
}
}
}
temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
if (mask != 0)
temp = const_binop (BIT_AND_EXPR, temp,
- fold_convert (TREE_TYPE (c), mask), 0);
+ fold_convert (TREE_TYPE (c), mask),
+ 0);
/* If necessary, convert the type back to match the type of C. */
if (TYPE_UNSIGNED (type))
temp = fold_convert (type, temp);
- return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
+ return fold_convert (type,
+ const_binop (BIT_XOR_EXPR, c, temp, 0));
}
\f
/* Find ways of folding logical expressions of LHS and RHS:
We return the simplified tree or 0 if no optimization is possible. */
static tree
-fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
+fold_truthop (location_t loc, enum tree_code code, tree truth_type,
+ tree lhs, tree rhs)
{
/* If this is the "or" of two comparisons, we can do something if
the comparisons are NE_EXPR. If this is the "and", we can do something
if (operand_equal_p (ll_arg, rl_arg, 0)
&& operand_equal_p (lr_arg, rr_arg, 0))
{
- result = combine_comparisons (code, lcode, rcode,
+ result = combine_comparisons (loc, code, lcode, rcode,
truth_type, ll_arg, lr_arg);
if (result)
return result;
else if (operand_equal_p (ll_arg, rr_arg, 0)
&& operand_equal_p (lr_arg, rl_arg, 0))
{
- result = combine_comparisons (code, lcode,
+ result = combine_comparisons (loc, code, lcode,
swap_tree_comparison (rcode),
truth_type, ll_arg, lr_arg);
if (result)
&& rcode == NE_EXPR && integer_zerop (rr_arg)
&& TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
&& INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
- return build2 (NE_EXPR, truth_type,
- build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
- ll_arg, rl_arg),
- build_int_cst (TREE_TYPE (ll_arg), 0));
+ {
+ result = build2 (NE_EXPR, truth_type,
+ build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
+ ll_arg, rl_arg),
+ build_int_cst (TREE_TYPE (ll_arg), 0));
+ goto fold_truthop_exit;
+ }
/* Convert (a == 0) && (b == 0) into (a | b) == 0. */
if (code == TRUTH_AND_EXPR
&& rcode == EQ_EXPR && integer_zerop (rr_arg)
&& TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
&& INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
- return build2 (EQ_EXPR, truth_type,
- build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
- ll_arg, rl_arg),
- build_int_cst (TREE_TYPE (ll_arg), 0));
+ {
+ result = build2 (EQ_EXPR, truth_type,
+ build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
+ ll_arg, rl_arg),
+ build_int_cst (TREE_TYPE (ll_arg), 0));
+ goto fold_truthop_exit;
+ }
if (LOGICAL_OP_NON_SHORT_CIRCUIT)
{
if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
- return build2 (code, truth_type, lhs, rhs);
+ {
+ result = build2 (code, truth_type, lhs, rhs);
+ goto fold_truthop_exit;
+ }
return NULL_TREE;
}
}
return 0;
volatilep = 0;
- ll_inner = decode_field_reference (ll_arg,
+ ll_inner = decode_field_reference (loc, ll_arg,
&ll_bitsize, &ll_bitpos, &ll_mode,
&ll_unsignedp, &volatilep, &ll_mask,
&ll_and_mask);
- lr_inner = decode_field_reference (lr_arg,
+ lr_inner = decode_field_reference (loc, lr_arg,
&lr_bitsize, &lr_bitpos, &lr_mode,
&lr_unsignedp, &volatilep, &lr_mask,
&lr_and_mask);
- rl_inner = decode_field_reference (rl_arg,
+ rl_inner = decode_field_reference (loc, rl_arg,
&rl_bitsize, &rl_bitpos, &rl_mode,
&rl_unsignedp, &volatilep, &rl_mask,
&rl_and_mask);
- rr_inner = decode_field_reference (rr_arg,
+ rr_inner = decode_field_reference (loc, rr_arg,
&rr_bitsize, &rr_bitpos, &rr_mode,
&rr_unsignedp, &volatilep, &rr_mask,
&rr_and_mask);
xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
}
- ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
+ ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
size_int (xll_bitpos), 0);
- rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
+ rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
size_int (xrl_bitpos), 0);
if (l_const)
{
- l_const = fold_convert (lntype, l_const);
+ l_const = fold_convert_loc (loc, lntype, l_const);
l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
- fold_build1 (BIT_NOT_EXPR,
+ fold_build1_loc (loc, BIT_NOT_EXPR,
lntype, ll_mask),
0)))
{
}
if (r_const)
{
- r_const = fold_convert (lntype, r_const);
+ r_const = fold_convert_loc (loc, lntype, r_const);
r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
- fold_build1 (BIT_NOT_EXPR,
+ fold_build1_loc (loc, BIT_NOT_EXPR,
lntype, rl_mask),
0)))
{
xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
}
- lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
+ lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
+ rntype, lr_mask),
size_int (xlr_bitpos), 0);
- rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
+ rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
+ rntype, rr_mask),
size_int (xrr_bitpos), 0);
/* Make a mask that corresponds to both fields being compared.
lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
{
- lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
+ lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
ll_unsignedp || rl_unsignedp);
if (! all_ones_mask_p (ll_mask, lnbitsize))
lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
- rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
+ rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
lr_unsignedp || rr_unsignedp);
if (! all_ones_mask_p (lr_mask, rnbitsize))
rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
- return build2 (wanted_code, truth_type, lhs, rhs);
+ result = build2 (wanted_code, truth_type, lhs, rhs);
+ goto fold_truthop_exit;
}
/* There is still another way we can do something: If both pairs of
{
tree type;
- lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
+ lhs = make_bit_field_ref (loc, ll_inner, lntype,
+ ll_bitsize + rl_bitsize,
MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
- rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
+ rhs = make_bit_field_ref (loc, lr_inner, rntype,
+ lr_bitsize + rr_bitsize,
MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
{
if (lnbitsize > rnbitsize)
{
- lhs = fold_convert (rntype, lhs);
- ll_mask = fold_convert (rntype, ll_mask);
+ lhs = fold_convert_loc (loc, rntype, lhs);
+ ll_mask = fold_convert_loc (loc, rntype, ll_mask);
type = rntype;
}
else if (lnbitsize < rnbitsize)
{
- rhs = fold_convert (lntype, rhs);
- lr_mask = fold_convert (lntype, lr_mask);
+ rhs = fold_convert_loc (loc, lntype, rhs);
+ lr_mask = fold_convert_loc (loc, lntype, lr_mask);
type = lntype;
}
}
if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
- return build2 (wanted_code, truth_type, lhs, rhs);
+ result = build2 (wanted_code, truth_type, lhs, rhs);
+ goto fold_truthop_exit;
}
return 0;
reference we will make. Unless the mask is all ones the width of
that field, perform the mask operation. Then compare with the
merged constant. */
- result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
+ result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
ll_unsignedp || rl_unsignedp);
ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
if (! all_ones_mask_p (ll_mask, lnbitsize))
- result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
+ {
+ result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
+ SET_EXPR_LOCATION (result, loc);
+ }
- return build2 (wanted_code, truth_type, result,
- const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
+ result = build2 (wanted_code, truth_type, result,
+ const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
+
+ fold_truthop_exit:
+ SET_EXPR_LOCATION (result, loc);
+ return result;
}
\f
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
constant. */
static tree
-optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
+optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
+ tree op0, tree op1)
{
tree arg0 = op0;
enum tree_code op_code;
op_code = TREE_CODE (arg0);
minmax_const = TREE_OPERAND (arg0, 1);
- comp_const = fold_convert (TREE_TYPE (arg0), op1);
+ comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
consts_equal = tree_int_cst_equal (minmax_const, comp_const);
consts_lt = tree_int_cst_lt (minmax_const, comp_const);
inner = TREE_OPERAND (arg0, 0);
{
case NE_EXPR: case LT_EXPR: case LE_EXPR:
{
- tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
- type, op0, op1);
+ tree tem
+ = optimize_minmax_comparison (loc,
+ invert_tree_comparison (code, false),
+ type, op0, op1);
if (tem)
- return invert_truthvalue (tem);
+ return invert_truthvalue_loc (loc, tem);
return NULL_TREE;
}
case GE_EXPR:
return
- fold_build2 (TRUTH_ORIF_EXPR, type,
+ fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
optimize_minmax_comparison
- (EQ_EXPR, type, arg0, comp_const),
+ (loc, EQ_EXPR, type, arg0, comp_const),
optimize_minmax_comparison
- (GT_EXPR, type, arg0, comp_const));
+ (loc, GT_EXPR, type, arg0, comp_const));
case EQ_EXPR:
if (op_code == MAX_EXPR && consts_equal)
/* MAX (X, 0) == 0 -> X <= 0 */
- return fold_build2 (LE_EXPR, type, inner, comp_const);
+ return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
else if (op_code == MAX_EXPR && consts_lt)
/* MAX (X, 0) == 5 -> X == 5 */
- return fold_build2 (EQ_EXPR, type, inner, comp_const);
+ return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
else if (op_code == MAX_EXPR)
/* MAX (X, 0) == -1 -> false */
- return omit_one_operand (type, integer_zero_node, inner);
+ return omit_one_operand_loc (loc, type, integer_zero_node, inner);
else if (consts_equal)
/* MIN (X, 0) == 0 -> X >= 0 */
- return fold_build2 (GE_EXPR, type, inner, comp_const);
+ return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
else if (consts_lt)
/* MIN (X, 0) == 5 -> false */
- return omit_one_operand (type, integer_zero_node, inner);
+ return omit_one_operand_loc (loc, type, integer_zero_node, inner);
else
/* MIN (X, 0) == -1 -> X == -1 */
- return fold_build2 (EQ_EXPR, type, inner, comp_const);
+ return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
case GT_EXPR:
if (op_code == MAX_EXPR && (consts_equal || consts_lt))
/* MAX (X, 0) > 0 -> X > 0
MAX (X, 0) > 5 -> X > 5 */
- return fold_build2 (GT_EXPR, type, inner, comp_const);
+ return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
else if (op_code == MAX_EXPR)
/* MAX (X, 0) > -1 -> true */
- return omit_one_operand (type, integer_one_node, inner);
+ return omit_one_operand_loc (loc, type, integer_one_node, inner);
else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
/* MIN (X, 0) > 0 -> false
MIN (X, 0) > 5 -> false */
- return omit_one_operand (type, integer_zero_node, inner);
+ return omit_one_operand_loc (loc, type, integer_zero_node, inner);
else
/* MIN (X, 0) > -1 -> X > -1 */
- return fold_build2 (GT_EXPR, type, inner, comp_const);
+ return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
default:
return NULL_TREE;
&& !TREE_OVERFLOW (t1))
return extract_muldiv (build2 (tcode == LSHIFT_EXPR
? MULT_EXPR : FLOOR_DIV_EXPR,
- ctype, fold_convert (ctype, op0), t1),
+ ctype,
+ fold_convert (ctype, op0),
+ t1),
c, code, wide_type, strict_overflow_p);
break;
/* If these are the same operation types, we can associate them
assuming no overflow. */
if (tcode == code
- && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
+ && 0 != (t1 = int_const_binop (MULT_EXPR,
+ fold_convert (ctype, op1),
fold_convert (ctype, c), 1))
&& 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
TREE_INT_CST_HIGH (t1),
possible. */
static tree
-fold_binary_op_with_conditional_arg (enum tree_code code,
+fold_binary_op_with_conditional_arg (location_t loc,
+ enum tree_code code,
tree type, tree op0, tree op1,
tree cond, tree arg, int cond_first_p)
{
false_value = constant_boolean_node (false, testtype);
}
- arg = fold_convert (arg_type, arg);
+ arg = fold_convert_loc (loc, arg_type, arg);
if (lhs == 0)
{
- true_value = fold_convert (cond_type, true_value);
+ true_value = fold_convert_loc (loc, cond_type, true_value);
if (cond_first_p)
- lhs = fold_build2 (code, type, true_value, arg);
+ lhs = fold_build2_loc (loc, code, type, true_value, arg);
else
- lhs = fold_build2 (code, type, arg, true_value);
+ lhs = fold_build2_loc (loc, code, type, arg, true_value);
}
if (rhs == 0)
{
- false_value = fold_convert (cond_type, false_value);
+ false_value = fold_convert_loc (loc, cond_type, false_value);
if (cond_first_p)
- rhs = fold_build2 (code, type, false_value, arg);
+ rhs = fold_build2_loc (loc, code, type, false_value, arg);
else
- rhs = fold_build2 (code, type, arg, false_value);
+ rhs = fold_build2_loc (loc, code, type, arg, false_value);
}
- test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
- return fold_convert (type, test);
+ test = fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
+ return fold_convert_loc (loc, type, test);
}
\f
can be made, and NULL_TREE otherwise. */
static tree
-fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
+fold_mathfn_compare (location_t loc,
+ enum built_in_function fcode, enum tree_code code,
tree type, tree arg0, tree arg1)
{
REAL_VALUE_TYPE c;
{
/* sqrt(x) < y is always false, if y is negative. */
if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
- return omit_one_operand (type, integer_zero_node, arg);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg);
/* sqrt(x) > y is always true, if y is negative and we
don't care about NaNs, i.e. negative values of x. */
if (code == NE_EXPR || !HONOR_NANS (mode))
- return omit_one_operand (type, integer_one_node, arg);
+ return omit_one_operand_loc (loc, type, integer_one_node, arg);
/* sqrt(x) > y is the same as x >= 0, if y is negative. */
- return fold_build2 (GE_EXPR, type, arg,
+ return fold_build2_loc (loc, GE_EXPR, type, arg,
build_real (TREE_TYPE (arg), dconst0));
}
else if (code == GT_EXPR || code == GE_EXPR)
{
/* sqrt(x) > y is x == +Inf, when y is very large. */
if (HONOR_INFINITIES (mode))
- return fold_build2 (EQ_EXPR, type, arg,
+ return fold_build2_loc (loc, EQ_EXPR, type, arg,
build_real (TREE_TYPE (arg), c2));
/* sqrt(x) > y is always false, when y is very large
and we don't care about infinities. */
- return omit_one_operand (type, integer_zero_node, arg);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg);
}
/* sqrt(x) > c is the same as x > c*c. */
- return fold_build2 (code, type, arg,
+ return fold_build2_loc (loc, code, type, arg,
build_real (TREE_TYPE (arg), c2));
}
else if (code == LT_EXPR || code == LE_EXPR)
/* sqrt(x) < y is always true, when y is a very large
value and we don't care about NaNs or Infinities. */
if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
- return omit_one_operand (type, integer_one_node, arg);
+ return omit_one_operand_loc (loc, type, integer_one_node, arg);
/* sqrt(x) < y is x != +Inf when y is very large and we
don't care about NaNs. */
if (! HONOR_NANS (mode))
- return fold_build2 (NE_EXPR, type, arg,
+ return fold_build2_loc (loc, NE_EXPR, type, arg,
build_real (TREE_TYPE (arg), c2));
/* sqrt(x) < y is x >= 0 when y is very large and we
don't care about Infinities. */
if (! HONOR_INFINITIES (mode))
- return fold_build2 (GE_EXPR, type, arg,
+ return fold_build2_loc (loc, GE_EXPR, type, arg,
build_real (TREE_TYPE (arg), dconst0));
/* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
return NULL_TREE;
arg = save_expr (arg);
- return fold_build2 (TRUTH_ANDIF_EXPR, type,
- fold_build2 (GE_EXPR, type, arg,
+ return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
+ fold_build2_loc (loc, GE_EXPR, type, arg,
build_real (TREE_TYPE (arg),
dconst0)),
- fold_build2 (NE_EXPR, type, arg,
+ fold_build2_loc (loc, NE_EXPR, type, arg,
build_real (TREE_TYPE (arg),
c2)));
}
/* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
if (! HONOR_NANS (mode))
- return fold_build2 (code, type, arg,
+ return fold_build2_loc (loc, code, type, arg,
build_real (TREE_TYPE (arg), c2));
/* sqrt(x) < c is the same as x >= 0 && x < c*c. */
&& ! CONTAINS_PLACEHOLDER_P (arg))
{
arg = save_expr (arg);
- return fold_build2 (TRUTH_ANDIF_EXPR, type,
- fold_build2 (GE_EXPR, type, arg,
+ return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
+ fold_build2_loc (loc, GE_EXPR, type, arg,
build_real (TREE_TYPE (arg),
dconst0)),
- fold_build2 (code, type, arg,
+ fold_build2_loc (loc, code, type, arg,
build_real (TREE_TYPE (arg),
c2)));
}
can be made, and NULL_TREE otherwise. */
static tree
-fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
+fold_inf_compare (location_t loc, enum tree_code code, tree type,
+ tree arg0, tree arg1)
{
enum machine_mode mode;
REAL_VALUE_TYPE max;
 /* x > +Inf is always false, if we ignore sNaNs. */
if (HONOR_SNANS (mode))
return NULL_TREE;
- return omit_one_operand (type, integer_zero_node, arg0);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
case LE_EXPR:
 /* x <= +Inf is always true, if we don't care about NaNs. */
if (! HONOR_NANS (mode))
- return omit_one_operand (type, integer_one_node, arg0);
+ return omit_one_operand_loc (loc, type, integer_one_node, arg0);
/* x <= +Inf is the same as x == x, i.e. isfinite(x). */
if (lang_hooks.decls.global_bindings_p () == 0
&& ! CONTAINS_PLACEHOLDER_P (arg0))
{
arg0 = save_expr (arg0);
- return fold_build2 (EQ_EXPR, type, arg0, arg0);
+ return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
}
break;
case GE_EXPR:
/* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
real_maxval (&max, neg, mode);
- return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
+ return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
arg0, build_real (TREE_TYPE (arg0), max));
case LT_EXPR:
/* x < +Inf is always equal to x <= DBL_MAX. */
real_maxval (&max, neg, mode);
- return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
+ return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
arg0, build_real (TREE_TYPE (arg0), max));
case NE_EXPR:
/* x != +Inf is always equal to !(x > DBL_MAX). */
real_maxval (&max, neg, mode);
if (! HONOR_NANS (mode))
- return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
+ return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
arg0, build_real (TREE_TYPE (arg0), max));
- temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
+ temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
arg0, build_real (TREE_TYPE (arg0), max));
- return fold_build1 (TRUTH_NOT_EXPR, type, temp);
+ return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
default:
break;
can be made, and NULL_TREE otherwise. */
static tree
-fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
+fold_div_compare (location_t loc,
+ enum tree_code code, tree type, tree arg0, tree arg1)
{
tree prod, tmp, hi, lo;
tree arg00 = TREE_OPERAND (arg0, 0);
{
case EQ_EXPR:
if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
- return omit_one_operand (type, integer_zero_node, arg00);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
if (TREE_OVERFLOW (hi))
- return fold_build2 (GE_EXPR, type, arg00, lo);
+ return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
if (TREE_OVERFLOW (lo))
- return fold_build2 (LE_EXPR, type, arg00, hi);
- return build_range_check (type, arg00, 1, lo, hi);
+ return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
+ return build_range_check (loc, type, arg00, 1, lo, hi);
case NE_EXPR:
if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
- return omit_one_operand (type, integer_one_node, arg00);
+ return omit_one_operand_loc (loc, type, integer_one_node, arg00);
if (TREE_OVERFLOW (hi))
- return fold_build2 (LT_EXPR, type, arg00, lo);
+ return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
if (TREE_OVERFLOW (lo))
- return fold_build2 (GT_EXPR, type, arg00, hi);
- return build_range_check (type, arg00, 0, lo, hi);
+ return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
+ return build_range_check (loc, type, arg00, 0, lo, hi);
case LT_EXPR:
if (TREE_OVERFLOW (lo))
{
tmp = neg_overflow ? integer_zero_node : integer_one_node;
- return omit_one_operand (type, tmp, arg00);
+ return omit_one_operand_loc (loc, type, tmp, arg00);
}
- return fold_build2 (LT_EXPR, type, arg00, lo);
+ return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
case LE_EXPR:
if (TREE_OVERFLOW (hi))
{
tmp = neg_overflow ? integer_zero_node : integer_one_node;
- return omit_one_operand (type, tmp, arg00);
+ return omit_one_operand_loc (loc, type, tmp, arg00);
}
- return fold_build2 (LE_EXPR, type, arg00, hi);
+ return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
case GT_EXPR:
if (TREE_OVERFLOW (hi))
{
tmp = neg_overflow ? integer_one_node : integer_zero_node;
- return omit_one_operand (type, tmp, arg00);
+ return omit_one_operand_loc (loc, type, tmp, arg00);
}
- return fold_build2 (GT_EXPR, type, arg00, hi);
+ return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
case GE_EXPR:
if (TREE_OVERFLOW (lo))
{
tmp = neg_overflow ? integer_one_node : integer_zero_node;
- return omit_one_operand (type, tmp, arg00);
+ return omit_one_operand_loc (loc, type, tmp, arg00);
}
- return fold_build2 (GE_EXPR, type, arg00, lo);
+ return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
default:
break;
result type. */
static tree
-fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
+fold_single_bit_test_into_sign_test (location_t loc,
+ enum tree_code code, tree arg0, tree arg1,
tree result_type)
{
/* If this is testing a single bit, we can optimize the test. */
== GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
{
tree stype = signed_type_for (TREE_TYPE (arg00));
- return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
- result_type, fold_convert (stype, arg00),
+ return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
+ result_type,
+ fold_convert_loc (loc, stype, arg00),
build_int_cst (stype, 0));
}
}
NULL. TYPE is the desired result type. */
tree
-fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
- tree result_type)
+fold_single_bit_test (location_t loc, enum tree_code code,
+ tree arg0, tree arg1, tree result_type)
{
/* If this is testing a single bit, we can optimize the test. */
if ((code == NE_EXPR || code == EQ_EXPR)
/* First, see if we can fold the single bit test into a sign-bit
test. */
- tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
+ tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
result_type);
if (tem)
return tem;
signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
intermediate_type = ops_unsigned ? unsigned_type : signed_type;
- inner = fold_convert (intermediate_type, inner);
+ inner = fold_convert_loc (loc, intermediate_type, inner);
if (bitnum != 0)
inner = build2 (RSHIFT_EXPR, intermediate_type,
one = build_int_cst (intermediate_type, 1);
if (code == EQ_EXPR)
- inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
+ inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
/* Put the AND last so it can combine with more things. */
inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
/* Make sure to return the proper type. */
- inner = fold_convert (result_type, inner);
+ inner = fold_convert_loc (loc, result_type, inner);
return inner;
}
ARG0 is extended to a wider type. */
static tree
-fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
+fold_widened_comparison (location_t loc, enum tree_code code,
+ tree type, tree arg0, tree arg1)
{
tree arg0_unw = get_unwidened (arg0, NULL_TREE);
tree arg1_unw;
&& (TREE_CODE (shorter_type) == INTEGER_TYPE
|| TREE_CODE (shorter_type) == BOOLEAN_TYPE)
&& int_fits_type_p (arg1_unw, shorter_type))))
- return fold_build2 (code, type, arg0_unw,
- fold_convert (shorter_type, arg1_unw));
+ return fold_build2_loc (loc, code, type, arg0_unw,
+ fold_convert_loc (loc, shorter_type, arg1_unw));
if (TREE_CODE (arg1_unw) != INTEGER_CST
|| TREE_CODE (shorter_type) != INTEGER_TYPE
{
case EQ_EXPR:
if (above || below)
- return omit_one_operand (type, integer_zero_node, arg0);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
break;
case NE_EXPR:
if (above || below)
- return omit_one_operand (type, integer_one_node, arg0);
+ return omit_one_operand_loc (loc, type, integer_one_node, arg0);
break;
case LT_EXPR:
case LE_EXPR:
if (above)
- return omit_one_operand (type, integer_one_node, arg0);
+ return omit_one_operand_loc (loc, type, integer_one_node, arg0);
else if (below)
- return omit_one_operand (type, integer_zero_node, arg0);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
case GT_EXPR:
case GE_EXPR:
if (above)
- return omit_one_operand (type, integer_zero_node, arg0);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
else if (below)
- return omit_one_operand (type, integer_one_node, arg0);
+ return omit_one_operand_loc (loc, type, integer_one_node, arg0);
default:
break;
ARG0 just the signedness is changed. */
static tree
-fold_sign_changed_comparison (enum tree_code code, tree type,
+fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
tree arg0, tree arg1)
{
tree arg0_inner;
TREE_INT_CST_HIGH (arg1), 0,
TREE_OVERFLOW (arg1));
else
- arg1 = fold_convert (inner_type, arg1);
+ arg1 = fold_convert_loc (loc, inner_type, arg1);
- return fold_build2 (code, type, arg0_inner, arg1);
+ return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
- step of the array. Reconstructs s and delta in the case of s * delta
- being an integer constant (and thus already folded).
- ADDR is the address. MULT is the multiplicative expression.
- If the function succeeds, the new address expression is returned. Otherwise
- NULL_TREE is returned. */
+ step of the array. Reconstructs s and delta in the case of s *
+ delta being an integer constant (and thus already folded). ADDR is
+ the address. MULT is the multiplicative expression. If the
+ function succeeds, the new address expression is returned.
+ Otherwise NULL_TREE is returned. LOC is the location of the
+ resulting expression. */
static tree
-try_move_mult_to_index (tree addr, tree op1)
+try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
tree s, delta, step;
tree ref = TREE_OPERAND (addr, 0), pref;
|| TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
continue;
- tmp = fold_binary (PLUS_EXPR, itype,
- fold_convert (itype,
- TREE_OPERAND (ref, 1)),
- fold_convert (itype, delta));
+ tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
+ fold_convert_loc (loc, itype,
+ TREE_OPERAND (ref, 1)),
+ fold_convert_loc (loc, itype, delta));
if (!tmp
|| TREE_CODE (tmp) != INTEGER_CST
|| tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
pref = TREE_OPERAND (addr, 0);
ret = copy_node (pref);
+ SET_EXPR_LOCATION (ret, loc);
pos = ret;
while (pref != ref)
pos = TREE_OPERAND (pos, 0);
}
- TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
- fold_convert (itype,
- TREE_OPERAND (pos, 1)),
- fold_convert (itype, delta));
+ TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
+ fold_convert_loc (loc, itype,
+ TREE_OPERAND (pos, 1)),
+ fold_convert_loc (loc, itype, delta));
- return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
+ return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
}
A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
static tree
-fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
+fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
if (POINTER_TYPE_P (typea))
{
/* Convert the pointer types into integer before taking the difference. */
- tree ta = fold_convert (ssizetype, a);
- tree ta1 = fold_convert (ssizetype, a1);
- diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
+ tree ta = fold_convert_loc (loc, ssizetype, a);
+ tree ta1 = fold_convert_loc (loc, ssizetype, a1);
+ diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
}
else
- diff = fold_binary (MINUS_EXPR, typea, a1, a);
+ diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
if (!diff || !integer_onep (diff))
return NULL_TREE;
- return fold_build2 (GE_EXPR, type, a, y);
+ return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
/* Fold a sum or difference of at least one multiplication.
Returns the folded tree or NULL if no simplification could be made. */
static tree
-fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
+fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
+ tree arg0, tree arg1)
{
tree arg00, arg01, arg10, arg11;
tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
increased the number of multiplications necessary. */
&& TREE_CODE (arg10) != INTEGER_CST)
{
- alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
+ alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
build_int_cst (TREE_TYPE (arg00),
int01 / int11));
alt1 = arg10;
}
if (same)
- return fold_build2 (MULT_EXPR, type,
- fold_build2 (code, type,
- fold_convert (type, alt0),
- fold_convert (type, alt1)),
- fold_convert (type, same));
+ return fold_build2_loc (loc, MULT_EXPR, type,
+ fold_build2_loc (loc, code, type,
+ fold_convert_loc (loc, type, alt0),
+ fold_convert_loc (loc, type, alt1)),
+ fold_convert_loc (loc, type, same));
return NULL_TREE;
}
to avoid confusing the gimplify process. */
tree
-build_fold_addr_expr_with_type (tree t, tree ptrtype)
+build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
/* The size of the object is not relevant when talking about its address. */
if (TREE_CODE (t) == WITH_SIZE_EXPR)
t = TREE_OPERAND (t, 0);
if (TREE_TYPE (t) != ptrtype)
- t = build1 (NOP_EXPR, ptrtype, t);
+ {
+ t = build1 (NOP_EXPR, ptrtype, t);
+ SET_EXPR_LOCATION (t, loc);
+ }
}
else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
{
- t = build_fold_addr_expr (TREE_OPERAND (t, 0));
+ t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
if (TREE_TYPE (t) != ptrtype)
- t = fold_convert (ptrtype, t);
+ t = fold_convert_loc (loc, ptrtype, t);
}
else
- t = build1 (ADDR_EXPR, ptrtype, t);
+ {
+ t = build1 (ADDR_EXPR, ptrtype, t);
+ SET_EXPR_LOCATION (t, loc);
+ }
return t;
}
/* Build an expression for the address of T. */
tree
-build_fold_addr_expr (tree t)
+build_fold_addr_expr_loc (location_t loc, tree t)
{
tree ptrtype = build_pointer_type (TREE_TYPE (t));
- return build_fold_addr_expr_with_type (t, ptrtype);
+ return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}
/* Fold a unary expression of code CODE and type TYPE with operand
Otherwise, return NULL_TREE. */
tree
-fold_unary (enum tree_code code, tree type, tree op0)
+fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
{
tree tem;
tree arg0;
{
if (TREE_CODE (arg0) == COMPOUND_EXPR)
return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
- fold_build1 (code, type,
- fold_convert (TREE_TYPE (op0),
- TREE_OPERAND (arg0, 1))));
+ fold_build1_loc (loc, code, type,
+ fold_convert_loc (loc, TREE_TYPE (op0),
+ TREE_OPERAND (arg0, 1))));
else if (TREE_CODE (arg0) == COND_EXPR)
{
tree arg01 = TREE_OPERAND (arg0, 1);
tree arg02 = TREE_OPERAND (arg0, 2);
if (! VOID_TYPE_P (TREE_TYPE (arg01)))
- arg01 = fold_build1 (code, type,
- fold_convert (TREE_TYPE (op0), arg01));
+ arg01 = fold_build1_loc (loc, code, type,
+ fold_convert_loc (loc,
+ TREE_TYPE (op0), arg01));
if (! VOID_TYPE_P (TREE_TYPE (arg02)))
- arg02 = fold_build1 (code, type,
- fold_convert (TREE_TYPE (op0), arg02));
- tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
+ arg02 = fold_build1_loc (loc, code, type,
+ fold_convert_loc (loc,
+ TREE_TYPE (op0), arg02));
+ tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
arg01, arg02);
/* If this was a conversion, and all we did was to move into
(TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
&& TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
|| flag_syntax_only))
- tem = build1 (code, type,
- build3 (COND_EXPR,
- TREE_TYPE (TREE_OPERAND
- (TREE_OPERAND (tem, 1), 0)),
- TREE_OPERAND (tem, 0),
- TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
- TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
+ {
+ tem = build1 (code, type,
+ build3 (COND_EXPR,
+ TREE_TYPE (TREE_OPERAND
+ (TREE_OPERAND (tem, 1), 0)),
+ TREE_OPERAND (tem, 0),
+ TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
+ TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
+ SET_EXPR_LOCATION (tem, loc);
+ }
return tem;
}
else if (COMPARISON_CLASS_P (arg0))
return arg0;
}
else if (TREE_CODE (type) != INTEGER_TYPE)
- return fold_build3 (COND_EXPR, type, arg0,
- fold_build1 (code, type,
+ return fold_build3_loc (loc, COND_EXPR, type, arg0,
+ fold_build1_loc (loc, code, type,
integer_one_node),
- fold_build1 (code, type,
+ fold_build1_loc (loc, code, type,
integer_zero_node));
}
}
barriers can be removed. */
if (CONSTANT_CLASS_P (op0)
|| TREE_CODE (op0) == PAREN_EXPR)
- return fold_convert (type, op0);
+ return fold_convert_loc (loc, type, op0);
return NULL_TREE;
CASE_CONVERT:
/* If we have (type) (a CMP b) and type is an integral type, return
new expression involving the new type. */
if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
- return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
+ return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
TREE_OPERAND (op0, 1));
/* Handle cases of two conversions in a row. */
&& (((inter_int || inter_ptr) && final_int)
|| (inter_float && final_float))
&& inter_prec >= final_prec)
- return fold_build1 (code, type, TREE_OPERAND (op0, 0));
+ return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
/* Likewise, if the intermediate and initial types are either both
float or both integer, we don't need the middle conversion if the
&& TYPE_MODE (type) == TYPE_MODE (inter_type))
&& ! final_ptr
&& (! final_vec || inter_prec == inside_prec))
- return fold_build1 (code, type, TREE_OPERAND (op0, 0));
+ return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
/* If we have a sign-extension of a zero-extended value, we can
replace that by a single zero-extension. */
if (inside_int && inter_int && final_int
&& inside_prec < inter_prec && inter_prec < final_prec
&& inside_unsignedp && !inter_unsignedp)
- return fold_build1 (code, type, TREE_OPERAND (op0, 0));
+ return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
/* Two conversions in a row are not needed unless:
- some conversion is floating-point (overstrict for now), or
&& ! (final_ptr && inside_prec != inter_prec)
&& ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
&& TYPE_MODE (type) == TYPE_MODE (inter_type)))
- return fold_build1 (code, type, TREE_OPERAND (op0, 0));
+ return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
}
/* Handle (T *)&A.B.C for A being of type T and B and C
if (! offset && bitpos == 0
&& TYPE_MAIN_VARIANT (TREE_TYPE (type))
== TYPE_MAIN_VARIANT (TREE_TYPE (base)))
- return fold_convert (type, build_fold_addr_expr (base));
+ return fold_convert_loc (loc, type,
+ build_fold_addr_expr_loc (loc, base));
}
if (TREE_CODE (op0) == MODIFY_EXPR
{
/* Don't leave an assignment inside a conversion
unless assigning a bitfield. */
- tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
+ tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
/* First do the assignment, then return converted constant. */
tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
TREE_NO_WARNING (tem) = 1;
TREE_USED (tem) = 1;
+ SET_EXPR_LOCATION (tem, loc);
return tem;
}
== ZERO_EXTEND))
{
tree uns = unsigned_type_for (TREE_TYPE (and0));
- and0 = fold_convert (uns, and0);
- and1 = fold_convert (uns, and1);
+ and0 = fold_convert_loc (loc, uns, and0);
+ and1 = fold_convert_loc (loc, uns, and1);
}
#endif
}
tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
TREE_INT_CST_HIGH (and1), 0,
TREE_OVERFLOW (and1));
- return fold_build2 (BIT_AND_EXPR, type,
- fold_convert (type, and0), tem);
+ return fold_build2_loc (loc, BIT_AND_EXPR, type,
+ fold_convert_loc (loc, type, and0), tem);
}
}
tree arg00 = TREE_OPERAND (arg0, 0);
tree arg01 = TREE_OPERAND (arg0, 1);
- return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
- fold_convert (sizetype, arg01));
+ return fold_build2_loc (loc,
+ TREE_CODE (arg0), type,
+ fold_convert_loc (loc, type, arg00),
+ fold_convert_loc (loc, sizetype, arg01));
}
/* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
&& TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
- return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
+ return fold_build1_loc (loc, BIT_NOT_EXPR, type,
+ fold_convert_loc (loc, type, tem));
}
/* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
{
- tem = fold_build2 (MULT_EXPR, mult_type,
- fold_convert (mult_type,
- TREE_OPERAND (op0, 0)),
- fold_convert (mult_type,
- TREE_OPERAND (op0, 1)));
- return fold_convert (type, tem);
+ tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
+ fold_convert_loc (loc, mult_type,
+ TREE_OPERAND (op0, 0)),
+ fold_convert_loc (loc, mult_type,
+ TREE_OPERAND (op0, 1)));
+ return fold_convert_loc (loc, type, tem);
}
}
if (TREE_TYPE (op0) == type)
return op0;
if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
- return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
+ return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
+ type, TREE_OPERAND (op0, 0));
/* For integral conversions with the same precision or pointer
conversions use a NOP_EXPR instead. */
&& (INTEGRAL_TYPE_P (TREE_TYPE (op0))
|| POINTER_TYPE_P (TREE_TYPE (op0)))
&& TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
- return fold_convert (type, op0);
+ return fold_convert_loc (loc, type, op0);
/* Strip inner integral conversions that do not change the precision. */
if (CONVERT_EXPR_P (op0)
|| POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
&& (TYPE_PRECISION (TREE_TYPE (op0))
== TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
- return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
+ return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
+ type, TREE_OPERAND (op0, 0));
return fold_view_convert_expr (type, op0);
case NEGATE_EXPR:
- tem = fold_negate_expr (arg0);
+ tem = fold_negate_expr (loc, arg0);
if (tem)
- return fold_convert (type, tem);
+ return fold_convert_loc (loc, type, tem);
return NULL_TREE;
case ABS_EXPR:
if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
return fold_abs_const (arg0, type);
else if (TREE_CODE (arg0) == NEGATE_EXPR)
- return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
+ return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
/* Convert fabs((double)float) into (double)fabsf(float). */
else if (TREE_CODE (arg0) == NOP_EXPR
&& TREE_CODE (type) == REAL_TYPE)
{
tree targ0 = strip_float_extensions (arg0);
if (targ0 != arg0)
- return fold_convert (type, fold_build1 (ABS_EXPR,
- TREE_TYPE (targ0),
- targ0));
+ return fold_convert_loc (loc, type,
+ fold_build1_loc (loc, ABS_EXPR,
+ TREE_TYPE (targ0),
+ targ0));
}
/* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
else if (TREE_CODE (arg0) == ABS_EXPR)
{
tem = fold_strip_sign_ops (arg0);
if (tem)
- return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
+ return fold_build1_loc (loc, ABS_EXPR, type,
+ fold_convert_loc (loc, type, tem));
}
return NULL_TREE;
case CONJ_EXPR:
if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
- return fold_convert (type, arg0);
+ return fold_convert_loc (loc, type, arg0);
if (TREE_CODE (arg0) == COMPLEX_EXPR)
{
tree itype = TREE_TYPE (type);
- tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
- tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
- return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
+ tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
+ tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
+ return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
+ negate_expr (ipart));
}
if (TREE_CODE (arg0) == COMPLEX_CST)
{
tree itype = TREE_TYPE (type);
- tree rpart = fold_convert (itype, TREE_REALPART (arg0));
- tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
+ tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
+ tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
return build_complex (type, rpart, negate_expr (ipart));
}
if (TREE_CODE (arg0) == CONJ_EXPR)
- return fold_convert (type, TREE_OPERAND (arg0, 0));
+ return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
return NULL_TREE;
case BIT_NOT_EXPR:
if (TREE_CODE (arg0) == INTEGER_CST)
return fold_not_const (arg0, type);
else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
- return fold_convert (type, TREE_OPERAND (arg0, 0));
+ return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
/* Convert ~ (-A) to A - 1. */
else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
- return fold_build2 (MINUS_EXPR, type,
- fold_convert (type, TREE_OPERAND (arg0, 0)),
+ return fold_build2_loc (loc, MINUS_EXPR, type,
+ fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
build_int_cst (type, 1));
/* Convert ~ (A - 1) or ~ (A + -1) to -A. */
else if (INTEGRAL_TYPE_P (type)
&& integer_onep (TREE_OPERAND (arg0, 1)))
|| (TREE_CODE (arg0) == PLUS_EXPR
&& integer_all_onesp (TREE_OPERAND (arg0, 1)))))
- return fold_build1 (NEGATE_EXPR, type,
- fold_convert (type, TREE_OPERAND (arg0, 0)));
+ return fold_build1_loc (loc, NEGATE_EXPR, type,
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg0, 0)));
/* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
else if (TREE_CODE (arg0) == BIT_XOR_EXPR
- && (tem = fold_unary (BIT_NOT_EXPR, type,
- fold_convert (type,
- TREE_OPERAND (arg0, 0)))))
- return fold_build2 (BIT_XOR_EXPR, type, tem,
- fold_convert (type, TREE_OPERAND (arg0, 1)));
+ && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg0, 0)))))
+ return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg0, 1)));
else if (TREE_CODE (arg0) == BIT_XOR_EXPR
- && (tem = fold_unary (BIT_NOT_EXPR, type,
- fold_convert (type,
- TREE_OPERAND (arg0, 1)))))
- return fold_build2 (BIT_XOR_EXPR, type,
- fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
+ && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg0, 1)))))
+ return fold_build2_loc (loc, BIT_XOR_EXPR, type,
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg0, 0)), tem);
/* Perform BIT_NOT_EXPR on each element individually. */
else if (TREE_CODE (arg0) == VECTOR_CST)
{
if (elements)
{
elem = TREE_VALUE (elements);
- elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
+ elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
if (elem == NULL_TREE)
break;
elements = TREE_CHAIN (elements);
case TRUTH_NOT_EXPR:
/* The argument to invert_truthvalue must have Boolean type. */
if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
- arg0 = fold_convert (boolean_type_node, arg0);
+ arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
/* Note that the operand of this must be an int
and its values must be 0 or 1.
("true" is a fixed value perhaps depending on the language,
but we don't handle values other than 1 correctly yet.) */
- tem = fold_truth_not_expr (arg0);
+ tem = fold_truth_not_expr (loc, arg0);
if (!tem)
return NULL_TREE;
- return fold_convert (type, tem);
+ return fold_convert_loc (loc, type, tem);
case REALPART_EXPR:
if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
- return fold_convert (type, arg0);
+ return fold_convert_loc (loc, type, arg0);
if (TREE_CODE (arg0) == COMPLEX_EXPR)
- return omit_one_operand (type, TREE_OPERAND (arg0, 0),
+ return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
TREE_OPERAND (arg0, 1));
if (TREE_CODE (arg0) == COMPLEX_CST)
- return fold_convert (type, TREE_REALPART (arg0));
+ return fold_convert_loc (loc, type, TREE_REALPART (arg0));
if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
{
tree itype = TREE_TYPE (TREE_TYPE (arg0));
- tem = fold_build2 (TREE_CODE (arg0), itype,
- fold_build1 (REALPART_EXPR, itype,
+ tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
+ fold_build1_loc (loc, REALPART_EXPR, itype,
TREE_OPERAND (arg0, 0)),
- fold_build1 (REALPART_EXPR, itype,
+ fold_build1_loc (loc, REALPART_EXPR, itype,
TREE_OPERAND (arg0, 1)));
- return fold_convert (type, tem);
+ return fold_convert_loc (loc, type, tem);
}
if (TREE_CODE (arg0) == CONJ_EXPR)
{
tree itype = TREE_TYPE (TREE_TYPE (arg0));
- tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
- return fold_convert (type, tem);
+ tem = fold_build1_loc (loc, REALPART_EXPR, itype,
+ TREE_OPERAND (arg0, 0));
+ return fold_convert_loc (loc, type, tem);
}
if (TREE_CODE (arg0) == CALL_EXPR)
{
CASE_FLT_FN (BUILT_IN_CEXPI):
fn = mathfn_built_in (type, BUILT_IN_COS);
if (fn)
- return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
+ return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
break;
default:
case IMAGPART_EXPR:
if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
- return fold_convert (type, integer_zero_node);
+ return fold_convert_loc (loc, type, integer_zero_node);
if (TREE_CODE (arg0) == COMPLEX_EXPR)
- return omit_one_operand (type, TREE_OPERAND (arg0, 1),
+ return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
TREE_OPERAND (arg0, 0));
if (TREE_CODE (arg0) == COMPLEX_CST)
- return fold_convert (type, TREE_IMAGPART (arg0));
+ return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
{
tree itype = TREE_TYPE (TREE_TYPE (arg0));
- tem = fold_build2 (TREE_CODE (arg0), itype,
- fold_build1 (IMAGPART_EXPR, itype,
+ tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
+ fold_build1_loc (loc, IMAGPART_EXPR, itype,
TREE_OPERAND (arg0, 0)),
- fold_build1 (IMAGPART_EXPR, itype,
+ fold_build1_loc (loc, IMAGPART_EXPR, itype,
TREE_OPERAND (arg0, 1)));
- return fold_convert (type, tem);
+ return fold_convert_loc (loc, type, tem);
}
if (TREE_CODE (arg0) == CONJ_EXPR)
{
tree itype = TREE_TYPE (TREE_TYPE (arg0));
- tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
- return fold_convert (type, negate_expr (tem));
+ tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
+ return fold_convert_loc (loc, type, negate_expr (tem));
}
if (TREE_CODE (arg0) == CALL_EXPR)
{
CASE_FLT_FN (BUILT_IN_CEXPI):
fn = mathfn_built_in (type, BUILT_IN_SIN);
if (fn)
- return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
+ return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
break;
default:
have implementation defined behavior and retaining the TREE_OVERFLOW
flag here would confuse later passes such as VRP. */
tree
-fold_unary_ignore_overflow (enum tree_code code, tree type, tree op0)
+fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
+ tree type, tree op0)
{
- tree res = fold_unary (code, type, op0);
+ tree res = fold_unary_loc (loc, code, type, op0);
if (res
&& TREE_CODE (res) == INTEGER_CST
&& TREE_CODE (op0) == INTEGER_CST
return NULL_TREE. */
static tree
-fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
+fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
enum tree_code compl_code;
/* MIN (MAX (a, b), b) == b. */
if (TREE_CODE (op0) == compl_code
&& operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
- return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
+ return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
/* MIN (MAX (b, a), b) == b. */
if (TREE_CODE (op0) == compl_code
&& operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
&& reorder_operands_p (TREE_OPERAND (op0, 1), op1))
- return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
+ return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
/* MIN (a, MAX (a, b)) == a. */
if (TREE_CODE (op1) == compl_code
&& operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
&& reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
- return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
+ return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
/* MIN (a, MAX (b, a)) == a. */
if (TREE_CODE (op1) == compl_code
&& operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
&& reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
- return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
+ return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
return NULL_TREE;
}
valid if signed overflow is undefined. */
static tree
-maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
+maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
tree arg0, tree arg1,
bool *strict_overflow_p)
{
t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
if (code0 != INTEGER_CST)
- t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
+ t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
/* If swapping might yield to a more canonical form, do so. */
if (swap)
- return fold_build2 (swap_tree_comparison (code), type, arg1, t);
+ return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
else
- return fold_build2 (code, type, t, arg1);
+ return fold_build2_loc (loc, code, type, t, arg1);
}
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
Returns the canonicalized tree if changed, otherwise NULL_TREE. */
static tree
-maybe_canonicalize_comparison (enum tree_code code, tree type,
+maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
tree arg0, tree arg1)
{
tree t;
/* Try canonicalization by simplifying arg0. */
strict_overflow_p = false;
- t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
+ t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
&strict_overflow_p);
if (t)
{
comparison. */
code = swap_tree_comparison (code);
strict_overflow_p = false;
- t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
+ t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
&strict_overflow_p);
if (t && strict_overflow_p)
fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
the folded comparison or NULL_TREE. */
static tree
-fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
+fold_comparison (location_t loc, enum tree_code code, tree type,
+ tree op0, tree op1)
{
tree arg0, arg1, tem;
/* If one arg is a real or integer constant, put it last. */
if (tree_swap_operands_p (arg0, arg1, true))
- return fold_build2 (swap_tree_comparison (code), type, op1, op0);
+ return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
/* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
int lhs_add;
lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
- lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
+ lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
TREE_TYPE (arg1), const2, const1);
/* If the constant operation overflowed this can be
if (code2 == LT_EXPR
|| code2 == LE_EXPR
|| code2 == EQ_EXPR)
- return omit_one_operand (type, boolean_false_node, variable);
+ return omit_one_operand_loc (loc, type, boolean_false_node, variable);
else if (code2 == NE_EXPR
|| code2 == GE_EXPR
|| code2 == GT_EXPR)
- return omit_one_operand (type, boolean_true_node, variable);
+ return omit_one_operand_loc (loc, type, boolean_true_node, variable);
}
if (TREE_CODE (lhs) == TREE_CODE (arg1)
"when changing X +- C1 cmp C2 to "
"X cmp C1 +- C2"),
WARN_STRICT_OVERFLOW_COMPARISON);
- return fold_build2 (code, type, variable, lhs);
+ return fold_build2_loc (loc, code, type, variable, lhs);
}
}
if (offset0 == NULL_TREE)
offset0 = build_int_cst (signed_size_type_node, 0);
else
- offset0 = fold_convert (signed_size_type_node, offset0);
+ offset0 = fold_convert_loc (loc, signed_size_type_node,
+ offset0);
if (offset1 == NULL_TREE)
offset1 = build_int_cst (signed_size_type_node, 0);
else
- offset1 = fold_convert (signed_size_type_node, offset1);
+ offset1 = fold_convert_loc (loc, signed_size_type_node,
+ offset1);
if (code != EQ_EXPR
&& code != NE_EXPR
"P +- C2"),
WARN_STRICT_OVERFLOW_COMPARISON);
- return fold_build2 (code, type, offset0, offset1);
+ return fold_build2_loc (loc, code, type, offset0, offset1);
}
}
/* For non-equal bases we can simplify if they are addresses
|| CONSTANT_CLASS_P (base1)))
{
if (code == EQ_EXPR)
- return omit_two_operands (type, boolean_false_node, arg0, arg1);
+ return omit_two_operands_loc (loc, type, boolean_false_node,
+ arg0, arg1);
else if (code == NE_EXPR)
- return omit_two_operands (type, boolean_true_node, arg0, arg1);
+ return omit_two_operands_loc (loc, type, boolean_true_node,
+ arg0, arg1);
}
/* For equal offsets we can simplify to a comparison of the
base addresses. */
&& operand_equal_p (offset0, offset1, 0))))
{
if (indirect_base0)
- base0 = build_fold_addr_expr (base0);
+ base0 = build_fold_addr_expr_loc (loc, base0);
if (indirect_base1)
- base1 = build_fold_addr_expr (base1);
- return fold_build2 (code, type, base0, base1);
+ base1 = build_fold_addr_expr_loc (loc, base1);
+ return fold_build2_loc (loc, code, type, base0, base1);
}
}
&& tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
{
fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
- return fold_build2 (code, type,
+ return fold_build2_loc (loc, code, type,
variable1,
- fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
+ fold_build2_loc (loc,
+ TREE_CODE (arg1), TREE_TYPE (arg1),
variable2, cst));
}
&& tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
{
fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
- return fold_build2 (code, type,
- fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
+ return fold_build2_loc (loc, code, type,
+ fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
variable1, cst),
variable2);
}
if (tree_int_cst_sgn (const1) < 0)
cmp_code = swap_tree_comparison (cmp_code);
- return fold_build2 (cmp_code, type, variable1, const2);
+ return fold_build2_loc (loc, cmp_code, type, variable1, const2);
}
- tem = maybe_canonicalize_comparison (code, type, op0, op1);
+ tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
if (tem)
return tem;
/* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
- return fold_build2 (code, type, fold_convert (newtype, targ0),
- fold_convert (newtype, targ1));
+ return fold_build2_loc (loc, code, type,
+ fold_convert_loc (loc, newtype, targ0),
+ fold_convert_loc (loc, newtype, targ1));
/* (-a) CMP (-b) -> b CMP a */
if (TREE_CODE (arg0) == NEGATE_EXPR
&& TREE_CODE (arg1) == NEGATE_EXPR)
- return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
+ return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
TREE_OPERAND (arg0, 0));
if (TREE_CODE (arg1) == REAL_CST)
/* (-a) CMP CST -> a swap(CMP) (-CST) */
if (TREE_CODE (arg0) == NEGATE_EXPR)
- return fold_build2 (swap_tree_comparison (code), type,
+ return fold_build2_loc (loc, swap_tree_comparison (code), type,
TREE_OPERAND (arg0, 0),
build_real (TREE_TYPE (arg1),
REAL_VALUE_NEGATE (cst)));
/* IEEE doesn't distinguish +0 and -0 in comparisons. */
/* a CMP (-0) -> a CMP 0 */
if (REAL_VALUE_MINUS_ZERO (cst))
- return fold_build2 (code, type, arg0,
+ return fold_build2_loc (loc, code, type, arg0,
build_real (TREE_TYPE (arg1), dconst0));
/* x != NaN is always true, other ops are always false. */
&& ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
{
tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
- return omit_one_operand (type, tem, arg0);
+ return omit_one_operand_loc (loc, type, tem, arg0);
}
/* Fold comparisons against infinity. */
if (REAL_VALUE_ISINF (cst)
&& MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
{
- tem = fold_inf_compare (code, type, arg0, arg1);
+ tem = fold_inf_compare (loc, code, type, arg0, arg1);
if (tem != NULL_TREE)
return tem;
}
? MINUS_EXPR : PLUS_EXPR,
arg1, TREE_OPERAND (arg0, 1), 0))
&& !TREE_OVERFLOW (tem))
- return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
+ return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
/* Likewise, we can simplify a comparison of a real constant with
a MINUS_EXPR whose first operand is also a real constant, i.e.
&& 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
arg1, 0))
&& !TREE_OVERFLOW (tem))
- return fold_build2 (swap_tree_comparison (code), type,
+ return fold_build2_loc (loc, swap_tree_comparison (code), type,
TREE_OPERAND (arg0, 1), tem);
/* Fold comparisons against built-in math functions. */
if (fcode != END_BUILTINS)
{
- tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
+ tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
if (tem != NULL_TREE)
return tem;
}
/* If we are widening one operand of an integer comparison,
see if the other operand is similarly being widened. Perhaps we
can do the comparison in the narrower type. */
- tem = fold_widened_comparison (code, type, arg0, arg1);
+ tem = fold_widened_comparison (loc, code, type, arg0, arg1);
if (tem)
return tem;
/* Or if we are changing signedness. */
- tem = fold_sign_changed_comparison (code, type, arg0, arg1);
+ tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
if (tem)
return tem;
}
|| TREE_CODE (arg0) == MAX_EXPR)
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
- tem = optimize_minmax_comparison (code, type, op0, op1);
+ tem = optimize_minmax_comparison (loc, code, type, op0, op1);
if (tem)
return tem;
}
if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
|| ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
return constant_boolean_node (1, type);
- return fold_build2 (EQ_EXPR, type, arg0, arg1);
+ return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
case NE_EXPR:
/* For NE, we can only do this simplification if integer
was the same as ARG1. */
tree high_result
- = fold_build2 (code, type,
- eval_subst (arg0, cval1, maxval,
+ = fold_build2_loc (loc, code, type,
+ eval_subst (loc, arg0, cval1, maxval,
cval2, minval),
arg1);
tree equal_result
- = fold_build2 (code, type,
- eval_subst (arg0, cval1, maxval,
+ = fold_build2_loc (loc, code, type,
+ eval_subst (loc, arg0, cval1, maxval,
cval2, maxval),
arg1);
tree low_result
- = fold_build2 (code, type,
- eval_subst (arg0, cval1, minval,
+ = fold_build2_loc (loc, code, type,
+ eval_subst (loc, arg0, cval1, minval,
cval2, maxval),
arg1);
{
case 0:
/* Always false. */
- return omit_one_operand (type, integer_zero_node, arg0);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
case 1:
code = LT_EXPR;
break;
break;
case 7:
/* Always true. */
- return omit_one_operand (type, integer_one_node, arg0);
+ return omit_one_operand_loc (loc, type, integer_one_node, arg0);
}
if (save_p)
- return save_expr (build2 (code, type, cval1, cval2));
- return fold_build2 (code, type, cval1, cval2);
+ {
+ tem = save_expr (build2 (code, type, cval1, cval2));
+ SET_EXPR_LOCATION (tem, loc);
+ return tem;
+ }
+ return fold_build2_loc (loc, code, type, cval1, cval2);
}
}
}
&& !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
&& !TREE_OVERFLOW (arg1))
{
- tem = fold_div_compare (code, type, arg0, arg1);
+ tem = fold_div_compare (loc, code, type, arg0, arg1);
if (tem != NULL_TREE)
return tem;
}
&& TREE_CODE (arg1) == BIT_NOT_EXPR)
{
tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
- return fold_build2 (code, type,
- fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
+ return fold_build2_loc (loc, code, type,
+ fold_convert_loc (loc, cmp_type,
+ TREE_OPERAND (arg1, 0)),
TREE_OPERAND (arg0, 0));
}
&& TREE_CODE (arg1) == INTEGER_CST)
{
tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
- return fold_build2 (swap_tree_comparison (code), type,
+ return fold_build2_loc (loc, swap_tree_comparison (code), type,
TREE_OPERAND (arg0, 0),
- fold_build1 (BIT_NOT_EXPR, cmp_type,
- fold_convert (cmp_type, arg1)));
+ fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
+ fold_convert_loc (loc, cmp_type, arg1)));
}
return NULL_TREE;
argument EXPR represents the expression "z" of type TYPE. */
static tree
-fold_mult_zconjz (tree type, tree expr)
+fold_mult_zconjz (location_t loc, tree type, tree expr)
{
tree itype = TREE_TYPE (type);
tree rpart, ipart, tem;
else
{
expr = save_expr (expr);
- rpart = fold_build1 (REALPART_EXPR, itype, expr);
- ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
+ rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
+ ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
}
rpart = save_expr (rpart);
ipart = save_expr (ipart);
- tem = fold_build2 (PLUS_EXPR, itype,
- fold_build2 (MULT_EXPR, itype, rpart, rpart),
- fold_build2 (MULT_EXPR, itype, ipart, ipart));
- return fold_build2 (COMPLEX_EXPR, type, tem,
- fold_convert (itype, integer_zero_node));
+ tem = fold_build2_loc (loc, PLUS_EXPR, itype,
+ fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
+ fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
+ return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
+ fold_convert_loc (loc, itype, integer_zero_node));
}
/* Fold a binary expression of code CODE and type TYPE with operands
- OP0 and OP1. Return the folded expression if folding is
- successful. Otherwise, return NULL_TREE. */
+ OP0 and OP1. LOC is the location of the resulting expression.
+ Return the folded expression if folding is successful. Otherwise,
+ return NULL_TREE. */
tree
-fold_binary (enum tree_code code, tree type, tree op0, tree op1)
+fold_binary_loc (location_t loc,
+ enum tree_code code, tree type, tree op0, tree op1)
{
enum tree_code_class kind = TREE_CODE_CLASS (code);
tree arg0, arg1, tem;
if (tem != NULL_TREE)
{
if (TREE_TYPE (tem) != type)
- tem = fold_convert (type, tem);
+ tem = fold_convert_loc (loc, type, tem);
return tem;
}
}
to ARG1 to reduce the number of tests below. */
if (commutative_tree_code (code)
&& tree_swap_operands_p (arg0, arg1, true))
- return fold_build2 (code, type, op1, op0);
+ return fold_build2_loc (loc, code, type, op1, op0);
/* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
|| (TREE_CODE (arg0) == BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (arg0, 1)))))))
{
- tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
+ tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
: code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
: TRUTH_XOR_EXPR,
boolean_type_node,
- fold_convert (boolean_type_node, arg0),
- fold_convert (boolean_type_node, arg1));
+ fold_convert_loc (loc, boolean_type_node, arg0),
+ fold_convert_loc (loc, boolean_type_node, arg1));
if (code == EQ_EXPR)
- tem = invert_truthvalue (tem);
+ tem = invert_truthvalue_loc (loc, tem);
- return fold_convert (type, tem);
+ return fold_convert_loc (loc, type, tem);
}
if (TREE_CODE_CLASS (code) == tcc_binary
|| TREE_CODE_CLASS (code) == tcc_comparison)
{
if (TREE_CODE (arg0) == COMPOUND_EXPR)
- return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
- fold_build2 (code, type,
- fold_convert (TREE_TYPE (op0),
- TREE_OPERAND (arg0, 1)),
- op1));
+ {
+ tem = fold_build2_loc (loc, code, type,
+ fold_convert_loc (loc, TREE_TYPE (op0),
+ TREE_OPERAND (arg0, 1)), op1);
+ protected_set_expr_location (tem, loc);
+ tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
+ goto fold_binary_exit;
+ }
if (TREE_CODE (arg1) == COMPOUND_EXPR
&& reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
- return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
- fold_build2 (code, type, op0,
- fold_convert (TREE_TYPE (op1),
- TREE_OPERAND (arg1, 1))));
+ {
+ tem = fold_build2_loc (loc, code, type, op0,
+ fold_convert_loc (loc, TREE_TYPE (op1),
+ TREE_OPERAND (arg1, 1)));
+ protected_set_expr_location (tem, loc);
+ tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
+ goto fold_binary_exit;
+ }
if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
{
- tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
+ tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
arg0, arg1,
/*cond_first_p=*/1);
if (tem != NULL_TREE)
if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
{
- tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
+ tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
arg1, arg0,
/*cond_first_p=*/0);
if (tem != NULL_TREE)
case POINTER_PLUS_EXPR:
/* 0 +p index -> (type)index */
if (integer_zerop (arg0))
- return non_lvalue (fold_convert (type, arg1));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
/* PTR +p 0 -> PTR */
if (integer_zerop (arg1))
- return non_lvalue (fold_convert (type, arg0));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
/* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
&& INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
- return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
- fold_convert (sizetype, arg1),
- fold_convert (sizetype, arg0)));
+ return fold_convert_loc (loc, type,
+ fold_build2_loc (loc, PLUS_EXPR, sizetype,
+ fold_convert_loc (loc, sizetype,
+ arg1),
+ fold_convert_loc (loc, sizetype,
+ arg0)));
/* index +p PTR -> PTR +p index */
if (POINTER_TYPE_P (TREE_TYPE (arg1))
&& INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
- return fold_build2 (POINTER_PLUS_EXPR, type,
- fold_convert (type, arg1),
- fold_convert (sizetype, arg0));
+ return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
+ fold_convert_loc (loc, type, arg1),
+ fold_convert_loc (loc, sizetype, arg0));
/* (PTR +p B) +p A -> PTR +p (B + A) */
if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
{
tree inner;
- tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
+ tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
tree arg00 = TREE_OPERAND (arg0, 0);
- inner = fold_build2 (PLUS_EXPR, sizetype,
- arg01, fold_convert (sizetype, arg1));
- return fold_convert (type,
- fold_build2 (POINTER_PLUS_EXPR,
- TREE_TYPE (arg00), arg00, inner));
+ inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
+ arg01, fold_convert_loc (loc, sizetype, arg1));
+ return fold_convert_loc (loc, type,
+ fold_build2_loc (loc, POINTER_PLUS_EXPR,
+ TREE_TYPE (arg00),
+ arg00, inner));
}
/* PTR_CST +p CST -> CST1 */
if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
- return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
+ return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
+ fold_convert_loc (loc, type, arg1));
/* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
of the array. Loop optimizer sometimes produce this type of
expressions. */
if (TREE_CODE (arg0) == ADDR_EXPR)
{
- tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
+ tem = try_move_mult_to_index (loc, arg0,
+ fold_convert_loc (loc, sizetype, arg1));
if (tem)
- return fold_convert (type, tem);
+ return fold_convert_loc (loc, type, tem);
}
return NULL_TREE;
case PLUS_EXPR:
/* A + (-B) -> A - B */
if (TREE_CODE (arg1) == NEGATE_EXPR)
- return fold_build2 (MINUS_EXPR, type,
- fold_convert (type, arg0),
- fold_convert (type, TREE_OPERAND (arg1, 0)));
+ return fold_build2_loc (loc, MINUS_EXPR, type,
+ fold_convert_loc (loc, type, arg0),
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg1, 0)));
/* (-A) + B -> B - A */
if (TREE_CODE (arg0) == NEGATE_EXPR
&& reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
- return fold_build2 (MINUS_EXPR, type,
- fold_convert (type, arg1),
- fold_convert (type, TREE_OPERAND (arg0, 0)));
+ return fold_build2_loc (loc, MINUS_EXPR, type,
+ fold_convert_loc (loc, type, arg1),
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg0, 0)));
if (INTEGRAL_TYPE_P (type))
{
/* Convert ~A + 1 to -A. */
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& integer_onep (arg1))
- return fold_build1 (NEGATE_EXPR, type,
- fold_convert (type, TREE_OPERAND (arg0, 0)));
+ return fold_build1_loc (loc, NEGATE_EXPR, type,
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg0, 0)));
/* ~X + X is -1. */
if (TREE_CODE (arg0) == BIT_NOT_EXPR
if (operand_equal_p (tem, arg1, 0))
{
t1 = build_int_cst_type (type, -1);
- return omit_one_operand (type, t1, arg1);
+ return omit_one_operand_loc (loc, type, t1, arg1);
}
}
if (operand_equal_p (arg0, tem, 0))
{
t1 = build_int_cst_type (type, -1);
- return omit_one_operand (type, t1, arg0);
+ return omit_one_operand_loc (loc, type, t1, arg0);
}
}
{
tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
tree cst1 = TREE_OPERAND (arg1, 1);
- tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
+ tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
+ cst1, cst0);
if (sum && integer_zerop (sum))
- return fold_convert (type,
- fold_build2 (TRUNC_MOD_EXPR,
- TREE_TYPE (arg0), arg0, cst0));
+ return fold_convert_loc (loc, type,
+ fold_build2_loc (loc, TRUNC_MOD_EXPR,
+ TREE_TYPE (arg0), arg0,
+ cst0));
}
}
&& !TYPE_SATURATING (type)
&& (!FLOAT_TYPE_P (type) || flag_associative_math))
{
- tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
+ tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
if (tem)
return tem;
}
if (! FLOAT_TYPE_P (type))
{
if (integer_zerop (arg1))
- return non_lvalue (fold_convert (type, arg0));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
/* If we are adding two BIT_AND_EXPR's, both of which are and'ing
with a constant, and the two constants have no bits in common,
if (TREE_CODE (parg0) == MULT_EXPR
&& TREE_CODE (parg1) != MULT_EXPR)
- return fold_build2 (pcode, type,
- fold_build2 (PLUS_EXPR, type,
- fold_convert (type, parg0),
- fold_convert (type, marg)),
- fold_convert (type, parg1));
+ return fold_build2_loc (loc, pcode, type,
+ fold_build2_loc (loc, PLUS_EXPR, type,
+ fold_convert_loc (loc, type,
+ parg0),
+ fold_convert_loc (loc, type,
+ marg)),
+ fold_convert_loc (loc, type, parg1));
if (TREE_CODE (parg0) != MULT_EXPR
&& TREE_CODE (parg1) == MULT_EXPR)
- return fold_build2 (PLUS_EXPR, type,
- fold_convert (type, parg0),
- fold_build2 (pcode, type,
- fold_convert (type, marg),
- fold_convert (type,
- parg1)));
+ return
+ fold_build2_loc (loc, PLUS_EXPR, type,
+ fold_convert_loc (loc, type, parg0),
+ fold_build2_loc (loc, pcode, type,
+ fold_convert_loc (loc, type, marg),
+ fold_convert_loc (loc, type,
+ parg1)));
}
}
else
{
/* See if ARG1 is zero and X + ARG1 reduces to X. */
if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
- return non_lvalue (fold_convert (type, arg0));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
/* Likewise if the operands are reversed. */
if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
- return non_lvalue (fold_convert (type, arg1));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
/* Convert X + -C into X - C. */
if (TREE_CODE (arg1) == REAL_CST
{
tem = fold_negate_const (arg1, type);
if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
- return fold_build2 (MINUS_EXPR, type,
- fold_convert (type, arg0),
- fold_convert (type, tem));
+ return fold_build2_loc (loc, MINUS_EXPR, type,
+ fold_convert_loc (loc, type, arg0),
+ fold_convert_loc (loc, type, tem));
}
/* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
&& COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
{
tree rtype = TREE_TYPE (TREE_TYPE (arg0));
- tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
- tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
+ tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
+ tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
bool arg0rz = false, arg0iz = false;
if ((arg0r && (arg0rz = real_zerop (arg0r)))
|| (arg0i && (arg0iz = real_zerop (arg0i))))
{
- tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
- tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
+ tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
+ tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
if (arg0rz && arg1i && real_zerop (arg1i))
{
tree rp = arg1r ? arg1r
: build1 (REALPART_EXPR, rtype, arg1);
tree ip = arg0i ? arg0i
: build1 (IMAGPART_EXPR, rtype, arg0);
- return fold_build2 (COMPLEX_EXPR, type, rp, ip);
+ return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
}
else if (arg0iz && arg1r && real_zerop (arg1r))
{
: build1 (REALPART_EXPR, rtype, arg0);
tree ip = arg1i ? arg1i
: build1 (IMAGPART_EXPR, rtype, arg1);
- return fold_build2 (COMPLEX_EXPR, type, rp, ip);
+ return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
}
}
}
if (flag_unsafe_math_optimizations
&& (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
&& (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
- && (tem = distribute_real_division (code, type, arg0, arg1)))
+ && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
return tem;
/* Convert x+x into x*2.0. */
if (operand_equal_p (arg0, arg1, 0)
&& SCALAR_FLOAT_TYPE_P (type))
- return fold_build2 (MULT_EXPR, type, arg0,
+ return fold_build2_loc (loc, MULT_EXPR, type, arg0,
build_real (type, dconst2));
/* Convert a + (b*c + d*e) into (a + b*c) + d*e.
&& TREE_CODE (tree10) == MULT_EXPR)
{
tree tree0;
- tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
- return fold_build2 (PLUS_EXPR, type, tree0, tree11);
+ tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
+ return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
}
}
/* Convert (b*c + d*e) + a into b*c + (d*e +a).
&& TREE_CODE (tree00) == MULT_EXPR)
{
tree tree0;
- tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
- return fold_build2 (PLUS_EXPR, type, tree00, tree0);
+ tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
+ return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
}
}
}
&& TREE_INT_CST_HIGH (tree11) == 0
&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
== TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
- return fold_convert (type,
- build2 (LROTATE_EXPR,
- TREE_TYPE (TREE_OPERAND (arg0, 0)),
- TREE_OPERAND (arg0, 0),
- code0 == LSHIFT_EXPR
- ? tree01 : tree11));
+ {
+ tem = build2 (LROTATE_EXPR,
+ TREE_TYPE (TREE_OPERAND (arg0, 0)),
+ TREE_OPERAND (arg0, 0),
+ code0 == LSHIFT_EXPR
+ ? tree01 : tree11);
+ SET_EXPR_LOCATION (tem, loc);
+ return fold_convert_loc (loc, type, tem);
+ }
else if (code11 == MINUS_EXPR)
{
tree tree110, tree111;
(TREE_TYPE (TREE_OPERAND
(arg0, 0))))
&& operand_equal_p (tree01, tree111, 0))
- return fold_convert (type,
- build2 ((code0 == LSHIFT_EXPR
- ? LROTATE_EXPR
- : RROTATE_EXPR),
- TREE_TYPE (TREE_OPERAND (arg0, 0)),
- TREE_OPERAND (arg0, 0), tree01));
+ return
+ fold_convert_loc (loc, type,
+ build2 ((code0 == LSHIFT_EXPR
+ ? LROTATE_EXPR
+ : RROTATE_EXPR),
+ TREE_TYPE (TREE_OPERAND (arg0, 0)),
+ TREE_OPERAND (arg0, 0), tree01));
}
else if (code01 == MINUS_EXPR)
{
(TREE_TYPE (TREE_OPERAND
(arg0, 0))))
&& operand_equal_p (tree11, tree011, 0))
- return fold_convert (type,
- build2 ((code0 != LSHIFT_EXPR
- ? LROTATE_EXPR
- : RROTATE_EXPR),
- TREE_TYPE (TREE_OPERAND (arg0, 0)),
- TREE_OPERAND (arg0, 0), tree11));
+ return fold_convert_loc
+ (loc, type,
+ build2 ((code0 != LSHIFT_EXPR
+ ? LROTATE_EXPR
+ : RROTATE_EXPR),
+ TREE_TYPE (TREE_OPERAND (arg0, 0)),
+ TREE_OPERAND (arg0, 0), tree11));
}
}
}
if (code == MINUS_EXPR)
code = PLUS_EXPR;
- var0 = associate_trees (var0, var1, code, type);
- con0 = associate_trees (con0, con1, code, type);
- lit0 = associate_trees (lit0, lit1, code, type);
- minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
+ var0 = associate_trees (loc, var0, var1, code, type);
+ con0 = associate_trees (loc, con0, con1, code, type);
+ lit0 = associate_trees (loc, lit0, lit1, code, type);
+ minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
/* Preserve the MINUS_EXPR if the negative part of the literal is
greater than the positive part. Otherwise, the multiplicative
&& TREE_CODE (minus_lit0) == INTEGER_CST
&& tree_int_cst_lt (lit0, minus_lit0))
{
- minus_lit0 = associate_trees (minus_lit0, lit0,
+ minus_lit0 = associate_trees (loc, minus_lit0, lit0,
MINUS_EXPR, type);
lit0 = 0;
}
else
{
- lit0 = associate_trees (lit0, minus_lit0,
+ lit0 = associate_trees (loc, lit0, minus_lit0,
MINUS_EXPR, type);
minus_lit0 = 0;
}
if (minus_lit0)
{
if (con0 == 0)
- return fold_convert (type,
- associate_trees (var0, minus_lit0,
- MINUS_EXPR, type));
+ return
+ fold_convert_loc (loc, type,
+ associate_trees (loc, var0, minus_lit0,
+ MINUS_EXPR, type));
else
{
- con0 = associate_trees (con0, minus_lit0,
+ con0 = associate_trees (loc, con0, minus_lit0,
MINUS_EXPR, type);
- return fold_convert (type,
- associate_trees (var0, con0,
- PLUS_EXPR, type));
+ return
+ fold_convert_loc (loc, type,
+ associate_trees (loc, var0, con0,
+ PLUS_EXPR, type));
}
}
- con0 = associate_trees (con0, lit0, code, type);
- return fold_convert (type, associate_trees (var0, con0,
- code, type));
+ con0 = associate_trees (loc, con0, lit0, code, type);
+ return
+ fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
+ code, type));
}
}
if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
&& TREE_CODE (arg1) == POINTER_PLUS_EXPR)
{
- tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
- tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
- tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
- tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
- return fold_build2 (PLUS_EXPR, type,
- fold_build2 (MINUS_EXPR, type, arg00, arg10),
- fold_build2 (MINUS_EXPR, type, arg01, arg11));
+ tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
+ tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
+ tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
+ tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
+ return fold_build2_loc (loc, PLUS_EXPR, type,
+ fold_build2_loc (loc, MINUS_EXPR, type,
+ arg00, arg10),
+ fold_build2_loc (loc, MINUS_EXPR, type,
+ arg01, arg11));
}
/* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
{
- tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
- tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
- tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
+ tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
+ tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
+ tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
+ fold_convert_loc (loc, type, arg1));
if (tmp)
- return fold_build2 (PLUS_EXPR, type, tmp, arg01);
+ return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
}
}
/* A - (-B) -> A + B */
if (TREE_CODE (arg1) == NEGATE_EXPR)
- return fold_build2 (PLUS_EXPR, type, op0,
- fold_convert (type, TREE_OPERAND (arg1, 0)));
+ return fold_build2_loc (loc, PLUS_EXPR, type, op0,
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg1, 0)));
/* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
if (TREE_CODE (arg0) == NEGATE_EXPR
&& (FLOAT_TYPE_P (type)
|| INTEGRAL_TYPE_P (type))
&& negate_expr_p (arg1)
&& reorder_operands_p (arg0, arg1))
- return fold_build2 (MINUS_EXPR, type,
- fold_convert (type, negate_expr (arg1)),
- fold_convert (type, TREE_OPERAND (arg0, 0)));
+ return fold_build2_loc (loc, MINUS_EXPR, type,
+ fold_convert_loc (loc, type,
+ negate_expr (arg1)),
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg0, 0)));
/* Convert -A - 1 to ~A. */
if (INTEGRAL_TYPE_P (type)
&& TREE_CODE (arg0) == NEGATE_EXPR
&& integer_onep (arg1)
&& !TYPE_OVERFLOW_TRAPS (type))
- return fold_build1 (BIT_NOT_EXPR, type,
- fold_convert (type, TREE_OPERAND (arg0, 0)));
+ return fold_build1_loc (loc, BIT_NOT_EXPR, type,
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg0, 0)));
/* Convert -1 - A to ~A. */
if (INTEGRAL_TYPE_P (type)
&& integer_all_onesp (arg0))
- return fold_build1 (BIT_NOT_EXPR, type, op1);
+ return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
/* X - (X / CST) * CST is X % CST. */
TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
&& operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
TREE_OPERAND (arg1, 1), 0))
- return fold_convert (type,
- fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
- arg0, TREE_OPERAND (arg1, 1)));
+ return
+ fold_convert_loc (loc, type,
+ fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
+ arg0, TREE_OPERAND (arg1, 1)));
if (! FLOAT_TYPE_P (type))
{
if (integer_zerop (arg0))
- return negate_expr (fold_convert (type, arg1));
+ return negate_expr (fold_convert_loc (loc, type, arg1));
if (integer_zerop (arg1))
- return non_lvalue (fold_convert (type, arg0));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
/* Fold A - (A & B) into ~B & A. */
if (!TREE_SIDE_EFFECTS (arg0)
{
if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
{
- tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
- return fold_build2 (BIT_AND_EXPR, type,
- fold_build1 (BIT_NOT_EXPR, type, arg10),
- fold_convert (type, arg0));
+ tree arg10 = fold_convert_loc (loc, type,
+ TREE_OPERAND (arg1, 0));
+ return fold_build2_loc (loc, BIT_AND_EXPR, type,
+ fold_build1_loc (loc, BIT_NOT_EXPR,
+ type, arg10),
+ fold_convert_loc (loc, type, arg0));
}
if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
{
- tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
- return fold_build2 (BIT_AND_EXPR, type,
- fold_build1 (BIT_NOT_EXPR, type, arg11),
- fold_convert (type, arg0));
+ tree arg11 = fold_convert_loc (loc,
+ type, TREE_OPERAND (arg1, 1));
+ return fold_build2_loc (loc, BIT_AND_EXPR, type,
+ fold_build1_loc (loc, BIT_NOT_EXPR,
+ type, arg11),
+ fold_convert_loc (loc, type, arg0));
}
}
{
tree mask0 = TREE_OPERAND (arg0, 1);
tree mask1 = TREE_OPERAND (arg1, 1);
- tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
+ tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
if (operand_equal_p (tem, mask1, 0))
{
- tem = fold_build2 (BIT_XOR_EXPR, type,
+ tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
TREE_OPERAND (arg0, 0), mask1);
- return fold_build2 (MINUS_EXPR, type, tem, mask1);
+ return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
}
}
}
/* See if ARG1 is zero and X - ARG1 reduces to X. */
else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
- return non_lvalue (fold_convert (type, arg0));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
/* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
ARG0 is zero and X + ARG0 reduces to X, since that would mean
(-ARG1 + ARG0) reduces to -ARG1. */
else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
- return negate_expr (fold_convert (type, arg1));
+ return negate_expr (fold_convert_loc (loc, type, arg1));
/* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
__complex__ ( x, -y ). This is not the same for SNaNs or if
&& COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
{
tree rtype = TREE_TYPE (TREE_TYPE (arg0));
- tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
- tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
+ tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
+ tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
bool arg0rz = false, arg0iz = false;
if ((arg0r && (arg0rz = real_zerop (arg0r)))
|| (arg0i && (arg0iz = real_zerop (arg0i))))
{
- tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
- tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
+ tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
+ tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
if (arg0rz && arg1i && real_zerop (arg1i))
{
- tree rp = fold_build1 (NEGATE_EXPR, rtype,
+ tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
arg1r ? arg1r
: build1 (REALPART_EXPR, rtype, arg1));
tree ip = arg0i ? arg0i
: build1 (IMAGPART_EXPR, rtype, arg0);
- return fold_build2 (COMPLEX_EXPR, type, rp, ip);
+ return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
}
else if (arg0iz && arg1r && real_zerop (arg1r))
{
tree rp = arg0r ? arg0r
: build1 (REALPART_EXPR, rtype, arg0);
- tree ip = fold_build1 (NEGATE_EXPR, rtype,
+ tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
arg1i ? arg1i
: build1 (IMAGPART_EXPR, rtype, arg1));
- return fold_build2 (COMPLEX_EXPR, type, rp, ip);
+ return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
}
}
}
if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
&& operand_equal_p (arg0, arg1, 0))
- return fold_convert (type, integer_zero_node);
+ return fold_convert_loc (loc, type, integer_zero_node);
/* A - B -> A + (-B) if B is easily negatable. */
if (negate_expr_p (arg1)
&& (TREE_CODE (arg1) != REAL_CST
|| REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
|| INTEGRAL_TYPE_P (type)))
- return fold_build2 (PLUS_EXPR, type,
- fold_convert (type, arg0),
- fold_convert (type, negate_expr (arg1)));
+ return fold_build2_loc (loc, PLUS_EXPR, type,
+ fold_convert_loc (loc, type, arg0),
+ fold_convert_loc (loc, type,
+ negate_expr (arg1)));
/* Try folding difference of addresses. */
{
if (operand_equal_p (TREE_OPERAND (aref0, 0),
TREE_OPERAND (aref1, 0), 0))
{
- tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
- tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
+ tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
+ tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
tree esz = array_ref_element_size (aref0);
tree diff = build2 (MINUS_EXPR, type, op0, op1);
- return fold_build2 (MULT_EXPR, type, diff,
- fold_convert (type, esz));
+ return fold_build2_loc (loc, MULT_EXPR, type, diff,
+ fold_convert_loc (loc, type, esz));
}
}
&& flag_unsafe_math_optimizations
&& (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
&& (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
- && (tem = distribute_real_division (code, type, arg0, arg1)))
+ && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
return tem;
/* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
&& !TYPE_SATURATING (type)
&& (!FLOAT_TYPE_P (type) || flag_associative_math))
{
- tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
+ tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
if (tem)
return tem;
}
case MULT_EXPR:
/* (-A) * (-B) -> A * B */
if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
- return fold_build2 (MULT_EXPR, type,
- fold_convert (type, TREE_OPERAND (arg0, 0)),
- fold_convert (type, negate_expr (arg1)));
+ return fold_build2_loc (loc, MULT_EXPR, type,
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg0, 0)),
+ fold_convert_loc (loc, type,
+ negate_expr (arg1)));
if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
- return fold_build2 (MULT_EXPR, type,
- fold_convert (type, negate_expr (arg0)),
- fold_convert (type, TREE_OPERAND (arg1, 0)));
+ return fold_build2_loc (loc, MULT_EXPR, type,
+ fold_convert_loc (loc, type,
+ negate_expr (arg0)),
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg1, 0)));
if (! FLOAT_TYPE_P (type))
{
if (integer_zerop (arg1))
- return omit_one_operand (type, arg1, arg0);
+ return omit_one_operand_loc (loc, type, arg1, arg0);
if (integer_onep (arg1))
- return non_lvalue (fold_convert (type, arg0));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
/* Transform x * -1 into -x. Make sure to do the negation
on the original operand with conversions not stripped
because we can only strip non-sign-changing conversions. */
if (integer_all_onesp (arg1))
- return fold_convert (type, negate_expr (op0));
+ return fold_convert_loc (loc, type, negate_expr (op0));
/* Transform x * -C into -x * C if x is easily negatable. */
if (TREE_CODE (arg1) == INTEGER_CST
&& tree_int_cst_sgn (arg1) == -1
&& negate_expr_p (arg0)
&& (tem = negate_expr (arg1)) != arg1
&& !TREE_OVERFLOW (tem))
- return fold_build2 (MULT_EXPR, type,
- fold_convert (type, negate_expr (arg0)), tem);
+ return fold_build2_loc (loc, MULT_EXPR, type,
+ fold_convert_loc (loc, type,
+ negate_expr (arg0)),
+ tem);
/* (a * (1 << b)) is (a << b) */
if (TREE_CODE (arg1) == LSHIFT_EXPR
&& integer_onep (TREE_OPERAND (arg1, 0)))
- return fold_build2 (LSHIFT_EXPR, type, op0,
+ return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
TREE_OPERAND (arg1, 1));
if (TREE_CODE (arg0) == LSHIFT_EXPR
&& integer_onep (TREE_OPERAND (arg0, 0)))
- return fold_build2 (LSHIFT_EXPR, type, op1,
+ return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
TREE_OPERAND (arg0, 1));
/* (A + A) * C -> A * 2 * C */
&& TREE_CODE (arg1) == INTEGER_CST
&& operand_equal_p (TREE_OPERAND (arg0, 0),
TREE_OPERAND (arg0, 1), 0))
- return fold_build2 (MULT_EXPR, type,
- omit_one_operand (type, TREE_OPERAND (arg0, 0),
+ return fold_build2_loc (loc, MULT_EXPR, type,
+ omit_one_operand_loc (loc, type,
+ TREE_OPERAND (arg0, 0),
TREE_OPERAND (arg0, 1)),
- fold_build2 (MULT_EXPR, type,
+ fold_build2_loc (loc, MULT_EXPR, type,
build_int_cst (type, 2) , arg1));
strict_overflow_p = false;
"occur when simplifying "
"multiplication"),
WARN_STRICT_OVERFLOW_MISC);
- return fold_convert (type, tem);
+ return fold_convert_loc (loc, type, tem);
}
/* Optimize z * conj(z) for integer complex numbers. */
if (TREE_CODE (arg0) == CONJ_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
- return fold_mult_zconjz (type, arg1);
+ return fold_mult_zconjz (loc, type, arg1);
if (TREE_CODE (arg1) == CONJ_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
- return fold_mult_zconjz (type, arg0);
+ return fold_mult_zconjz (loc, type, arg0);
}
else
{
if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
&& !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
&& real_zerop (arg1))
- return omit_one_operand (type, arg1, arg0);
+ return omit_one_operand_loc (loc, type, arg1, arg0);
/* In IEEE floating point, x*1 is not equivalent to x for snans.
Likewise for complex arithmetic with signed zeros. */
if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
&& (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
|| !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
&& real_onep (arg1))
- return non_lvalue (fold_convert (type, arg0));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
/* Transform x * -1.0 into -x. */
if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
&& (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
|| !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
&& real_minus_onep (arg1))
- return fold_convert (type, negate_expr (arg0));
+ return fold_convert_loc (loc, type, negate_expr (arg0));
/* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
the result for floating point types due to rounding so it is applied
tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
arg1, 0);
if (tem)
- return fold_build2 (RDIV_EXPR, type, tem,
+ return fold_build2_loc (loc, RDIV_EXPR, type, tem,
TREE_OPERAND (arg0, 1));
}
tree tem = fold_strip_sign_ops (arg0);
if (tem != NULL_TREE)
{
- tem = fold_convert (type, tem);
- return fold_build2 (MULT_EXPR, type, tem, tem);
+ tem = fold_convert_loc (loc, type, tem);
+ return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
}
}
{
tree rtype = TREE_TYPE (TREE_TYPE (arg0));
if (real_onep (TREE_IMAGPART (arg1)))
- return fold_build2 (COMPLEX_EXPR, type,
- negate_expr (fold_build1 (IMAGPART_EXPR,
- rtype, arg0)),
- fold_build1 (REALPART_EXPR, rtype, arg0));
+ return
+ fold_build2_loc (loc, COMPLEX_EXPR, type,
+ negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
+ rtype, arg0)),
+ fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
else if (real_minus_onep (TREE_IMAGPART (arg1)))
- return fold_build2 (COMPLEX_EXPR, type,
- fold_build1 (IMAGPART_EXPR, rtype, arg0),
- negate_expr (fold_build1 (REALPART_EXPR,
- rtype, arg0)));
+ return
+ fold_build2_loc (loc, COMPLEX_EXPR, type,
+ fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
+ negate_expr (fold_build1_loc (loc, REALPART_EXPR,
+ rtype, arg0)));
}
/* Optimize z * conj(z) for floating point complex numbers.
if (flag_unsafe_math_optimizations
&& TREE_CODE (arg0) == CONJ_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
- return fold_mult_zconjz (type, arg1);
+ return fold_mult_zconjz (loc, type, arg1);
if (flag_unsafe_math_optimizations
&& TREE_CODE (arg1) == CONJ_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
- return fold_mult_zconjz (type, arg0);
+ return fold_mult_zconjz (loc, type, arg0);
if (flag_unsafe_math_optimizations)
{
/* Optimize root(x)*root(y) as root(x*y). */
rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
- arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
- return build_call_expr (rootfn, 1, arg);
+ arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
+ return build_call_expr_loc (loc, rootfn, 1, arg);
}
/* Optimize expN(x)*expN(y) as expN(x+y). */
if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
{
tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
- tree arg = fold_build2 (PLUS_EXPR, type,
+ tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
CALL_EXPR_ARG (arg0, 0),
CALL_EXPR_ARG (arg1, 0));
- return build_call_expr (expfn, 1, arg);
+ return build_call_expr_loc (loc, expfn, 1, arg);
}
/* Optimizations of pow(...)*pow(...). */
if (operand_equal_p (arg01, arg11, 0))
{
tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
- tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
- return build_call_expr (powfn, 2, arg, arg01);
+ tree arg = fold_build2_loc (loc, MULT_EXPR, type,
+ arg00, arg10);
+ return build_call_expr_loc (loc, powfn, 2, arg, arg01);
}
/* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
if (operand_equal_p (arg00, arg10, 0))
{
tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
- tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
- return build_call_expr (powfn, 2, arg00, arg);
+ tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
+ arg01, arg11);
+ return build_call_expr_loc (loc, powfn, 2, arg00, arg);
}
}
tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
if (sinfn != NULL_TREE)
- return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
+ return build_call_expr_loc (loc, sinfn, 1,
+ CALL_EXPR_ARG (arg0, 0));
}
/* Optimize x*pow(x,c) as pow(x,c+1). */
c = TREE_REAL_CST (arg11);
real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
arg = build_real (type, c);
- return build_call_expr (powfn, 2, arg0, arg);
+ return build_call_expr_loc (loc, powfn, 2, arg0, arg);
}
}
c = TREE_REAL_CST (arg01);
real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
arg = build_real (type, c);
- return build_call_expr (powfn, 2, arg1, arg);
+ return build_call_expr_loc (loc, powfn, 2, arg1, arg);
}
}
if (powfn)
{
tree arg = build_real (type, dconst2);
- return build_call_expr (powfn, 2, arg0, arg);
+ return build_call_expr_loc (loc, powfn, 2, arg0, arg);
}
}
}
case BIT_IOR_EXPR:
bit_ior:
if (integer_all_onesp (arg1))
- return omit_one_operand (type, arg1, arg0);
+ return omit_one_operand_loc (loc, type, arg1, arg0);
if (integer_zerop (arg1))
- return non_lvalue (fold_convert (type, arg0));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
if (operand_equal_p (arg0, arg1, 0))
- return non_lvalue (fold_convert (type, arg0));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
/* ~X | X is -1. */
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
{
- t1 = fold_convert (type, integer_zero_node);
- t1 = fold_unary (BIT_NOT_EXPR, type, t1);
- return omit_one_operand (type, t1, arg1);
+ t1 = fold_convert_loc (loc, type, integer_zero_node);
+ t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
+ return omit_one_operand_loc (loc, type, t1, arg1);
}
/* X | ~X is -1. */
if (TREE_CODE (arg1) == BIT_NOT_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
{
- t1 = fold_convert (type, integer_zero_node);
- t1 = fold_unary (BIT_NOT_EXPR, type, t1);
- return omit_one_operand (type, t1, arg0);
+ t1 = fold_convert_loc (loc, type, integer_zero_node);
+ t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
+ return omit_one_operand_loc (loc, type, t1, arg0);
}
/* Canonicalize (X & C1) | C2. */
/* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
- return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
+ return omit_one_operand_loc (loc, type, arg1,
+ TREE_OPERAND (arg0, 0));
if (width > HOST_BITS_PER_WIDE_INT)
{
/* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
- return fold_build2 (BIT_IOR_EXPR, type,
+ return fold_build2_loc (loc, BIT_IOR_EXPR, type,
TREE_OPERAND (arg0, 0), arg1);
/* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
}
}
if (hi3 != hi1 || lo3 != lo1)
- return fold_build2 (BIT_IOR_EXPR, type,
- fold_build2 (BIT_AND_EXPR, type,
+ return fold_build2_loc (loc, BIT_IOR_EXPR, type,
+ fold_build2_loc (loc, BIT_AND_EXPR, type,
TREE_OPERAND (arg0, 0),
build_int_cst_wide (type,
lo3, hi3)),
/* (X & Y) | Y is (X, Y). */
if (TREE_CODE (arg0) == BIT_AND_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
- return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
+ return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
/* (X & Y) | X is (Y, X). */
if (TREE_CODE (arg0) == BIT_AND_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
&& reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
- return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
+ return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
/* X | (X & Y) is (Y, X). */
if (TREE_CODE (arg1) == BIT_AND_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
&& reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
- return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
+ return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
/* X | (Y & X) is (Y, X). */
if (TREE_CODE (arg1) == BIT_AND_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
&& reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
- return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
+ return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
- t1 = distribute_bit_expr (code, type, arg0, arg1);
+ t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
if (t1 != NULL_TREE)
return t1;
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& TREE_CODE (arg1) == BIT_NOT_EXPR)
{
- return fold_build1 (BIT_NOT_EXPR, type,
- build2 (BIT_AND_EXPR, type,
- fold_convert (type,
- TREE_OPERAND (arg0, 0)),
- fold_convert (type,
- TREE_OPERAND (arg1, 0))));
+ return
+ fold_build1_loc (loc, BIT_NOT_EXPR, type,
+ build2 (BIT_AND_EXPR, type,
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg0, 0)),
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg1, 0))));
}
/* See if this can be simplified into a rotate first. If that
case BIT_XOR_EXPR:
if (integer_zerop (arg1))
- return non_lvalue (fold_convert (type, arg0));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
if (integer_all_onesp (arg1))
- return fold_build1 (BIT_NOT_EXPR, type, op0);
+ return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
if (operand_equal_p (arg0, arg1, 0))
- return omit_one_operand (type, integer_zero_node, arg0);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
/* ~X ^ X is -1. */
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
{
- t1 = fold_convert (type, integer_zero_node);
- t1 = fold_unary (BIT_NOT_EXPR, type, t1);
- return omit_one_operand (type, t1, arg1);
+ t1 = fold_convert_loc (loc, type, integer_zero_node);
+ t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
+ return omit_one_operand_loc (loc, type, t1, arg1);
}
/* X ^ ~X is -1. */
if (TREE_CODE (arg1) == BIT_NOT_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
{
- t1 = fold_convert (type, integer_zero_node);
- t1 = fold_unary (BIT_NOT_EXPR, type, t1);
- return omit_one_operand (type, t1, arg0);
+ t1 = fold_convert_loc (loc, type, integer_zero_node);
+ t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
+ return omit_one_operand_loc (loc, type, t1, arg0);
}
/* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
{
tree t2 = TREE_OPERAND (arg0, 1);
- t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
+ t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
arg1);
- t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
- fold_convert (type, t1));
+ t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
+ fold_convert_loc (loc, type, t2),
+ fold_convert_loc (loc, type, t1));
return t1;
}
&& operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
{
tree t2 = TREE_OPERAND (arg0, 0);
- t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
+ t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
arg1);
- t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
- fold_convert (type, t1));
+ t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
+ fold_convert_loc (loc, type, t2),
+ fold_convert_loc (loc, type, t1));
return t1;
}
&& operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
{
tree t2 = TREE_OPERAND (arg1, 1);
- t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
+ t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
arg0);
- t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
- fold_convert (type, t1));
+ t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
+ fold_convert_loc (loc, type, t2),
+ fold_convert_loc (loc, type, t1));
return t1;
}
&& operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
{
tree t2 = TREE_OPERAND (arg1, 0);
- t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
+ t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
arg0);
- t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
- fold_convert (type, t1));
+ t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
+ fold_convert_loc (loc, type, t2),
+ fold_convert_loc (loc, type, t1));
return t1;
}
/* Convert ~X ^ ~Y to X ^ Y. */
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& TREE_CODE (arg1) == BIT_NOT_EXPR)
- return fold_build2 (code, type,
- fold_convert (type, TREE_OPERAND (arg0, 0)),
- fold_convert (type, TREE_OPERAND (arg1, 0)));
+ return fold_build2_loc (loc, code, type,
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg0, 0)),
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg1, 0)));
/* Convert ~X ^ C to X ^ ~C. */
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& TREE_CODE (arg1) == INTEGER_CST)
- return fold_build2 (code, type,
- fold_convert (type, TREE_OPERAND (arg0, 0)),
- fold_build1 (BIT_NOT_EXPR, type, arg1));
+ return fold_build2_loc (loc, code, type,
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg0, 0)),
+ fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
/* Fold (X & 1) ^ 1 as (X & 1) == 0. */
if (TREE_CODE (arg0) == BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (arg0, 1))
&& integer_onep (arg1))
- return fold_build2 (EQ_EXPR, type, arg0,
+ return fold_build2_loc (loc, EQ_EXPR, type, arg0,
build_int_cst (TREE_TYPE (arg0), 0));
/* Fold (X & Y) ^ Y as ~X & Y. */
if (TREE_CODE (arg0) == BIT_AND_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
{
- tem = fold_convert (type, TREE_OPERAND (arg0, 0));
- return fold_build2 (BIT_AND_EXPR, type,
- fold_build1 (BIT_NOT_EXPR, type, tem),
- fold_convert (type, arg1));
+ tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
+ return fold_build2_loc (loc, BIT_AND_EXPR, type,
+ fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
+ fold_convert_loc (loc, type, arg1));
}
/* Fold (X & Y) ^ X as ~Y & X. */
if (TREE_CODE (arg0) == BIT_AND_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
&& reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
{
- tem = fold_convert (type, TREE_OPERAND (arg0, 1));
- return fold_build2 (BIT_AND_EXPR, type,
- fold_build1 (BIT_NOT_EXPR, type, tem),
- fold_convert (type, arg1));
+ tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
+ return fold_build2_loc (loc, BIT_AND_EXPR, type,
+ fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
+ fold_convert_loc (loc, type, arg1));
}
/* Fold X ^ (X & Y) as X & ~Y. */
if (TREE_CODE (arg1) == BIT_AND_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
{
- tem = fold_convert (type, TREE_OPERAND (arg1, 1));
- return fold_build2 (BIT_AND_EXPR, type,
- fold_convert (type, arg0),
- fold_build1 (BIT_NOT_EXPR, type, tem));
+ tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
+ return fold_build2_loc (loc, BIT_AND_EXPR, type,
+ fold_convert_loc (loc, type, arg0),
+ fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
}
/* Fold X ^ (Y & X) as ~Y & X. */
if (TREE_CODE (arg1) == BIT_AND_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
&& reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
{
- tem = fold_convert (type, TREE_OPERAND (arg1, 0));
- return fold_build2 (BIT_AND_EXPR, type,
- fold_build1 (BIT_NOT_EXPR, type, tem),
- fold_convert (type, arg0));
+ tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
+ return fold_build2_loc (loc, BIT_AND_EXPR, type,
+ fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
+ fold_convert_loc (loc, type, arg0));
}
/* See if this can be simplified into a rotate first. If that
case BIT_AND_EXPR:
if (integer_all_onesp (arg1))
- return non_lvalue (fold_convert (type, arg0));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
if (integer_zerop (arg1))
- return omit_one_operand (type, arg1, arg0);
+ return omit_one_operand_loc (loc, type, arg1, arg0);
if (operand_equal_p (arg0, arg1, 0))
- return non_lvalue (fold_convert (type, arg0));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
/* ~X & X is always zero. */
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
- return omit_one_operand (type, integer_zero_node, arg1);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
/* X & ~X is always zero. */
if (TREE_CODE (arg1) == BIT_NOT_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
- return omit_one_operand (type, integer_zero_node, arg0);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
/* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
if (TREE_CODE (arg0) == BIT_IOR_EXPR
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
- tree tmp1 = fold_convert (type, arg1);
- tree tmp2 = fold_convert (type, TREE_OPERAND (arg0, 0));
- tree tmp3 = fold_convert (type, TREE_OPERAND (arg0, 1));
- tmp2 = fold_build2 (BIT_AND_EXPR, type, tmp2, tmp1);
- tmp3 = fold_build2 (BIT_AND_EXPR, type, tmp3, tmp1);
- return fold_convert (type,
- fold_build2 (BIT_IOR_EXPR, type, tmp2, tmp3));
+ tree tmp1 = fold_convert_loc (loc, type, arg1);
+ tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
+ tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
+ tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
+ tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
+ return
+ fold_convert_loc (loc, type,
+ fold_build2_loc (loc, BIT_IOR_EXPR,
+ type, tmp2, tmp3));
}
/* (X | Y) & Y is (X, Y). */
if (TREE_CODE (arg0) == BIT_IOR_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
- return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
+ return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
/* (X | Y) & X is (Y, X). */
if (TREE_CODE (arg0) == BIT_IOR_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
&& reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
- return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
+ return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
/* X & (X | Y) is (Y, X). */
if (TREE_CODE (arg1) == BIT_IOR_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
&& reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
- return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
+ return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
/* X & (Y | X) is (Y, X). */
if (TREE_CODE (arg1) == BIT_IOR_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
&& reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
- return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
+ return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
/* Fold (X ^ 1) & 1 as (X & 1) == 0. */
if (TREE_CODE (arg0) == BIT_XOR_EXPR
&& integer_onep (arg1))
{
tem = TREE_OPERAND (arg0, 0);
- return fold_build2 (EQ_EXPR, type,
- fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
+ return fold_build2_loc (loc, EQ_EXPR, type,
+ fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
build_int_cst (TREE_TYPE (tem), 1)),
build_int_cst (TREE_TYPE (tem), 0));
}
&& integer_onep (arg1))
{
tem = TREE_OPERAND (arg0, 0);
- return fold_build2 (EQ_EXPR, type,
- fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
+ return fold_build2_loc (loc, EQ_EXPR, type,
+ fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
build_int_cst (TREE_TYPE (tem), 1)),
build_int_cst (TREE_TYPE (tem), 0));
}
if (TREE_CODE (arg0) == BIT_XOR_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
{
- tem = fold_convert (type, TREE_OPERAND (arg0, 0));
- return fold_build2 (BIT_AND_EXPR, type,
- fold_build1 (BIT_NOT_EXPR, type, tem),
- fold_convert (type, arg1));
+ tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
+ return fold_build2_loc (loc, BIT_AND_EXPR, type,
+ fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
+ fold_convert_loc (loc, type, arg1));
}
/* Fold (X ^ Y) & X as ~Y & X. */
if (TREE_CODE (arg0) == BIT_XOR_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
&& reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
{
- tem = fold_convert (type, TREE_OPERAND (arg0, 1));
- return fold_build2 (BIT_AND_EXPR, type,
- fold_build1 (BIT_NOT_EXPR, type, tem),
- fold_convert (type, arg1));
+ tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
+ return fold_build2_loc (loc, BIT_AND_EXPR, type,
+ fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
+ fold_convert_loc (loc, type, arg1));
}
/* Fold X & (X ^ Y) as X & ~Y. */
if (TREE_CODE (arg1) == BIT_XOR_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
{
- tem = fold_convert (type, TREE_OPERAND (arg1, 1));
- return fold_build2 (BIT_AND_EXPR, type,
- fold_convert (type, arg0),
- fold_build1 (BIT_NOT_EXPR, type, tem));
+ tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
+ return fold_build2_loc (loc, BIT_AND_EXPR, type,
+ fold_convert_loc (loc, type, arg0),
+ fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
}
/* Fold X & (Y ^ X) as ~Y & X. */
if (TREE_CODE (arg1) == BIT_XOR_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
&& reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
{
- tem = fold_convert (type, TREE_OPERAND (arg1, 0));
- return fold_build2 (BIT_AND_EXPR, type,
- fold_build1 (BIT_NOT_EXPR, type, tem),
- fold_convert (type, arg0));
+ tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
+ return fold_build2_loc (loc, BIT_AND_EXPR, type,
+ fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
+ fold_convert_loc (loc, type, arg0));
}
- t1 = distribute_bit_expr (code, type, arg0, arg1);
+ t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
if (t1 != NULL_TREE)
return t1;
/* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
&& (~TREE_INT_CST_LOW (arg1)
& (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
- return fold_convert (type, TREE_OPERAND (arg0, 0));
+ return
+ fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
}
/* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& TREE_CODE (arg1) == BIT_NOT_EXPR)
{
- return fold_build1 (BIT_NOT_EXPR, type,
+ return fold_build1_loc (loc, BIT_NOT_EXPR, type,
build2 (BIT_IOR_EXPR, type,
- fold_convert (type,
- TREE_OPERAND (arg0, 0)),
- fold_convert (type,
- TREE_OPERAND (arg1, 0))));
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg0, 0)),
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg1, 0))));
}
/* If arg0 is derived from the address of an object or function, we may
/* ((X << 16) & 0xff00) is (X, 0). */
if ((mask & zerobits) == mask)
- return omit_one_operand (type, build_int_cst (type, 0), arg0);
+ return omit_one_operand_loc (loc, type,
+ build_int_cst (type, 0), arg0);
newmask = mask | zerobits;
if (newmask != mask && (newmask & (newmask + 1)) == 0)
if (shift_type != TREE_TYPE (arg0))
{
- tem = fold_build2 (TREE_CODE (arg0), shift_type,
- fold_convert (shift_type,
- TREE_OPERAND (arg0, 0)),
+ tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
+ fold_convert_loc (loc, shift_type,
+ TREE_OPERAND (arg0, 0)),
TREE_OPERAND (arg0, 1));
- tem = fold_convert (type, tem);
+ tem = fold_convert_loc (loc, type, tem);
}
else
tem = op0;
newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
if (!tree_int_cst_equal (newmaskt, arg1))
- return fold_build2 (BIT_AND_EXPR, type, tem, newmaskt);
+ return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
}
}
}
{
tree r = build_real (TREE_TYPE (arg0), dconst1);
- return omit_two_operands (type, r, arg0, arg1);
+ return omit_two_operands_loc (loc, type, r, arg0, arg1);
}
/* The complex version of the above A / A optimization. */
{
tree r = build_real (elem_type, dconst1);
/* omit_two_operands will call fold_convert for us. */
- return omit_two_operands (type, r, arg0, arg1);
+ return omit_two_operands_loc (loc, type, r, arg0, arg1);
}
}
/* (-A) / (-B) -> A / B */
if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
- return fold_build2 (RDIV_EXPR, type,
+ return fold_build2_loc (loc, RDIV_EXPR, type,
TREE_OPERAND (arg0, 0),
negate_expr (arg1));
if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
- return fold_build2 (RDIV_EXPR, type,
+ return fold_build2_loc (loc, RDIV_EXPR, type,
negate_expr (arg0),
TREE_OPERAND (arg1, 0));
/* In IEEE floating point, x/1 is not equivalent to x for snans. */
if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
&& real_onep (arg1))
- return non_lvalue (fold_convert (type, arg0));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
/* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
&& real_minus_onep (arg1))
- return non_lvalue (fold_convert (type, negate_expr (arg0)));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type,
+ negate_expr (arg0)));
/* If ARG1 is a constant, we can convert this to a multiply by the
reciprocal. This does not have the same rounding properties,
if (flag_reciprocal_math
&& 0 != (tem = const_binop (code, build_real (type, dconst1),
arg1, 0)))
- return fold_build2 (MULT_EXPR, type, arg0, tem);
+ return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
/* Find the reciprocal if optimizing and the result is exact. */
if (optimize)
{
if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
{
tem = build_real (type, r);
- return fold_build2 (MULT_EXPR, type,
- fold_convert (type, arg0), tem);
+ return fold_build2_loc (loc, MULT_EXPR, type,
+ fold_convert_loc (loc, type, arg0), tem);
}
}
}
/* Convert A/B/C to A/(B*C). */
if (flag_reciprocal_math
&& TREE_CODE (arg0) == RDIV_EXPR)
- return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
- fold_build2 (MULT_EXPR, type,
+ return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
+ fold_build2_loc (loc, MULT_EXPR, type,
TREE_OPERAND (arg0, 1), arg1));
/* Convert A/(B/C) to (A/B)*C. */
if (flag_reciprocal_math
&& TREE_CODE (arg1) == RDIV_EXPR)
- return fold_build2 (MULT_EXPR, type,
- fold_build2 (RDIV_EXPR, type, arg0,
+ return fold_build2_loc (loc, MULT_EXPR, type,
+ fold_build2_loc (loc, RDIV_EXPR, type, arg0,
TREE_OPERAND (arg1, 0)),
TREE_OPERAND (arg1, 1));
tree tem = const_binop (RDIV_EXPR, arg0,
TREE_OPERAND (arg1, 1), 0);
if (tem)
- return fold_build2 (RDIV_EXPR, type, tem,
+ return fold_build2_loc (loc, RDIV_EXPR, type, tem,
TREE_OPERAND (arg1, 0));
}
tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
if (tanfn != NULL_TREE)
- return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
+ return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
}
/* Optimize cos(x)/sin(x) as 1.0/tan(x). */
if (tanfn != NULL_TREE)
{
- tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
- return fold_build2 (RDIV_EXPR, type,
+ tree tmp = build_call_expr_loc (loc, tanfn, 1,
+ CALL_EXPR_ARG (arg0, 0));
+ return fold_build2_loc (loc, RDIV_EXPR, type,
build_real (type, dconst1), tmp);
}
}
tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
if (cosfn != NULL_TREE)
- return build_call_expr (cosfn, 1, arg00);
+ return build_call_expr_loc (loc, cosfn, 1, arg00);
}
}
if (cosfn != NULL_TREE)
{
- tree tmp = build_call_expr (cosfn, 1, arg00);
- return fold_build2 (RDIV_EXPR, type,
+ tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
+ return fold_build2_loc (loc, RDIV_EXPR, type,
build_real (type, dconst1),
tmp);
}
c = TREE_REAL_CST (arg01);
real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
arg = build_real (type, c);
- return build_call_expr (powfn, 2, arg1, arg);
+ return build_call_expr_loc (loc, powfn, 2, arg1, arg);
}
}
tree b = TREE_OPERAND (rootarg, 0);
tree c = TREE_OPERAND (rootarg, 1);
- tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
+ tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
- tmp = build_call_expr (rootfn, 1, tmp);
- return fold_build2 (MULT_EXPR, type, arg0, tmp);
+ tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
+ return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
}
}
{
tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
- arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
- return fold_build2 (MULT_EXPR, type, arg0, arg1);
+ arg1 = build_call_expr_loc (loc,
+ expfn, 1,
+ fold_convert_loc (loc, type, arg));
+ return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
}
/* Optimize x/pow(y,z) into x*pow(y,-z). */
tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
tree arg10 = CALL_EXPR_ARG (arg1, 0);
tree arg11 = CALL_EXPR_ARG (arg1, 1);
- tree neg11 = fold_convert (type, negate_expr (arg11));
- arg1 = build_call_expr (powfn, 2, arg10, neg11);
- return fold_build2 (MULT_EXPR, type, arg0, arg1);
+ tree neg11 = fold_convert_loc (loc, type,
+ negate_expr (arg11));
+ arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
+ return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
}
}
return NULL_TREE;
"occur when simplifying A / (B << N)"),
WARN_STRICT_OVERFLOW_MISC);
- sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
+ sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
sh_cnt, build_int_cst (NULL_TREE, pow2));
- return fold_build2 (RSHIFT_EXPR, type,
- fold_convert (type, arg0), sh_cnt);
+ return fold_build2_loc (loc, RSHIFT_EXPR, type,
+ fold_convert_loc (loc, type, arg0), sh_cnt);
}
}
if (INTEGRAL_TYPE_P (type)
&& TYPE_UNSIGNED (type)
&& code == FLOOR_DIV_EXPR)
- return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
+ return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
/* Fall thru */
case CEIL_DIV_EXPR:
case EXACT_DIV_EXPR:
if (integer_onep (arg1))
- return non_lvalue (fold_convert (type, arg0));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
if (integer_zerop (arg1))
return NULL_TREE;
/* X / -1 is -X. */
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
&& TREE_INT_CST_HIGH (arg1) == -1)
- return fold_convert (type, negate_expr (arg0));
+ return fold_convert_loc (loc, type, negate_expr (arg0));
/* Convert -A / -B to A / B when the type is signed and overflow is
undefined. */
"when distributing negation across "
"division"),
WARN_STRICT_OVERFLOW_MISC);
- return fold_build2 (code, type,
- fold_convert (type, TREE_OPERAND (arg0, 0)),
- fold_convert (type, negate_expr (arg1)));
+ return fold_build2_loc (loc, code, type,
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg0, 0)),
+ fold_convert_loc (loc, type,
+ negate_expr (arg1)));
}
if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
&& TREE_CODE (arg1) == NEGATE_EXPR
"when distributing negation across "
"division"),
WARN_STRICT_OVERFLOW_MISC);
- return fold_build2 (code, type,
- fold_convert (type, negate_expr (arg0)),
- fold_convert (type, TREE_OPERAND (arg1, 0)));
+ return fold_build2_loc (loc, code, type,
+ fold_convert_loc (loc, type,
+ negate_expr (arg0)),
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg1, 0)));
}
/* If arg0 is a multiple of arg1, then rewrite to the fastest div
after the last round to changes to the DIV code in expmed.c. */
if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
&& multiple_of_p (type, arg0, arg1))
- return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
+ return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
strict_overflow_p = false;
if (TREE_CODE (arg1) == INTEGER_CST
fold_overflow_warning (("assuming signed overflow does not occur "
"when simplifying division"),
WARN_STRICT_OVERFLOW_MISC);
- return fold_convert (type, tem);
+ return fold_convert_loc (loc, type, tem);
}
return NULL_TREE;
/* X % 1 is always zero, but be sure to preserve any side
effects in X. */
if (integer_onep (arg1))
- return omit_one_operand (type, integer_zero_node, arg0);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
/* X % 0, return X % 0 unchanged so that we can get the
proper warnings and errors. */
/* 0 % X is always zero, but be sure to preserve any side
effects in X. Place this after checking for X == 0. */
if (integer_zerop (arg0))
- return omit_one_operand (type, integer_zero_node, arg1);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
/* X % -1 is zero. */
if (!TYPE_UNSIGNED (type)
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
&& TREE_INT_CST_HIGH (arg1) == -1)
- return omit_one_operand (type, integer_zero_node, arg0);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
/* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
{
- tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
+ tree mask = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
build_int_cst (TREE_TYPE (arg1), 1));
if (strict_overflow_p)
fold_overflow_warning (("assuming signed overflow does not "
"occur when simplifying "
"X % (power of two)"),
WARN_STRICT_OVERFLOW_MISC);
- return fold_build2 (BIT_AND_EXPR, type,
- fold_convert (type, arg0),
- fold_convert (type, mask));
+ return fold_build2_loc (loc, BIT_AND_EXPR, type,
+ fold_convert_loc (loc, type, arg0),
+ fold_convert_loc (loc, type, mask));
}
}
&& !TYPE_OVERFLOW_TRAPS (type)
/* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
&& !sign_bit_p (arg1, arg1))
- return fold_build2 (code, type, fold_convert (type, arg0),
- fold_convert (type, negate_expr (arg1)));
+ return fold_build2_loc (loc, code, type,
+ fold_convert_loc (loc, type, arg0),
+ fold_convert_loc (loc, type,
+ negate_expr (arg1)));
/* X % -Y is the same as X % Y. */
if (code == TRUNC_MOD_EXPR
&& !TYPE_UNSIGNED (type)
&& TREE_CODE (arg1) == NEGATE_EXPR
&& !TYPE_OVERFLOW_TRAPS (type))
- return fold_build2 (code, type, fold_convert (type, arg0),
- fold_convert (type, TREE_OPERAND (arg1, 0)));
+ return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg1, 0)));
if (TREE_CODE (arg1) == INTEGER_CST
&& 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
fold_overflow_warning (("assuming signed overflow does not occur "
"when simplifying modulus"),
WARN_STRICT_OVERFLOW_MISC);
- return fold_convert (type, tem);
+ return fold_convert_loc (loc, type, tem);
}
return NULL_TREE;
case LROTATE_EXPR:
case RROTATE_EXPR:
if (integer_all_onesp (arg0))
- return omit_one_operand (type, arg0, arg1);
+ return omit_one_operand_loc (loc, type, arg0, arg1);
goto shift;
case RSHIFT_EXPR:
/* Optimize -1 >> x for arithmetic right shifts. */
if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
&& tree_expr_nonnegative_p (arg1))
- return omit_one_operand (type, arg0, arg1);
+ return omit_one_operand_loc (loc, type, arg0, arg1);
/* ... fall through ... */
case LSHIFT_EXPR:
shift:
if (integer_zerop (arg1))
- return non_lvalue (fold_convert (type, arg0));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
if (integer_zerop (arg0))
- return omit_one_operand (type, arg0, arg1);
+ return omit_one_operand_loc (loc, type, arg0, arg1);
/* Since negative shift count is not well-defined,
don't try to compute it in the compiler. */
if (code == LROTATE_EXPR || code == RROTATE_EXPR)
low = low % TYPE_PRECISION (type);
else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
- return omit_one_operand (type, build_int_cst (type, 0),
+ return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
TREE_OPERAND (arg0, 0));
else
low = TYPE_PRECISION (type) - 1;
}
- return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
+ return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
build_int_cst (type, low));
}
if (low0 == low1)
{
- arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
+ arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
lshift = build_int_cst (type, -1);
lshift = int_const_binop (code, lshift, arg1, 0);
- return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
+ return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
}
}
tree tem = build_int_cst (TREE_TYPE (arg1),
TYPE_PRECISION (type));
tem = const_binop (MINUS_EXPR, tem, arg1, 0);
- return fold_build2 (RROTATE_EXPR, type, op0, tem);
+ return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
}
/* If we have a rotate of a bit operation with the rotate count and
|| TREE_CODE (arg0) == BIT_IOR_EXPR
|| TREE_CODE (arg0) == BIT_XOR_EXPR)
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
- return fold_build2 (TREE_CODE (arg0), type,
- fold_build2 (code, type,
+ return fold_build2_loc (loc, TREE_CODE (arg0), type,
+ fold_build2_loc (loc, code, type,
TREE_OPERAND (arg0, 0), arg1),
- fold_build2 (code, type,
+ fold_build2_loc (loc, code, type,
TREE_OPERAND (arg0, 1), arg1));
/* Two consecutive rotates adding up to the precision of the
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
- tree mask = fold_build2 (code, type,
- fold_convert (type, TREE_OPERAND (arg0, 1)),
+ tree mask = fold_build2_loc (loc, code, type,
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg0, 1)),
arg1);
- tree shift = fold_build2 (code, type,
- fold_convert (type, TREE_OPERAND (arg0, 0)),
+ tree shift = fold_build2_loc (loc, code, type,
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg0, 0)),
arg1);
- tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
+ tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
if (tem)
return tem;
}
case MIN_EXPR:
if (operand_equal_p (arg0, arg1, 0))
- return omit_one_operand (type, arg0, arg1);
+ return omit_one_operand_loc (loc, type, arg0, arg1);
if (INTEGRAL_TYPE_P (type)
&& operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
- return omit_one_operand (type, arg1, arg0);
- tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
+ return omit_one_operand_loc (loc, type, arg1, arg0);
+ tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
if (tem)
return tem;
goto associate;
case MAX_EXPR:
if (operand_equal_p (arg0, arg1, 0))
- return omit_one_operand (type, arg0, arg1);
+ return omit_one_operand_loc (loc, type, arg0, arg1);
if (INTEGRAL_TYPE_P (type)
&& TYPE_MAX_VALUE (type)
&& operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
- return omit_one_operand (type, arg1, arg0);
- tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
+ return omit_one_operand_loc (loc, type, arg1, arg0);
+ tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
if (tem)
return tem;
goto associate;
("true" is a fixed value perhaps depending on the language.) */
/* If first arg is constant zero, return it. */
if (integer_zerop (arg0))
- return fold_convert (type, arg0);
+ return fold_convert_loc (loc, type, arg0);
case TRUTH_AND_EXPR:
/* If either arg is constant true, drop it. */
if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
- return non_lvalue (fold_convert (type, arg1));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
/* Preserve sequence points. */
&& (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
- return non_lvalue (fold_convert (type, arg0));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
/* If second arg is constant zero, result is zero, but first arg
must be evaluated. */
if (integer_zerop (arg1))
- return omit_one_operand (type, arg1, arg0);
+ return omit_one_operand_loc (loc, type, arg1, arg0);
/* Likewise for first arg, but note that only the TRUTH_AND_EXPR
case will be handled here. */
if (integer_zerop (arg0))
- return omit_one_operand (type, arg0, arg1);
+ return omit_one_operand_loc (loc, type, arg0, arg1);
/* !X && X is always false. */
if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
- return omit_one_operand (type, integer_zero_node, arg1);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
/* X && !X is always false. */
if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
- return omit_one_operand (type, integer_zero_node, arg0);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
/* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
means A >= Y && A != MAX, but in this case we know that
if (!TREE_SIDE_EFFECTS (arg0)
&& !TREE_SIDE_EFFECTS (arg1))
{
- tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
+ tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
if (tem && !operand_equal_p (tem, arg0, 0))
- return fold_build2 (code, type, tem, arg1);
+ return fold_build2_loc (loc, code, type, tem, arg1);
- tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
+ tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
if (tem && !operand_equal_p (tem, arg1, 0))
- return fold_build2 (code, type, arg0, tem);
+ return fold_build2_loc (loc, code, type, arg0, tem);
}
truth_andor:
|| code == TRUTH_OR_EXPR));
if (operand_equal_p (a00, a10, 0))
- return fold_build2 (TREE_CODE (arg0), type, a00,
- fold_build2 (code, type, a01, a11));
+ return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
+ fold_build2_loc (loc, code, type, a01, a11));
else if (commutative && operand_equal_p (a00, a11, 0))
- return fold_build2 (TREE_CODE (arg0), type, a00,
- fold_build2 (code, type, a01, a10));
+ return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
+ fold_build2_loc (loc, code, type, a01, a10));
else if (commutative && operand_equal_p (a01, a10, 0))
- return fold_build2 (TREE_CODE (arg0), type, a01,
- fold_build2 (code, type, a00, a11));
+ return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
+ fold_build2_loc (loc, code, type, a00, a11));
/* This case if tricky because we must either have commutative
operators or else A10 must not have side-effects. */
else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
&& operand_equal_p (a01, a11, 0))
- return fold_build2 (TREE_CODE (arg0), type,
- fold_build2 (code, type, a00, a10),
+ return fold_build2_loc (loc, TREE_CODE (arg0), type,
+ fold_build2_loc (loc, code, type, a00, a10),
a01);
}
/* See if we can build a range comparison. */
- if (0 != (tem = fold_range_test (code, type, op0, op1)))
+ if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
return tem;
/* Check for the possibility of merging component references. If our
lhs is another similar operation, try to merge its rhs with our
rhs. Then try to merge our lhs and rhs. */
if (TREE_CODE (arg0) == code
- && 0 != (tem = fold_truthop (code, type,
+ && 0 != (tem = fold_truthop (loc, code, type,
TREE_OPERAND (arg0, 1), arg1)))
- return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
+ return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
- if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
+ if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
return tem;
return NULL_TREE;
("true" is a fixed value perhaps depending on the language.) */
/* If first arg is constant true, return it. */
if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
- return fold_convert (type, arg0);
+ return fold_convert_loc (loc, type, arg0);
case TRUTH_OR_EXPR:
/* If either arg is constant zero, drop it. */
if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
- return non_lvalue (fold_convert (type, arg1));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
/* Preserve sequence points. */
&& (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
- return non_lvalue (fold_convert (type, arg0));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
/* If second arg is constant true, result is true, but we must
evaluate first arg. */
if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
- return omit_one_operand (type, arg1, arg0);
+ return omit_one_operand_loc (loc, type, arg1, arg0);
/* Likewise for first arg, but note this only occurs here for
TRUTH_OR_EXPR. */
if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
- return omit_one_operand (type, arg0, arg1);
+ return omit_one_operand_loc (loc, type, arg0, arg1);
/* !X || X is always true. */
if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
- return omit_one_operand (type, integer_one_node, arg1);
+ return omit_one_operand_loc (loc, type, integer_one_node, arg1);
/* X || !X is always true. */
if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
- return omit_one_operand (type, integer_one_node, arg0);
+ return omit_one_operand_loc (loc, type, integer_one_node, arg0);
goto truth_andor;
case TRUTH_XOR_EXPR:
/* If the second arg is constant zero, drop it. */
if (integer_zerop (arg1))
- return non_lvalue (fold_convert (type, arg0));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
/* If the second arg is constant true, this is a logical inversion. */
if (integer_onep (arg1))
{
/* Only call invert_truthvalue if operand is a truth value. */
if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
- tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
+ tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
else
- tem = invert_truthvalue (arg0);
- return non_lvalue (fold_convert (type, tem));
+ tem = invert_truthvalue_loc (loc, arg0);
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
}
/* Identical arguments cancel to zero. */
if (operand_equal_p (arg0, arg1, 0))
- return omit_one_operand (type, integer_zero_node, arg0);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
/* !X ^ X is always true. */
if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
- return omit_one_operand (type, integer_one_node, arg1);
+ return omit_one_operand_loc (loc, type, integer_one_node, arg1);
/* X ^ !X is always true. */
if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
- return omit_one_operand (type, integer_one_node, arg0);
+ return omit_one_operand_loc (loc, type, integer_one_node, arg0);
return NULL_TREE;
case EQ_EXPR:
case NE_EXPR:
- tem = fold_comparison (code, type, op0, op1);
+ tem = fold_comparison (loc, code, type, op0, op1);
if (tem != NULL_TREE)
return tem;
/* bool_var != 0 becomes bool_var. */
if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
&& code == NE_EXPR)
- return non_lvalue (fold_convert (type, arg0));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
/* bool_var == 1 becomes bool_var. */
if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
&& code == EQ_EXPR)
- return non_lvalue (fold_convert (type, arg0));
+ return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
/* bool_var != 1 becomes !bool_var. */
if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
&& code == NE_EXPR)
- return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
+ return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
+ fold_convert_loc (loc, type, arg0));
/* bool_var == 0 becomes !bool_var. */
if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
&& code == EQ_EXPR)
- return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
+ return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
+ fold_convert_loc (loc, type, arg0));
/* If this is an equality comparison of the address of two non-weak,
unaliased symbols neither of which are extern (since we do not
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
&& 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
? MINUS_EXPR : PLUS_EXPR,
- fold_convert (TREE_TYPE (arg0), arg1),
+ fold_convert_loc (loc, TREE_TYPE (arg0),
+ arg1),
TREE_OPERAND (arg0, 1), 0))
&& !TREE_OVERFLOW (tem))
- return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
+ return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
/* Similarly for a NEGATE_EXPR. */
if (TREE_CODE (arg0) == NEGATE_EXPR
&& 0 != (tem = negate_expr (arg1))
&& TREE_CODE (tem) == INTEGER_CST
&& !TREE_OVERFLOW (tem))
- return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
+ return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
/* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
if (TREE_CODE (arg0) == BIT_XOR_EXPR
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
- return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
- fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
- fold_convert (TREE_TYPE (arg0), arg1),
+ return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
+ fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
+ fold_convert_loc (loc,
+ TREE_TYPE (arg0),
+ arg1),
TREE_OPERAND (arg0, 1)));
/* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
|| POINTER_TYPE_P (TREE_TYPE (arg0))))
{
tree val = TREE_OPERAND (arg0, 1);
- return omit_two_operands (type,
- fold_build2 (code, type,
+ return omit_two_operands_loc (loc, type,
+ fold_build2_loc (loc, code, type,
val,
build_int_cst (TREE_TYPE (val),
0)),
&& operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
&& (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
{
- return omit_two_operands (type,
+ return omit_two_operands_loc (loc, type,
code == NE_EXPR
? boolean_true_node : boolean_false_node,
TREE_OPERAND (arg0, 1), arg1);
for !=. Don't do this for ordered comparisons due to overflow. */
if (TREE_CODE (arg0) == MINUS_EXPR
&& integer_zerop (arg1))
- return fold_build2 (code, type,
+ return fold_build2_loc (loc, code, type,
TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
/* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
if (TREE_CODE (arg0) == ABS_EXPR
&& (integer_zerop (arg1) || real_zerop (arg1)))
- return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
+ return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
/* If this is an EQ or NE comparison with zero and ARG0 is
(1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
if (TREE_CODE (arg00) == LSHIFT_EXPR
&& integer_onep (TREE_OPERAND (arg00, 0)))
{
- tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
+ tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
arg01, TREE_OPERAND (arg00, 1));
- tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
+ tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
build_int_cst (TREE_TYPE (arg0), 1));
- return fold_build2 (code, type,
- fold_convert (TREE_TYPE (arg1), tem), arg1);
+ return fold_build2_loc (loc, code, type,
+ fold_convert_loc (loc, TREE_TYPE (arg1), tem),
+ arg1);
}
else if (TREE_CODE (arg01) == LSHIFT_EXPR
&& integer_onep (TREE_OPERAND (arg01, 0)))
{
- tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
+ tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
arg00, TREE_OPERAND (arg01, 1));
- tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
+ tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
build_int_cst (TREE_TYPE (arg0), 1));
- return fold_build2 (code, type,
- fold_convert (TREE_TYPE (arg1), tem), arg1);
+ return fold_build2_loc (loc, code, type,
+ fold_convert_loc (loc, TREE_TYPE (arg1), tem),
+ arg1);
}
}
&& integer_pow2p (TREE_OPERAND (arg0, 1)))
{
tree newtype = unsigned_type_for (TREE_TYPE (arg0));
- tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
- fold_convert (newtype,
- TREE_OPERAND (arg0, 0)),
- fold_convert (newtype,
- TREE_OPERAND (arg0, 1)));
+ tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
+ fold_convert_loc (loc, newtype,
+ TREE_OPERAND (arg0, 0)),
+ fold_convert_loc (loc, newtype,
+ TREE_OPERAND (arg0, 1)));
- return fold_build2 (code, type, newmod,
- fold_convert (newtype, arg1));
+ return fold_build2_loc (loc, code, type, newmod,
+ fold_convert_loc (loc, newtype, arg1));
}
/* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
can be rewritten as (X & (C2 << C1)) != 0. */
if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
{
- tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
- tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
- return fold_build2 (code, type, tem, arg1);
+ tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
+ tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
+ return fold_build2_loc (loc, code, type, tem, arg1);
}
/* Otherwise, for signed (arithmetic) shifts,
((X >> C1) & C2) != 0 is rewritten as X < 0, and
((X >> C1) & C2) == 0 is rewritten as X >= 0. */
else if (!TYPE_UNSIGNED (itype))
- return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
+ return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
arg000, build_int_cst (itype, 0));
/* Otherwise, of unsigned (logical) shifts,
((X >> C1) & C2) != 0 is rewritten as (X,false), and
((X >> C1) & C2) == 0 is rewritten as (X,true). */
else
- return omit_one_operand (type,
+ return omit_one_operand_loc (loc, type,
code == EQ_EXPR ? integer_one_node
: integer_zero_node,
arg000);
&& integer_zerop (arg1)
&& TREE_CODE (arg0) == BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (arg0, 1)))
- return fold_convert (type, arg0);
+ return fold_convert_loc (loc, type, arg0);
/* If we have (A & C) == C where C is a power of 2, convert this into
(A & C) != 0. Similarly for NE_EXPR. */
if (TREE_CODE (arg0) == BIT_AND_EXPR
&& integer_pow2p (TREE_OPERAND (arg0, 1))
&& operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
- return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
- arg0, fold_convert (TREE_TYPE (arg0),
- integer_zero_node));
+ return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
+ arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
+ integer_zero_node));
/* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
bit, then fold the expression into A < 0 or A >= 0. */
- tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
+ tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
if (tem)
return tem;
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
- tree notc = fold_build1 (BIT_NOT_EXPR,
+ tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
TREE_TYPE (TREE_OPERAND (arg0, 1)),
TREE_OPERAND (arg0, 1));
- tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
+ tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
arg1, notc);
tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
if (integer_nonzerop (dandnotc))
- return omit_one_operand (type, rslt, arg0);
+ return omit_one_operand_loc (loc, type, rslt, arg0);
}
/* If we have (A | C) == D where C & ~D != 0, convert this into 0.
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
- tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
- tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
+ tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
+ tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
TREE_OPERAND (arg0, 1), notd);
tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
if (integer_nonzerop (candnotd))
- return omit_one_operand (type, rslt, arg0);
+ return omit_one_operand_loc (loc, type, rslt, arg0);
}
/* If this is a comparison of a field, we may be able to simplify it. */
to make sure the warnings are given. */
&& (optimize || TREE_CODE (arg1) == INTEGER_CST))
{
- t1 = optimize_bit_field_compare (code, type, arg0, arg1);
+ t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
if (t1)
return t1;
}
&& call_expr_nargs (arg0) == 1
&& TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
{
- tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
- return fold_build2 (code, type, iref,
+ tree iref = build_fold_indirect_ref_loc (loc,
+ CALL_EXPR_ARG (arg0, 0));
+ return fold_build2_loc (loc, code, type, iref,
build_int_cst (TREE_TYPE (iref), 0));
}
}
if (TYPE_UNSIGNED (itype))
{
itype = signed_type_for (itype);
- arg00 = fold_convert (itype, arg00);
+ arg00 = fold_convert_loc (loc, itype, arg00);
}
- return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
+ return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
type, arg00, build_int_cst (itype, 0));
}
}
/* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
if (integer_zerop (arg1)
&& TREE_CODE (arg0) == BIT_XOR_EXPR)
- return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
+ return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
TREE_OPERAND (arg0, 1));
/* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
if (TREE_CODE (arg0) == BIT_XOR_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
- return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
+ return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
build_int_cst (TREE_TYPE (arg1), 0));
/* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
if (TREE_CODE (arg0) == BIT_XOR_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
&& reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
- return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
+ return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
build_int_cst (TREE_TYPE (arg1), 0));
/* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
if (TREE_CODE (arg0) == BIT_XOR_EXPR
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
- return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
- fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
+ return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
+ fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
TREE_OPERAND (arg0, 1), arg1));
/* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
&& integer_zerop (arg1)
&& integer_pow2p (TREE_OPERAND (arg0, 1)))
{
- tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
+ tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
TREE_OPERAND (arg0, 1));
- return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
+ return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
type, tem, arg1);
}
TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
{
tree arg00 = TREE_OPERAND (arg0, 0);
- return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
+ return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
arg00, build_int_cst (TREE_TYPE (arg00), 0));
}
TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
{
tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
- tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
+ tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
arg000, TREE_OPERAND (arg0, 1));
- return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
+ return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
tem, build_int_cst (TREE_TYPE (tem), 0));
}
&& tree_expr_nonzero_p (arg0))
{
tree res = constant_boolean_node (code==NE_EXPR, type);
- return omit_one_operand (type, res, arg0);
+ return omit_one_operand_loc (loc, type, res, arg0);
}
/* Fold -X op -Y as X op Y, where op is eq/ne. */
if (TREE_CODE (arg0) == NEGATE_EXPR
&& TREE_CODE (arg1) == NEGATE_EXPR)
- return fold_build2 (code, type,
+ return fold_build2_loc (loc, code, type,
TREE_OPERAND (arg0, 0),
TREE_OPERAND (arg1, 0));
tree itype = TREE_TYPE (arg0);
if (operand_equal_p (arg01, arg11, 0))
- return fold_build2 (code, type,
- fold_build2 (BIT_AND_EXPR, itype,
- fold_build2 (BIT_XOR_EXPR, itype,
+ return fold_build2_loc (loc, code, type,
+ fold_build2_loc (loc, BIT_AND_EXPR, itype,
+ fold_build2_loc (loc,
+ BIT_XOR_EXPR, itype,
arg00, arg10),
arg01),
build_int_cst (itype, 0));
if (operand_equal_p (arg01, arg10, 0))
- return fold_build2 (code, type,
- fold_build2 (BIT_AND_EXPR, itype,
- fold_build2 (BIT_XOR_EXPR, itype,
+ return fold_build2_loc (loc, code, type,
+ fold_build2_loc (loc, BIT_AND_EXPR, itype,
+ fold_build2_loc (loc,
+ BIT_XOR_EXPR, itype,
arg00, arg11),
arg01),
build_int_cst (itype, 0));
if (operand_equal_p (arg00, arg11, 0))
- return fold_build2 (code, type,
- fold_build2 (BIT_AND_EXPR, itype,
- fold_build2 (BIT_XOR_EXPR, itype,
+ return fold_build2_loc (loc, code, type,
+ fold_build2_loc (loc, BIT_AND_EXPR, itype,
+ fold_build2_loc (loc,
+ BIT_XOR_EXPR, itype,
arg01, arg10),
arg00),
build_int_cst (itype, 0));
if (operand_equal_p (arg00, arg10, 0))
- return fold_build2 (code, type,
- fold_build2 (BIT_AND_EXPR, itype,
- fold_build2 (BIT_XOR_EXPR, itype,
+ return fold_build2_loc (loc, code, type,
+ fold_build2_loc (loc, BIT_AND_EXPR, itype,
+ fold_build2_loc (loc,
+ BIT_XOR_EXPR, itype,
arg01, arg11),
arg00),
build_int_cst (itype, 0));
operand_equal_p guarantees no side-effects so we don't need
to use omit_one_operand on Z. */
if (operand_equal_p (arg01, arg11, 0))
- return fold_build2 (code, type, arg00, arg10);
+ return fold_build2_loc (loc, code, type, arg00, arg10);
if (operand_equal_p (arg01, arg10, 0))
- return fold_build2 (code, type, arg00, arg11);
+ return fold_build2_loc (loc, code, type, arg00, arg11);
if (operand_equal_p (arg00, arg11, 0))
- return fold_build2 (code, type, arg01, arg10);
+ return fold_build2_loc (loc, code, type, arg01, arg10);
if (operand_equal_p (arg00, arg10, 0))
- return fold_build2 (code, type, arg01, arg11);
+ return fold_build2_loc (loc, code, type, arg01, arg11);
/* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
if (TREE_CODE (arg01) == INTEGER_CST
&& TREE_CODE (arg11) == INTEGER_CST)
- return fold_build2 (code, type,
- fold_build2 (BIT_XOR_EXPR, itype, arg00,
- fold_build2 (BIT_XOR_EXPR, itype,
+ return fold_build2_loc (loc, code, type,
+ fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
+ fold_build2_loc (loc,
+ BIT_XOR_EXPR, itype,
arg01, arg11)),
arg10);
}
imag1 = TREE_IMAGPART (arg1);
}
- rcond = fold_binary (code, type, real0, real1);
+ rcond = fold_binary_loc (loc, code, type, real0, real1);
if (rcond && TREE_CODE (rcond) == INTEGER_CST)
{
if (integer_zerop (rcond))
{
if (code == EQ_EXPR)
- return omit_two_operands (type, boolean_false_node,
+ return omit_two_operands_loc (loc, type, boolean_false_node,
imag0, imag1);
- return fold_build2 (NE_EXPR, type, imag0, imag1);
+ return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
}
else
{
if (code == NE_EXPR)
- return omit_two_operands (type, boolean_true_node,
+ return omit_two_operands_loc (loc, type, boolean_true_node,
imag0, imag1);
- return fold_build2 (EQ_EXPR, type, imag0, imag1);
+ return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
}
}
- icond = fold_binary (code, type, imag0, imag1);
+ icond = fold_binary_loc (loc, code, type, imag0, imag1);
if (icond && TREE_CODE (icond) == INTEGER_CST)
{
if (integer_zerop (icond))
{
if (code == EQ_EXPR)
- return omit_two_operands (type, boolean_false_node,
+ return omit_two_operands_loc (loc, type, boolean_false_node,
real0, real1);
- return fold_build2 (NE_EXPR, type, real0, real1);
+ return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
}
else
{
if (code == NE_EXPR)
- return omit_two_operands (type, boolean_true_node,
+ return omit_two_operands_loc (loc, type, boolean_true_node,
real0, real1);
- return fold_build2 (EQ_EXPR, type, real0, real1);
+ return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
}
}
}
case GT_EXPR:
case LE_EXPR:
case GE_EXPR:
- tem = fold_comparison (code, type, op0, op1);
+ tem = fold_comparison (loc, code, type, op0, op1);
if (tem != NULL_TREE)
return tem;
switch (code)
{
case GT_EXPR:
- return omit_one_operand (type, integer_zero_node, arg0);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
case GE_EXPR:
- return fold_build2 (EQ_EXPR, type, op0, op1);
+ return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
case LE_EXPR:
- return omit_one_operand (type, integer_one_node, arg0);
+ return omit_one_operand_loc (loc, type, integer_one_node, arg0);
case LT_EXPR:
- return fold_build2 (NE_EXPR, type, op0, op1);
+ return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
/* The GE_EXPR and LT_EXPR cases above are not normally
reached because of previous transformations. */
case GT_EXPR:
arg1 = const_binop (PLUS_EXPR, arg1,
build_int_cst (TREE_TYPE (arg1), 1), 0);
- return fold_build2 (EQ_EXPR, type,
- fold_convert (TREE_TYPE (arg1), arg0),
+ return fold_build2_loc (loc, EQ_EXPR, type,
+ fold_convert_loc (loc,
+ TREE_TYPE (arg1), arg0),
arg1);
case LE_EXPR:
arg1 = const_binop (PLUS_EXPR, arg1,
build_int_cst (TREE_TYPE (arg1), 1), 0);
- return fold_build2 (NE_EXPR, type,
- fold_convert (TREE_TYPE (arg1), arg0),
+ return fold_build2_loc (loc, NE_EXPR, type,
+ fold_convert_loc (loc, TREE_TYPE (arg1),
+ arg0),
arg1);
default:
break;
switch (code)
{
case LT_EXPR:
- return omit_one_operand (type, integer_zero_node, arg0);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
case LE_EXPR:
- return fold_build2 (EQ_EXPR, type, op0, op1);
+ return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
case GE_EXPR:
- return omit_one_operand (type, integer_one_node, arg0);
+ return omit_one_operand_loc (loc, type, integer_one_node, arg0);
case GT_EXPR:
- return fold_build2 (NE_EXPR, type, op0, op1);
+ return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
default:
break;
{
case GE_EXPR:
arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
- return fold_build2 (NE_EXPR, type,
- fold_convert (TREE_TYPE (arg1), arg0),
+ return fold_build2_loc (loc, NE_EXPR, type,
+ fold_convert_loc (loc,
+ TREE_TYPE (arg1), arg0),
arg1);
case LT_EXPR:
arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
- return fold_build2 (EQ_EXPR, type,
- fold_convert (TREE_TYPE (arg1), arg0),
+ return fold_build2_loc (loc, EQ_EXPR, type,
+ fold_convert_loc (loc, TREE_TYPE (arg1),
+ arg0),
arg1);
default:
break;
{
tree st;
st = signed_type_for (TREE_TYPE (arg1));
- return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
- type, fold_convert (st, arg0),
+ return fold_build2_loc (loc,
+ code == LE_EXPR ? GE_EXPR : LT_EXPR,
+ type, fold_convert_loc (loc, st, arg0),
build_int_cst (st, 0));
}
}
&& (0 != (tem = negate_expr (arg1)))
&& TREE_CODE (tem) == INTEGER_CST
&& !TREE_OVERFLOW (tem))
- return fold_build2 (TRUTH_ANDIF_EXPR, type,
+ return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
build2 (GE_EXPR, type,
TREE_OPERAND (arg0, 0), tem),
build2 (LE_EXPR, type,
"when simplifying comparison of "
"absolute value and zero"),
WARN_STRICT_OVERFLOW_CONDITIONAL);
- return omit_one_operand (type, integer_one_node, arg0);
+ return omit_one_operand_loc (loc, type, integer_one_node, arg0);
}
/* Convert ABS_EXPR<x> < 0 to false. */
"when simplifying comparison of "
"absolute value and zero"),
WARN_STRICT_OVERFLOW_CONDITIONAL);
- return omit_one_operand (type, integer_zero_node, arg0);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
}
/* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
&& TYPE_UNSIGNED (TREE_TYPE (arg0))
&& TREE_CODE (arg1) == LSHIFT_EXPR
&& integer_onep (TREE_OPERAND (arg1, 0)))
- return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
- build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
- TREE_OPERAND (arg1, 1)),
- build_int_cst (TREE_TYPE (arg0), 0));
+ {
+ tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
+ build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
+ TREE_OPERAND (arg1, 1)),
+ build_int_cst (TREE_TYPE (arg0), 0));
+ goto fold_binary_exit;
+ }
if ((code == LT_EXPR || code == GE_EXPR)
&& TYPE_UNSIGNED (TREE_TYPE (arg0))
&& CONVERT_EXPR_P (arg1)
&& TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
&& integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
- return
- build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
- fold_convert (TREE_TYPE (arg0),
- build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
- TREE_OPERAND (TREE_OPERAND (arg1, 0),
- 1))),
- build_int_cst (TREE_TYPE (arg0), 0));
+ {
+ tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
+ fold_convert_loc (loc, TREE_TYPE (arg0),
+ build2 (RSHIFT_EXPR,
+ TREE_TYPE (arg0), arg0,
+ TREE_OPERAND (TREE_OPERAND (arg1, 0),
+ 1))),
+ build_int_cst (TREE_TYPE (arg0), 0));
+ goto fold_binary_exit;
+ }
return NULL_TREE;
t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
? integer_zero_node
: integer_one_node;
- return omit_one_operand (type, t1, arg1);
+ return omit_one_operand_loc (loc, type, t1, arg1);
}
/* If the second operand is NaN, the result is constant. */
t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
? integer_zero_node
: integer_one_node;
- return omit_one_operand (type, t1, arg0);
+ return omit_one_operand_loc (loc, type, t1, arg0);
}
/* Simplify unordered comparison of something with itself. */
newtype = TREE_TYPE (targ1);
if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
- return fold_build2 (code, type, fold_convert (newtype, targ0),
- fold_convert (newtype, targ1));
+ return fold_build2_loc (loc, code, type,
+ fold_convert_loc (loc, newtype, targ0),
+ fold_convert_loc (loc, newtype, targ1));
}
return NULL_TREE;
return NULL_TREE;
/* Don't let (0, 0) be null pointer constant. */
tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
- : fold_convert (type, arg1);
- return pedantic_non_lvalue (tem);
+ : fold_convert_loc (loc, type, arg1);
+ return pedantic_non_lvalue_loc (loc, tem);
case COMPLEX_EXPR:
if ((TREE_CODE (arg0) == REAL_CST
default:
return NULL_TREE;
} /* switch (code) */
+ fold_binary_exit:
+ protected_set_expr_location (tem, loc);
+ return tem;
}
/* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
successful. Otherwise, return NULL_TREE. */
tree
-fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
+fold_ternary_loc (location_t loc, enum tree_code code, tree type,
+ tree op0, tree op1, tree op2)
{
tree tem;
tree arg0 = NULL_TREE, arg1 = NULL_TREE;
|| !contains_label_p (unused_op))
&& (! VOID_TYPE_P (TREE_TYPE (tem))
|| VOID_TYPE_P (type)))
- return pedantic_non_lvalue (tem);
+ return pedantic_non_lvalue_loc (loc, tem);
return NULL_TREE;
}
if (operand_equal_p (arg1, op2, 0))
- return pedantic_omit_one_operand (type, arg1, arg0);
+ return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
/* If we have A op B ? A : C, we may be able to convert this to a
simpler expression, depending on the operation and the values
arg1, TREE_OPERAND (arg0, 1))
&& !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
{
- tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
+ tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
if (tem)
return tem;
}
TREE_OPERAND (arg0, 1))
&& !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
{
- tem = fold_truth_not_expr (arg0);
+ tem = fold_truth_not_expr (loc, arg0);
if (tem && COMPARISON_CLASS_P (tem))
{
- tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
+ tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
if (tem)
return tem;
}
/* See if this can be inverted. If it can't, possibly because
it was a floating-point inequality comparison, don't do
anything. */
- tem = fold_truth_not_expr (arg0);
+ tem = fold_truth_not_expr (loc, arg0);
if (tem)
- return fold_build3 (code, type, tem, op2, op1);
+ return fold_build3_loc (loc, code, type, tem, op2, op1);
}
/* Convert A ? 1 : 0 to simply A. */
a COND, which will recurse. In that case, the COND_EXPR
is probably the best choice, so leave it alone. */
&& type == TREE_TYPE (arg0))
- return pedantic_non_lvalue (arg0);
+ return pedantic_non_lvalue_loc (loc, arg0);
/* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
over COND_EXPR in cases such as floating point comparisons. */
if (integer_zerop (op1)
&& integer_onep (op2)
&& truth_value_p (TREE_CODE (arg0)))
- return pedantic_non_lvalue (fold_convert (type,
- invert_truthvalue (arg0)));
+ return pedantic_non_lvalue_loc (loc,
+ fold_convert_loc (loc, type,
+ invert_truthvalue_loc (loc,
+ arg0)));
/* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
if (TREE_CODE (arg0) == LT_EXPR
&& (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
{
tem_type = signed_type_for (TREE_TYPE (tem));
- tem = fold_convert (tem_type, tem);
+ tem = fold_convert_loc (loc, tem_type, tem);
}
else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
&& (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
{
tem_type = unsigned_type_for (TREE_TYPE (tem));
- tem = fold_convert (tem_type, tem);
+ tem = fold_convert_loc (loc, tem_type, tem);
}
else
tem = NULL;
}
if (tem)
- return fold_convert (type,
- fold_build2 (BIT_AND_EXPR,
- TREE_TYPE (tem), tem,
- fold_convert (TREE_TYPE (tem),
- arg1)));
+ return
+ fold_convert_loc (loc, type,
+ fold_build2_loc (loc, BIT_AND_EXPR,
+ TREE_TYPE (tem), tem,
+ fold_convert_loc (loc,
+ TREE_TYPE (tem),
+ arg1)));
}
/* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
&& TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
&& (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
- return fold_build2 (BIT_AND_EXPR, type,
+ return fold_build2_loc (loc, BIT_AND_EXPR, type,
TREE_OPERAND (tem, 0), arg1);
}
&& TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
&& operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
arg1, OEP_ONLY_CONST))
- return pedantic_non_lvalue (fold_convert (type,
- TREE_OPERAND (arg0, 0)));
+ return pedantic_non_lvalue_loc (loc,
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg0, 0)));
/* Convert A ? B : 0 into A && B if A and B are truth values. */
if (integer_zerop (op2)
&& truth_value_p (TREE_CODE (arg0))
&& truth_value_p (TREE_CODE (arg1)))
- return fold_build2 (TRUTH_ANDIF_EXPR, type,
- fold_convert (type, arg0),
+ return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
+ fold_convert_loc (loc, type, arg0),
arg1);
/* Convert A ? B : 1 into !A || B if A and B are truth values. */
&& truth_value_p (TREE_CODE (arg1)))
{
/* Only perform transformation if ARG0 is easily inverted. */
- tem = fold_truth_not_expr (arg0);
+ tem = fold_truth_not_expr (loc, arg0);
if (tem)
- return fold_build2 (TRUTH_ORIF_EXPR, type,
- fold_convert (type, tem),
+ return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
+ fold_convert_loc (loc, type, tem),
arg1);
}
&& truth_value_p (TREE_CODE (op2)))
{
/* Only perform transformation if ARG0 is easily inverted. */
- tem = fold_truth_not_expr (arg0);
+ tem = fold_truth_not_expr (loc, arg0);
if (tem)
- return fold_build2 (TRUTH_ANDIF_EXPR, type,
- fold_convert (type, tem),
+ return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
+ fold_convert_loc (loc, type, tem),
op2);
}
if (integer_onep (arg1)
&& truth_value_p (TREE_CODE (arg0))
&& truth_value_p (TREE_CODE (op2)))
- return fold_build2 (TRUTH_ORIF_EXPR, type,
- fold_convert (type, arg0),
+ return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
+ fold_convert_loc (loc, type, arg0),
op2);
return NULL_TREE;
if (elements)
return TREE_VALUE (elements);
else
- return fold_convert (type, integer_zero_node);
+ return fold_convert_loc (loc, type, integer_zero_node);
}
}
if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
&& TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
&& integer_zerop (op2))
- return fold_convert (type, arg0);
+ return fold_convert_loc (loc, type, arg0);
return NULL_TREE;
enum tree_code code = TREE_CODE (t);
enum tree_code_class kind = TREE_CODE_CLASS (code);
tree tem;
+ location_t loc = EXPR_LOCATION (expr);
/* Return right away if a constant. */
if (kind == tcc_constant)
{
if (code == CALL_EXPR)
{
- tem = fold_call_expr (expr, false);
+ tem = fold_call_expr (loc, expr, false);
return tem ? tem : expr;
}
return expr;
{
case 1:
op0 = TREE_OPERAND (t, 0);
- tem = fold_unary (code, type, op0);
+ tem = fold_unary_loc (loc, code, type, op0);
return tem ? tem : expr;
case 2:
op0 = TREE_OPERAND (t, 0);
op1 = TREE_OPERAND (t, 1);
- tem = fold_binary (code, type, op0, op1);
+ tem = fold_binary_loc (loc, code, type, op0, op1);
return tem ? tem : expr;
case 3:
op0 = TREE_OPERAND (t, 0);
op1 = TREE_OPERAND (t, 1);
op2 = TREE_OPERAND (t, 2);
- tem = fold_ternary (code, type, op0, op1, op2);
+ tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
return tem ? tem : expr;
default:
break;
#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
- operand OP0. Return a folded expression if successful. Otherwise,
- return a tree expression with code CODE of type TYPE with an
- operand OP0. */
+ operand OP0. LOC is the location of the resulting expression.
+ Return a folded expression if successful. Otherwise, return a tree
+ expression with code CODE of type TYPE with an operand OP0. */
tree
-fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
+fold_build1_stat_loc (location_t loc,
+ enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
tree tem;
#ifdef ENABLE_FOLD_CHECKING
htab_empty (ht);
#endif
- tem = fold_unary (code, type, op0);
+ tem = fold_unary_loc (loc, code, type, op0);
if (!tem)
- tem = build1_stat (code, type, op0 PASS_MEM_STAT);
+ {
+ tem = build1_stat (code, type, op0 PASS_MEM_STAT);
+ SET_EXPR_LOCATION (tem, loc);
+ }
#ifdef ENABLE_FOLD_CHECKING
md5_init_ctx (&ctx);
}
/* Fold a binary tree expression with code CODE of type TYPE with
- operands OP0 and OP1. Return a folded expression if successful.
- Otherwise, return a tree expression with code CODE of type TYPE
- with operands OP0 and OP1. */
+ operands OP0 and OP1. LOC is the location of the resulting
+ expression. Return a folded expression if successful. Otherwise,
+ return a tree expression with code CODE of type TYPE with operands
+ OP0 and OP1. */
tree
-fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
- MEM_STAT_DECL)
+fold_build2_stat_loc (location_t loc,
+ enum tree_code code, tree type, tree op0, tree op1
+ MEM_STAT_DECL)
{
tree tem;
#ifdef ENABLE_FOLD_CHECKING
htab_empty (ht);
#endif
- tem = fold_binary (code, type, op0, op1);
+ tem = fold_binary_loc (loc, code, type, op0, op1);
if (!tem)
- tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
+ {
+ tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
+ SET_EXPR_LOCATION (tem, loc);
+ }
#ifdef ENABLE_FOLD_CHECKING
md5_init_ctx (&ctx);
type TYPE with operands OP0, OP1, and OP2. */
tree
-fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
- MEM_STAT_DECL)
+fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
+ tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
tree tem;
#ifdef ENABLE_FOLD_CHECKING
#endif
gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
- tem = fold_ternary (code, type, op0, op1, op2);
+ tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
if (!tem)
- tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
+ {
+ tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
+ SET_EXPR_LOCATION (tem, loc);
+ }
#ifdef ENABLE_FOLD_CHECKING
md5_init_ctx (&ctx);
of type TYPE from the given operands as constructed by build_call_array. */
tree
-fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
+fold_build_call_array_loc (location_t loc, tree type, tree fn,
+ int nargs, tree *argarray)
{
tree tem;
#ifdef ENABLE_FOLD_CHECKING
htab_empty (ht);
#endif
- tem = fold_builtin_call_array (type, fn, nargs, argarray);
+ tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
#ifdef ENABLE_FOLD_CHECKING
md5_init_ctx (&ctx);
folding_initializer = saved_folding_initializer;
tree
-fold_build1_initializer (enum tree_code code, tree type, tree op)
+fold_build1_initializer_loc (location_t loc, enum tree_code code,
+ tree type, tree op)
{
tree result;
START_FOLD_INIT;
- result = fold_build1 (code, type, op);
+ result = fold_build1_loc (loc, code, type, op);
END_FOLD_INIT;
return result;
}
tree
-fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
+fold_build2_initializer_loc (location_t loc, enum tree_code code,
+ tree type, tree op0, tree op1)
{
tree result;
START_FOLD_INIT;
- result = fold_build2 (code, type, op0, op1);
+ result = fold_build2_loc (loc, code, type, op0, op1);
END_FOLD_INIT;
return result;
}
tree
-fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
- tree op2)
+fold_build3_initializer_loc (location_t loc, enum tree_code code,
+ tree type, tree op0, tree op1, tree op2)
{
tree result;
START_FOLD_INIT;
- result = fold_build3 (code, type, op0, op1, op2);
+ result = fold_build3_loc (loc, code, type, op0, op1, op2);
END_FOLD_INIT;
return result;
}
tree
-fold_build_call_array_initializer (tree type, tree fn,
- int nargs, tree *argarray)
+fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
+ int nargs, tree *argarray)
{
tree result;
START_FOLD_INIT;
- result = fold_build_call_array (type, fn, nargs, argarray);
+ result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
END_FOLD_INIT;
return result;
tree exp1 = TREE_OPERAND (exp, 0);
tree index;
tree string;
+ location_t loc = EXPR_LOCATION (exp);
if (TREE_CODE (exp) == INDIRECT_REF)
string = string_constant (exp1, &index);
else
{
tree low_bound = array_ref_low_bound (exp);
- index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
+ index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
/* Optimize the special-case of a zero lower bound.
+(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
+INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
if (! integer_zerop (low_bound))
- index = size_diffop (index, fold_convert (sizetype, low_bound));
+ index = size_diffop_loc (loc, index,
+ fold_convert_loc (loc, sizetype, low_bound));
string = exp1;
}
possible. */
tree
-fold_indirect_ref_1 (tree type, tree op0)
+fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
tree sub = op0;
tree subtype;
tree min_val = size_zero_node;
if (type_domain && TYPE_MIN_VALUE (type_domain))
min_val = TYPE_MIN_VALUE (type_domain);
- return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
+ op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
+ SET_EXPR_LOCATION (op0, loc);
+ return op0;
}
/* *(foo *)&complexfoo => __real__ complexfoo */
else if (TREE_CODE (optype) == COMPLEX_TYPE
&& type == TREE_TYPE (optype))
- return fold_build1 (REALPART_EXPR, type, op);
+ return fold_build1_loc (loc, REALPART_EXPR, type, op);
/* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
else if (TREE_CODE (optype) == VECTOR_TYPE
&& type == TREE_TYPE (optype))
{
tree part_width = TYPE_SIZE (type);
tree index = bitsize_int (0);
- return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
+ return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
}
}
tree index = bitsize_int (indexi);
if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
- return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
+ return fold_build3_loc (loc,
+ BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
part_width, index);
}
{
tree size = TYPE_SIZE_UNIT (type);
if (tree_int_cst_equal (size, op01))
- return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
+ return fold_build1_loc (loc, IMAGPART_EXPR, type,
+ TREE_OPERAND (op00, 0));
}
}
{
tree type_domain;
tree min_val = size_zero_node;
- sub = build_fold_indirect_ref (sub);
+ sub = build_fold_indirect_ref_loc (loc, sub);
type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
if (type_domain && TYPE_MIN_VALUE (type_domain))
min_val = TYPE_MIN_VALUE (type_domain);
- return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
+ op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
+ SET_EXPR_LOCATION (op0, loc);
+ return op0;
}
return NULL_TREE;
cases. */
tree
-build_fold_indirect_ref (tree t)
+build_fold_indirect_ref_loc (location_t loc, tree t)
{
tree type = TREE_TYPE (TREE_TYPE (t));
- tree sub = fold_indirect_ref_1 (type, t);
+ tree sub = fold_indirect_ref_1 (loc, type, t);
if (sub)
return sub;
- else
- return build1 (INDIRECT_REF, type, t);
+
+ t = build1 (INDIRECT_REF, type, t);
+ SET_EXPR_LOCATION (t, loc);
+ return t;
}
/* Given an INDIRECT_REF T, return either T or a simplified version. */
tree
-fold_indirect_ref (tree t)
+fold_indirect_ref_loc (location_t loc, tree t)
{
- tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
+ tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
if (sub)
return sub;
This can only be applied to objects of a sizetype. */
tree
-round_up (tree value, int divisor)
+round_up_loc (location_t loc, tree value, int divisor)
{
tree div = NULL_TREE;
tree t;
t = build_int_cst (TREE_TYPE (value), divisor - 1);
- value = size_binop (PLUS_EXPR, value, t);
+ value = size_binop_loc (loc, PLUS_EXPR, value, t);
t = build_int_cst (TREE_TYPE (value), -divisor);
- value = size_binop (BIT_AND_EXPR, value, t);
+ value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
}
}
else
{
if (!div)
div = build_int_cst (TREE_TYPE (value), divisor);
- value = size_binop (CEIL_DIV_EXPR, value, div);
- value = size_binop (MULT_EXPR, value, div);
+ value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
+ value = size_binop_loc (loc, MULT_EXPR, value, div);
}
return value;
/* Likewise, but round down. */
tree
-round_down (tree value, int divisor)
+round_down_loc (location_t loc, tree value, int divisor)
{
tree div = NULL_TREE;
tree t;
t = build_int_cst (TREE_TYPE (value), -divisor);
- value = size_binop (BIT_AND_EXPR, value, t);
+ value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
}
else
{
if (!div)
div = build_int_cst (TREE_TYPE (value), divisor);
- value = size_binop (FLOOR_DIV_EXPR, value, div);
- value = size_binop (MULT_EXPR, value, div);
+ value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
+ value = size_binop_loc (loc, MULT_EXPR, value, div);
}
return value;
enum machine_mode mode;
int unsignedp, volatilep;
HOST_WIDE_INT bitsize;
+ location_t loc = EXPR_LOCATION (exp);
if (TREE_CODE (exp) == ADDR_EXPR)
{
core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
poffset, &mode, &unsignedp, &volatilep,
false);
- core = build_fold_addr_expr (core);
+ core = build_fold_addr_expr_loc (loc, core);
}
else
{
fold_strip_sign_ops (tree exp)
{
tree arg0, arg1;
+ location_t loc = EXPR_LOCATION (exp);
switch (TREE_CODE (exp))
{
arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
if (arg0 != NULL_TREE || arg1 != NULL_TREE)
- return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
+ return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
arg0 ? arg0 : TREE_OPERAND (exp, 0),
arg1 ? arg1 : TREE_OPERAND (exp, 1));
break;
arg0 = TREE_OPERAND (exp, 0);
arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
if (arg1)
- return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
+ return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
break;
case COND_EXPR:
arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
if (arg0 || arg1)
- return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
+ return fold_build3_loc (loc,
+ COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
arg0 ? arg0 : TREE_OPERAND (exp, 1),
arg1 ? arg1 : TREE_OPERAND (exp, 2));
break;
/* Strip copysign function call, return the 1st argument. */
arg0 = CALL_EXPR_ARG (exp, 0);
arg1 = CALL_EXPR_ARG (exp, 1);
- return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
+ return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
default:
/* Strip sign ops from the argument of "odd" math functions. */
{
arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
if (arg0)
- return build_call_expr (get_callee_fndecl (exp), 1, arg0);
+ return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
}
break;
}
+2009-07-17 Aldy Hernandez <aldyh@redhat.com>
+ Manuel López-Ibáñez <manu@gcc.gnu.org>
+
+ PR 40435
+ * trans-expr.c, trans-array.c, trans-openmp.c, trans-stmt.c,
+ trans.c, trans-io.c, trans-decl.c, trans-intrinsic.c: Add location
+ argument to fold_{unary,binary,ternary}, fold_build[123],
+ build_call_expr, build_size_arg, build_fold_addr_expr,
+ build_call_array, non_lvalue, size_diffop,
+ fold_build1_initializer, fold_build2_initializer,
+ fold_build3_initializer, fold_build_call_array,
+ fold_build_call_array_initializer, fold_single_bit_test,
+ omit_one_operand, omit_two_operands, invert_truthvalue,
+ fold_truth_not_expr, build_fold_indirect_ref, fold_indirect_ref,
+ combine_comparisons, fold_builtin_*, fold_call_expr,
+ build_range_check, maybe_fold_offset_to_address, round_up,
+ round_down.
+
2009-07-15 Janus Weil <janus@gcc.gnu.org>
PR fortran/40743
gcc_assert (tmp == gfc_get_element_type (TREE_TYPE (desc)));
packed = gfc_create_var (build_pointer_type (tmp), "data");
- tmp = build_call_expr (gfor_fndecl_in_pack, 1, initial);
+ tmp = build_call_expr_loc (input_location,
+ gfor_fndecl_in_pack, 1, initial);
tmp = fold_convert (TREE_TYPE (packed), tmp);
gfc_add_modify (pre, packed, tmp);
- tmp = build_fold_indirect_ref (initial);
+ tmp = build_fold_indirect_ref_loc (input_location,
+ initial);
source_data = gfc_conv_descriptor_data_get (tmp);
/* internal_pack may return source->data without any allocation
gfc_conv_expr (se, expr);
/* Store the value. */
- tmp = build_fold_indirect_ref (gfc_conv_descriptor_data_get (desc));
+ tmp = build_fold_indirect_ref_loc (input_location,
+ gfc_conv_descriptor_data_get (desc));
tmp = gfc_build_array_ref (tmp, offset, NULL);
if (expr->ts.type == BT_CHARACTER)
/* Use BUILTIN_MEMCPY to assign the values. */
tmp = gfc_conv_descriptor_data_get (desc);
- tmp = build_fold_indirect_ref (tmp);
+ tmp = build_fold_indirect_ref_loc (input_location,
+ tmp);
tmp = gfc_build_array_ref (tmp, *poffset, NULL);
tmp = gfc_build_addr_expr (NULL_TREE, tmp);
init = gfc_build_addr_expr (NULL_TREE, init);
size = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
bound = build_int_cst (NULL_TREE, n * size);
- tmp = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3,
+ tmp = build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_MEMCPY], 3,
tmp, init, bound);
gfc_add_expr_to_block (&body, tmp);
index, gfc_conv_array_stride (desc, 0));
/* Read the vector to get an index into info->descriptor. */
- data = build_fold_indirect_ref (gfc_conv_array_data (desc));
+ data = build_fold_indirect_ref_loc (input_location,
+ gfc_conv_array_data (desc));
index = gfc_build_array_ref (data, index, NULL);
index = gfc_evaluate_now (index, &se->pre);
if (se->ss->expr && is_subref_array (se->ss->expr))
decl = se->ss->expr->symtree->n.sym->backend_decl;
- tmp = build_fold_indirect_ref (info->data);
+ tmp = build_fold_indirect_ref_loc (input_location,
+ info->data);
se->expr = gfc_build_array_ref (tmp, index, decl);
}
type = TREE_TYPE (tmpdesc);
gcc_assert (GFC_ARRAY_TYPE_P (type));
dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
- dumdesc = build_fold_indirect_ref (dumdesc);
+ dumdesc = build_fold_indirect_ref_loc (input_location,
+ dumdesc);
gfc_start_block (&block);
if (sym->ts.type == BT_CHARACTER
gcc_assert (integer_onep (GFC_TYPE_ARRAY_STRIDE (type, 0)));
/* A library call to repack the array if necessary. */
tmp = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
- stmt_unpacked = build_call_expr (gfor_fndecl_in_pack, 1, tmp);
+ stmt_unpacked = build_call_expr_loc (input_location,
+ gfor_fndecl_in_pack, 1, tmp);
stride = gfc_index_one_node;
if (sym->attr.intent != INTENT_IN)
{
/* Copy the data back. */
- tmp = build_call_expr (gfor_fndecl_in_unpack, 2, dumdesc, tmpdesc);
+ tmp = build_call_expr_loc (input_location,
+ gfor_fndecl_in_unpack, 2, dumdesc, tmpdesc);
gfc_add_expr_to_block (&cleanup, tmp);
}
stmt = gfc_finish_block (&cleanup);
/* Only do the cleanup if the array was repacked. */
- tmp = build_fold_indirect_ref (dumdesc);
+ tmp = build_fold_indirect_ref_loc (input_location,
+ dumdesc);
tmp = gfc_conv_descriptor_data_get (tmp);
tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp, tmpdesc);
stmt = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
}
tmp = gfc_conv_array_data (desc);
- tmp = build_fold_indirect_ref (tmp);
+ tmp = build_fold_indirect_ref_loc (input_location,
+ tmp);
tmp = gfc_build_array_ref (tmp, offset, NULL);
/* Offset the data pointer for pointer assignments from arrays with
{
gfc_conv_expr (&rse, expr);
if (POINTER_TYPE_P (TREE_TYPE (rse.expr)))
- rse.expr = build_fold_indirect_ref (rse.expr);
+ rse.expr = build_fold_indirect_ref_loc (input_location,
+ rse.expr);
}
else
gfc_conv_expr_val (&rse, expr);
if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
*size = GFC_TYPE_ARRAY_SIZE (TREE_TYPE (desc));
else if (expr->rank > 1)
- *size = build_call_expr (gfor_fndecl_size0, 1,
+ *size = build_call_expr_loc (input_location,
+ gfor_fndecl_size0, 1,
gfc_build_addr_expr (NULL, desc));
else
{
if (g77 && TREE_TYPE (TREE_TYPE (se->expr)) != NULL_TREE
&& GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (se->expr))))
- se->expr = gfc_conv_array_data (build_fold_indirect_ref (se->expr));
+ se->expr = gfc_conv_array_data (build_fold_indirect_ref_loc (input_location,
+ se->expr));
return;
}
se->want_pointer = 1;
gfc_conv_expr_descriptor (se, expr, ss);
if (size)
- array_parameter_size (build_fold_indirect_ref (se->expr),
+ array_parameter_size (build_fold_indirect_ref_loc (input_location,
+ se->expr),
expr, size);
}
&& expr->ts.derived->attr.alloc_comp
&& expr->expr_type != EXPR_VARIABLE)
{
- tmp = build_fold_indirect_ref (se->expr);
+ tmp = build_fold_indirect_ref_loc (input_location,
+ se->expr);
tmp = gfc_deallocate_alloc_comp (expr->ts.derived, tmp, expr->rank);
gfc_add_expr_to_block (&se->post, tmp);
}
gfc_warning ("Creating array temporary at %L", &expr->where);
}
- ptr = build_call_expr (gfor_fndecl_in_pack, 1, desc);
+ ptr = build_call_expr_loc (input_location,
+ gfor_fndecl_in_pack, 1, desc);
if (fsym && fsym->attr.optional && sym && sym->attr.optional)
{
else
asprintf (&msg, "An array temporary was created");
- tmp = build_fold_indirect_ref (desc);
+ tmp = build_fold_indirect_ref_loc (input_location,
+ desc);
tmp = gfc_conv_array_data (tmp);
tmp = fold_build2 (NE_EXPR, boolean_type_node,
fold_convert (TREE_TYPE (tmp), ptr), tmp);
/* Copy the data back. */
if (fsym == NULL || fsym->attr.intent != INTENT_IN)
{
- tmp = build_call_expr (gfor_fndecl_in_unpack, 2, desc, ptr);
+ tmp = build_call_expr_loc (input_location,
+ gfor_fndecl_in_unpack, 2, desc, ptr);
gfc_add_expr_to_block (&block, tmp);
}
gfc_init_block (&block);
/* Only if it was repacked. This code needs to be executed before the
loop cleanup code. */
- tmp = build_fold_indirect_ref (desc);
+ tmp = build_fold_indirect_ref_loc (input_location,
+ desc);
tmp = gfc_conv_array_data (tmp);
tmp = fold_build2 (NE_EXPR, boolean_type_node,
fold_convert (TREE_TYPE (tmp), ptr), tmp);
/* We know the temporary and the value will be the same length,
so can use memcpy. */
tmp = built_in_decls[BUILT_IN_MEMCPY];
- tmp = build_call_expr (tmp, 3, gfc_conv_descriptor_data_get (dest),
+ tmp = build_call_expr_loc (input_location,
+ tmp, 3, gfc_conv_descriptor_data_get (dest),
gfc_conv_descriptor_data_get (src), size);
gfc_add_expr_to_block (&block, tmp);
tmp = gfc_finish_block (&block);
gfc_init_block (&fnblock);
if (POINTER_TYPE_P (TREE_TYPE (decl)))
- decl = build_fold_indirect_ref (decl);
+ decl = build_fold_indirect_ref_loc (input_location,
+ decl);
/* If this an array of derived types with allocatable components
build a loop and recursively call this function. */
|| GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
{
tmp = gfc_conv_array_data (decl);
- var = build_fold_indirect_ref (tmp);
+ var = build_fold_indirect_ref_loc (input_location,
+ tmp);
/* Get the number of elements - 1 and set the counter. */
if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
tmp = gfc_duplicate_allocatable (dest, decl, TREE_TYPE(decl), rank);
gfc_add_expr_to_block (&fnblock, tmp);
}
- tmp = build_fold_indirect_ref (gfc_conv_array_data (dest));
+ tmp = build_fold_indirect_ref_loc (input_location,
+ gfc_conv_array_data (dest));
dref = gfc_build_array_ref (tmp, index, NULL);
tmp = structure_alloc_comps (der_type, vref, dref, rank, purpose);
}
{
/* If the backend_decl is not a descriptor, we must have a pointer
to one. */
- descriptor = build_fold_indirect_ref (sym->backend_decl);
+ descriptor = build_fold_indirect_ref_loc (input_location,
+ sym->backend_decl);
type = TREE_TYPE (descriptor);
}
/* Parameters need to be dereferenced. */
if (sym->cp_pointer->attr.dummy)
- ptr_decl = build_fold_indirect_ref (ptr_decl);
+ ptr_decl = build_fold_indirect_ref_loc (input_location,
+ ptr_decl);
/* Check to see if we're dealing with a variable-sized array. */
if (sym->attr.dimension
{
ptr_decl = convert (build_pointer_type (TREE_TYPE (decl)),
ptr_decl);
- value = build_fold_indirect_ref (ptr_decl);
+ value = build_fold_indirect_ref_loc (input_location,
+ ptr_decl);
}
SET_DECL_VALUE_EXPR (decl, value);
args = nreverse (args);
args = chainon (args, nreverse (string_args));
tmp = ns->proc_name->backend_decl;
- tmp = build_function_call_expr (tmp, args);
+ tmp = build_function_call_expr (input_location, tmp, args);
if (ns->proc_name->attr.mixed_entry_master)
{
tree union_decl, field;
/* Call _gfortran_set_args (argc, argv). */
TREE_USED (argc) = 1;
TREE_USED (argv) = 1;
- tmp = build_call_expr (gfor_fndecl_set_args, 2, argc, argv);
+ tmp = build_call_expr_loc (input_location,
+ gfor_fndecl_set_args, 2, argc, argv);
gfc_add_expr_to_block (&body, tmp);
/* Add a call to set_options to set up the runtime library Fortran
DECL_INITIAL (var) = array;
var = gfc_build_addr_expr (build_pointer_type (integer_type_node), var);
- tmp = build_call_expr (gfor_fndecl_set_options, 2,
+ tmp = build_call_expr_loc (input_location,
+ gfor_fndecl_set_options, 2,
build_int_cst (integer_type_node, 8), var);
gfc_add_expr_to_block (&body, tmp);
}
the library will raise a FPE when needed. */
if (gfc_option.fpe != 0)
{
- tmp = build_call_expr (gfor_fndecl_set_fpe, 1,
+ tmp = build_call_expr_loc (input_location,
+ gfor_fndecl_set_fpe, 1,
build_int_cst (integer_type_node,
gfc_option.fpe));
gfc_add_expr_to_block (&body, tmp);
if (gfc_option.convert != GFC_CONVERT_NATIVE)
{
- tmp = build_call_expr (gfor_fndecl_set_convert, 1,
+ tmp = build_call_expr_loc (input_location,
+ gfor_fndecl_set_convert, 1,
build_int_cst (integer_type_node,
gfc_option.convert));
gfc_add_expr_to_block (&body, tmp);
if (gfc_option.record_marker != 0)
{
- tmp = build_call_expr (gfor_fndecl_set_record_marker, 1,
+ tmp = build_call_expr_loc (input_location,
+ gfor_fndecl_set_record_marker, 1,
build_int_cst (integer_type_node,
gfc_option.record_marker));
gfc_add_expr_to_block (&body, tmp);
if (gfc_option.max_subrecord_length != 0)
{
- tmp = build_call_expr (gfor_fndecl_set_max_subrecord_length, 1,
+ tmp = build_call_expr_loc (input_location,
+ gfor_fndecl_set_max_subrecord_length, 1,
build_int_cst (integer_type_node,
gfc_option.max_subrecord_length));
gfc_add_expr_to_block (&body, tmp);
}
/* Call MAIN__(). */
- tmp = build_call_expr (fndecl, 0);
+ tmp = build_call_expr_loc (input_location,
+ fndecl, 0);
gfc_add_expr_to_block (&body, tmp);
/* Mark MAIN__ as used. */
for (; gfc_static_ctors; gfc_static_ctors = TREE_CHAIN (gfc_static_ctors))
{
- tmp = build_call_expr (TREE_VALUE (gfc_static_ctors), 0);
+ tmp = build_call_expr_loc (input_location,
+ TREE_VALUE (gfc_static_ctors), 0);
DECL_SAVED_TREE (fndecl) = build_stmt (input_location, EXPR_STMT, tmp);
}
{
/* Create a temporary and convert it to the correct type. */
tmp = gfc_get_int_type (kind);
- tmp = fold_convert (tmp, build_fold_indirect_ref (se->expr));
+ tmp = fold_convert (tmp, build_fold_indirect_ref_loc (input_location,
+ se->expr));
/* Test for a NULL value. */
tmp = build3 (COND_EXPR, TREE_TYPE (tmp), present, tmp,
if (TYPE_STRING_FLAG (TREE_TYPE (se->expr)))
tmp = se->expr;
else
- tmp = build_fold_indirect_ref (se->expr);
+ tmp = build_fold_indirect_ref_loc (input_location,
+ se->expr);
tmp = gfc_build_array_ref (tmp, start.expr, NULL);
se->expr = gfc_build_addr_expr (type, tmp);
}
if ((c->attr.pointer && c->attr.dimension == 0 && c->ts.type != BT_CHARACTER)
|| c->attr.proc_pointer)
- se->expr = build_fold_indirect_ref (se->expr);
+ se->expr = build_fold_indirect_ref_loc (input_location,
+ se->expr);
}
&& (sym->attr.dummy
|| sym->attr.function
|| sym->attr.result))
- se->expr = build_fold_indirect_ref (se->expr);
+ se->expr = build_fold_indirect_ref_loc (input_location,
+ se->expr);
}
else if (!sym->attr.value)
{
/* Dereference non-character scalar dummy arguments. */
if (sym->attr.dummy && !sym->attr.dimension)
- se->expr = build_fold_indirect_ref (se->expr);
+ se->expr = build_fold_indirect_ref_loc (input_location,
+ se->expr);
/* Dereference scalar hidden result. */
if (gfc_option.flag_f2c && sym->ts.type == BT_COMPLEX
&& (sym->attr.function || sym->attr.result)
&& !sym->attr.dimension && !sym->attr.pointer
&& !sym->attr.always_explicit)
- se->expr = build_fold_indirect_ref (se->expr);
+ se->expr = build_fold_indirect_ref_loc (input_location,
+ se->expr);
/* Dereference non-character pointer variables.
These must be dummies, results, or scalars. */
|| sym->attr.function
|| sym->attr.result
|| !sym->attr.dimension))
- se->expr = build_fold_indirect_ref (se->expr);
+ se->expr = build_fold_indirect_ref_loc (input_location,
+ se->expr);
}
ref = expr->ref;
break;
}
- se->expr = build_call_expr (fndecl, 2, lse.expr, rse.expr);
+ se->expr = build_call_expr_loc (input_location,
+ fndecl, 2, lse.expr, rse.expr);
}
else
gcc_unreachable ();
- tmp = build_call_expr (fndecl, 6, len, var, lse.string_length, lse.expr,
+ tmp = build_call_expr_loc (input_location,
+ fndecl, 6, len, var, lse.string_length, lse.expr,
rse.string_length, rse.expr);
gfc_add_expr_to_block (&se->pre, tmp);
&& TREE_INT_CST_HIGH (len) == 0)
{
str = fold_convert (gfc_get_pchar_type (kind), str);
- return build_fold_indirect_ref (str);
+ return build_fold_indirect_ref_loc (input_location,
+ str);
}
return NULL_TREE;
else
gcc_unreachable ();
- tmp = build_call_expr (fndecl, 4, len1, str1, len2, str2);
+ tmp = build_call_expr_loc (input_location,
+ fndecl, 4, len1, str1, len2, str2);
}
return tmp;
{
tmp = gfc_get_symbol_decl (sym);
if (sym->attr.proc_pointer)
- tmp = build_fold_indirect_ref (tmp);
+ tmp = build_fold_indirect_ref_loc (input_location,
+ tmp);
gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == POINTER_TYPE
&& TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) == FUNCTION_TYPE);
}
tmp = gfc_get_character_type_len (sym->ts.kind, NULL);
tmp = build_pointer_type (tmp);
if (sym->attr.pointer)
- value = build_fold_indirect_ref (se->expr);
+ value = build_fold_indirect_ref_loc (input_location,
+ se->expr);
else
value = se->expr;
value = fold_convert (tmp, value);
/* If the argument is a scalar, a pointer to an array or an allocatable,
dereference it. */
else if (!sym->attr.dimension || sym->attr.pointer || sym->attr.allocatable)
- value = build_fold_indirect_ref (se->expr);
+ value = build_fold_indirect_ref_loc (input_location,
+ se->expr);
/* For character(*), use the actual argument's descriptor. */
else if (sym->ts.type == BT_CHARACTER && !new_sym->ts.cl->length)
- value = build_fold_indirect_ref (se->expr);
+ value = build_fold_indirect_ref_loc (input_location,
+ se->expr);
/* If the argument is an array descriptor, use it to determine
information about the actual argument's shape. */
&& GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (se->expr))))
{
/* Get the actual argument's descriptor. */
- desc = build_fold_indirect_ref (se->expr);
+ desc = build_fold_indirect_ref_loc (input_location,
+ se->expr);
/* Create the replacement variable. */
tmp = gfc_conv_descriptor_data_get (desc);
rse.loop->loopvar[0], offset);
/* Now use the offset for the reference. */
- tmp = build_fold_indirect_ref (info->data);
+ tmp = build_fold_indirect_ref_loc (input_location,
+ info->data);
rse.expr = gfc_build_array_ref (tmp, tmp_index, NULL);
if (expr->ts.type == BT_CHARACTER)
if (fsym && fsym->attr.allocatable
&& fsym->attr.intent == INTENT_OUT)
{
- tmp = build_fold_indirect_ref (parmse.expr);
+ tmp = build_fold_indirect_ref_loc (input_location,
+ parmse.expr);
tmp = gfc_trans_dealloc_allocated (tmp);
gfc_add_expr_to_block (&se->pre, tmp);
}
&& (e->expr_type != EXPR_VARIABLE && !e->rank))
{
int parm_rank;
- tmp = build_fold_indirect_ref (parmse.expr);
+ tmp = build_fold_indirect_ref_loc (input_location,
+ parmse.expr);
parm_rank = e->rank;
switch (parm_kind)
{
break;
case (SCALAR_POINTER):
- tmp = build_fold_indirect_ref (tmp);
+ tmp = build_fold_indirect_ref_loc (input_location,
+ tmp);
break;
}
&& TREE_TYPE (TREE_TYPE (TREE_TYPE (se->expr)))
&& GFC_DESCRIPTOR_TYPE_P
(TREE_TYPE (TREE_TYPE (TREE_TYPE (se->expr)))))
- se->expr = build_fold_indirect_ref (se->expr);
+ se->expr = build_fold_indirect_ref_loc (input_location,
+ se->expr);
retargs = gfc_chainon_list (retargs, se->expr);
}
where f is pointer valued, we have to dereference the result. */
if (!se->want_pointer && !byref && sym->attr.pointer
&& !gfc_is_proc_ptr_comp (expr, NULL))
- se->expr = build_fold_indirect_ref (se->expr);
+ se->expr = build_fold_indirect_ref_loc (input_location,
+ se->expr);
/* f2c calling conventions require a scalar default real function to
return a double precision result. Convert this back to default
{
/* Dereference for character pointer results. */
if (sym->attr.pointer || sym->attr.allocatable)
- se->expr = build_fold_indirect_ref (var);
+ se->expr = build_fold_indirect_ref_loc (input_location,
+ var);
else
se->expr = var;
else
{
gcc_assert (sym->ts.type == BT_COMPLEX && gfc_option.flag_f2c);
- se->expr = build_fold_indirect_ref (var);
+ se->expr = build_fold_indirect_ref_loc (input_location,
+ var);
}
}
}
/* For a simple char type, we can call memset(). */
if (compare_tree_int (TYPE_SIZE_UNIT (type), 1) == 0)
- return build_call_expr (built_in_decls[BUILT_IN_MEMSET], 3, start,
+ return build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_MEMSET], 3, start,
build_int_cst (gfc_get_int_type (gfc_c_int_kind),
lang_hooks.to_target_charset (' ')),
size);
/* Truncate string if source is too long. */
cond2 = fold_build2 (GE_EXPR, boolean_type_node, slen, dlen);
- tmp2 = build_call_expr (built_in_decls[BUILT_IN_MEMMOVE],
+ tmp2 = build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_MEMMOVE],
3, dest, src, dlen);
/* Else copy and pad with spaces. */
- tmp3 = build_call_expr (built_in_decls[BUILT_IN_MEMMOVE],
+ tmp3 = build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_MEMMOVE],
3, dest, src, slen);
tmp4 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest,
e2 = gfc_copy_expr (e);
e2->expr_type = EXPR_VARIABLE;
gfc_conv_expr (&comp_se, e2);
- comp_se.expr = build_fold_addr_expr (comp_se.expr);
+ comp_se.expr = build_fold_addr_expr_loc (input_location, comp_se.expr);
return gfc_evaluate_now (comp_se.expr, &se->pre);
}
if (expr1->symtree->n.sym->attr.proc_pointer
&& expr1->symtree->n.sym->attr.dummy)
- lse.expr = build_fold_indirect_ref (lse.expr);
+ lse.expr = build_fold_indirect_ref_loc (input_location,
+ lse.expr);
if (expr2->symtree && expr2->symtree->n.sym->attr.proc_pointer
&& expr2->symtree->n.sym->attr.dummy)
- rse.expr = build_fold_indirect_ref (rse.expr);
+ rse.expr = build_fold_indirect_ref_loc (input_location,
+ rse.expr);
gfc_add_block_to_block (&block, &lse.pre);
gfc_add_block_to_block (&block, &rse.pre);
len = fold_convert (size_type_node, len);
/* Construct call to __builtin_memset. */
- tmp = build_call_expr (built_in_decls[BUILT_IN_MEMSET],
+ tmp = build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_MEMSET],
3, dest, integer_zero_node, len);
return fold_convert (void_type_node, tmp);
}
len = fold_convert (size_type_node, len);
/* Construct call to __builtin_memcpy. */
- tmp = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3, dst, src, len);
+ tmp = build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_MEMCPY], 3, dst, src, len);
return fold_convert (void_type_node, tmp);
}
/* Call the library function that will perform the conversion. */
gcc_assert (nargs >= 2);
- tmp = build_call_expr (fndecl, 3, addr, args[0], args[1]);
+ tmp = build_call_expr_loc (input_location,
+ fndecl, 3, addr, args[0], args[1]);
gfc_add_expr_to_block (&se->pre, tmp);
/* Free the temporary afterwards. */
else
gcc_unreachable ();
- return fold_convert (restype, build_call_expr (fn, 1, arg));
+ return fold_convert (restype, build_call_expr_loc (input_location,
+ fn, 1, arg));
}
if (n != END_BUILTINS)
{
tmp = built_in_decls[n];
- se->expr = build_call_expr (tmp, 1, arg[0]);
+ se->expr = build_call_expr_loc (input_location,
+ tmp, 1, arg[0]);
return;
}
rettype = TREE_TYPE (TREE_TYPE (fndecl));
fndecl = build_addr (fndecl, current_function_decl);
- se->expr = build_call_array (rettype, fndecl, num_args, args);
+ se->expr = build_call_array_loc (input_location, rettype, fndecl, num_args, args);
}
gfc_conv_intrinsic_function_args (se, expr, &arg, 1);
res = gfc_create_var (integer_type_node, NULL);
- tmp = build_call_expr (built_in_decls[frexp], 2, arg,
+ tmp = build_call_expr_loc (input_location,
+ built_in_decls[frexp], 2, arg,
gfc_build_addr_expr (NULL_TREE, res));
gfc_add_expr_to_block (&se->pre, tmp);
default:
gcc_unreachable ();
}
- se->expr = build_call_expr (built_in_decls[n], 1, arg);
+ se->expr = build_call_expr_loc (input_location,
+ built_in_decls[n], 1, arg);
break;
default:
if (n != END_BUILTINS)
{
tmp = build_addr (built_in_decls[n], current_function_decl);
- se->expr = build_call_array (TREE_TYPE (TREE_TYPE (built_in_decls[n])),
+ se->expr = build_call_array_loc (input_location,
+ TREE_TYPE (TREE_TYPE (built_in_decls[n])),
tmp, 2, args);
if (modulo == 0)
return;
build_call_expr (tmp, 2, args[0], args[1]));
}
else
- se->expr = build_call_expr (tmp, 2, args[0], args[1]);
+ se->expr = build_call_expr_loc (input_location,
+ tmp, 2, args[0], args[1]);
return;
}
args[1] = gfc_build_addr_expr (NULL_TREE, len);
fndecl = build_addr (gfor_fndecl_ctime, current_function_decl);
- tmp = build_call_array (TREE_TYPE (TREE_TYPE (gfor_fndecl_ctime)),
+ tmp = build_call_array_loc (input_location,
+ TREE_TYPE (TREE_TYPE (gfor_fndecl_ctime)),
fndecl, num_args, args);
gfc_add_expr_to_block (&se->pre, tmp);
args[1] = gfc_build_addr_expr (NULL_TREE, len);
fndecl = build_addr (gfor_fndecl_fdate, current_function_decl);
- tmp = build_call_array (TREE_TYPE (TREE_TYPE (gfor_fndecl_fdate)),
+ tmp = build_call_array_loc (input_location,
+ TREE_TYPE (TREE_TYPE (gfor_fndecl_fdate)),
fndecl, num_args, args);
gfc_add_expr_to_block (&se->pre, tmp);
args[1] = gfc_build_addr_expr (NULL_TREE, len);
fndecl = build_addr (gfor_fndecl_ttynam, current_function_decl);
- tmp = build_call_array (TREE_TYPE (TREE_TYPE (gfor_fndecl_ttynam)),
+ tmp = build_call_array_loc (input_location,
+ TREE_TYPE (TREE_TYPE (gfor_fndecl_ttynam)),
fndecl, num_args, args);
gfc_add_expr_to_block (&se->pre, tmp);
if (argexpr->expr->expr_type == EXPR_VARIABLE
&& argexpr->expr->symtree->n.sym->attr.optional
&& TREE_CODE (val) == INDIRECT_REF)
- cond = fold_build2
- (NE_EXPR, boolean_type_node, TREE_OPERAND (val, 0),
- build_int_cst (TREE_TYPE (TREE_OPERAND (val, 0)), 0));
+ cond = fold_build2_loc (input_location,
+ NE_EXPR, boolean_type_node,
+ TREE_OPERAND (val, 0),
+ build_int_cst (TREE_TYPE (TREE_OPERAND (val, 0)), 0));
else
{
cond = NULL_TREE;
to help performance of programs that don't rely on IEEE semantics. */
if (FLOAT_TYPE_P (TREE_TYPE (mvar)))
{
- isnan = build_call_expr (built_in_decls[BUILT_IN_ISNAN], 1, mvar);
+ isnan = build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_ISNAN], 1, mvar);
tmp = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, tmp,
fold_convert (boolean_type_node, isnan));
}
/* Make the function call. */
fndecl = build_addr (function, current_function_decl);
- tmp = build_call_array (TREE_TYPE (TREE_TYPE (function)), fndecl,
+ tmp = build_call_array_loc (input_location,
+ TREE_TYPE (TREE_TYPE (function)), fndecl,
nargs + 4, args);
gfc_add_expr_to_block (&se->pre, tmp);
default:
gcc_unreachable ();
}
- se->expr = build_call_expr (tmp, 3, args[0], args[1], args[2]);
+ se->expr = build_call_expr_loc (input_location,
+ tmp, 3, args[0], args[1], args[2]);
/* Convert the result back to the original type, if we extended
the first argument's width above. */
if (expr->ts.kind < 4)
result_type = gfc_get_int_type (gfc_default_integer_kind);
/* Compute TRAILZ for the case i .ne. 0. */
- trailz = fold_convert (result_type, build_call_expr (func, 1, arg));
+ trailz = fold_convert (result_type, build_call_expr_loc (input_location,
+ func, 1, arg));
/* Build BIT_SIZE. */
bit_size = build_int_cst (result_type, argsize);
else
gcc_unreachable ();
- se->expr = build_call_expr (fndecl, 2, args[0], args[1]);
+ se->expr = build_call_expr_loc (input_location,
+ fndecl, 2, args[0], args[1]);
se->expr = convert (type, se->expr);
}
args[4] = convert (logical4_type_node, args[4]);
fndecl = build_addr (function, current_function_decl);
- se->expr = build_call_array (TREE_TYPE (TREE_TYPE (function)), fndecl,
+ se->expr = build_call_array_loc (input_location,
+ TREE_TYPE (TREE_TYPE (function)), fndecl,
5, args);
se->expr = convert (type, se->expr);
args[1] = fold_build1 (NOP_EXPR, pchartype, args[1]);
type = gfc_typenode_for_spec (&expr->ts);
- se->expr = build_fold_indirect_ref (args[1]);
+ se->expr = build_fold_indirect_ref_loc (input_location,
+ args[1]);
se->expr = convert (type, se->expr);
}
tree arg;
gfc_conv_intrinsic_function_args (se, expr, &arg, 1);
- se->expr = build_call_expr (built_in_decls[BUILT_IN_ISNAN], 1, arg);
+ se->expr = build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_ISNAN], 1, arg);
STRIP_TYPE_NOPS (se->expr);
se->expr = fold_convert (gfc_typenode_for_spec (&expr->ts), se->expr);
}
type = gfc_typenode_for_spec (&expr->ts);
gfc_conv_intrinsic_function_args (se, expr, &arg, 1);
tmp = gfc_create_var (integer_type_node, NULL);
- se->expr = build_call_expr (built_in_decls[frexp], 2,
+ se->expr = build_call_expr_loc (input_location,
+ built_in_decls[frexp], 2,
fold_convert (type, arg),
gfc_build_addr_expr (NULL_TREE, tmp));
se->expr = fold_convert (type, se->expr);
type = gfc_typenode_for_spec (&expr->ts);
gfc_conv_intrinsic_function_args (se, expr, args, 2);
- tmp = build_call_expr (built_in_decls[copysign], 2,
- build_call_expr (built_in_decls[huge_val], 0),
+ tmp = build_call_expr_loc (input_location,
+ built_in_decls[copysign], 2,
+ build_call_expr_loc (input_location,
+ built_in_decls[huge_val], 0),
fold_convert (type, args[1]));
- se->expr = build_call_expr (built_in_decls[nextafter], 2,
+ se->expr = build_call_expr_loc (input_location,
+ built_in_decls[nextafter], 2,
fold_convert (type, args[0]), tmp);
se->expr = fold_convert (type, se->expr);
}
/* Build the block for s /= 0. */
gfc_start_block (&block);
- tmp = build_call_expr (built_in_decls[frexp], 2, arg,
+ tmp = build_call_expr_loc (input_location,
+ built_in_decls[frexp], 2, arg,
gfc_build_addr_expr (NULL_TREE, e));
gfc_add_expr_to_block (&block, tmp);
tmp = fold_build2 (MINUS_EXPR, integer_type_node, e, prec);
gfc_add_modify (&block, e, fold_build2 (MAX_EXPR, integer_type_node,
- tmp, emin));
+ tmp, emin));
- tmp = build_call_expr (built_in_decls[scalbn], 2,
+ tmp = build_call_expr_loc (input_location,
+ built_in_decls[scalbn], 2,
build_real_from_int_cst (type, integer_one_node), e);
gfc_add_modify (&block, res, tmp);
e = gfc_create_var (integer_type_node, NULL);
x = gfc_create_var (type, NULL);
gfc_add_modify (&se->pre, x,
- build_call_expr (built_in_decls[fabs], 1, arg));
+ build_call_expr_loc (input_location,
+ built_in_decls[fabs], 1, arg));
gfc_start_block (&block);
- tmp = build_call_expr (built_in_decls[frexp], 2, arg,
+ tmp = build_call_expr_loc (input_location,
+ built_in_decls[frexp], 2, arg,
gfc_build_addr_expr (NULL_TREE, e));
gfc_add_expr_to_block (&block, tmp);
tmp = fold_build2 (MINUS_EXPR, integer_type_node,
build_int_cst (NULL_TREE, prec), e);
- tmp = build_call_expr (built_in_decls[scalbn], 2, x, tmp);
+ tmp = build_call_expr_loc (input_location,
+ built_in_decls[scalbn], 2, x, tmp);
gfc_add_modify (&block, x, tmp);
stmt = gfc_finish_block (&block);
type = gfc_typenode_for_spec (&expr->ts);
gfc_conv_intrinsic_function_args (se, expr, args, 2);
- se->expr = build_call_expr (built_in_decls[scalbn], 2,
+ se->expr = build_call_expr_loc (input_location,
+ built_in_decls[scalbn], 2,
fold_convert (type, args[0]),
fold_convert (integer_type_node, args[1]));
se->expr = fold_convert (type, se->expr);
gfc_conv_intrinsic_function_args (se, expr, args, 2);
tmp = gfc_create_var (integer_type_node, NULL);
- tmp = build_call_expr (built_in_decls[frexp], 2,
+ tmp = build_call_expr_loc (input_location,
+ built_in_decls[frexp], 2,
fold_convert (type, args[0]),
gfc_build_addr_expr (NULL_TREE, tmp));
- se->expr = build_call_expr (built_in_decls[scalbn], 2, tmp,
+ se->expr = build_call_expr_loc (input_location,
+ built_in_decls[scalbn], 2, tmp,
fold_convert (integer_type_node, args[1]));
se->expr = fold_convert (type, se->expr);
}
arg1 = gfc_evaluate_now (argse.expr, &se->pre);
/* Build the call to size0. */
- fncall0 = build_call_expr (gfor_fndecl_size0, 1, arg1);
+ fncall0 = build_call_expr_loc (input_location,
+ gfor_fndecl_size0, 1, arg1);
actual = actual->next;
{
tree tmp;
/* Build the call to size1. */
- fncall1 = build_call_expr (gfor_fndecl_size1, 2,
+ fncall1 = build_call_expr_loc (input_location,
+ gfor_fndecl_size1, 2,
arg1, argse.expr);
gfc_init_se (&argse, NULL);
{
tree ubound, lbound;
- arg1 = build_fold_indirect_ref (arg1);
+ arg1 = build_fold_indirect_ref_loc (input_location,
+ arg1);
ubound = gfc_conv_descriptor_ubound_get (arg1, argse.expr);
lbound = gfc_conv_descriptor_lbound_get (arg1, argse.expr);
se->expr = fold_build2 (MINUS_EXPR, gfc_array_index_type,
gfc_conv_expr_reference (&argse, arg);
source = argse.expr;
- type = TREE_TYPE (build_fold_indirect_ref (argse.expr));
+ type = TREE_TYPE (build_fold_indirect_ref_loc (input_location,
+ argse.expr));
/* Obtain the source word length. */
if (arg->ts.type == BT_CHARACTER)
var = gfc_conv_string_tmp (se, type, len);
args[0] = var;
- tmp = build_call_expr (fndecl, 3, args[0], args[1], args[2]);
+ tmp = build_call_expr_loc (input_location,
+ fndecl, 3, args[0], args[1], args[2]);
gfc_add_expr_to_block (&se->pre, tmp);
se->expr = var;
se->string_length = len;
gfc_conv_expr_reference (&argse, arg->expr);
source = argse.expr;
- source_type = TREE_TYPE (build_fold_indirect_ref (argse.expr));
+ source_type = TREE_TYPE (build_fold_indirect_ref_loc (input_location,
+ argse.expr));
/* Obtain the source word length. */
if (arg->expr->ts.type == BT_CHARACTER)
if (gfc_option.warn_array_temp)
gfc_warning ("Creating array temporary at %L", &expr->where);
- source = build_call_expr (gfor_fndecl_in_pack, 1, tmp);
+ source = build_call_expr_loc (input_location,
+ gfor_fndecl_in_pack, 1, tmp);
source = gfc_evaluate_now (source, &argse.pre);
/* Free the temporary. */
if (ss == gfc_ss_terminator)
{
gfc_conv_expr_reference (&argse, arg->expr);
- mold_type = TREE_TYPE (build_fold_indirect_ref (argse.expr));
+ mold_type = TREE_TYPE (build_fold_indirect_ref_loc (input_location,
+ argse.expr));
}
else
{
gfc_init_se (&argse, NULL);
gfc_conv_expr_reference (&argse, arg->expr);
tmp = convert (gfc_array_index_type,
- build_fold_indirect_ref (argse.expr));
+ build_fold_indirect_ref_loc (input_location,
+ argse.expr));
gfc_add_block_to_block (&se->pre, &argse.pre);
gfc_add_block_to_block (&se->post, &argse.post);
}
tmp = fold_convert (pvoid_type_node, tmp);
/* Use memcpy to do the transfer. */
- tmp = build_call_expr (built_in_decls[BUILT_IN_MEMCPY],
+ tmp = build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_MEMCPY],
3,
tmp,
fold_convert (pvoid_type_node, source),
tmp = gfc_call_malloc (&block, tmp, dest_word_len);
gfc_add_modify (&block, tmpdecl,
fold_convert (TREE_TYPE (ptr), tmp));
- tmp = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3,
+ tmp = build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_MEMCPY], 3,
fold_convert (pvoid_type_node, tmpdecl),
fold_convert (pvoid_type_node, ptr),
extent);
/* Use memcpy to do the transfer. */
tmp = gfc_build_addr_expr (NULL_TREE, tmpdecl);
- tmp = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3,
+ tmp = build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_MEMCPY], 3,
fold_convert (pvoid_type_node, tmp),
fold_convert (pvoid_type_node, ptr),
extent);
gfc_conv_expr_descriptor (&arg2se, arg2->expr, ss2);
gfc_add_block_to_block (&se->pre, &arg2se.pre);
gfc_add_block_to_block (&se->post, &arg2se.post);
- se->expr = build_call_expr (gfor_fndecl_associated, 2,
+ se->expr = build_call_expr_loc (input_location,
+ gfor_fndecl_associated, 2,
arg1se.expr, arg2se.expr);
se->expr = convert (boolean_type_node, se->expr);
se->expr = fold_build2 (TRUTH_AND_EXPR, boolean_type_node,
tree args[2];
gfc_conv_intrinsic_function_args (se, expr, args, 2);
- se->expr = build_call_expr (gfor_fndecl_sc_kind, 2, args[0], args[1]);
+ se->expr = build_call_expr_loc (input_location,
+ gfor_fndecl_sc_kind, 2, args[0], args[1]);
se->expr = fold_convert (gfc_typenode_for_spec (&expr->ts), se->expr);
}
/* Convert it to the required type. */
type = gfc_typenode_for_spec (&expr->ts);
- se->expr = build_call_expr (gfor_fndecl_si_kind, 1, arg);
+ se->expr = build_call_expr_loc (input_location,
+ gfor_fndecl_si_kind, 1, arg);
se->expr = fold_convert (type, se->expr);
}
/* Convert it to the required type. */
type = gfc_typenode_for_spec (&expr->ts);
- se->expr = build_function_call_expr (gfor_fndecl_sr_kind, args);
+ se->expr = build_function_call_expr (input_location,
+ gfor_fndecl_sr_kind, args);
se->expr = fold_convert (type, se->expr);
}
gcc_unreachable ();
fndecl = build_addr (function, current_function_decl);
- tmp = build_call_array (TREE_TYPE (TREE_TYPE (function)), fndecl,
+ tmp = build_call_array_loc (input_location,
+ TREE_TYPE (TREE_TYPE (function)), fndecl,
num_args, args);
gfc_add_expr_to_block (&se->pre, tmp);
tmp = fold_build2 (POINTER_PLUS_EXPR, pvoid_type_node,
fold_convert (pvoid_type_node, dest),
fold_convert (sizetype, tmp));
- tmp = build_call_expr (built_in_decls[BUILT_IN_MEMMOVE], 3, tmp, src,
+ tmp = build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_MEMMOVE], 3, tmp, src,
fold_build2 (MULT_EXPR, size_type_node, slen,
fold_convert (size_type_node, size)));
gfc_add_expr_to_block (&body, tmp);
/* Call the library function. This always returns an INTEGER(4). */
fndecl = gfor_fndecl_iargc;
- tmp = build_call_expr (fndecl, 0);
+ tmp = build_call_expr_loc (input_location,
+ fndecl, 0);
/* Convert it to the required type. */
type = gfc_typenode_for_spec (&expr->ts);
gfc_build_localized_cstring_const (message));
gfc_free(message);
- tmp = build_call_expr (gfor_fndecl_generate_error, 3, arg1, arg2, arg3);
+ tmp = build_call_expr_loc (input_location,
+ gfor_fndecl_generate_error, 3, arg1, arg2, arg3);
gfc_add_expr_to_block (&block, tmp);
/* Tell the compiler that this isn't likely. */
cond = fold_convert (long_integer_type_node, cond);
tmp = build_int_cst (long_integer_type_node, 0);
- cond = build_call_expr (built_in_decls[BUILT_IN_EXPECT], 2, cond, tmp);
+ cond = build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_EXPECT], 2, cond, tmp);
cond = fold_convert (boolean_type_node, cond);
tmp = build3_v (COND_EXPR, cond, body, build_empty_stmt (input_location));
or substring array references. */
gfc_conv_subref_array_arg (&se, e, 0,
last_dt == READ ? INTENT_IN : INTENT_OUT);
- tmp = build_fold_indirect_ref (se.expr);
+ tmp = build_fold_indirect_ref_loc (input_location,
+ se.expr);
se.expr = gfc_build_addr_expr (pchar_type_node, tmp);
tmp = gfc_conv_descriptor_data_get (tmp);
}
set_parameter_const (&block, var, IOPARM_common_unit, 0);
tmp = gfc_build_addr_expr (NULL_TREE, var);
- tmp = build_call_expr (iocall[IOCALL_OPEN], 1, tmp);
+ tmp = build_call_expr_loc (input_location,
+ iocall[IOCALL_OPEN], 1, tmp);
gfc_add_expr_to_block (&block, tmp);
gfc_add_block_to_block (&block, &post_block);
set_parameter_const (&block, var, IOPARM_common_unit, 0);
tmp = gfc_build_addr_expr (NULL_TREE, var);
- tmp = build_call_expr (iocall[IOCALL_CLOSE], 1, tmp);
+ tmp = build_call_expr_loc (input_location,
+ iocall[IOCALL_CLOSE], 1, tmp);
gfc_add_expr_to_block (&block, tmp);
gfc_add_block_to_block (&block, &post_block);
set_parameter_const (&block, var, IOPARM_common_unit, 0);
tmp = gfc_build_addr_expr (NULL_TREE, var);
- tmp = build_call_expr (function, 1, tmp);
+ tmp = build_call_expr_loc (input_location,
+ function, 1, tmp);
gfc_add_expr_to_block (&block, tmp);
gfc_add_block_to_block (&block, &post_block);
set_parameter_const (&block, var, IOPARM_common_unit, 0);
tmp = gfc_build_addr_expr (NULL_TREE, var);
- tmp = build_call_expr (iocall[IOCALL_INQUIRE], 1, tmp);
+ tmp = build_call_expr_loc (input_location,
+ iocall[IOCALL_INQUIRE], 1, tmp);
gfc_add_expr_to_block (&block, tmp);
gfc_add_block_to_block (&block, &post_block);
set_parameter_value (&block, var, IOPARM_common_unit, p->unit);
tmp = gfc_build_addr_expr (NULL_TREE, var);
- tmp = build_call_expr (iocall[IOCALL_WAIT], 1, tmp);
+ tmp = build_call_expr_loc (input_location,
+ iocall[IOCALL_WAIT], 1, tmp);
gfc_add_expr_to_block (&block, tmp);
gfc_add_block_to_block (&block, &post_block);
dummy_arg_flagged = POINTER_TYPE_P (TREE_TYPE(tmp));
- itmp = (dummy_arg_flagged) ? build_fold_indirect_ref (tmp) : tmp;
+ itmp = (dummy_arg_flagged) ? build_fold_indirect_ref_loc (input_location,
+ tmp) : tmp;
/* If an array, set flag and use indirect ref. if built. */
/* If scalar dummy, resolve indirect reference now. */
if (dummy_arg_flagged && !array_flagged)
- tmp = build_fold_indirect_ref (tmp);
+ tmp = build_fold_indirect_ref_loc (input_location,
+ tmp);
gcc_assert (tmp && POINTER_TYPE_P (TREE_TYPE (tmp)));
tmp = ts->cl->backend_decl;
else
tmp = build_int_cst (gfc_charlen_type_node, 0);
- tmp = build_call_expr (iocall[IOCALL_SET_NML_VAL], 6,
+ tmp = build_call_expr_loc (input_location,
+ iocall[IOCALL_SET_NML_VAL], 6,
dt_parm_addr, addr_expr, string,
IARG (ts->kind), tmp, dtype);
gfc_add_expr_to_block (block, tmp);
for ( n_dim = 0 ; n_dim < rank ; n_dim++ )
{
- tmp = build_call_expr (iocall[IOCALL_SET_NML_VAL_DIM], 5,
+ tmp = build_call_expr_loc (input_location,
+ iocall[IOCALL_SET_NML_VAL_DIM], 5,
dt_parm_addr,
IARG (n_dim),
GFC_TYPE_ARRAY_STRIDE (dt, n_dim),
/* Provide the RECORD_TYPE to build component references. */
- tree expr = build_fold_indirect_ref (addr_expr);
+ tree expr = build_fold_indirect_ref_loc (input_location,
+ addr_expr);
for (cmp = ts->derived->components; cmp; cmp = cmp->next)
{
set_parameter_const (&block, var, IOPARM_common_flags, mask);
tmp = gfc_build_addr_expr (NULL_TREE, var);
- tmp = build_call_expr (function, 1, tmp);
+ tmp = build_call_expr_loc (input_location,
+ function, 1, tmp);
gfc_add_expr_to_block (&block, tmp);
gfc_add_block_to_block (&block, &post_block);
}
tmp = gfc_build_addr_expr (NULL_TREE, dt_parm);
- tmp = build_call_expr (function, 1, tmp);
+ tmp = build_call_expr_loc (input_location,
+ function, 1, tmp);
gfc_add_expr_to_block (&block, tmp);
gfc_add_block_to_block (&block, dt_post_end_block);
gfc_init_block (dt_post_end_block);
arg2 = se->string_length;
else
{
- tmp = build_fold_indirect_ref (addr_expr);
+ tmp = build_fold_indirect_ref_loc (input_location,
+ addr_expr);
gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE);
arg2 = TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (tmp)));
arg2 = fold_convert (gfc_charlen_type_node, arg2);
arg3 = build_int_cst (NULL_TREE, kind);
function = iocall[IOCALL_X_CHARACTER_WIDE];
tmp = gfc_build_addr_expr (NULL_TREE, dt_parm);
- tmp = build_call_expr (function, 4, tmp, addr_expr, arg2, arg3);
+ tmp = build_call_expr_loc (input_location,
+ function, 4, tmp, addr_expr, arg2, arg3);
gfc_add_expr_to_block (&se->pre, tmp);
gfc_add_block_to_block (&se->pre, &se->post);
return;
arg2 = se->string_length;
else
{
- tmp = build_fold_indirect_ref (addr_expr);
+ tmp = build_fold_indirect_ref_loc (input_location,
+ addr_expr);
gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE);
arg2 = TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (tmp)));
}
case BT_DERIVED:
/* Recurse into the elements of the derived type. */
expr = gfc_evaluate_now (addr_expr, &se->pre);
- expr = build_fold_indirect_ref (expr);
+ expr = build_fold_indirect_ref_loc (input_location,
+ expr);
for (c = ts->derived->components; c; c = c->next)
{
field = c->backend_decl;
gcc_assert (field && TREE_CODE (field) == FIELD_DECL);
- tmp = fold_build3 (COMPONENT_REF, TREE_TYPE (field),
+ tmp = fold_build3_loc (UNKNOWN_LOCATION,
+ COMPONENT_REF, TREE_TYPE (field),
expr, field, NULL_TREE);
if (c->attr.dimension)
}
tmp = gfc_build_addr_expr (NULL_TREE, dt_parm);
- tmp = build_call_expr (function, 3, tmp, addr_expr, arg2);
+ tmp = build_call_expr_loc (input_location,
+ function, 3, tmp, addr_expr, arg2);
gfc_add_expr_to_block (&se->pre, tmp);
gfc_add_block_to_block (&se->pre, &se->post);
kind_arg = build_int_cst (NULL_TREE, ts->kind);
tmp = gfc_build_addr_expr (NULL_TREE, dt_parm);
- tmp = build_call_expr (iocall[IOCALL_X_ARRAY], 4,
+ tmp = build_call_expr_loc (input_location,
+ iocall[IOCALL_X_ARRAY], 4,
tmp, addr_expr, kind_arg, charlen_arg);
gfc_add_expr_to_block (&se->pre, tmp);
gfc_add_block_to_block (&se->pre, &se->post);
build_int_cst (pvoid_type_node, 0),
size, NULL, NULL);
gfc_conv_descriptor_data_set (&block, dest, ptr);
- call = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3, ptr,
+ call = build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_MEMCPY], 3, ptr,
fold_convert (pvoid_type_node,
gfc_conv_descriptor_data_get (src)),
size);
TYPE_SIZE_UNIT (gfc_get_element_type (type)));
size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
- call = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3,
+ call = build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_MEMCPY], 3,
fold_convert (pvoid_type_node,
gfc_conv_descriptor_data_get (dest)),
fold_convert (pvoid_type_node,
lhsaddr = save_expr (lhsaddr);
rhs = gfc_evaluate_now (rse.expr, &block);
- x = convert (TREE_TYPE (rhs), build_fold_indirect_ref (lhsaddr));
+ x = convert (TREE_TYPE (rhs), build_fold_indirect_ref_loc (input_location,
+ lhsaddr));
if (var_on_left)
x = fold_build2 (op, TREE_TYPE (rhs), x, rhs);
gfc_trans_omp_barrier (void)
{
tree decl = built_in_decls [BUILT_IN_GOMP_BARRIER];
- return build_call_expr (decl, 0);
+ return build_call_expr_loc (input_location, decl, 0);
}
static tree
gfc_trans_omp_flush (void)
{
tree decl = built_in_decls [BUILT_IN_SYNCHRONIZE];
- return build_call_expr (decl, 0);
+ return build_call_expr_loc (input_location, decl, 0);
}
static tree
gfc_trans_omp_taskwait (void)
{
tree decl = built_in_decls [BUILT_IN_GOMP_TASKWAIT];
- return build_call_expr (decl, 0);
+ return build_call_expr_loc (input_location, decl, 0);
}
static tree
tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
loopse->loop->from[n], tmp);
offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
- offset, tmp);
+ offset, tmp);
}
info->offset = gfc_create_var (gfc_array_index_type, NULL);
gfc_add_modify (&se->pre, info->offset, offset);
/* Copy the result back using unpack. */
- tmp = build_call_expr (gfor_fndecl_in_unpack, 2, parmse.expr, data);
+ tmp = build_call_expr_loc (input_location,
+ gfor_fndecl_in_unpack, 2, parmse.expr, data);
gfc_add_expr_to_block (&se->post, tmp);
/* parmse.pre is already added above. */
if (code->expr1 == NULL)
{
tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
- tmp = build_call_expr (gfor_fndecl_pause_numeric, 1, tmp);
+ tmp = build_call_expr_loc (input_location,
+ gfor_fndecl_pause_numeric, 1, tmp);
}
else
{
gfc_conv_expr_reference (&se, code->expr1);
- tmp = build_call_expr (gfor_fndecl_pause_string, 2,
+ tmp = build_call_expr_loc (input_location,
+ gfor_fndecl_pause_string, 2,
se.expr, se.string_length);
}
if (code->expr1 == NULL)
{
tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
- tmp = build_call_expr (gfor_fndecl_stop_numeric, 1, tmp);
+ tmp = build_call_expr_loc (input_location,
+ gfor_fndecl_stop_numeric, 1, tmp);
}
else
{
gfc_conv_expr_reference (&se, code->expr1);
- tmp = build_call_expr (gfor_fndecl_stop_string, 2,
+ tmp = build_call_expr_loc (input_location,
+ gfor_fndecl_stop_string, 2,
se.expr, se.string_length);
}
else
gcc_unreachable ();
- tmp = build_call_expr (fndecl, 4, init, build_int_cst (NULL_TREE, n),
+ tmp = build_call_expr_loc (input_location,
+ fndecl, 4, init, build_int_cst (NULL_TREE, n),
se.expr, se.string_length);
case_num = gfc_create_var (integer_type_node, "case_num");
gfc_add_modify (&block, case_num, tmp);
gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN);
gfc_add_block_to_block (pre, &tse.pre);
gfc_add_block_to_block (post, &tse.post);
- tse.expr = build_fold_indirect_ref (tse.expr);
+ tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr);
if (e->ts.type != BT_CHARACTER)
{
tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
if (*ptemp1)
- tmp = build_fold_indirect_ref (tmp);
+ tmp = build_fold_indirect_ref_loc (input_location, tmp);
return tmp;
}
if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
{
- tmp = build_fold_indirect_ref (se.expr);
+ tmp = build_fold_indirect_ref_loc (input_location, se.expr);
tmp = gfc_nullify_alloc_comp (expr->ts.derived, tmp, 0);
gfc_add_expr_to_block (&se.pre, tmp);
}
dlen = gfc_get_expr_charlen (code->expr2);
slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
- dlen = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3,
+ dlen = build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_MEMCPY], 3,
gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
tmp = fold_build2 (NE_EXPR, boolean_type_node, stat,
dlen = gfc_get_expr_charlen (code->expr2);
slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
- dlen = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3,
+ dlen = build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_MEMCPY], 3,
gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
tmp = fold_build2 (NE_EXPR, boolean_type_node, astat,
tmp, fold_convert (sizetype, offset));
tmp = fold_convert (build_pointer_type (type), tmp);
if (!TYPE_STRING_FLAG (type))
- tmp = build_fold_indirect_ref (tmp);
+ tmp = build_fold_indirect_ref_loc (input_location, tmp);
return tmp;
}
else
va_end (ap);
/* Build the function call to runtime_(warning,error)_at; because of the
- variable number of arguments, we can't use build_call_expr directly. */
+    variable number of arguments, we can't use build_call_expr_loc
+    directly.  */
if (error)
fntype = TREE_TYPE (gfor_fndecl_runtime_error_at);
else
fntype = TREE_TYPE (gfor_fndecl_runtime_warning_at);
- tmp = fold_builtin_call_array (TREE_TYPE (fntype),
+ tmp = fold_builtin_call_array (input_location, TREE_TYPE (fntype),
fold_build1 (ADDR_EXPR,
build_pointer_type (fntype),
error
cond = fold_convert (long_integer_type_node, cond);
tmp = build_int_cst (long_integer_type_node, 0);
- cond = build_call_expr (built_in_decls[BUILT_IN_EXPECT], 2, cond, tmp);
+ cond = build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_EXPECT], 2, cond, tmp);
cond = fold_convert (boolean_type_node, cond);
tmp = build3_v (COND_EXPR, cond, body, build_empty_stmt (input_location));
msg = gfc_build_addr_expr (pchar_type_node, gfc_build_localized_cstring_const
("Attempt to allocate a negative amount of memory."));
tmp = fold_build3 (COND_EXPR, void_type_node, negative,
- build_call_expr (gfor_fndecl_runtime_error, 1, msg),
+ build_call_expr_loc (input_location,
+ gfor_fndecl_runtime_error, 1, msg),
build_empty_stmt (input_location));
gfc_add_expr_to_block (block, tmp);
build_int_cst (size_type_node, 1));
gfc_add_modify (&block2, res,
- build_call_expr (built_in_decls[BUILT_IN_MALLOC], 1,
+ build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_MALLOC], 1,
size));
null_result = fold_build2 (EQ_EXPR, boolean_type_node, res,
build_int_cst (pvoid_type_node, 0));
msg = gfc_build_addr_expr (pchar_type_node, gfc_build_localized_cstring_const
("Memory allocation failed"));
tmp = fold_build3 (COND_EXPR, void_type_node, null_result,
- build_call_expr (gfor_fndecl_os_error, 1, msg),
+ build_call_expr_loc (input_location,
+ gfor_fndecl_os_error, 1, msg),
build_empty_stmt (input_location));
gfc_add_expr_to_block (&block2, tmp);
malloc_result = gfc_finish_block (&block2);
msg = gfc_build_addr_expr (pchar_type_node, gfc_build_localized_cstring_const
("Attempt to allocate negative amount of memory. "
"Possible integer overflow"));
- error = build_call_expr (gfor_fndecl_runtime_error, 1, msg);
+ error = build_call_expr_loc (input_location,
+ gfor_fndecl_runtime_error, 1, msg);
if (status != NULL_TREE && !integer_zerop (status))
{
gfc_start_block (&set_status_block);
gfc_add_modify (&set_status_block,
- fold_build1 (INDIRECT_REF, status_type, status),
+ fold_build1 (INDIRECT_REF, status_type, status),
build_int_cst (status_type, LIBERROR_ALLOCATION));
gfc_add_modify (&set_status_block, res,
build_int_cst (pvoid_type_node, 0));
/* The allocation itself. */
gfc_start_block (&alloc_block);
gfc_add_modify (&alloc_block, res,
- build_call_expr (built_in_decls[BUILT_IN_MALLOC], 1,
+ build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_MALLOC], 1,
fold_build2 (MAX_EXPR, size_type_node,
size,
build_int_cst (size_type_node, 1))));
msg = gfc_build_addr_expr (pchar_type_node, gfc_build_localized_cstring_const
("Out of memory"));
- tmp = build_call_expr (gfor_fndecl_os_error, 1, msg);
+ tmp = build_call_expr_loc (input_location,
+ gfor_fndecl_os_error, 1, msg);
if (status != NULL_TREE && !integer_zerop (status))
{
stmtblock_t set_status_block;
gfc_start_block (&set_status_block);
- tmp = build_call_expr (built_in_decls[BUILT_IN_FREE], 1,
+ tmp = build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_FREE], 1,
fold_convert (pvoid_type_node, mem));
gfc_add_expr_to_block (&set_status_block, tmp);
var = gfc_evaluate_now (var, &block);
cond = fold_build2 (NE_EXPR, boolean_type_node, var,
build_int_cst (pvoid_type_node, 0));
- call = build_call_expr (built_in_decls[BUILT_IN_FREE], 1, var);
+ call = build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_FREE], 1, var);
tmp = fold_build3 (COND_EXPR, void_type_node, cond, call,
build_empty_stmt (input_location));
gfc_add_expr_to_block (&block, tmp);
/* When POINTER is not NULL, we free it. */
gfc_start_block (&non_null);
- tmp = build_call_expr (built_in_decls[BUILT_IN_FREE], 1,
+ tmp = build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_FREE], 1,
fold_convert (pvoid_type_node, pointer));
gfc_add_expr_to_block (&non_null, tmp);
msg = gfc_build_addr_expr (pchar_type_node, gfc_build_localized_cstring_const
("Attempt to allocate a negative amount of memory."));
tmp = fold_build3 (COND_EXPR, void_type_node, negative,
- build_call_expr (gfor_fndecl_runtime_error, 1, msg),
+ build_call_expr_loc (input_location,
+ gfor_fndecl_runtime_error, 1, msg),
build_empty_stmt (input_location));
gfc_add_expr_to_block (block, tmp);
/* Call realloc and check the result. */
- tmp = build_call_expr (built_in_decls[BUILT_IN_REALLOC], 2,
+ tmp = build_call_expr_loc (input_location,
+ built_in_decls[BUILT_IN_REALLOC], 2,
fold_convert (pvoid_type_node, mem), size);
gfc_add_modify (block, res, fold_convert (type, tmp));
null_result = fold_build2 (EQ_EXPR, boolean_type_node, res,
msg = gfc_build_addr_expr (pchar_type_node, gfc_build_localized_cstring_const
("Out of memory"));
tmp = fold_build3 (COND_EXPR, void_type_node, null_result,
- build_call_expr (gfor_fndecl_os_error, 1, msg),
+ build_call_expr_loc (input_location,
+ gfor_fndecl_os_error, 1, msg),
build_empty_stmt (input_location));
gfc_add_expr_to_block (block, tmp);
arg = build_addr (next_label, current_function_decl);
t = implicit_built_in_decls[BUILT_IN_SETJMP_SETUP];
g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
- gimple_set_location (g, gimple_location (stmt));
+ gimple_set_location (g, loc);
gimple_set_block (g, gimple_block (stmt));
gsi_insert_before (gsi, g, GSI_SAME_STMT);
/* Build 'DEST = 0' and insert. */
if (dest)
{
- g = gimple_build_assign (dest, fold_convert (TREE_TYPE (dest),
- integer_zero_node));
- gimple_set_location (g, gimple_location (stmt));
+ g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
+ integer_zero_node));
+ gimple_set_location (g, loc);
gimple_set_block (g, gimple_block (stmt));
gsi_insert_before (gsi, g, GSI_SAME_STMT);
}
arg = build_addr (next_label, current_function_decl);
t = implicit_built_in_decls[BUILT_IN_SETJMP_RECEIVER];
g = gimple_build_call (t, 1, arg);
- gimple_set_location (g, gimple_location (stmt));
+ gimple_set_location (g, loc);
gimple_set_block (g, gimple_block (stmt));
gsi_insert_before (gsi, g, GSI_SAME_STMT);
/* Build 'DEST = 1' and insert. */
if (dest)
{
- g = gimple_build_assign (dest, fold_convert (TREE_TYPE (dest),
- integer_one_node));
- gimple_set_location (g, gimple_location (stmt));
+ g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
+ integer_one_node));
+ gimple_set_location (g, loc);
gimple_set_block (g, gimple_block (stmt));
gsi_insert_before (gsi, g, GSI_SAME_STMT);
}
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
tree *lhs_p, tree *rhs_p)
{
+ location_t loc = EXPR_LOCATION (cond);
gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
|| TREE_CODE (cond) == TRUTH_NOT_EXPR
|| is_gimple_min_invariant (cond)
{
*code_p = EQ_EXPR;
gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
- *rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node);
+ *rhs_p = fold_convert_loc (loc, TREE_TYPE (*lhs_p), integer_zero_node);
}
/* Canonicalize conditionals of the form 'if (VAL)' */
else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
{
*code_p = NE_EXPR;
gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
- *rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node);
+ *rhs_p = fold_convert_loc (loc, TREE_TYPE (*lhs_p), integer_zero_node);
}
}
tree
gimple_fold (const_gimple stmt)
{
+ location_t loc = gimple_location (stmt);
switch (gimple_code (stmt))
{
case GIMPLE_COND:
- return fold_binary (gimple_cond_code (stmt),
+ return fold_binary_loc (loc, gimple_cond_code (stmt),
boolean_type_node,
gimple_cond_lhs (stmt),
gimple_cond_rhs (stmt));
switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
{
case GIMPLE_UNARY_RHS:
- return fold_unary (gimple_assign_rhs_code (stmt),
+ return fold_unary_loc (loc, gimple_assign_rhs_code (stmt),
TREE_TYPE (gimple_assign_lhs (stmt)),
gimple_assign_rhs1 (stmt));
case GIMPLE_BINARY_RHS:
- return fold_binary (gimple_assign_rhs_code (stmt),
+ return fold_binary_loc (loc, gimple_assign_rhs_code (stmt),
TREE_TYPE (gimple_assign_lhs (stmt)),
gimple_assign_rhs1 (stmt),
gimple_assign_rhs2 (stmt));
{
GIMPLE_CHECK (gs, GIMPLE_CALL);
gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
- gimple_set_op (gs, 1, build_fold_addr_expr (decl));
+ gimple_set_op (gs, 1, build_fold_addr_expr_loc (gimple_location (gs), decl));
}
gimplify_conversion (tree *expr_p)
{
tree tem;
+ location_t loc = EXPR_LOCATION (*expr_p);
gcc_assert (CONVERT_EXPR_P (*expr_p));
/* Then strip away all but the outermost conversion. */
/* If we have a conversion to a non-register type force the
use of a VIEW_CONVERT_EXPR instead. */
if (!is_gimple_reg_type (TREE_TYPE (*expr_p)))
- *expr_p = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
+ *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
TREE_OPERAND (*expr_p, 0));
return GS_OK;
VEC(tree,heap) *stack;
enum gimplify_status ret = GS_OK, tret;
int i;
+ location_t loc = EXPR_LOCATION (*expr_p);
/* Create a stack of the subexpressions so later we can walk them in
order from inner to outer. */
restart:
/* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
if (TREE_CODE (*p) == INDIRECT_REF)
- *p = fold_indirect_ref (*p);
+ *p = fold_indirect_ref_loc (loc, *p);
if (handled_component_p (*p))
;
/* Divide the element size by the alignment of the element
type (above). */
- elmt_size = size_binop (EXACT_DIV_EXPR, elmt_size, factor);
+ elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
if (!is_gimple_min_invariant (elmt_size))
{
= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
/* Divide the offset by its alignment. */
- offset = size_binop (EXACT_DIV_EXPR, offset, factor);
+ offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
if (!is_gimple_min_invariant (offset))
{
bool postfix;
enum tree_code arith_code;
enum gimplify_status ret;
+ location_t loc = EXPR_LOCATION (*expr_p);
code = TREE_CODE (*expr_p);
if (!is_gimple_min_lval (lvalue))
{
mark_addressable (lvalue);
- lvalue = build_fold_addr_expr (lvalue);
+ lvalue = build_fold_addr_expr_loc (input_location, lvalue);
gimplify_expr (&lvalue, pre_p, post_p, is_gimple_val, fb_rvalue);
- lvalue = build_fold_indirect_ref (lvalue);
+ lvalue = build_fold_indirect_ref_loc (input_location, lvalue);
}
ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
if (ret == GS_ERROR)
/* For POINTERs increment, use POINTER_PLUS_EXPR. */
if (POINTER_TYPE_P (TREE_TYPE (lhs)))
{
- rhs = fold_convert (sizetype, rhs);
+ rhs = fold_convert_loc (loc, sizetype, rhs);
if (arith_code == MINUS_EXPR)
- rhs = fold_build1 (NEGATE_EXPR, TREE_TYPE (rhs), rhs);
+ rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
arith_code = POINTER_PLUS_EXPR;
}
int i, nargs;
gimple call;
bool builtin_va_start_p = FALSE;
+ location_t loc = EXPR_LOCATION (*expr_p);
gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
fndecl = get_callee_fndecl (*expr_p);
if (fndecl && DECL_BUILT_IN (fndecl))
{
- tree new_tree = fold_call_expr (*expr_p, !want_value);
+ tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
if (new_tree && new_tree != *expr_p)
{
tree call = *expr_p;
--nargs;
- *expr_p = build_call_array (TREE_TYPE (call), CALL_EXPR_FN (call),
- nargs, CALL_EXPR_ARGP (call));
+ *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
+ CALL_EXPR_FN (call),
+ nargs, CALL_EXPR_ARGP (call));
/* Copy all CALL_EXPR flags, location and block, except
CALL_EXPR_VA_ARG_PACK flag. */
/* Try this again in case gimplification exposed something. */
if (ret != GS_ERROR)
{
- tree new_tree = fold_call_expr (*expr_p, !want_value);
+ tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
if (new_tree && new_tree != *expr_p)
{
gimple_boolify (tree expr)
{
tree type = TREE_TYPE (expr);
+ location_t loc = EXPR_LOCATION (expr);
if (TREE_CODE (type) == BOOLEAN_TYPE)
return expr;
default:
/* Other expressions that get here must have boolean values, but
might need to be converted to the appropriate mode. */
- return fold_convert (boolean_type_node, expr);
+ return fold_convert_loc (loc, boolean_type_node, expr);
}
}
gimple gimple_cond;
enum tree_code pred_code;
gimple_seq seq = NULL;
+ location_t loc = EXPR_LOCATION (*expr_p);
type = TREE_TYPE (expr);
if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
TREE_OPERAND (expr, 1) =
- build_fold_addr_expr (TREE_OPERAND (expr, 1));
+ build_fold_addr_expr_loc (loc, TREE_OPERAND (expr, 1));
if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
TREE_OPERAND (expr, 2) =
- build_fold_addr_expr (TREE_OPERAND (expr, 2));
+ build_fold_addr_expr_loc (loc, TREE_OPERAND (expr, 2));
tmp = create_tmp_var (type, "iftmp");
expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));
- result = build_fold_indirect_ref (tmp);
+ result = build_fold_indirect_ref_loc (loc, tmp);
}
/* Build the then clause, 't1 = a;'. But don't build an assignment
{
tree t, to, to_ptr, from, from_ptr;
gimple gs;
+ location_t loc = EXPR_LOCATION (*expr_p);
to = TREE_OPERAND (*expr_p, 0);
from = TREE_OPERAND (*expr_p, 1);
mark_addressable (from);
- from_ptr = build_fold_addr_expr (from);
- gimplify_arg (&from_ptr, seq_p, EXPR_LOCATION (*expr_p));
+ from_ptr = build_fold_addr_expr_loc (loc, from);
+ gimplify_arg (&from_ptr, seq_p, loc);
mark_addressable (to);
- to_ptr = build_fold_addr_expr (to);
- gimplify_arg (&to_ptr, seq_p, EXPR_LOCATION (*expr_p));
+ to_ptr = build_fold_addr_expr_loc (loc, to);
+ gimplify_arg (&to_ptr, seq_p, loc);
t = implicit_built_in_decls[BUILT_IN_MEMCPY];
{
tree t, from, to, to_ptr;
gimple gs;
+ location_t loc = EXPR_LOCATION (*expr_p);
/* Assert our assumptions, to abort instead of producing wrong code
silently if they are not met. Beware that the RHS CONSTRUCTOR might
/* Now proceed. */
to = TREE_OPERAND (*expr_p, 0);
- to_ptr = build_fold_addr_expr (to);
- gimplify_arg (&to_ptr, seq_p, EXPR_LOCATION (*expr_p));
+ to_ptr = build_fold_addr_expr_loc (loc, to);
+ gimplify_arg (&to_ptr, seq_p, loc);
t = implicit_built_in_decls[BUILT_IN_MEMSET];
gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
tree *to_p = &TREE_OPERAND (*expr_p, 0);
enum gimplify_status ret = GS_UNHANDLED;
gimple assign;
+ location_t loc = EXPR_LOCATION (*expr_p);
gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
|| TREE_CODE (*expr_p) == INIT_EXPR);
{
STRIP_USELESS_TYPE_CONVERSION (*from_p);
if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
- *from_p = fold_convert (TREE_TYPE (*to_p), *from_p);
+ *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
}
/* See if any simplifications can be done based on what the RHS is. */
tree op0 = TREE_OPERAND (*expr_p, 0);
tree op1 = TREE_OPERAND (*expr_p, 1);
tree t, arg, dest, src;
+ location_t loc = EXPR_LOCATION (*expr_p);
arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
arg = unshare_expr (arg);
arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
- src = build_fold_addr_expr (op1);
- dest = build_fold_addr_expr (op0);
+ src = build_fold_addr_expr_loc (loc, op1);
+ dest = build_fold_addr_expr_loc (loc, op0);
t = implicit_built_in_decls[BUILT_IN_MEMCMP];
- t = build_call_expr (t, 3, dest, src, arg);
+ t = build_call_expr_loc (loc, t, 3, dest, src, arg);
*expr_p
= build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
static enum gimplify_status
gimplify_scalar_mode_aggregate_compare (tree *expr_p)
{
+ location_t loc = EXPR_LOCATION (*expr_p);
tree op0 = TREE_OPERAND (*expr_p, 0);
tree op1 = TREE_OPERAND (*expr_p, 1);
tree type = TREE_TYPE (op0);
tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
- op0 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op0);
- op1 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op1);
+ op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
+ op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
*expr_p
- = fold_build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
+ = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
return GS_OK;
}
tree type = TREE_TYPE (*expr_p);
*expr_p = build3 (COND_EXPR, type, *expr_p,
- fold_convert (type, boolean_true_node),
- fold_convert (type, boolean_false_node));
+ fold_convert_loc (locus, type, boolean_true_node),
+ fold_convert_loc (locus, type, boolean_false_node));
SET_EXPR_LOCATION (*expr_p, locus);
tree expr = *expr_p;
tree op0 = TREE_OPERAND (expr, 0);
enum gimplify_status ret;
+ location_t loc = EXPR_LOCATION (*expr_p);
switch (TREE_CODE (op0))
{
tree t_op00 = TREE_TYPE (op00);
if (!useless_type_conversion_p (t_expr, t_op00))
- op00 = fold_convert (TREE_TYPE (expr), op00);
+ op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
*expr_p = op00;
ret = GS_OK;
}
if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
op0 = TREE_OPERAND (op0, 0);
- *expr_p = fold_convert (TREE_TYPE (expr),
- build_fold_addr_expr (TREE_OPERAND (op0, 0)));
+ *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
+ build_fold_addr_expr_loc (loc,
+ TREE_OPERAND (op0, 0)));
ret = GS_OK;
break;
break;
case INDIRECT_REF:
- *expr_p = fold_indirect_ref (*expr_p);
+ *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
if (*expr_p != save_expr)
break;
/* else fall through. */
/* An lvalue will do. Take the address of the expression, store it
in a temporary, and replace the expression with an INDIRECT_REF of
that temporary. */
- tmp = build_fold_addr_expr (*expr_p);
+ tmp = build_fold_addr_expr_loc (input_location, *expr_p);
gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
*expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
}
+2009-07-17 Aldy Hernandez <aldyh@redhat.com>
+ Manuel López-Ibáñez <manu@gcc.gnu.org>
+
+ PR 40435
+ * objc-act.c: Add location argument to all calls to
+ build_fold_addr_expr.
+
2009-07-14 Taras Glek <tglek@mozilla.com>
Rafael Espindola <espindola@google.com>
next_sjlj_build_try_exit (void)
{
tree t;
- t = build_fold_addr_expr (cur_try_context->stack_decl);
+ t = build_fold_addr_expr_loc (input_location, cur_try_context->stack_decl);
t = tree_cons (NULL, t, NULL);
t = build_function_call (input_location,
objc_exception_try_exit_decl, t);
{
tree t, enter, sj, cond;
- t = build_fold_addr_expr (cur_try_context->stack_decl);
+ t = build_fold_addr_expr_loc (input_location, cur_try_context->stack_decl);
t = tree_cons (NULL, t, NULL);
enter = build_function_call (input_location,
objc_exception_try_enter_decl, t);
t = objc_build_component_ref (cur_try_context->stack_decl,
get_identifier ("buf"));
- t = build_fold_addr_expr (t);
+ t = build_fold_addr_expr_loc (input_location, t);
#ifdef OBJCPLUS
/* Convert _setjmp argument to type that is expected. */
if (TYPE_ARG_TYPES (TREE_TYPE (objc_setjmp_decl)))
{
tree t;
- t = build_fold_addr_expr (cur_try_context->stack_decl);
+ t = build_fold_addr_expr_loc (input_location, cur_try_context->stack_decl);
t = tree_cons (NULL, t, NULL);
t = build_function_call (input_location,
objc_exception_extract_decl, t);
method_params = tree_cons (NULL_TREE, lookup_object,
tree_cons (NULL_TREE, selector,
method_params));
- method = build_fold_addr_expr (sender);
+ method = build_fold_addr_expr_loc (input_location, sender);
}
else
{
t = tree_cons (NULL_TREE, selector, NULL_TREE);
t = tree_cons (NULL_TREE, lookup_object, t);
- method = build_function_call (loc,
- sender, t);
+ method = build_function_call (loc, sender, t);
/* Pass the object to the method. */
method_params = tree_cons (NULL_TREE, object,
struct omp_for_data_loop *loop;
int i;
struct omp_for_data_loop dummy_loop;
+ location_t loc = gimple_location (for_stmt);
fd->for_stmt = for_stmt;
fd->pre = NULL;
break;
case LE_EXPR:
if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
- loop->n2 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (loop->n2),
+ loop->n2 = fold_build2_loc (loc,
+ POINTER_PLUS_EXPR, TREE_TYPE (loop->n2),
loop->n2, size_one_node);
else
- loop->n2 = fold_build2 (PLUS_EXPR, TREE_TYPE (loop->n2), loop->n2,
+ loop->n2 = fold_build2_loc (loc,
+ PLUS_EXPR, TREE_TYPE (loop->n2), loop->n2,
build_int_cst (TREE_TYPE (loop->n2), 1));
loop->cond_code = LT_EXPR;
break;
case GE_EXPR:
if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
- loop->n2 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (loop->n2),
+ loop->n2 = fold_build2_loc (loc,
+ POINTER_PLUS_EXPR, TREE_TYPE (loop->n2),
loop->n2, size_int (-1));
else
- loop->n2 = fold_build2 (MINUS_EXPR, TREE_TYPE (loop->n2), loop->n2,
+ loop->n2 = fold_build2_loc (loc,
+ MINUS_EXPR, TREE_TYPE (loop->n2), loop->n2,
build_int_cst (TREE_TYPE (loop->n2), 1));
loop->cond_code = GT_EXPR;
break;
break;
case MINUS_EXPR:
loop->step = TREE_OPERAND (t, 1);
- loop->step = fold_build1 (NEGATE_EXPR, TREE_TYPE (loop->step),
+ loop->step = fold_build1_loc (loc,
+ NEGATE_EXPR, TREE_TYPE (loop->step),
loop->step);
break;
default:
tree n;
if (loop->cond_code == LT_EXPR)
- n = fold_build2 (PLUS_EXPR, TREE_TYPE (loop->v),
+ n = fold_build2_loc (loc,
+ PLUS_EXPR, TREE_TYPE (loop->v),
loop->n2, loop->step);
else
n = loop->n1;
if (loop->cond_code == LT_EXPR)
{
n1 = loop->n1;
- n2 = fold_build2 (PLUS_EXPR, TREE_TYPE (loop->v),
+ n2 = fold_build2_loc (loc,
+ PLUS_EXPR, TREE_TYPE (loop->v),
loop->n2, loop->step);
}
else
{
- n1 = fold_build2 (MINUS_EXPR, TREE_TYPE (loop->v),
+ n1 = fold_build2_loc (loc,
+ MINUS_EXPR, TREE_TYPE (loop->v),
loop->n2, loop->step);
n2 = loop->n1;
}
itype
= lang_hooks.types.type_for_size (TYPE_PRECISION (itype), 0);
t = build_int_cst (itype, (loop->cond_code == LT_EXPR ? -1 : 1));
- t = fold_build2 (PLUS_EXPR, itype,
- fold_convert (itype, loop->step), t);
- t = fold_build2 (PLUS_EXPR, itype, t,
- fold_convert (itype, loop->n2));
- t = fold_build2 (MINUS_EXPR, itype, t,
- fold_convert (itype, loop->n1));
+ t = fold_build2_loc (loc,
+ PLUS_EXPR, itype,
+ fold_convert_loc (loc, itype, loop->step), t);
+ t = fold_build2_loc (loc, PLUS_EXPR, itype, t,
+ fold_convert_loc (loc, itype, loop->n2));
+ t = fold_build2_loc (loc, MINUS_EXPR, itype, t,
+ fold_convert_loc (loc, itype, loop->n1));
if (TYPE_UNSIGNED (itype) && loop->cond_code == GT_EXPR)
- t = fold_build2 (TRUNC_DIV_EXPR, itype,
- fold_build1 (NEGATE_EXPR, itype, t),
- fold_build1 (NEGATE_EXPR, itype,
- fold_convert (itype,
- loop->step)));
+ t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype,
+ fold_build1_loc (loc, NEGATE_EXPR, itype, t),
+ fold_build1_loc (loc, NEGATE_EXPR, itype,
+ fold_convert_loc (loc, itype,
+ loop->step)));
else
- t = fold_build2 (TRUNC_DIV_EXPR, itype, t,
- fold_convert (itype, loop->step));
- t = fold_convert (long_long_unsigned_type_node, t);
+ t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype, t,
+ fold_convert_loc (loc, itype, loop->step));
+ t = fold_convert_loc (loc, long_long_unsigned_type_node, t);
if (count != NULL_TREE)
- count = fold_build2 (MULT_EXPR, long_long_unsigned_type_node,
+ count = fold_build2_loc (loc,
+ MULT_EXPR, long_long_unsigned_type_node,
count, t);
else
count = t;
if (collapse_count && *collapse_count == NULL)
{
if (count)
- *collapse_count = fold_convert (iter_type, count);
+ *collapse_count = fold_convert_loc (loc, iter_type, count);
else
*collapse_count = create_tmp_var (iter_type, ".count");
}
get_ws_args_for (gimple ws_stmt)
{
tree t;
+ location_t loc = gimple_location (ws_stmt);
if (gimple_code (ws_stmt) == GIMPLE_OMP_FOR)
{
ws_args = NULL_TREE;
if (fd.chunk_size)
{
- t = fold_convert (long_integer_type_node, fd.chunk_size);
+ t = fold_convert_loc (loc, long_integer_type_node, fd.chunk_size);
ws_args = tree_cons (NULL, t, ws_args);
}
- t = fold_convert (long_integer_type_node, fd.loop.step);
+ t = fold_convert_loc (loc, long_integer_type_node, fd.loop.step);
ws_args = tree_cons (NULL, t, ws_args);
- t = fold_convert (long_integer_type_node, fd.loop.n2);
+ t = fold_convert_loc (loc, long_integer_type_node, fd.loop.n2);
ws_args = tree_cons (NULL, t, ws_args);
- t = fold_convert (long_integer_type_node, fd.loop.n1);
+ t = fold_convert_loc (loc, long_integer_type_node, fd.loop.n1);
ws_args = tree_cons (NULL, t, ws_args);
return ws_args;
omp_context *ctx;
tree name, t;
gimple stmt = gsi_stmt (*gsi);
+ location_t loc = gimple_location (stmt);
/* Ignore task directives with empty bodies. */
if (optimize > 0
fixup_child_record_type (ctx);
if (ctx->srecord_type)
layout_type (ctx->srecord_type);
- t = fold_convert (long_integer_type_node,
+ t = fold_convert_loc (loc, long_integer_type_node,
TYPE_SIZE_UNIT (ctx->record_type));
gimple_omp_task_set_arg_size (stmt, t);
t = build_int_cst (long_integer_type_node,
tree
omp_reduction_init (tree clause, tree type)
{
+ location_t loc = OMP_CLAUSE_LOCATION (clause);
switch (OMP_CLAUSE_REDUCTION_CODE (clause))
{
case PLUS_EXPR:
case TRUTH_ORIF_EXPR:
case TRUTH_XOR_EXPR:
case NE_EXPR:
- return fold_convert (type, integer_zero_node);
+ return fold_convert_loc (loc, type, integer_zero_node);
case MULT_EXPR:
case TRUTH_AND_EXPR:
case TRUTH_ANDIF_EXPR:
case EQ_EXPR:
- return fold_convert (type, integer_one_node);
+ return fold_convert_loc (loc, type, integer_one_node);
case BIT_AND_EXPR:
- return fold_convert (type, integer_minus_one_node);
+ return fold_convert_loc (loc, type, integer_minus_one_node);
case MAX_EXPR:
if (SCALAR_FLOAT_TYPE_P (type))
enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
tree var, new_var;
bool by_ref;
+ location_t clause_loc = OMP_CLAUSE_LOCATION (c);
switch (c_kind)
{
gimple_seq_add_stmt (ilist, stmt);
- x = fold_convert (TREE_TYPE (ptr), tmp);
+ x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
gimplify_assign (ptr, x, ilist);
}
}
if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
{
x = build_receiver_ref (var, false, ctx);
- x = build_fold_addr_expr (x);
+ x = build_fold_addr_expr_loc (clause_loc, x);
}
else if (TREE_CONSTANT (x))
{
name);
gimple_add_tmp_var (x);
TREE_ADDRESSABLE (x) = 1;
- x = build_fold_addr_expr (x);
+ x = build_fold_addr_expr_loc (clause_loc, x);
}
else
{
- x = build_call_expr (built_in_decls[BUILT_IN_ALLOCA], 1, x);
+ x = build_call_expr_loc (clause_loc,
+ built_in_decls[BUILT_IN_ALLOCA], 1, x);
}
- x = fold_convert (TREE_TYPE (new_var), x);
+ x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
gimplify_assign (new_var, x, ilist);
- new_var = build_fold_indirect_ref (new_var);
+ new_var = build_fold_indirect_ref_loc (clause_loc, new_var);
}
else if (c_kind == OMP_CLAUSE_REDUCTION
&& OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
x = build_outer_var_ref (var, ctx);
if (is_reference (var))
- x = build_fold_addr_expr (x);
+ x = build_fold_addr_expr_loc (clause_loc, x);
SET_DECL_VALUE_EXPR (placeholder, x);
DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
lower_omp (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
for (c = clauses; c ;)
{
tree var, new_var;
+ location_t clause_loc = OMP_CLAUSE_LOCATION (c);
if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
{
x = build_outer_var_ref (var, ctx);
if (is_reference (var))
- new_var = build_fold_indirect_ref (new_var);
+ new_var = build_fold_indirect_ref_loc (clause_loc, new_var);
x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
gimplify_and_add (x, stmt_list);
}
{
tree var, ref, new_var;
enum tree_code code;
+ location_t clause_loc = OMP_CLAUSE_LOCATION (c);
if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
continue;
var = OMP_CLAUSE_DECL (c);
new_var = lookup_decl (var, ctx);
if (is_reference (var))
- new_var = build_fold_indirect_ref (new_var);
+ new_var = build_fold_indirect_ref_loc (clause_loc, new_var);
ref = build_outer_var_ref (var, ctx);
code = OMP_CLAUSE_REDUCTION_CODE (c);
if (count == 1)
{
- tree addr = build_fold_addr_expr (ref);
+ tree addr = build_fold_addr_expr_loc (clause_loc, ref);
addr = save_expr (addr);
ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
- x = fold_build2 (code, TREE_TYPE (ref), ref, new_var);
+ x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
x = build2 (OMP_ATOMIC, void_type_node, addr, x);
gimplify_and_add (x, stmt_seqp);
return;
tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
if (is_reference (var))
- ref = build_fold_addr_expr (ref);
+ ref = build_fold_addr_expr_loc (clause_loc, ref);
SET_DECL_VALUE_EXPR (placeholder, ref);
DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
lower_omp (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
{
tree var, ref, x;
bool by_ref;
+ location_t clause_loc = OMP_CLAUSE_LOCATION (c);
if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
continue;
ref = build_sender_ref (var, ctx);
x = lookup_decl_in_outer_ctx (var, ctx);
- x = by_ref ? build_fold_addr_expr (x) : x;
+ x = by_ref ? build_fold_addr_expr_loc (clause_loc, x) : x;
gimplify_assign (ref, x, slist);
ref = build_receiver_ref (var, by_ref, ctx);
if (is_reference (var))
{
- ref = build_fold_indirect_ref (ref);
- var = build_fold_indirect_ref (var);
+ ref = build_fold_indirect_ref_loc (clause_loc, ref);
+ var = build_fold_indirect_ref_loc (clause_loc, var);
}
x = lang_hooks.decls.omp_clause_assign_op (c, var, ref);
gimplify_and_add (x, rlist);
{
tree val, ref, x, var;
bool by_ref, do_in = false, do_out = false;
+ location_t clause_loc = OMP_CLAUSE_LOCATION (c);
switch (OMP_CLAUSE_CODE (c))
{
if (do_in)
{
ref = build_sender_ref (val, ctx);
- x = by_ref ? build_fold_addr_expr (var) : var;
+ x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
gimplify_assign (ref, x, ilist);
if (is_task_ctx (ctx))
DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
gimple_stmt_iterator gsi;
gimple stmt;
int start_ix;
+ location_t clause_loc;
clauses = gimple_omp_parallel_clauses (entry_stmt);
c = find_omp_clause (clauses, OMP_CLAUSE_NUM_THREADS);
if (c)
- val = OMP_CLAUSE_NUM_THREADS_EXPR (c);
+ {
+ val = OMP_CLAUSE_NUM_THREADS_EXPR (c);
+ clause_loc = OMP_CLAUSE_LOCATION (c);
+ }
+ else
+ clause_loc = gimple_location (entry_stmt);
/* Ensure 'val' is of the correct type. */
- val = fold_convert (unsigned_type_node, val);
+ val = fold_convert_loc (clause_loc, unsigned_type_node, val);
/* If we found the clause 'if (cond)', build either
(cond != 0) or (cond ? val : 1u). */
cond = gimple_boolify (cond);
if (integer_zerop (val))
- val = fold_build2 (EQ_EXPR, unsigned_type_node, cond,
+ val = fold_build2_loc (clause_loc,
+ EQ_EXPR, unsigned_type_node, cond,
build_int_cst (TREE_TYPE (cond), 0));
else
{
tree args = tree_cons (NULL, t2,
tree_cons (NULL, t1,
tree_cons (NULL, val, ws_args)));
- t = build_function_call_expr (built_in_decls[start_ix], args);
+ t = build_function_call_expr (UNKNOWN_LOCATION,
+ built_in_decls[start_ix], args);
}
else
t = build_call_expr (built_in_decls[start_ix], 3, t2, t1, val);
t = null_pointer_node;
else
t = build_fold_addr_expr (t);
- t = build_call_expr (gimple_omp_parallel_child_fn (entry_stmt), 1, t);
+ t = build_call_expr_loc (gimple_location (entry_stmt),
+ gimple_omp_parallel_child_fn (entry_stmt), 1, t);
force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
false, GSI_CONTINUE_LINKING);
- t = build_call_expr (built_in_decls[BUILT_IN_GOMP_PARALLEL_END], 0);
+ t = build_call_expr_loc (gimple_location (entry_stmt),
+ built_in_decls[BUILT_IN_GOMP_PARALLEL_END], 0);
force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
false, GSI_CONTINUE_LINKING);
}
{
tree t, t1, t2, t3, flags, cond, c, clauses;
gimple_stmt_iterator gsi;
+ location_t loc = gimple_location (entry_stmt);
clauses = gimple_omp_task_clauses (entry_stmt);
if (t == NULL)
t2 = null_pointer_node;
else
- t2 = build_fold_addr_expr (t);
- t1 = build_fold_addr_expr (gimple_omp_task_child_fn (entry_stmt));
+ t2 = build_fold_addr_expr_loc (loc, t);
+ t1 = build_fold_addr_expr_loc (loc, gimple_omp_task_child_fn (entry_stmt));
t = gimple_omp_task_copy_fn (entry_stmt);
if (t == NULL)
t3 = null_pointer_node;
else
- t3 = build_fold_addr_expr (t);
+ t3 = build_fold_addr_expr_loc (loc, t);
t = build_call_expr (built_in_decls[BUILT_IN_GOMP_TASK], 7, t1, t2, t3,
gimple_omp_task_arg_size (entry_stmt),
basic_block store_bb = single_succ (load_bb);
gimple_stmt_iterator gsi;
gimple stmt;
+ location_t loc;
/* We expect to find the following sequences:
gsi = gsi_after_labels (store_bb);
stmt = gsi_stmt (gsi);
+ loc = gimple_location (stmt);
if (!is_gimple_assign (stmt))
return false;
gsi_next (&gsi);
gsi = gsi_last_bb (load_bb);
gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_ATOMIC_LOAD);
- call = build_call_expr (decl, 2, addr, fold_convert (itype, rhs));
- call = fold_convert (void_type_node, call);
+ call = build_call_expr_loc (loc,
+ decl, 2, addr,
+ fold_convert_loc (loc, itype, rhs));
+ call = fold_convert_loc (loc, void_type_node, call);
force_gimple_operand_gsi (&gsi, call, true, NULL_TREE, true, GSI_SAME_STMT);
gsi_remove (&gsi, true);
gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_LOAD);
t = built_in_decls[BUILT_IN_GOMP_ATOMIC_START];
- t = build_function_call_expr (t, 0);
+ t = build_function_call_expr (UNKNOWN_LOCATION, t, 0);
force_gimple_operand_gsi (&si, t, true, NULL_TREE, true, GSI_SAME_STMT);
stmt = gimple_build_assign (loaded_val, build_fold_indirect_ref (addr));
gsi_insert_before (&si, stmt, GSI_SAME_STMT);
t = built_in_decls[BUILT_IN_GOMP_ATOMIC_END];
- t = build_function_call_expr (t, 0);
+ t = build_function_call_expr (UNKNOWN_LOCATION, t, 0);
force_gimple_operand_gsi (&si, t, true, NULL_TREE, true, GSI_SAME_STMT);
gsi_remove (&si, true);
gimple_seq_add_stmt (pre_p, call);
cond = gimple_build_cond (EQ_EXPR, lhs,
- fold_convert (TREE_TYPE (lhs), boolean_true_node),
+ fold_convert_loc (loc, TREE_TYPE (lhs),
+ boolean_true_node),
tlabel, flabel);
gimple_seq_add_stmt (pre_p, cond);
gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
l1 = create_artificial_label (loc);
l2 = create_artificial_label (loc);
- t = build_call_expr (built_in_decls[BUILT_IN_GOMP_SINGLE_COPY_START], 0);
- t = fold_convert (ptr_type, t);
+ t = build_call_expr_loc (loc, built_in_decls[BUILT_IN_GOMP_SINGLE_COPY_START], 0);
+ t = fold_convert_loc (loc, ptr_type, t);
gimplify_assign (ctx->receiver_decl, t, pre_p);
t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
				 &copyin_seq, ctx);
- t = build_fold_addr_expr (ctx->sender_decl);
- t = build_call_expr (built_in_decls[BUILT_IN_GOMP_SINGLE_COPY_END], 1, t);
+ t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
+ t = build_call_expr_loc (loc, built_in_decls[BUILT_IN_GOMP_SINGLE_COPY_END],
+ 1, t);
gimplify_and_add (t, pre_p);
t = build_and_jump (&l2);
{
tree block, lab = NULL, x;
gimple stmt = gsi_stmt (*gsi_p), bind;
+ location_t loc = gimple_location (stmt);
gimple_seq tseq;
struct gimplify_ctx gctx;
bind = gimple_build_bind (NULL, gimple_seq_alloc_with_stmt (stmt),
block);
- x = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
+ x = build_call_expr_loc (loc, built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
tseq = NULL;
tree block;
tree name, lock, unlock;
gimple stmt = gsi_stmt (*gsi_p), bind;
+ location_t loc = gimple_location (stmt);
gimple_seq tbody;
struct gimplify_ctx gctx;
decl = (tree) n->value;
lock = built_in_decls[BUILT_IN_GOMP_CRITICAL_NAME_START];
- lock = build_call_expr (lock, 1, build_fold_addr_expr (decl));
+ lock = build_call_expr_loc (loc, lock, 1, build_fold_addr_expr_loc (loc, decl));
unlock = built_in_decls[BUILT_IN_GOMP_CRITICAL_NAME_END];
- unlock = build_call_expr (unlock, 1, build_fold_addr_expr (decl));
+ unlock = build_call_expr_loc (loc, unlock, 1,
+ build_fold_addr_expr_loc (loc, decl));
}
else
{
lock = built_in_decls[BUILT_IN_GOMP_CRITICAL_START];
- lock = build_call_expr (lock, 0);
+ lock = build_call_expr_loc (loc, lock, 0);
unlock = built_in_decls[BUILT_IN_GOMP_CRITICAL_END];
- unlock = build_call_expr (unlock, 0);
+ unlock = build_call_expr_loc (loc, unlock, 0);
}
push_gimplify_context (&gctx);
splay_tree_node n;
struct omp_taskcopy_context tcctx;
struct gimplify_ctx gctx;
+ location_t loc = gimple_location (task_stmt);
child_fn = gimple_omp_task_copy_fn (task_stmt);
child_cfun = DECL_STRUCT_FUNCTION (child_fn);
n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
sf = (tree) n->value;
sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
- src = build_fold_indirect_ref (sarg);
+ src = build_fold_indirect_ref_loc (loc, sarg);
src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
append_to_statement_list (t, &list);
sf = (tree) n->value;
if (tcctx.cb.decl_map)
sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
- src = build_fold_indirect_ref (sarg);
+ src = build_fold_indirect_ref_loc (loc, sarg);
src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
- dst = build_fold_indirect_ref (arg);
+ dst = build_fold_indirect_ref_loc (loc, arg);
dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
append_to_statement_list (t, &list);
sf = (tree) n->value;
if (tcctx.cb.decl_map)
sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
- src = build_fold_indirect_ref (sarg);
+ src = build_fold_indirect_ref_loc (loc, sarg);
src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
if (use_pointer_for_field (decl, NULL) || is_reference (decl))
- src = build_fold_indirect_ref (src);
+ src = build_fold_indirect_ref_loc (loc, src);
}
else
src = decl;
- dst = build_fold_indirect_ref (arg);
+ dst = build_fold_indirect_ref_loc (loc, arg);
dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
append_to_statement_list (t, &list);
sf = (tree) n->value;
if (tcctx.cb.decl_map)
sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
- src = build_fold_indirect_ref (sarg);
+ src = build_fold_indirect_ref_loc (loc, sarg);
src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
if (use_pointer_for_field (decl, NULL))
- src = build_fold_indirect_ref (src);
+ src = build_fold_indirect_ref_loc (loc, src);
}
else
src = decl;
- dst = build_fold_indirect_ref (arg);
+ dst = build_fold_indirect_ref_loc (loc, arg);
dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
append_to_statement_list (t, &list);
(splay_tree_key) TREE_OPERAND (ind, 0));
sf = (tree) n->value;
sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
- src = build_fold_indirect_ref (sarg);
+ src = build_fold_indirect_ref_loc (loc, sarg);
src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
- src = build_fold_indirect_ref (src);
- dst = build_fold_indirect_ref (arg);
+ src = build_fold_indirect_ref_loc (loc, src);
+ dst = build_fold_indirect_ref_loc (loc, arg);
dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
append_to_statement_list (t, &list);
(splay_tree_key) TREE_OPERAND (ind, 0));
df = (tree) n->value;
df = *(tree *) pointer_map_contains (tcctx.cb.decl_map, df);
- ptr = build_fold_indirect_ref (arg);
+ ptr = build_fold_indirect_ref_loc (loc, arg);
ptr = build3 (COMPONENT_REF, TREE_TYPE (df), ptr, df, NULL);
t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
- build_fold_addr_expr (dst));
+ build_fold_addr_expr_loc (loc, dst));
append_to_statement_list (t, &list);
}
gimple par_bind, bind;
gimple_seq par_body, olist, ilist, par_olist, par_ilist, new_body;
struct gimplify_ctx gctx;
+ location_t loc = gimple_location (stmt);
clauses = gimple_omp_taskreg_clauses (stmt);
par_bind = gimple_seq_first_stmt (gimple_omp_body (stmt));
if (ctx->record_type)
{
- t = build_fold_addr_expr (ctx->sender_decl);
+ t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
/* fixup_child_record_type might have changed receiver_decl's type. */
- t = fold_convert (TREE_TYPE (ctx->receiver_decl), t);
+ t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
gimple_seq_add_stmt (&new_body,
gimple_build_assign (ctx->receiver_decl, t));
}
VEC_safe_push (tree, gc, size_functions, fndecl);
/* Replace the original expression with a call to the size function. */
- return build_function_call_expr (fndecl, arg_list);
+ return build_function_call_expr (input_location, fndecl, arg_list);
}
/* Take, queue and compile all the size functions. It is essential that
tree type = TREE_TYPE (decl);
enum tree_code code = TREE_CODE (decl);
rtx rtl = NULL_RTX;
+ location_t loc = DECL_SOURCE_LOCATION (decl);
if (code == CONST_DECL)
return;
}
else if (DECL_SIZE_UNIT (decl) == 0)
DECL_SIZE_UNIT (decl)
- = fold_convert (sizetype, size_binop (CEIL_DIV_EXPR, DECL_SIZE (decl),
- bitsize_unit_node));
+ = fold_convert_loc (loc, sizetype,
+ size_binop_loc (loc, CEIL_DIV_EXPR, DECL_SIZE (decl),
+ bitsize_unit_node));
if (code != FIELD_DECL)
/* For non-fields, update the alignment from the type. */
if (TREE_CODE (rli->t) == UNION_TYPE)
rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field));
else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
- rli->offset = fold_build3 (COND_EXPR, sizetype,
+ rli->offset = fold_build3_loc (input_location, COND_EXPR, sizetype,
DECL_QUALIFIER (field),
DECL_SIZE_UNIT (field), rli->offset);
}
field);
}
else
- rli->bitpos = round_up (rli->bitpos, type_align);
+ rli->bitpos = round_up_loc (input_location, rli->bitpos, type_align);
}
if (! DECL_PACKED (field))
if (maximum_field_alignment != 0)
type_align = MIN (type_align, maximum_field_alignment);
- rli->bitpos = round_up (rli->bitpos, type_align);
+ rli->bitpos = round_up_loc (input_location, rli->bitpos, type_align);
/* If we really aligned, don't allow subsequent bitfields
to undo that. */
= size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node);
/* Round the size up to be a multiple of the required alignment. */
- TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
+ TYPE_SIZE (rli->t) = round_up_loc (input_location, unpadded_size,
+ TYPE_ALIGN (rli->t));
TYPE_SIZE_UNIT (rli->t)
- = round_up (unpadded_size_unit, TYPE_ALIGN_UNIT (rli->t));
+ = round_up_loc (input_location, unpadded_size_unit, TYPE_ALIGN_UNIT (rli->t));
if (TREE_CONSTANT (unpadded_size)
&& simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0)
rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
#endif
- unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align);
+ unpacked_size = round_up_loc (input_location, TYPE_SIZE (rli->t), rli->unpacked_align);
if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
{
TYPE_PACKED (rli->t) = 0;
if (TYPE_SIZE (type) != 0)
{
- TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
- TYPE_SIZE_UNIT (type) = round_up (TYPE_SIZE_UNIT (type),
+ TYPE_SIZE (type) = round_up_loc (input_location,
+ TYPE_SIZE (type), TYPE_ALIGN (type));
+ TYPE_SIZE_UNIT (type) = round_up_loc (input_location, TYPE_SIZE_UNIT (type),
TYPE_ALIGN_UNIT (type));
}
that (possible) negative values are handled appropriately. */
length = size_binop (PLUS_EXPR, size_one_node,
fold_convert (sizetype,
- fold_build2 (MINUS_EXPR,
+ fold_build2_loc (input_location,
+ MINUS_EXPR,
TREE_TYPE (lb),
ub, lb)));
+2009-07-17 Aldy Hernandez <aldyh@redhat.com>
+ Manuel López-Ibáñez <manu@gcc.gnu.org>
+
+ PR 40435
+ * gcc.dg/pr36902.c: Add column info.
+ * g++.dg/gcov/gcov-2.C: Change count for definition.
+
2009-07-16 Jason Merrill <jason@redhat.com>
PR libstdc++/37907
void foo()
{
- C c; /* count(1) */
+ C c; /* count(2) */
c.seti (1); /* count(1) */
}
*to = *from;
break;
case 5:
- to[4] = from [4]; /* { dg-warning "array subscript is above array bounds" } */
+ to[4] = from [4]; /* { dg-warning "20:array subscript is above array bounds" } */
break;
}
return to;
if (stmt && gimple_code (stmt) == GIMPLE_COND)
{
+ location_t loc = gimple_location (stmt);
tree cond;
bool zerop, onep;
fold_defer_overflow_warnings ();
- cond = fold_binary (gimple_cond_code (stmt), boolean_type_node,
+ cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
if (cond)
{
tree type, tree a, tree b, tree c)
{
tree ret;
+ location_t loc = gimple_location (gsi_stmt (*gsi));
- ret = fold_build3 (code, type, a, b, c);
+ ret = fold_build3_loc (loc, code, type, a, b, c);
STRIP_NOPS (ret);
return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
{
tree ret;
- ret = fold_build2 (code, type, a, b);
+ ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
STRIP_NOPS (ret);
return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
{
tree ret;
- ret = fold_build1 (code, type, a);
+ ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
STRIP_NOPS (ret);
return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
/* Examine |br| < |bi|, and branch. */
t1 = gimplify_build1 (gsi, ABS_EXPR, inner_type, br);
t2 = gimplify_build1 (gsi, ABS_EXPR, inner_type, bi);
- compare = fold_build2 (LT_EXPR, boolean_type_node, t1, t2);
+ compare = fold_build2_loc (gimple_location (gsi_stmt (*gsi)),
+ LT_EXPR, boolean_type_node, t1, t2);
STRIP_NOPS (compare);
bb_cond = bb_true = bb_false = bb_join = NULL;
gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
- cond = fold_build2 (EQ_EXPR, boolean_type_node, tmp, boolean_true_node);
+ cond = fold_build2_loc (gimple_location (stmt),
+ EQ_EXPR, boolean_type_node, tmp, boolean_true_node);
stmt = gimple_build_cond_from_tree (cond, NULL_TREE, NULL_TREE);
gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
{
tree c, c2;
edge true_edge, false_edge;
+ location_t loc = gimple_location (stmt);
gcc_assert (gimple_code (stmt) == GIMPLE_COND);
- c = fold_build2 (gimple_cond_code (stmt), boolean_type_node,
+ c = fold_build2_loc (loc, gimple_cond_code (stmt), boolean_type_node,
gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
extract_true_false_edges_from_block (gimple_bb (stmt),
add_to_dst_predicate_list (loop, true_edge, cond, c, gsi);
/* If 'c' is false then FALSE_EDGE is taken. */
- c2 = invert_truthvalue (unshare_expr (c));
+ c2 = invert_truthvalue_loc (loc, unshare_expr (c));
add_to_dst_predicate_list (loop, false_edge, cond, c2, gsi);
/* Now this conditional statement is redundant. Remove it.
tree cond = (tree) bb->aux;
if (cond)
- cond = fold_build2 (TRUTH_OR_EXPR, boolean_type_node,
+ cond = fold_build2_loc (EXPR_LOCATION (cond),
+ TRUTH_OR_EXPR, boolean_type_node,
unshare_expr (cond), new_cond);
else
cond = new_cond;
{
if (TREE_CODE (new_tree) == ADDR_EXPR)
{
- *tp = fold_indirect_ref_1 (type, new_tree);
+ *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
+ type, new_tree);
/* ??? We should either assert here or build
a VIEW_CONVERT_EXPR instead of blindly leaking
incompatible types to our IL. */
{
if (TREE_CODE (new_tree) == ADDR_EXPR)
{
- *tp = fold_indirect_ref_1 (type, new_tree);
+ *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
+ type, new_tree);
/* ??? We should either assert here or build
a VIEW_CONVERT_EXPR instead of blindly leaking
incompatible types to our IL. */
/* Build the size argument for a memset call. */
static inline tree
-build_size_arg (tree nb_iter, tree op, gimple_seq* stmt_list)
+build_size_arg_loc (location_t loc, tree nb_iter, tree op, gimple_seq* stmt_list)
{
tree nb_bytes;
gimple_seq stmts = NULL;
- nb_bytes = fold_build2 (MULT_EXPR, size_type_node,
- fold_convert (size_type_node, nb_iter),
- fold_convert (size_type_node,
- TYPE_SIZE_UNIT (TREE_TYPE (op))));
+ nb_bytes = fold_build2_loc (loc, MULT_EXPR, size_type_node,
+ fold_convert_loc (loc, size_type_node, nb_iter),
+ fold_convert_loc (loc, size_type_node,
+ TYPE_SIZE_UNIT (TREE_TYPE (op))));
nb_bytes = force_gimple_operand (nb_bytes, &stmts, true, NULL);
gimple_seq_add_seq (stmt_list, stmts);
tree mem, fndecl, fntype, fn;
gimple_stmt_iterator i;
struct data_reference *dr = XCNEW (struct data_reference);
+ location_t loc = gimple_location (stmt);
DR_STMT (dr) = stmt;
DR_REF (dr) = op0;
goto end;
/* Test for a positive stride, iterating over every element. */
- if (integer_zerop (fold_build2 (MINUS_EXPR, integer_type_node, DR_STEP (dr),
+ if (integer_zerop (fold_build2_loc (loc,
+ MINUS_EXPR, integer_type_node, DR_STEP (dr),
TYPE_SIZE_UNIT (TREE_TYPE (op0)))))
{
- tree offset = fold_convert (sizetype,
- size_binop (PLUS_EXPR,
- DR_OFFSET (dr),
- DR_INIT (dr)));
- addr_base = fold_build2 (POINTER_PLUS_EXPR,
+ tree offset = fold_convert_loc (loc, sizetype,
+ size_binop_loc (loc, PLUS_EXPR,
+ DR_OFFSET (dr),
+ DR_INIT (dr)));
+ addr_base = fold_build2_loc (loc, POINTER_PLUS_EXPR,
TREE_TYPE (DR_BASE_ADDRESS (dr)),
DR_BASE_ADDRESS (dr), offset);
}
/* Test for a negative stride, iterating over every element. */
- else if (integer_zerop (fold_build2 (PLUS_EXPR, integer_type_node,
+ else if (integer_zerop (fold_build2_loc (loc, PLUS_EXPR, integer_type_node,
TYPE_SIZE_UNIT (TREE_TYPE (op0)),
DR_STEP (dr))))
{
- nb_bytes = build_size_arg (nb_iter, op0, &stmt_list);
- addr_base = size_binop (PLUS_EXPR, DR_OFFSET (dr), DR_INIT (dr));
- addr_base = fold_build2 (MINUS_EXPR, sizetype, addr_base,
- fold_convert (sizetype, nb_bytes));
+ nb_bytes = build_size_arg_loc (loc, nb_iter, op0, &stmt_list);
+ addr_base = size_binop_loc (loc, PLUS_EXPR, DR_OFFSET (dr), DR_INIT (dr));
+ addr_base = fold_build2_loc (loc, MINUS_EXPR, sizetype, addr_base,
+ fold_convert_loc (loc, sizetype, nb_bytes));
addr_base = force_gimple_operand (addr_base, &stmts, true, NULL);
gimple_seq_add_seq (&stmt_list, stmts);
- addr_base = fold_build2 (POINTER_PLUS_EXPR,
+ addr_base = fold_build2_loc (loc, POINTER_PLUS_EXPR,
TREE_TYPE (DR_BASE_ADDRESS (dr)),
DR_BASE_ADDRESS (dr), addr_base);
}
fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
if (!nb_bytes)
- nb_bytes = build_size_arg (nb_iter, op0, &stmt_list);
+ nb_bytes = build_size_arg_loc (loc, nb_iter, op0, &stmt_list);
fn_call = gimple_build_call (fn, 3, mem, integer_zero_node, nb_bytes);
gimple_seq_add_stmt (&stmt_list, fn_call);
/* Build: __mf_base = (uintptr_t) <base address expression>. */
seq = gimple_seq_alloc ();
- t = fold_convert (mf_uintptr_type, unshare_expr (base));
+ t = fold_convert_loc (location, mf_uintptr_type,
+ unshare_expr (base));
t = force_gimple_operand (t, &stmts, false, NULL_TREE);
gimple_seq_add_seq (&seq, stmts);
g = gimple_build_assign (mf_base, t);
gimple_seq_add_stmt (&seq, g);
/* Build: __mf_limit = (uintptr_t) <limit address expression>. */
- t = fold_convert (mf_uintptr_type, unshare_expr (limit));
+ t = fold_convert_loc (location, mf_uintptr_type,
+ unshare_expr (limit));
t = force_gimple_operand (t, &stmts, false, NULL_TREE);
gimple_seq_add_seq (&seq, stmts);
g = gimple_build_assign (mf_limit, t);
/* u is a string, so it is already a gimple value. */
u = mf_file_function_line_tree (location);
/* NB: we pass the overall [base..limit] range to mf_check. */
- v = fold_build2 (PLUS_EXPR, mf_uintptr_type,
- fold_build2 (MINUS_EXPR, mf_uintptr_type, mf_limit, mf_base),
+ v = fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
+ fold_build2_loc (location,
+ MINUS_EXPR, mf_uintptr_type, mf_limit, mf_base),
build_int_cst (mf_uintptr_type, 1));
v = force_gimple_operand (v, &stmts, true, NULL_TREE);
gimple_seq_add_seq (&seq, stmts);
if (elt)
elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
elt);
- addr = fold_convert (ptr_type_node, elt ? elt : base);
- addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
- addr, fold_convert (sizetype,
- byte_position (field)));
+ addr = fold_convert_loc (location, ptr_type_node, elt ? elt : base);
+ addr = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
+ addr, fold_convert_loc (location, sizetype,
+ byte_position (field)));
}
else
addr = build1 (ADDR_EXPR, build_pointer_type (type), t);
- limit = fold_build2 (MINUS_EXPR, mf_uintptr_type,
- fold_build2 (PLUS_EXPR, mf_uintptr_type,
+ limit = fold_build2_loc (location, MINUS_EXPR, mf_uintptr_type,
+ fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
convert (mf_uintptr_type, addr),
size),
integer_one_node);
case INDIRECT_REF:
addr = TREE_OPERAND (t, 0);
base = addr;
- limit = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
- fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, base,
+ limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
+ fold_build2_loc (location,
+ POINTER_PLUS_EXPR, ptr_type_node, base,
size),
size_int (-1));
break;
case TARGET_MEM_REF:
addr = tree_mem_ref_addr (ptr_type_node, t);
base = addr;
- limit = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
- fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, base,
+ limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
+ fold_build2_loc (location,
+ POINTER_PLUS_EXPR, ptr_type_node, base,
size),
size_int (-1));
break;
bpu = bitsize_int (BITS_PER_UNIT);
ofs = convert (bitsizetype, TREE_OPERAND (t, 2));
- rem = size_binop (TRUNC_MOD_EXPR, ofs, bpu);
- ofs = fold_convert (sizetype, size_binop (TRUNC_DIV_EXPR, ofs, bpu));
+ rem = size_binop_loc (location, TRUNC_MOD_EXPR, ofs, bpu);
+ ofs = fold_convert_loc (location,
+ sizetype,
+ size_binop_loc (location,
+ TRUNC_DIV_EXPR, ofs, bpu));
size = convert (bitsizetype, TREE_OPERAND (t, 1));
- size = size_binop (PLUS_EXPR, size, rem);
- size = size_binop (CEIL_DIV_EXPR, size, bpu);
+ size = size_binop_loc (location, PLUS_EXPR, size, rem);
+ size = size_binop_loc (location, CEIL_DIV_EXPR, size, bpu);
size = convert (sizetype, size);
addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
addr = convert (ptr_type_node, addr);
- addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, addr, ofs);
+ addr = fold_build2_loc (location, POINTER_PLUS_EXPR,
+ ptr_type_node, addr, ofs);
base = addr;
- limit = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
- fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
+ limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
+ fold_build2_loc (location,
+ POINTER_PLUS_EXPR, ptr_type_node,
base, size),
size_int (-1));
}
build_ref_for_offset (tree *expr, tree type, HOST_WIDE_INT offset,
tree exp_type, bool allow_ptr)
{
+ location_t loc = expr ? EXPR_LOCATION (*expr) : UNKNOWN_LOCATION;
+
if (allow_ptr && POINTER_TYPE_P (type))
{
type = TREE_TYPE (type);
if (expr)
- *expr = fold_build1 (INDIRECT_REF, type, *expr);
+ *expr = fold_build1_loc (loc, INDIRECT_REF, type, *expr);
}
return build_ref_for_offset_1 (expr, type, offset, exp_type);
enum unscalarized_data_handling *refreshed,
tree lhs)
{
+ location_t loc = EXPR_LOCATION (lacc->expr);
do
{
if (lacc->grp_to_be_replaced)
{
rhs = get_access_replacement (racc);
if (!useless_type_conversion_p (lacc->type, racc->type))
- rhs = fold_build1 (VIEW_CONVERT_EXPR, lacc->type, rhs);
+ rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
}
else
{
tree lhs, rhs;
bool modify_this_stmt = false;
bool force_gimple_rhs = false;
+ location_t loc = gimple_location (*stmt);
if (!gimple_assign_single_p (*stmt))
return SRA_SA_NONE;
}
if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
{
- rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
+ rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
if (!is_gimple_reg (lhs))
force_gimple_rhs = true;
}
static tree
ccp_fold (gimple stmt)
{
+ location_t loc = gimple_location (stmt);
switch (gimple_code (stmt))
{
case GIMPLE_ASSIGN:
{
prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
if (val->lattice_val == CONSTANT)
- return fold_unary (TREE_CODE (rhs),
+ return fold_unary_loc (EXPR_LOCATION (rhs),
+ TREE_CODE (rhs),
TREE_TYPE (rhs), val->value);
}
else if (TREE_CODE (rhs) == INDIRECT_REF
if (!useless_type_conversion_p (TREE_TYPE (lhs),
TREE_TYPE (op0))
&& ((tem = maybe_fold_offset_to_address
- (gimple_location (stmt),
+ (loc,
op0, integer_zero_node, TREE_TYPE (lhs)))
!= NULL_TREE))
return tem;
return op0;
}
- return fold_unary_ignore_overflow (subcode,
- gimple_expr_type (stmt), op0);
+ return
+ fold_unary_ignore_overflow_loc (loc, subcode,
+ gimple_expr_type (stmt), op0);
}
case GIMPLE_BINARY_RHS:
{
tree lhs = gimple_assign_lhs (stmt);
tree tem = maybe_fold_offset_to_address
- (gimple_location (stmt), op0, op1, TREE_TYPE (lhs));
+ (loc, op0, op1, TREE_TYPE (lhs));
if (tem != NULL_TREE)
return tem;
}
- return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
+ return fold_binary_loc (loc, subcode,
+ gimple_expr_type (stmt), op0, op1);
}
default:
args[i] = val->value;
}
}
- call = build_call_array (gimple_call_return_type (stmt),
- fn, gimple_call_num_args (stmt), args);
- retval = fold_call_expr (call, false);
+ call = build_call_array_loc (loc,
+ gimple_call_return_type (stmt),
+ fn, gimple_call_num_args (stmt), args);
+ retval = fold_call_expr (EXPR_LOCATION (call), call, false);
if (retval)
/* fold_call_expr wraps the result inside a NOP_EXPR. */
STRIP_NOPS (retval);
op1 = val->value;
}
- return fold_binary (code, boolean_type_node, op0, op1);
+ return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
}
case GIMPLE_SWITCH:
{
tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
if (c && TREE_CODE (c) == COMPLEX_CST)
- return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
+ return fold_build1_loc (EXPR_LOCATION (t),
+ TREE_CODE (t), TREE_TYPE (t), c);
break;
}
ptr_type = build_pointer_type (TREE_TYPE (t));
if (!useless_type_conversion_p (orig_type, ptr_type))
return NULL_TREE;
- t = build_fold_addr_expr_with_type (t, ptr_type);
- protected_set_expr_location (t, loc);
- return t;
+ return build_fold_addr_expr_with_type_loc (loc, t, ptr_type);
}
return NULL_TREE;
bitmap visited;
bool ignore;
int nargs;
+ location_t loc = gimple_location (stmt);
gcc_assert (is_gimple_call (stmt));
case BUILT_IN_STRCPY:
if (val[1] && is_gimple_val (val[1]) && nargs == 2)
- result = fold_builtin_strcpy (callee,
+ result = fold_builtin_strcpy (loc, callee,
gimple_call_arg (stmt, 0),
gimple_call_arg (stmt, 1),
val[1]);
case BUILT_IN_STRNCPY:
if (val[1] && is_gimple_val (val[1]) && nargs == 3)
- result = fold_builtin_strncpy (callee,
+ result = fold_builtin_strncpy (loc, callee,
gimple_call_arg (stmt, 0),
gimple_call_arg (stmt, 1),
gimple_call_arg (stmt, 2),
case BUILT_IN_FPUTS:
if (nargs == 2)
- result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
+ result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
gimple_call_arg (stmt, 1),
ignore, false, val[0]);
break;
case BUILT_IN_FPUTS_UNLOCKED:
if (nargs == 2)
- result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
+ result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
gimple_call_arg (stmt, 1),
ignore, true, val[0]);
break;
case BUILT_IN_MEMMOVE_CHK:
case BUILT_IN_MEMSET_CHK:
if (val[2] && is_gimple_val (val[2]) && nargs == 4)
- result = fold_builtin_memory_chk (callee,
+ result = fold_builtin_memory_chk (loc, callee,
gimple_call_arg (stmt, 0),
gimple_call_arg (stmt, 1),
gimple_call_arg (stmt, 2),
case BUILT_IN_STRCPY_CHK:
case BUILT_IN_STPCPY_CHK:
if (val[1] && is_gimple_val (val[1]) && nargs == 3)
- result = fold_builtin_stxcpy_chk (callee,
+ result = fold_builtin_stxcpy_chk (loc, callee,
gimple_call_arg (stmt, 0),
gimple_call_arg (stmt, 1),
gimple_call_arg (stmt, 2),
case BUILT_IN_STRNCPY_CHK:
if (val[2] && is_gimple_val (val[2]) && nargs == 4)
- result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
+ result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
gimple_call_arg (stmt, 1),
gimple_call_arg (stmt, 2),
gimple_call_arg (stmt, 3),
{
gimple stmt = gsi_stmt (*si);
enum tree_code subcode = gimple_assign_rhs_code (stmt);
+ location_t loc = gimple_location (stmt);
tree result = NULL_TREE;
tree op0 = COND_EXPR_COND (rhs);
tree tem;
bool set = false;
+ location_t cond_loc = EXPR_LOCATION (rhs);
if (COMPARISON_CLASS_P (op0))
{
fold_defer_overflow_warnings ();
- tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
+ tem = fold_binary_loc (cond_loc,
+ TREE_CODE (op0), TREE_TYPE (op0),
TREE_OPERAND (op0, 0),
TREE_OPERAND (op0, 1));
/* This is actually a conditional expression, not a GIMPLE
return NULL_TREE;
if (set)
- result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), tem,
+ result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
}
tree tem = maybe_fold_reference (TREE_OPERAND (rhs, 0), true);
if (tem)
result = fold_convert (TREE_TYPE (rhs),
- build_fold_addr_expr (tem));
+ build_fold_addr_expr_loc (loc, tem));
}
else if (TREE_CODE (rhs) == CONSTRUCTOR
{
tree rhs = gimple_assign_rhs1 (stmt);
- result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
+ result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
if (result)
{
/* If the operation was a conversion do _not_ mark a
&& POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
{
tree type = gimple_expr_type (stmt);
- tree t = maybe_fold_offset_to_address (gimple_location (stmt),
+ tree t = maybe_fold_offset_to_address (loc,
gimple_assign_rhs1 (stmt),
integer_zero_node, type);
if (t)
}
if (!result)
- result = fold_binary (subcode,
+ result = fold_binary_loc (loc, subcode,
TREE_TYPE (gimple_assign_lhs (stmt)),
gimple_assign_rhs1 (stmt),
gimple_assign_rhs2 (stmt));
static bool
fold_gimple_cond (gimple stmt)
{
- tree result = fold_binary (gimple_cond_code (stmt),
+ tree result = fold_binary_loc (gimple_location (stmt),
+ gimple_cond_code (stmt),
boolean_type_node,
gimple_cond_lhs (stmt),
gimple_cond_rhs (stmt));
{
tree callee, lhs, rhs, cfun_va_list;
bool va_list_simple_ptr;
+ location_t loc = gimple_location (call);
if (gimple_code (call) != GIMPLE_CALL)
return NULL_TREE;
!= TYPE_MAIN_VARIANT (cfun_va_list))
return NULL_TREE;
- lhs = build_fold_indirect_ref (lhs);
- rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
+ lhs = build_fold_indirect_ref_loc (loc, lhs);
+ rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
1, integer_zero_node);
- rhs = fold_convert (TREE_TYPE (lhs), rhs);
+ rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
case BUILT_IN_VA_COPY:
!= TYPE_MAIN_VARIANT (cfun_va_list))
return NULL_TREE;
- lhs = build_fold_indirect_ref (lhs);
+ lhs = build_fold_indirect_ref_loc (loc, lhs);
rhs = gimple_call_arg (call, 1);
if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
!= TYPE_MAIN_VARIANT (cfun_va_list))
return NULL_TREE;
- rhs = fold_convert (TREE_TYPE (lhs), rhs);
+ rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
case BUILT_IN_VA_END:
copy_prop_visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
enum ssa_prop_result retval = SSA_PROP_VARYING;
+ location_t loc = gimple_location (stmt);
tree op0 = gimple_cond_lhs (stmt);
tree op1 = gimple_cond_rhs (stmt);
the same SSA_NAME on both sides of a comparison operator. */
if (op0 == op1)
{
- tree folded_cond = fold_binary (gimple_cond_code (stmt),
+ tree folded_cond = fold_binary_loc (loc, gimple_cond_code (stmt),
boolean_type_node, op0, op1);
if (folded_cond)
{
if (! gsi_end_p (gsi))
{
gimple stmt = gsi_stmt (gsi);
+ location_t loc = gimple_location (stmt);
if (gimple_code (stmt) == GIMPLE_SWITCH)
{
if (label != NULL && label != error_mark_node)
{
- tree x = fold_convert (TREE_TYPE (index), CASE_LOW (label));
+ tree x = fold_convert_loc (loc, TREE_TYPE (index),
+ CASE_LOW (label));
edge_info = allocate_edge_info (e);
edge_info->lhs = index;
edge_info->rhs = x;
|| is_gimple_min_invariant (op1)))
{
tree cond = build2 (code, boolean_type_node, op0, op1);
- tree inverted = invert_truthvalue (cond);
+ tree inverted = invert_truthvalue_loc (loc, cond);
struct edge_info *edge_info;
edge_info = allocate_edge_info (true_edge);
|| TREE_CODE (op1) == SSA_NAME))
{
tree cond = build2 (code, boolean_type_node, op0, op1);
- tree inverted = invert_truthvalue (cond);
+ tree inverted = invert_truthvalue_loc (loc, cond);
struct edge_info *edge_info;
edge_info = allocate_edge_info (true_edge);
tree val = NULL;
if (gimple_code (stmt) == GIMPLE_COND)
- val = fold_binary (gimple_cond_code (stmt), boolean_type_node,
+ val = fold_binary_loc (gimple_location (stmt),
+ gimple_cond_code (stmt), boolean_type_node,
gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
else if (gimple_code (stmt) == GIMPLE_SWITCH)
val = gimple_switch_index (stmt);
tree val;
if (gimple_code (use_stmt) == GIMPLE_COND)
- val = fold_binary (gimple_cond_code (use_stmt),
+ val = fold_binary_loc (gimple_location (use_stmt),
+ gimple_cond_code (use_stmt),
boolean_type_node,
gimple_cond_lhs (use_stmt),
gimple_cond_rhs (use_stmt));
static tree
rhs_to_tree (tree type, gimple stmt)
{
+ location_t loc = gimple_location (stmt);
enum tree_code code = gimple_assign_rhs_code (stmt);
if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
- return fold_build2 (code, type, gimple_assign_rhs1 (stmt),
+ return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
gimple_assign_rhs2 (stmt));
else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
return build1 (code, type, gimple_assign_rhs1 (stmt));
considered simplified. */
static tree
-combine_cond_expr_cond (enum tree_code code, tree type,
+combine_cond_expr_cond (location_t loc, enum tree_code code, tree type,
tree op0, tree op1, bool invariant_only)
{
tree t;
gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
- t = fold_binary (code, type, op0, op1);
+ t = fold_binary_loc (loc, code, type, op0, op1);
if (!t)
return NULL_TREE;
static int
forward_propagate_into_gimple_cond (gimple stmt)
{
- int did_something = 0;
+ int did_something = 0;
+ location_t loc = gimple_location (stmt);
do {
tree tmp = NULL_TREE;
{
tree op1 = gimple_cond_rhs (stmt);
rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
- tmp = combine_cond_expr_cond (code, boolean_type_node, rhs0,
+ tmp = combine_cond_expr_cond (loc, code, boolean_type_node, rhs0,
op1, !single_use0_p);
}
/* If that wasn't successful, try the second operand. */
return did_something;
rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
- tmp = combine_cond_expr_cond (code, boolean_type_node, op0, rhs1,
- !single_use1_p);
+ tmp = combine_cond_expr_cond (loc, code, boolean_type_node, op0,
+ rhs1, !single_use1_p);
}
/* If that wasn't successful either, try both operands. */
if (tmp == NULL_TREE
&& rhs0 != NULL_TREE
&& rhs1 != NULL_TREE)
- tmp = combine_cond_expr_cond (code, boolean_type_node, rhs0,
- fold_convert (TREE_TYPE (rhs0), rhs1),
+ tmp = combine_cond_expr_cond (loc, code, boolean_type_node, rhs0,
+ fold_convert_loc (loc,
+ TREE_TYPE (rhs0),
+ rhs1),
!(single_use0_p && single_use1_p));
}
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
gimple stmt = gsi_stmt (*gsi_p);
+ location_t loc = gimple_location (stmt);
int did_something = 0;
do {
{
tree op1 = TREE_OPERAND (cond, 1);
rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
- tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
+ tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
+ boolean_type_node,
rhs0, op1, !single_use0_p);
}
/* If that wasn't successful, try the second operand. */
return did_something;
rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
- tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
+ tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
+ boolean_type_node,
op0, rhs1, !single_use1_p);
}
/* If that wasn't successful either, try both operands. */
if (tmp == NULL_TREE
&& rhs0 != NULL_TREE
&& rhs1 != NULL_TREE)
- tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
- rhs0, fold_convert (TREE_TYPE (rhs0),
- rhs1),
+ tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
+ boolean_type_node,
+ rhs0,
+ fold_convert_loc (loc,
+ TREE_TYPE (rhs0),
+ rhs1),
!(single_use0_p && single_use1_p));
}
else if (TREE_CODE (cond) == SSA_NAME)
return did_something;
rhs0 = gimple_assign_rhs1 (def_stmt);
- tmp = combine_cond_expr_cond (NE_EXPR, boolean_type_node, rhs0,
+ tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0,
build_int_cst (TREE_TYPE (rhs0), 0),
false);
}
gimple_assign_rhs1 (stmt),
gimple_assign_rhs2 (stmt));
- tmp = combine_cond_expr_cond (code, TREE_TYPE (lhs), cond, cst, false);
+ tmp = combine_cond_expr_cond (gimple_location (use_stmt),
+ code, TREE_TYPE (lhs),
+ cond, cst, false);
if (tmp == NULL_TREE)
return false;
}
}
}
- res = fold_binary (BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)),
+ res = fold_binary_loc (gimple_location (stmt),
+ BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)),
arg1, arg2);
if (res && is_gimple_min_invariant (res))
{
enum tree_code code2 = gimple_cond_code (outer_cond);
tree t;
- if (!(t = combine_comparisons (TRUTH_ANDIF_EXPR, code1, code2,
+ if (!(t = combine_comparisons (UNKNOWN_LOCATION,
+ TRUTH_ANDIF_EXPR, code1, code2,
boolean_type_node,
gimple_cond_lhs (outer_cond),
gimple_cond_rhs (outer_cond))))
enum tree_code code2 = gimple_cond_code (outer_cond);
tree t;
- if (!(t = combine_comparisons (TRUTH_ORIF_EXPR, code1, code2,
+ if (!(t = combine_comparisons (UNKNOWN_LOCATION,
+ TRUTH_ORIF_EXPR, code1, code2,
boolean_type_node,
gimple_cond_lhs (outer_cond),
gimple_cond_rhs (outer_cond))))
gimple stmt;
gimple_stmt_iterator gsi;
int i;
+ location_t loc = gimple_location (swtch);
gsi = gsi_for_stmt (swtch);
tmp = create_tmp_var (TREE_TYPE (info.index_expr), "csti");
add_referenced_var (tmp);
tidx = make_ssa_name (tmp, NULL);
- sub = fold_build2 (MINUS_EXPR, TREE_TYPE (info.index_expr), info.index_expr,
- fold_convert (TREE_TYPE (info.index_expr),
- info.range_min));
+ sub = fold_build2_loc (loc, MINUS_EXPR,
+ TREE_TYPE (info.index_expr), info.index_expr,
+ fold_convert_loc (loc, TREE_TYPE (info.index_expr),
+ info.range_min));
sub = force_gimple_operand_gsi (&gsi, sub,
false, NULL, true, GSI_SAME_STMT);
stmt = gimple_build_assign (tidx, sub);
gimple_stmt_iterator gsi;
basic_block bb0, bb1, bb2, bbf, bbd;
edge e01, e02, e21, e1d, e1f, e2f;
+ location_t loc = gimple_location (swtch);
gcc_assert (info.default_values);
bb0 = gimple_bb (swtch);
add_referenced_var (tmp_u_var);
tmp_u_1 = make_ssa_name (tmp_u_var, NULL);
- cast = fold_convert (utype, info.index_expr);
+ cast = fold_convert_loc (loc, utype, info.index_expr);
cast_assign = gimple_build_assign (tmp_u_1, cast);
SSA_NAME_DEF_STMT (tmp_u_1) = cast_assign;
gsi_insert_before (&gsi, cast_assign, GSI_SAME_STMT);
update_stmt (cast_assign);
- ulb = fold_convert (utype, info.range_min);
- minus = fold_build2 (MINUS_EXPR, utype, tmp_u_1, ulb);
+ ulb = fold_convert_loc (loc, utype, info.range_min);
+ minus = fold_build2_loc (loc, MINUS_EXPR, utype, tmp_u_1, ulb);
minus = force_gimple_operand_gsi (&gsi, minus, false, NULL, true,
GSI_SAME_STMT);
tmp_u_2 = make_ssa_name (tmp_u_var, NULL);
gsi_insert_before (&gsi, minus_assign, GSI_SAME_STMT);
update_stmt (minus_assign);
- bound = fold_convert (utype, info.range_size);
+ bound = fold_convert_loc (loc, utype, info.range_size);
cond_stmt = gimple_build_cond (LE_EXPR, tmp_u_2, bound, NULL_TREE, NULL_TREE);
gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
update_stmt (cond_stmt);
which are specified as a tree array ARGS. */
tree
-build_call_array (tree return_type, tree fn, int nargs, const tree *args)
+build_call_array_loc (location_t loc, tree return_type, tree fn,
+ int nargs, const tree *args)
{
tree t;
int i;
for (i = 0; i < nargs; i++)
CALL_EXPR_ARG (t, i) = args[i];
process_call_operands (t);
+ SET_EXPR_LOCATION (t, loc);
return t;
}
extern tree build_call_list (tree, tree, tree);
extern tree build_call_nary (tree, tree, int, ...);
extern tree build_call_valist (tree, tree, int, va_list);
-extern tree build_call_array (tree, tree, int, const tree *);
+#define build_call_array(T1,T2,N,T3)\
+ build_call_array_loc (UNKNOWN_LOCATION, T1, T2, N, T3)
+extern tree build_call_array_loc (location_t, tree, tree, int, const tree *);
extern tree build_call_vec (tree, tree, VEC(tree,gc) *);
/* Construct various nodes representing data types. */
/* Return an expr equal to X but certainly not valid as an lvalue. */
-extern tree non_lvalue (tree);
+#define non_lvalue(T) non_lvalue_loc (UNKNOWN_LOCATION, T)
+extern tree non_lvalue_loc (location_t, tree);
extern tree convert (tree, tree);
extern unsigned int expr_align (const_tree);
#define sbitsizetype sizetype_tab[(int) SBITSIZETYPE]
extern tree size_int_kind (HOST_WIDE_INT, enum size_type_kind);
-extern tree size_binop (enum tree_code, tree, tree);
-extern tree size_diffop (tree, tree);
+#define size_binop(CODE,T1,T2)\
+ size_binop_loc (UNKNOWN_LOCATION, CODE, T1, T2)
+extern tree size_binop_loc (location_t, enum tree_code, tree, tree);
+#define size_diffop(T1,T2)\
+ size_diffop_loc (UNKNOWN_LOCATION, T1, T2)
+extern tree size_diffop_loc (location_t, tree, tree);
#define size_int(L) size_int_kind (L, SIZETYPE)
#define ssize_int(L) size_int_kind (L, SSIZETYPE)
#define bitsize_int(L) size_int_kind (L, BITSIZETYPE)
#define sbitsize_int(L) size_int_kind (L, SBITSIZETYPE)
-extern tree round_up (tree, int);
-extern tree round_down (tree, int);
+#define round_up(T,N) round_up_loc (UNKNOWN_LOCATION, T, N)
+extern tree round_up_loc (location_t, tree, int);
+#define round_down(T,N) round_down_loc (UNKNOWN_LOCATION, T, N)
+extern tree round_down_loc (location_t, tree, int);
extern tree get_pending_sizes (void);
extern void put_pending_size (tree);
extern void put_pending_sizes (tree);
subexpressions are not changed. */
extern tree fold (tree);
-extern tree fold_unary (enum tree_code, tree, tree);
-extern tree fold_unary_ignore_overflow (enum tree_code, tree, tree);
-extern tree fold_binary (enum tree_code, tree, tree, tree);
-extern tree fold_ternary (enum tree_code, tree, tree, tree, tree);
-extern tree fold_build1_stat (enum tree_code, tree, tree MEM_STAT_DECL);
-#define fold_build1(c,t1,t2) fold_build1_stat (c, t1, t2 MEM_STAT_INFO)
-extern tree fold_build2_stat (enum tree_code, tree, tree, tree MEM_STAT_DECL);
-#define fold_build2(c,t1,t2,t3) fold_build2_stat (c, t1, t2, t3 MEM_STAT_INFO)
-extern tree fold_build3_stat (enum tree_code, tree, tree, tree, tree MEM_STAT_DECL);
-#define fold_build3(c,t1,t2,t3,t4) fold_build3_stat (c, t1, t2, t3, t4 MEM_STAT_INFO)
-extern tree fold_build1_initializer (enum tree_code, tree, tree);
-extern tree fold_build2_initializer (enum tree_code, tree, tree, tree);
-extern tree fold_build3_initializer (enum tree_code, tree, tree, tree, tree);
-extern tree fold_build_call_array (tree, tree, int, tree *);
-extern tree fold_build_call_array_initializer (tree, tree, int, tree *);
+#define fold_unary(CODE,T1,T2)\
+ fold_unary_loc (UNKNOWN_LOCATION, CODE, T1, T2)
+extern tree fold_unary_loc (location_t, enum tree_code, tree, tree);
+#define fold_unary_ignore_overflow(CODE,T1,T2)\
+ fold_unary_ignore_overflow_loc (UNKNOWN_LOCATION, CODE, T1, T2)
+extern tree fold_unary_ignore_overflow_loc (location_t, enum tree_code, tree, tree);
+#define fold_binary(CODE,T1,T2,T3)\
+ fold_binary_loc (UNKNOWN_LOCATION, CODE, T1, T2, T3)
+extern tree fold_binary_loc (location_t, enum tree_code, tree, tree, tree);
+#define fold_ternary(CODE,T1,T2,T3,T4)\
+ fold_ternary_loc (UNKNOWN_LOCATION, CODE, T1, T2, T3, T4)
+extern tree fold_ternary_loc (location_t, enum tree_code, tree, tree, tree, tree);
+#define fold_build1(c,t1,t2)\
+ fold_build1_stat_loc (UNKNOWN_LOCATION, c, t1, t2 MEM_STAT_INFO)
+#define fold_build1_loc(l,c,t1,t2)\
+ fold_build1_stat_loc (l, c, t1, t2 MEM_STAT_INFO)
+extern tree fold_build1_stat_loc (location_t, enum tree_code, tree,
+ tree MEM_STAT_DECL);
+#define fold_build2(c,t1,t2,t3)\
+ fold_build2_stat_loc (UNKNOWN_LOCATION, c, t1, t2, t3 MEM_STAT_INFO)
+#define fold_build2_loc(l,c,t1,t2,t3)\
+ fold_build2_stat_loc (l, c, t1, t2, t3 MEM_STAT_INFO)
+extern tree fold_build2_stat_loc (location_t, enum tree_code, tree, tree,
+ tree MEM_STAT_DECL);
+#define fold_build3(c,t1,t2,t3,t4)\
+ fold_build3_stat_loc (UNKNOWN_LOCATION, c, t1, t2, t3, t4 MEM_STAT_INFO)
+#define fold_build3_loc(l,c,t1,t2,t3,t4)\
+ fold_build3_stat_loc (l, c, t1, t2, t3, t4 MEM_STAT_INFO)
+extern tree fold_build3_stat_loc (location_t, enum tree_code, tree, tree, tree,
+ tree MEM_STAT_DECL);
+extern tree fold_build1_initializer_loc (location_t, enum tree_code, tree, tree);
+extern tree fold_build2_initializer_loc (location_t, enum tree_code, tree, tree, tree);
+extern tree fold_build3_initializer_loc (location_t, enum tree_code, tree, tree, tree, tree);
+#define fold_build_call_array(T1,T2,N,T4)\
+ fold_build_call_array_loc (UNKNOWN_LOCATION, T1, T2, N, T4)
+extern tree fold_build_call_array_loc (location_t, tree, tree, int, tree *);
+#define fold_build_call_array_initializer(T1,T2,N,T4)\
+ fold_build_call_array_initializer_loc (UNKNOWN_LOCATION, T1, T2, N, T4)
+extern tree fold_build_call_array_initializer_loc (location_t, tree, tree, int, tree *);
extern bool fold_convertible_p (const_tree, const_tree);
-extern tree fold_convert (tree, tree);
-extern tree fold_single_bit_test (enum tree_code, tree, tree, tree);
+#define fold_convert(T1,T2)\
+ fold_convert_loc(UNKNOWN_LOCATION, T1, T2)
+extern tree fold_convert_loc (location_t, tree, tree);
+extern tree fold_single_bit_test (location_t, enum tree_code, tree, tree, tree);
extern tree fold_ignored_result (tree);
extern tree fold_abs_const (tree, tree);
-extern tree fold_indirect_ref_1 (tree, tree);
+extern tree fold_indirect_ref_1 (location_t, tree, tree);
extern void fold_defer_overflow_warnings (void);
extern void fold_undefer_overflow_warnings (bool, const_gimple, int);
extern void fold_undefer_and_ignore_overflow_warnings (void);
extern int operand_equal_p (const_tree, const_tree, unsigned int);
extern int multiple_of_p (tree, const_tree, const_tree);
-extern tree omit_one_operand (tree, tree, tree);
-extern tree omit_two_operands (tree, tree, tree, tree);
-extern tree invert_truthvalue (tree);
-extern tree fold_truth_not_expr (tree);
+#define omit_one_operand(T1,T2,T3)\
+ omit_one_operand_loc (UNKNOWN_LOCATION, T1, T2, T3)
+extern tree omit_one_operand_loc (location_t, tree, tree, tree);
+#define omit_two_operands(T1,T2,T3,T4)\
+ omit_two_operands_loc (UNKNOWN_LOCATION, T1, T2, T3, T4)
+extern tree omit_two_operands_loc (location_t, tree, tree, tree, tree);
+#define invert_truthvalue(T)\
+ invert_truthvalue_loc(UNKNOWN_LOCATION, T)
+extern tree invert_truthvalue_loc (location_t, tree);
+extern tree fold_truth_not_expr (location_t, tree);
extern tree fold_unary_to_constant (enum tree_code, tree, tree);
extern tree fold_binary_to_constant (enum tree_code, tree, tree, tree);
extern tree fold_read_from_constant_string (tree);
extern tree int_const_binop (enum tree_code, const_tree, const_tree, int);
-extern tree build_fold_addr_expr (tree);
+#define build_fold_addr_expr(T)\
+ build_fold_addr_expr_loc (UNKNOWN_LOCATION, (T))
+extern tree build_fold_addr_expr_loc (location_t, tree);
+#define build_fold_addr_expr_with_type(T,TYPE)\
+ build_fold_addr_expr_with_type_loc (UNKNOWN_LOCATION, (T), TYPE)
+extern tree build_fold_addr_expr_with_type_loc (location_t, tree, tree);
extern tree fold_build_cleanup_point_expr (tree type, tree expr);
extern tree fold_strip_sign_ops (tree);
-extern tree build_fold_addr_expr_with_type (tree, tree);
-extern tree build_fold_indirect_ref (tree);
-extern tree fold_indirect_ref (tree);
+#define build_fold_indirect_ref(T)\
+ build_fold_indirect_ref_loc (UNKNOWN_LOCATION, T)
+extern tree build_fold_indirect_ref_loc (location_t, tree);
+#define fold_indirect_ref(T)\
+ fold_indirect_ref_loc (UNKNOWN_LOCATION, T)
+extern tree fold_indirect_ref_loc (location_t, tree);
extern tree constant_boolean_node (int, tree);
extern tree div_if_zero_remainder (enum tree_code, const_tree, const_tree);
extern bool tree_expr_nonzero_warnv_p (tree, bool *);
extern bool fold_real_zero_addition_p (const_tree, const_tree, int);
-extern tree combine_comparisons (enum tree_code, enum tree_code,
+extern tree combine_comparisons (location_t, enum tree_code, enum tree_code,
enum tree_code, tree, tree, tree);
extern void debug_fold_checksum (const_tree);
/* In builtins.c */
-extern tree fold_call_expr (tree, bool);
-extern tree fold_builtin_fputs (tree, tree, bool, bool, tree);
-extern tree fold_builtin_strcpy (tree, tree, tree, tree);
-extern tree fold_builtin_strncpy (tree, tree, tree, tree, tree);
-extern tree fold_builtin_memory_chk (tree, tree, tree, tree, tree, tree, bool,
+extern tree fold_call_expr (location_t, tree, bool);
+extern tree fold_builtin_fputs (location_t, tree, tree, bool, bool, tree);
+extern tree fold_builtin_strcpy (location_t, tree, tree, tree, tree);
+extern tree fold_builtin_strncpy (location_t, tree, tree, tree, tree, tree);
+extern tree fold_builtin_memory_chk (location_t, tree, tree, tree, tree, tree, tree, bool,
enum built_in_function);
-extern tree fold_builtin_stxcpy_chk (tree, tree, tree, tree, tree, bool,
+extern tree fold_builtin_stxcpy_chk (location_t, tree, tree, tree, tree, tree, bool,
enum built_in_function);
-extern tree fold_builtin_strncpy_chk (tree, tree, tree, tree, tree);
-extern tree fold_builtin_snprintf_chk (tree, tree, enum built_in_function);
+extern tree fold_builtin_strncpy_chk (location_t, tree, tree, tree, tree, tree);
+extern tree fold_builtin_snprintf_chk (location_t, tree, tree, enum built_in_function);
extern bool fold_builtin_next_arg (tree, bool);
extern enum built_in_function builtin_mathfn_code (const_tree);
-extern tree build_function_call_expr (tree, tree);
-extern tree fold_builtin_call_array (tree, tree, int, tree *);
-extern tree build_call_expr (tree, int, ...);
+extern tree build_function_call_expr (location_t, tree, tree);
+extern tree fold_builtin_call_array (location_t, tree, tree, int, tree *);
+#define build_call_expr(...)\
+ build_call_expr_loc (UNKNOWN_LOCATION, __VA_ARGS__)
+extern tree build_call_expr_loc (location_t, tree, int, ...);
extern tree mathfn_built_in (tree, enum built_in_function fn);
extern tree c_strlen (tree, int);
extern tree std_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
extern tree fold_call_stmt (gimple, bool);
extern tree gimple_fold_builtin_snprintf_chk (gimple, tree, enum built_in_function);
extern tree make_range (tree, int *, tree *, tree *, bool *);
-extern tree build_range_check (tree, tree, int, tree, tree);
+extern tree build_range_check (location_t, tree, tree, int, tree, tree);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
tree, tree);
args = tree_cons (NULL, x, args);
x = built_in_decls[BUILT_IN_EMUTLS_REGISTER_COMMON];
- x = build_function_call_expr (x, args);
+ x = build_function_call_expr (UNKNOWN_LOCATION, x, args);
append_to_statement_list (x, pstmts);
return 1;