#include "langhooks.h"
#include "basic-block.h"
#include "tree-mudflap.h"
+#include "tree-flow.h"
#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
required to implement the function call in all cases). */
tree implicit_built_in_decls[(int) END_BUILTINS];
-static int get_pointer_alignment (tree, unsigned int);
static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_sincos (tree);
+static rtx expand_builtin_cexpi (tree, rtx, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
+static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
static rtx expand_builtin_args_info (tree);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy (tree, tree, rtx, enum machine_mode, int);
-static rtx expand_builtin_memmove (tree, tree, rtx, enum machine_mode, tree);
+static rtx expand_builtin_memmove (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_bcopy (tree);
static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
-static rtx builtin_memset_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static tree fold_builtin_strlen (tree);
static tree fold_builtin_inf (tree, int);
static tree fold_builtin_nan (tree, tree, int);
-static int validate_arglist (tree, ...);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (tree, tree);
static bool readonly_data_expr (tree);
static tree fold_builtin_cbrt (tree, tree);
static tree fold_builtin_pow (tree, tree, tree);
static tree fold_builtin_powi (tree, tree, tree);
-static tree fold_builtin_sin (tree);
static tree fold_builtin_cos (tree, tree, tree);
-static tree fold_builtin_tan (tree);
-static tree fold_builtin_atan (tree, tree);
+static tree fold_builtin_cosh (tree, tree, tree);
+static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (tree, tree);
static tree fold_builtin_floor (tree, tree);
static tree fold_builtin_ceil (tree, tree);
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];
+static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
+ const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
+static tree do_mpfr_arg2 (tree, tree, tree,
+ int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
+static tree do_mpfr_arg3 (tree, tree, tree, tree,
+ int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
+static tree do_mpfr_sincos (tree, tree, tree);
/* Return true if NODE should be considered for inline expansion regardless
of the optimization level. This means whenever a function is invoked with
Otherwise, look at the expression to see if we can do better, i.e., if the
expression is actually pointing at an object whose alignment is tighter. */
-static int
+int
get_pointer_alignment (tree exp, unsigned int max_align)
{
unsigned int align, inner;
- if (! POINTER_TYPE_P (TREE_TYPE (exp)))
+ /* We rely on TER to compute accurate alignment information. */
+ if (!(optimize && flag_tree_ter))
+ return 0;
+
+ if (!POINTER_TYPE_P (TREE_TYPE (exp)))
return 0;
align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
override us. Therefore frame pointer elimination is OK, and using
the soft frame pointer is OK.
- For a non-zero count, or a zero count with __builtin_frame_address,
+ For a nonzero count, or a zero count with __builtin_frame_address,
we require a stable offset from the current frame pointer to the
previous one, so we must use the hard frame pointer, and
we must disable frame pointer elimination. */
CASE_MATHFN (BUILT_IN_ATANH)
CASE_MATHFN (BUILT_IN_CBRT)
CASE_MATHFN (BUILT_IN_CEIL)
+ CASE_MATHFN (BUILT_IN_CEXPI)
CASE_MATHFN (BUILT_IN_COPYSIGN)
CASE_MATHFN (BUILT_IN_COS)
CASE_MATHFN (BUILT_IN_COSH)
CASE_FLT_FN (BUILT_IN_ROUND):
builtin_optab = round_optab; break;
CASE_FLT_FN (BUILT_IN_NEARBYINT):
- builtin_optab = nearbyint_optab; break;
+ builtin_optab = nearbyint_optab;
+ if (flag_trapping_math)
+ break;
+ /* Else fallthrough and expand as rint. */
CASE_FLT_FN (BUILT_IN_RINT):
builtin_optab = rint_optab; break;
- CASE_FLT_FN (BUILT_IN_LRINT):
- CASE_FLT_FN (BUILT_IN_LLRINT):
- builtin_optab = lrint_optab; break;
default:
gcc_unreachable ();
}
builtin_optab = ldexp_optab; break;
CASE_FLT_FN (BUILT_IN_FMOD):
builtin_optab = fmod_optab; break;
+ CASE_FLT_FN (BUILT_IN_REMAINDER):
CASE_FLT_FN (BUILT_IN_DREM):
- builtin_optab = drem_optab; break;
+ builtin_optab = remainder_optab; break;
default:
gcc_unreachable ();
}
/* Check if sincos insn is available, otherwise fallback
to sin or cos insn. */
- if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing) {
+ if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
switch (DECL_FUNCTION_CODE (fndecl))
{
CASE_FLT_FN (BUILT_IN_SIN):
default:
gcc_unreachable ();
}
- }
/* Before working hard, check whether the instruction is available. */
if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
return const0_rtx;
}
-/* Expand a call to one of the builtin rounding functions (lfloor).
+/* Expand a call to the internal cexpi builtin to the sincos math function.
+ EXP is the expression that is a call to the builtin function; if convenient,
+ the result should be placed in TARGET. SUBTARGET may be used as the target
+ for computing one of EXP's operands. */
+
+static rtx
+expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
+{
+ tree fndecl = get_callee_fndecl (exp);
+ tree arglist = TREE_OPERAND (exp, 1);
+ enum machine_mode mode;
+ tree arg, type;
+ rtx op0, op1, op2;
+
+ if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ return 0;
+
+ arg = TREE_VALUE (arglist);
+ type = TREE_TYPE (arg);
+ mode = TYPE_MODE (TREE_TYPE (arg));
+
+ /* Try expanding via a sincos optab, fall back to emitting a libcall
+ to sincos. We are sure we have sincos either way because cexpi
+ is only generated from sincos. */
+ if (sincos_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
+ {
+ op1 = gen_reg_rtx (mode);
+ op2 = gen_reg_rtx (mode);
+
+ op0 = expand_expr (arg, subtarget, VOIDmode, 0);
+
+ /* Compute into op1 and op2. */
+ expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
+ }
+ else
+ {
+ tree call, narglist, fn = NULL_TREE;
+ tree top1, top2;
+ rtx op1a, op2a;
+
+ /* Pick the sincos libcall variant matching this cexpi's float type. */
+ if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
+ fn = built_in_decls[BUILT_IN_SINCOSF];
+ else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
+ fn = built_in_decls[BUILT_IN_SINCOS];
+ else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
+ fn = built_in_decls[BUILT_IN_SINCOSL];
+ gcc_assert (fn);
+
+ /* Allocate stack temporaries for the two results and hand their
+ addresses to sincos as the second and third arguments. */
+ op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
+ op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
+ op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
+ op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
+ top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
+ top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
+
+ /* Build the argument list (arg, &op1, &op2) back to front. */
+ narglist = build_tree_list (NULL_TREE, top2);
+ narglist = tree_cons (NULL_TREE, top1, narglist);
+ narglist = tree_cons (NULL_TREE, arg, narglist);
+
+ /* Make sure not to fold the sincos call again. */
+ call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
+ expand_normal (build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
+ call, narglist, NULL_TREE));
+ }
+
+ /* Now build the proper return type. NOTE(review): op2 supplies the
+ real part and op1 the imaginary part; this relies on the operand
+ order of sincos_optab / the sincos libcall — confirm against the
+ optab definition. */
+ return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
+ make_tree (TREE_TYPE (arg), op2),
+ make_tree (TREE_TYPE (arg), op1)),
+ target, VOIDmode, 0);
+}
+
+/* Expand a call to one of the builtin rounding functions gcc defines
+ as an extension (lfloor and lceil). As these are gcc extensions we
+ do not need to worry about setting errno to EDOM.
If expanding via optab fails, lower expression to (int)(floor(x)).
EXP is the expression that is a call to the builtin function;
if convenient, the result should be placed in TARGET. SUBTARGET may
static rtx
expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
{
- optab builtin_optab;
+ convert_optab builtin_optab;
rtx op0, insns, tmp;
tree fndecl = get_callee_fndecl (exp);
tree arglist = TREE_OPERAND (exp, 1);
/* Make a suitable register to place result in. */
mode = TYPE_MODE (TREE_TYPE (exp));
- /* Before working hard, check whether the instruction is available. */
- if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
- {
- target = gen_reg_rtx (mode);
-
- /* Wrap the computation of the argument in a SAVE_EXPR, as we may
- need to expand the argument again. This way, we will not perform
- side-effects more the once. */
- narg = builtin_save_expr (arg);
- if (narg != arg)
- {
- arg = narg;
- arglist = build_tree_list (NULL_TREE, arg);
- exp = build_function_call_expr (fndecl, arglist);
- }
-
- op0 = expand_expr (arg, subtarget, VOIDmode, 0);
+ target = gen_reg_rtx (mode);
- start_sequence ();
+ /* Wrap the computation of the argument in a SAVE_EXPR, as we may
+ need to expand the argument again. This way, we will not perform
+ side effects more than once. */
+ narg = builtin_save_expr (arg);
+ if (narg != arg)
+ {
+ arg = narg;
+ arglist = build_tree_list (NULL_TREE, arg);
+ exp = build_function_call_expr (fndecl, arglist);
+ }
- /* Compute into TARGET.
- Set TARGET to wherever the result comes back. */
- target = expand_unop (mode, builtin_optab, op0, target, 0);
+ op0 = expand_expr (arg, subtarget, VOIDmode, 0);
- if (target != 0)
- {
- /* Output the entire sequence. */
- insns = get_insns ();
- end_sequence ();
- emit_insn (insns);
- return target;
- }
+ start_sequence ();
- /* If we were unable to expand via the builtin, stop the sequence
- (without outputting the insns). */
+ /* Compute into TARGET. */
+ if (expand_sfix_optab (target, op0, builtin_optab))
+ {
+ /* Output the entire sequence. */
+ insns = get_insns ();
end_sequence ();
+ emit_insn (insns);
+ return target;
}
+ /* If we were unable to expand via the builtin, stop the sequence
+ (without outputting the insns). */
+ end_sequence ();
+
/* Fall back to floating point rounding optab. */
fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
/* We shouldn't get here on targets without TARGET_C99_FUNCTIONS.
return target;
}
+/* Expand a call to one of the builtin math functions doing integer
+ conversion (lrint, llrint, lround, llround).
+ Return 0 if a normal call should be emitted rather than expanding the
+ function in-line. EXP is the expression that is a call to the builtin
+ function; if convenient, the result should be placed in TARGET.
+ SUBTARGET may be used as the target for computing one of EXP's operands. */
+
+static rtx
+expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
+{
+ convert_optab builtin_optab;
+ rtx op0, insns;
+ tree fndecl = get_callee_fndecl (exp);
+ tree arglist = TREE_OPERAND (exp, 1);
+ enum machine_mode mode;
+ tree arg, narg;
+
+ /* There's no easy way to detect the case we need to set EDOM. */
+ if (flag_errno_math)
+ return NULL_RTX;
+
+ if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ return NULL_RTX;
+
+ arg = TREE_VALUE (arglist);
+
+ /* Select the conversion optab for this builtin. */
+ switch (DECL_FUNCTION_CODE (fndecl))
+ {
+ CASE_FLT_FN (BUILT_IN_LRINT):
+ CASE_FLT_FN (BUILT_IN_LLRINT):
+ builtin_optab = lrint_optab; break;
+ CASE_FLT_FN (BUILT_IN_LROUND):
+ CASE_FLT_FN (BUILT_IN_LLROUND):
+ builtin_optab = lround_optab; break;
+ default:
+ gcc_unreachable ();
+ }
+
+ /* Make a suitable register to place result in. */
+ mode = TYPE_MODE (TREE_TYPE (exp));
+
+ target = gen_reg_rtx (mode);
+
+ /* Wrap the computation of the argument in a SAVE_EXPR, as we may
+ need to expand the argument again. This way, we will not perform
+ side effects more than once. */
+ narg = builtin_save_expr (arg);
+ if (narg != arg)
+ {
+ arg = narg;
+ arglist = build_tree_list (NULL_TREE, arg);
+ exp = build_function_call_expr (fndecl, arglist);
+ }
+
+ op0 = expand_expr (arg, subtarget, VOIDmode, 0);
+
+ /* Emit the conversion into a pending sequence so the insns can be
+ thrown away if the optab expansion fails. */
+ start_sequence ();
+
+ if (expand_sfix_optab (target, op0, builtin_optab))
+ {
+ /* Output the entire sequence. */
+ insns = get_insns ();
+ end_sequence ();
+ emit_insn (insns);
+ return target;
+ }
+
+ /* If we were unable to expand via the builtin, stop the sequence
+ (without outputting the insns) and call to the library function
+ with the stabilized argument list. */
+ end_sequence ();
+
+ target = expand_call (exp, target, target == const0_rtx);
+
+ return target;
+}
+
/* To evaluate powi(x,n), the floating point value x raised to the
constant integer exponent n, we use a hybrid algorithm that
combines the "window method" with look-up tables. For an
static rtx
expand_builtin_pow (tree exp, rtx target, rtx subtarget)
{
+ tree arg0, arg1, fn, narg0, narglist;
tree arglist = TREE_OPERAND (exp, 1);
- tree arg0, arg1;
+ tree type = TREE_TYPE (exp);
+ REAL_VALUE_TYPE cint, c, c2;
+ HOST_WIDE_INT n;
+ rtx op, op2;
+ enum machine_mode mode = TYPE_MODE (type);
if (! validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
return 0;
arg0 = TREE_VALUE (arglist);
arg1 = TREE_VALUE (TREE_CHAIN (arglist));
- if (TREE_CODE (arg1) == REAL_CST
- && ! TREE_CONSTANT_OVERFLOW (arg1))
+ if (TREE_CODE (arg1) != REAL_CST
+ || TREE_CONSTANT_OVERFLOW (arg1))
+ return expand_builtin_mathfn_2 (exp, target, subtarget);
+
+ /* Handle constant exponents. */
+
+ /* For integer valued exponents we can expand to an optimal multiplication
+ sequence using expand_powi. */
+ c = TREE_REAL_CST (arg1);
+ n = real_to_integer (&c);
+ real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
+ if (real_identical (&c, &cint)
+ && ((n >= -1 && n <= 2)
+ || (flag_unsafe_math_optimizations
+ && !optimize_size
+ && powi_cost (n) <= POWI_MAX_MULTS)))
{
- REAL_VALUE_TYPE cint;
- REAL_VALUE_TYPE c;
- HOST_WIDE_INT n;
+ op = expand_expr (arg0, subtarget, VOIDmode, 0);
+ if (n != 1)
+ {
+ op = force_reg (mode, op);
+ op = expand_powi (op, mode, n);
+ }
+ return op;
+ }
- c = TREE_REAL_CST (arg1);
- n = real_to_integer (&c);
+ narg0 = builtin_save_expr (arg0);
+ narglist = build_tree_list (NULL_TREE, narg0);
+
+ /* If the exponent is not integer valued, check if it is half of an integer.
+ In this case we can expand to sqrt (x) * x**(n/2). */
+ fn = mathfn_built_in (type, BUILT_IN_SQRT);
+ if (fn != NULL_TREE)
+ {
+ real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
+ n = real_to_integer (&c2);
real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
- if (real_identical (&c, &cint))
- {
- /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
- Otherwise, check the number of multiplications required.
- Note that pow never sets errno for an integer exponent. */
- if ((n >= -1 && n <= 2)
- || (flag_unsafe_math_optimizations
- && ! optimize_size
- && powi_cost (n) <= POWI_MAX_MULTS))
+ if (real_identical (&c2, &cint)
+ && ((flag_unsafe_math_optimizations
+ && !optimize_size
+ && powi_cost (n/2) <= POWI_MAX_MULTS)
+ || n == 1))
+ {
+ tree call_expr = build_function_call_expr (fn, narglist);
+ op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
+ if (n != 1)
{
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
- rtx op = expand_expr (arg0, subtarget, VOIDmode, 0);
- op = force_reg (mode, op);
- return expand_powi (op, mode, n);
+ op2 = expand_expr (narg0, subtarget, VOIDmode, 0);
+ op2 = force_reg (mode, op2);
+ op2 = expand_powi (op2, mode, abs (n / 2));
+ op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
+ 0, OPTAB_LIB_WIDEN);
+ /* If the original exponent was negative, reciprocate the
+ result. */
+ if (n < 0)
+ op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
+ op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
+ return op;
}
}
- if (! flag_unsafe_math_optimizations)
- return NULL_RTX;
+ /* Try if the exponent is a third of an integer. In this case
+ we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
+ different from pow (x, 1./3.) due to rounding and behavior
+ with negative x we need to constrain this transformation to
+ unsafe math and positive x or finite math. */
+ fn = mathfn_built_in (type, BUILT_IN_CBRT);
+ if (fn != NULL_TREE
+ && flag_unsafe_math_optimizations
+ && (tree_expr_nonnegative_p (arg0)
+ || !HONOR_NANS (mode)))
+ {
+ real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
+ real_round (&c2, mode, &c2);
+ n = real_to_integer (&c2);
+ real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
+ real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
+ real_convert (&c2, mode, &c2);
+ if (real_identical (&c2, &c)
+ && ((!optimize_size
+ && powi_cost (n/3) <= POWI_MAX_MULTS)
+ || n == 1))
+ {
+ tree call_expr = build_function_call_expr (fn, narglist);
+ op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
+ if (abs (n) % 3 == 2)
+ op = expand_simple_binop (mode, MULT, op, op, op,
+ 0, OPTAB_LIB_WIDEN);
+ if (n != 1)
+ {
+ op2 = expand_expr (narg0, subtarget, VOIDmode, 0);
+ op2 = force_reg (mode, op2);
+ op2 = expand_powi (op2, mode, abs (n / 3));
+ op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
+ 0, OPTAB_LIB_WIDEN);
+ /* If the original exponent was negative, reciprocate the
+ result. */
+ if (n < 0)
+ op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
+ op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
+ }
+ return op;
+ }
+ }
+
+ /* Fall back to optab expansion. */
return expand_builtin_mathfn_2 (exp, target, subtarget);
}
static rtx
expand_builtin_memmove (tree arglist, tree type, rtx target,
- enum machine_mode mode, tree orig_exp)
+ enum machine_mode mode)
{
if (!validate_arglist (arglist,
POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
return 0;
else
{
- tree dest = TREE_VALUE (arglist);
- tree src = TREE_VALUE (TREE_CHAIN (arglist));
- tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
-
- unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
- unsigned int dest_align
- = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
tree result = fold_builtin_memory_op (arglist, type, false, /*endp=*/3);
if (result)
return expand_expr (result, target, mode, EXPAND_NORMAL);
}
- /* If DEST is not a pointer type, call the normal function. */
- if (dest_align == 0)
- return 0;
-
- /* If either SRC is not a pointer type, don't do this
- operation in-line. */
- if (src_align == 0)
- return 0;
-
- /* If src is categorized for a readonly section we can use
- normal memcpy. */
- if (readonly_data_expr (src))
- {
- tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
- if (!fn)
- return 0;
- fn = build_function_call_expr (fn, arglist);
- if (TREE_CODE (fn) == CALL_EXPR)
- CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
- return expand_expr (fn, target, mode, EXPAND_NORMAL);
- }
-
- /* If length is 1 and we can expand memcpy call inline,
- it is ok to use memcpy as well. */
- if (integer_onep (len))
- {
- rtx ret = expand_builtin_mempcpy (arglist, type, target, mode,
- /*endp=*/0);
- if (ret)
- return ret;
- }
-
/* Otherwise, call the normal function. */
return 0;
}
newarglist = tree_cons (NULL_TREE, src, newarglist);
newarglist = tree_cons (NULL_TREE, dest, newarglist);
- return expand_builtin_memmove (newarglist, type, const0_rtx, VOIDmode, exp);
+ return expand_builtin_memmove (newarglist, type, const0_rtx, VOIDmode);
}
#ifndef HAVE_movstr
bytes from constant string DATA + OFFSET and return it as target
constant. */
-static rtx
+rtx
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
enum machine_mode mode)
{
{
tree t;
- t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist,
+ t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (valist), valist,
make_tree (ptr_type_node, nextarg));
TREE_SIDE_EFFECTS (t) = 1;
&& !integer_zerop (TYPE_SIZE (type)))
{
t = fold_convert (TREE_TYPE (valist), size_int (boundary - 1));
- t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
+ t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (valist), valist_tmp,
build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t));
gimplify_and_add (t, pre_p);
t = fold_convert (TREE_TYPE (valist), size_int (-boundary));
- t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
+ t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (valist), valist_tmp,
build2 (BIT_AND_EXPR, TREE_TYPE (valist), valist_tmp, t));
gimplify_and_add (t, pre_p);
}
/* Compute new value for AP. */
t = fold_convert (TREE_TYPE (valist), rounded_size);
t = build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t);
- t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
+ t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (valist), valist, t);
gimplify_and_add (t, pre_p);
addr = fold_convert (build_pointer_type (type), addr);
if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
{
- t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
+ t = build2 (GIMPLE_MODIFY_STMT, va_list_type_node, dst, src);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
return result;
}
+/* Expand a call to a bswap builtin (__builtin_bswap32/64). The arguments
+ are in ARGLIST; the mode to expand with is taken from the type of the
+ argument. If convenient, the result should be placed in TARGET;
+ SUBTARGET may be used as the target for computing the operand. */
+
+static rtx
+expand_builtin_bswap (tree arglist, rtx target, rtx subtarget)
+{
+ enum machine_mode mode;
+ tree arg;
+ rtx op0;
+
+ if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
+ return 0;
+
+ arg = TREE_VALUE (arglist);
+ mode = TYPE_MODE (TREE_TYPE (arg));
+ op0 = expand_expr (arg, subtarget, VOIDmode, 0);
+
+ target = expand_unop (mode, bswap_optab, op0, target, 1);
+
+ /* bswap is expected to always be expandable here; there is no
+ libcall fallback. */
+ gcc_assert (target);
+
+ /* Make sure the result is in the argument's mode in case expand_unop
+ produced a wider value. */
+ return convert_to_mode (mode, target, 0);
+}
+
/* Expand a call to a unary builtin. The arguments are in ARGLIST.
Return 0 if a normal call should be emitted rather than expanding the
function in-line. If convenient, the result should be placed in TARGET.
return 0;
}
-/* Expand a call to __builtin_expect. We return our argument and emit a
- NOTE_INSN_EXPECTED_VALUE note. This is the expansion of __builtin_expect in
- a non-jump context. */
+/* Expand a call to __builtin_expect. We just return our argument
+ as the builtin_expect semantic should've been already executed by
+ tree branch prediction pass. */
static rtx
expand_builtin_expect (tree arglist, rtx target)
{
tree exp, c;
- rtx note, rtx_c;
if (arglist == NULL_TREE
|| TREE_CHAIN (arglist) == NULL_TREE)
exp = TREE_VALUE (arglist);
c = TREE_VALUE (TREE_CHAIN (arglist));
- if (TREE_CODE (c) != INTEGER_CST)
- {
- error ("second argument to %<__builtin_expect%> must be a constant");
- c = integer_zero_node;
- }
-
target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
-
- /* Don't bother with expected value notes for integral constants. */
- if (flag_guess_branch_prob && GET_CODE (target) != CONST_INT)
- {
- /* We do need to force this into a register so that we can be
- moderately sure to be able to correctly interpret the branch
- condition later. */
- target = force_reg (GET_MODE (target), target);
-
- rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);
-
- note = emit_note (NOTE_INSN_EXPECTED_VALUE);
- NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
- }
-
+ /* When guessing was done, the hints should be already stripped away. */
+ gcc_assert (!flag_guess_branch_prob);
return target;
}
-/* Like expand_builtin_expect, except do this in a jump context. This is
- called from do_jump if the conditional is a __builtin_expect. Return either
- a list of insns to emit the jump or NULL if we cannot optimize
- __builtin_expect. We need to optimize this at jump time so that machines
- like the PowerPC don't turn the test into a SCC operation, and then jump
- based on the test being 0/1. */
-
-rtx
-expand_builtin_expect_jump (tree exp, rtx if_false_label, rtx if_true_label)
-{
- tree arglist = TREE_OPERAND (exp, 1);
- tree arg0 = TREE_VALUE (arglist);
- tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
- rtx ret = NULL_RTX;
-
- /* Only handle __builtin_expect (test, 0) and
- __builtin_expect (test, 1). */
- if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
- && (integer_zerop (arg1) || integer_onep (arg1)))
- {
- rtx insn, drop_through_label, temp;
-
- /* Expand the jump insns. */
- start_sequence ();
- do_jump (arg0, if_false_label, if_true_label);
- ret = get_insns ();
-
- drop_through_label = get_last_insn ();
- if (drop_through_label && NOTE_P (drop_through_label))
- drop_through_label = prev_nonnote_insn (drop_through_label);
- if (drop_through_label && !LABEL_P (drop_through_label))
- drop_through_label = NULL_RTX;
- end_sequence ();
-
- if (! if_true_label)
- if_true_label = drop_through_label;
- if (! if_false_label)
- if_false_label = drop_through_label;
-
- /* Go through and add the expect's to each of the conditional jumps. */
- insn = ret;
- while (insn != NULL_RTX)
- {
- rtx next = NEXT_INSN (insn);
-
- if (JUMP_P (insn) && any_condjump_p (insn))
- {
- rtx ifelse = SET_SRC (pc_set (insn));
- rtx then_dest = XEXP (ifelse, 1);
- rtx else_dest = XEXP (ifelse, 2);
- int taken = -1;
-
- /* First check if we recognize any of the labels. */
- if (GET_CODE (then_dest) == LABEL_REF
- && XEXP (then_dest, 0) == if_true_label)
- taken = 1;
- else if (GET_CODE (then_dest) == LABEL_REF
- && XEXP (then_dest, 0) == if_false_label)
- taken = 0;
- else if (GET_CODE (else_dest) == LABEL_REF
- && XEXP (else_dest, 0) == if_false_label)
- taken = 1;
- else if (GET_CODE (else_dest) == LABEL_REF
- && XEXP (else_dest, 0) == if_true_label)
- taken = 0;
- /* Otherwise check where we drop through. */
- else if (else_dest == pc_rtx)
- {
- if (next && NOTE_P (next))
- next = next_nonnote_insn (next);
-
- if (next && JUMP_P (next)
- && any_uncondjump_p (next))
- temp = XEXP (SET_SRC (pc_set (next)), 0);
- else
- temp = next;
-
- /* TEMP is either a CODE_LABEL, NULL_RTX or something
- else that can't possibly match either target label. */
- if (temp == if_false_label)
- taken = 1;
- else if (temp == if_true_label)
- taken = 0;
- }
- else if (then_dest == pc_rtx)
- {
- if (next && NOTE_P (next))
- next = next_nonnote_insn (next);
-
- if (next && JUMP_P (next)
- && any_uncondjump_p (next))
- temp = XEXP (SET_SRC (pc_set (next)), 0);
- else
- temp = next;
-
- if (temp == if_false_label)
- taken = 0;
- else if (temp == if_true_label)
- taken = 1;
- }
-
- if (taken != -1)
- {
- /* If the test is expected to fail, reverse the
- probabilities. */
- if (integer_zerop (arg1))
- taken = 1 - taken;
- predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
- }
- }
-
- insn = next;
- }
- }
-
- return ret;
-}
-
void
expand_builtin_trap (void)
{
arglist = TREE_CHAIN (arglist);
val = expand_expr (TREE_VALUE (arglist), NULL, mode, EXPAND_NORMAL);
+ /* If VAL is promoted to a wider mode, convert it back to MODE. */
+ val = convert_to_mode (mode, val, 1);
if (ignore)
return expand_sync_operation (mem, val, code);
arglist = TREE_CHAIN (arglist);
old_val = expand_expr (TREE_VALUE (arglist), NULL, mode, EXPAND_NORMAL);
+ /* If OLD_VAL is promoted to a wider mode, convert it back to MODE. */
+ old_val = convert_to_mode (mode, old_val, 1);
arglist = TREE_CHAIN (arglist);
new_val = expand_expr (TREE_VALUE (arglist), NULL, mode, EXPAND_NORMAL);
+ /* If NEW_VAL is promoted to a wider mode, convert it back to MODE. */
+ new_val = convert_to_mode (mode, new_val, 1);
if (is_bool)
return expand_bool_compare_and_swap (mem, old_val, new_val, target);
arglist = TREE_CHAIN (arglist);
val = expand_expr (TREE_VALUE (arglist), NULL, mode, EXPAND_NORMAL);
+ /* If VAL is promoted to a wider mode, convert it back to MODE. */
+ val = convert_to_mode (mode, val, 1);
return expand_sync_lock_test_and_set (mem, val, target);
}
CASE_FLT_FN (BUILT_IN_ROUND):
CASE_FLT_FN (BUILT_IN_NEARBYINT):
CASE_FLT_FN (BUILT_IN_RINT):
- CASE_FLT_FN (BUILT_IN_LRINT):
- CASE_FLT_FN (BUILT_IN_LLRINT):
target = expand_builtin_mathfn (exp, target, subtarget);
if (target)
return target;
return target;
break;
- CASE_FLT_FN (BUILT_IN_POW):
- target = expand_builtin_pow (exp, target, subtarget);
+ CASE_FLT_FN (BUILT_IN_LRINT):
+ CASE_FLT_FN (BUILT_IN_LLRINT):
+ CASE_FLT_FN (BUILT_IN_LROUND):
+ CASE_FLT_FN (BUILT_IN_LLROUND):
+ target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
+ if (target)
+ return target;
+ break;
+
+ CASE_FLT_FN (BUILT_IN_POW):
+ target = expand_builtin_pow (exp, target, subtarget);
if (target)
return target;
break;
CASE_FLT_FN (BUILT_IN_ATAN2):
CASE_FLT_FN (BUILT_IN_LDEXP):
- CASE_FLT_FN (BUILT_IN_FMOD):
- CASE_FLT_FN (BUILT_IN_DREM):
if (! flag_unsafe_math_optimizations)
break;
+
+ CASE_FLT_FN (BUILT_IN_FMOD):
+ CASE_FLT_FN (BUILT_IN_REMAINDER):
+ CASE_FLT_FN (BUILT_IN_DREM):
target = expand_builtin_mathfn_2 (exp, target, subtarget);
if (target)
return target;
break;
+ CASE_FLT_FN (BUILT_IN_CEXPI):
+ target = expand_builtin_cexpi (exp, target, subtarget);
+ gcc_assert (target);
+ return target;
+
CASE_FLT_FN (BUILT_IN_SIN):
CASE_FLT_FN (BUILT_IN_COS):
if (! flag_unsafe_math_optimizations)
expand_stack_restore (TREE_VALUE (arglist));
return const0_rtx;
+ case BUILT_IN_BSWAP32:
+ case BUILT_IN_BSWAP64:
+ target = expand_builtin_bswap (arglist, target, subtarget);
+
+ if (target)
+ return target;
+ break;
+
CASE_INT_FN (BUILT_IN_FFS):
case BUILT_IN_FFSIMAX:
target = expand_builtin_unop (target_mode, arglist, target,
case BUILT_IN_MEMMOVE:
target = expand_builtin_memmove (arglist, TREE_TYPE (exp), target,
- mode, exp);
+ mode);
if (target)
return target;
break;
case BUILT_IN_STPCPY_CHK:
case BUILT_IN_STRNCPY_CHK:
case BUILT_IN_STRCAT_CHK:
+ case BUILT_IN_STRNCAT_CHK:
case BUILT_IN_SNPRINTF_CHK:
case BUILT_IN_VSNPRINTF_CHK:
maybe_emit_chk_warning (exp, fcode);
return integer_valued_real_p (TREE_OPERAND (t, 0));
case COMPOUND_EXPR:
- case MODIFY_EXPR:
+ case GIMPLE_MODIFY_STMT:
case BIND_EXPR:
- return integer_valued_real_p (TREE_OPERAND (t, 1));
+ return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
case PLUS_EXPR:
case MINUS_EXPR:
&& integer_valued_real_p (TREE_OPERAND (t, 2));
case REAL_CST:
- if (! TREE_CONSTANT_OVERFLOW (t))
- {
- REAL_VALUE_TYPE c, cint;
-
- c = TREE_REAL_CST (t);
- real_trunc (&cint, TYPE_MODE (TREE_TYPE (t)), &c);
- return real_identical (&c, &cint);
- }
- break;
+ return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
case NOP_EXPR:
{
CASE_FLT_FN (BUILT_IN_TRUNC):
return true;
+ CASE_FLT_FN (BUILT_IN_FMIN):
+ CASE_FLT_FN (BUILT_IN_FMAX):
+ return integer_valued_real_p (TREE_VALUE (TREE_OPERAND (t, 1)))
+ && integer_valued_real_p (TREE_VALUE (TREE_CHAIN (TREE_OPERAND (t, 1))));
+
default:
break;
}
{
tree arg = TREE_VALUE (arglist);
const enum built_in_function fcode = builtin_mathfn_code (arg);
+ tree res;
if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
return NULL_TREE;
- /* Optimize cbrt of constant value. */
- if (real_zerop (arg) || real_onep (arg) || real_minus_onep (arg))
- return arg;
+ /* Calculate the result when the argument is a constant. */
+ if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
+ return res;
if (flag_unsafe_math_optimizations)
{
return NULL_TREE;
}
-/* Fold function call to builtin sin, sinf, or sinl. Return
+/* Fold function call to builtin cos, cosf, or cosl. Return
NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_sin (tree arglist)
+fold_builtin_cos (tree arglist, tree type, tree fndecl)
{
tree arg = TREE_VALUE (arglist);
+ tree res, narg;
if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
return NULL_TREE;
- /* Optimize sin (0.0) = 0.0. */
- if (real_zerop (arg))
- return arg;
+ /* Calculate the result when the argument is a constant. */
+ if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
+ return res;
+
+ /* Optimize cos(-x) into cos (x). */
+ if ((narg = fold_strip_sign_ops (arg)))
+ return build_function_call_expr (fndecl,
+ build_tree_list (NULL_TREE, narg));
return NULL_TREE;
}
-/* Fold function call to builtin cos, cosf, or cosl. Return
+/* Fold function call to builtin cosh, coshf, or coshl. Return
NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_cos (tree arglist, tree type, tree fndecl)
+fold_builtin_cosh (tree arglist, tree type, tree fndecl)
{
- tree arg = TREE_VALUE (arglist);
-
- if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
- return NULL_TREE;
-
- /* Optimize cos (0.0) = 1.0. */
- if (real_zerop (arg))
- return build_real (type, dconst1);
-
- /* Optimize cos(-x) into cos (x). */
- if (TREE_CODE (arg) == NEGATE_EXPR)
+ if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
{
- tree args = build_tree_list (NULL_TREE,
- TREE_OPERAND (arg, 0));
- return build_function_call_expr (fndecl, args);
- }
-
+ tree arg = TREE_VALUE (arglist);
+ tree res, narg;
+
+ /* Calculate the result when the argument is a constant. */
+ if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
+ return res;
+
+ /* Optimize cosh(-x) into cosh (x). */
+ if ((narg = fold_strip_sign_ops (arg)))
+ return build_function_call_expr (fndecl,
+ build_tree_list (NULL_TREE, narg));
+ }
+
return NULL_TREE;
}
/* Fold function call to builtin tan, tanf, or tanl. Return
NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_tan (tree arglist)
+fold_builtin_tan (tree arglist, tree type)
{
enum built_in_function fcode;
tree arg = TREE_VALUE (arglist);
+ tree res;
if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
return NULL_TREE;
- /* Optimize tan(0.0) = 0.0. */
- if (real_zerop (arg))
- return arg;
-
+ /* Calculate the result when the argument is a constant. */
+ if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
+ return res;
+
/* Optimize tan(atan(x)) = x. */
fcode = builtin_mathfn_code (arg);
if (flag_unsafe_math_optimizations
return NULL_TREE;
}
-/* Fold function call to builtin atan, atanf, or atanl. Return
+/* Fold function call to builtin sincos, sincosf, or sincosl. Return
NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_atan (tree arglist, tree type)
+fold_builtin_sincos (tree arglist)
{
+ tree type, arg0, arg1, arg2;
+ tree res, fn, call;
- tree arg = TREE_VALUE (arglist);
+ if (!validate_arglist (arglist, REAL_TYPE, POINTER_TYPE,
+ POINTER_TYPE, VOID_TYPE))
+ return NULL_TREE;
- if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ arg0 = TREE_VALUE (arglist);
+ type = TREE_TYPE (arg0);
+ arg1 = TREE_VALUE (TREE_CHAIN (arglist));
+ arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
+
+ /* Calculate the result when the argument is a constant. */
+ if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
+ return res;
+
+ /* Canonicalize sincos to cexpi. */
+ fn = mathfn_built_in (type, BUILT_IN_CEXPI);
+ if (!fn)
return NULL_TREE;
- /* Optimize atan(0.0) = 0.0. */
- if (real_zerop (arg))
- return arg;
+ call = build_function_call_expr (fn, build_tree_list (NULL_TREE, arg0));
+ call = builtin_save_expr (call);
+
+ return build2 (COMPOUND_EXPR, type,
+ build2 (MODIFY_EXPR, void_type_node,
+ build_fold_indirect_ref (arg1),
+ build1 (IMAGPART_EXPR, type, call)),
+ build2 (MODIFY_EXPR, void_type_node,
+ build_fold_indirect_ref (arg2),
+ build1 (REALPART_EXPR, type, call)));
+}
- /* Optimize atan(1.0) = pi/4. */
- if (real_onep (arg))
+/* Fold function call to builtin cexp, cexpf, or cexpl. Return
+ NULL_TREE if no simplification can be made. */
+
+static tree
+fold_builtin_cexp (tree arglist, tree type)
+{
+ tree arg0, rtype;
+ tree realp, imagp, ifn;
+
+ if (!validate_arglist (arglist, COMPLEX_TYPE, VOID_TYPE))
+ return NULL_TREE;
+
+ arg0 = TREE_VALUE (arglist);
+ rtype = TREE_TYPE (TREE_TYPE (arg0));
+
+ /* In case we can figure out the real part of arg0 and it is constant zero
+ fold to cexpi. */
+ ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
+ if (!ifn)
+ return NULL_TREE;
+
+ if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
+ && real_zerop (realp))
+ {
+ tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
+ return build_function_call_expr (ifn, build_tree_list (NULL_TREE, narg));
+ }
+
+ /* In case we can easily decompose real and imaginary parts split cexp
+ to exp (r) * cexpi (i). */
+ if (flag_unsafe_math_optimizations
+ && realp)
{
- REAL_VALUE_TYPE cst;
+ tree rfn, rcall, icall;
+
+ rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
+ if (!rfn)
+ return NULL_TREE;
+
+ imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
+ if (!imagp)
+ return NULL_TREE;
- real_convert (&cst, TYPE_MODE (type), &dconstpi);
- SET_REAL_EXP (&cst, REAL_EXP (&cst) - 2);
- return build_real (type, cst);
+ icall = build_function_call_expr (ifn,
+ build_tree_list (NULL_TREE, imagp));
+ icall = builtin_save_expr (icall);
+ rcall = build_function_call_expr (rfn,
+ build_tree_list (NULL_TREE, realp));
+ rcall = builtin_save_expr (rcall);
+ return build2 (COMPLEX_EXPR, type,
+ build2 (MULT_EXPR, rtype,
+ rcall,
+ build1 (REALPART_EXPR, rtype, icall)),
+ build2 (MULT_EXPR, rtype,
+ rcall,
+ build1 (IMAGPART_EXPR, rtype, icall)));
}
return NULL_TREE;
}
}
+ /* Fold floor (x) where x is nonnegative to trunc (x). */
+ if (tree_expr_nonnegative_p (arg))
+ {
+ tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
+ if (truncfn)
+ return build_function_call_expr (truncfn, arglist);
+ }
+
return fold_trunc_transparent_mathfn (fndecl, arglist);
}
}
}
+ switch (DECL_FUNCTION_CODE (fndecl))
+ {
+ CASE_FLT_FN (BUILT_IN_LFLOOR):
+ CASE_FLT_FN (BUILT_IN_LLFLOOR):
+ /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
+ if (tree_expr_nonnegative_p (arg))
+ return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
+ arg);
+ break;
+ default:;
+ }
+
return fold_fixed_mathfn (fndecl, arglist);
}
return NULL_TREE;
}
+/* Fold function call to builtin_bswap and the long and long long
+ variants. Return NULL_TREE if no simplification can be made. */
+static tree
+fold_builtin_bswap (tree fndecl, tree arglist)
+{
+ tree arg;
+
+ if (! validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
+ return 0;
+
+ /* Optimize constant value. */
+ arg = TREE_VALUE (arglist);
+ if (TREE_CODE (arg) == INTEGER_CST && ! TREE_CONSTANT_OVERFLOW (arg))
+ {
+ HOST_WIDE_INT hi, width, r_hi = 0;
+ unsigned HOST_WIDE_INT lo, r_lo = 0;
+ tree type;
+
+ type = TREE_TYPE (arg);
+ width = TYPE_PRECISION (type);
+ lo = TREE_INT_CST_LOW (arg);
+ hi = TREE_INT_CST_HIGH (arg);
+
+ switch (DECL_FUNCTION_CODE (fndecl))
+ {
+ case BUILT_IN_BSWAP32:
+ case BUILT_IN_BSWAP64:
+ {
+ int s;
+
+ for (s = 0; s < width; s += 8)
+ {
+ int d = width - s - 8;
+ unsigned HOST_WIDE_INT byte;
+
+ if (s < HOST_BITS_PER_WIDE_INT)
+ byte = (lo >> s) & 0xff;
+ else
+ byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
+
+ if (d < HOST_BITS_PER_WIDE_INT)
+ r_lo |= byte << d;
+ else
+ r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
+ }
+ }
+
+ break;
+
+ default:
+ gcc_unreachable ();
+ }
+
+ if (width < HOST_BITS_PER_WIDE_INT)
+ return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
+ else
+ return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
+ }
+
+ return NULL_TREE;
+}
/* Return true if EXPR is the real constant contained in VALUE. */
static bool
}
/* A subroutine of fold_builtin to fold the various logarithmic
- functions. EXP is the CALL_EXPR of a call to a builtin logN
- function. VALUE is the base of the logN function. */
+ functions. Return NULL_TREE if no simplification can be made.
+ FUNC is the corresponding MPFR logarithm function. */
static tree
fold_builtin_logarithm (tree fndecl, tree arglist,
- const REAL_VALUE_TYPE *value)
+ int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
{
tree type = TREE_TYPE (TREE_TYPE (fndecl));
tree arg = TREE_VALUE (arglist);
+ tree res;
const enum built_in_function fcode = builtin_mathfn_code (arg);
- /* Optimize logN(1.0) = 0.0. */
- if (real_onep (arg))
- return build_real (type, dconst0);
-
- /* Optimize logN(N) = 1.0. If N can't be truncated to MODE
- exactly, then only do this if flag_unsafe_math_optimizations. */
- if (exact_real_truncate (TYPE_MODE (type), value)
- || flag_unsafe_math_optimizations)
- {
- const REAL_VALUE_TYPE value_truncate =
- real_value_truncate (TYPE_MODE (type), *value);
- if (real_dconstp (arg, &value_truncate))
+ /* Optimize log(e) = 1.0. We're never passed an exact 'e',
+ instead we'll look for 'e' truncated to MODE. So only do
+ this if flag_unsafe_math_optimizations is set. */
+ if (flag_unsafe_math_optimizations && func == mpfr_log)
+ {
+ const REAL_VALUE_TYPE e_truncated =
+ real_value_truncate (TYPE_MODE (type), dconste);
+ if (real_dconstp (arg, &e_truncated))
return build_real (type, dconst1);
}
+ /* Calculate the result when the argument is a constant. */
+ if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
+ return res;
+
/* Special case, optimize logN(expN(x)) = x. */
if (flag_unsafe_math_optimizations
- && ((value == &dconste
+ && ((func == mpfr_log
&& (fcode == BUILT_IN_EXP
|| fcode == BUILT_IN_EXPF
|| fcode == BUILT_IN_EXPL))
- || (value == &dconst2
+ || (func == mpfr_log2
&& (fcode == BUILT_IN_EXP2
|| fcode == BUILT_IN_EXP2F
|| fcode == BUILT_IN_EXP2L))
- || (value == &dconst10 && (BUILTIN_EXP10_P (fcode)))))
+ || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
return fold_convert (type, TREE_VALUE (TREE_OPERAND (arg, 1)));
/* Optimize logN(func()) for various exponential functions. We
return 0;
}
+/* Fold a builtin function call to hypot, hypotf, or hypotl. Return
+ NULL_TREE if no simplification can be made. */
+
+static tree
+fold_builtin_hypot (tree fndecl, tree arglist, tree type)
+{
+ tree arg0 = TREE_VALUE (arglist);
+ tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
+ tree res, narg0, narg1;
+
+ if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
+ return NULL_TREE;
+
+ /* Calculate the result when the argument is a constant. */
+ if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
+ return res;
+
+ /* If either argument to hypot has a negate or abs, strip that off.
+ E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
+ narg0 = fold_strip_sign_ops (arg0);
+ narg1 = fold_strip_sign_ops (arg1);
+ if (narg0 || narg1)
+ {
+ tree narglist = tree_cons (NULL_TREE, narg0 ? narg0 : arg0,
+ build_tree_list (NULL_TREE,
+ narg1 ? narg1 : arg1));
+ return build_function_call_expr (fndecl, narglist);
+ }
+
+ /* If either argument is zero, hypot is fabs of the other. */
+ if (real_zerop (arg0))
+ return fold_build1 (ABS_EXPR, type, arg1);
+ else if (real_zerop (arg1))
+ return fold_build1 (ABS_EXPR, type, arg0);
+
+ /* hypot(x,x) -> fabs(x)*sqrt(2). */
+ if (flag_unsafe_math_optimizations
+ && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
+ {
+ REAL_VALUE_TYPE sqrt2;
+
+ real_sqrt (&sqrt2, TYPE_MODE (type), &dconst2);
+ return fold_build2 (MULT_EXPR, type,
+ fold_build1 (ABS_EXPR, type, arg0),
+ build_real (type, sqrt2));
+ }
+
+ return NULL_TREE;
+}
+
+
/* Fold a builtin function call to pow, powf, or powl. Return
NULL_TREE if no simplification can be made. */
static tree
{
tree arg0 = TREE_VALUE (arglist);
tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
+ tree res;
if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
return NULL_TREE;
+ /* Calculate the result when the argument is a constant. */
+ if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
+ return res;
+
/* Optimize pow(1.0,y) = 1.0. */
if (real_onep (arg0))
return omit_one_operand (type, build_real (type, dconst1), arg1);
}
}
+ /* Optimize pow(x,1.0/3.0) = cbrt(x). */
+ if (flag_unsafe_math_optimizations)
+ {
+ const REAL_VALUE_TYPE dconstroot
+ = real_value_truncate (TYPE_MODE (type), dconstthird);
+
+ if (REAL_VALUES_EQUAL (c, dconstroot))
+ {
+ tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
+ if (cbrtfn != NULL_TREE)
+ {
+ tree arglist = build_tree_list (NULL_TREE, arg0);
+ return build_function_call_expr (cbrtfn, arglist);
+ }
+ }
+ }
+
/* Check for an integer exponent. */
n = real_to_integer (&c);
real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
}
/* A subroutine of fold_builtin to fold the various exponent
- functions. EXP is the CALL_EXPR of a call to a builtin function.
- VALUE is the value which will be raised to a power. */
+ functions. Return NULL_TREE if no simplification can be made.
+ FUNC is the corresponding MPFR exponent function. */
static tree
fold_builtin_exponent (tree fndecl, tree arglist,
- const REAL_VALUE_TYPE *value)
+ int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
{
tree type = TREE_TYPE (TREE_TYPE (fndecl));
tree arg = TREE_VALUE (arglist);
-
- /* Optimize exp*(0.0) = 1.0. */
- if (real_zerop (arg))
- return build_real (type, dconst1);
-
- /* Optimize expN(1.0) = N. */
- if (real_onep (arg))
- {
- REAL_VALUE_TYPE cst;
-
- real_convert (&cst, TYPE_MODE (type), value);
- return build_real (type, cst);
- }
-
- /* Attempt to evaluate expN(integer) at compile-time. */
- if (flag_unsafe_math_optimizations
- && TREE_CODE (arg) == REAL_CST
- && ! TREE_CONSTANT_OVERFLOW (arg))
- {
- REAL_VALUE_TYPE cint;
- REAL_VALUE_TYPE c;
- HOST_WIDE_INT n;
-
- c = TREE_REAL_CST (arg);
- n = real_to_integer (&c);
- real_from_integer (&cint, VOIDmode, n,
- n < 0 ? -1 : 0, 0);
- if (real_identical (&c, &cint))
- {
- REAL_VALUE_TYPE x;
-
- real_powi (&x, TYPE_MODE (type), value, n);
- return build_real (type, x);
- }
- }
+ tree res;
+
+ /* Calculate the result when the argument is a constant. */
+ if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
+ return res;
/* Optimize expN(logN(x)) = x. */
if (flag_unsafe_math_optimizations)
{
const enum built_in_function fcode = builtin_mathfn_code (arg);
- if ((value == &dconste
+ if ((func == mpfr_exp
&& (fcode == BUILT_IN_LOG
|| fcode == BUILT_IN_LOGF
|| fcode == BUILT_IN_LOGL))
- || (value == &dconst2
+ || (func == mpfr_exp2
&& (fcode == BUILT_IN_LOG2
|| fcode == BUILT_IN_LOG2F
|| fcode == BUILT_IN_LOG2L))
- || (value == &dconst10
+ || (func == mpfr_exp10
&& (fcode == BUILT_IN_LOG10
|| fcode == BUILT_IN_LOG10F
|| fcode == BUILT_IN_LOG10L)))
return 0;
}
+/* Return true if VAR is a VAR_DECL or a component thereof. */
+
+static bool
+var_decl_component_p (tree var)
+{
+ tree inner = var;
+ while (handled_component_p (inner))
+ inner = TREE_OPERAND (inner, 0);
+ return SSA_VAR_P (inner);
+}
+
/* Fold function call to builtin memset. Return
NULL_TREE if no simplification can be made. */
&& !POINTER_TYPE_P (TREE_TYPE (var)))
return 0;
+ if (! var_decl_component_p (var))
+ return 0;
+
length = tree_low_cst (len, 1);
if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
|| get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
}
ret = build_int_cst_type (TREE_TYPE (var), cval);
- ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
+ ret = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (var), var, ret);
if (ignore)
return ret;
fold_builtin_memory_op (tree arglist, tree type, bool ignore, int endp)
{
tree dest, src, len, destvar, srcvar, expr;
- unsigned HOST_WIDE_INT length;
if (! validate_arglist (arglist,
POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
expr = len;
else
{
- if (! host_integerp (len, 1))
- return 0;
+ tree srctype, desttype;
+ if (endp == 3)
+ {
+ int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
+ int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
- if (TREE_SIDE_EFFECTS (dest) || TREE_SIDE_EFFECTS (src))
- return 0;
+ /* Both DEST and SRC must be pointer types.
+ ??? This is what old code did. Is the testing for pointer types
+ really mandatory?
- destvar = dest;
- STRIP_NOPS (destvar);
- if (TREE_CODE (destvar) != ADDR_EXPR)
- return 0;
+ If either SRC is readonly or length is 1, we can use memcpy. */
+ if (dest_align && src_align
+ && (readonly_data_expr (src)
+ || (host_integerp (len, 1)
+ && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
+ tree_low_cst (len, 1)))))
+ {
+ tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
+ if (!fn)
+ return 0;
+ return build_function_call_expr (fn, arglist);
+ }
+ return 0;
+ }
- destvar = TREE_OPERAND (destvar, 0);
- if (TREE_THIS_VOLATILE (destvar))
+ if (!host_integerp (len, 0))
return 0;
-
- if (!INTEGRAL_TYPE_P (TREE_TYPE (destvar))
- && !POINTER_TYPE_P (TREE_TYPE (destvar))
- && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (destvar)))
+ /* FIXME:
+ This logic loses for arguments like (type *)malloc (sizeof (type)),
+ since we strip the casts up to the VOID return value from malloc.
+ Perhaps we ought to inherit the type from a non-VOID argument here? */
+ STRIP_NOPS (src);
+ STRIP_NOPS (dest);
+ srctype = TREE_TYPE (TREE_TYPE (src));
+ desttype = TREE_TYPE (TREE_TYPE (dest));
+ if (!srctype || !desttype
+ || !TYPE_SIZE_UNIT (srctype)
+ || !TYPE_SIZE_UNIT (desttype)
+ || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
+ || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
+ || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
+ || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
return 0;
- srcvar = src;
- STRIP_NOPS (srcvar);
- if (TREE_CODE (srcvar) != ADDR_EXPR)
+ if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
+ < (int) TYPE_ALIGN (desttype)
+ || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
+ < (int) TYPE_ALIGN (srctype)))
return 0;
- srcvar = TREE_OPERAND (srcvar, 0);
+ if (!ignore)
+ dest = builtin_save_expr (dest);
+
+ srcvar = build_fold_indirect_ref (src);
if (TREE_THIS_VOLATILE (srcvar))
return 0;
-
- if (!INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
- && !POINTER_TYPE_P (TREE_TYPE (srcvar))
- && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (srcvar)))
+ if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
+ return 0;
+ /* With memcpy, it is possible to bypass aliasing rules, so without
+ this check, e.g. execute/20060930-2.c would be misoptimized, because
+ it uses a conflicting alias set to hold the argument for the memcpy call.
+ This check is probably unnecessary with -fno-strict-aliasing.
+ Similarly for destvar. See also PR29286. */
+ if (!var_decl_component_p (srcvar)
+ /* Accept: memcpy (*char_var, "test", 1); which simplifies
+ to char_var='t'; */
+ || is_gimple_min_invariant (srcvar)
+ || readonly_data_expr (src))
return 0;
- length = tree_low_cst (len, 1);
- if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (destvar))) != length
- || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
- < (int) length
- || GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (srcvar))) != length
- || get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
- < (int) length)
+ destvar = build_fold_indirect_ref (dest);
+ if (TREE_THIS_VOLATILE (destvar))
+ return 0;
+ if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
+ return 0;
+ if (!var_decl_component_p (destvar))
return 0;
- if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
+ if (srctype == desttype
+ || (gimple_in_ssa_p (cfun)
+ && tree_ssa_useless_type_conversion_1 (desttype, srctype)))
+ expr = srcvar;
+ else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
|| POINTER_TYPE_P (TREE_TYPE (srcvar)))
&& (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
|| POINTER_TYPE_P (TREE_TYPE (destvar))))
expr = fold_convert (TREE_TYPE (destvar), srcvar);
else
expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
- expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
+ expr = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (destvar), destvar, expr);
}
if (ignore)
arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
build_int_cst (NULL_TREE,
~ (unsigned HOST_WIDE_INT) 0x7f));
- arg = fold_build2 (EQ_EXPR, integer_type_node,
- arg, integer_zero_node);
-
- if (in_gimple_form && !TREE_CONSTANT (arg))
- return NULL_TREE;
- else
- return arg;
+ return fold_build2 (EQ_EXPR, integer_type_node,
+ arg, integer_zero_node);
}
}
arg = fold_convert (unsigned_type_node, TREE_VALUE (arglist));
arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
build_int_cst (unsigned_type_node, target_digit0));
- arg = fold_build2 (LE_EXPR, integer_type_node, arg,
- build_int_cst (unsigned_type_node, 9));
- if (in_gimple_form && !TREE_CONSTANT (arg))
- return NULL_TREE;
- else
- return arg;
+ return fold_build2 (LE_EXPR, integer_type_node, arg,
+ build_int_cst (unsigned_type_node, 9));
}
}
return fold_build1 (ABS_EXPR, type, arg);
}
+/* Fold a call to builtin fmin or fmax. */
+
+static tree
+fold_builtin_fmin_fmax (tree arglist, tree type, bool max)
+{
+ if (validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
+ {
+ tree arg0 = TREE_VALUE (arglist);
+ tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
+ /* Calculate the result when the argument is a constant. */
+ tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
+
+ if (res)
+ return res;
+
+ /* If either argument is NaN, return the other one. Avoid the
+ transformation if we get (and honor) a signalling NaN. Using
+ omit_one_operand() ensures we create a non-lvalue. */
+ if (TREE_CODE (arg0) == REAL_CST
+ && real_isnan (&TREE_REAL_CST (arg0))
+ && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
+ || ! TREE_REAL_CST (arg0).signalling))
+ return omit_one_operand (type, arg1, arg0);
+ if (TREE_CODE (arg1) == REAL_CST
+ && real_isnan (&TREE_REAL_CST (arg1))
+ && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
+ || ! TREE_REAL_CST (arg1).signalling))
+ return omit_one_operand (type, arg0, arg1);
+
+ /* Transform fmin/fmax(x,x) -> x. */
+ if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
+ return omit_one_operand (type, arg0, arg1);
+
+ /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
+ functions to return the numeric arg if the other one is NaN.
+ These tree codes don't honor that, so only transform if
+ -ffinite-math-only is set. C99 doesn't require -0.0 to be
+ handled, so we don't have to worry about it either. */
+ if (flag_finite_math_only)
+ return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
+ fold_convert (type, arg0),
+ fold_convert (type, arg1));
+ }
+ return NULL_TREE;
+}
+
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
EXP is the CALL_EXPR for the call. */
switch (builtin_index)
{
case BUILT_IN_ISINF:
- if (!MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
+ if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
return omit_one_operand (type, integer_zero_node, arg);
if (TREE_CODE (arg) == REAL_CST)
return NULL_TREE;
case BUILT_IN_FINITE:
- if (!MODE_HAS_NANS (TYPE_MODE (TREE_TYPE (arg)))
- && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
- return omit_one_operand (type, integer_zero_node, arg);
+ if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
+ && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
+ return omit_one_operand (type, integer_one_node, arg);
if (TREE_CODE (arg) == REAL_CST)
{
return NULL_TREE;
case BUILT_IN_ISNAN:
- if (!MODE_HAS_NANS (TYPE_MODE (TREE_TYPE (arg))))
+ if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
return omit_one_operand (type, integer_zero_node, arg);
if (TREE_CODE (arg) == REAL_CST)
if (unordered_code == UNORDERED_EXPR)
{
- if (!MODE_HAS_NANS (TYPE_MODE (TREE_TYPE (arg0))))
+ if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
return omit_two_operands (type, integer_zero_node, arg0, arg1);
return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
}
- code = MODE_HAS_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
- : ordered_code;
+ code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
+ : ordered_code;
return fold_build1 (TRUTH_NOT_EXPR, type,
fold_build2 (code, type, arg0, arg1));
}
CASE_FLT_FN (BUILT_IN_CBRT):
return fold_builtin_cbrt (arglist, type);
+ CASE_FLT_FN (BUILT_IN_ASIN):
+ if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ return do_mpfr_arg1 (TREE_VALUE (arglist), type, mpfr_asin,
+ &dconstm1, &dconst1, true);
+ break;
+
+ CASE_FLT_FN (BUILT_IN_ACOS):
+ if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ return do_mpfr_arg1 (TREE_VALUE (arglist), type, mpfr_acos,
+ &dconstm1, &dconst1, true);
+ break;
+
+ CASE_FLT_FN (BUILT_IN_ATAN):
+ if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ return do_mpfr_arg1 (TREE_VALUE (arglist), type, mpfr_atan,
+ NULL, NULL, 0);
+ break;
+
+ CASE_FLT_FN (BUILT_IN_ASINH):
+ if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ return do_mpfr_arg1 (TREE_VALUE (arglist), type, mpfr_asinh,
+ NULL, NULL, 0);
+ break;
+
+ CASE_FLT_FN (BUILT_IN_ACOSH):
+ if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ return do_mpfr_arg1 (TREE_VALUE (arglist), type, mpfr_acosh,
+ &dconst1, NULL, true);
+ break;
+
+ CASE_FLT_FN (BUILT_IN_ATANH):
+ if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ return do_mpfr_arg1 (TREE_VALUE (arglist), type, mpfr_atanh,
+ &dconstm1, &dconst1, false);
+ break;
+
CASE_FLT_FN (BUILT_IN_SIN):
- return fold_builtin_sin (arglist);
+ if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ return do_mpfr_arg1 (TREE_VALUE (arglist), type, mpfr_sin,
+ NULL, NULL, 0);
+ break;
CASE_FLT_FN (BUILT_IN_COS):
return fold_builtin_cos (arglist, type, fndecl);
+ CASE_FLT_FN (BUILT_IN_TAN):
+ return fold_builtin_tan (arglist, type);
+
+ CASE_FLT_FN (BUILT_IN_SINCOS):
+ return fold_builtin_sincos (arglist);
+
+ CASE_FLT_FN (BUILT_IN_CEXP):
+ return fold_builtin_cexp (arglist, type);
+
+ CASE_FLT_FN (BUILT_IN_CEXPI):
+ if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ return do_mpfr_sincos (TREE_VALUE (arglist), NULL_TREE, NULL_TREE);
+
+ CASE_FLT_FN (BUILT_IN_SINH):
+ if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ return do_mpfr_arg1 (TREE_VALUE (arglist), type, mpfr_sinh,
+ NULL, NULL, 0);
+ break;
+
+ CASE_FLT_FN (BUILT_IN_COSH):
+ return fold_builtin_cosh (arglist, type, fndecl);
+
+ CASE_FLT_FN (BUILT_IN_TANH):
+ if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ return do_mpfr_arg1 (TREE_VALUE (arglist), type, mpfr_tanh,
+ NULL, NULL, 0);
+ break;
+
+ CASE_FLT_FN (BUILT_IN_ERF):
+ if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ return do_mpfr_arg1 (TREE_VALUE (arglist), type, mpfr_erf,
+ NULL, NULL, 0);
+ break;
+
+ CASE_FLT_FN (BUILT_IN_ERFC):
+ if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ return do_mpfr_arg1 (TREE_VALUE (arglist), type, mpfr_erfc,
+ NULL, NULL, 0);
+ break;
+
+ CASE_FLT_FN (BUILT_IN_TGAMMA):
+ if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ return do_mpfr_arg1 (TREE_VALUE (arglist), type, mpfr_gamma,
+ NULL, NULL, 0);
+ break;
+
CASE_FLT_FN (BUILT_IN_EXP):
- return fold_builtin_exponent (fndecl, arglist, &dconste);
+ return fold_builtin_exponent (fndecl, arglist, mpfr_exp);
CASE_FLT_FN (BUILT_IN_EXP2):
- return fold_builtin_exponent (fndecl, arglist, &dconst2);
+ return fold_builtin_exponent (fndecl, arglist, mpfr_exp2);
CASE_FLT_FN (BUILT_IN_EXP10):
CASE_FLT_FN (BUILT_IN_POW10):
- return fold_builtin_exponent (fndecl, arglist, &dconst10);
+ return fold_builtin_exponent (fndecl, arglist, mpfr_exp10);
+ CASE_FLT_FN (BUILT_IN_EXPM1):
+ if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ return do_mpfr_arg1 (TREE_VALUE (arglist), type, mpfr_expm1,
+ NULL, NULL, 0);
+ break;
+
CASE_FLT_FN (BUILT_IN_LOG):
- return fold_builtin_logarithm (fndecl, arglist, &dconste);
+ return fold_builtin_logarithm (fndecl, arglist, mpfr_log);
CASE_FLT_FN (BUILT_IN_LOG2):
- return fold_builtin_logarithm (fndecl, arglist, &dconst2);
+ return fold_builtin_logarithm (fndecl, arglist, mpfr_log2);
CASE_FLT_FN (BUILT_IN_LOG10):
- return fold_builtin_logarithm (fndecl, arglist, &dconst10);
-
- CASE_FLT_FN (BUILT_IN_TAN):
- return fold_builtin_tan (arglist);
+ return fold_builtin_logarithm (fndecl, arglist, mpfr_log10);
- CASE_FLT_FN (BUILT_IN_ATAN):
- return fold_builtin_atan (arglist, type);
+ CASE_FLT_FN (BUILT_IN_LOG1P):
+ if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ return do_mpfr_arg1 (TREE_VALUE (arglist), type, mpfr_log1p,
+ &dconstm1, NULL, false);
+ break;
+ CASE_FLT_FN (BUILT_IN_ATAN2):
+ if (validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
+ return do_mpfr_arg2 (TREE_VALUE (arglist),
+ TREE_VALUE (TREE_CHAIN (arglist)),
+ type, mpfr_atan2);
+ break;
+
+ CASE_FLT_FN (BUILT_IN_FMA):
+ if (validate_arglist (arglist, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
+ return do_mpfr_arg3 (TREE_VALUE (arglist),
+ TREE_VALUE (TREE_CHAIN (arglist)),
+ TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))),
+ type, mpfr_fma);
+ break;
+
+ CASE_FLT_FN (BUILT_IN_FMIN):
+ return fold_builtin_fmin_fmax (arglist, type, /*max=*/false);
+
+ CASE_FLT_FN (BUILT_IN_FMAX):
+ return fold_builtin_fmin_fmax (arglist, type, /*max=*/true);
+
+ CASE_FLT_FN (BUILT_IN_HYPOT):
+ return fold_builtin_hypot (fndecl, arglist, type);
+
CASE_FLT_FN (BUILT_IN_POW):
return fold_builtin_pow (fndecl, arglist, type);
CASE_FLT_FN (BUILT_IN_LLRINT):
return fold_fixed_mathfn (fndecl, arglist);
+ case BUILT_IN_BSWAP32:
+ case BUILT_IN_BSWAP64:
+ return fold_builtin_bswap (fndecl, arglist);
+
CASE_INT_FN (BUILT_IN_FFS):
CASE_INT_FN (BUILT_IN_CLZ):
CASE_INT_FN (BUILT_IN_CTZ):
fold_builtin (tree fndecl, tree arglist, bool ignore)
{
tree exp = fold_builtin_1 (fndecl, arglist, ignore);
- if (exp)
+ if (exp && !ignore)
{
- exp = build1 (NOP_EXPR, TREE_TYPE (exp), exp);
+ exp = build1 (NOP_EXPR, GENERIC_TREE_TYPE (exp), exp);
TREE_NO_WARNING (exp) = 1;
}
of tree_codes. If the last specifier is a 0, that represents an
ellipses, otherwise the last specifier must be a VOID_TYPE. */
-static int
+int
validate_arglist (tree arglist, ...)
{
enum tree_code code;
arg_mask = 6;
is_strlen = 1;
break;
+ case BUILT_IN_STRNCAT_CHK:
+ /* For __strncat_chk the warning will be emitted only if overflowing
+ by at least strlen (dest) + 1 bytes. */
+ arg_mask = 12;
+ break;
case BUILT_IN_STRNCPY_CHK:
arg_mask = 12;
break;
if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
return;
}
+ else if (fcode == BUILT_IN_STRNCAT_CHK)
+ {
+ tree src = TREE_VALUE (TREE_CHAIN (arglist));
+ if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
+ return;
+ src = c_strlen (src, 1);
+ if (! src || ! host_integerp (src, 1))
+ {
+ locus = EXPR_LOCATION (exp);
+ warning (0, "%Hcall to %D might overflow destination buffer",
+ &locus, get_callee_fndecl (exp));
+ return;
+ }
+ else if (tree_int_cst_lt (src, size))
+ return;
+ }
else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
return;
}
return true;
}
+
+/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
+   and no overflow/underflow occurred.  INEXACT is true if M was not
+   exactly calculated.  TYPE is the tree type for the result.  This
+   function assumes that you cleared the MPFR flags and then
+   calculated M to see if anything subsequently set a flag prior to
+   entering this function.  Return NULL_TREE if any checks fail.  */
+
+static tree
+do_mpfr_ckconv(mpfr_srcptr m, tree type, int inexact)
+{
+  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
+     overflow/underflow occurred.  If -frounding-math, proceed iff the
+     result of calling FUNC was exact (INEXACT == 0).  */
+  if (mpfr_number_p (m) && !mpfr_overflow_p() && !mpfr_underflow_p()
+      && (!flag_rounding_math || !inexact))
+    {
+      REAL_VALUE_TYPE rr;
+
+      real_from_mpfr (&rr, m);
+      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
+         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
+         but the mpfr_t is not, then we underflowed in the
+         conversion.  */
+      if (!real_isnan (&rr) && !real_isinf (&rr)
+          && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
+        {
+          REAL_VALUE_TYPE rmode;
+
+          /* Round from the internal REAL_VALUE_TYPE representation
+             to the target type's mode.  */
+          real_convert (&rmode, TYPE_MODE (type), &rr);
+          /* Proceed iff the specified mode can hold the value
+             without further rounding.  */
+          if (real_identical (&rmode, &rr))
+            return build_real (type, rmode);
+        }
+    }
+  return NULL_TREE;
+}
+
+/* If argument ARG is a REAL_CST, call the one-argument mpfr function
+   FUNC on it and return the resulting value as a tree with type TYPE.
+   If MIN and/or MAX are not NULL, then the supplied ARG must be
+   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
+   acceptable values, otherwise they are not.  The mpfr precision is
+   set to the precision of TYPE.  We assume that function FUNC returns
+   zero if the result could be calculated exactly within the requested
+   precision.  Returns NULL_TREE when the argument is not a foldable
+   constant or any exactness/range check fails.  */
+
+static tree
+do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
+              const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
+              bool inclusive)
+{
+  tree result = NULL_TREE;
+
+  STRIP_NOPS (arg);
+
+  /* Proceed only for a valid real constant argument.  */
+  if (TREE_CODE (arg) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg))
+    {
+      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
+
+      /* Reject NaN/Inf and enforce the optional MIN/MAX bounds.  */
+      if (!real_isnan (ra) && !real_isinf (ra)
+          && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
+          && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
+        {
+          const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
+          int inexact;
+          mpfr_t m;
+
+          mpfr_init2 (m, prec);
+          mpfr_from_real (m, ra);
+          /* Clear MPFR exception flags so do_mpfr_ckconv can detect
+             overflow/underflow raised by FUNC.  */
+          mpfr_clear_flags();
+          /* FUNC's nonzero (ternary) return indicates the result was
+             rounded, i.e. not exact at this precision.  */
+          inexact = func (m, m, GMP_RNDN);
+          result = do_mpfr_ckconv (m, type, inexact);
+          mpfr_clear (m);
+        }
+    }
+
+  return result;
+}
+
+/* If argument ARG is a REAL_CST, call the two-argument mpfr function
+   FUNC on it and return the resulting value as a tree with type TYPE.
+   The mpfr precision is set to the precision of TYPE.  We assume that
+   function FUNC returns zero if the result could be calculated
+   exactly within the requested precision.  Returns NULL_TREE unless
+   both arguments are finite real constants and the result converts
+   cleanly (see do_mpfr_ckconv).  */
+
+static tree
+do_mpfr_arg2 (tree arg1, tree arg2, tree type,
+              int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
+{
+  tree result = NULL_TREE;
+
+  STRIP_NOPS (arg1);
+  STRIP_NOPS (arg2);
+
+  /* Proceed only if both arguments are valid real constants.  */
+  if (TREE_CODE (arg1) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg1)
+      && TREE_CODE (arg2) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg2))
+    {
+      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
+      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
+
+      if (!real_isnan (ra1) && !real_isinf (ra1)
+          && !real_isnan (ra2) && !real_isinf (ra2))
+        {
+          const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
+          int inexact;
+          mpfr_t m1, m2;
+
+          mpfr_inits2 (prec, m1, m2, NULL);
+          mpfr_from_real (m1, ra1);
+          mpfr_from_real (m2, ra2);
+          /* Clear MPFR exception flags so do_mpfr_ckconv can detect
+             overflow/underflow raised by FUNC.  */
+          mpfr_clear_flags();
+          /* M1 doubles as the result operand; FUNC's nonzero return
+             means the result was rounded.  */
+          inexact = func (m1, m1, m2, GMP_RNDN);
+          result = do_mpfr_ckconv (m1, type, inexact);
+          mpfr_clears (m1, m2, NULL);
+        }
+    }
+
+  return result;
+}
+
+/* If argument ARG is a REAL_CST, call the three-argument mpfr function
+   FUNC on it and return the resulting value as a tree with type TYPE.
+   The mpfr precision is set to the precision of TYPE.  We assume that
+   function FUNC returns zero if the result could be calculated
+   exactly within the requested precision.  Returns NULL_TREE unless
+   all three arguments are finite real constants and the result
+   converts cleanly (see do_mpfr_ckconv).  */
+
+static tree
+do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
+              int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
+{
+  tree result = NULL_TREE;
+
+  STRIP_NOPS (arg1);
+  STRIP_NOPS (arg2);
+  STRIP_NOPS (arg3);
+
+  /* Proceed only if all three arguments are valid real constants.  */
+  if (TREE_CODE (arg1) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg1)
+      && TREE_CODE (arg2) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg2)
+      && TREE_CODE (arg3) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg3))
+    {
+      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
+      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
+      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
+
+      if (!real_isnan (ra1) && !real_isinf (ra1)
+          && !real_isnan (ra2) && !real_isinf (ra2)
+          && !real_isnan (ra3) && !real_isinf (ra3))
+        {
+          const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
+          int inexact;
+          mpfr_t m1, m2, m3;
+
+          mpfr_inits2 (prec, m1, m2, m3, NULL);
+          mpfr_from_real (m1, ra1);
+          mpfr_from_real (m2, ra2);
+          mpfr_from_real (m3, ra3);
+          /* Clear MPFR exception flags so do_mpfr_ckconv can detect
+             overflow/underflow raised by FUNC.  */
+          mpfr_clear_flags();
+          /* M1 doubles as the result operand; FUNC's nonzero return
+             means the result was rounded.  */
+          inexact = func (m1, m1, m2, m3, GMP_RNDN);
+          result = do_mpfr_ckconv (m1, type, inexact);
+          mpfr_clears (m1, m2, m3, NULL);
+        }
+    }
+
+  return result;
+}
+
+/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
+   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
+   If ARG_SINP and ARG_COSP are NULL then the result is returned
+   as a complex value.
+   The type is taken from the type of ARG and is used for setting the
+   precision of the calculation and results.  Returns NULL_TREE when
+   the argument is not a foldable finite constant, either conversion
+   fails, or the supplied pointers do not point to TYPE.  */
+
+static tree
+do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
+{
+  tree result = NULL_TREE;
+
+  STRIP_NOPS (arg);
+
+  /* Proceed only for a valid real constant argument.  */
+  if (TREE_CODE (arg) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg))
+    {
+      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
+
+      if (!real_isnan (ra) && !real_isinf (ra))
+        {
+          tree const type = TREE_TYPE (arg);
+          const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
+          tree result_s, result_c;
+          int inexact;
+          mpfr_t m, ms, mc;
+
+          mpfr_inits2 (prec, m, ms, mc, NULL);
+          mpfr_from_real (m, ra);
+          /* Clear MPFR exception flags so do_mpfr_ckconv can detect
+             overflow/underflow.  */
+          mpfr_clear_flags();
+          /* NOTE(review): a single INEXACT value is reused for both
+             conversions below; mpfr_sin_cos's return is assumed to
+             combine the ternary status of both results -- confirm
+             against the MPFR manual.  */
+          inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
+          result_s = do_mpfr_ckconv (ms, type, inexact);
+          result_c = do_mpfr_ckconv (mc, type, inexact);
+          mpfr_clears (m, ms, mc, NULL);
+          if (result_s && result_c)
+            {
+              /* If we are to return in a complex value do so.
+                 NOTE(review): build_complex takes (type, real, imag),
+                 so this yields cos + i*sin (cexpi ordering) --
+                 confirm against callers.  */
+              if (!arg_sinp && !arg_cosp)
+                return build_complex (build_complex_type (type),
+                                      result_c, result_s);
+
+              /* Dereference the sin/cos pointer arguments.  */
+              arg_sinp = build_fold_indirect_ref (arg_sinp);
+              arg_cosp = build_fold_indirect_ref (arg_cosp);
+              /* Proceed if valid pointer type were passed in.  */
+              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
+                  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
+                {
+                  /* Set the values.  */
+                  result_s = fold_build2 (GIMPLE_MODIFY_STMT, type, arg_sinp,
+                                          result_s);
+                  TREE_SIDE_EFFECTS (result_s) = 1;
+                  result_c = fold_build2 (GIMPLE_MODIFY_STMT, type, arg_cosp,
+                                          result_c);
+                  TREE_SIDE_EFFECTS (result_c) = 1;
+                  /* Combine the assignments into a compound expr.  */
+                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
+                                                    result_s, result_c));
+                }
+            }
+        }
+    }
+  return result;
+}