#include "tree-mudflap.h"
#include "tree-flow.h"
#include "value-prof.h"
+#include "diagnostic.h"
#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
+static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
static tree fold_builtin_strchr (tree, tree, tree);
+static tree fold_builtin_memchr (tree, tree, tree, tree);
static tree fold_builtin_memcmp (tree, tree, tree);
static tree fold_builtin_strcmp (tree, tree);
static tree fold_builtin_strncmp (tree, tree, tree);
static tree do_mpfr_arg3 (tree, tree, tree, tree,
int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
+#if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
+static tree do_mpfr_bessel_n (tree, tree, tree,
+ int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
+ const REAL_VALUE_TYPE *, bool);
+static tree do_mpfr_remquo (tree, tree, tree);
+static tree do_mpfr_lgamma_r (tree, tree, tree);
+#endif
/* Return true if NODE should be considered for inline expansion regardless
of the optimization level. This means whenever a function is invoked with
align = MIN (inner, max_align);
break;
- case PLUS_EXPR:
+ case POINTER_PLUS_EXPR:
/* If sum of pointer + int, restrict our maximum alignment to that
imposed by the integer. If not, we can't do any better than
ALIGN. */
need to go on during alloca. */
current_function_calls_setjmp = 1;
- /* Set this so all the registers get saved in our frame; we need to be
- able to copy the saved values for any registers from frames we unwind. */
+ /* We have a nonlocal label. */
current_function_has_nonlocal_label = 1;
}
#endif
{ /* Nothing */ }
- /* @@@ This is a kludge. Not all machine descriptions define a blockage
- insn, but we must not allow the code we just generated to be reordered
- by scheduling. Specifically, the update of the frame pointer must
- happen immediately, not later. So emit an ASM_INPUT to act as blockage
- insn. */
- emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
+ /* We must not allow the code we just generated to be reordered by
+ scheduling. Specifically, the update of the frame pointer must
+ happen immediately, not later. */
+ emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
fcodel = BUILT_IN_MATHFN##L ; break;
+/* Similar to above, but appends _R after any F/L suffix. */
+#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
+ case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
+ fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
+ fcodel = BUILT_IN_MATHFN##L_R ; break;
/* Return mathematic function equivalent to FN but operating directly
on TYPE, if available. If we can't do the conversion, return zero. */
CASE_MATHFN (BUILT_IN_FMOD)
CASE_MATHFN (BUILT_IN_FREXP)
CASE_MATHFN (BUILT_IN_GAMMA)
+ CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
CASE_MATHFN (BUILT_IN_HUGE_VAL)
CASE_MATHFN (BUILT_IN_HYPOT)
CASE_MATHFN (BUILT_IN_ILOGB)
CASE_MATHFN (BUILT_IN_LDEXP)
CASE_MATHFN (BUILT_IN_LFLOOR)
CASE_MATHFN (BUILT_IN_LGAMMA)
+ CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
CASE_MATHFN (BUILT_IN_LLCEIL)
CASE_MATHFN (BUILT_IN_LLFLOOR)
CASE_MATHFN (BUILT_IN_LLRINT)
exp = build_call_expr (fndecl, 1, arg);
}
- op0 = expand_expr (arg, subtarget, VOIDmode, 0);
+ op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
start_sequence ();
exp = build_call_expr (fndecl, 1, arg);
}
- op0 = expand_expr (arg, subtarget, VOIDmode, 0);
+ op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
start_sequence ();
exp = build_call_expr (fndecl, 1, arg);
}
- op0 = expand_expr (arg, subtarget, VOIDmode, 0);
+ op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
if (mode != GET_MODE (op0))
op0 = convert_to_mode (mode, op0, 0);
op1 = gen_reg_rtx (mode);
op2 = gen_reg_rtx (mode);
- op0 = expand_expr (arg, subtarget, VOIDmode, 0);
+ op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
/* Compute into op1 and op2. */
expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
tree call, fn = NULL_TREE, narg;
tree ctype = build_complex_type (type);
- /* We can expand via the C99 cexp function. */
- gcc_assert (TARGET_C99_FUNCTIONS);
-
if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
fn = built_in_decls[BUILT_IN_CEXPF];
else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
fn = built_in_decls[BUILT_IN_CEXPL];
else
gcc_unreachable ();
+
+ /* If we don't have a decl for cexp create one. This is the
+ friendliest fallback if the user calls __builtin_cexpi
+ without full target C99 function support. */
+ if (fn == NULL_TREE)
+ {
+ tree fntype;
+ const char *name = NULL;
+
+ if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
+ name = "cexpf";
+ else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
+ name = "cexp";
+ else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
+ name = "cexpl";
+
+ fntype = build_function_type_list (ctype, ctype, NULL_TREE);
+ fn = build_fn_decl (name, fntype);
+ }
+
narg = fold_build2 (COMPLEX_EXPR, ctype,
build_real (type, dconst0), arg);
/* Make sure not to fold the cexp call again. */
call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
- return expand_expr (build_call_nary (ctype, call, 1, arg),
- target, VOIDmode, 0);
+ return expand_expr (build_call_nary (ctype, call, 1, narg),
+ target, VOIDmode, EXPAND_NORMAL);
}
/* Now build the proper return type. */
return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
make_tree (TREE_TYPE (arg), op2),
make_tree (TREE_TYPE (arg), op1)),
- target, VOIDmode, 0);
+ target, VOIDmode, EXPAND_NORMAL);
}
/* Expand a call to one of the builtin rounding functions gcc defines
exp = build_call_expr (fndecl, 1, arg);
}
- op0 = expand_expr (arg, subtarget, VOIDmode, 0);
+ op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
start_sequence ();
/* Fall back to floating point rounding optab. */
fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
- /* We shouldn't get here on targets without TARGET_C99_FUNCTIONS.
- ??? Perhaps convert (int)floorf(x) into (int)floor((double)x). */
- gcc_assert (fallback_fndecl != NULL_TREE);
+
+ /* For non-C99 targets we may end up without a fallback fndecl here
+ if the user called __builtin_lfloor directly.  In this case emit
+ a call to the floor/ceil variants nevertheless.  This should result
+ in the best user experience for targets that lack full C99 support. */
+ if (fallback_fndecl == NULL_TREE)
+ {
+ tree fntype;
+ const char *name = NULL;
+
+ switch (DECL_FUNCTION_CODE (fndecl))
+ {
+ case BUILT_IN_LCEIL:
+ case BUILT_IN_LLCEIL:
+ name = "ceil";
+ break;
+ case BUILT_IN_LCEILF:
+ case BUILT_IN_LLCEILF:
+ name = "ceilf";
+ break;
+ case BUILT_IN_LCEILL:
+ case BUILT_IN_LLCEILL:
+ name = "ceill";
+ break;
+ case BUILT_IN_LFLOOR:
+ case BUILT_IN_LLFLOOR:
+ name = "floor";
+ break;
+ case BUILT_IN_LFLOORF:
+ case BUILT_IN_LLFLOORF:
+ name = "floorf";
+ break;
+ case BUILT_IN_LFLOORL:
+ case BUILT_IN_LLFLOORL:
+ name = "floorl";
+ break;
+ default:
+ gcc_unreachable ();
+ }
+
+ fntype = build_function_type_list (TREE_TYPE (arg),
+ TREE_TYPE (arg), NULL_TREE);
+ fallback_fndecl = build_fn_decl (name, fntype);
+ }
+
exp = build_call_expr (fallback_fndecl, 1, arg);
tmp = expand_normal (exp);
exp = build_call_expr (fndecl, 1, arg);
}
- op0 = expand_expr (arg, subtarget, VOIDmode, 0);
+ op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
start_sequence ();
&& !optimize_size
&& powi_cost (n) <= POWI_MAX_MULTS)))
{
- op = expand_expr (arg0, subtarget, VOIDmode, 0);
+ op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
if (n != 1)
{
op = force_reg (mode, op);
op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
if (n != 1)
{
- op2 = expand_expr (narg0, subtarget, VOIDmode, 0);
+ op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
op2 = force_reg (mode, op2);
op2 = expand_powi (op2, mode, abs (n / 2));
op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
0, OPTAB_LIB_WIDEN);
if (n != 1)
{
- op2 = expand_expr (narg0, subtarget, VOIDmode, 0);
+ op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
op2 = force_reg (mode, op2);
op2 = expand_powi (op2, mode, abs (n / 3));
op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
|| (! optimize_size
&& powi_cost (n) <= POWI_MAX_MULTS)))
{
- op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
+ op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
op0 = force_reg (mode, op0);
return expand_powi (op0, mode, n);
}
if (target == NULL_RTX)
target = gen_reg_rtx (mode);
- op0 = expand_expr (arg0, subtarget, mode, 0);
+ op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
if (GET_MODE (op0) != mode)
op0 = convert_to_mode (mode, op0, 0);
- op1 = expand_expr (arg1, 0, mode2, 0);
+ op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
if (GET_MODE (op1) != mode2)
op1 = convert_to_mode (mode2, op1, 0);
if (result)
{
+ STRIP_TYPE_NOPS (result);
while (TREE_CODE (result) == COMPOUND_EXPR)
{
expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
const0_rtx, VOIDmode, exp);
}
+/* Expand a call to the memchr builtin.  Return NULL_RTX if we failed and the
+ caller should emit a normal call, otherwise try to get the result
+ in TARGET, if convenient (and in mode MODE if that's convenient). */
+
+static rtx
+expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
+{
+ /* The call must look like memchr (ptr, int, size); otherwise punt
+ and let a normal library call be emitted. */
+ if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
+ INTEGER_TYPE, VOID_TYPE))
+ {
+ tree type = TREE_TYPE (exp);
+ /* Try to fold the call at compile time; on success expand the
+ folded tree instead of the call. */
+ tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
+ CALL_EXPR_ARG (exp, 1),
+ CALL_EXPR_ARG (exp, 2), type);
+ if (result)
+ return expand_expr (result, target, mode, EXPAND_NORMAL);
+ }
+ return NULL_RTX;
+}
+
/* Expand expression EXP, which is a call to the memcmp built-in function.
Return NULL_RTX if we failed and the
caller should emit a normal call, otherwise try to get the result in
/* Create strlen (dst). */
newdst = build_call_expr (strlen_fn, 1, dst);
- /* Create (dst + (cast) strlen (dst)). */
- newdst = fold_convert (TREE_TYPE (dst), newdst);
- newdst = fold_build2 (PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
+ /* Create (dst p+ strlen (dst)). */
+ newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
newdst = builtin_save_expr (newdst);
if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
{
/* Checking arguments is already done in fold_builtin_next_arg
that must be called before this function. */
- return expand_binop (Pmode, add_optab,
+ return expand_binop (ptr_mode, add_optab,
current_function_internal_arg_pointer,
current_function_arg_offset_rtx,
NULL_RTX, 0, OPTAB_LIB_WIDEN);
void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
- tree t;
-
- t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist,
- make_tree (ptr_type_node, nextarg));
- TREE_SIDE_EFFECTS (t) = 1;
-
- expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
+ /* Evaluate VALIST as an lvalue (EXPAND_WRITE) and store NEXTARG
+ into it directly, instead of building and expanding a
+ MODIFY_EXPR tree. */
+ rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
+ convert_move (va_r, nextarg, 0);
}
/* Expand EXP, a call to __builtin_va_start. */
if (boundary > align
&& !integer_zerop (TYPE_SIZE (type)))
{
- t = fold_convert (TREE_TYPE (valist), size_int (boundary - 1));
t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
- build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t));
+ fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
+ valist_tmp, size_int (boundary - 1)));
gimplify_and_add (t, pre_p);
- t = fold_convert (TREE_TYPE (valist), size_int (-boundary));
+ t = fold_convert (sizetype, valist_tmp);
t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
- build2 (BIT_AND_EXPR, TREE_TYPE (valist), valist_tmp, t));
+ fold_convert (TREE_TYPE (valist),
+ fold_build2 (BIT_AND_EXPR, sizetype, t,
+ size_int (-boundary))));
gimplify_and_add (t, pre_p);
}
else
t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
size_binop (MINUS_EXPR, rounded_size, type_size));
- t = fold_convert (TREE_TYPE (addr), t);
- addr = fold_build2 (PLUS_EXPR, TREE_TYPE (addr), addr, t);
+ addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
}
/* Compute new value for AP. */
- t = fold_convert (TREE_TYPE (valist), rounded_size);
- t = build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t);
+ t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
gimplify_and_add (t, pre_p);
arg = CALL_EXPR_ARG (exp, 0);
mode = TYPE_MODE (TREE_TYPE (arg));
- op0 = expand_expr (arg, subtarget, VOIDmode, 0);
+ op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
target = expand_unop (mode, bswap_optab, op0, target, 1);
return NULL_RTX;
/* Compute the argument. */
- op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget, VOIDmode, 0);
+ op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
+ VOIDmode, EXPAND_NORMAL);
/* Compute op, into TARGET if possible.
Set TARGET to wherever the result comes back. */
target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
/* When guessing was done, the hints should be already stripped away. */
- gcc_assert (!flag_guess_branch_prob);
+ gcc_assert (!flag_guess_branch_prob
+ || optimize == 0 || errorcount || sorrycount);
return target;
}
arg = CALL_EXPR_ARG (exp, 0);
mode = TYPE_MODE (TREE_TYPE (arg));
- op0 = expand_expr (arg, subtarget, VOIDmode, 0);
+ op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
return tramp;
}
-/* Expand a call to the built-in signbit, signbitf or signbitl function.
+/* Expand a call to the built-in signbit, signbitf, signbitl, signbitd32,
+ signbitd64, or signbitd128 function.
Return NULL_RTX if a normal call should be emitted rather than expanding
the function in-line. EXP is the expression that is a call to the builtin
function; if convenient, the result should be placed in TARGET. */
{
rtx addr, mem;
- addr = expand_expr (loc, NULL, Pmode, EXPAND_SUM);
+ addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
/* Note that we explicitly do not want any alias information for this
memory, so that we kill all other live memories. Otherwise we don't
rtx target, bool ignore)
{
rtx val, mem;
+ enum machine_mode old_mode;
/* Expand the operands. */
mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
- val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL, mode, EXPAND_NORMAL);
- /* If VAL is promoted to a wider mode, convert it back to MODE. */
- val = convert_to_mode (mode, val, 1);
+ val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
+ /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
+ of CONST_INTs, where we know the old_mode only from the call argument. */
+ old_mode = GET_MODE (val);
+ if (old_mode == VOIDmode)
+ old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
+ val = convert_modes (mode, old_mode, val, 1);
if (ignore)
return expand_sync_operation (mem, val, code);
bool is_bool, rtx target)
{
rtx old_val, new_val, mem;
+ enum machine_mode old_mode;
/* Expand the operands. */
mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
- old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL, mode, EXPAND_NORMAL);
- /* If OLD_VAL is promoted to a wider mode, convert it back to MODE. */
- old_val = convert_to_mode (mode, old_val, 1);
-
- new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL, mode, EXPAND_NORMAL);
- /* If NEW_VAL is promoted to a wider mode, convert it back to MODE. */
- new_val = convert_to_mode (mode, new_val, 1);
+ old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
+ mode, EXPAND_NORMAL);
+ /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
+ of CONST_INTs, where we know the old_mode only from the call argument. */
+ old_mode = GET_MODE (old_val);
+ if (old_mode == VOIDmode)
+ old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
+ old_val = convert_modes (mode, old_mode, old_val, 1);
+
+ new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
+ mode, EXPAND_NORMAL);
+ /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
+ of CONST_INTs, where we know the old_mode only from the call argument. */
+ old_mode = GET_MODE (new_val);
+ if (old_mode == VOIDmode)
+ old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
+ new_val = convert_modes (mode, old_mode, new_val, 1);
if (is_bool)
return expand_bool_compare_and_swap (mem, old_val, new_val, target);
rtx target)
{
rtx val, mem;
+ enum machine_mode old_mode;
/* Expand the operands. */
mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
- val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL, mode, EXPAND_NORMAL);
- /* If VAL is promoted to a wider mode, convert it back to MODE. */
- val = convert_to_mode (mode, val, 1);
+ val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
+ /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
+ of CONST_INTs, where we know the old_mode only from the call argument. */
+ old_mode = GET_MODE (val);
+ if (old_mode == VOIDmode)
+ old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
+ val = convert_modes (mode, old_mode, val, 1);
return expand_sync_lock_test_and_set (mem, val, target);
}
return target;
break;
+ case BUILT_IN_MEMCHR:
+ target = expand_builtin_memchr (exp, target, mode);
+ if (target)
+ return target;
+ break;
+
case BUILT_IN_BCMP:
case BUILT_IN_MEMCMP:
target = expand_builtin_memcmp (exp, target, mode);
break;
CASE_FLT_FN (BUILT_IN_SIGNBIT):
+ case BUILT_IN_SIGNBITD32:
+ case BUILT_IN_SIGNBITD64:
+ case BUILT_IN_SIGNBITD128:
target = expand_builtin_signbit (exp, target);
if (target)
return target;
{
const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
- if (! REAL_VALUE_ISNAN (x) && ! REAL_VALUE_ISINF (x))
+ if (real_isfinite (&x))
{
tree itype = TREE_TYPE (TREE_TYPE (fndecl));
tree ftype = TREE_TYPE (arg);
if (srctype == desttype
|| (gimple_in_ssa_p (cfun)
- && tree_ssa_useless_type_conversion_1 (desttype, srctype)))
+ && useless_type_conversion_p (desttype, srctype)))
expr = srcvar;
else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
|| POINTER_TYPE_P (TREE_TYPE (srcvar)))
len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
ssize_int (1));
- len = fold_convert (TREE_TYPE (dest), len);
- dest = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len);
+ dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
dest = fold_convert (type, dest);
if (expr)
dest = omit_one_operand (type, dest, expr);
build_call_expr (fn, 3, dest, src, len));
}
+/* Fold function call to builtin memchr.  ARG1, ARG2 and LEN are the
+ arguments to the call, and TYPE is its return type.
+ Return NULL_TREE if no simplification can be made. */
+
+static tree
+fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
+{
+ if (!validate_arg (arg1, POINTER_TYPE)
+ || !validate_arg (arg2, INTEGER_TYPE)
+ || !validate_arg (len, INTEGER_TYPE))
+ return NULL_TREE;
+ else
+ {
+ const char *p1;
+
+ /* We can only fold when the searched-for byte and the length
+ are both compile-time constants. */
+ if (TREE_CODE (arg2) != INTEGER_CST
+ || !host_integerp (len, 1))
+ return NULL_TREE;
+
+ p1 = c_getstr (arg1);
+ /* Proceed only when ARG1 is a string constant and LEN does not
+ read past its terminating NUL, so the host-side search below
+ stays within the literal. */
+ if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
+ {
+ char c;
+ const char *r;
+ tree tem;
+
+ if (target_char_cast (arg2, &c))
+ return NULL_TREE;
+
+ /* Perform the search at compile time on the host copy. */
+ r = memchr (p1, c, tree_low_cst (len, 1));
+
+ /* Not found: fold to a null pointer of ARG1's type. */
+ if (r == NULL)
+ return build_int_cst (TREE_TYPE (arg1), 0);
+
+ /* Found: fold to ARG1 plus the offset of the match. */
+ tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
+ size_int (r - p1));
+ return fold_convert (type, tem);
+ }
+ return NULL_TREE;
+ }
+}
+
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
Return NULL_TREE if no simplification can be made. */
/* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
if (real_zerop (arg0) || integer_zerop (arg1)
|| (TREE_CODE (arg0) == REAL_CST
- && (real_isnan (&TREE_REAL_CST (arg0))
- || real_isinf (&TREE_REAL_CST (arg0)))))
+ && !real_isfinite (&TREE_REAL_CST (arg0))))
return omit_one_operand (type, arg0, arg1);
/* If both arguments are constant, then try to evaluate it. */
if (TREE_CODE (arg) == REAL_CST)
{
r = TREE_REAL_CST (arg);
- return real_isinf (&r) || real_isnan (&r)
- ? integer_zero_node : integer_one_node;
+ return real_isfinite (&r) ? integer_one_node : integer_zero_node;
}
return NULL_TREE;
&dconstm1, NULL, false);
break;
+#if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
+ CASE_FLT_FN (BUILT_IN_J0):
+ if (validate_arg (arg0, REAL_TYPE))
+ return do_mpfr_arg1 (arg0, type, mpfr_j0,
+ NULL, NULL, 0);
+ break;
+
+ CASE_FLT_FN (BUILT_IN_J1):
+ if (validate_arg (arg0, REAL_TYPE))
+ return do_mpfr_arg1 (arg0, type, mpfr_j1,
+ NULL, NULL, 0);
+ break;
+
+ CASE_FLT_FN (BUILT_IN_Y0):
+ if (validate_arg (arg0, REAL_TYPE))
+ return do_mpfr_arg1 (arg0, type, mpfr_y0,
+ &dconst0, NULL, false);
+ break;
+
+ CASE_FLT_FN (BUILT_IN_Y1):
+ if (validate_arg (arg0, REAL_TYPE))
+ return do_mpfr_arg1 (arg0, type, mpfr_y1,
+ &dconst0, NULL, false);
+ break;
+#endif
+
CASE_FLT_FN (BUILT_IN_NAN):
case BUILT_IN_NAND32:
case BUILT_IN_NAND64:
switch (fcode)
{
+#if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
+ CASE_FLT_FN (BUILT_IN_JN):
+ if (validate_arg (arg0, INTEGER_TYPE)
+ && validate_arg (arg1, REAL_TYPE))
+ return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
+ break;
+
+ CASE_FLT_FN (BUILT_IN_YN):
+ if (validate_arg (arg0, INTEGER_TYPE)
+ && validate_arg (arg1, REAL_TYPE))
+ return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
+ &dconst0, false);
+ break;
+
+ CASE_FLT_FN (BUILT_IN_DREM):
+ CASE_FLT_FN (BUILT_IN_REMAINDER):
+ if (validate_arg (arg0, REAL_TYPE)
+ && validate_arg(arg1, REAL_TYPE))
+ return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
+ break;
+
+ CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
+ CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
+ if (validate_arg (arg0, REAL_TYPE)
+ && validate_arg(arg1, POINTER_TYPE))
+ return do_mpfr_lgamma_r (arg0, arg1, type);
+ break;
+#endif
CASE_FLT_FN (BUILT_IN_ATAN2):
if (validate_arg (arg0, REAL_TYPE)
return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
break;
+#if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
+ CASE_FLT_FN (BUILT_IN_REMQUO):
+ if (validate_arg (arg0, REAL_TYPE)
+ && validate_arg(arg1, REAL_TYPE)
+ && validate_arg(arg2, POINTER_TYPE))
+ return do_mpfr_remquo (arg0, arg1, arg2);
+ break;
+#endif
+
case BUILT_IN_MEMSET:
return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
case BUILT_IN_STRNCMP:
return fold_builtin_strncmp (arg0, arg1, arg2);
+ case BUILT_IN_MEMCHR:
+ return fold_builtin_memchr (arg0, arg1, arg2, type);
+
case BUILT_IN_BCMP:
case BUILT_IN_MEMCMP:
return fold_builtin_memcmp (arg0, arg1, arg2);;
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
- s1, build_int_cst (TREE_TYPE (s1), r - p1));
+ tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
+ s1, size_int (r - p1));
return fold_convert (type, tem);
}
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
- s1, build_int_cst (TREE_TYPE (s1), r - p1));
+ tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
+ s1, size_int (r - p1));
return fold_convert (type, tem);
}
return NULL_TREE;
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
- s1, build_int_cst (TREE_TYPE (s1), r - p1));
+ tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
+ s1, size_int (r - p1));
return fold_convert (type, tem);
}
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
- s1, build_int_cst (TREE_TYPE (s1), r - p1));
+ tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
+ s1, size_int (r - p1));
return fold_convert (type, tem);
}
return expand_expr (dest, target, mode, EXPAND_NORMAL);
}
- len = fold_convert (TREE_TYPE (dest), len);
- expr = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len);
+ expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
return expand_expr (expr, target, mode, EXPAND_NORMAL);
}
return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
else
{
- tree temp = fold_convert (TREE_TYPE (dest), len);
- temp = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, temp);
+ tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
}
}
{
REAL_VALUE_TYPE rr;
- real_from_mpfr (&rr, m);
+ real_from_mpfr (&rr, m, type, GMP_RNDN);
/* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
check for overflow/underflow. If the REAL_VALUE_TYPE is zero
but the mpft_t is not, then we underflowed in the
conversion. */
- if (!real_isnan (&rr) && !real_isinf (&rr)
+ if (real_isfinite (&rr)
&& (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
{
REAL_VALUE_TYPE rmode;
{
const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
- if (!real_isnan (ra) && !real_isinf (ra)
+ if (real_isfinite (ra)
&& (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
&& (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
{
mpfr_t m;
mpfr_init2 (m, prec);
- mpfr_from_real (m, ra);
+ mpfr_from_real (m, ra, GMP_RNDN);
mpfr_clear_flags ();
inexact = func (m, m, GMP_RNDN);
result = do_mpfr_ckconv (m, type, inexact);
const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
- if (!real_isnan (ra1) && !real_isinf (ra1)
- && !real_isnan (ra2) && !real_isinf (ra2))
+ if (real_isfinite (ra1) && real_isfinite (ra2))
{
const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
int inexact;
mpfr_t m1, m2;
mpfr_inits2 (prec, m1, m2, NULL);
- mpfr_from_real (m1, ra1);
- mpfr_from_real (m2, ra2);
+ mpfr_from_real (m1, ra1, GMP_RNDN);
+ mpfr_from_real (m2, ra2, GMP_RNDN);
mpfr_clear_flags ();
inexact = func (m1, m1, m2, GMP_RNDN);
result = do_mpfr_ckconv (m1, type, inexact);
const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
- if (!real_isnan (ra1) && !real_isinf (ra1)
- && !real_isnan (ra2) && !real_isinf (ra2)
- && !real_isnan (ra3) && !real_isinf (ra3))
+ if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
{
const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
int inexact;
mpfr_t m1, m2, m3;
mpfr_inits2 (prec, m1, m2, m3, NULL);
- mpfr_from_real (m1, ra1);
- mpfr_from_real (m2, ra2);
- mpfr_from_real (m3, ra3);
+ mpfr_from_real (m1, ra1, GMP_RNDN);
+ mpfr_from_real (m2, ra2, GMP_RNDN);
+ mpfr_from_real (m3, ra3, GMP_RNDN);
mpfr_clear_flags ();
inexact = func (m1, m1, m2, m3, GMP_RNDN);
result = do_mpfr_ckconv (m1, type, inexact);
{
const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
- if (!real_isnan (ra) && !real_isinf (ra))
+ if (real_isfinite (ra))
{
const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
tree result_s, result_c;
mpfr_t m, ms, mc;
mpfr_inits2 (prec, m, ms, mc, NULL);
- mpfr_from_real (m, ra);
+ mpfr_from_real (m, ra, GMP_RNDN);
mpfr_clear_flags ();
inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
result_s = do_mpfr_ckconv (ms, type, inexact);
}
return result;
}
+
+#if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
+/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
+ two-argument mpfr order N Bessel function FUNC on them and return
+ the resulting value as a tree with type TYPE.  The mpfr precision
+ is set to the precision of TYPE.  We assume that function FUNC
+ returns zero if the result could be calculated exactly within the
+ requested precision.  When MIN is non-NULL, ARG2 must compare
+ greater than MIN (greater than or equal when INCLUSIVE) for any
+ folding to be done. */
+static tree
+do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
+ int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
+ const REAL_VALUE_TYPE *min, bool inclusive)
+{
+ tree result = NULL_TREE;
+
+ STRIP_NOPS (arg1);
+ STRIP_NOPS (arg2);
+
+ /* To proceed, MPFR must exactly represent the target floating point
+ format, which only happens when the target base equals two. */
+ if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
+ && host_integerp (arg1, 0)
+ && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
+ {
+ const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
+ const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
+
+ /* FUNC takes the order as a host 'long'; check that N fits. */
+ if (n == (long)n
+ && real_isfinite (ra)
+ && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
+ {
+ const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
+ int inexact;
+ mpfr_t m;
+
+ mpfr_init2 (m, prec);
+ mpfr_from_real (m, ra, GMP_RNDN);
+ mpfr_clear_flags ();
+ inexact = func (m, n, m, GMP_RNDN);
+ result = do_mpfr_ckconv (m, type, inexact);
+ mpfr_clear (m);
+ }
+ }
+
+ return result;
+}
+
+/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
+ the pointer *(ARG_QUO) and return the result.  The type is taken
+ from the type of ARG0 and is used for setting the precision of the
+ calculation and results. */
+
+static tree
+do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
+{
+ /* NOTE(review): TYPE is read before STRIP_NOPS so folding happens
+ in the expression's own type -- confirm this ordering is intended. */
+ tree const type = TREE_TYPE (arg0);
+ tree result = NULL_TREE;
+
+ STRIP_NOPS (arg0);
+ STRIP_NOPS (arg1);
+
+ /* To proceed, MPFR must exactly represent the target floating point
+ format, which only happens when the target base equals two. */
+ if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
+ && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
+ && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
+ {
+ const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
+ const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
+
+ if (real_isfinite (ra0) && real_isfinite (ra1))
+ {
+ const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
+ tree result_rem;
+ long integer_quo;
+ mpfr_t m0, m1;
+
+ mpfr_inits2 (prec, m0, m1, NULL);
+ mpfr_from_real (m0, ra0, GMP_RNDN);
+ mpfr_from_real (m1, ra1, GMP_RNDN);
+ mpfr_clear_flags ();
+ mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
+ /* Remquo is independent of the rounding mode, so pass
+ inexact=0 to do_mpfr_ckconv(). */
+ result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
+ mpfr_clears (m0, m1, NULL);
+ if (result_rem)
+ {
+ /* MPFR calculates quo in the host's long so it may
+ return more bits in quo than the target int can hold
+ if sizeof(host long) > sizeof(target int). This can
+ happen even for native compilers in LP64 mode. In
+ these cases, modulo the quo value with the largest
+ number that the target int can hold while leaving one
+ bit for the sign. */
+ if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
+ integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
+
+ /* Dereference the quo pointer argument. */
+ arg_quo = build_fold_indirect_ref (arg_quo);
+ /* Proceed iff a valid pointer type was passed in. */
+ if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
+ {
+ /* Set the value. */
+ tree result_quo = fold_build2 (MODIFY_EXPR,
+ TREE_TYPE (arg_quo), arg_quo,
+ build_int_cst (NULL, integer_quo));
+ TREE_SIDE_EFFECTS (result_quo) = 1;
+ /* Combine the quo assignment with the rem. */
+ result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
+ result_quo, result_rem));
+ }
+ }
+ }
+ }
+ return result;
+}
+
+/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
+ resulting value as a tree with type TYPE.  The mpfr precision is
+ set to the precision of TYPE.  We assume that this mpfr function
+ returns zero if the result could be calculated exactly within the
+ requested precision.  In addition, the integer pointer represented
+ by ARG_SG will be dereferenced and set to the appropriate signgam
+ (-1,1) value. */
+
+static tree
+do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
+{
+ tree result = NULL_TREE;
+
+ STRIP_NOPS (arg);
+
+ /* To proceed, MPFR must exactly represent the target floating point
+ format, which only happens when the target base equals two. Also
+ verify ARG is a constant and that ARG_SG is an int pointer. */
+ if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
+ && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
+ && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
+ && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
+ {
+ const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
+
+ /* In addition to NaN and Inf, the argument cannot be zero or a
+ negative integer.  These are the poles of lgamma. */
+ if (real_isfinite (ra)
+ && ra->cl != rvc_zero
+ && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
+ {
+ const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
+ int inexact, sg;
+ mpfr_t m;
+ tree result_lg;
+
+ mpfr_init2 (m, prec);
+ mpfr_from_real (m, ra, GMP_RNDN);
+ mpfr_clear_flags ();
+ /* mpfr_lgamma also yields the sign of gamma(ARG) in SG. */
+ inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
+ result_lg = do_mpfr_ckconv (m, type, inexact);
+ mpfr_clear (m);
+ if (result_lg)
+ {
+ tree result_sg;
+
+ /* Dereference the arg_sg pointer argument. */
+ arg_sg = build_fold_indirect_ref (arg_sg);
+ /* Assign the signgam value into *arg_sg. */
+ result_sg = fold_build2 (MODIFY_EXPR,
+ TREE_TYPE (arg_sg), arg_sg,
+ build_int_cst (NULL, sg));
+ TREE_SIDE_EFFECTS (result_sg) = 1;
+ /* Combine the signgam assignment with the lgamma result. */
+ result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
+ result_sg, result_lg));
+ }
+ }
+ }
+
+ return result;
+}
+#endif