#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
-#include "real.h"
#include "rtl.h"
#include "tree.h"
+#include "realmpfr.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "value-prof.h"
-#include "diagnostic.h"
+#include "diagnostic-core.h"
+#include "builtins.h"
#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
+struct target_builtins default_target_builtins;
+#if SWITCHABLE_TARGET
+struct target_builtins *this_target_builtins = &default_target_builtins;
+#endif
+
/* Define the names of the builtin function types and codes. */
const char *const built_in_class_names[4]
= {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_expect (location_t, tree, tree);
static tree fold_builtin_classify_type (tree);
-static tree fold_builtin_strlen (location_t, tree);
+static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
HOST_WIDE_INT offset;
int max;
const char *ptr;
+ location_t loc;
STRIP_NOPS (src);
if (TREE_CODE (src) == COND_EXPR
&& (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
return c_strlen (TREE_OPERAND (src, 1), only_value);
+ if (EXPR_HAS_LOCATION (src))
+ loc = EXPR_LOCATION (src);
+ else
+ loc = input_location;
+
src = string_constant (src, &offset_node);
if (src == 0)
return NULL_TREE;
and return that. This would perhaps not be valid if we were dealing
with named arrays in addition to literal string constants. */
- return size_diffop_loc (input_location, size_int (max), offset_node);
+ return size_diffop_loc (loc, size_int (max), offset_node);
}
/* We have a known offset into the string. Start searching there for
/* Suppress multiple warnings for propagated constant strings. */
if (! TREE_NO_WARNING (src))
{
- warning (0, "offset outside bounds of constant string");
+ warning_at (loc, 0, "offset outside bounds of constant string");
TREE_NO_WARNING (src) = 1;
}
return NULL_TREE;
&& GET_MODE_SIZE (mode) > UNITS_PER_WORD)
j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
j *= BITS_PER_UNIT;
- gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
+ gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);
if (ch)
ch = (unsigned char) str[i];
if (nargs > 2)
arg2 = CALL_EXPR_ARG (exp, 2);
else
- arg2 = build_int_cst (NULL_TREE, 3);
+ arg2 = integer_three_node;
/* Argument 0 is an address. */
op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
\f
/* Built-in functions to perform an untyped call and return. */
-/* For each register that may be used for calling a function, this
- gives a mode used to copy the register's value. VOIDmode indicates
- the register is not used for calling a function. If the machine
- has register windows, this gives only the outbound registers.
- INCOMING_REGNO gives the corresponding inbound register. */
-static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
-
-/* For each register that may be used for returning values, this gives
- a mode used to copy the register's value. VOIDmode indicates the
- register is not used for returning values. If the machine has
- register windows, this gives only the outbound registers.
- INCOMING_REGNO gives the corresponding inbound register. */
-static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
+#define apply_args_mode \
+ (this_target_builtins->x_apply_args_mode)
+#define apply_result_mode \
+ (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
and initialize apply_args_mode. */
size = 0;
for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
- if (FUNCTION_VALUE_REGNO_P (regno))
+ if (targetm.calls.function_value_regno_p (regno))
{
mode = reg_raw_mode[regno];
/* Test the result; if it is NaN, set errno=EDOM because
the argument was not in the domain. */
do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
- NULL_RTX, NULL_RTX, lab);
+ NULL_RTX, NULL_RTX, lab,
+ /* The jump is very likely. */
+ REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
#ifdef TARGET_EDOM
/* If this built-in doesn't throw an exception, set errno directly. */
errno_set = false;
/* Before working hard, check whether the instruction is available. */
- if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
+ if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
{
target = gen_reg_rtx (mode);
mode = TYPE_MODE (TREE_TYPE (exp));
/* Before working hard, check whether the instruction is available. */
- if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
+ if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
return NULL_RTX;
target = gen_reg_rtx (mode);
/* Check if sincos insn is available, otherwise fallback
to sin or cos insn. */
- if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
+ if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
switch (DECL_FUNCTION_CODE (fndecl))
{
CASE_FLT_FN (BUILT_IN_SIN):
}
/* Before working hard, check whether the instruction is available. */
- if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
+ if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
{
target = gen_reg_rtx (mode);
mode = TYPE_MODE (TREE_TYPE (arg));
if (builtin_optab)
- return optab_handler (builtin_optab, mode)->insn_code;
+ return optab_handler (builtin_optab, mode);
return CODE_FOR_nothing;
}
if (icode != CODE_FOR_nothing)
{
+ rtx last = get_last_insn ();
+ tree orig_arg = arg;
/* Make a suitable register to place result in. */
if (!target
- || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
+ || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))
+ || !insn_data[icode].operand[0].predicate (target, GET_MODE (target)))
target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
gcc_assert (insn_data[icode].operand[0].predicate
/* Compute into TARGET.
Set TARGET to wherever the result comes back. */
- emit_unop_insn (icode, target, op0, UNKNOWN);
- return target;
+ if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
+ return target;
+ delete_insns_since (last);
+ CALL_EXPR_ARG (exp, 0) = orig_arg;
}
return NULL_RTX;
mode = TYPE_MODE (TREE_TYPE (arg));
/* Check if sincos insn is available, otherwise emit the call. */
- if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
+ if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
return NULL_RTX;
target1 = gen_reg_rtx (mode);
/* Try expanding via a sincos optab, fall back to emitting a libcall
to sincos or cexp. We are sure we have sincos or cexp because cexpi
is only generated from sincos, cexp or if we have either of them. */
- if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
+ if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
{
op1 = gen_reg_rtx (mode);
op2 = gen_reg_rtx (mode);
SET_EXPR_LOCATION (fn, loc);
return fn;
}
-#define build_call_nofold(...) \
- build_call_nofold_loc (UNKNOWN_LOCATION, __VA_ARGS__)
/* Expand a call to one of the builtin rounding functions gcc defines
as an extension (lfloor and lceil). As these are gcc extensions we
fallback_fndecl = build_fn_decl (name, fntype);
}
- exp = build_call_nofold (fallback_fndecl, 1, arg);
+ exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
tmp = expand_normal (exp);
return result;
}
+/* Fold a builtin function call to pow, powf, or powl into a series of sqrts or
+   cbrts.  Return NULL_RTX if no simplification can be made or expand the tree
+   if we can simplify it.  */
+static rtx
+expand_builtin_pow_root (location_t loc, tree arg0, tree arg1, tree type,
+			 rtx subtarget)
+{
+  if (TREE_CODE (arg1) == REAL_CST
+      && !TREE_OVERFLOW (arg1)
+      && flag_unsafe_math_optimizations)
+    {
+      enum machine_mode mode = TYPE_MODE (type);
+      /* Either builtin may be unavailable for this type; each use below
+	 must therefore be guarded against a NULL_TREE decl.  */
+      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
+      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
+      REAL_VALUE_TYPE c = TREE_REAL_CST (arg1);
+      tree op = NULL_TREE;
+
+      if (sqrtfn)
+	{
+	  /* Optimize pow (x, 0.5) into sqrt.  */
+	  if (REAL_VALUES_EQUAL (c, dconsthalf))
+	    op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
+
+	  else
+	    {
+	      REAL_VALUE_TYPE dconst1_4 = dconst1;
+	      REAL_VALUE_TYPE dconst3_4;
+	      SET_REAL_EXP (&dconst1_4, REAL_EXP (&dconst1_4) - 2);
+
+	      real_from_integer (&dconst3_4, VOIDmode, 3, 0, 0);
+	      SET_REAL_EXP (&dconst3_4, REAL_EXP (&dconst3_4) - 2);
+
+	      /* Optimize pow (x, 0.25) into sqrt (sqrt (x)).  Assume on most
+		 machines that a builtin sqrt instruction is smaller than a
+		 call to pow with 0.25, so do this optimization even if
+		 -Os.  */
+	      if (REAL_VALUES_EQUAL (c, dconst1_4))
+		{
+		  op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
+		  op = build_call_nofold_loc (loc, sqrtfn, 1, op);
+		}
+
+	      /* Optimize pow (x, 0.75) = sqrt (x) * sqrt (sqrt (x)) unless we
+		 are optimizing for space.  */
+	      else if (optimize_insn_for_speed_p ()
+		       && !TREE_SIDE_EFFECTS (arg0)
+		       && REAL_VALUES_EQUAL (c, dconst3_4))
+		{
+		  tree sqrt1 = build_call_expr_loc (loc, sqrtfn, 1, arg0);
+		  tree sqrt2 = builtin_save_expr (sqrt1);
+		  tree sqrt3 = build_call_expr_loc (loc, sqrtfn, 1, sqrt1);
+		  op = fold_build2_loc (loc, MULT_EXPR, type, sqrt2, sqrt3);
+		}
+	    }
+	}
+
+      /* Check whether we can do cbrt instead of pow (x, 1./3.) and
+	 cbrt/sqrts instead of pow (x, 1./6.).  */
+      if (cbrtfn && ! op
+	  && (tree_expr_nonnegative_p (arg0) || !HONOR_NANS (mode)))
+	{
+	  /* First try 1/3.  */
+	  REAL_VALUE_TYPE dconst1_3
+	    = real_value_truncate (mode, dconst_third ());
+
+	  if (REAL_VALUES_EQUAL (c, dconst1_3))
+	    op = build_call_nofold_loc (loc, cbrtfn, 1, arg0);
+
+	  /* Now try 1/6.  This expansion also needs sqrt, so do not
+	     build a call through a NULL sqrtfn when only cbrt is
+	     available for this type.  */
+	  else if (sqrtfn && optimize_insn_for_speed_p ())
+	    {
+	      REAL_VALUE_TYPE dconst1_6 = dconst1_3;
+	      SET_REAL_EXP (&dconst1_6, REAL_EXP (&dconst1_6) - 1);
+
+	      if (REAL_VALUES_EQUAL (c, dconst1_6))
+		{
+		  op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
+		  op = build_call_nofold_loc (loc, cbrtfn, 1, op);
+		}
+	    }
+	}
+
+      if (op)
+	return expand_expr (op, subtarget, mode, EXPAND_NORMAL);
+    }
+
+  return NULL_RTX;
+}
+
/* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
a normal call should be emitted rather than expanding the function
in-line. EXP is the expression that is a call to the builtin
&& ((flag_unsafe_math_optimizations
&& optimize_insn_for_speed_p ()
&& powi_cost (n/2) <= POWI_MAX_MULTS)
- || n == 1))
- {
- tree call_expr = build_call_nofold (fn, 1, narg0);
+ /* Even the c == 0.5 case cannot be done unconditionally
+ when we need to preserve signed zeros, as
+ pow (-0, 0.5) is +0, while sqrt(-0) is -0. */
+ || (!HONOR_SIGNED_ZEROS (mode) && n == 1)
+ /* For c == 1.5 we can assume that x * sqrt (x) is always
+ smaller than pow (x, 1.5) if sqrt will not be expanded
+ as a call. */
+ || (n == 3
+ && optab_handler (sqrt_optab, mode) != CODE_FOR_nothing)))
+ {
+ tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
+ narg0);
/* Use expand_expr in case the newly built call expression
was folded to a non-call. */
op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
}
}
+ /* Check whether we can do a series of sqrt or cbrt's instead of the pow
+ call. */
+ op = expand_builtin_pow_root (EXPR_LOCATION (exp), arg0, arg1, type,
+ subtarget);
+ if (op)
+ return op;
+
/* Try if the exponent is a third of an integer. In this case
we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
different from pow (x, 1./3.) due to rounding and behavior
&& powi_cost (n/3) <= POWI_MAX_MULTS)
|| n == 1))
{
- tree call_expr = build_call_nofold (fn, 1,narg0);
+ tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
+ narg0);
op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
if (abs (n) % 3 == 2)
op = expand_simple_binop (mode, MULT, op, op, op,
/* Bail out if we can't compute strlen in the right mode. */
while (insn_mode != VOIDmode)
{
- icode = optab_handler (strlen_optab, insn_mode)->insn_code;
+ icode = optab_handler (strlen_optab, insn_mode);
if (icode != CODE_FOR_nothing)
break;
if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
{
tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
- tree result = build_call_nofold (fn, 3, dest, src, len);
+ tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
+ dest, src, len);
return expand_expr (result, target, mode, EXPAND_NORMAL);
}
else
rtx dest_mem;
rtx src_mem;
rtx insn;
- const struct insn_data * data;
+ const struct insn_data_d * data;
if (!HAVE_movstr)
return NULL_RTX;
dest_mem = get_memory_rtx (dest, NULL);
src_mem = get_memory_rtx (src, NULL);
+ data = insn_data + CODE_FOR_movstr;
if (!endp)
{
target = force_reg (Pmode, XEXP (dest_mem, 0));
}
else
{
- if (target == 0 || target == const0_rtx)
+ if (target == 0
+ || target == const0_rtx
+ || ! (*data->operand[0].predicate) (target, Pmode))
{
end = gen_reg_rtx (Pmode);
- if (target == 0)
+ if (target != const0_rtx)
target = end;
}
else
end = target;
}
- data = insn_data + CODE_FOR_movstr;
-
if (data->operand[0].mode != VOIDmode)
end = gen_lowpart (data->operand[0].mode, end);
if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
{
tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
- tree result = build_call_nofold (fn, 2, dst, src);
+ tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
return expand_expr (result, target, mode, EXPAND_NORMAL);
}
else
fndecl = get_callee_fndecl (orig_exp);
fcode = DECL_FUNCTION_CODE (fndecl);
if (fcode == BUILT_IN_MEMSET)
- fn = build_call_nofold (fndecl, 3, dest, val, len);
+ fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
+ dest, val, len);
else if (fcode == BUILT_IN_BZERO)
- fn = build_call_nofold (fndecl, 2, dest, len);
+ fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
+ dest, len);
else
gcc_unreachable ();
gcc_assert (TREE_CODE (fn) == CALL_EXPR);
return NULL_RTX;
#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
- if (cmpstr_optab[SImode] != CODE_FOR_nothing
- || cmpstrn_optab[SImode] != CODE_FOR_nothing)
+ if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
+ || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
{
rtx arg1_rtx, arg2_rtx;
rtx result, insn = NULL_RTX;
do_libcall:
#endif
fndecl = get_callee_fndecl (exp);
- fn = build_call_nofold (fndecl, 2, arg1, arg2);
+ fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
gcc_assert (TREE_CODE (fn) == CALL_EXPR);
CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
return expand_call (fn, target, target == const0_rtx);
/* Expand the library call ourselves using a stabilized argument
list to avoid re-evaluating the function's arguments twice. */
fndecl = get_callee_fndecl (exp);
- fn = build_call_nofold (fndecl, 3, arg1, arg2, len);
+ fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
+ arg1, arg2, len);
gcc_assert (TREE_CODE (fn) == CALL_EXPR);
CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
return expand_call (fn, target, target == const0_rtx);
{
tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
- gcc_assert (vatype != NULL_TREE);
+ /* The current way of determining the type of valist is completely
+ bogus. We should have the information on the va builtin instead. */
+ if (!vatype)
+ vatype = targetm.fn_abi_va_list (cfun->decl);
if (TREE_CODE (vatype) == ARRAY_TYPE)
{
}
else
{
- tree pt;
+ tree pt = build_pointer_type (vatype);
if (! needs_lvalue)
{
if (! TREE_SIDE_EFFECTS (valist))
return valist;
- pt = build_pointer_type (vatype);
valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
TREE_SIDE_EFFECTS (valist) = 1;
}
if (TREE_SIDE_EFFECTS (valist))
valist = save_expr (valist);
- valist = build_fold_indirect_ref_loc (loc, valist);
+ valist = fold_build2_loc (loc, MEM_REF,
+ vatype, valist, build_int_cst (pt, 0));
}
return valist;
target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
/* When guessing was done, the hints should be already stripped away. */
gcc_assert (!flag_guess_branch_prob
- || optimize == 0 || errorcount || sorrycount);
+ || optimize == 0 || seen_error ());
return target;
}
targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
trampolines_created = 1;
+
+ warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
+ "trampoline generated for nested function %qD", t_func);
+
return const0_rtx;
}
{
const struct real_format *fmt;
enum machine_mode fmode, imode, rmode;
- HOST_WIDE_INT hi, lo;
tree arg;
int word, bitpos;
enum insn_code icode;
/* Check if the back end provides an insn that handles signbit for the
argument's mode. */
- icode = signbit_optab->handlers [(int) fmode].insn_code;
+ icode = optab_handler (signbit_optab, fmode);
if (icode != CODE_FOR_nothing)
{
+ rtx last = get_last_insn ();
target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
- emit_unop_insn (icode, target, temp, UNKNOWN);
- return target;
+ if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
+ return target;
+ delete_insns_since (last);
}
/* For floating point formats without a sign bit, implement signbit
if (bitpos < GET_MODE_BITSIZE (rmode))
{
- if (bitpos < HOST_BITS_PER_WIDE_INT)
- {
- hi = 0;
- lo = (HOST_WIDE_INT) 1 << bitpos;
- }
- else
- {
- hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
- lo = 0;
- }
+ double_int mask = double_int_setbit (double_int_zero, bitpos);
if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
temp = gen_lowpart (rmode, temp);
temp = expand_binop (rmode, and_optab, temp,
- immed_double_const (lo, hi, rmode),
+ immed_double_int_const (mask, rmode),
NULL_RTX, 1, OPTAB_LIB_WIDEN);
}
else
mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
/* If there is an explicit operation in the md file, use it. */
- icode = sync_lock_release[mode];
+ icode = direct_optab_handler (sync_lock_release_optab, mode);
if (icode != CODE_FOR_nothing)
{
if (!insn_data[icode].operand[1].predicate (val, mode))
/* Fold a call to __builtin_strlen with argument ARG. */
static tree
-fold_builtin_strlen (location_t loc, tree arg)
+fold_builtin_strlen (location_t loc, tree type, tree arg)
{
if (!validate_arg (arg, POINTER_TYPE))
return NULL_TREE;
tree len = c_strlen (arg, 0);
if (len)
- {
- /* Convert from the internal "sizetype" type to "size_t". */
- if (size_type_node)
- len = fold_convert_loc (loc, size_type_node, len);
- return len;
- }
+ return fold_convert_loc (loc, type, len);
return NULL_TREE;
}
return NULL_TREE;
}
+/* Build a complex (inf +- 0i) for the result of cproj.  TYPE is the
+   complex tree type of the result.  If NEG is true, the imaginary
+   zero is negative.  */
+
+static tree
+build_complex_cproj (tree type, bool neg)
+{
+  REAL_VALUE_TYPE rinf, rzero = dconst0;
+
+  real_inf (&rinf);
+  /* REAL_VALUE_TYPE stores the sign as a separate field, so setting
+     it directly turns +0.0 into -0.0 without any arithmetic.  */
+  rzero.sign = neg;
+  return build_complex (type, build_real (TREE_TYPE (type), rinf),
+			build_real (TREE_TYPE (type), rzero));
+}
+
+/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
+   return type.  Return NULL_TREE if no simplification can be made.  */
+
+static tree
+fold_builtin_cproj (location_t loc, tree arg, tree type)
+{
+  /* Only fold genuine complex-float arguments.  */
+  if (!validate_arg (arg, COMPLEX_TYPE)
+      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
+    return NULL_TREE;
+
+  /* If there are no infinities, return arg.  */
+  if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
+    return non_lvalue_loc (loc, arg);
+
+  /* Calculate the result when the argument is a constant.  cproj maps
+     any value with an infinite part to (inf + I*copysign(0, cimag)),
+     which is what build_complex_cproj constructs from imag->sign.  */
+  if (TREE_CODE (arg) == COMPLEX_CST)
+    {
+      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
+      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
+
+      if (real_isinf (real) || real_isinf (imag))
+	return build_complex_cproj (type, imag->sign);
+      else
+	return arg;
+    }
+  else if (TREE_CODE (arg) == COMPLEX_EXPR)
+    {
+      tree real = TREE_OPERAND (arg, 0);
+      tree imag = TREE_OPERAND (arg, 1);
+
+      STRIP_NOPS (real);
+      STRIP_NOPS (imag);
+
+      /* If the real part is inf and the imag part is known to be
+	 nonnegative, return (inf + 0i).  Remember side-effects are
+	 possible in the imag part.  */
+      if (TREE_CODE (real) == REAL_CST
+	  && real_isinf (TREE_REAL_CST_PTR (real))
+	  && tree_expr_nonnegative_p (imag))
+	return omit_one_operand_loc (loc, type,
+				     build_complex_cproj (type, false),
+				     arg);
+
+      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
+	 Remember side-effects are possible in the real part.  */
+      if (TREE_CODE (imag) == REAL_CST
+	  && real_isinf (TREE_REAL_CST_PTR (imag)))
+	return
+	  omit_one_operand_loc (loc, type,
+				build_complex_cproj (type, TREE_REAL_CST_PTR
+						     (imag)->sign), arg);
+    }
+
+  return NULL_TREE;
+}
+
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
Return NULL_TREE if no simplification can be made. */
{
tree itype = TREE_TYPE (TREE_TYPE (fndecl));
tree ftype = TREE_TYPE (arg);
- unsigned HOST_WIDE_INT lo2;
- HOST_WIDE_INT hi, lo;
+ double_int val;
REAL_VALUE_TYPE r;
switch (DECL_FUNCTION_CODE (fndecl))
gcc_unreachable ();
}
- REAL_VALUE_TO_INT (&lo, &hi, r);
- if (!fit_double_type (lo, hi, &lo2, &hi, itype))
- return build_int_cst_wide (itype, lo2, hi);
+ real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
+ if (double_int_fits_to_tree_p (itype, val))
+ return double_int_to_tree (itype, val);
}
}
{
tree srctype, desttype;
int src_align, dest_align;
+ tree off0;
if (endp == 3)
{
}
/* If *src and *dest can't overlap, optimize into memcpy as well. */
- srcvar = build_fold_indirect_ref_loc (loc, src);
- destvar = build_fold_indirect_ref_loc (loc, dest);
- if (srcvar
- && !TREE_THIS_VOLATILE (srcvar)
- && destvar
- && !TREE_THIS_VOLATILE (destvar))
+ if (TREE_CODE (src) == ADDR_EXPR
+ && TREE_CODE (dest) == ADDR_EXPR)
{
tree src_base, dest_base, fn;
HOST_WIDE_INT src_offset = 0, dest_offset = 0;
HOST_WIDE_INT size = -1;
HOST_WIDE_INT maxsize = -1;
- src_base = srcvar;
- if (handled_component_p (src_base))
- src_base = get_ref_base_and_extent (src_base, &src_offset,
- &size, &maxsize);
- dest_base = destvar;
- if (handled_component_p (dest_base))
- dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
- &size, &maxsize);
+ srcvar = TREE_OPERAND (src, 0);
+ src_base = get_ref_base_and_extent (srcvar, &src_offset,
+ &size, &maxsize);
+ destvar = TREE_OPERAND (dest, 0);
+ dest_base = get_ref_base_and_extent (destvar, &dest_offset,
+ &size, &maxsize);
if (host_integerp (len, 1))
- {
- maxsize = tree_low_cst (len, 1);
- if (maxsize
- > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
- maxsize = -1;
- else
- maxsize *= BITS_PER_UNIT;
- }
+ maxsize = tree_low_cst (len, 1);
else
maxsize = -1;
+ src_offset /= BITS_PER_UNIT;
+ dest_offset /= BITS_PER_UNIT;
if (SSA_VAR_P (src_base)
&& SSA_VAR_P (dest_base))
{
dest_offset, maxsize))
return NULL_TREE;
}
- else if (TREE_CODE (src_base) == INDIRECT_REF
- && TREE_CODE (dest_base) == INDIRECT_REF)
+ else if (TREE_CODE (src_base) == MEM_REF
+ && TREE_CODE (dest_base) == MEM_REF)
{
+ double_int off;
if (! operand_equal_p (TREE_OPERAND (src_base, 0),
- TREE_OPERAND (dest_base, 0), 0)
- || ranges_overlap_p (src_offset, maxsize,
- dest_offset, maxsize))
+ TREE_OPERAND (dest_base, 0), 0))
+ return NULL_TREE;
+ off = double_int_add (mem_ref_offset (src_base),
+ shwi_to_double_int (src_offset));
+ if (!double_int_fits_in_shwi_p (off))
+ return NULL_TREE;
+ src_offset = off.low;
+ off = double_int_add (mem_ref_offset (dest_base),
+ shwi_to_double_int (dest_offset));
+ if (!double_int_fits_in_shwi_p (off))
+ return NULL_TREE;
+ dest_offset = off.low;
+ if (ranges_overlap_p (src_offset, maxsize,
+ dest_offset, maxsize))
return NULL_TREE;
}
else
dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
}
if (!srctype || !desttype
+ || TREE_ADDRESSABLE (srctype)
+ || TREE_ADDRESSABLE (desttype)
|| !TYPE_SIZE_UNIT (srctype)
|| !TYPE_SIZE_UNIT (desttype)
|| TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
- || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
- || TYPE_VOLATILE (srctype)
- || TYPE_VOLATILE (desttype))
+ || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST)
return NULL_TREE;
src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
if (!ignore)
dest = builtin_save_expr (dest);
- srcvar = NULL_TREE;
- if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
- {
- srcvar = build_fold_indirect_ref_loc (loc, src);
- if (TREE_THIS_VOLATILE (srcvar))
- return NULL_TREE;
- else if (!tree_int_cst_equal (tree_expr_size (srcvar), len))
- srcvar = NULL_TREE;
- /* With memcpy, it is possible to bypass aliasing rules, so without
- this check i.e. execute/20060930-2.c would be misoptimized,
- because it use conflicting alias set to hold argument for the
- memcpy call. This check is probably unnecessary with
- -fno-strict-aliasing. Similarly for destvar. See also
- PR29286. */
- else if (!var_decl_component_p (srcvar))
- srcvar = NULL_TREE;
- }
-
- destvar = NULL_TREE;
- if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
- {
- destvar = build_fold_indirect_ref_loc (loc, dest);
- if (TREE_THIS_VOLATILE (destvar))
- return NULL_TREE;
- else if (!tree_int_cst_equal (tree_expr_size (destvar), len))
- destvar = NULL_TREE;
- else if (!var_decl_component_p (destvar))
- destvar = NULL_TREE;
- }
+ /* Build accesses at offset zero with a ref-all character type. */
+ off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
+ ptr_mode, true), 0);
+
+ destvar = dest;
+ STRIP_NOPS (destvar);
+ if (TREE_CODE (destvar) == ADDR_EXPR
+ && var_decl_component_p (TREE_OPERAND (destvar, 0))
+ && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
+ destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
+ else
+ destvar = NULL_TREE;
+
+ srcvar = src;
+ STRIP_NOPS (srcvar);
+ if (TREE_CODE (srcvar) == ADDR_EXPR
+ && var_decl_component_p (TREE_OPERAND (srcvar, 0))
+ && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
+ srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
+ srcvar, off0);
+ else
+ srcvar = NULL_TREE;
if (srcvar == NULL_TREE && destvar == NULL_TREE)
return NULL_TREE;
if (srcvar == NULL_TREE)
{
- tree srcptype;
- if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
- return NULL_TREE;
-
- srctype = build_qualified_type (desttype, 0);
- if (src_align < (int) TYPE_ALIGN (srctype))
- {
- if (AGGREGATE_TYPE_P (srctype)
- || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
- return NULL_TREE;
-
- srctype = build_variant_type_copy (srctype);
- TYPE_ALIGN (srctype) = src_align;
- TYPE_USER_ALIGN (srctype) = 1;
- TYPE_PACKED (srctype) = 1;
- }
- srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
- src = fold_convert_loc (loc, srcptype, src);
- srcvar = build_fold_indirect_ref_loc (loc, src);
+ STRIP_NOPS (src);
+ srcvar = fold_build2 (MEM_REF, desttype, src, off0);
}
else if (destvar == NULL_TREE)
{
- tree destptype;
- if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
- return NULL_TREE;
-
- desttype = build_qualified_type (srctype, 0);
- if (dest_align < (int) TYPE_ALIGN (desttype))
- {
- if (AGGREGATE_TYPE_P (desttype)
- || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
- return NULL_TREE;
+ STRIP_NOPS (dest);
+ destvar = fold_build2 (MEM_REF, srctype, dest, off0);
+ }
- desttype = build_variant_type_copy (desttype);
- TYPE_ALIGN (desttype) = dest_align;
- TYPE_USER_ALIGN (desttype) = 1;
- TYPE_PACKED (desttype) = 1;
- }
- destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
- dest = fold_convert_loc (loc, destptype, dest);
- destvar = build_fold_indirect_ref_loc (loc, dest);
- }
-
- if (srctype == desttype
- || (gimple_in_ssa_p (cfun)
- && useless_type_conversion_p (desttype, srctype)))
- expr = srcvar;
- else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
- || POINTER_TYPE_P (TREE_TYPE (srcvar)))
- && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
- || POINTER_TYPE_P (TREE_TYPE (destvar))))
- expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
- else
- expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
- TREE_TYPE (destvar), srcvar);
- expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
+ expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
}
if (ignore)
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
switch (fcode)
{
-
case BUILT_IN_CONSTANT_P:
{
tree val = fold_builtin_constant_p (arg0);
return fold_builtin_classify_type (arg0);
case BUILT_IN_STRLEN:
- return fold_builtin_strlen (loc, arg0);
+ return fold_builtin_strlen (loc, type, arg0);
CASE_FLT_FN (BUILT_IN_FABS):
return fold_builtin_fabs (loc, arg0, type);
CASE_FLT_FN (BUILT_IN_CCOSH):
return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
+ CASE_FLT_FN (BUILT_IN_CPROJ):
+ return fold_builtin_cproj(loc, arg0, type);
+
CASE_FLT_FN (BUILT_IN_CSIN):
if (validate_arg (arg0, COMPLEX_TYPE)
&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
case BUILT_IN_VPRINTF:
return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
+ case BUILT_IN_FREE:
+ if (integer_zerop (arg0))
+ return build_empty_stmt (loc);
+ break;
+
default:
break;
}
if (avoid_folding_inline_builtin (fndecl))
return NULL_TREE;
- /* FIXME: Don't use a list in this interface. */
if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
- return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
+ return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
+ CALL_EXPR_ARGP (exp), ignore);
else
{
if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
}
/* Conveniently construct a function call expression. FNDECL names the
- function to be called and ARGLIST is a TREE_LIST of arguments. */
+ function to be called and N arguments are passed in the array
+ ARGARRAY. */
tree
-build_function_call_expr (location_t loc, tree fndecl, tree arglist)
+build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
{
tree fntype = TREE_TYPE (fndecl);
tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
- int n = list_length (arglist);
- tree *argarray = (tree *) alloca (n * sizeof (tree));
- int i;
-
- for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
- argarray[i] = TREE_VALUE (arglist);
+
return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
}
/* Conveniently construct a function call expression. FNDECL names the
+ function to be called and the arguments are passed in the vector
+ VEC. */
+
+tree
+build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
+{
+  /* Thin wrapper: hand the VEC's length and backing array straight to
+     the array-based constructor.  */
+  return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
+				    VEC_address (tree, vec));
+}
+
+
+/* Conveniently construct a function call expression. FNDECL names the
function to be called, N is the number of arguments, and the "..."
parameters are the argument expressions. */
build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
{
va_list ap;
- tree fntype = TREE_TYPE (fndecl);
- tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
- tree *argarray = (tree *) alloca (n * sizeof (tree));
+ tree *argarray = XALLOCAVEC (tree, n);
int i;
va_start (ap, n);
for (i = 0; i < n; i++)
argarray[i] = va_arg (ap, tree);
va_end (ap);
- return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
+ return build_call_expr_loc_array (loc, fndecl, n, argarray);
+}
+
+/* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
+ varargs macros aren't supported by all bootstrap compilers. */
+
+tree
+build_call_expr (tree fndecl, int n, ...)
+{
+  va_list ap;
+  /* Stack-allocate the argument array; N is caller-supplied and small.  */
+  tree *argarray = XALLOCAVEC (tree, n);
+  int i;
+
+  va_start (ap, n);
+  for (i = 0; i < n; i++)
+    argarray[i] = va_arg (ap, tree);
+  va_end (ap);
+  /* No source location is available here; callers that have one should
+     use build_call_expr_loc instead.  */
+  return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
+}
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
tree *argarray)
{
tree ret = NULL_TREE;
- int i;
tree exp;
if (TREE_CODE (fn) == ADDR_EXPR)
return build_call_array_loc (loc, type, fn, n, argarray);
if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
{
- tree arglist = NULL_TREE;
- for (i = n - 1; i >= 0; i--)
- arglist = tree_cons (NULL_TREE, argarray[i], arglist);
- ret = targetm.fold_builtin (fndecl, arglist, false);
- if (ret)
- return ret;
+ ret = targetm.fold_builtin (fndecl, n, argarray, false);
+ if (ret)
+ return ret;
+
return build_call_array_loc (loc, type, fn, n, argarray);
}
else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
if (! fn)
return NULL_RTX;
- fn = build_call_nofold (fn, 3, dest, src, len);
+ fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
gcc_assert (TREE_CODE (fn) == CALL_EXPR);
CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
return expand_expr (fn, target, mode, EXPAND_NORMAL);
tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
if (!fn)
return NULL_RTX;
- fn = build_call_nofold (fn, 4, dest, src, len, size);
+ fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
+ dest, src, len, size);
gcc_assert (TREE_CODE (fn) == CALL_EXPR);
CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
return expand_expr (fn, target, mode, EXPAND_NORMAL);
return;
arg = get_base_address (TREE_OPERAND (arg, 0));
- if (arg == NULL || INDIRECT_REF_P (arg))
+ if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
return;
if (SSA_VAR_P (arg))
tree
fold_builtin_object_size (tree ptr, tree ost)
{
- tree ret = NULL_TREE;
+ /* Computed size, kept as an unsigned HOST_WIDE_INT so it can be
+    range-checked against size_type_node before a constant is built.  */
+ unsigned HOST_WIDE_INT bytes;
 int object_size_type;
 if (!validate_arg (ptr, POINTER_TYPE)
 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
 if (TREE_CODE (ptr) == ADDR_EXPR)
- ret = build_int_cstu (size_type_node,
- compute_builtin_object_size (ptr, object_size_type));
-
+ {
+ bytes = compute_builtin_object_size (ptr, object_size_type);
+ /* Only fold to a constant if the size fits size_type_node;
+    otherwise fall through and return NULL_TREE below.  */
+ if (double_int_fits_to_tree_p (size_type_node,
+ uhwi_to_double_int (bytes)))
+ return build_int_cstu (size_type_node, bytes);
+ }
 else if (TREE_CODE (ptr) == SSA_NAME)
 {
 /* If object size is not known yet, delay folding until
 later. Maybe subsequent passes will help determining
 it. */
 bytes = compute_builtin_object_size (ptr, object_size_type);
- if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
- ? -1 : 0))
- ret = build_int_cstu (size_type_node, bytes);
- }
-
- if (ret)
- {
- unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
- HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
- if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
- ret = NULL_TREE;
+ /* The "unknown size" sentinel is -1 when object_size_type < 2 and
+    0 otherwise; keep delaying in that case.  As above, also require
+    that the value fits in size_type_node.  */
+ if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
+ && double_int_fits_to_tree_p (size_type_node,
+ uhwi_to_double_int (bytes)))
+ return build_int_cstu (size_type_node, bytes);
 }
- return ret;
+ return NULL_TREE;
 }
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
if (avoid_folding_inline_builtin (fndecl))
return NULL_TREE;
- /* FIXME: Don't use a list in this interface. */
if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
{
- tree arglist = NULL_TREE;
- int i;
- for (i = nargs - 1; i >= 0; i--)
- arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
- return targetm.fold_builtin (fndecl, arglist, ignore);
+ return targetm.fold_builtin (fndecl, nargs,
+ (nargs > 0
+ ? gimple_call_arg_ptr (stmt, 0)
+ : &error_mark_node), ignore);
}
else
{
case BUILT_IN_ABORT:
abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
break;
+ case BUILT_IN_FFS:
+ if (INT_TYPE_SIZE < BITS_PER_WORD)
+ {
+ set_user_assembler_libfunc ("ffs", asmspec);
+ set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
+ MODE_INT, 0), "ffs");
+ }
+ break;
default:
break;
}
}
+
+/* Return true if DECL is a builtin that expands to a constant or similarly
+   simple code.  Returns false for a NULL DECL and for any builtin whose
+   class is not BUILT_IN_NORMAL.  */
+bool
+is_simple_builtin (tree decl)
+{
+ if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
+ switch (DECL_FUNCTION_CODE (decl))
+ {
+ /* Builtins that expand to constants. */
+ case BUILT_IN_CONSTANT_P:
+ case BUILT_IN_EXPECT:
+ case BUILT_IN_OBJECT_SIZE:
+ case BUILT_IN_UNREACHABLE:
+ /* Simple register moves or loads from stack. */
+ case BUILT_IN_RETURN_ADDRESS:
+ case BUILT_IN_EXTRACT_RETURN_ADDR:
+ case BUILT_IN_FROB_RETURN_ADDR:
+ case BUILT_IN_RETURN:
+ case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
+ case BUILT_IN_FRAME_ADDRESS:
+ case BUILT_IN_VA_END:
+ case BUILT_IN_STACK_SAVE:
+ case BUILT_IN_STACK_RESTORE:
+ /* Exception state returns or moves registers around. */
+ case BUILT_IN_EH_FILTER:
+ case BUILT_IN_EH_POINTER:
+ case BUILT_IN_EH_COPY_VALUES:
+ return true;
+
+ default:
+ return false;
+ }
+
+ return false;
+}
+
+/* Return true if DECL is a builtin that is not expensive, i.e. one that
+   is most probably expanded inline into reasonably simple code.  This is
+   a superset of is_simple_builtin.  */
+bool
+is_inexpensive_builtin (tree decl)
+{
+ if (!decl)
+ return false;
+ /* All machine-dependent (BUILT_IN_MD) builtins are treated as
+    inexpensive here, unconditionally.  */
+ else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
+ return true;
+ else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
+ switch (DECL_FUNCTION_CODE (decl))
+ {
+ case BUILT_IN_ABS:
+ case BUILT_IN_ALLOCA:
+ case BUILT_IN_BSWAP32:
+ case BUILT_IN_BSWAP64:
+ case BUILT_IN_CLZ:
+ case BUILT_IN_CLZIMAX:
+ case BUILT_IN_CLZL:
+ case BUILT_IN_CLZLL:
+ case BUILT_IN_CTZ:
+ case BUILT_IN_CTZIMAX:
+ case BUILT_IN_CTZL:
+ case BUILT_IN_CTZLL:
+ case BUILT_IN_FFS:
+ case BUILT_IN_FFSIMAX:
+ case BUILT_IN_FFSL:
+ case BUILT_IN_FFSLL:
+ case BUILT_IN_IMAXABS:
+ case BUILT_IN_FINITE:
+ case BUILT_IN_FINITEF:
+ case BUILT_IN_FINITEL:
+ case BUILT_IN_FINITED32:
+ case BUILT_IN_FINITED64:
+ case BUILT_IN_FINITED128:
+ case BUILT_IN_FPCLASSIFY:
+ case BUILT_IN_ISFINITE:
+ case BUILT_IN_ISINF_SIGN:
+ case BUILT_IN_ISINF:
+ case BUILT_IN_ISINFF:
+ case BUILT_IN_ISINFL:
+ case BUILT_IN_ISINFD32:
+ case BUILT_IN_ISINFD64:
+ case BUILT_IN_ISINFD128:
+ case BUILT_IN_ISNAN:
+ case BUILT_IN_ISNANF:
+ case BUILT_IN_ISNANL:
+ case BUILT_IN_ISNAND32:
+ case BUILT_IN_ISNAND64:
+ case BUILT_IN_ISNAND128:
+ case BUILT_IN_ISNORMAL:
+ case BUILT_IN_ISGREATER:
+ case BUILT_IN_ISGREATEREQUAL:
+ case BUILT_IN_ISLESS:
+ case BUILT_IN_ISLESSEQUAL:
+ case BUILT_IN_ISLESSGREATER:
+ case BUILT_IN_ISUNORDERED:
+ case BUILT_IN_VA_ARG_PACK:
+ case BUILT_IN_VA_ARG_PACK_LEN:
+ case BUILT_IN_VA_COPY:
+ case BUILT_IN_TRAP:
+ case BUILT_IN_SAVEREGS:
+ case BUILT_IN_POPCOUNTL:
+ case BUILT_IN_POPCOUNTLL:
+ case BUILT_IN_POPCOUNTIMAX:
+ case BUILT_IN_POPCOUNT:
+ case BUILT_IN_PARITYL:
+ case BUILT_IN_PARITYLL:
+ case BUILT_IN_PARITYIMAX:
+ case BUILT_IN_PARITY:
+ case BUILT_IN_LABS:
+ case BUILT_IN_LLABS:
+ case BUILT_IN_PREFETCH:
+ return true;
+
+ default:
+ /* Anything accepted by is_simple_builtin is also inexpensive.  */
+ return is_simple_builtin (decl);
+ }
+
+ return false;
+}
+