/* Expand builtin functions.
Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
- 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
+ 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
Free Software Foundation, Inc.
This file is part of GCC.
{
switch (TREE_CODE (exp))
{
- case NOP_EXPR:
- case CONVERT_EXPR:
+ CASE_CONVERT:
exp = TREE_OPERAND (exp, 0);
if (! POINTER_TYPE_P (TREE_TYPE (exp)))
return align;
runtime. */
if (offset < 0 || offset > max)
{
- warning (0, "offset outside bounds of constant string");
+ /* Suppress multiple warnings for propagated constant strings. */
+ if (! TREE_NO_WARNING (src))
+ {
+ warning (0, "offset outside bounds of constant string");
+ TREE_NO_WARNING (src) = 1;
+ }
return NULL_TREE;
}
tem = hard_frame_pointer_rtx;
/* Tell reload not to eliminate the frame pointer. */
- current_function_accesses_prior_frames = 1;
+ crtl->accesses_prior_frames = 1;
}
#endif
/* Tell optimize_save_area_alloca that extra work is going to
need to go on during alloca. */
- current_function_calls_setjmp = 1;
+ cfun->calls_setjmp = 1;
/* We have a nonlocal label. */
- current_function_has_nonlocal_label = 1;
+ cfun->has_nonlocal_label = 1;
}
/* Construct the trailing part of a __builtin_setjmp call. This is
{
/* Clobber the FP when we get here, so we have to make sure it's
marked as used by this function. */
- emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
+ emit_use (hard_frame_pointer_rtx);
/* Mark the static chain as clobbered here so life information
doesn't get messed up for it. */
- emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
+ emit_clobber (static_chain_rtx);
/* Now put in the code to restore the frame pointer, and argument
pointer, if needed. */
emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
/* This might change the hard frame pointer in ways that aren't
apparent to early optimization passes, so force a clobber. */
- emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
+ emit_clobber (hard_frame_pointer_rtx);
}
#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
{
lab = copy_to_reg (lab);
- emit_insn (gen_rtx_CLOBBER (VOIDmode,
- gen_rtx_MEM (BLKmode,
- gen_rtx_SCRATCH (VOIDmode))));
- emit_insn (gen_rtx_CLOBBER (VOIDmode,
- gen_rtx_MEM (BLKmode,
- hard_frame_pointer_rtx)));
+ emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
+ emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
emit_move_insn (hard_frame_pointer_rtx, fp);
emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
- emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
- emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
+ emit_use (hard_frame_pointer_rtx);
+ emit_use (stack_pointer_rtx);
emit_indirect_jump (lab);
}
}
r_label = convert_memory_address (Pmode, r_label);
r_save_area = expand_normal (t_save_area);
r_save_area = convert_memory_address (Pmode, r_save_area);
+ /* Copy the address of the save location to a register just in case it was based
+ on the frame pointer. */
+ r_save_area = copy_to_reg (r_save_area);
r_fp = gen_rtx_MEM (Pmode, r_save_area);
r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
- current_function_has_nonlocal_goto = 1;
+ crtl->has_nonlocal_goto = 1;
#ifdef HAVE_nonlocal_goto
/* ??? We no longer need to pass the static chain value, afaik. */
{
r_label = copy_to_reg (r_label);
- emit_insn (gen_rtx_CLOBBER (VOIDmode,
- gen_rtx_MEM (BLKmode,
- gen_rtx_SCRATCH (VOIDmode))));
-
- emit_insn (gen_rtx_CLOBBER (VOIDmode,
- gen_rtx_MEM (BLKmode,
- hard_frame_pointer_rtx)));
+ emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
+ emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
/* Restore frame pointer for containing function.
This sets the actual hard register used for the frame pointer
/* USE of hard_frame_pointer_rtx added for consistency;
not clear if really needed. */
- emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
- emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
+ emit_use (hard_frame_pointer_rtx);
+ emit_use (stack_pointer_rtx);
/* If the architecture is using a GP register, we must
conservatively assume that the target function makes use of it.
a no-op if the GP register is a global invariant.) */
if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
&& fixed_regs[PIC_OFFSET_TABLE_REGNUM])
- emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
+ emit_use (pic_offset_table_rtx);
emit_indirect_jump (r_label);
}
/* Get an expression we can use to find the attributes to assign to MEM.
If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
we can. First remove any nops. */
- while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR)
+ while (CONVERT_EXPR_P (exp)
&& POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
exp = TREE_OPERAND (exp, 0);
tree inner = exp;
while (TREE_CODE (inner) == ARRAY_REF
- || TREE_CODE (inner) == NOP_EXPR
- || TREE_CODE (inner) == CONVERT_EXPR
+ || CONVERT_EXPR_P (inner)
|| TREE_CODE (inner) == VIEW_CONVERT_EXPR
|| TREE_CODE (inner) == SAVE_EXPR)
inner = TREE_OPERAND (inner, 0);
while (TREE_CODE (inner) == COMPONENT_REF)
{
tree field = TREE_OPERAND (inner, 1);
- gcc_assert (! DECL_BIT_FIELD (field));
gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
gcc_assert (field == TREE_OPERAND (mem_expr, 1));
+ /* Bitfields are generally not byte-addressable. */
+ gcc_assert (!DECL_BIT_FIELD (field)
+ || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
+ % BITS_PER_UNIT) == 0
+ && host_integerp (DECL_SIZE (field), 0)
+ && (TREE_INT_CST_LOW (DECL_SIZE (field))
+ % BITS_PER_UNIT) == 0));
+
+ /* If we can prove that the memory starting at XEXP (mem, 0) and
+ ending at XEXP (mem, 0) + LENGTH will fit into this field, we
+ can keep the COMPONENT_REF in MEM_EXPR. But be careful with
+ fields without DECL_SIZE_UNIT like flexible array members. */
if (length >= 0
- && TYPE_SIZE_UNIT (TREE_TYPE (inner))
- && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
+ && DECL_SIZE_UNIT (field)
+ && host_integerp (DECL_SIZE_UNIT (field), 0))
{
HOST_WIDE_INT size
- = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
- /* If we can prove the memory starting at XEXP (mem, 0)
- and ending at XEXP (mem, 0) + LENGTH will fit into
- this field, we can keep that COMPONENT_REF in MEM_EXPR. */
+ = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
if (offset <= size
&& length <= size
&& offset + length <= size)
if (offset >= 0
&& host_integerp (DECL_FIELD_OFFSET (field), 0))
- offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
+ offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
+ tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
/ BITS_PER_UNIT;
else
int regno, size, align, nelts;
enum machine_mode mode;
rtx reg, mem;
- rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
+ rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
size = nelts = 0;
for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
emit_move_insn (reg, adjust_address (result, mode, size));
push_to_sequence (call_fusage);
- emit_insn (gen_rtx_USE (VOIDmode, reg));
+ emit_use (reg);
call_fusage = get_insns ();
end_sequence ();
size += GET_MODE_SIZE (mode);
fcodel = BUILT_IN_MATHFN##L_R ; break;
/* Return mathematic function equivalent to FN but operating directly
- on TYPE, if available. If we can't do the conversion, return zero. */
-tree
-mathfn_built_in (tree type, enum built_in_function fn)
+ on TYPE, if available. If IMPLICIT is true find the function in
+ implicit_built_in_decls[], otherwise use built_in_decls[]. If we
+ can't do the conversion, return zero. */
+
+static tree
+mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
{
+ tree const *const fn_arr
+ = implicit ? implicit_built_in_decls : built_in_decls;
enum built_in_function fcode, fcodef, fcodel;
switch (fn)
CASE_MATHFN (BUILT_IN_SCALB)
CASE_MATHFN (BUILT_IN_SCALBLN)
CASE_MATHFN (BUILT_IN_SCALBN)
+ CASE_MATHFN (BUILT_IN_SIGNBIT)
CASE_MATHFN (BUILT_IN_SIGNIFICAND)
CASE_MATHFN (BUILT_IN_SIN)
CASE_MATHFN (BUILT_IN_SINCOS)
}
if (TYPE_MAIN_VARIANT (type) == double_type_node)
- return implicit_built_in_decls[fcode];
+ return fn_arr[fcode];
else if (TYPE_MAIN_VARIANT (type) == float_type_node)
- return implicit_built_in_decls[fcodef];
+ return fn_arr[fcodef];
else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
- return implicit_built_in_decls[fcodel];
+ return fn_arr[fcodel];
else
return NULL_TREE;
}
+/* Like mathfn_built_in_1(), but always use the implicit array.
+   Returns the TYPE-specific (float/double/long double) variant of
+   math builtin FN, or NULL_TREE if no conversion is possible.  */
+
+tree
+mathfn_built_in (tree type, enum built_in_function fn)
+{
+  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
+}
+
/* If errno must be maintained, expand the RTL to check if the result,
TARGET, of a built-in function call, EXP, is NaN, and if so set
errno to EDOM. */
before_call = get_last_insn ();
- target = expand_call (exp, target, target == const0_rtx);
-
- /* If this is a sqrt operation and we don't care about errno, try to
- attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
- This allows the semantics of the libcall to be visible to the RTL
- optimizers. */
- if (builtin_optab == sqrt_optab && !errno_set)
- {
- /* Search backwards through the insns emitted by expand_call looking
- for the instruction with the REG_RETVAL note. */
- rtx last = get_last_insn ();
- while (last != before_call)
- {
- if (find_reg_note (last, REG_RETVAL, NULL))
- {
- rtx note = find_reg_note (last, REG_EQUAL, NULL);
- /* Check that the REQ_EQUAL note is an EXPR_LIST with
- two elements, i.e. symbol_ref(sqrt) and the operand. */
- if (note
- && GET_CODE (note) == EXPR_LIST
- && GET_CODE (XEXP (note, 0)) == EXPR_LIST
- && XEXP (XEXP (note, 0), 1) != NULL_RTX
- && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
- {
- rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
- /* Check operand is a register with expected mode. */
- if (operand
- && REG_P (operand)
- && GET_MODE (operand) == mode)
- {
- /* Replace the REG_EQUAL note with a SQRT rtx. */
- rtx equiv = gen_rtx_SQRT (mode, operand);
- set_unique_reg_note (last, REG_EQUAL, equiv);
- }
- }
- break;
- }
- last = PREV_INSN (last);
- }
- }
-
- return target;
+ return expand_call (exp, target, target == const0_rtx);
}
/* Expand a call to the builtin binary math functions (pow and atan2).
op1 = convert_to_mode (mode2, op1, 0);
target = emit_library_call_value (optab_libfunc (powi_optab, mode),
- target, LCT_CONST_MAKE_BLOCK, mode, 2,
+ target, LCT_CONST, mode, 2,
op0, mode, op1, mode2);
return target;
&& GET_CODE (len_rtx) == CONST_INT
&& (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
&& can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
- (void *) src_str, dest_align, false))
+ CONST_CAST (char *, src_str),
+ dest_align, false))
{
dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
builtin_memcpy_read_str,
- (void *) src_str, dest_align, false, 0);
+ CONST_CAST (char *, src_str),
+ dest_align, false, 0);
dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
dest_mem = convert_memory_address (ptr_mode, dest_mem);
return dest_mem;
&& GET_CODE (len_rtx) == CONST_INT
&& (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
&& can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
- (void *) src_str, dest_align, false))
+ CONST_CAST (char *, src_str),
+ dest_align, false))
{
dest_mem = get_memory_rtx (dest, len);
set_mem_align (dest_mem, dest_align);
dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
builtin_memcpy_read_str,
- (void *) src_str, dest_align,
- false, endp);
+ CONST_CAST (char *, src_str),
+ dest_align, false, endp);
dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
dest_mem = convert_memory_address (ptr_mode, dest_mem);
return dest_mem;
if (!p || dest_align == 0 || !host_integerp (len, 1)
|| !can_store_by_pieces (tree_low_cst (len, 1),
builtin_strncpy_read_str,
- (void *) p, dest_align, false))
+ CONST_CAST (char *, p),
+ dest_align, false))
return NULL_RTX;
dest_mem = get_memory_rtx (dest, len);
store_by_pieces (dest_mem, tree_low_cst (len, 1),
builtin_strncpy_read_str,
- (void *) p, dest_align, false, 0);
+ CONST_CAST (char *, p), dest_align, false, 0);
dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
dest_mem = convert_memory_address (ptr_mode, dest_mem);
return dest_mem;
enum machine_mode mode)
{
const char *c = (const char *) data;
- char *p = alloca (GET_MODE_SIZE (mode));
+ char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
memset (p, *c, GET_MODE_SIZE (mode));
if (size == 1)
return (rtx) data;
- p = alloca (size);
+ p = XALLOCAVEC (char, size);
memset (p, 1, size);
coeff = c_readstr (p, mode);
if (insn)
emit_insn (insn);
else
- emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
+ emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
TYPE_MODE (integer_type_node), 3,
XEXP (arg1_rtx, 0), Pmode,
XEXP (arg2_rtx, 0), Pmode,
/* If the actual alignment is less than the alignment of the type,
adjust the type accordingly so that we don't assume strict alignment
- when deferencing the pointer. */
+ when dereferencing the pointer. */
boundary *= BITS_PER_UNIT;
if (boundary < TYPE_ALIGN (type))
{
t = build_string (len, str);
elem = build_type_variant (char_type_node, 1, 0);
- index = build_index_type (build_int_cst (NULL_TREE, len - 1));
+ index = build_index_type (size_int (len - 1));
type = build_array_type (elem, index);
TREE_TYPE (t) = type;
TREE_CONSTANT (t) = 1;
TREE_READONLY (t) = 1;
TREE_STATIC (t) = 1;
- type = build_pointer_type (type);
- t = build1 (ADDR_EXPR, type, t);
-
type = build_pointer_type (elem);
- t = build1 (NOP_EXPR, type, t);
+ t = build1 (ADDR_EXPR, type,
+ build4 (ARRAY_REF, elem,
+ t, integer_zero_node, NULL_TREE, NULL_TREE));
return t;
}
{
/* Create a NUL-terminated string that's one char shorter
than the original, stripping off the trailing '\n'. */
- char *newstr = alloca (len);
+ char *newstr = XALLOCAVEC (char, len);
memcpy (newstr, fmt_str, len - 1);
newstr[len - 1] = 0;
arg = build_string_literal (len, newstr);
lo = 0;
}
- if (imode != rmode)
+ if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
temp = gen_lowpart (rmode, temp);
temp = expand_binop (rmode, and_optab, temp,
immed_double_const (lo, hi, rmode),
none of its arguments are volatile, we can avoid expanding the
built-in call and just evaluate the arguments for side-effects. */
if (target == const0_rtx
- && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
+ && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
{
bool volatilep = false;
tree arg;
if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
return NULL_TREE;
/* With memcpy, it is possible to bypass aliasing rules, so without
- this check i. e. execute/20060930-2.c would be misoptimized, because
+ this check i.e. execute/20060930-2.c would be misoptimized, because
it use conflicting alias set to hold argument for the memcpy call.
- This check is probably unnecesary with -fno-strict-aliasing.
+ This check is probably unnecessary with -fno-strict-aliasing.
Similarly for destvar. See also PR29286. */
if (!var_decl_component_p (srcvar)
/* Accept: memcpy (*char_var, "test", 1); that simplify
if (target_char_cast (arg2, &c))
return NULL_TREE;
- r = memchr (p1, c, tree_low_cst (len, 1));
+ r = (char *) memchr (p1, c, tree_low_cst (len, 1));
if (r == NULL)
return build_int_cst (TREE_TYPE (arg1), 0);
return NULL_TREE;
+ case BUILT_IN_ISINF_SIGN:
+ {
+ /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
+ /* In a boolean context, GCC will fold the inner COND_EXPR to
+ 1. So e.g. "if (isinf_sign(x))" would be folded to just
+ "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
+ tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
+ tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
+ tree tmp = NULL_TREE;
+
+ arg = builtin_save_expr (arg);
+
+ if (signbit_fn && isinf_fn)
+ {
+ tree signbit_call = build_call_expr (signbit_fn, 1, arg);
+ tree isinf_call = build_call_expr (isinf_fn, 1, arg);
+
+ signbit_call = fold_build2 (NE_EXPR, integer_type_node,
+ signbit_call, integer_zero_node);
+ isinf_call = fold_build2 (NE_EXPR, integer_type_node,
+ isinf_call, integer_zero_node);
+
+ tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
+ integer_minus_one_node, integer_one_node);
+ tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
+ integer_zero_node);
+ }
+
+ return tmp;
+ }
+
case BUILT_IN_ISFINITE:
if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
&& !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
}
}
+/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
+   This builtin will generate code to return the appropriate floating
+   point classification depending on the value of the floating point
+   number passed in.  The possible return values must be supplied as
+   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
+   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
+   one floating point argument which is "type generic".  */
+
+static tree
+fold_builtin_fpclassify (tree exp)
+{
+  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
+    arg, type, res, tmp;
+  enum machine_mode mode;
+  REAL_VALUE_TYPE r;
+  char buf[128];
+
+  /* Verify the required arguments in the original call.  */
+  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
+			 INTEGER_TYPE, INTEGER_TYPE,
+			 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
+    return NULL_TREE;
+
+  fp_nan = CALL_EXPR_ARG (exp, 0);
+  fp_infinite = CALL_EXPR_ARG (exp, 1);
+  fp_normal = CALL_EXPR_ARG (exp, 2);
+  fp_subnormal = CALL_EXPR_ARG (exp, 3);
+  fp_zero = CALL_EXPR_ARG (exp, 4);
+  arg = CALL_EXPR_ARG (exp, 5);
+  type = TREE_TYPE (arg);
+  mode = TYPE_MODE (type);
+  /* Classify on |x|, wrapped in a SAVE_EXPR so the argument is only
+     evaluated once even though it appears in several comparisons.  */
+  arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
+
+  /* fpclassify(x) ->
+       isnan(x) ? FP_NAN :
+         (fabs(x) == Inf ? FP_INFINITE :
+	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
+	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */
+
+  /* Build the COND_EXPR chain innermost-first: start with the
+     zero-vs-subnormal test and wrap the normal, infinite and NaN
+     tests around it.  */
+  tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
+		     build_real (type, dconst0));
+  res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
+
+  /* 0x1p(emin-1) is the smallest normalized value of MODE; anything
+     at or above it (in magnitude) is FP_NORMAL.  */
+  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
+  real_from_string (&r, buf);
+  tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
+  res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
+
+  if (HONOR_INFINITIES (mode))
+    {
+      real_inf (&r);
+      tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
+			 build_real (type, r));
+      res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
+    }
+
+  if (HONOR_NANS (mode))
+    {
+      /* ORDERED_EXPR (arg, arg) is false exactly when ARG is a NaN.  */
+      tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
+      res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
+    }
+
+  return res;
+}
+
/* Fold a call to an unordered comparison function such as
__builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
being called and ARG0 and ARG1 are the arguments for the call.
case BUILT_IN_ISINFD128:
return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
+ case BUILT_IN_ISINF_SIGN:
+ return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
+
CASE_FLT_FN (BUILT_IN_ISNAN):
case BUILT_IN_ISNAND32:
case BUILT_IN_ISNAND64:
case BUILT_IN_SNPRINTF_CHK:
case BUILT_IN_VSNPRINTF_CHK:
ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
+ break;
+
+ case BUILT_IN_FPCLASSIFY:
+ ret = fold_builtin_fpclassify (exp);
+ break;
default:
break;
if (CAN_HAVE_LOCATION_P (realret)
&& !EXPR_HAS_LOCATION (realret))
SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
+ return realret;
}
return ret;
}
int i, j;
va_list ap;
- buffer = alloca (nargs * sizeof (tree));
+ buffer = XALLOCAVEC (tree, nargs);
va_start (ap, n);
for (i = 0; i < n; i++)
buffer[i] = va_arg (ap, tree);
is not quite the same as STRIP_NOPS. It does more.
We must also strip off INDIRECT_EXPR for C++ reference
parameters. */
- while (TREE_CODE (arg) == NOP_EXPR
- || TREE_CODE (arg) == CONVERT_EXPR
+ while (CONVERT_EXPR_P (arg)
|| TREE_CODE (arg) == INDIRECT_REF)
arg = TREE_OPERAND (arg, 0);
if (arg != last_parm)
{
/* Create a NUL-terminated string that's one char shorter
than the original, stripping off the trailing '\n'. */
- char *newstr = alloca (len);
+ char *newstr = XALLOCAVEC (char, len);
memcpy (newstr, str, len - 1);
newstr[len - 1] = 0;