You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA. */
#include "config.h"
#include "system.h"
#include "basic-block.h"
#include "tree-mudflap.h"
-#define CALLED_AS_BUILT_IN(NODE) \
- (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
-
#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
+static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
static rtx expand_builtin_args_info (tree);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy (tree, tree, rtx, enum machine_mode, int);
-static rtx expand_builtin_memmove (tree, tree, rtx, enum machine_mode);
-static rtx expand_builtin_bcopy (tree, tree);
+static rtx expand_builtin_memmove (tree, tree, rtx, enum machine_mode, tree);
+static rtx expand_builtin_bcopy (tree);
static rtx expand_builtin_strcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
static rtx builtin_memset_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
-static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
+static rtx expand_builtin_memset (tree, rtx, enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_strstr (tree, tree, rtx, enum machine_mode);
static tree fold_builtin_nan (tree, tree, int);
static int validate_arglist (tree, ...);
static bool integer_valued_real_p (tree);
-static tree fold_trunc_transparent_mathfn (tree);
+static tree fold_trunc_transparent_mathfn (tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_cos (tree, tree, tree);
static tree fold_builtin_tan (tree);
static tree fold_builtin_atan (tree, tree);
-static tree fold_builtin_trunc (tree);
-static tree fold_builtin_floor (tree);
-static tree fold_builtin_ceil (tree);
-static tree fold_builtin_round (tree);
-static tree fold_builtin_bitop (tree);
-static tree fold_builtin_memcpy (tree);
+static tree fold_builtin_trunc (tree, tree);
+static tree fold_builtin_floor (tree, tree);
+static tree fold_builtin_ceil (tree, tree);
+static tree fold_builtin_round (tree, tree);
+static tree fold_builtin_int_roundingfn (tree, tree);
+static tree fold_builtin_bitop (tree, tree);
+static tree fold_builtin_memcpy (tree, tree);
static tree fold_builtin_mempcpy (tree, tree, int);
static tree fold_builtin_memmove (tree, tree);
static tree fold_builtin_strchr (tree, tree);
static tree fold_builtin_memcmp (tree);
static tree fold_builtin_strcmp (tree);
static tree fold_builtin_strncmp (tree);
-static tree fold_builtin_signbit (tree);
+static tree fold_builtin_signbit (tree, tree);
static tree fold_builtin_copysign (tree, tree, tree);
static tree fold_builtin_isascii (tree);
static tree fold_builtin_toascii (tree);
static tree fold_builtin_abs (tree, tree);
static tree fold_builtin_unordered_cmp (tree, tree, enum tree_code,
enum tree_code);
-static tree fold_builtin_1 (tree, bool);
+static tree fold_builtin_1 (tree, tree, bool);
static tree fold_builtin_strpbrk (tree, tree);
static tree fold_builtin_strstr (tree, tree);
static tree fold_builtin_strcspn (tree);
static tree fold_builtin_sprintf (tree, int);
+/* Return true if NODE should be considered for inline expansion regardless
+ of the optimization level. This holds whenever the function is invoked
+ under its "internal" name, i.e. one carrying a "__builtin_" or
+ "__sync_" prefix. */
+
+static bool called_as_built_in (tree node)
+{
+ const char *fname = IDENTIFIER_POINTER (DECL_NAME (node));
+ return (strncmp (fname, "__builtin_", 10) == 0
+ || strncmp (fname, "__sync_", 7) == 0);
+}
/* Return the alignment in bits of EXP, a pointer valued expression.
But don't return more than MAX_ALIGN no matter what.
runtime. */
if (offset < 0 || offset > max)
{
- warning ("offset outside bounds of constant string");
+ warning (0, "offset outside bounds of constant string");
return 0;
}
/* Argument 1 must be either zero or one. */
if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
{
- warning ("invalid second argument to %<__builtin_prefetch%>;"
+ warning (0, "invalid second argument to %<__builtin_prefetch%>;"
" using zero");
op1 = const0_rtx;
}
/* Argument 2 must be 0, 1, 2, or 3. */
if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
{
- warning ("invalid third argument to %<__builtin_prefetch%>; using zero");
+ warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
op2 = const0_rtx;
}
static rtx
get_memory_rtx (tree exp)
{
- rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
- rtx mem;
-
- addr = convert_memory_address (Pmode, addr);
-
- mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
+ rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
+ rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
/* Get an expression we can use to find the attributes to assign to MEM.
If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
case QUAL_UNION_TYPE: return union_type_class;
case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
? string_type_class : array_type_class);
- case FILE_TYPE: return file_type_class;
case LANG_TYPE: return lang_type_class;
default: return no_type_class;
}
CASE_MATHFN (BUILT_IN_J0)
CASE_MATHFN (BUILT_IN_J1)
CASE_MATHFN (BUILT_IN_JN)
+ CASE_MATHFN (BUILT_IN_LCEIL)
CASE_MATHFN (BUILT_IN_LDEXP)
+ CASE_MATHFN (BUILT_IN_LFLOOR)
CASE_MATHFN (BUILT_IN_LGAMMA)
+ CASE_MATHFN (BUILT_IN_LLCEIL)
+ CASE_MATHFN (BUILT_IN_LLFLOOR)
CASE_MATHFN (BUILT_IN_LLRINT)
CASE_MATHFN (BUILT_IN_LLROUND)
CASE_MATHFN (BUILT_IN_LOG)
case BUILT_IN_RINTF:
case BUILT_IN_RINTL:
builtin_optab = rint_optab; break;
+ case BUILT_IN_LRINT:
+ case BUILT_IN_LRINTF:
+ case BUILT_IN_LRINTL:
+ case BUILT_IN_LLRINT:
+ case BUILT_IN_LLRINTF:
+ case BUILT_IN_LLRINTL:
+ builtin_optab = lrint_optab; break;
default:
gcc_unreachable ();
}
return target;
}
+/* Expand a call to one of the builtin rounding functions lceil or
+ lfloor (and their l*/ll* variants). If expanding via optab fails,
+ lower the expression to (int)(floor(x)) resp. (int)(ceil(x)).
+ EXP is the expression that is a call to the builtin function;
+ if convenient, the result should be placed in TARGET. SUBTARGET may
+ be used as the target for computing one of EXP's operands. */
+
+static rtx
+expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
+{
+ optab builtin_optab;
+ rtx op0, insns, tmp;
+ tree fndecl = get_callee_fndecl (exp);
+ tree arglist = TREE_OPERAND (exp, 1);
+ enum built_in_function fallback_fn;
+ tree fallback_fndecl;
+ enum machine_mode mode;
+ tree arg, narg;
+
+ /* A single REAL_TYPE argument is required; anything else here is a
+ front-end bug. */
+ if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ gcc_unreachable ();
+
+ arg = TREE_VALUE (arglist);
+
+ /* Select the direct-conversion optab and, should it be unavailable,
+ the pure floating-point rounding function to fall back on. */
+ switch (DECL_FUNCTION_CODE (fndecl))
+ {
+ case BUILT_IN_LCEIL:
+ case BUILT_IN_LCEILF:
+ case BUILT_IN_LCEILL:
+ case BUILT_IN_LLCEIL:
+ case BUILT_IN_LLCEILF:
+ case BUILT_IN_LLCEILL:
+ builtin_optab = lceil_optab;
+ fallback_fn = BUILT_IN_CEIL;
+ break;
+
+ case BUILT_IN_LFLOOR:
+ case BUILT_IN_LFLOORF:
+ case BUILT_IN_LFLOORL:
+ case BUILT_IN_LLFLOOR:
+ case BUILT_IN_LLFLOORF:
+ case BUILT_IN_LLFLOORL:
+ builtin_optab = lfloor_optab;
+ fallback_fn = BUILT_IN_FLOOR;
+ break;
+
+ default:
+ gcc_unreachable ();
+ }
+
+ /* Make a suitable register to place result in. */
+ mode = TYPE_MODE (TREE_TYPE (exp));
+
+ /* Before working hard, check whether the instruction is available. */
+ if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
+ {
+ target = gen_reg_rtx (mode);
+
+ /* Wrap the computation of the argument in a SAVE_EXPR, as we may
+ need to expand the argument again. This way, we will not perform
+ side-effects more than once. */
+ narg = builtin_save_expr (arg);
+ if (narg != arg)
+ {
+ arg = narg;
+ arglist = build_tree_list (NULL_TREE, arg);
+ exp = build_function_call_expr (fndecl, arglist);
+ }
+
+ op0 = expand_expr (arg, subtarget, VOIDmode, 0);
+
+ start_sequence ();
+
+ /* Compute into TARGET.
+ Set TARGET to wherever the result comes back. */
+ target = expand_unop (mode, builtin_optab, op0, target, 0);
+
+ if (target != 0)
+ {
+ /* Output the entire sequence. */
+ insns = get_insns ();
+ end_sequence ();
+ emit_insn (insns);
+ return target;
+ }
+
+ /* If we were unable to expand via the builtin, stop the sequence
+ (without outputting the insns). */
+ end_sequence ();
+ }
+
+ /* Fall back to floating point rounding optab. */
+ fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
+ /* We shouldn't get here on targets without TARGET_C99_FUNCTIONS.
+ ??? Perhaps convert (int)floorf(x) into (int)floor((double)x). */
+ gcc_assert (fallback_fndecl != NULL_TREE);
+ exp = build_function_call_expr (fallback_fndecl, arglist);
+
+ tmp = expand_builtin_mathfn (exp, NULL_RTX, NULL_RTX);
+
+ /* Truncate the result of floating point optab to integer
+ via expand_fix (). */
+ target = gen_reg_rtx (mode);
+ expand_fix (target, tmp, 0);
+
+ return target;
+}
+
+
/* To evaluate powi(x,n), the floating point value x raised to the
constant integer exponent n, we use a hybrid algorithm that
combines the "window method" with look-up tables. For an
tree arg0, arg1;
rtx op0, op1;
enum machine_mode mode;
+ enum machine_mode mode2;
if (! validate_arglist (arglist, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
return 0;
/* Emit a libcall to libgcc. */
+ /* Mode of the 2nd argument must match that of an int. */
+ mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
+
if (target == NULL_RTX)
target = gen_reg_rtx (mode);
op0 = expand_expr (arg0, subtarget, mode, 0);
if (GET_MODE (op0) != mode)
op0 = convert_to_mode (mode, op0, 0);
- op1 = expand_expr (arg1, 0, word_mode, 0);
- if (GET_MODE (op1) != word_mode)
- op1 = convert_to_mode (word_mode, op1, 0);
+ op1 = expand_expr (arg1, 0, mode2, 0);
+ if (GET_MODE (op1) != mode2)
+ op1 = convert_to_mode (mode2, op1, 0);
target = emit_library_call_value (powi_optab->handlers[(int) mode].libfunc,
target, LCT_CONST_MAKE_BLOCK, mode, 2,
- op0, mode, op1, word_mode);
+ op0, mode, op1, mode2);
return target;
}
static rtx
expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
{
+ tree fndecl = get_callee_fndecl (exp);
tree arglist = TREE_OPERAND (exp, 1);
if (!validate_arglist (arglist,
POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
unsigned int dest_align
= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
rtx dest_mem, src_mem, dest_addr, len_rtx;
- tree result = fold_builtin_memcpy (exp);
+ tree result = fold_builtin_memcpy (fndecl, arglist);
if (result)
return expand_expr (result, target, mode, EXPAND_NORMAL);
/* Copy word part most expediently. */
dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
- BLOCK_OP_NORMAL);
+ CALL_EXPR_TAILCALL (exp)
+ ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL);
if (dest_addr == 0)
{
static rtx
expand_builtin_memmove (tree arglist, tree type, rtx target,
- enum machine_mode mode)
+ enum machine_mode mode, tree orig_exp)
{
if (!validate_arglist (arglist,
POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
normal memcpy. */
if (readonly_data_expr (src))
{
- tree const fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
+ tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
if (!fn)
return 0;
- return expand_expr (build_function_call_expr (fn, arglist),
- target, mode, EXPAND_NORMAL);
+ fn = build_function_call_expr (fn, arglist);
+ if (TREE_CODE (fn) == CALL_EXPR)
+ CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
+ return expand_expr (fn, target, mode, EXPAND_NORMAL);
}
/* If length is 1 and we can expand memcpy call inline,
if we failed the caller should emit a normal call. */
static rtx
-expand_builtin_bcopy (tree arglist, tree type)
+expand_builtin_bcopy (tree exp)
{
+ tree arglist = TREE_OPERAND (exp, 1);
+ tree type = TREE_TYPE (exp);
tree src, dest, size, newarglist;
if (!validate_arglist (arglist,
newarglist = tree_cons (NULL_TREE, src, newarglist);
newarglist = tree_cons (NULL_TREE, dest, newarglist);
- return expand_builtin_memmove (newarglist, type, const0_rtx, VOIDmode);
+ return expand_builtin_memmove (newarglist, type, const0_rtx, VOIDmode, exp);
}
#ifndef HAVE_movstr
convenient). */
static rtx
-expand_builtin_memset (tree arglist, rtx target, enum machine_mode mode)
+expand_builtin_memset (tree arglist, rtx target, enum machine_mode mode,
+ tree orig_exp)
{
if (!validate_arglist (arglist,
POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
&c, dest_align))
return 0;
- val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
+ val = fold_build1 (CONVERT_EXPR, unsigned_char_type_node, val);
val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
val_rtx);
dest_mem = get_memory_rtx (dest);
set_mem_align (dest_mem, dest_align);
- dest_addr = clear_storage (dest_mem, len_rtx);
+ dest_addr = clear_storage (dest_mem, len_rtx,
+ CALL_EXPR_TAILCALL (orig_exp)
+ ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL);
if (dest_addr == 0)
{
if we failed the caller should emit a normal call. */
static rtx
-expand_builtin_bzero (tree arglist)
+expand_builtin_bzero (tree exp)
{
+ tree arglist = TREE_OPERAND (exp, 1);
tree dest, size, newarglist;
if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
newarglist = tree_cons (NULL_TREE, dest, newarglist);
- return expand_builtin_memset (newarglist, const0_rtx, VOIDmode);
+ return expand_builtin_memset (newarglist, const0_rtx, VOIDmode, exp);
}
/* Expand expression EXP, which is a call to the memcmp built-in function.
tree len, len1, len2;
rtx arg1_rtx, arg2_rtx, arg3_rtx;
rtx result, insn;
- tree fndecl;
+ tree fndecl, fn;
int arg1_align
= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
arglist = build_tree_list (NULL_TREE, arg2);
arglist = tree_cons (NULL_TREE, arg1, arglist);
fndecl = get_callee_fndecl (exp);
- exp = build_function_call_expr (fndecl, arglist);
- return expand_call (exp, target, target == const0_rtx);
+ fn = build_function_call_expr (fndecl, arglist);
+ if (TREE_CODE (fn) == CALL_EXPR)
+ CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
+ return expand_call (fn, target, target == const0_rtx);
}
#endif
return 0;
tree len, len1, len2;
rtx arg1_rtx, arg2_rtx, arg3_rtx;
rtx result, insn;
- tree fndecl;
+ tree fndecl, fn;
int arg1_align
= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
return 0;
/* The actual new length parameter is MIN(len,arg3). */
- len = fold (build2 (MIN_EXPR, TREE_TYPE (len), len,
- fold_convert (TREE_TYPE (len), arg3)));
+ len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
+ fold_convert (TREE_TYPE (len), arg3));
/* If we don't have POINTER_TYPE, call the function. */
if (arg1_align == 0 || arg2_align == 0)
arglist = tree_cons (NULL_TREE, arg2, arglist);
arglist = tree_cons (NULL_TREE, arg1, arglist);
fndecl = get_callee_fndecl (exp);
- exp = build_function_call_expr (fndecl, arglist);
- return expand_call (exp, target, target == const0_rtx);
+ fn = build_function_call_expr (fndecl, arglist);
+ if (TREE_CODE (fn) == CALL_EXPR)
+ CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
+ return expand_call (fn, target, target == const0_rtx);
}
#endif
return 0;
fold (build_function_call_expr (strlen_fn,
build_tree_list (NULL_TREE,
dst)));
- /* Create (dst + strlen (dst)). */
- newdst = fold (build2 (PLUS_EXPR, TREE_TYPE (dst), dst, newdst));
+ /* Create (dst + (cast) strlen (dst)). */
+ newdst = fold_convert (TREE_TYPE (dst), newdst);
+ newdst = fold_build2 (PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
/* Prepend the new dst argument. */
arglist = tree_cons (NULL_TREE, newdst, arglist);
return valist;
pt = build_pointer_type (va_list_type_node);
- valist = fold (build1 (ADDR_EXPR, pt, valist));
+ valist = fold_build1 (ADDR_EXPR, pt, valist);
TREE_SIDE_EFFECTS (valist) = 1;
}
if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
{
/* Small args are padded downward. */
- t = fold (build2 (GT_EXPR, sizetype, rounded_size, size_int (align)));
- t = fold (build3 (COND_EXPR, sizetype, t, size_zero_node,
- size_binop (MINUS_EXPR, rounded_size, type_size)));
+ t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
+ t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
+ size_binop (MINUS_EXPR, rounded_size, type_size));
t = fold_convert (TREE_TYPE (addr), t);
- addr = fold (build2 (PLUS_EXPR, TREE_TYPE (addr), addr, t));
+ addr = fold_build2 (PLUS_EXPR, TREE_TYPE (addr), addr, t);
}
/* Compute new value for AP. */
/* Unfortunately, this is merely undefined, rather than a constraint
violation, so we cannot make this an error. If this call is never
executed, the program is still strictly conforming. */
- warning ("%qT is promoted to %qT when passed through %<...%>",
+ warning (0, "%qT is promoted to %qT when passed through %<...%>",
type, promoted_type);
if (! gave_help)
{
gave_help = true;
- warning ("(so you should pass %qT not %qT to %<va_arg%>)",
+ warning (0, "(so you should pass %qT not %qT to %<va_arg%>)",
promoted_type, type);
}
gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
if (!targetm.gimplify_va_arg_expr)
- /* Once most targets are converted this should abort. */
+ /* FIXME:Once most targets are converted we should merely
+ assert this is non-null. */
return GS_ALL_DONE;
*expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
if (tem == NULL)
{
if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
- warning ("unsupported argument to %<__builtin_frame_address%>");
+ warning (0, "unsupported argument to %<__builtin_frame_address%>");
else
- warning ("unsupported argument to %<__builtin_return_address%>");
+ warning (0, "unsupported argument to %<__builtin_return_address%>");
return const0_rtx;
}
return t;
}
-/* Expand a call to printf or printf_unlocked with argument list ARGLIST.
+/* Expand EXP, a call to printf or printf_unlocked.
Return 0 if a normal call should be emitted rather than transforming
the function inline. If convenient, the result should be placed in
TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
call. */
static rtx
-expand_builtin_printf (tree arglist, rtx target, enum machine_mode mode,
+expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
bool unlocked)
{
+ tree arglist = TREE_OPERAND (exp, 1);
tree fn_putchar = unlocked
? implicit_built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
: implicit_built_in_decls[BUILT_IN_PUTCHAR];
if (!fn)
return 0;
- return expand_expr (build_function_call_expr (fn, arglist),
- target, mode, EXPAND_NORMAL);
+ fn = build_function_call_expr (fn, arglist);
+ if (TREE_CODE (fn) == CALL_EXPR)
+ CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
+ return expand_expr (fn, target, mode, EXPAND_NORMAL);
}
-/* Expand a call to fprintf or fprintf_unlocked with argument list ARGLIST.
+/* Expand EXP, a call to fprintf or fprintf_unlocked.
Return 0 if a normal call should be emitted rather than transforming
the function inline. If convenient, the result should be placed in
TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
call. */
static rtx
-expand_builtin_fprintf (tree arglist, rtx target, enum machine_mode mode,
+expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
bool unlocked)
{
+ tree arglist = TREE_OPERAND (exp, 1);
tree fn_fputc = unlocked ? implicit_built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
: implicit_built_in_decls[BUILT_IN_FPUTC];
tree fn_fputs = unlocked ? implicit_built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
if (!fn)
return 0;
- return expand_expr (build_function_call_expr (fn, arglist),
- target, mode, EXPAND_NORMAL);
+ fn = build_function_call_expr (fn, arglist);
+ if (TREE_CODE (fn) == CALL_EXPR)
+ CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
+ return expand_expr (fn, target, mode, EXPAND_NORMAL);
}
/* Expand a call to sprintf with argument list ARGLIST. Return 0 if
if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
return 0;
- arg = fold (build2 (LT_EXPR, TREE_TYPE (exp), arg,
- build_real (TREE_TYPE (arg), dconst0)));
+ arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
+ build_real (TREE_TYPE (arg), dconst0));
return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
}
return expand_call (call, target, ignore);
}
+
+\f
+/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
+ ARGLIST is the operands list to the function. CODE is the rtx code
+ that corresponds to the arithmetic or logical operation from the name;
+ an exception here is that NOT actually means NAND. TARGET is an optional
+ place for us to store the results; AFTER is true for the xxx_and_fetch
+ forms, which want the value after the operation (the fetch_and_xxx
+ forms pass false and get the previous contents). IGNORE is true if we
+ don't actually care about the result of the operation at all. */
+
+static rtx
+expand_builtin_sync_operation (tree arglist, enum rtx_code code, bool after,
+ rtx target, bool ignore)
+{
+ enum machine_mode mode;
+ rtx addr, val, mem;
+
+ /* Expand the operands. The memory mode is that of the type the
+ first (pointer) argument points to. */
+ addr = expand_expr (TREE_VALUE (arglist), NULL, Pmode, EXPAND_SUM);
+ mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (TREE_VALUE (arglist))));
+
+ arglist = TREE_CHAIN (arglist);
+ val = expand_expr (TREE_VALUE (arglist), NULL, mode, EXPAND_NORMAL);
+
+ /* Note that we explicitly do not want any alias information for this
+ memory, so that we kill all other live memories. Otherwise we don't
+ satisfy the full barrier semantics of the intrinsic. */
+ mem = validize_mem (gen_rtx_MEM (mode, addr));
+ MEM_VOLATILE_P (mem) = 1;
+
+ /* When the result is unused the plain operation suffices; otherwise
+ emit the fetch variant, which also produces the value. */
+ if (ignore)
+ return expand_sync_operation (mem, val, code);
+ else
+ return expand_sync_fetch_operation (mem, val, code, after, target);
+}
+
+/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
+ intrinsics. ARGLIST is the operands list to the function. IS_BOOL is
+ true if this is the boolean form. TARGET is a place for us to store the
+ results; this is NOT optional if IS_BOOL is true. */
+
+static rtx
+expand_builtin_compare_and_swap (tree arglist, bool is_bool, rtx target)
+{
+ enum machine_mode mode;
+ rtx addr, old_val, new_val, mem;
+
+ /* Expand the operands. The memory mode is that of the type the
+ first (pointer) argument points to. */
+ addr = expand_expr (TREE_VALUE (arglist), NULL, Pmode, EXPAND_SUM);
+ mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (TREE_VALUE (arglist))));
+
+ /* Second argument: the expected (comparison) value. */
+ arglist = TREE_CHAIN (arglist);
+ old_val = expand_expr (TREE_VALUE (arglist), NULL, mode, EXPAND_NORMAL);
+
+ /* Third argument: the replacement value. */
+ arglist = TREE_CHAIN (arglist);
+ new_val = expand_expr (TREE_VALUE (arglist), NULL, mode, EXPAND_NORMAL);
+
+ /* Note that we explicitly do not want any alias information for this
+ memory, so that we kill all other live memories. Otherwise we don't
+ satisfy the full barrier semantics of the intrinsic. */
+ mem = validize_mem (gen_rtx_MEM (mode, addr));
+ MEM_VOLATILE_P (mem) = 1;
+
+ if (is_bool)
+ return expand_bool_compare_and_swap (mem, old_val, new_val, target);
+ else
+ return expand_val_compare_and_swap (mem, old_val, new_val, target);
+}
+
+/* Expand the __sync_lock_test_and_set intrinsic. Note that the most
+ general form is actually an atomic exchange, and some targets only
+ support a reduced form with the second argument being a constant 1.
+ ARGLIST is the operands list to the function; TARGET is an optional
+ place for us to store the results. */
+
+static rtx
+expand_builtin_lock_test_and_set (tree arglist, rtx target)
+{
+ enum machine_mode mode;
+ rtx addr, val, mem;
+
+ /* Expand the operands. The memory mode is that of the type the
+ first (pointer) argument points to. */
+ addr = expand_expr (TREE_VALUE (arglist), NULL, Pmode, EXPAND_NORMAL);
+ mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (TREE_VALUE (arglist))));
+
+ /* Second argument: the value to exchange into the lock. */
+ arglist = TREE_CHAIN (arglist);
+ val = expand_expr (TREE_VALUE (arglist), NULL, mode, EXPAND_NORMAL);
+
+ /* Note that we explicitly do not want any alias information for this
+ memory, so that we kill all other live memories. Otherwise we don't
+ satisfy the barrier semantics of the intrinsic. */
+ mem = validize_mem (gen_rtx_MEM (mode, addr));
+ MEM_VOLATILE_P (mem) = 1;
+
+ return expand_sync_lock_test_and_set (mem, val, target);
+}
+
+/* Expand the __sync_synchronize intrinsic: emit a full memory barrier. */
+
+static void
+expand_builtin_synchronize (void)
+{
+ rtx body;
+
+ /* Use the target's explicit barrier instruction when the md file
+ provides one. */
+#ifdef HAVE_memory_barrier
+ if (HAVE_memory_barrier)
+ {
+ emit_insn (gen_memory_barrier ());
+ return;
+ }
+#endif
+
+ /* If no explicit memory barrier instruction is available, create an empty
+ asm stmt that will prevent compiler movement across the barrier. */
+ body = gen_rtx_ASM_INPUT (VOIDmode, "");
+ /* NOTE(review): MEM_VOLATILE_P here appears to reuse the shared
+ 'volatil' rtx flag to mark the asm volatile -- confirm against
+ rtl.h. */
+ MEM_VOLATILE_P (body) = 1;
+ emit_insn (body);
+}
+
+/* Expand the __sync_lock_release intrinsic. ARGLIST is the operands list
+ to the function. Releasing the lock amounts to storing constant zero
+ into it, with appropriate barrier semantics. */
+
+static void
+expand_builtin_lock_release (tree arglist)
+{
+ enum machine_mode mode;
+ enum insn_code icode;
+ rtx addr, val, mem, insn;
+
+ /* Expand the operands. Only the pointer argument is used; the value
+ stored is always constant zero. */
+ addr = expand_expr (TREE_VALUE (arglist), NULL, Pmode, EXPAND_NORMAL);
+ mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (TREE_VALUE (arglist))));
+ val = const0_rtx;
+
+ /* Note that we explicitly do not want any alias information for this
+ memory, so that we kill all other live memories. Otherwise we don't
+ satisfy the barrier semantics of the intrinsic. */
+ mem = validize_mem (gen_rtx_MEM (mode, addr));
+ MEM_VOLATILE_P (mem) = 1;
+
+ /* If there is an explicit operation in the md file, use it. */
+ icode = sync_lock_release[mode];
+ if (icode != CODE_FOR_nothing)
+ {
+ if (!insn_data[icode].operand[1].predicate (val, mode))
+ val = force_reg (mode, val);
+
+ insn = GEN_FCN (icode) (mem, val);
+ if (insn)
+ {
+ emit_insn (insn);
+ return;
+ }
+ }
+
+ /* Otherwise we can implement this operation by emitting a barrier
+ followed by a store of zero. */
+ expand_builtin_synchronize ();
+ emit_move_insn (mem, val);
+}
\f
/* Expand an expression EXP that calls a built-in function,
with result going to TARGET if that's convenient
/* When not optimizing, generate calls to library functions for a certain
set of builtins. */
if (!optimize
- && !CALLED_AS_BUILT_IN (fndecl)
+ && !called_as_built_in (fndecl)
&& DECL_ASSEMBLER_NAME_SET_P (fndecl)
&& fcode != BUILT_IN_ALLOCA)
return expand_call (exp, target, ignore);
case BUILT_IN_RINT:
case BUILT_IN_RINTF:
case BUILT_IN_RINTL:
+ case BUILT_IN_LRINT:
+ case BUILT_IN_LRINTF:
+ case BUILT_IN_LRINTL:
+ case BUILT_IN_LLRINT:
+ case BUILT_IN_LLRINTF:
+ case BUILT_IN_LLRINTL:
target = expand_builtin_mathfn (exp, target, subtarget);
if (target)
return target;
break;
+ case BUILT_IN_LCEIL:
+ case BUILT_IN_LCEILF:
+ case BUILT_IN_LCEILL:
+ case BUILT_IN_LLCEIL:
+ case BUILT_IN_LLCEILF:
+ case BUILT_IN_LLCEILL:
+ case BUILT_IN_LFLOOR:
+ case BUILT_IN_LFLOORF:
+ case BUILT_IN_LFLOORL:
+ case BUILT_IN_LLFLOOR:
+ case BUILT_IN_LLFLOORF:
+ case BUILT_IN_LLFLOORL:
+ target = expand_builtin_int_roundingfn (exp, target, subtarget);
+ if (target)
+ return target;
+ break;
+
case BUILT_IN_POW:
case BUILT_IN_POWF:
case BUILT_IN_POWL:
break;
case BUILT_IN_MEMMOVE:
- target = expand_builtin_memmove (arglist, TREE_TYPE (exp), target, mode);
+ target = expand_builtin_memmove (arglist, TREE_TYPE (exp), target,
+ mode, exp);
if (target)
return target;
break;
case BUILT_IN_BCOPY:
- target = expand_builtin_bcopy (arglist, TREE_TYPE (exp));
+ target = expand_builtin_bcopy (exp);
if (target)
return target;
break;
case BUILT_IN_MEMSET:
- target = expand_builtin_memset (arglist, target, mode);
+ target = expand_builtin_memset (arglist, target, mode, exp);
if (target)
return target;
break;
case BUILT_IN_BZERO:
- target = expand_builtin_bzero (arglist);
+ target = expand_builtin_bzero (exp);
if (target)
return target;
break;
return const0_rtx;
case BUILT_IN_PRINTF:
- target = expand_builtin_printf (arglist, target, mode, false);
+ target = expand_builtin_printf (exp, target, mode, false);
if (target)
return target;
break;
case BUILT_IN_PRINTF_UNLOCKED:
- target = expand_builtin_printf (arglist, target, mode, true);
+ target = expand_builtin_printf (exp, target, mode, true);
if (target)
return target;
break;
break;
case BUILT_IN_FPRINTF:
- target = expand_builtin_fprintf (arglist, target, mode, false);
+ target = expand_builtin_fprintf (exp, target, mode, false);
if (target)
return target;
break;
case BUILT_IN_FPRINTF_UNLOCKED:
- target = expand_builtin_fprintf (arglist, target, mode, true);
+ target = expand_builtin_fprintf (exp, target, mode, true);
if (target)
return target;
break;
return target;
break;
+ case BUILT_IN_FETCH_AND_ADD_1:
+ case BUILT_IN_FETCH_AND_ADD_2:
+ case BUILT_IN_FETCH_AND_ADD_4:
+ case BUILT_IN_FETCH_AND_ADD_8:
+ target = expand_builtin_sync_operation (arglist, PLUS,
+ false, target, ignore);
+ if (target)
+ return target;
+ break;
+
+ case BUILT_IN_FETCH_AND_SUB_1:
+ case BUILT_IN_FETCH_AND_SUB_2:
+ case BUILT_IN_FETCH_AND_SUB_4:
+ case BUILT_IN_FETCH_AND_SUB_8:
+ target = expand_builtin_sync_operation (arglist, MINUS,
+ false, target, ignore);
+ if (target)
+ return target;
+ break;
+
+ case BUILT_IN_FETCH_AND_OR_1:
+ case BUILT_IN_FETCH_AND_OR_2:
+ case BUILT_IN_FETCH_AND_OR_4:
+ case BUILT_IN_FETCH_AND_OR_8:
+ target = expand_builtin_sync_operation (arglist, IOR,
+ false, target, ignore);
+ if (target)
+ return target;
+ break;
+
+ case BUILT_IN_FETCH_AND_AND_1:
+ case BUILT_IN_FETCH_AND_AND_2:
+ case BUILT_IN_FETCH_AND_AND_4:
+ case BUILT_IN_FETCH_AND_AND_8:
+ target = expand_builtin_sync_operation (arglist, AND,
+ false, target, ignore);
+ if (target)
+ return target;
+ break;
+
+ case BUILT_IN_FETCH_AND_XOR_1:
+ case BUILT_IN_FETCH_AND_XOR_2:
+ case BUILT_IN_FETCH_AND_XOR_4:
+ case BUILT_IN_FETCH_AND_XOR_8:
+ target = expand_builtin_sync_operation (arglist, XOR,
+ false, target, ignore);
+ if (target)
+ return target;
+ break;
+
+ case BUILT_IN_FETCH_AND_NAND_1:
+ case BUILT_IN_FETCH_AND_NAND_2:
+ case BUILT_IN_FETCH_AND_NAND_4:
+ case BUILT_IN_FETCH_AND_NAND_8:
+ target = expand_builtin_sync_operation (arglist, NOT,
+ false, target, ignore);
+ if (target)
+ return target;
+ break;
+
+ case BUILT_IN_ADD_AND_FETCH_1:
+ case BUILT_IN_ADD_AND_FETCH_2:
+ case BUILT_IN_ADD_AND_FETCH_4:
+ case BUILT_IN_ADD_AND_FETCH_8:
+ target = expand_builtin_sync_operation (arglist, PLUS,
+ true, target, ignore);
+ if (target)
+ return target;
+ break;
+
+ case BUILT_IN_SUB_AND_FETCH_1:
+ case BUILT_IN_SUB_AND_FETCH_2:
+ case BUILT_IN_SUB_AND_FETCH_4:
+ case BUILT_IN_SUB_AND_FETCH_8:
+ target = expand_builtin_sync_operation (arglist, MINUS,
+ true, target, ignore);
+ if (target)
+ return target;
+ break;
+
+ case BUILT_IN_OR_AND_FETCH_1:
+ case BUILT_IN_OR_AND_FETCH_2:
+ case BUILT_IN_OR_AND_FETCH_4:
+ case BUILT_IN_OR_AND_FETCH_8:
+ target = expand_builtin_sync_operation (arglist, IOR,
+ true, target, ignore);
+ if (target)
+ return target;
+ break;
+
+ case BUILT_IN_AND_AND_FETCH_1:
+ case BUILT_IN_AND_AND_FETCH_2:
+ case BUILT_IN_AND_AND_FETCH_4:
+ case BUILT_IN_AND_AND_FETCH_8:
+ target = expand_builtin_sync_operation (arglist, AND,
+ true, target, ignore);
+ if (target)
+ return target;
+ break;
+
+ case BUILT_IN_XOR_AND_FETCH_1:
+ case BUILT_IN_XOR_AND_FETCH_2:
+ case BUILT_IN_XOR_AND_FETCH_4:
+ case BUILT_IN_XOR_AND_FETCH_8:
+ target = expand_builtin_sync_operation (arglist, XOR,
+ true, target, ignore);
+ if (target)
+ return target;
+ break;
+
+ case BUILT_IN_NAND_AND_FETCH_1:
+ case BUILT_IN_NAND_AND_FETCH_2:
+ case BUILT_IN_NAND_AND_FETCH_4:
+ case BUILT_IN_NAND_AND_FETCH_8:
+ target = expand_builtin_sync_operation (arglist, NOT,
+ true, target, ignore);
+ if (target)
+ return target;
+ break;
+
+ case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
+ case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
+ case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
+ case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
+ if (mode == VOIDmode)
+ mode = TYPE_MODE (boolean_type_node);
+ if (!target || !register_operand (target, mode))
+ target = gen_reg_rtx (mode);
+ target = expand_builtin_compare_and_swap (arglist, true, target);
+ if (target)
+ return target;
+ break;
+
+ case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
+ case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
+ case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
+ case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
+ target = expand_builtin_compare_and_swap (arglist, false, target);
+ if (target)
+ return target;
+ break;
+
+ case BUILT_IN_LOCK_TEST_AND_SET_1:
+ case BUILT_IN_LOCK_TEST_AND_SET_2:
+ case BUILT_IN_LOCK_TEST_AND_SET_4:
+ case BUILT_IN_LOCK_TEST_AND_SET_8:
+ target = expand_builtin_lock_test_and_set (arglist, target);
+ if (target)
+ return target;
+ break;
+
+ case BUILT_IN_LOCK_RELEASE_1:
+ case BUILT_IN_LOCK_RELEASE_2:
+ case BUILT_IN_LOCK_RELEASE_4:
+ case BUILT_IN_LOCK_RELEASE_8:
+ expand_builtin_lock_release (arglist);
+ return const0_rtx;
+
+ case BUILT_IN_SYNCHRONIZE:
+ expand_builtin_synchronize ();
+ return const0_rtx;
+
default: /* just do library call, if unknown builtin */
break;
}
/* If we know this is a constant, emit the constant of one. */
if (CONSTANT_CLASS_P (arglist)
|| (TREE_CODE (arglist) == CONSTRUCTOR
- && TREE_CONSTANT (arglist))
- || (TREE_CODE (arglist) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (arglist, 0)) == STRING_CST))
+ && TREE_CONSTANT (arglist)))
return integer_one_node;
+ if (TREE_CODE (arglist) == ADDR_EXPR)
+ {
+ tree op = TREE_OPERAND (arglist, 0);
+ if (TREE_CODE (op) == STRING_CST
+ || (TREE_CODE (op) == ARRAY_REF
+ && integer_zerop (TREE_OPERAND (op, 1))
+ && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
+ return integer_one_node;
+ }
/* If this expression has side effects, show we don't know it to be a
constant. Likewise if it's a pointer or aggregate type since in
Do the transformation. */
static tree
-fold_trunc_transparent_mathfn (tree exp)
+fold_trunc_transparent_mathfn (tree fndecl, tree arglist)
{
- tree fndecl = get_callee_fndecl (exp);
- tree arglist = TREE_OPERAND (exp, 1);
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
tree arg;
the argument, for instance lround((double)f) -> lroundf (f). */
static tree
-fold_fixed_mathfn (tree exp)
+fold_fixed_mathfn (tree fndecl, tree arglist)
{
- tree fndecl = get_callee_fndecl (exp);
- tree arglist = TREE_OPERAND (exp, 1);
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
tree arg;
/* If argument is already integer valued, and we don't need to worry
about setting errno, there's no need to perform rounding. */
if (! flag_errno_math && integer_valued_real_p (arg))
- return fold (build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg));
+ return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
if (optimize)
{
/* If either part is zero, cabs is fabs of the other. */
if (TREE_CODE (arg) == COMPLEX_EXPR
&& real_zerop (TREE_OPERAND (arg, 0)))
- return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 1)));
+ return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg, 1));
if (TREE_CODE (arg) == COMPLEX_EXPR
&& real_zerop (TREE_OPERAND (arg, 1)))
- return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 0)));
+ return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg, 0));
/* Don't do this when optimizing for size. */
if (flag_unsafe_math_optimizations
arg = builtin_save_expr (arg);
- rpart = fold (build1 (REALPART_EXPR, type, arg));
- ipart = fold (build1 (IMAGPART_EXPR, type, arg));
+ rpart = fold_build1 (REALPART_EXPR, type, arg);
+ ipart = fold_build1 (IMAGPART_EXPR, type, arg);
rpart = builtin_save_expr (rpart);
ipart = builtin_save_expr (ipart);
- result = fold (build2 (PLUS_EXPR, type,
- fold (build2 (MULT_EXPR, type,
- rpart, rpart)),
- fold (build2 (MULT_EXPR, type,
- ipart, ipart))));
+ result = fold_build2 (PLUS_EXPR, type,
+ fold_build2 (MULT_EXPR, type,
+ rpart, rpart),
+ fold_build2 (MULT_EXPR, type,
+ ipart, ipart));
arglist = build_tree_list (NULL_TREE, result);
return build_function_call_expr (sqrtfn, arglist);
if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
{
tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
- arg = fold (build2 (MULT_EXPR, type,
- TREE_VALUE (TREE_OPERAND (arg, 1)),
- build_real (type, dconsthalf)));
+ arg = fold_build2 (MULT_EXPR, type,
+ TREE_VALUE (TREE_OPERAND (arg, 1)),
+ build_real (type, dconsthalf));
arglist = build_tree_list (NULL_TREE, arg);
return build_function_call_expr (expfn, arglist);
}
tree narg1;
if (!tree_expr_nonnegative_p (arg0))
arg0 = build1 (ABS_EXPR, type, arg0);
- narg1 = fold (build2 (MULT_EXPR, type, arg1,
- build_real (type, dconsthalf)));
+ narg1 = fold_build2 (MULT_EXPR, type, arg1,
+ build_real (type, dconsthalf));
arglist = tree_cons (NULL_TREE, arg0,
build_tree_list (NULL_TREE, narg1));
return build_function_call_expr (powfn, arglist);
tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
const REAL_VALUE_TYPE third_trunc =
real_value_truncate (TYPE_MODE (type), dconstthird);
- arg = fold (build2 (MULT_EXPR, type,
- TREE_VALUE (TREE_OPERAND (arg, 1)),
- build_real (type, third_trunc)));
+ arg = fold_build2 (MULT_EXPR, type,
+ TREE_VALUE (TREE_OPERAND (arg, 1)),
+ build_real (type, third_trunc));
arglist = build_tree_list (NULL_TREE, arg);
return build_function_call_expr (expfn, arglist);
}
tree powfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
const REAL_VALUE_TYPE dconstroot
= real_value_truncate (TYPE_MODE (type), dconstthird);
- tree narg01 = fold (build2 (MULT_EXPR, type, arg01,
- build_real (type, dconstroot)));
+ tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
+ build_real (type, dconstroot));
arglist = tree_cons (NULL_TREE, arg00,
build_tree_list (NULL_TREE, narg01));
return build_function_call_expr (powfn, arglist);
NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_trunc (tree exp)
+fold_builtin_trunc (tree fndecl, tree arglist)
{
- tree fndecl = get_callee_fndecl (exp);
- tree arglist = TREE_OPERAND (exp, 1);
tree arg;
if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
return build_real (type, r);
}
- return fold_trunc_transparent_mathfn (exp);
+ return fold_trunc_transparent_mathfn (fndecl, arglist);
}
/* Fold function call to builtin floor, floorf or floorl. Return
NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_floor (tree exp)
+fold_builtin_floor (tree fndecl, tree arglist)
{
- tree fndecl = get_callee_fndecl (exp);
- tree arglist = TREE_OPERAND (exp, 1);
tree arg;
if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
}
}
- return fold_trunc_transparent_mathfn (exp);
+ return fold_trunc_transparent_mathfn (fndecl, arglist);
}
/* Fold function call to builtin ceil, ceilf or ceill. Return
NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_ceil (tree exp)
+fold_builtin_ceil (tree fndecl, tree arglist)
{
- tree fndecl = get_callee_fndecl (exp);
- tree arglist = TREE_OPERAND (exp, 1);
tree arg;
if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
}
}
- return fold_trunc_transparent_mathfn (exp);
+ return fold_trunc_transparent_mathfn (fndecl, arglist);
}
/* Fold function call to builtin round, roundf or roundl. Return
NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_round (tree exp)
+fold_builtin_round (tree fndecl, tree arglist)
{
- tree fndecl = get_callee_fndecl (exp);
- tree arglist = TREE_OPERAND (exp, 1);
tree arg;
if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
}
}
- return fold_trunc_transparent_mathfn (exp);
+ return fold_trunc_transparent_mathfn (fndecl, arglist);
}
/* Fold function call to builtin lround, lroundf or lroundl (or the
- corresponding long long versions). Return NULL_TREE if no
- simplification can be made. */
+ corresponding long long versions) and other rounding functions.
+ Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_lround (tree exp)
+fold_builtin_int_roundingfn (tree fndecl, tree arglist)
{
- tree fndecl = get_callee_fndecl (exp);
- tree arglist = TREE_OPERAND (exp, 1);
tree arg;
if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
HOST_WIDE_INT hi, lo;
REAL_VALUE_TYPE r;
- real_round (&r, TYPE_MODE (ftype), &x);
+ switch (DECL_FUNCTION_CODE (fndecl))
+ {
+ case BUILT_IN_LFLOOR:
+ case BUILT_IN_LFLOORF:
+ case BUILT_IN_LFLOORL:
+ case BUILT_IN_LLFLOOR:
+ case BUILT_IN_LLFLOORF:
+ case BUILT_IN_LLFLOORL:
+ real_floor (&r, TYPE_MODE (ftype), &x);
+ break;
+
+ case BUILT_IN_LCEIL:
+ case BUILT_IN_LCEILF:
+ case BUILT_IN_LCEILL:
+ case BUILT_IN_LLCEIL:
+ case BUILT_IN_LLCEILF:
+ case BUILT_IN_LLCEILL:
+ real_ceil (&r, TYPE_MODE (ftype), &x);
+ break;
+
+ case BUILT_IN_LROUND:
+ case BUILT_IN_LROUNDF:
+ case BUILT_IN_LROUNDL:
+ case BUILT_IN_LLROUND:
+ case BUILT_IN_LLROUNDF:
+ case BUILT_IN_LLROUNDL:
+ real_round (&r, TYPE_MODE (ftype), &x);
+ break;
+
+ default:
+ gcc_unreachable ();
+ }
+
REAL_VALUE_TO_INT (&lo, &hi, r);
result = build_int_cst_wide (NULL_TREE, lo, hi);
if (int_fits_type_p (result, itype))
}
}
- return fold_fixed_mathfn (exp);
+ return fold_fixed_mathfn (fndecl, arglist);
}
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_bitop (tree exp)
+fold_builtin_bitop (tree fndecl, tree arglist)
{
- tree fndecl = get_callee_fndecl (exp);
- tree arglist = TREE_OPERAND (exp, 1);
tree arg;
if (! validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
tree logfn;
arglist = build_tree_list (NULL_TREE, x);
logfn = build_function_call_expr (fndecl, arglist);
- return fold (build2 (MULT_EXPR, type, exponent, logfn));
+ return fold_build2 (MULT_EXPR, type, exponent, logfn);
}
}
}
/* Optimize pow(x,-1.0) = 1.0/x. */
if (REAL_VALUES_EQUAL (c, dconstm1))
- return fold (build2 (RDIV_EXPR, type,
- build_real (type, dconst1), arg0));
+ return fold_build2 (RDIV_EXPR, type,
+ build_real (type, dconst1), arg0);
/* Optimize pow(x,0.5) = sqrt(x). */
if (flag_unsafe_math_optimizations
{
tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
- arg = fold (build2 (MULT_EXPR, type, arg, arg1));
+ arg = fold_build2 (MULT_EXPR, type, arg, arg1);
arglist = build_tree_list (NULL_TREE, arg);
return build_function_call_expr (expfn, arglist);
}
if (BUILTIN_SQRT_P (fcode))
{
tree narg0 = TREE_VALUE (TREE_OPERAND (arg0, 1));
- tree narg1 = fold (build2 (MULT_EXPR, type, arg1,
- build_real (type, dconsthalf)));
+ tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
+ build_real (type, dconsthalf));
arglist = tree_cons (NULL_TREE, narg0,
build_tree_list (NULL_TREE, narg1));
{
const REAL_VALUE_TYPE dconstroot
= real_value_truncate (TYPE_MODE (type), dconstthird);
- tree narg1 = fold (build2 (MULT_EXPR, type, arg1,
- build_real (type, dconstroot)));
+ tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
+ build_real (type, dconstroot));
arglist = tree_cons (NULL_TREE, arg,
build_tree_list (NULL_TREE, narg1));
return build_function_call_expr (fndecl, arglist);
{
tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
- tree narg1 = fold (build2 (MULT_EXPR, type, arg01, arg1));
+ tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
arglist = tree_cons (NULL_TREE, arg00,
build_tree_list (NULL_TREE, narg1));
return build_function_call_expr (fndecl, arglist);
/* Optimize pow(x,-1) = 1.0/x. */
if (c == -1)
- return fold (build2 (RDIV_EXPR, type,
- build_real (type, dconst1), arg0));
+ return fold_build2 (RDIV_EXPR, type,
+ build_real (type, dconst1), arg0);
}
return NULL_TREE;
NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_memcpy (tree exp)
+fold_builtin_memcpy (tree fndecl, tree arglist)
{
- tree fndecl = get_callee_fndecl (exp);
- tree arglist = TREE_OPERAND (exp, 1);
tree dest, src, len;
if (!validate_arglist (arglist,
return omit_one_operand (type, dest, len);
if (endp == 2)
- len = fold (build2 (MINUS_EXPR, TREE_TYPE (len), len,
- ssize_int (1)));
+ len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
+ ssize_int (1));
len = fold_convert (TREE_TYPE (dest), len);
- len = fold (build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len));
+ len = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len);
return fold_convert (type, len);
}
}
build1 (INDIRECT_REF, cst_uchar_node,
fold_convert (cst_uchar_ptr_node,
arg2)));
- return fold (build2 (MINUS_EXPR, integer_type_node, ind1, ind2));
+ return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
}
return 0;
build1 (INDIRECT_REF, cst_uchar_node,
fold_convert (cst_uchar_ptr_node,
arg2)));
- return fold (build1 (NEGATE_EXPR, integer_type_node, temp));
+ return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
}
return 0;
build1 (INDIRECT_REF, cst_uchar_node,
fold_convert (cst_uchar_ptr_node,
arg2)));
- return fold (build1 (NEGATE_EXPR, integer_type_node, temp));
+ return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
}
/* If len parameter is one, return an expression corresponding to
build1 (INDIRECT_REF, cst_uchar_node,
fold_convert (cst_uchar_ptr_node,
arg2)));
- return fold (build2 (MINUS_EXPR, integer_type_node, ind1, ind2));
+ return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
}
return 0;
NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_signbit (tree exp)
+fold_builtin_signbit (tree fndecl, tree arglist)
{
- tree fndecl = get_callee_fndecl (exp);
- tree arglist = TREE_OPERAND (exp, 1);
tree type = TREE_TYPE (TREE_TYPE (fndecl));
tree arg, temp;
/* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
- return fold (build2 (LT_EXPR, type, arg,
- build_real (TREE_TYPE (arg), dconst0)));
+ return fold_build2 (LT_EXPR, type, arg,
+ build_real (TREE_TYPE (arg), dconst0));
return NULL_TREE;
}
Remember to evaluate Y for side-effects. */
if (tree_expr_nonnegative_p (arg2))
return omit_one_operand (type,
- fold (build1 (ABS_EXPR, type, arg1)),
+ fold_build1 (ABS_EXPR, type, arg1),
arg2);
/* Strip sign changing operations for the first argument. */
arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
build_int_cst (NULL_TREE,
~ (unsigned HOST_WIDE_INT) 0x7f));
- arg = fold (build2 (EQ_EXPR, integer_type_node,
- arg, integer_zero_node));
+ arg = fold_build2 (EQ_EXPR, integer_type_node,
+ arg, integer_zero_node);
if (in_gimple_form && !TREE_CONSTANT (arg))
return NULL_TREE;
/* Transform toascii(c) -> (c & 0x7f). */
tree arg = TREE_VALUE (arglist);
- return fold (build2 (BIT_AND_EXPR, integer_type_node, arg,
- build_int_cst (NULL_TREE, 0x7f)));
+ return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
+ build_int_cst (NULL_TREE, 0x7f));
}
}
arg = fold_convert (type, arg);
if (TREE_CODE (arg) == REAL_CST)
return fold_abs_const (arg, type);
- return fold (build1 (ABS_EXPR, type, arg));
+ return fold_build1 (ABS_EXPR, type, arg);
}
/* Fold a call to abs, labs, llabs or imaxabs. */
arg = fold_convert (type, arg);
if (TREE_CODE (arg) == INTEGER_CST)
return fold_abs_const (arg, type);
- return fold (build1 (ABS_EXPR, type, arg));
+ return fold_build1 (ABS_EXPR, type, arg);
}
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
}
arg = builtin_save_expr (arg);
- return fold (build2 (UNORDERED_EXPR, type, arg, arg));
+ return fold_build2 (UNORDERED_EXPR, type, arg, arg);
default:
gcc_unreachable ();
{
if (!MODE_HAS_NANS (TYPE_MODE (TREE_TYPE (arg0))))
return omit_two_operands (type, integer_zero_node, arg0, arg1);
- return fold (build2 (UNORDERED_EXPR, type, arg0, arg1));
+ return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
}
code = MODE_HAS_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
: ordered_code;
- return fold (build1 (TRUTH_NOT_EXPR, type,
- fold (build2 (code, type, arg0, arg1))));
-}
-
-/* Fold a call to one of the external complex multiply libcalls. */
-
-static tree
-fold_builtin_complex_mul (tree type, tree arglist)
-{
- tree ar, ai, br, bi;
-
- if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, REAL_TYPE,
- REAL_TYPE, VOID_TYPE))
- return NULL;
-
- ar = TREE_VALUE (arglist); arglist = TREE_CHAIN (arglist);
- ai = TREE_VALUE (arglist); arglist = TREE_CHAIN (arglist);
- br = TREE_VALUE (arglist); arglist = TREE_CHAIN (arglist);
- bi = TREE_VALUE (arglist);
-
- return fold_complex_mult_parts (type, ar, ai, br, bi);
-}
-
-/* Fold a call to one of the external complex division libcalls. */
-
-static tree
-fold_builtin_complex_div (tree type, tree arglist)
-{
- tree ar, ai, br, bi;
-
- if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, REAL_TYPE,
- REAL_TYPE, VOID_TYPE))
- return NULL;
-
- ar = TREE_VALUE (arglist); arglist = TREE_CHAIN (arglist);
- ai = TREE_VALUE (arglist); arglist = TREE_CHAIN (arglist);
- br = TREE_VALUE (arglist); arglist = TREE_CHAIN (arglist);
- bi = TREE_VALUE (arglist);
-
- return fold_complex_div_parts (type, ar, ai, br, bi, RDIV_EXPR);
+ return fold_build1 (TRUTH_NOT_EXPR, type,
+ fold_build2 (code, type, arg0, arg1));
}
/* Used by constant folding to simplify calls to builtin functions. EXP is
if no simplification was possible. */
static tree
-fold_builtin_1 (tree exp, bool ignore)
+fold_builtin_1 (tree fndecl, tree arglist, bool ignore)
{
- tree fndecl = get_callee_fndecl (exp);
- tree arglist = TREE_OPERAND (exp, 1);
tree type = TREE_TYPE (TREE_TYPE (fndecl));
enum built_in_function fcode;
if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
- return targetm.fold_builtin (exp, ignore);
+ return targetm.fold_builtin (fndecl, arglist, ignore);
fcode = DECL_FUNCTION_CODE (fndecl);
switch (fcode)
case BUILT_IN_CONJF:
case BUILT_IN_CONJL:
if (validate_arglist (arglist, COMPLEX_TYPE, VOID_TYPE))
- return fold (build1 (CONJ_EXPR, type, TREE_VALUE (arglist)));
+ return fold_build1 (CONJ_EXPR, type, TREE_VALUE (arglist));
break;
case BUILT_IN_CREAL:
case BUILT_IN_CREALF:
case BUILT_IN_CREALL:
if (validate_arglist (arglist, COMPLEX_TYPE, VOID_TYPE))
- return non_lvalue (fold (build1 (REALPART_EXPR, type,
- TREE_VALUE (arglist))));
+ return non_lvalue (fold_build1 (REALPART_EXPR, type,
+ TREE_VALUE (arglist)));
break;
case BUILT_IN_CIMAG:
case BUILT_IN_CIMAGF:
case BUILT_IN_CIMAGL:
if (validate_arglist (arglist, COMPLEX_TYPE, VOID_TYPE))
- return non_lvalue (fold (build1 (IMAGPART_EXPR, type,
- TREE_VALUE (arglist))));
+ return non_lvalue (fold_build1 (IMAGPART_EXPR, type,
+ TREE_VALUE (arglist)));
break;
case BUILT_IN_CABS:
case BUILT_IN_FLOOR:
case BUILT_IN_FLOORF:
case BUILT_IN_FLOORL:
- return fold_builtin_floor (exp);
+ return fold_builtin_floor (fndecl, arglist);
case BUILT_IN_CEIL:
case BUILT_IN_CEILF:
case BUILT_IN_CEILL:
- return fold_builtin_ceil (exp);
+ return fold_builtin_ceil (fndecl, arglist);
case BUILT_IN_TRUNC:
case BUILT_IN_TRUNCF:
case BUILT_IN_TRUNCL:
- return fold_builtin_trunc (exp);
+ return fold_builtin_trunc (fndecl, arglist);
case BUILT_IN_ROUND:
case BUILT_IN_ROUNDF:
case BUILT_IN_ROUNDL:
- return fold_builtin_round (exp);
+ return fold_builtin_round (fndecl, arglist);
case BUILT_IN_NEARBYINT:
case BUILT_IN_NEARBYINTF:
case BUILT_IN_RINT:
case BUILT_IN_RINTF:
case BUILT_IN_RINTL:
- return fold_trunc_transparent_mathfn (exp);
-
+ return fold_trunc_transparent_mathfn (fndecl, arglist);
+
+ case BUILT_IN_LCEIL:
+ case BUILT_IN_LCEILF:
+ case BUILT_IN_LCEILL:
+ case BUILT_IN_LLCEIL:
+ case BUILT_IN_LLCEILF:
+ case BUILT_IN_LLCEILL:
+ case BUILT_IN_LFLOOR:
+ case BUILT_IN_LFLOORF:
+ case BUILT_IN_LFLOORL:
+ case BUILT_IN_LLFLOOR:
+ case BUILT_IN_LLFLOORF:
+ case BUILT_IN_LLFLOORL:
case BUILT_IN_LROUND:
case BUILT_IN_LROUNDF:
case BUILT_IN_LROUNDL:
case BUILT_IN_LLROUND:
case BUILT_IN_LLROUNDF:
case BUILT_IN_LLROUNDL:
- return fold_builtin_lround (exp);
+ return fold_builtin_int_roundingfn (fndecl, arglist);
case BUILT_IN_LRINT:
case BUILT_IN_LRINTF:
case BUILT_IN_LLRINT:
case BUILT_IN_LLRINTF:
case BUILT_IN_LLRINTL:
- return fold_fixed_mathfn (exp);
+ return fold_fixed_mathfn (fndecl, arglist);
case BUILT_IN_FFS:
case BUILT_IN_FFSL:
case BUILT_IN_PARITY:
case BUILT_IN_PARITYL:
case BUILT_IN_PARITYLL:
- return fold_builtin_bitop (exp);
+ return fold_builtin_bitop (fndecl, arglist);
case BUILT_IN_MEMCPY:
- return fold_builtin_memcpy (exp);
+ return fold_builtin_memcpy (fndecl, arglist);
case BUILT_IN_MEMPCPY:
return fold_builtin_mempcpy (arglist, type, /*endp=*/1);
case BUILT_IN_SIGNBIT:
case BUILT_IN_SIGNBITF:
case BUILT_IN_SIGNBITL:
- return fold_builtin_signbit (exp);
+ return fold_builtin_signbit (fndecl, arglist);
case BUILT_IN_ISASCII:
return fold_builtin_isascii (arglist);
break;
default:
- if (fcode >= BUILT_IN_COMPLEX_MUL_MIN
- && fcode <= BUILT_IN_COMPLEX_MUL_MAX)
- return fold_builtin_complex_mul (type, arglist);
- if (fcode >= BUILT_IN_COMPLEX_DIV_MIN
- && fcode <= BUILT_IN_COMPLEX_DIV_MAX)
- return fold_builtin_complex_div (type, arglist);
break;
}
call node earlier than the warning is generated. */
tree
-fold_builtin (tree exp, bool ignore)
+fold_builtin (tree fndecl, tree arglist, bool ignore)
{
- exp = fold_builtin_1 (exp, ignore);
+ tree exp = fold_builtin_1 (fndecl, arglist, ignore);
if (exp)
{
/* ??? Don't clobber shared nodes such as integer_zero_node. */
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold (build2 (PLUS_EXPR, TREE_TYPE (s1),
- s1, build_int_cst (TREE_TYPE (s1), r - p1)));
+ tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
+ s1, build_int_cst (TREE_TYPE (s1), r - p1));
return fold_convert (type, tem);
}
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold (build2 (PLUS_EXPR, TREE_TYPE (s1),
- s1, build_int_cst (TREE_TYPE (s1), r - p1)));
+ tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
+ s1, build_int_cst (TREE_TYPE (s1), r - p1));
return fold_convert (type, tem);
}
return 0;
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold (build2 (PLUS_EXPR, TREE_TYPE (s1),
- s1, build_int_cst (TREE_TYPE (s1), r - p1)));
+ tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
+ s1, build_int_cst (TREE_TYPE (s1), r - p1));
return fold_convert (type, tem);
}
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold (build2 (PLUS_EXPR, TREE_TYPE (s1),
- s1, build_int_cst (TREE_TYPE (s1), r - p1)));
+ tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
+ s1, build_int_cst (TREE_TYPE (s1), r - p1));
return fold_convert (type, tem);
}
{
/* Evidently an out of date version of <stdarg.h>; can't validate
va_start's second argument, but can still work as intended. */
- warning ("%<__builtin_next_arg%> called without an argument");
+ warning (0, "%<__builtin_next_arg%> called without an argument");
return true;
}
/* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
argument. We just warn and set the arg to be the last
argument so that we will get wrong-code because of
it. */
- warning ("second parameter of %<va_start%> not last named argument");
+ warning (0, "second parameter of %<va_start%> not last named argument");
}
/* We want to verify the second parameter just once before the tree
optimizers are run and then avoid keeping it in the tree,