/* Expand builtin functions.
Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
- 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
+ 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
This file is part of GCC.
#define CALLED_AS_BUILT_IN(NODE) \
(!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
-/* Register mappings for target machines without register windows. */
-#ifndef INCOMING_REGNO
-#define INCOMING_REGNO(OUT) (OUT)
-#endif
-#ifndef OUTGOING_REGNO
-#define OUTGOING_REGNO(IN) (IN)
-#endif
-
#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
+static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_constant_p (tree, enum machine_mode);
static rtx expand_builtin_args_info (tree);
static rtx expand_builtin_next_arg (tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_cabs (tree, rtx);
+static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_cabs (tree, tree, tree);
static tree fold_builtin_trunc (tree);
static tree fold_builtin_floor (tree);
static tree fold_builtin_ceil (tree);
+static tree fold_builtin_round (tree);
static tree fold_builtin_bitop (tree);
static tree fold_builtin_memcpy (tree);
static tree fold_builtin_mempcpy (tree);
static tree fold_builtin_memcmp (tree);
static tree fold_builtin_strcmp (tree);
static tree fold_builtin_strncmp (tree);
+static tree fold_builtin_signbit (tree);
/* Return the alignment in bits of EXP, a pointer valued expression.
But don't return more than MAX_ALIGN no matter what.
len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
- if (tree_int_cst_equal (len1, len2))
+ if (tree_int_cst_equal (len1, len2))
return len1;
}
the buffer and use the rest of it for the stack save area, which
is machine-dependent. */
-#ifndef BUILTIN_SETJMP_FRAME_VALUE
-#define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
-#endif
-
mem = gen_rtx_MEM (Pmode, buf_addr);
set_mem_alias_set (mem, setjmp_alias_set);
- emit_move_insn (mem, BUILTIN_SETJMP_FRAME_VALUE);
+ emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
set_mem_alias_set (mem, setjmp_alias_set);
used for calling a function. */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
-/* Return the offset of register REGNO into the block returned by
- __builtin_apply_args. This is not declared static, since it is
- needed in objc-act.c. */
-
-int
-apply_args_register_offset (int regno)
-{
- apply_args_size ();
-
- /* Arguments are always put in outgoing registers (in the argument
- block) if such make sense. */
-#ifdef OUTGOING_REGNO
- regno = OUTGOING_REGNO (regno);
-#endif
- return apply_args_reg_offset[regno];
-}
-
/* Return the size required for the block returned by __builtin_apply_args,
and initialize apply_args_mode. */
for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
if (FUNCTION_ARG_REGNO_P (regno))
{
- /* Search for the proper mode for copying this register's
- value. I'm not sure this is right, but it works so far. */
- enum machine_mode best_mode = VOIDmode;
-
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
- if (HARD_REGNO_MODE_OK (regno, mode)
- && HARD_REGNO_NREGS (regno, mode) == 1)
- best_mode = mode;
-
- if (best_mode == VOIDmode)
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
- if (HARD_REGNO_MODE_OK (regno, mode)
- && have_insn_for (SET, mode))
- best_mode = mode;
-
- if (best_mode == VOIDmode)
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
- if (HARD_REGNO_MODE_OK (regno, mode)
- && have_insn_for (SET, mode))
- best_mode = mode;
-
- if (best_mode == VOIDmode)
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
- if (HARD_REGNO_MODE_OK (regno, mode)
- && have_insn_for (SET, mode))
- best_mode = mode;
-
- mode = best_mode;
+ mode = reg_raw_mode[regno];
+
if (mode == VOIDmode)
abort ();
for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
if (FUNCTION_VALUE_REGNO_P (regno))
{
- /* Search for the proper mode for copying this register's
- value. I'm not sure this is right, but it works so far. */
- enum machine_mode best_mode = VOIDmode;
-
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
- mode != TImode;
- mode = GET_MODE_WIDER_MODE (mode))
- if (HARD_REGNO_MODE_OK (regno, mode))
- best_mode = mode;
-
- if (best_mode == VOIDmode)
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
- if (HARD_REGNO_MODE_OK (regno, mode)
- && have_insn_for (SET, mode))
- best_mode = mode;
-
- if (best_mode == VOIDmode)
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
- if (HARD_REGNO_MODE_OK (regno, mode)
- && have_insn_for (SET, mode))
- best_mode = mode;
-
- if (best_mode == VOIDmode)
- for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
- mode != VOIDmode;
- mode = GET_MODE_WIDER_MODE (mode))
- if (HARD_REGNO_MODE_OK (regno, mode)
- && have_insn_for (SET, mode))
- best_mode = mode;
-
- mode = best_mode;
+ mode = reg_raw_mode[regno];
+
if (mode == VOIDmode)
abort ();
tem = copy_to_reg (virtual_incoming_args_rtx);
#ifdef STACK_GROWS_DOWNWARD
/* We need the pointer as the caller actually passed them to us, not
- as we might have pretended they were passed. */
- tem = plus_constant (tem, current_function_pretend_args_size);
+ as we might have pretended they were passed. Make sure it's a valid
+ operand, as emit_move_insn isn't expected to handle a PLUS. */
+ tem
+ = force_operand (plus_constant (tem, current_function_pretend_args_size),
+ NULL_RTX);
#endif
emit_move_insn (adjust_address (registers, Pmode, 0), tem);
-
+
size = GET_MODE_SIZE (Pmode);
/* Save the structure value address unless this is passed as an
}
-/* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
+/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
Return 0 if a normal call should be emitted rather than expanding the
function in-line. EXP is the expression that is a call to the builtin
function; if convenient, the result should be placed in TARGET.
switch (DECL_FUNCTION_CODE (fndecl))
{
- case BUILT_IN_SIN:
- case BUILT_IN_SINF:
- case BUILT_IN_SINL:
- builtin_optab = sin_optab; break;
- case BUILT_IN_COS:
- case BUILT_IN_COSF:
- case BUILT_IN_COSL:
- builtin_optab = cos_optab; break;
case BUILT_IN_SQRT:
case BUILT_IN_SQRTF:
case BUILT_IN_SQRTL:
case BUILT_IN_EXPF:
case BUILT_IN_EXPL:
errno_set = true; builtin_optab = exp_optab; break;
+ case BUILT_IN_EXP10:
+ case BUILT_IN_EXP10F:
+ case BUILT_IN_EXP10L:
+ case BUILT_IN_POW10:
+ case BUILT_IN_POW10F:
+ case BUILT_IN_POW10L:
+ errno_set = true; builtin_optab = exp10_optab; break;
+ case BUILT_IN_EXP2:
+ case BUILT_IN_EXP2F:
+ case BUILT_IN_EXP2L:
+ errno_set = true; builtin_optab = exp2_optab; break;
case BUILT_IN_LOG:
case BUILT_IN_LOGF:
case BUILT_IN_LOGL:
errno_set = true; builtin_optab = log_optab; break;
+ case BUILT_IN_LOG10:
+ case BUILT_IN_LOG10F:
+ case BUILT_IN_LOG10L:
+ errno_set = true; builtin_optab = log10_optab; break;
+ case BUILT_IN_LOG2:
+ case BUILT_IN_LOG2F:
+ case BUILT_IN_LOG2L:
+ errno_set = true; builtin_optab = log2_optab; break;
case BUILT_IN_TAN:
case BUILT_IN_TANF:
case BUILT_IN_TANL:
return target;
}
+/* Expand a call to the builtin sin and cos math functions.
+   Return 0 if a normal call should be emitted rather than expanding the
+   function in-line.  EXP is the expression that is a call to the builtin
+   function; if convenient, the result should be placed in TARGET.
+   SUBTARGET may be used as the target for computing one of EXP's
+   operands.  */
+
+static rtx
+expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
+{
+  optab builtin_optab;
+  rtx op0, insns;
+  tree fndecl = get_callee_fndecl (exp);
+  tree arglist = TREE_OPERAND (exp, 1);
+  enum machine_mode mode;
+  bool errno_set = false;
+  tree arg, narg;
+
+  if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+    return 0;
+
+  arg = TREE_VALUE (arglist);
+
+  switch (DECL_FUNCTION_CODE (fndecl))
+    {
+    case BUILT_IN_SIN:
+    case BUILT_IN_SINF:
+    case BUILT_IN_SINL:
+    case BUILT_IN_COS:
+    case BUILT_IN_COSF:
+    case BUILT_IN_COSL:
+      builtin_optab = sincos_optab; break;
+    default:
+      abort ();
+    }
+
+  /* Make a suitable register to place result in.  */
+  mode = TYPE_MODE (TREE_TYPE (exp));
+
+  if (! flag_errno_math || ! HONOR_NANS (mode))
+    errno_set = false;
+
+  /* Check if sincos insn is available, otherwise fall back
+     to sin or cos insn.  */
+  if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
+    {
+      switch (DECL_FUNCTION_CODE (fndecl))
+        {
+        case BUILT_IN_SIN:
+        case BUILT_IN_SINF:
+        case BUILT_IN_SINL:
+          builtin_optab = sin_optab; break;
+        case BUILT_IN_COS:
+        case BUILT_IN_COSF:
+        case BUILT_IN_COSL:
+          builtin_optab = cos_optab; break;
+        default:
+          abort ();
+        }
+    }
+
+  /* Before working hard, check whether the instruction is available.  */
+  if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
+    {
+      target = gen_reg_rtx (mode);
+
+      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
+         need to expand the argument again.  This way, we will not perform
+         side effects more than once.  */
+      narg = save_expr (arg);
+      if (narg != arg)
+        {
+          /* Use the stabilized argument from here on, both for the
+             in-line expansion below and for the library fallback call,
+             so the original ARG's side effects happen only once.  */
+          arglist = build_tree_list (NULL_TREE, narg);
+          exp = build_function_call_expr (fndecl, arglist);
+          arg = narg;
+        }
+
+      op0 = expand_expr (arg, subtarget, VOIDmode, 0);
+
+      emit_queue ();
+      start_sequence ();
+
+      /* Compute into TARGET.
+         Set TARGET to wherever the result comes back.  */
+      if (builtin_optab == sincos_optab)
+        {
+          /* The sincos pattern produces both results at once; pass 0 for
+             the result operand we do not need.  */
+          switch (DECL_FUNCTION_CODE (fndecl))
+            {
+            case BUILT_IN_SIN:
+            case BUILT_IN_SINF:
+            case BUILT_IN_SINL:
+              if (! expand_twoval_unop (builtin_optab, 0, target, op0, 0))
+                abort ();
+              break;
+            case BUILT_IN_COS:
+            case BUILT_IN_COSF:
+            case BUILT_IN_COSL:
+              if (! expand_twoval_unop (builtin_optab, target, 0, op0, 0))
+                abort ();
+              break;
+            default:
+              abort ();
+            }
+        }
+      else
+        target = expand_unop (mode, builtin_optab, op0, target, 0);
+
+      if (target != 0)
+        {
+          if (errno_set)
+            expand_errno_check (exp, target);
+
+          /* Output the entire sequence.  */
+          insns = get_insns ();
+          end_sequence ();
+          emit_insn (insns);
+          return target;
+        }
+
+      /* If we were unable to expand via the builtin, stop the sequence
+         (without outputting the insns) and call to the library function
+         with the stabilized argument list.  */
+      end_sequence ();
+    }
+
+  target = expand_call (exp, target, target == const0_rtx);
+
+  return target;
+}
+
+
/* To evaluate powi(x,n), the floating point value x raised to the
constant integer exponent n, we use a hybrid algorithm that
combines the "window method" with look-up tables. For an
multiplications to inline before calling the system library's pow
function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
so this default never requires calling pow, powf or powl. */
-
+
#ifndef POWI_MAX_MULTS
#define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
#endif
result++;
}
}
-
+
return result + powi_lookup_cost (val, cache);
}
/* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
floating point operand in mode MODE, and N is the exponent. This
function needs to be kept in sync with powi_cost above. */
-
+
static rtx
expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
{
}
}
}
- return expand_builtin_mathfn_2 (exp, target, NULL_RTX);
+
+ if (! flag_unsafe_math_optimizations)
+ return NULL_RTX;
+ return expand_builtin_mathfn_2 (exp, target, subtarget);
}
/* Expand expression EXP which is a call to the strlen builtin. Return 0
/* If LEN is not constant, call the normal function. */
if (! host_integerp (len, 1))
return 0;
-
+
/* If the LEN parameter is zero, return DEST. */
if (tree_low_cst (len, 1) == 0)
{
insn_mode = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
else
#endif
- return 0;
+ return 0;
/* If we don't have POINTER_TYPE, call the function. */
if (arg1_align == 0 || arg2_align == 0)
XEXP (arg1_rtx, 0), Pmode,
XEXP (arg2_rtx, 0), Pmode,
convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
- TREE_UNSIGNED (sizetype)),
+ TYPE_UNSIGNED (sizetype)),
TYPE_MODE (sizetype));
/* Return the value in the proper mode for this function. */
pieces into (dst + strlen(dst)). */
tree newdst, arglist,
strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
-
+
/* This is the length argument. */
arglist = build_tree_list (NULL_TREE,
fold (size_binop (PLUS_EXPR,
ssize_int (1))));
/* Prepend src argument. */
arglist = tree_cons (NULL_TREE, src, arglist);
-
+
/* We're going to use dst more than once. */
dst = save_expr (dst);
/* Prepend the new dst argument. */
arglist = tree_cons (NULL_TREE, newdst, arglist);
-
+
/* We don't want to get turned into a memcpy if the
target is const0_rtx, i.e. when the return value
isn't used. That would produce pessimized code so
tree align, alignm1;
tree rounded_size;
rtx addr;
+ HOST_WIDE_INT boundary;
/* Compute the rounded size of the type. */
align = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
alignm1 = size_int (PARM_BOUNDARY / BITS_PER_UNIT - 1);
+ boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
+
+ /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
+ requires greater alignment, we must perform dynamic alignment. */
+
+ if (boundary > PARM_BOUNDARY)
+ {
+ if (!PAD_VARARGS_DOWN)
+ {
+ t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
+ build (PLUS_EXPR, TREE_TYPE (valist), valist,
+ build_int_2 (boundary / BITS_PER_UNIT - 1, 0)));
+ TREE_SIDE_EFFECTS (t) = 1;
+ expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
+ }
+ t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
+ build (BIT_AND_EXPR, TREE_TYPE (valist), valist,
+ build_int_2 (~(boundary / BITS_PER_UNIT - 1), -1)));
+ TREE_SIDE_EFFECTS (t) = 1;
+ expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
+ }
if (type == error_mark_node
|| (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
|| TREE_OVERFLOW (type_size))
/* Generate a diagnostic for requesting data of a type that cannot
be passed through `...' due to type promotion at the call site. */
- else if ((promoted_type = (*lang_hooks.types.type_promotes_to) (type))
+ else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
!= type)
{
const char *name = "<anonymous type>", *pname = 0;
/* We can, however, treat "undefined" any way we please.
Call abort to encourage the user to fix the program. */
+ inform ("if this code is reached, the program will abort");
expand_builtin_trap ();
/* This is dead code, but go ahead and finish so that the
{
tree valist = TREE_VALUE (arglist);
-#ifdef EXPAND_BUILTIN_VA_END
- valist = stabilize_va_list (valist, 0);
- EXPAND_BUILTIN_VA_END (arglist);
-#else
/* Evaluate for side effects, if needed. I hate macros that don't
do that. */
if (TREE_SIDE_EFFECTS (valist))
expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
-#endif
return const0_rtx;
}
break;
}
}
- /* FALLTHROUGH */
+ /* Fall through. */
case 1: /* length is greater than 1, call fwrite. */
{
tree string_arg;
if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
&& (integer_zerop (arg1) || integer_onep (arg1)))
{
- int num_jumps = 0;
- int save_pending_stack_adjust = pending_stack_adjust;
- rtx insn;
-
- /* If we fail to locate an appropriate conditional jump, we'll
- fall back to normal evaluation. Ensure that the expression
- can be re-evaluated. */
- switch (unsafe_for_reeval (arg0))
- {
- case 0: /* Safe. */
- break;
-
- case 1: /* Mildly unsafe. */
- arg0 = unsave_expr (arg0);
- break;
-
- case 2: /* Wildly unsafe. */
- return NULL_RTX;
- }
+ rtx insn, drop_through_label, temp;
/* Expand the jump insns. */
start_sequence ();
do_jump (arg0, if_false_label, if_true_label);
ret = get_insns ();
+
+ drop_through_label = get_last_insn ();
+ if (drop_through_label && GET_CODE (drop_through_label) == NOTE)
+ drop_through_label = prev_nonnote_insn (drop_through_label);
+ if (drop_through_label && GET_CODE (drop_through_label) != CODE_LABEL)
+ drop_through_label = NULL_RTX;
end_sequence ();
- /* For mildly unsafe builtin jump's, if unsave_expr_now
- creates a new tree instead of changing the old one
- TREE_VALUE (arglist) needs to be updated. */
- if (arg0 != TREE_VALUE (arglist)
- && TREE_CODE (arg0) == UNSAVE_EXPR
- && TREE_OPERAND (arg0, 0) != TREE_VALUE (arglist))
- TREE_VALUE (arglist) = TREE_OPERAND (arg0, 0);
-
- /* Now that the __builtin_expect has been validated, go through and add
- the expect's to each of the conditional jumps. If we run into an
- error, just give up and generate the 'safe' code of doing a SCC
- operation and then doing a branch on that. */
+ if (! if_true_label)
+ if_true_label = drop_through_label;
+ if (! if_false_label)
+ if_false_label = drop_through_label;
+
+ /* Go through and add the expect's to each of the conditional jumps. */
insn = ret;
while (insn != NULL_RTX)
{
if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn))
{
rtx ifelse = SET_SRC (pc_set (insn));
- rtx label;
- int taken;
-
- if (GET_CODE (XEXP (ifelse, 1)) == LABEL_REF)
+ rtx then_dest = XEXP (ifelse, 1);
+ rtx else_dest = XEXP (ifelse, 2);
+ int taken = -1;
+
+ /* First check if we recognize any of the labels. */
+ if (GET_CODE (then_dest) == LABEL_REF
+ && XEXP (then_dest, 0) == if_true_label)
+ taken = 1;
+ else if (GET_CODE (then_dest) == LABEL_REF
+ && XEXP (then_dest, 0) == if_false_label)
+ taken = 0;
+ else if (GET_CODE (else_dest) == LABEL_REF
+ && XEXP (else_dest, 0) == if_false_label)
+ taken = 1;
+ else if (GET_CODE (else_dest) == LABEL_REF
+ && XEXP (else_dest, 0) == if_true_label)
+ taken = 0;
+ /* Otherwise check where we drop through. */
+ else if (else_dest == pc_rtx)
{
- taken = 1;
- label = XEXP (XEXP (ifelse, 1), 0);
- }
- /* An inverted jump reverses the probabilities. */
- else if (GET_CODE (XEXP (ifelse, 2)) == LABEL_REF)
- {
- taken = 0;
- label = XEXP (XEXP (ifelse, 2), 0);
+ if (next && GET_CODE (next) == NOTE)
+ next = next_nonnote_insn (next);
+
+ if (next && GET_CODE (next) == JUMP_INSN
+ && any_uncondjump_p (next))
+ temp = XEXP (SET_SRC (pc_set (next)), 0);
+ else
+ temp = next;
+
+ /* TEMP is either a CODE_LABEL, NULL_RTX or something
+ else that can't possibly match either target label. */
+ if (temp == if_false_label)
+ taken = 1;
+ else if (temp == if_true_label)
+ taken = 0;
}
- /* We shouldn't have to worry about conditional returns during
- the expansion stage, but handle it gracefully anyway. */
- else if (GET_CODE (XEXP (ifelse, 1)) == RETURN)
+ else if (then_dest == pc_rtx)
{
- taken = 1;
- label = NULL_RTX;
+ if (next && GET_CODE (next) == NOTE)
+ next = next_nonnote_insn (next);
+
+ if (next && GET_CODE (next) == JUMP_INSN
+ && any_uncondjump_p (next))
+ temp = XEXP (SET_SRC (pc_set (next)), 0);
+ else
+ temp = next;
+
+ if (temp == if_false_label)
+ taken = 0;
+ else if (temp == if_true_label)
+ taken = 1;
}
- /* An inverted return reverses the probabilities. */
- else if (GET_CODE (XEXP (ifelse, 2)) == RETURN)
+
+ if (taken != -1)
{
- taken = 0;
- label = NULL_RTX;
+ /* If the test is expected to fail, reverse the
+ probabilities. */
+ if (integer_zerop (arg1))
+ taken = 1 - taken;
+ predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
}
- else
- goto do_next_insn;
-
- /* If the test is expected to fail, reverse the
- probabilities. */
- if (integer_zerop (arg1))
- taken = 1 - taken;
-
- /* If we are jumping to the false label, reverse the
- probabilities. */
- if (label == NULL_RTX)
- ; /* conditional return */
- else if (label == if_false_label)
- taken = 1 - taken;
- else if (label != if_true_label)
- goto do_next_insn;
-
- num_jumps++;
- predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
}
- do_next_insn:
insn = next;
}
-
- /* If no jumps were modified, fail and do __builtin_expect the normal
- way. */
- if (num_jumps == 0)
- {
- ret = NULL_RTX;
- pending_stack_adjust = save_pending_stack_adjust;
- }
}
return ret;
/* Expand a call to printf or printf_unlocked with argument list ARGLIST.
Return 0 if a normal call should be emitted rather than transforming
the function inline. If convenient, the result should be placed in
- TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
+ TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
call. */
static rtx
expand_builtin_printf (tree arglist, rtx target, enum machine_mode mode,
{
/* Create a NUL-terminated string that's one char shorter
than the original, stripping off the trailing '\n'. */
- char *newstr = (char *) alloca (len);
+ char *newstr = alloca (len);
memcpy (newstr, fmt_str, len - 1);
newstr[len - 1] = 0;
/* Expand a call to fprintf or fprintf_unlocked with argument list ARGLIST.
Return 0 if a normal call should be emitted rather than transforming
the function inline. If convenient, the result should be placed in
- TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
+ TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
call. */
static rtx
expand_builtin_fprintf (tree arglist, rtx target, enum machine_mode mode,
return 0;
}
+
+/* Expand a call to the built-in signbit, signbitf or signbitl function.
+ Return NULL_RTX if a normal call should be emitted rather than expanding
+ the function in-line. EXP is the expression that is a call to the builtin
+ function; if convenient, the result should be placed in TARGET. */
+
+static rtx
+expand_builtin_signbit (tree exp, rtx target)
+{
+ const struct real_format *fmt;
+ enum machine_mode fmode, imode, rmode;
+ HOST_WIDE_INT hi, lo;
+ tree arg, arglist;
+ int bitpos;
+ rtx temp;
+
+ arglist = TREE_OPERAND (exp, 1);
+ if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ return 0;
+
+ arg = TREE_VALUE (arglist);
+ fmode = TYPE_MODE (TREE_TYPE (arg));
+ rmode = TYPE_MODE (TREE_TYPE (exp));
+ fmt = REAL_MODE_FORMAT (fmode);
+
+ /* For floating point formats without a sign bit, implement signbit
+ as "ARG < 0.0". */
+ if (fmt->signbit < 0)
+ {
+ /* But we can't do this if the format supports signed zero. */
+ if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
+ return 0;
+
+ arg = fold (build (LT_EXPR, TREE_TYPE (exp), arg,
+ build_real (TREE_TYPE (arg), dconst0)));
+ return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
+ }
+
+ /* Otherwise reinterpret the float's bits as an integer of the same
+ width, so the sign bit can be tested with integer operations. */
+ imode = int_mode_for_mode (fmode);
+ if (imode == BLKmode)
+ return 0;
+
+ bitpos = fmt->signbit;
+ /* Handle targets with different FP word orders. */
+ if (FLOAT_WORDS_BIG_ENDIAN != WORDS_BIG_ENDIAN)
+ {
+ /* Swap which word of the multi-word image holds the sign bit,
+ keeping the bit position within its word unchanged. */
+ int nwords = GET_MODE_BITSIZE (fmode) / BITS_PER_WORD;
+ int word = nwords - (bitpos / BITS_PER_WORD) - 1;
+ bitpos = word * BITS_PER_WORD + bitpos % BITS_PER_WORD;
+ }
+
+ /* If the sign bit is not in the lowpart and the floating point format
+ is wider than an integer, check that is twice the size of an integer
+ so that we can use gen_highpart below. */
+ if (bitpos >= GET_MODE_BITSIZE (rmode)
+ && GET_MODE_BITSIZE (imode) != 2 * GET_MODE_BITSIZE (rmode))
+ return 0;
+
+ temp = expand_expr (arg, NULL_RTX, VOIDmode, 0);
+ temp = gen_lowpart (imode, temp);
+
+ if (GET_MODE_BITSIZE (imode) > GET_MODE_BITSIZE (rmode))
+ {
+ /* The integer image is wider than the result mode: pull the sign
+ bit out as a one-bit field. */
+ if (BITS_BIG_ENDIAN)
+ bitpos = GET_MODE_BITSIZE (imode) - 1 - bitpos;
+ temp = copy_to_mode_reg (imode, temp);
+ temp = extract_bit_field (temp, 1, bitpos, 1,
+ NULL_RTX, rmode, rmode,
+ GET_MODE_SIZE (imode));
+ }
+ else
+ {
+ /* The sign bit fits in the result mode: mask off every bit except
+ the sign bit, building the (possibly double-word) mask constant
+ from HI/LO halves. */
+ if (GET_MODE_BITSIZE (imode) < GET_MODE_BITSIZE (rmode))
+ temp = gen_lowpart (rmode, temp);
+ if (bitpos < HOST_BITS_PER_WIDE_INT)
+ {
+ hi = 0;
+ lo = (HOST_WIDE_INT) 1 << bitpos;
+ }
+ else
+ {
+ hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
+ lo = 0;
+ }
+
+ temp = force_reg (rmode, temp);
+ temp = expand_binop (rmode, and_optab, temp,
+ immed_double_const (lo, hi, rmode),
+ target, 1, OPTAB_LIB_WIDEN);
+ }
+ return temp;
+}
\f
/* Expand an expression EXP that calls a built-in function,
with result going to TARGET if that's convenient
emit_queue ();
if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
- return (*targetm.expand_builtin) (exp, target, subtarget, mode, ignore);
+ return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
/* When not optimizing, generate calls to library functions for a certain
set of builtins. */
and IMAGPART_EXPR. */
abort ();
- case BUILT_IN_SIN:
- case BUILT_IN_SINF:
- case BUILT_IN_SINL:
- case BUILT_IN_COS:
- case BUILT_IN_COSF:
- case BUILT_IN_COSL:
case BUILT_IN_EXP:
case BUILT_IN_EXPF:
case BUILT_IN_EXPL:
+ case BUILT_IN_EXP10:
+ case BUILT_IN_EXP10F:
+ case BUILT_IN_EXP10L:
+ case BUILT_IN_POW10:
+ case BUILT_IN_POW10F:
+ case BUILT_IN_POW10L:
+ case BUILT_IN_EXP2:
+ case BUILT_IN_EXP2F:
+ case BUILT_IN_EXP2L:
case BUILT_IN_LOG:
case BUILT_IN_LOGF:
case BUILT_IN_LOGL:
+ case BUILT_IN_LOG10:
+ case BUILT_IN_LOG10F:
+ case BUILT_IN_LOG10L:
+ case BUILT_IN_LOG2:
+ case BUILT_IN_LOG2F:
+ case BUILT_IN_LOG2L:
case BUILT_IN_TAN:
case BUILT_IN_TANF:
case BUILT_IN_TANL:
case BUILT_IN_POW:
case BUILT_IN_POWF:
case BUILT_IN_POWL:
- if (! flag_unsafe_math_optimizations)
- break;
target = expand_builtin_pow (exp, target, subtarget);
if (target)
return target;
return target;
break;
+ case BUILT_IN_SIN:
+ case BUILT_IN_SINF:
+ case BUILT_IN_SINL:
+ case BUILT_IN_COS:
+ case BUILT_IN_COSF:
+ case BUILT_IN_COSL:
+ if (! flag_unsafe_math_optimizations)
+ break;
+ target = expand_builtin_mathfn_3 (exp, target, subtarget);
+ if (target)
+ return target;
+ break;
+
case BUILT_IN_APPLY_ARGS:
return expand_builtin_apply_args ();
return target;
break;
+ case BUILT_IN_SIGNBIT:
+ case BUILT_IN_SIGNBITF:
+ case BUILT_IN_SIGNBITL:
+ target = expand_builtin_signbit (exp, target);
+ if (target)
+ return target;
+ break;
+
/* Various hooks for the DWARF 2 __throw routine. */
case BUILT_IN_UNWIND_INIT:
expand_builtin_unwind_init ();
case BUILT_IN_EH_RETURN_DATA_REGNO:
return expand_builtin_eh_return_data_regno (arglist);
#endif
+ case BUILT_IN_EXTEND_POINTER:
+ return expand_builtin_extend_pointer (TREE_VALUE (arglist));
+
case BUILT_IN_VA_START:
case BUILT_IN_STDARG_START:
return expand_builtin_va_start (arglist);
case BUILT_IN_NEARBYINT:
case BUILT_IN_NEARBYINTF:
case BUILT_IN_NEARBYINTL:
+ case BUILT_IN_RINT:
+ case BUILT_IN_RINTF:
+ case BUILT_IN_RINTL:
case BUILT_IN_ROUND:
case BUILT_IN_ROUNDF:
case BUILT_IN_ROUNDL:
return fold_trunc_transparent_mathfn (exp);
}
+/* Fold function call to builtin round, roundf or roundl. Return
+ NULL_TREE if no simplification can be made. */
+
+static tree
+fold_builtin_round (tree exp)
+{
+ tree arglist = TREE_OPERAND (exp, 1);
+ tree arg;
+
+ if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ return 0;
+
+ /* Optimize round of constant value: fold unless the argument is a
+ NaN and -fmath-errno is in effect. */
+ arg = TREE_VALUE (arglist);
+ if (TREE_CODE (arg) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg))
+ {
+ REAL_VALUE_TYPE x;
+
+ x = TREE_REAL_CST (arg);
+ if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
+ {
+ tree type = TREE_TYPE (exp);
+ REAL_VALUE_TYPE r;
+
+ real_round (&r, TYPE_MODE (type), &x);
+ return build_real (type, r);
+ }
+ }
+
+ /* Otherwise try the folds shared with the trunc-style functions. */
+ return fold_trunc_transparent_mathfn (exp);
+}
+
+
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
and their long and long long variants (i.e. ffsl and ffsll).
Return NULL_TREE if no simplification can be made. */
}
/* A subroutine of fold_builtin to fold the various logarithmic
- functions. EXP is the CALL_EXPR of a call to a builtin log*
- function. VALUE is the base of the log* function. */
+ functions. EXP is the CALL_EXPR of a call to a builtin logN
+ function. VALUE is the base of the logN function. */
static tree
fold_builtin_logarithm (tree exp, const REAL_VALUE_TYPE *value)
tree type = TREE_TYPE (TREE_TYPE (fndecl));
tree arg = TREE_VALUE (arglist);
const enum built_in_function fcode = builtin_mathfn_code (arg);
-
- /* Optimize log*(1.0) = 0.0. */
+
+ /* Optimize logN(1.0) = 0.0. */
if (real_onep (arg))
return build_real (type, dconst0);
if (real_dconstp (arg, &value_truncate))
return build_real (type, dconst1);
}
-
+
/* Special case, optimize logN(expN(x)) = x. */
if (flag_unsafe_math_optimizations
&& ((value == &dconste
&& (fcode == BUILT_IN_EXP2
|| fcode == BUILT_IN_EXP2F
|| fcode == BUILT_IN_EXP2L))
- || (value == &dconst10
- && (fcode == BUILT_IN_EXP10
- || fcode == BUILT_IN_EXP10F
- || fcode == BUILT_IN_EXP10L))))
+ || (value == &dconst10 && (BUILTIN_EXP10_P (fcode)))))
return convert (type, TREE_VALUE (TREE_OPERAND (arg, 1)));
- /* Optimize log*(func()) for various exponential functions. We
+ /* Optimize logN(func()) for various exponential functions. We
want to determine the value "x" and the power "exponent" in
order to transform logN(x**exponent) into exponent*logN(x). */
if (flag_unsafe_math_optimizations)
{
tree exponent = 0, x = 0;
-
+
switch (fcode)
{
case BUILT_IN_EXP:
return 0;
}
-
+
/* A subroutine of fold_builtin to fold the various exponent
functions. EXP is the CALL_EXPR of a call to a builtin function.
VALUE is the value which will be raised to a power. */
return 0;
}
+/* Fold function call to builtin signbit, signbitf or signbitl. Return
+ NULL_TREE if no simplification can be made. */
+
+static tree
+fold_builtin_signbit (tree exp)
+{
+ tree arglist = TREE_OPERAND (exp, 1);
+ tree arg, temp;
+
+ if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ return NULL_TREE;
+
+ arg = TREE_VALUE (arglist);
+
+ /* If ARG is a compile-time constant, determine the result. */
+ if (TREE_CODE (arg) == REAL_CST
+ && !TREE_CONSTANT_OVERFLOW (arg))
+ {
+ REAL_VALUE_TYPE c;
+
+ c = TREE_REAL_CST (arg);
+ temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
+ /* Convert the 0/1 node to the call's declared result type. */
+ return convert (TREE_TYPE (exp), temp);
+ }
+
+ /* If ARG is non-negative, the result is always zero.
+ omit_one_operand keeps ARG around for its side effects. */
+ if (tree_expr_nonnegative_p (arg))
+ return omit_one_operand (TREE_TYPE (exp), integer_zero_node, arg);
+
+ /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
+ if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
+ return fold (build (LT_EXPR, TREE_TYPE (exp), arg,
+ build_real (TREE_TYPE (arg), dconst0)));
+
+ return NULL_TREE;
+}
+
+
+/* Fold a call to builtin isascii.  */
+
+static tree
+fold_builtin_isascii (tree arglist)
+{
+  tree c;
+
+  if (! validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
+    return 0;
+
+  /* isascii(c) is true iff no bits above the low seven are set,
+     i.e. transform it into ((c & ~0x7f) == 0).  */
+  c = TREE_VALUE (arglist);
+  return fold (build (EQ_EXPR, integer_type_node,
+                      build (BIT_AND_EXPR, integer_type_node, c,
+                             build_int_2 (~ (unsigned HOST_WIDE_INT) 0x7f,
+                                          ~ (HOST_WIDE_INT) 0)),
+                      integer_zero_node));
+}
+
+
+/* Fold a call to builtin toascii.  */
+
+static tree
+fold_builtin_toascii (tree arglist)
+{
+  tree c;
+
+  if (! validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
+    return 0;
+
+  /* toascii(c) keeps only the low seven bits: transform it
+     into (c & 0x7f).  */
+  c = TREE_VALUE (arglist);
+  return fold (build (BIT_AND_EXPR, integer_type_node, c,
+                      build_int_2 (0x7f, 0)));
+}
+
+
+
/* Used by constant folding to eliminate some builtin calls early. EXP is
the CALL_EXPR of a call to a builtin function. */
return build_real (type, r);
}
- /* Optimize sqrt(exp(x)) = exp(x*0.5). */
+ /* Optimize sqrt(expN(x)) = expN(x*0.5). */
fcode = builtin_mathfn_code (arg);
- if (flag_unsafe_math_optimizations
- && (fcode == BUILT_IN_EXP
- || fcode == BUILT_IN_EXPF
- || fcode == BUILT_IN_EXPL))
+ if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
{
tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
arg = fold (build (MULT_EXPR, type,
return build_function_call_expr (expfn, arglist);
}
+ /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
+ if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
+ {
+ tree powfn = mathfn_built_in (type, BUILT_IN_POW);
+
+ if (powfn)
+ {
+ tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
+ tree tree_root;
+ /* The inner root was either sqrt or cbrt. */
+ REAL_VALUE_TYPE dconstroot =
+ BUILTIN_SQRT_P (fcode) ? dconsthalf : dconstthird;
+
+ /* Adjust for the outer root. */
+ SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
+ dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
+ tree_root = build_real (type, dconstroot);
+ arglist = tree_cons (NULL_TREE, arg0,
+ build_tree_list (NULL_TREE, tree_root));
+ return build_function_call_expr (powfn, arglist);
+ }
+ }
+
/* Optimize sqrt(pow(x,y)) = pow(x,y*0.5). */
if (flag_unsafe_math_optimizations
&& (fcode == BUILT_IN_POW
}
break;
+ case BUILT_IN_CBRT:
+ case BUILT_IN_CBRTF:
+ case BUILT_IN_CBRTL:
+ if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
+ {
+ tree arg = TREE_VALUE (arglist);
+ const enum built_in_function fcode = builtin_mathfn_code (arg);
+
+ /* Optimize cbrt of constant value. */
+ if (real_zerop (arg) || real_onep (arg) || real_minus_onep (arg))
+ return arg;
+
+ /* Optimize cbrt(expN(x)) -> expN(x/3). */
+ if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
+ {
+ tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
+ const REAL_VALUE_TYPE third_trunc =
+ real_value_truncate (TYPE_MODE (type), dconstthird);
+ arg = fold (build (MULT_EXPR, type,
+ TREE_VALUE (TREE_OPERAND (arg, 1)),
+ build_real (type, third_trunc)));
+ arglist = build_tree_list (NULL_TREE, arg);
+ return build_function_call_expr (expfn, arglist);
+ }
+
+ /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
+ /* We don't optimize cbrt(cbrt(x)) -> pow(x,1/9) because if
+ x is negative pow will error but cbrt won't. */
+ if (flag_unsafe_math_optimizations && BUILTIN_SQRT_P (fcode))
+ {
+ tree powfn = mathfn_built_in (type, BUILT_IN_POW);
+
+ if (powfn)
+ {
+ tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
+ tree tree_root;
+ REAL_VALUE_TYPE dconstroot = dconstthird;
+
+ SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
+ dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
+ tree_root = build_real (type, dconstroot);
+ arglist = tree_cons (NULL_TREE, arg0,
+ build_tree_list (NULL_TREE, tree_root));
+ return build_function_call_expr (powfn, arglist);
+ }
+
+ }
+ }
+ break;
+
case BUILT_IN_SIN:
case BUILT_IN_SINF:
case BUILT_IN_SINL:
REAL_VALUE_TYPE cst;
real_convert (&cst, TYPE_MODE (type), &dconstpi);
- cst.exp -= 2;
+ SET_REAL_EXP (&cst, REAL_EXP (&cst) - 2);
return build_real (type, cst);
}
}
}
}
- /* Optimize pow(exp(x),y) = exp(x*y). */
+ /* Optimize pow(expN(x),y) = expN(x*y). */
fcode = builtin_mathfn_code (arg0);
- if (flag_unsafe_math_optimizations
- && (fcode == BUILT_IN_EXP
- || fcode == BUILT_IN_EXPF
- || fcode == BUILT_IN_EXPL))
+ if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
{
tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
}
/* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
- if (flag_unsafe_math_optimizations
- && (fcode == BUILT_IN_SQRT
- || fcode == BUILT_IN_SQRTF
- || fcode == BUILT_IN_SQRTL))
+ if (flag_unsafe_math_optimizations && BUILTIN_SQRT_P (fcode))
{
tree narg0 = TREE_VALUE (TREE_OPERAND (arg0, 1));
tree narg1 = fold (build (MULT_EXPR, type, arg1,
case BUILT_IN_ROUND:
case BUILT_IN_ROUNDF:
case BUILT_IN_ROUNDL:
+ return fold_builtin_round (exp);
+
case BUILT_IN_NEARBYINT:
case BUILT_IN_NEARBYINTF:
case BUILT_IN_NEARBYINTL:
+ case BUILT_IN_RINT:
+ case BUILT_IN_RINTF:
+ case BUILT_IN_RINTL:
return fold_trunc_transparent_mathfn (exp);
case BUILT_IN_FFS:
case BUILT_IN_STRNCMP:
return fold_builtin_strncmp (exp);
+ case BUILT_IN_SIGNBIT:
+ case BUILT_IN_SIGNBITF:
+ case BUILT_IN_SIGNBITL:
+ return fold_builtin_signbit (exp);
+
+ case BUILT_IN_ISASCII:
+ return fold_builtin_isascii (arglist);
+
+ case BUILT_IN_TOASCII:
+ return fold_builtin_toascii (arglist);
+
default:
break;
}