/* Expand builtin functions.
Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
- 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
+ 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
This file is part of GCC.
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
-static tree build_string_literal (int, const char *);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
-static tree fold_builtin_cabs (tree, tree);
static tree fold_builtin_sqrt (tree, tree);
static tree fold_builtin_cbrt (tree, tree);
static tree fold_builtin_pow (tree, tree, tree);
case ADDR_EXPR:
/* See what we are pointing at and look at its alignment. */
exp = TREE_OPERAND (exp, 0);
+ inner = max_align;
+ while (handled_component_p (exp))
+ {
+ /* Fields in a structure can be packed, honor DECL_ALIGN
+ of the FIELD_DECL. For all other references the conservative
+ alignment is the element type alignment. */
+ if (TREE_CODE (exp) == COMPONENT_REF)
+ inner = MIN (inner, DECL_ALIGN (TREE_OPERAND (exp, 1)));
+ else
+ inner = MIN (inner, TYPE_ALIGN (TREE_TYPE (exp)));
+ exp = TREE_OPERAND (exp, 0);
+ }
if (TREE_CODE (exp) == FUNCTION_DECL)
align = FUNCTION_BOUNDARY;
else if (DECL_P (exp))
- align = DECL_ALIGN (exp);
+ align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
else if (CONSTANT_CLASS_P (exp))
- align = CONSTANT_ALIGNMENT (exp, align);
+ align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
#endif
+ else
+ align = MIN (align, inner);
return MIN (align, max_align);
default:
/* For a zero count, we don't care what frame address we return, so frame
pointer elimination is OK, and using the soft frame pointer is OK.
- For a non-zero count, we require a stable offset from the current frame
+ For a nonzero count, we require a stable offset from the current frame
pointer to the previous one, so we must use the hard frame pointer, and
we must disable frame pointer elimination. */
if (count == 0)
tem = frame_pointer_rtx;
- else
+ else
{
tem = hard_frame_pointer_rtx;
|| REGNO (target) < FIRST_PSEUDO_REGISTER)
target = gen_reg_rtx (TYPE_MODE (integer_type_node));
- buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
+ buf_addr = expand_normal (TREE_VALUE (arglist));
next_lab = gen_label_rtx ();
cont_lab = gen_label_rtx ();
/* Pick up FP, label, and SP from the block and jump. This code is
from expand_goto in stmt.c; see there for detailed comments. */
-#if HAVE_nonlocal_goto
+#ifdef HAVE_nonlocal_goto
if (HAVE_nonlocal_goto)
/* We have to pass a value to the nonlocal_goto pattern that will
get copied into the static_chain pointer, but it does not matter
arglist = TREE_CHAIN (arglist);
t_save_area = TREE_VALUE (arglist);
- r_label = expand_expr (t_label, NULL_RTX, VOIDmode, 0);
+ r_label = expand_normal (t_label);
r_label = convert_memory_address (Pmode, r_label);
- r_save_area = expand_expr (t_save_area, NULL_RTX, VOIDmode, 0);
+ r_save_area = expand_normal (t_save_area);
r_save_area = convert_memory_address (Pmode, r_save_area);
r_fp = gen_rtx_MEM (Pmode, r_save_area);
r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
current_function_has_nonlocal_goto = 1;
-#if HAVE_nonlocal_goto
+#ifdef HAVE_nonlocal_goto
/* ??? We no longer need to pass the static chain value, afaik. */
if (HAVE_nonlocal_goto)
emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
error ("second argument to %<__builtin_prefetch%> must be a constant");
arg1 = integer_zero_node;
}
- op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
+ op1 = expand_normal (arg1);
/* Argument 1 must be either zero or one. */
if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
{
error ("third argument to %<__builtin_prefetch%> must be a constant");
arg2 = integer_zero_node;
}
- op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
+ op2 = expand_normal (arg2);
/* Argument 2 must be 0, 1, 2, or 3. */
if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
{
{
case VOID_TYPE: return void_type_class;
case INTEGER_TYPE: return integer_type_class;
- case CHAR_TYPE: return char_type_class;
case ENUMERAL_TYPE: return enumeral_type_class;
case BOOLEAN_TYPE: return boolean_type_class;
case POINTER_TYPE: return pointer_type_class;
if (! stable)
exp = build_function_call_expr (fndecl, arglist);
- op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
- op1 = expand_expr (arg1, 0, VOIDmode, 0);
+ op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
+ op1 = expand_normal (arg1);
start_sequence ();
target1 = gen_reg_rtx (mode);
target2 = gen_reg_rtx (mode);
- op0 = expand_expr (arg, NULL_RTX, VOIDmode, 0);
- op1 = expand_expr (build_fold_indirect_ref (sinp), NULL_RTX, VOIDmode, 0);
- op2 = expand_expr (build_fold_indirect_ref (cosp), NULL_RTX, VOIDmode, 0);
+ op0 = expand_normal (arg);
+ op1 = expand_normal (build_fold_indirect_ref (sinp));
+ op2 = expand_normal (build_fold_indirect_ref (cosp));
/* Compute into target1 and target2.
Set TARGET to wherever the result comes back. */
gcc_assert (fallback_fndecl != NULL_TREE);
exp = build_function_call_expr (fallback_fndecl, arglist);
- tmp = expand_builtin_mathfn (exp, NULL_RTX, NULL_RTX);
+ tmp = expand_normal (exp);
/* Truncate the result of floating point optab to integer
via expand_fix (). */
if (n < POWI_TABLE_SIZE)
{
if (cache[n])
- return cache[n];
+ return cache[n];
target = gen_reg_rtx (mode);
cache[n] = target;
return 0;
/* If either SRC is not a pointer type, don't do this
- operation in-line. */
+ operation in-line. */
if (src_align == 0)
return 0;
dest_mem = get_memory_rtx (dest, len);
set_mem_align (dest_mem, dest_align);
- len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
+ len_rtx = expand_normal (len);
src_str = c_getstr (src);
/* If SRC is a string constant and block move would be done
if (result)
return expand_expr (result, target, mode, EXPAND_NORMAL);
-
+
/* If either SRC or DEST is not a pointer type, don't do this
- operation in-line. */
+ operation in-line. */
if (dest_align == 0 || src_align == 0)
return 0;
if (! host_integerp (len, 1))
return 0;
- len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
+ len_rtx = expand_normal (len);
src_str = c_getstr (src);
/* If SRC is a string constant and block move would be done
return 0;
/* If either SRC is not a pointer type, don't do this
- operation in-line. */
+ operation in-line. */
if (src_align == 0)
return 0;
/* If src is categorized for a readonly section we can use
normal memcpy. */
if (readonly_data_expr (src))
- {
+ {
tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
if (!fn)
return 0;
/* If length is 1 and we can expand memcpy call inline,
it is ok to use memcpy as well. */
if (integer_onep (len))
- {
+ {
rtx ret = expand_builtin_mempcpy (arglist, type, target, mode,
/*endp=*/0);
if (ret)
return ret;
- }
+ }
/* Otherwise, call the normal function. */
return 0;
rtx ret;
/* Ensure we get an actual string whose length can be evaluated at
- compile-time, not an expression containing a string. This is
- because the latter will potentially produce pessimized code
- when used to produce the return value. */
+ compile-time, not an expression containing a string. This is
+ because the latter will potentially produce pessimized code
+ when used to produce the return value. */
src = TREE_VALUE (TREE_CHAIN (arglist));
if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
return expand_movstr (TREE_VALUE (arglist),
if (TREE_CODE (len) == INTEGER_CST)
{
- rtx len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
+ rtx len_rtx = expand_normal (len);
if (GET_CODE (len_rtx) == CONST_INT)
{
- ret = expand_builtin_strcpy (get_callee_fndecl (exp),
+ ret = expand_builtin_strcpy (get_callee_fndecl (exp),
arglist, target, mode);
if (ret)
tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)), 1);
tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
tree result = fold_builtin_strncpy (fndecl, arglist, slen);
-
+
if (result)
return expand_expr (result, target, mode, EXPAND_NORMAL);
slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
/* We're required to pad with trailing zeros if the requested
- len is greater than strlen(s2)+1. In that case try to
+ len is greater than strlen(s2)+1. In that case try to
use store_by_pieces, if it fails, punt. */
if (tree_int_cst_lt (slen, len))
{
tree dest = TREE_VALUE (arglist);
tree val = TREE_VALUE (TREE_CHAIN (arglist));
tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
+ tree fndecl, fn;
+ enum built_in_function fcode;
char c;
-
- unsigned int dest_align
- = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
+ unsigned int dest_align;
rtx dest_mem, dest_addr, len_rtx;
+ dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
+
/* If DEST is not a pointer type, don't do this
operation in-line. */
if (dest_align == 0)
return expand_expr (dest, target, mode, EXPAND_NORMAL);
}
- len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
+ /* Stabilize the arguments in case we fail. */
+ dest = builtin_save_expr (dest);
+ val = builtin_save_expr (val);
+ len = builtin_save_expr (len);
+
+ len_rtx = expand_normal (len);
dest_mem = get_memory_rtx (dest, len);
if (TREE_CODE (val) != INTEGER_CST)
{
rtx val_rtx;
- val = fold_build1 (CONVERT_EXPR, unsigned_char_type_node, val);
- val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
+ val_rtx = expand_normal (val);
+ val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
+ val_rtx, 0);
/* Assume that we can memset by pieces if we can store the
* the coefficients by pieces (in the required modes).
&& can_store_by_pieces (tree_low_cst (len, 1),
builtin_memset_read_str, &c, dest_align))
{
- val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
+ val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
val_rtx);
store_by_pieces (dest_mem, tree_low_cst (len, 1),
builtin_memset_gen_str, val_rtx, dest_align, 0);
}
- else if (!set_storage_via_setmem(dest_mem, len_rtx, val_rtx,
- dest_align))
- return 0;
+ else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
+ dest_align))
+ goto do_libcall;
dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
dest_mem = convert_memory_address (ptr_mode, dest_mem);
}
if (target_char_cast (val, &c))
- return 0;
+ goto do_libcall;
if (c)
{
builtin_memset_read_str, &c, dest_align, 0);
else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
dest_align))
- return 0;
+ goto do_libcall;
dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
dest_mem = convert_memory_address (ptr_mode, dest_mem);
}
return dest_addr;
+
+ do_libcall:
+ fndecl = get_callee_fndecl (orig_exp);
+ fcode = DECL_FUNCTION_CODE (fndecl);
+ gcc_assert (fcode == BUILT_IN_MEMSET || fcode == BUILT_IN_BZERO);
+ arglist = build_tree_list (NULL_TREE, len);
+ if (fcode == BUILT_IN_MEMSET)
+ arglist = tree_cons (NULL_TREE, val, arglist);
+ arglist = tree_cons (NULL_TREE, dest, arglist);
+ fn = build_function_call_expr (fndecl, arglist);
+ if (TREE_CODE (fn) == CALL_EXPR)
+ CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
+ return expand_call (fn, target, target == const0_rtx);
}
}
arg1_rtx = get_memory_rtx (arg1, len);
arg2_rtx = get_memory_rtx (arg2, len);
- arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
+ arg3_rtx = expand_normal (len);
/* Set MEM_SIZE as appropriate. */
if (GET_CODE (arg3_rtx) == CONST_INT)
rtx arg1_rtx, arg2_rtx;
rtx result, insn = NULL_RTX;
tree fndecl, fn;
-
+
tree arg1 = TREE_VALUE (arglist);
tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
int arg1_align
/* Try to call cmpstrsi. */
if (HAVE_cmpstrsi)
{
- enum machine_mode insn_mode
+ enum machine_mode insn_mode
= insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
/* Make a place to write the result of the instruction. */
GEN_INT (MIN (arg1_align, arg2_align)));
}
#endif
-#if HAVE_cmpstrnsi
+#ifdef HAVE_cmpstrnsi
/* Try to determine at least one length and call cmpstrnsi. */
- if (!insn && HAVE_cmpstrnsi)
+ if (!insn && HAVE_cmpstrnsi)
{
tree len;
rtx arg3_rtx;
- enum machine_mode insn_mode
+ enum machine_mode insn_mode
= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
tree len1 = c_strlen (arg1, 1);
tree len2 = c_strlen (arg2, 1);
/* If both arguments have side effects, we cannot optimize. */
if (!len || TREE_SIDE_EFFECTS (len))
- return 0;
+ goto do_libcall;
- /* Stabilize the arguments in case gen_cmpstrnsi fails. */
- arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
+ arg3_rtx = expand_normal (len);
/* Make a place to write the result of the instruction. */
result = target;
/* Expand the library call ourselves using a stabilized argument
list to avoid re-evaluating the function's arguments twice. */
+#ifdef HAVE_cmpstrnsi
+ do_libcall:
+#endif
arglist = build_tree_list (NULL_TREE, arg2);
arglist = tree_cons (NULL_TREE, arg1, arglist);
fndecl = get_callee_fndecl (exp);
arg1_rtx = get_memory_rtx (arg1, len);
arg2_rtx = get_memory_rtx (arg2, len);
- arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
+ arg3_rtx = expand_normal (len);
insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
GEN_INT (MIN (arg1_align, arg2_align)));
if (insn)
const char *p = c_getstr (src);
/* If the string length is zero, return the dst parameter. */
- if (p && *p == '\0')
+ if (p && *p == '\0')
return expand_expr (dst, target, mode, EXPAND_NORMAL);
-
+
if (!optimize_size)
{
/* See if we can store by pieces into (dst + strlen(dst)). */
newsrc = builtin_save_expr (src);
if (newsrc != src)
arglist = build_tree_list (NULL_TREE, newsrc);
- else
+ else
arglist = TREE_CHAIN (arglist); /* Reusing arglist if safe. */
dst = builtin_save_expr (dst);
end_sequence (); /* Stop sequence. */
return 0;
}
-
+
/* Output the entire sequence. */
insns = get_insns ();
end_sequence ();
emit_insn (insns);
-
+
return expand_expr (dst, target, mode, EXPAND_NORMAL);
}
static tree
dummy_object (tree type)
{
- tree t = convert (build_pointer_type (type), null_pointer_node);
+ tree t = build_int_cst (build_pointer_type (type), 0);
return build1 (INDIRECT_REF, type, t);
}
{
/* If va_list is an array type, the argument may have decayed
to a pointer type, e.g. by being passed to another function.
- In that case, unwrap both types so that we can compare the
+ In that case, unwrap both types so that we can compare the
underlying records. */
if (TREE_CODE (have_va_type) == ARRAY_TYPE
|| POINTER_TYPE_P (have_va_type))
else
{
/* Make it easier for the backends by protecting the valist argument
- from multiple evaluations. */
+ from multiple evaluations. */
if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
{
/* For this case, the backends will be expecting a pointer to
return 0;
/* Compute the argument. */
- op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
+ op0 = expand_normal (TREE_VALUE (arglist));
/* Allocate the desired space. */
result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
probabilities. */
if (integer_zerop (arg1))
taken = 1 - taken;
- predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
+ predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
}
}
return 0;
arg = TREE_VALUE (arglist);
- op0 = expand_expr (arg, subtarget, VOIDmode, 0);
+ op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
arg = TREE_VALUE (TREE_CHAIN (arglist));
- op1 = expand_expr (arg, NULL, VOIDmode, 0);
+ op1 = expand_normal (arg);
return expand_copysign (op0, op1, target);
}
/* Create a new constant string literal and return a char* pointer to it.
The STRING_CST value is the LEN characters at STR. */
-static tree
+tree
build_string_literal (int len, const char *str)
{
tree t, elem, index, type;
if (!init_target_chars())
return 0;
-
+
/* If the format specifier was "%s\n", call __builtin_puts(arg). */
if (strcmp (fmt_str, target_percent_s_newline) == 0)
{
if (! arglist
- || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
+ || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
|| TREE_CHAIN (arglist))
return 0;
fn = fn_puts;
{
/* We can't handle anything else with % args or %% ... yet. */
if (strchr (fmt_str, target_percent))
- return 0;
+ return 0;
if (arglist)
return 0;
call. */
static rtx
expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
- bool unlocked)
+ bool unlocked)
{
tree arglist = TREE_OPERAND (exp, 1);
/* If we're using an unlocked function, assume the other unlocked
if (!init_target_chars())
return 0;
-
+
/* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
if (strcmp (fmt_str, target_percent_s) == 0)
{
if (! arglist
- || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
+ || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
|| TREE_CHAIN (arglist))
return 0;
arg = TREE_VALUE (arglist);
{
/* We can't handle anything else with % args or %% ... yet. */
if (strchr (fmt_str, target_percent))
- return 0;
+ return 0;
if (arglist)
return 0;
arglist = TREE_CHAIN (arglist);
t_chain = TREE_VALUE (arglist);
- r_tramp = expand_expr (t_tramp, NULL_RTX, VOIDmode, 0);
- r_func = expand_expr (t_func, NULL_RTX, VOIDmode, 0);
- r_chain = expand_expr (t_chain, NULL_RTX, VOIDmode, 0);
+ r_tramp = expand_normal (t_tramp);
+ r_func = expand_normal (t_func);
+ r_chain = expand_normal (t_chain);
/* Generate insns to initialize the trampoline. */
r_tramp = round_trampoline_addr (r_tramp);
if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
return NULL_RTX;
- tramp = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
+ tramp = expand_normal (TREE_VALUE (arglist));
tramp = round_trampoline_addr (tramp);
#ifdef TRAMPOLINE_ADJUST_ADDRESS
TRAMPOLINE_ADJUST_ADDRESS (tramp);
return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
}
- temp = expand_expr (arg, NULL_RTX, VOIDmode, 0);
+ temp = expand_normal (arg);
if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
{
imode = int_mode_for_mode (fmode);
imode = word_mode;
/* Handle targets with different FP word orders. */
if (FLOAT_WORDS_BIG_ENDIAN)
- word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
+ word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
else
- word = bitpos / BITS_PER_WORD;
+ word = bitpos / BITS_PER_WORD;
temp = operand_subword_force (temp, word, fmode);
bitpos = bitpos % BITS_PER_WORD;
}
else
{
/* Perform a logical right shift to place the signbit in the least
- significant bit, then truncate the result to the desired mode
+ significant bit, then truncate the result to the desired mode
and mask just this bit. */
temp = expand_shift (RSHIFT_EXPR, imode, temp,
build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
TREE_PUBLIC (decl) = 1;
DECL_ARTIFICIAL (decl) = 1;
TREE_NOTHROW (decl) = 1;
+ DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
+ DECL_VISIBILITY_SPECIFIED (decl) = 1;
call = build_function_call_expr (decl, arglist);
return expand_call (call, target, ignore);
mem = validize_mem (gen_rtx_MEM (mode, addr));
set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
+ set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
MEM_VOLATILE_P (mem) = 1;
return mem;
}
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
- ARGLIST is the operands list to the function. CODE is the rtx code
+ ARGLIST is the operands list to the function. CODE is the rtx code
that corresponds to the arithmetic or logical operation from the name;
an exception here is that NOT actually means NAND. TARGET is an optional
place for us to store the results; AFTER is true if this is the
CASE_FLT_FN (BUILT_IN_FABS):
target = expand_builtin_fabs (arglist, target, subtarget);
if (target)
- return target;
+ return target;
break;
CASE_FLT_FN (BUILT_IN_COPYSIGN):
rtx ops[3];
for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
- ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
+ ops[i] = expand_normal (TREE_VALUE (t));
return expand_builtin_apply (ops[0], ops[1], ops[2]);
}
memory returned by __builtin_apply. */
case BUILT_IN_RETURN:
if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
- expand_builtin_return (expand_expr (TREE_VALUE (arglist),
- NULL_RTX, VOIDmode, 0));
+ expand_builtin_return (expand_normal (TREE_VALUE (arglist)));
return const0_rtx;
case BUILT_IN_SAVEREGS:
/* Return the address of the first anonymous stack arg. */
case BUILT_IN_NEXT_ARG:
if (fold_builtin_next_arg (arglist))
- return const0_rtx;
+ return const0_rtx;
return expand_builtin_next_arg ();
case BUILT_IN_CLASSIFY_TYPE:
else
{
rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
- VOIDmode, 0);
- rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
- NULL_RTX, VOIDmode, 0);
+ VOIDmode, EXPAND_NORMAL);
+ rtx value = expand_normal (TREE_VALUE (TREE_CHAIN (arglist)));
if (value != const1_rtx)
{
if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
{
rtx buf_addr
- = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
+ = expand_normal (TREE_VALUE (arglist));
expand_builtin_update_setjmp_buf (buf_addr);
return const0_rtx;
case REAL_CST:
if (! TREE_CONSTANT_OVERFLOW (t))
{
- REAL_VALUE_TYPE c, cint;
+ REAL_VALUE_TYPE c, cint;
c = TREE_REAL_CST (t);
real_trunc (&cint, TYPE_MODE (TREE_TYPE (t)), &c);
}
/* Fold function call to builtin cabs, cabsf or cabsl. ARGLIST
- is the argument list and TYPE is the return type. Return
- NULL_TREE if no if no simplification can be made. */
+ is the argument list, TYPE is the return type and FNDECL is the
+ original function DECL. Return NULL_TREE if no simplification
+ can be made. */
static tree
-fold_builtin_cabs (tree arglist, tree type)
+fold_builtin_cabs (tree arglist, tree type, tree fndecl)
{
tree arg;
&& real_zerop (TREE_OPERAND (arg, 1)))
return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg, 0));
+ /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
+ if (TREE_CODE (arg) == NEGATE_EXPR
+ || TREE_CODE (arg) == CONJ_EXPR)
+ {
+ tree arglist = build_tree_list (NULL_TREE, TREE_OPERAND (arg, 0));
+ return build_function_call_expr (fndecl, arglist);
+ }
+
/* Don't do this when optimizing for size. */
if (flag_unsafe_math_optimizations
&& optimize && !optimize_size)
{
/* Optimize cbrt(expN(x)) -> expN(x/3). */
if (BUILTIN_EXPONENT_P (fcode))
- {
+ {
tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
const REAL_VALUE_TYPE third_trunc =
real_value_truncate (TYPE_MODE (type), dconstthird);
/* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
if (BUILTIN_SQRT_P (fcode))
- {
+ {
tree powfn = mathfn_built_in (type, BUILT_IN_POW);
if (powfn)
/* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
if (BUILTIN_CBRT_P (fcode))
- {
+ {
tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
if (tree_expr_nonnegative_p (arg0))
{
tree powfn = mathfn_built_in (type, BUILT_IN_POW);
if (powfn)
- {
+ {
tree tree_root;
REAL_VALUE_TYPE dconstroot;
-
+
real_arithmetic (&dconstroot, MULT_EXPR, &dconstthird, &dconstthird);
dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
tree_root = build_real (type, dconstroot);
}
}
}
-
+
/* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
if (fcode == BUILT_IN_POW || fcode == BUILT_IN_POWF
|| fcode == BUILT_IN_POWL)
- {
+ {
tree arg00 = TREE_VALUE (TREE_OPERAND (arg, 1));
tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
if (tree_expr_nonnegative_p (arg00))
STRIP_NOPS (expr);
return ((TREE_CODE (expr) == REAL_CST
- && ! TREE_CONSTANT_OVERFLOW (expr)
- && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
- || (TREE_CODE (expr) == COMPLEX_CST
- && real_dconstp (TREE_REALPART (expr), value)
- && real_zerop (TREE_IMAGPART (expr))));
+ && ! TREE_CONSTANT_OVERFLOW (expr)
+ && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
+ || (TREE_CODE (expr) == COMPLEX_CST
+ && real_dconstp (TREE_REALPART (expr), value)
+ && real_zerop (TREE_IMAGPART (expr))));
}
/* A subroutine of fold_builtin to fold the various logarithmic
return build_real (type, dconst0);
/* Optimize logN(N) = 1.0. If N can't be truncated to MODE
- exactly, then only do this if flag_unsafe_math_optimizations. */
+ exactly, then only do this if flag_unsafe_math_optimizations. */
if (exact_real_truncate (TYPE_MODE (type), value)
|| flag_unsafe_math_optimizations)
- {
+ {
const REAL_VALUE_TYPE value_truncate =
real_value_truncate (TYPE_MODE (type), *value);
if (real_dconstp (arg, &value_truncate))
return fold_convert (type, TREE_VALUE (TREE_OPERAND (arg, 1)));
/* Optimize logN(func()) for various exponential functions. We
- want to determine the value "x" and the power "exponent" in
- order to transform logN(x**exponent) into exponent*logN(x). */
+ want to determine the value "x" and the power "exponent" in
+ order to transform logN(x**exponent) into exponent*logN(x). */
if (flag_unsafe_math_optimizations)
- {
+ {
tree exponent = 0, x = 0;
switch (fcode)
/* Optimize pow(expN(x),y) = expN(x*y). */
if (BUILTIN_EXPONENT_P (fcode))
- {
+ {
tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
arg = fold_build2 (MULT_EXPR, type, arg, arg1);
/* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
if (BUILTIN_SQRT_P (fcode))
- {
+ {
tree narg0 = TREE_VALUE (TREE_OPERAND (arg0, 1));
tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
build_real (type, dconsthalf));
/* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
if (BUILTIN_CBRT_P (fcode))
- {
+ {
tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
if (tree_expr_nonnegative_p (arg))
{
return build_function_call_expr (fndecl, arglist);
}
}
-
+
/* Optimize pow(pow(x,y),z) = pow(x,y*z). */
if (fcode == BUILT_IN_POW || fcode == BUILT_IN_POWF
|| fcode == BUILT_IN_POWL)
- {
+ {
tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
/* Optimize expN(1.0) = N. */
if (real_onep (arg))
- {
+ {
REAL_VALUE_TYPE cst;
real_convert (&cst, TYPE_MODE (type), value);
if (flag_unsafe_math_optimizations
&& TREE_CODE (arg) == REAL_CST
&& ! TREE_CONSTANT_OVERFLOW (arg))
- {
+ {
REAL_VALUE_TYPE cint;
REAL_VALUE_TYPE c;
HOST_WIDE_INT n;
/* Optimize expN(logN(x)) = x. */
if (flag_unsafe_math_optimizations)
- {
+ {
const enum built_in_function fcode = builtin_mathfn_code (arg);
if ((value == &dconste
/* If SRC and DEST are the same (and not volatile), return DEST+LEN. */
if (operand_equal_p (src, dest, 0))
- {
+ {
if (endp == 0)
return omit_one_operand (type, dest, len);
if (endp == 2)
len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
ssize_int (1));
-
+
len = fold_convert (TREE_TYPE (dest), len);
len = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len);
return fold_convert (type, len);
c1 = TREE_REAL_CST (arg1);
c2 = TREE_REAL_CST (arg2);
+ /* c1.sign := c2.sign. */
real_copysign (&c1, &c2);
return build_real (type, c1);
- c1.sign = c2.sign;
}
/* copysign(X, Y) is fabs(X) when Y is always non-negative.
arg, integer_zero_node);
if (in_gimple_form && !TREE_CONSTANT (arg))
- return NULL_TREE;
+ return NULL_TREE;
else
- return arg;
+ return arg;
}
}
arg = fold_build2 (LE_EXPR, integer_type_node, arg,
build_int_cst (unsigned_type_node, 9));
if (in_gimple_form && !TREE_CONSTANT (arg))
- return NULL_TREE;
+ return NULL_TREE;
else
- return arg;
+ return arg;
}
}
{
case BUILT_IN_ISINF:
if (!MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
- return omit_one_operand (type, integer_zero_node, arg);
+ return omit_one_operand (type, integer_zero_node, arg);
if (TREE_CODE (arg) == REAL_CST)
{
case BUILT_IN_FINITE:
if (!MODE_HAS_NANS (TYPE_MODE (TREE_TYPE (arg)))
- && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
- return omit_one_operand (type, integer_zero_node, arg);
+ && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
+ return omit_one_operand (type, integer_zero_node, arg);
if (TREE_CODE (arg) == REAL_CST)
{
case BUILT_IN_ISNAN:
if (!MODE_HAS_NANS (TYPE_MODE (TREE_TYPE (arg))))
- return omit_one_operand (type, integer_zero_node, arg);
+ return omit_one_operand (type, integer_zero_node, arg);
if (TREE_CODE (arg) == REAL_CST)
{
arg0 = TREE_VALUE (arglist);
arg1 = TREE_VALUE (TREE_CHAIN (arglist));
-
+
type0 = TREE_TYPE (arg0);
type1 = TREE_TYPE (arg1);
-
+
code0 = TREE_CODE (type0);
code1 = TREE_CODE (type1);
-
+
if (code0 == REAL_TYPE && code1 == REAL_TYPE)
/* Choose the wider of two real types. */
cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
IDENTIFIER_POINTER (DECL_NAME (fndecl)));
return error_mark_node;
}
-
+
arg0 = fold_convert (cmp_type, arg0);
arg1 = fold_convert (cmp_type, arg1);
CASE_FLT_FN (BUILT_IN_CREAL):
if (validate_arglist (arglist, COMPLEX_TYPE, VOID_TYPE))
- return non_lvalue (fold_build1 (REALPART_EXPR, type,
+ return non_lvalue (fold_build1 (REALPART_EXPR, type,
TREE_VALUE (arglist)));
break;
CASE_FLT_FN (BUILT_IN_CIMAG):
if (validate_arglist (arglist, COMPLEX_TYPE, VOID_TYPE))
- return non_lvalue (fold_build1 (IMAGPART_EXPR, type,
+ return non_lvalue (fold_build1 (IMAGPART_EXPR, type,
TREE_VALUE (arglist)));
break;
CASE_FLT_FN (BUILT_IN_CABS):
- return fold_builtin_cabs (arglist, type);
+ return fold_builtin_cabs (arglist, type, fndecl);
CASE_FLT_FN (BUILT_IN_SQRT):
return fold_builtin_sqrt (arglist, type);
CASE_FLT_FN (BUILT_IN_LLCEIL):
CASE_FLT_FN (BUILT_IN_LFLOOR):
CASE_FLT_FN (BUILT_IN_LLFLOOR):
- CASE_FLT_FN (BUILT_IN_LROUND):
+ CASE_FLT_FN (BUILT_IN_LROUND):
CASE_FLT_FN (BUILT_IN_LLROUND):
return fold_builtin_int_roundingfn (fndecl, arglist);
/* If the requested length is zero, or the src parameter string
length is zero, return the dst parameter. */
if (integer_zerop (len) || (p && *p == '\0'))
- return omit_two_operands (TREE_TYPE (dst), dst, src, len);
+ return omit_two_operands (TREE_TYPE (dst), dst, src, len);
/* If the requested len is greater than or equal to the string
- length, call strcat. */
+ length, call strcat. */
if (TREE_CODE (len) == INTEGER_CST && p
&& compare_tree_int (len, strlen (p)) >= 0)
{
/* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
when we checked the arguments and if needed issued a warning. */
else if (!TREE_CHAIN (arglist)
- || !integer_zerop (TREE_VALUE (arglist))
- || !integer_zerop (TREE_VALUE (TREE_CHAIN (arglist)))
- || TREE_CHAIN (TREE_CHAIN (arglist)))
+ || !integer_zerop (TREE_VALUE (arglist))
+ || !integer_zerop (TREE_VALUE (TREE_CHAIN (arglist)))
+ || TREE_CHAIN (TREE_CHAIN (arglist)))
{
tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
tree arg = TREE_VALUE (arglist);
if (TREE_CHAIN (arglist))
- {
- error ("%<va_start%> used with too many arguments");
- return true;
- }
+ {
+ error ("%<va_start%> used with too many arguments");
+ return true;
+ }
/* Strip off all nops for the sake of the comparison. This
is not quite the same as STRIP_NOPS. It does more.
|| TREE_CODE (arg) == INDIRECT_REF)
arg = TREE_OPERAND (arg, 0);
if (arg != last_parm)
- {
+ {
/* FIXME: Sometimes with the tree optimizers we can get the
not the last argument even though the user used the last
argument. We just warn and set the arg to be the last
warning (0, "second parameter of %<va_start%> not last named argument");
}
/* We want to verify the second parameter just once before the tree
- optimizers are run and then avoid keeping it in the tree,
- as otherwise we could warn even for correct code like:
- void foo (int i, ...)
- { va_list ap; i++; va_start (ap, i); va_end (ap); } */
+ optimizers are run and then avoid keeping it in the tree,
+ as otherwise we could warn even for correct code like:
+ void foo (int i, ...)
+ { va_list ap; i++; va_start (ap, i); va_end (ap); } */
TREE_VALUE (arglist) = integer_zero_node;
TREE_CHAIN (arglist) = build_tree_list (NULL, integer_zero_node);
}
if (call && retval)
{
- retval = convert
+ retval = fold_convert
(TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
retval);
return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
/* If SRC and DEST are the same (and not volatile), return DEST. */
if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
-
+
if (! host_integerp (size, 1))
return 0;
if (!init_target_chars())
return 0;
-
+
if (strcmp (fmt_str, target_percent_s) == 0 || strchr (fmt_str, target_percent) == NULL)
{
const char *str;
if (!init_target_chars())
return 0;
-
+
/* If the format doesn't contain % args or %%, use strcpy. */
if (strchr (fmt_str, target_percent) == NULL)
{
target_percent_s_newline[1] = target_s;
target_percent_s_newline[2] = target_newline;
target_percent_s_newline[3] = '\0';
-
+
init = true;
}
return true;