1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
30 #include "tree-gimple.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names[(int) END_BUILTINS] =
65 #include "builtins.def"
69 /* Setup an array of _DECL trees, make sure each element is
70 initialized to NULL_TREE. */
71 tree built_in_decls[(int) END_BUILTINS];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 It may be NULL_TREE when this is invalid (for instance runtime is not
74 required to implement the function call in all cases). */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
129 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
132 static rtx expand_builtin_bzero (tree);
133 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_alloca (tree, rtx);
139 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static rtx expand_builtin_fputs (tree, rtx, bool);
142 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
143 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_expect (tree, tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (tree);
151 static tree fold_builtin_inf (tree, int);
152 static tree fold_builtin_nan (tree, tree, int);
153 static tree rewrite_call_expr (tree, int, tree, int, ...);
154 static bool validate_arg (const_tree, enum tree_code code);
155 static bool integer_valued_real_p (tree);
156 static tree fold_trunc_transparent_mathfn (tree, tree);
157 static bool readonly_data_expr (tree);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_signbit (tree, rtx);
160 static tree fold_builtin_sqrt (tree, tree);
161 static tree fold_builtin_cbrt (tree, tree);
162 static tree fold_builtin_pow (tree, tree, tree, tree);
163 static tree fold_builtin_powi (tree, tree, tree, tree);
164 static tree fold_builtin_cos (tree, tree, tree);
165 static tree fold_builtin_cosh (tree, tree, tree);
166 static tree fold_builtin_tan (tree, tree);
167 static tree fold_builtin_trunc (tree, tree);
168 static tree fold_builtin_floor (tree, tree);
169 static tree fold_builtin_ceil (tree, tree);
170 static tree fold_builtin_round (tree, tree);
171 static tree fold_builtin_int_roundingfn (tree, tree);
172 static tree fold_builtin_bitop (tree, tree);
173 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
174 static tree fold_builtin_strchr (tree, tree, tree);
175 static tree fold_builtin_memchr (tree, tree, tree, tree);
176 static tree fold_builtin_memcmp (tree, tree, tree);
177 static tree fold_builtin_strcmp (tree, tree);
178 static tree fold_builtin_strncmp (tree, tree, tree);
179 static tree fold_builtin_signbit (tree, tree);
180 static tree fold_builtin_copysign (tree, tree, tree, tree);
181 static tree fold_builtin_isascii (tree);
182 static tree fold_builtin_toascii (tree);
183 static tree fold_builtin_isdigit (tree);
184 static tree fold_builtin_fabs (tree, tree);
185 static tree fold_builtin_abs (tree, tree);
186 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
188 static tree fold_builtin_n (tree, tree *, int, bool);
189 static tree fold_builtin_0 (tree, bool);
190 static tree fold_builtin_1 (tree, tree, bool);
191 static tree fold_builtin_2 (tree, tree, tree, bool);
192 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
193 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
194 static tree fold_builtin_varargs (tree, tree, bool);
196 static tree fold_builtin_strpbrk (tree, tree, tree);
197 static tree fold_builtin_strstr (tree, tree, tree);
198 static tree fold_builtin_strrchr (tree, tree, tree);
199 static tree fold_builtin_strcat (tree, tree);
200 static tree fold_builtin_strncat (tree, tree, tree);
201 static tree fold_builtin_strspn (tree, tree);
202 static tree fold_builtin_strcspn (tree, tree);
203 static tree fold_builtin_sprintf (tree, tree, tree, int);
205 static rtx expand_builtin_object_size (tree);
206 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
207 enum built_in_function);
208 static void maybe_emit_chk_warning (tree, enum built_in_function);
209 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
210 static tree fold_builtin_object_size (tree, tree);
211 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
212 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
213 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
214 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
215 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
216 enum built_in_function);
217 static bool init_target_chars (void);
219 static unsigned HOST_WIDE_INT target_newline;
220 static unsigned HOST_WIDE_INT target_percent;
221 static unsigned HOST_WIDE_INT target_c;
222 static unsigned HOST_WIDE_INT target_s;
223 static char target_percent_c[3];
224 static char target_percent_s[3];
225 static char target_percent_s_newline[4];
226 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_arg2 (tree, tree, tree,
229 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
230 static tree do_mpfr_arg3 (tree, tree, tree, tree,
231 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
232 static tree do_mpfr_sincos (tree, tree, tree);
233 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
234 static tree do_mpfr_bessel_n (tree, tree, tree,
235 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
236 const REAL_VALUE_TYPE *, bool);
237 static tree do_mpfr_remquo (tree, tree, tree);
238 static tree do_mpfr_lgamma_r (tree, tree, tree);
241 /* Return true if NODE should be considered for inline expansion regardless
242 of the optimization level. This means whenever a function is invoked with
243 its "internal" name, which normally contains the prefix "__builtin". */
245 static bool called_as_built_in (tree node)
247 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
248 if (strncmp (name, "__builtin_", 10) == 0)
250 if (strncmp (name, "__sync_", 7) == 0)
255 /* Return the alignment in bits of EXP, a pointer valued expression.
256 But don't return more than MAX_ALIGN no matter what.
257 The alignment returned is, by default, the alignment of the thing that
258 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
260 Otherwise, look at the expression to see if we can do better, i.e., if the
261 expression is actually pointing at an object whose alignment is tighter. */
264 get_pointer_alignment (tree exp, unsigned int max_align)
266 unsigned int align, inner;
268 /* We rely on TER to compute accurate alignment information. */
269 if (!(optimize && flag_tree_ter))
272 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
275 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
276 align = MIN (align, max_align);
280 switch (TREE_CODE (exp))
284 case NON_LVALUE_EXPR:
285 exp = TREE_OPERAND (exp, 0);
286 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
289 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
290 align = MIN (inner, max_align);
293 case POINTER_PLUS_EXPR:
294 /* If sum of pointer + int, restrict our maximum alignment to that
295 imposed by the integer. If not, we can't do any better than
297 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
300 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
301 & (max_align / BITS_PER_UNIT - 1))
305 exp = TREE_OPERAND (exp, 0);
309 /* See what we are pointing at and look at its alignment. */
310 exp = TREE_OPERAND (exp, 0);
312 if (handled_component_p (exp))
314 HOST_WIDE_INT bitsize, bitpos;
316 enum machine_mode mode;
317 int unsignedp, volatilep;
319 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
320 &mode, &unsignedp, &volatilep, true);
322 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
323 if (offset && TREE_CODE (offset) == PLUS_EXPR
324 && host_integerp (TREE_OPERAND (offset, 1), 1))
326 /* Any overflow in calculating offset_bits won't change
329 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
333 inner = MIN (inner, (offset_bits & -offset_bits));
334 offset = TREE_OPERAND (offset, 0);
336 if (offset && TREE_CODE (offset) == MULT_EXPR
337 && host_integerp (TREE_OPERAND (offset, 1), 1))
339 /* Any overflow in calculating offset_factor won't change
341 unsigned offset_factor
342 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
346 inner = MIN (inner, (offset_factor & -offset_factor));
349 inner = MIN (inner, BITS_PER_UNIT);
352 align = MIN (inner, DECL_ALIGN (exp));
353 #ifdef CONSTANT_ALIGNMENT
354 else if (CONSTANT_CLASS_P (exp))
355 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
357 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
358 || TREE_CODE (exp) == INDIRECT_REF)
359 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
361 align = MIN (align, inner);
362 return MIN (align, max_align);
370 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
371 way, because it could contain a zero byte in the middle.
372 TREE_STRING_LENGTH is the size of the character array, not the string.
374 ONLY_VALUE should be nonzero if the result is not going to be emitted
375 into the instruction stream and zero if it is going to be expanded.
376 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
377 is returned, otherwise NULL, since
378 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
379 evaluate the side-effects.
381 The value returned is of type `ssizetype'.
383 Unfortunately, string_constant can't access the values of const char
384 arrays with initializers, so neither can we do so here. */
387 c_strlen (tree src, int only_value)
390 HOST_WIDE_INT offset;
395 if (TREE_CODE (src) == COND_EXPR
396 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
400 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
401 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
402 if (tree_int_cst_equal (len1, len2))
406 if (TREE_CODE (src) == COMPOUND_EXPR
407 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
408 return c_strlen (TREE_OPERAND (src, 1), only_value);
410 src = string_constant (src, &offset_node);
414 max = TREE_STRING_LENGTH (src) - 1;
415 ptr = TREE_STRING_POINTER (src);
417 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
419 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
420 compute the offset to the following null if we don't know where to
421 start searching for it. */
424 for (i = 0; i < max; i++)
428 /* We don't know the starting offset, but we do know that the string
429 has no internal zero bytes. We can assume that the offset falls
430 within the bounds of the string; otherwise, the programmer deserves
431 what he gets. Subtract the offset from the length of the string,
432 and return that. This would perhaps not be valid if we were dealing
433 with named arrays in addition to literal string constants. */
435 return size_diffop (size_int (max), offset_node);
438 /* We have a known offset into the string. Start searching there for
439 a null character if we can represent it as a single HOST_WIDE_INT. */
440 if (offset_node == 0)
442 else if (! host_integerp (offset_node, 0))
445 offset = tree_low_cst (offset_node, 0);
447 /* If the offset is known to be out of bounds, warn, and call strlen at
449 if (offset < 0 || offset > max)
451 warning (0, "offset outside bounds of constant string");
455 /* Use strlen to search for the first zero byte. Since any strings
456 constructed with build_string will have nulls appended, we win even
457 if we get handed something like (char[4])"abcd".
459 Since OFFSET is our starting index into the string, no further
460 calculation is needed. */
461 return ssize_int (strlen (ptr + offset));
464 /* Return a char pointer for a C string if it is a string constant
465 or sum of string constant and integer constant. */
472 src = string_constant (src, &offset_node);
476 if (offset_node == 0)
477 return TREE_STRING_POINTER (src);
478 else if (!host_integerp (offset_node, 1)
479 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
482 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
485 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
486 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
489 c_readstr (const char *str, enum machine_mode mode)
495 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
500 for (i = 0; i < GET_MODE_SIZE (mode); i++)
503 if (WORDS_BIG_ENDIAN)
504 j = GET_MODE_SIZE (mode) - i - 1;
505 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
506 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
507 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
509 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
512 ch = (unsigned char) str[i];
513 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
515 return immed_double_const (c[0], c[1], mode);
518 /* Cast a target constant CST to target CHAR and if that value fits into
519 host char type, return zero and put that value into variable pointed to by
523 target_char_cast (tree cst, char *p)
525 unsigned HOST_WIDE_INT val, hostval;
527 if (!host_integerp (cst, 1)
528 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
531 val = tree_low_cst (cst, 1);
532 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
533 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
536 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
537 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
546 /* Similar to save_expr, but assumes that arbitrary code is not executed
547 in between the multiple evaluations. In particular, we assume that a
548 non-addressable local variable will not be modified. */
551 builtin_save_expr (tree exp)
553 if (TREE_ADDRESSABLE (exp) == 0
554 && (TREE_CODE (exp) == PARM_DECL
555 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
558 return save_expr (exp);
561 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
562 times to get the address of either a higher stack frame, or a return
563 address located within it (depending on FNDECL_CODE). */
566 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
570 #ifdef INITIAL_FRAME_ADDRESS_RTX
571 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
575 /* For a zero count with __builtin_return_address, we don't care what
576 frame address we return, because target-specific definitions will
577 override us. Therefore frame pointer elimination is OK, and using
578 the soft frame pointer is OK.
580 For a nonzero count, or a zero count with __builtin_frame_address,
581 we require a stable offset from the current frame pointer to the
582 previous one, so we must use the hard frame pointer, and
583 we must disable frame pointer elimination. */
584 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
585 tem = frame_pointer_rtx;
588 tem = hard_frame_pointer_rtx;
590 /* Tell reload not to eliminate the frame pointer. */
591 current_function_accesses_prior_frames = 1;
595 /* Some machines need special handling before we can access
596 arbitrary frames. For example, on the SPARC, we must first flush
597 all register windows to the stack. */
598 #ifdef SETUP_FRAME_ADDRESSES
600 SETUP_FRAME_ADDRESSES ();
603 /* On the SPARC, the return address is not in the frame, it is in a
604 register. There is no way to access it off of the current frame
605 pointer, but it can be accessed off the previous frame pointer by
606 reading the value from the register window save area. */
607 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
608 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
612 /* Scan back COUNT frames to the specified frame. */
613 for (i = 0; i < count; i++)
615 /* Assume the dynamic chain pointer is in the word that the
616 frame address points to, unless otherwise specified. */
617 #ifdef DYNAMIC_CHAIN_ADDRESS
618 tem = DYNAMIC_CHAIN_ADDRESS (tem);
620 tem = memory_address (Pmode, tem);
621 tem = gen_frame_mem (Pmode, tem);
622 tem = copy_to_reg (tem);
625 /* For __builtin_frame_address, return what we've got. But, on
626 the SPARC for example, we may have to add a bias. */
627 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
628 #ifdef FRAME_ADDR_RTX
629 return FRAME_ADDR_RTX (tem);
634 /* For __builtin_return_address, get the return address from that frame. */
635 #ifdef RETURN_ADDR_RTX
636 tem = RETURN_ADDR_RTX (count, tem);
638 tem = memory_address (Pmode,
639 plus_constant (tem, GET_MODE_SIZE (Pmode)));
640 tem = gen_frame_mem (Pmode, tem);
645 /* Alias set used for setjmp buffer. */
646 static alias_set_type setjmp_alias_set = -1;
648 /* Construct the leading half of a __builtin_setjmp call. Control will
649 return to RECEIVER_LABEL. This is also called directly by the SJLJ
650 exception handling code. */
653 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
655 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
659 if (setjmp_alias_set == -1)
660 setjmp_alias_set = new_alias_set ();
662 buf_addr = convert_memory_address (Pmode, buf_addr);
664 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
666 /* We store the frame pointer and the address of receiver_label in
667 the buffer and use the rest of it for the stack save area, which
668 is machine-dependent. */
670 mem = gen_rtx_MEM (Pmode, buf_addr);
671 set_mem_alias_set (mem, setjmp_alias_set);
672 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
674 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
675 set_mem_alias_set (mem, setjmp_alias_set);
677 emit_move_insn (validize_mem (mem),
678 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
680 stack_save = gen_rtx_MEM (sa_mode,
681 plus_constant (buf_addr,
682 2 * GET_MODE_SIZE (Pmode)));
683 set_mem_alias_set (stack_save, setjmp_alias_set);
684 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
686 /* If there is further processing to do, do it. */
687 #ifdef HAVE_builtin_setjmp_setup
688 if (HAVE_builtin_setjmp_setup)
689 emit_insn (gen_builtin_setjmp_setup (buf_addr));
692 /* Tell optimize_save_area_alloca that extra work is going to
693 need to go on during alloca. */
694 current_function_calls_setjmp = 1;
696 /* We have a nonlocal label. */
697 current_function_has_nonlocal_label = 1;
700 /* Construct the trailing part of a __builtin_setjmp call. This is
701 also called directly by the SJLJ exception handling code. */
704 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
706 /* Clobber the FP when we get here, so we have to make sure it's
707 marked as used by this function. */
708 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
710 /* Mark the static chain as clobbered here so life information
711 doesn't get messed up for it. */
712 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
714 /* Now put in the code to restore the frame pointer, and argument
715 pointer, if needed. */
716 #ifdef HAVE_nonlocal_goto
717 if (! HAVE_nonlocal_goto)
720 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
721 /* This might change the hard frame pointer in ways that aren't
722 apparent to early optimization passes, so force a clobber. */
723 emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
726 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
727 if (fixed_regs[ARG_POINTER_REGNUM])
729 #ifdef ELIMINABLE_REGS
731 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
733 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
734 if (elim_regs[i].from == ARG_POINTER_REGNUM
735 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
738 if (i == ARRAY_SIZE (elim_regs))
741 /* Now restore our arg pointer from the address at which it
742 was saved in our stack frame. */
743 emit_move_insn (virtual_incoming_args_rtx,
744 copy_to_reg (get_arg_pointer_save_area ()));
749 #ifdef HAVE_builtin_setjmp_receiver
750 if (HAVE_builtin_setjmp_receiver)
751 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
754 #ifdef HAVE_nonlocal_goto_receiver
755 if (HAVE_nonlocal_goto_receiver)
756 emit_insn (gen_nonlocal_goto_receiver ());
761 /* We must not allow the code we just generated to be reordered by
762 scheduling. Specifically, the update of the frame pointer must
763 happen immediately, not later. */
764 emit_insn (gen_blockage ());
767 /* __builtin_longjmp is passed a pointer to an array of five words (not
768 all will be used on all machines). It operates similarly to the C
769 library function of the same name, but is more efficient. Much of
770 the code below is copied from the handling of non-local gotos. */
773 expand_builtin_longjmp (rtx buf_addr, rtx value)
775 rtx fp, lab, stack, insn, last;
776 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
778 if (setjmp_alias_set == -1)
779 setjmp_alias_set = new_alias_set ();
781 buf_addr = convert_memory_address (Pmode, buf_addr);
783 buf_addr = force_reg (Pmode, buf_addr);
785 /* We used to store value in static_chain_rtx, but that fails if pointers
786 are smaller than integers. We instead require that the user must pass
787 a second argument of 1, because that is what builtin_setjmp will
788 return. This also makes EH slightly more efficient, since we are no
789 longer copying around a value that we don't care about. */
790 gcc_assert (value == const1_rtx);
792 last = get_last_insn ();
793 #ifdef HAVE_builtin_longjmp
794 if (HAVE_builtin_longjmp)
795 emit_insn (gen_builtin_longjmp (buf_addr));
799 fp = gen_rtx_MEM (Pmode, buf_addr);
800 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
801 GET_MODE_SIZE (Pmode)));
803 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
804 2 * GET_MODE_SIZE (Pmode)));
805 set_mem_alias_set (fp, setjmp_alias_set);
806 set_mem_alias_set (lab, setjmp_alias_set);
807 set_mem_alias_set (stack, setjmp_alias_set);
809 /* Pick up FP, label, and SP from the block and jump. This code is
810 from expand_goto in stmt.c; see there for detailed comments. */
811 #ifdef HAVE_nonlocal_goto
812 if (HAVE_nonlocal_goto)
813 /* We have to pass a value to the nonlocal_goto pattern that will
814 get copied into the static_chain pointer, but it does not matter
815 what that value is, because builtin_setjmp does not use it. */
816 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
820 lab = copy_to_reg (lab);
822 emit_insn (gen_rtx_CLOBBER (VOIDmode,
823 gen_rtx_MEM (BLKmode,
824 gen_rtx_SCRATCH (VOIDmode))));
825 emit_insn (gen_rtx_CLOBBER (VOIDmode,
826 gen_rtx_MEM (BLKmode,
827 hard_frame_pointer_rtx)));
829 emit_move_insn (hard_frame_pointer_rtx, fp);
830 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
832 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
833 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
834 emit_indirect_jump (lab);
838 /* Search backwards and mark the jump insn as a non-local goto.
839 Note that this precludes the use of __builtin_longjmp to a
840 __builtin_setjmp target in the same function. However, we've
841 already cautioned the user that these functions are for
842 internal exception handling use only. */
843 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
845 gcc_assert (insn != last);
849 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
853 else if (CALL_P (insn))
858 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
859 and the address of the save area. */
862 expand_builtin_nonlocal_goto (tree exp)
864 tree t_label, t_save_area;
865 rtx r_label, r_save_area, r_fp, r_sp, insn;
867 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
870 t_label = CALL_EXPR_ARG (exp, 0);
871 t_save_area = CALL_EXPR_ARG (exp, 1);
873 r_label = expand_normal (t_label);
874 r_label = convert_memory_address (Pmode, r_label);
875 r_save_area = expand_normal (t_save_area);
876 r_save_area = convert_memory_address (Pmode, r_save_area);
877 r_fp = gen_rtx_MEM (Pmode, r_save_area);
878 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
879 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
881 current_function_has_nonlocal_goto = 1;
883 #ifdef HAVE_nonlocal_goto
884 /* ??? We no longer need to pass the static chain value, afaik. */
885 if (HAVE_nonlocal_goto)
886 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
890 r_label = copy_to_reg (r_label);
892 emit_insn (gen_rtx_CLOBBER (VOIDmode,
893 gen_rtx_MEM (BLKmode,
894 gen_rtx_SCRATCH (VOIDmode))));
896 emit_insn (gen_rtx_CLOBBER (VOIDmode,
897 gen_rtx_MEM (BLKmode,
898 hard_frame_pointer_rtx)));
900 /* Restore frame pointer for containing function.
901 This sets the actual hard register used for the frame pointer
902 to the location of the function's incoming static chain info.
903 The non-local goto handler will then adjust it to contain the
904 proper value and reload the argument pointer, if needed. */
905 emit_move_insn (hard_frame_pointer_rtx, r_fp);
906 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
908 /* USE of hard_frame_pointer_rtx added for consistency;
909 not clear if really needed. */
910 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
911 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
913 /* If the architecture is using a GP register, we must
914 conservatively assume that the target function makes use of it.
915 The prologue of functions with nonlocal gotos must therefore
916 initialize the GP register to the appropriate value, and we
917 must then make sure that this value is live at the point
918 of the jump. (Note that this doesn't necessarily apply
919 to targets with a nonlocal_goto pattern; they are free
920 to implement it in their own way. Note also that this is
921 a no-op if the GP register is a global invariant.) */
922 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
923 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
924 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
926 emit_indirect_jump (r_label);
929 /* Search backwards to the jump insn and mark it as a
931 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
935 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
936 const0_rtx, REG_NOTES (insn));
939 else if (CALL_P (insn))
946 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
947 (not all will be used on all machines) that was passed to __builtin_setjmp.
948 It updates the stack pointer in that block to correspond to the current
952 expand_builtin_update_setjmp_buf (rtx buf_addr)
954 enum machine_mode sa_mode = Pmode;
958 #ifdef HAVE_save_stack_nonlocal
959 if (HAVE_save_stack_nonlocal)
960 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
962 #ifdef STACK_SAVEAREA_MODE
963 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
967 = gen_rtx_MEM (sa_mode,
970 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
974 emit_insn (gen_setjmp ());
977 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
980 /* Expand a call to __builtin_prefetch. For a target that does not support
981 data prefetch, evaluate the memory address argument in case it has side
985 expand_builtin_prefetch (tree exp)
987 tree arg0, arg1, arg2;
991 if (!validate_arglist (exp, POINTER_TYPE, 0))
994 arg0 = CALL_EXPR_ARG (exp, 0);
996 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
997 zero (read) and argument 2 (locality) defaults to 3 (high degree of
999 nargs = call_expr_nargs (exp);
1001 arg1 = CALL_EXPR_ARG (exp, 1);
1003 arg1 = integer_zero_node;
1005 arg2 = CALL_EXPR_ARG (exp, 2);
1007 arg2 = build_int_cst (NULL_TREE, 3);
1009 /* Argument 0 is an address. */
1010 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1012 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1013 if (TREE_CODE (arg1) != INTEGER_CST)
1015 error ("second argument to %<__builtin_prefetch%> must be a constant");
1016 arg1 = integer_zero_node;
1018 op1 = expand_normal (arg1);
1019 /* Argument 1 must be either zero or one. */
1020 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1022 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1027 /* Argument 2 (locality) must be a compile-time constant int. */
1028 if (TREE_CODE (arg2) != INTEGER_CST)
1030 error ("third argument to %<__builtin_prefetch%> must be a constant");
1031 arg2 = integer_zero_node;
1033 op2 = expand_normal (arg2);
1034 /* Argument 2 must be 0, 1, 2, or 3. */
1035 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1037 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1041 #ifdef HAVE_prefetch
1044 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1046 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1047 || (GET_MODE (op0) != Pmode))
1049 op0 = convert_memory_address (Pmode, op0);
1050 op0 = force_reg (Pmode, op0);
1052 emit_insn (gen_prefetch (op0, op1, op2));
1056 /* Don't do anything with direct references to volatile memory, but
1057 generate code to handle other side effects. */
1058 if (!MEM_P (op0) && side_effects_p (op0))
1062 /* Get a MEM rtx for expression EXP which is the address of an operand
1063 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1064 the maximum length of the block of memory that might be accessed or
1068 get_memory_rtx (tree exp, tree len)
1070 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1071 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1073 /* Get an expression we can use to find the attributes to assign to MEM.
1074 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1075 we can. First remove any nops. */
1076 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
1077 || TREE_CODE (exp) == NON_LVALUE_EXPR)
1078 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1079 exp = TREE_OPERAND (exp, 0);
1081 if (TREE_CODE (exp) == ADDR_EXPR)
1082 exp = TREE_OPERAND (exp, 0);
1083 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1084 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1088 /* Honor attributes derived from exp, except for the alias set
1089 (as builtin stringops may alias with anything) and the size
1090 (as stringops may access multiple array elements). */
1093 set_mem_attributes (mem, exp, 0);
1095 /* Allow the string and memory builtins to overflow from one
1096 field into another, see http://gcc.gnu.org/PR23561.
1097 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1098 memory accessed by the string or memory builtin will fit
1099 within the field. */
1100 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1102 tree mem_expr = MEM_EXPR (mem);
1103 HOST_WIDE_INT offset = -1, length = -1;
1106 while (TREE_CODE (inner) == ARRAY_REF
1107 || TREE_CODE (inner) == NOP_EXPR
1108 || TREE_CODE (inner) == CONVERT_EXPR
1109 || TREE_CODE (inner) == NON_LVALUE_EXPR
1110 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1111 || TREE_CODE (inner) == SAVE_EXPR)
1112 inner = TREE_OPERAND (inner, 0);
1114 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1116 if (MEM_OFFSET (mem)
1117 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1118 offset = INTVAL (MEM_OFFSET (mem));
1120 if (offset >= 0 && len && host_integerp (len, 0))
1121 length = tree_low_cst (len, 0);
1123 while (TREE_CODE (inner) == COMPONENT_REF)
1125 tree field = TREE_OPERAND (inner, 1);
1126 gcc_assert (! DECL_BIT_FIELD (field));
1127 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1128 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1131 && TYPE_SIZE_UNIT (TREE_TYPE (inner))
1132 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
1135 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
1136 /* If we can prove the memory starting at XEXP (mem, 0)
1137 and ending at XEXP (mem, 0) + LENGTH will fit into
1138 this field, we can keep that COMPONENT_REF in MEM_EXPR. */
1141 && offset + length <= size)
1146 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1147 offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
1148 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1156 mem_expr = TREE_OPERAND (mem_expr, 0);
1157 inner = TREE_OPERAND (inner, 0);
1160 if (mem_expr == NULL)
1162 if (mem_expr != MEM_EXPR (mem))
1164 set_mem_expr (mem, mem_expr);
1165 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1168 set_mem_alias_set (mem, 0);
1169 set_mem_size (mem, NULL_RTX);
1175 /* Built-in functions to perform an untyped call and return. */
1177 /* For each register that may be used for calling a function, this
1178 gives a mode used to copy the register's value. VOIDmode indicates
1179 the register is not used for calling a function. If the machine
1180 has register windows, this gives only the outbound registers.
1181 INCOMING_REGNO gives the corresponding inbound register. */
1182 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1184 /* For each register that may be used for returning values, this gives
1185 a mode used to copy the register's value. VOIDmode indicates the
1186 register is not used for returning values. If the machine has
1187 register windows, this gives only the outbound registers.
1188 INCOMING_REGNO gives the corresponding inbound register. */
1189 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1191 /* For each register that may be used for calling a function, this
1192 gives the offset of that register into the block returned by
1193 __builtin_apply_args. 0 indicates that the register is not
1194 used for calling a function. */
1195 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1197 /* Return the size required for the block returned by __builtin_apply_args,
1198 and initialize apply_args_mode. */
1201 apply_args_size (void)
1203 static int size = -1;
1206 enum machine_mode mode;
1208 /* The values computed by this function never change. */
1211 /* The first value is the incoming arg-pointer. */
1212 size = GET_MODE_SIZE (Pmode);
1214 /* The second value is the structure value address unless this is
1215 passed as an "invisible" first argument. */
1216 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1217 size += GET_MODE_SIZE (Pmode);
1219 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1220 if (FUNCTION_ARG_REGNO_P (regno))
1222 mode = reg_raw_mode[regno];
1224 gcc_assert (mode != VOIDmode);
1226 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1227 if (size % align != 0)
1228 size = CEIL (size, align) * align;
1229 apply_args_reg_offset[regno] = size;
1230 size += GET_MODE_SIZE (mode);
1231 apply_args_mode[regno] = mode;
1235 apply_args_mode[regno] = VOIDmode;
1236 apply_args_reg_offset[regno] = 0;
1242 /* Return the size required for the block returned by __builtin_apply,
1243 and initialize apply_result_mode. */
1246 apply_result_size (void)
1248 static int size = -1;
1250 enum machine_mode mode;
1252 /* The values computed by this function never change. */
1257 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1258 if (FUNCTION_VALUE_REGNO_P (regno))
1260 mode = reg_raw_mode[regno];
1262 gcc_assert (mode != VOIDmode);
1264 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1265 if (size % align != 0)
1266 size = CEIL (size, align) * align;
1267 size += GET_MODE_SIZE (mode);
1268 apply_result_mode[regno] = mode;
1271 apply_result_mode[regno] = VOIDmode;
1273 /* Allow targets that use untyped_call and untyped_return to override
1274 the size so that machine-specific information can be stored here. */
1275 #ifdef APPLY_RESULT_SIZE
1276 size = APPLY_RESULT_SIZE;
1282 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1283 /* Create a vector describing the result block RESULT. If SAVEP is true,
1284 the result block is used to save the values; otherwise it is used to
1285 restore the values. */
1288 result_vector (int savep, rtx result)
1290 int regno, size, align, nelts;
1291 enum machine_mode mode;
1293 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1296 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1297 if ((mode = apply_result_mode[regno]) != VOIDmode)
1299 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1300 if (size % align != 0)
1301 size = CEIL (size, align) * align;
1302 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1303 mem = adjust_address (result, mode, size);
1304 savevec[nelts++] = (savep
1305 ? gen_rtx_SET (VOIDmode, mem, reg)
1306 : gen_rtx_SET (VOIDmode, reg, mem));
1307 size += GET_MODE_SIZE (mode);
1309 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1311 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1313 /* Save the state required to perform an untyped call with the same
1314 arguments as were passed to the current function. */
1317 expand_builtin_apply_args_1 (void)
1320 int size, align, regno;
1321 enum machine_mode mode;
1322 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1324 /* Create a block where the arg-pointer, structure value address,
1325 and argument registers can be saved. */
1326 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1328 /* Walk past the arg-pointer and structure value address. */
1329 size = GET_MODE_SIZE (Pmode);
1330 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1331 size += GET_MODE_SIZE (Pmode);
1333 /* Save each register used in calling a function to the block. */
1334 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1335 if ((mode = apply_args_mode[regno]) != VOIDmode)
1337 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1338 if (size % align != 0)
1339 size = CEIL (size, align) * align;
1341 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1343 emit_move_insn (adjust_address (registers, mode, size), tem);
1344 size += GET_MODE_SIZE (mode);
1347 /* Save the arg pointer to the block. */
1348 tem = copy_to_reg (virtual_incoming_args_rtx);
1349 #ifdef STACK_GROWS_DOWNWARD
1350 /* We need the pointer as the caller actually passed them to us, not
1351 as we might have pretended they were passed. Make sure it's a valid
1352 operand, as emit_move_insn isn't expected to handle a PLUS. */
1354 = force_operand (plus_constant (tem, current_function_pretend_args_size),
1357 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1359 size = GET_MODE_SIZE (Pmode);
1361 /* Save the structure value address unless this is passed as an
1362 "invisible" first argument. */
1363 if (struct_incoming_value)
1365 emit_move_insn (adjust_address (registers, Pmode, size),
1366 copy_to_reg (struct_incoming_value));
1367 size += GET_MODE_SIZE (Pmode);
1370 /* Return the address of the block. */
1371 return copy_addr_to_reg (XEXP (registers, 0));
1374 /* __builtin_apply_args returns block of memory allocated on
1375 the stack into which is stored the arg pointer, structure
1376 value address, static chain, and all the registers that might
1377 possibly be used in performing a function call. The code is
1378 moved to the start of the function so the incoming values are
1382 expand_builtin_apply_args (void)
1384 /* Don't do __builtin_apply_args more than once in a function.
1385 Save the result of the first call and reuse it. */
1386 if (apply_args_value != 0)
1387 return apply_args_value;
1389 /* When this function is called, it means that registers must be
1390 saved on entry to this function. So we migrate the
1391 call to the first insn of this function. */
1396 temp = expand_builtin_apply_args_1 ();
1400 apply_args_value = temp;
1402 /* Put the insns after the NOTE that starts the function.
1403 If this is inside a start_sequence, make the outer-level insn
1404 chain current, so the code is placed at the start of the
1406 push_topmost_sequence ();
1407 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1408 pop_topmost_sequence ();
1413 /* Perform an untyped call and save the state required to perform an
1414 untyped return of whatever value was returned by the given function. */
1417 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1419 int size, align, regno;
1420 enum machine_mode mode;
1421 rtx incoming_args, result, reg, dest, src, call_insn;
1422 rtx old_stack_level = 0;
1423 rtx call_fusage = 0;
1424 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1426 arguments = convert_memory_address (Pmode, arguments);
1428 /* Create a block where the return registers can be saved. */
1429 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1431 /* Fetch the arg pointer from the ARGUMENTS block. */
1432 incoming_args = gen_reg_rtx (Pmode);
1433 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1434 #ifndef STACK_GROWS_DOWNWARD
1435 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1436 incoming_args, 0, OPTAB_LIB_WIDEN);
1439 /* Push a new argument block and copy the arguments. Do not allow
1440 the (potential) memcpy call below to interfere with our stack
1442 do_pending_stack_adjust ();
1445 /* Save the stack with nonlocal if available. */
1446 #ifdef HAVE_save_stack_nonlocal
1447 if (HAVE_save_stack_nonlocal)
1448 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1451 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1453 /* Allocate a block of memory onto the stack and copy the memory
1454 arguments to the outgoing arguments address. */
1455 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1456 dest = virtual_outgoing_args_rtx;
1457 #ifndef STACK_GROWS_DOWNWARD
1458 if (GET_CODE (argsize) == CONST_INT)
1459 dest = plus_constant (dest, -INTVAL (argsize));
1461 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1463 dest = gen_rtx_MEM (BLKmode, dest);
1464 set_mem_align (dest, PARM_BOUNDARY);
1465 src = gen_rtx_MEM (BLKmode, incoming_args);
1466 set_mem_align (src, PARM_BOUNDARY);
1467 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1469 /* Refer to the argument block. */
1471 arguments = gen_rtx_MEM (BLKmode, arguments);
1472 set_mem_align (arguments, PARM_BOUNDARY);
1474 /* Walk past the arg-pointer and structure value address. */
1475 size = GET_MODE_SIZE (Pmode);
1477 size += GET_MODE_SIZE (Pmode);
1479 /* Restore each of the registers previously saved. Make USE insns
1480 for each of these registers for use in making the call. */
1481 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1482 if ((mode = apply_args_mode[regno]) != VOIDmode)
1484 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1485 if (size % align != 0)
1486 size = CEIL (size, align) * align;
1487 reg = gen_rtx_REG (mode, regno);
1488 emit_move_insn (reg, adjust_address (arguments, mode, size));
1489 use_reg (&call_fusage, reg);
1490 size += GET_MODE_SIZE (mode);
1493 /* Restore the structure value address unless this is passed as an
1494 "invisible" first argument. */
1495 size = GET_MODE_SIZE (Pmode);
1498 rtx value = gen_reg_rtx (Pmode);
1499 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1500 emit_move_insn (struct_value, value);
1501 if (REG_P (struct_value))
1502 use_reg (&call_fusage, struct_value);
1503 size += GET_MODE_SIZE (Pmode);
1506 /* All arguments and registers used for the call are set up by now! */
1507 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1509 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1510 and we don't want to load it into a register as an optimization,
1511 because prepare_call_address already did it if it should be done. */
1512 if (GET_CODE (function) != SYMBOL_REF)
1513 function = memory_address (FUNCTION_MODE, function);
1515 /* Generate the actual call instruction and save the return value. */
1516 #ifdef HAVE_untyped_call
1517 if (HAVE_untyped_call)
1518 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1519 result, result_vector (1, result)));
1522 #ifdef HAVE_call_value
1523 if (HAVE_call_value)
1527 /* Locate the unique return register. It is not possible to
1528 express a call that sets more than one return register using
1529 call_value; use untyped_call for that. In fact, untyped_call
1530 only needs to save the return registers in the given block. */
1531 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1532 if ((mode = apply_result_mode[regno]) != VOIDmode)
1534 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1536 valreg = gen_rtx_REG (mode, regno);
1539 emit_call_insn (GEN_CALL_VALUE (valreg,
1540 gen_rtx_MEM (FUNCTION_MODE, function),
1541 const0_rtx, NULL_RTX, const0_rtx));
1543 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1549 /* Find the CALL insn we just emitted, and attach the register usage
1551 call_insn = last_call_insn ();
1552 add_function_usage_to (call_insn, call_fusage);
1554 /* Restore the stack. */
1555 #ifdef HAVE_save_stack_nonlocal
1556 if (HAVE_save_stack_nonlocal)
1557 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1560 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1564 /* Return the address of the result block. */
1565 result = copy_addr_to_reg (XEXP (result, 0));
1566 return convert_memory_address (ptr_mode, result);
1569 /* Perform an untyped return. */
1572 expand_builtin_return (rtx result)
1574 int size, align, regno;
1575 enum machine_mode mode;
1577 rtx call_fusage = 0;
1579 result = convert_memory_address (Pmode, result);
1581 apply_result_size ();
1582 result = gen_rtx_MEM (BLKmode, result);
1584 #ifdef HAVE_untyped_return
1585 if (HAVE_untyped_return)
1587 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1593 /* Restore the return value and note that each value is used. */
1595 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1596 if ((mode = apply_result_mode[regno]) != VOIDmode)
1598 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1599 if (size % align != 0)
1600 size = CEIL (size, align) * align;
1601 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1602 emit_move_insn (reg, adjust_address (result, mode, size));
1604 push_to_sequence (call_fusage);
1605 emit_insn (gen_rtx_USE (VOIDmode, reg));
1606 call_fusage = get_insns ();
1608 size += GET_MODE_SIZE (mode);
1611 /* Put the USE insns before the return. */
1612 emit_insn (call_fusage);
1614 /* Return whatever values was restored by jumping directly to the end
1616 expand_naked_return ();
1619 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1621 static enum type_class
1622 type_to_class (tree type)
1624 switch (TREE_CODE (type))
1626 case VOID_TYPE: return void_type_class;
1627 case INTEGER_TYPE: return integer_type_class;
1628 case ENUMERAL_TYPE: return enumeral_type_class;
1629 case BOOLEAN_TYPE: return boolean_type_class;
1630 case POINTER_TYPE: return pointer_type_class;
1631 case REFERENCE_TYPE: return reference_type_class;
1632 case OFFSET_TYPE: return offset_type_class;
1633 case REAL_TYPE: return real_type_class;
1634 case COMPLEX_TYPE: return complex_type_class;
1635 case FUNCTION_TYPE: return function_type_class;
1636 case METHOD_TYPE: return method_type_class;
1637 case RECORD_TYPE: return record_type_class;
1639 case QUAL_UNION_TYPE: return union_type_class;
1640 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1641 ? string_type_class : array_type_class);
1642 case LANG_TYPE: return lang_type_class;
1643 default: return no_type_class;
1647 /* Expand a call EXP to __builtin_classify_type. */
1650 expand_builtin_classify_type (tree exp)
1652 if (call_expr_nargs (exp))
1653 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1654 return GEN_INT (no_type_class);
1657 /* This helper macro, meant to be used in mathfn_built_in below,
1658 determines which among a set of three builtin math functions is
1659 appropriate for a given type mode. The `F' and `L' cases are
1660 automatically generated from the `double' case. */
1661 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1662 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1663 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1664 fcodel = BUILT_IN_MATHFN##L ; break;
1665 /* Similar to above, but appends _R after any F/L suffix. */
1666 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1667 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1668 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1669 fcodel = BUILT_IN_MATHFN##L_R ; break;
1671 /* Return mathematic function equivalent to FN but operating directly
1672 on TYPE, if available. If we can't do the conversion, return zero. */
1674 mathfn_built_in (tree type, enum built_in_function fn)
1676 enum built_in_function fcode, fcodef, fcodel;
1680 CASE_MATHFN (BUILT_IN_ACOS)
1681 CASE_MATHFN (BUILT_IN_ACOSH)
1682 CASE_MATHFN (BUILT_IN_ASIN)
1683 CASE_MATHFN (BUILT_IN_ASINH)
1684 CASE_MATHFN (BUILT_IN_ATAN)
1685 CASE_MATHFN (BUILT_IN_ATAN2)
1686 CASE_MATHFN (BUILT_IN_ATANH)
1687 CASE_MATHFN (BUILT_IN_CBRT)
1688 CASE_MATHFN (BUILT_IN_CEIL)
1689 CASE_MATHFN (BUILT_IN_CEXPI)
1690 CASE_MATHFN (BUILT_IN_COPYSIGN)
1691 CASE_MATHFN (BUILT_IN_COS)
1692 CASE_MATHFN (BUILT_IN_COSH)
1693 CASE_MATHFN (BUILT_IN_DREM)
1694 CASE_MATHFN (BUILT_IN_ERF)
1695 CASE_MATHFN (BUILT_IN_ERFC)
1696 CASE_MATHFN (BUILT_IN_EXP)
1697 CASE_MATHFN (BUILT_IN_EXP10)
1698 CASE_MATHFN (BUILT_IN_EXP2)
1699 CASE_MATHFN (BUILT_IN_EXPM1)
1700 CASE_MATHFN (BUILT_IN_FABS)
1701 CASE_MATHFN (BUILT_IN_FDIM)
1702 CASE_MATHFN (BUILT_IN_FLOOR)
1703 CASE_MATHFN (BUILT_IN_FMA)
1704 CASE_MATHFN (BUILT_IN_FMAX)
1705 CASE_MATHFN (BUILT_IN_FMIN)
1706 CASE_MATHFN (BUILT_IN_FMOD)
1707 CASE_MATHFN (BUILT_IN_FREXP)
1708 CASE_MATHFN (BUILT_IN_GAMMA)
1709 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1710 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1711 CASE_MATHFN (BUILT_IN_HYPOT)
1712 CASE_MATHFN (BUILT_IN_ILOGB)
1713 CASE_MATHFN (BUILT_IN_INF)
1714 CASE_MATHFN (BUILT_IN_ISINF)
1715 CASE_MATHFN (BUILT_IN_J0)
1716 CASE_MATHFN (BUILT_IN_J1)
1717 CASE_MATHFN (BUILT_IN_JN)
1718 CASE_MATHFN (BUILT_IN_LCEIL)
1719 CASE_MATHFN (BUILT_IN_LDEXP)
1720 CASE_MATHFN (BUILT_IN_LFLOOR)
1721 CASE_MATHFN (BUILT_IN_LGAMMA)
1722 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1723 CASE_MATHFN (BUILT_IN_LLCEIL)
1724 CASE_MATHFN (BUILT_IN_LLFLOOR)
1725 CASE_MATHFN (BUILT_IN_LLRINT)
1726 CASE_MATHFN (BUILT_IN_LLROUND)
1727 CASE_MATHFN (BUILT_IN_LOG)
1728 CASE_MATHFN (BUILT_IN_LOG10)
1729 CASE_MATHFN (BUILT_IN_LOG1P)
1730 CASE_MATHFN (BUILT_IN_LOG2)
1731 CASE_MATHFN (BUILT_IN_LOGB)
1732 CASE_MATHFN (BUILT_IN_LRINT)
1733 CASE_MATHFN (BUILT_IN_LROUND)
1734 CASE_MATHFN (BUILT_IN_MODF)
1735 CASE_MATHFN (BUILT_IN_NAN)
1736 CASE_MATHFN (BUILT_IN_NANS)
1737 CASE_MATHFN (BUILT_IN_NEARBYINT)
1738 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1739 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1740 CASE_MATHFN (BUILT_IN_POW)
1741 CASE_MATHFN (BUILT_IN_POWI)
1742 CASE_MATHFN (BUILT_IN_POW10)
1743 CASE_MATHFN (BUILT_IN_REMAINDER)
1744 CASE_MATHFN (BUILT_IN_REMQUO)
1745 CASE_MATHFN (BUILT_IN_RINT)
1746 CASE_MATHFN (BUILT_IN_ROUND)
1747 CASE_MATHFN (BUILT_IN_SCALB)
1748 CASE_MATHFN (BUILT_IN_SCALBLN)
1749 CASE_MATHFN (BUILT_IN_SCALBN)
1750 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1751 CASE_MATHFN (BUILT_IN_SIN)
1752 CASE_MATHFN (BUILT_IN_SINCOS)
1753 CASE_MATHFN (BUILT_IN_SINH)
1754 CASE_MATHFN (BUILT_IN_SQRT)
1755 CASE_MATHFN (BUILT_IN_TAN)
1756 CASE_MATHFN (BUILT_IN_TANH)
1757 CASE_MATHFN (BUILT_IN_TGAMMA)
1758 CASE_MATHFN (BUILT_IN_TRUNC)
1759 CASE_MATHFN (BUILT_IN_Y0)
1760 CASE_MATHFN (BUILT_IN_Y1)
1761 CASE_MATHFN (BUILT_IN_YN)
1767 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1768 return implicit_built_in_decls[fcode];
1769 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1770 return implicit_built_in_decls[fcodef];
1771 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1772 return implicit_built_in_decls[fcodel];
1777 /* If errno must be maintained, expand the RTL to check if the result,
1778 TARGET, of a built-in function call, EXP, is NaN, and if so set
1782 expand_errno_check (tree exp, rtx target)
1784 rtx lab = gen_label_rtx ();
1786 /* Test the result; if it is NaN, set errno=EDOM because
1787 the argument was not in the domain. */
1788 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1792 /* If this built-in doesn't throw an exception, set errno directly. */
1793 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1795 #ifdef GEN_ERRNO_RTX
1796 rtx errno_rtx = GEN_ERRNO_RTX;
1799 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1801 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1807 /* We can't set errno=EDOM directly; let the library call do it.
1808 Pop the arguments right away in case the call gets deleted. */
1810 expand_call (exp, target, 0);
1815 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1816 Return NULL_RTX if a normal call should be emitted rather than expanding
1817 the function in-line. EXP is the expression that is a call to the builtin
1818 function; if convenient, the result should be placed in TARGET.
1819 SUBTARGET may be used as the target for computing one of EXP's operands. */
1822 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1824 optab builtin_optab;
1825 rtx op0, insns, before_call;
1826 tree fndecl = get_callee_fndecl (exp);
1827 enum machine_mode mode;
1828 bool errno_set = false;
1831 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1834 arg = CALL_EXPR_ARG (exp, 0);
1836 switch (DECL_FUNCTION_CODE (fndecl))
1838 CASE_FLT_FN (BUILT_IN_SQRT):
1839 errno_set = ! tree_expr_nonnegative_p (arg);
1840 builtin_optab = sqrt_optab;
1842 CASE_FLT_FN (BUILT_IN_EXP):
1843 errno_set = true; builtin_optab = exp_optab; break;
1844 CASE_FLT_FN (BUILT_IN_EXP10):
1845 CASE_FLT_FN (BUILT_IN_POW10):
1846 errno_set = true; builtin_optab = exp10_optab; break;
1847 CASE_FLT_FN (BUILT_IN_EXP2):
1848 errno_set = true; builtin_optab = exp2_optab; break;
1849 CASE_FLT_FN (BUILT_IN_EXPM1):
1850 errno_set = true; builtin_optab = expm1_optab; break;
1851 CASE_FLT_FN (BUILT_IN_LOGB):
1852 errno_set = true; builtin_optab = logb_optab; break;
1853 CASE_FLT_FN (BUILT_IN_LOG):
1854 errno_set = true; builtin_optab = log_optab; break;
1855 CASE_FLT_FN (BUILT_IN_LOG10):
1856 errno_set = true; builtin_optab = log10_optab; break;
1857 CASE_FLT_FN (BUILT_IN_LOG2):
1858 errno_set = true; builtin_optab = log2_optab; break;
1859 CASE_FLT_FN (BUILT_IN_LOG1P):
1860 errno_set = true; builtin_optab = log1p_optab; break;
1861 CASE_FLT_FN (BUILT_IN_ASIN):
1862 builtin_optab = asin_optab; break;
1863 CASE_FLT_FN (BUILT_IN_ACOS):
1864 builtin_optab = acos_optab; break;
1865 CASE_FLT_FN (BUILT_IN_TAN):
1866 builtin_optab = tan_optab; break;
1867 CASE_FLT_FN (BUILT_IN_ATAN):
1868 builtin_optab = atan_optab; break;
1869 CASE_FLT_FN (BUILT_IN_FLOOR):
1870 builtin_optab = floor_optab; break;
1871 CASE_FLT_FN (BUILT_IN_CEIL):
1872 builtin_optab = ceil_optab; break;
1873 CASE_FLT_FN (BUILT_IN_TRUNC):
1874 builtin_optab = btrunc_optab; break;
1875 CASE_FLT_FN (BUILT_IN_ROUND):
1876 builtin_optab = round_optab; break;
1877 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1878 builtin_optab = nearbyint_optab;
1879 if (flag_trapping_math)
1881 /* Else fallthrough and expand as rint. */
1882 CASE_FLT_FN (BUILT_IN_RINT):
1883 builtin_optab = rint_optab; break;
1888 /* Make a suitable register to place result in. */
1889 mode = TYPE_MODE (TREE_TYPE (exp));
1891 if (! flag_errno_math || ! HONOR_NANS (mode))
1894 /* Before working hard, check whether the instruction is available. */
1895 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1897 target = gen_reg_rtx (mode);
1899 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1900 need to expand the argument again. This way, we will not perform
1901 side-effects more the once. */
1902 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1904 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1908 /* Compute into TARGET.
1909 Set TARGET to wherever the result comes back. */
1910 target = expand_unop (mode, builtin_optab, op0, target, 0);
1915 expand_errno_check (exp, target);
1917 /* Output the entire sequence. */
1918 insns = get_insns ();
1924 /* If we were unable to expand via the builtin, stop the sequence
1925 (without outputting the insns) and call to the library function
1926 with the stabilized argument list. */
1930 before_call = get_last_insn ();
1932 target = expand_call (exp, target, target == const0_rtx);
1934 /* If this is a sqrt operation and we don't care about errno, try to
1935 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1936 This allows the semantics of the libcall to be visible to the RTL
1938 if (builtin_optab == sqrt_optab && !errno_set)
1940 /* Search backwards through the insns emitted by expand_call looking
1941 for the instruction with the REG_RETVAL note. */
1942 rtx last = get_last_insn ();
1943 while (last != before_call)
1945 if (find_reg_note (last, REG_RETVAL, NULL))
1947 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1948 /* Check that the REQ_EQUAL note is an EXPR_LIST with
1949 two elements, i.e. symbol_ref(sqrt) and the operand. */
1951 && GET_CODE (note) == EXPR_LIST
1952 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1953 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1954 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1956 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1957 /* Check operand is a register with expected mode. */
1960 && GET_MODE (operand) == mode)
1962 /* Replace the REG_EQUAL note with a SQRT rtx. */
1963 rtx equiv = gen_rtx_SQRT (mode, operand);
1964 set_unique_reg_note (last, REG_EQUAL, equiv);
1969 last = PREV_INSN (last);
1976 /* Expand a call to the builtin binary math functions (pow and atan2).
1977 Return NULL_RTX if a normal call should be emitted rather than expanding the
1978 function in-line. EXP is the expression that is a call to the builtin
1979 function; if convenient, the result should be placed in TARGET.
1980 SUBTARGET may be used as the target for computing one of EXP's
1984 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1986 optab builtin_optab;
1987 rtx op0, op1, insns;
1988 int op1_type = REAL_TYPE;
1989 tree fndecl = get_callee_fndecl (exp);
1991 enum machine_mode mode;
1992 bool errno_set = true;
1994 switch (DECL_FUNCTION_CODE (fndecl))
1996 CASE_FLT_FN (BUILT_IN_SCALBN):
1997 CASE_FLT_FN (BUILT_IN_SCALBLN):
1998 CASE_FLT_FN (BUILT_IN_LDEXP):
1999 op1_type = INTEGER_TYPE;
2004 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2007 arg0 = CALL_EXPR_ARG (exp, 0);
2008 arg1 = CALL_EXPR_ARG (exp, 1);
2010 switch (DECL_FUNCTION_CODE (fndecl))
2012 CASE_FLT_FN (BUILT_IN_POW):
2013 builtin_optab = pow_optab; break;
2014 CASE_FLT_FN (BUILT_IN_ATAN2):
2015 builtin_optab = atan2_optab; break;
2016 CASE_FLT_FN (BUILT_IN_SCALB):
2017 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2019 builtin_optab = scalb_optab; break;
2020 CASE_FLT_FN (BUILT_IN_SCALBN):
2021 CASE_FLT_FN (BUILT_IN_SCALBLN):
2022 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2024 /* Fall through... */
2025 CASE_FLT_FN (BUILT_IN_LDEXP):
2026 builtin_optab = ldexp_optab; break;
2027 CASE_FLT_FN (BUILT_IN_FMOD):
2028 builtin_optab = fmod_optab; break;
2029 CASE_FLT_FN (BUILT_IN_REMAINDER):
2030 CASE_FLT_FN (BUILT_IN_DREM):
2031 builtin_optab = remainder_optab; break;
2036 /* Make a suitable register to place result in. */
2037 mode = TYPE_MODE (TREE_TYPE (exp));
2039 /* Before working hard, check whether the instruction is available. */
2040 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2043 target = gen_reg_rtx (mode);
2045 if (! flag_errno_math || ! HONOR_NANS (mode))
2048 /* Always stabilize the argument list. */
2049 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2050 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2052 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2053 op1 = expand_normal (arg1);
2057 /* Compute into TARGET.
2058 Set TARGET to wherever the result comes back. */
2059 target = expand_binop (mode, builtin_optab, op0, op1,
2060 target, 0, OPTAB_DIRECT);
2062 /* If we were unable to expand via the builtin, stop the sequence
2063 (without outputting the insns) and call to the library function
2064 with the stabilized argument list. */
2068 return expand_call (exp, target, target == const0_rtx);
2072 expand_errno_check (exp, target);
2074 /* Output the entire sequence. */
2075 insns = get_insns ();
2082 /* Expand a call to the builtin sin and cos math functions.
2083 Return NULL_RTX if a normal call should be emitted rather than expanding the
2084 function in-line. EXP is the expression that is a call to the builtin
2085 function; if convenient, the result should be placed in TARGET.
2086 SUBTARGET may be used as the target for computing one of EXP's
2090 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2092 optab builtin_optab;
2094 tree fndecl = get_callee_fndecl (exp);
2095 enum machine_mode mode;
2098 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2101 arg = CALL_EXPR_ARG (exp, 0);
2103 switch (DECL_FUNCTION_CODE (fndecl))
2105 CASE_FLT_FN (BUILT_IN_SIN):
2106 CASE_FLT_FN (BUILT_IN_COS):
2107 builtin_optab = sincos_optab; break;
2112 /* Make a suitable register to place result in. */
2113 mode = TYPE_MODE (TREE_TYPE (exp));
2115 /* Check if sincos insn is available, otherwise fallback
2116 to sin or cos insn. */
2117 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2118 switch (DECL_FUNCTION_CODE (fndecl))
2120 CASE_FLT_FN (BUILT_IN_SIN):
2121 builtin_optab = sin_optab; break;
2122 CASE_FLT_FN (BUILT_IN_COS):
2123 builtin_optab = cos_optab; break;
2128 /* Before working hard, check whether the instruction is available. */
2129 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2131 target = gen_reg_rtx (mode);
2133 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2134 need to expand the argument again. This way, we will not perform
2135 side-effects more the once. */
2136 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2138 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2142 /* Compute into TARGET.
2143 Set TARGET to wherever the result comes back. */
2144 if (builtin_optab == sincos_optab)
2148 switch (DECL_FUNCTION_CODE (fndecl))
2150 CASE_FLT_FN (BUILT_IN_SIN):
2151 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2153 CASE_FLT_FN (BUILT_IN_COS):
2154 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2159 gcc_assert (result);
2163 target = expand_unop (mode, builtin_optab, op0, target, 0);
2168 /* Output the entire sequence. */
2169 insns = get_insns ();
2175 /* If we were unable to expand via the builtin, stop the sequence
2176 (without outputting the insns) and call to the library function
2177 with the stabilized argument list. */
2181 target = expand_call (exp, target, target == const0_rtx);
2186 /* Expand a call to one of the builtin math functions that operate on
2187 floating point argument and output an integer result (ilogb, isinf,
2189 Return 0 if a normal call should be emitted rather than expanding the
2190 function in-line. EXP is the expression that is a call to the builtin
2191 function; if convenient, the result should be placed in TARGET.
2192 SUBTARGET may be used as the target for computing one of EXP's operands. */
2195 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2197 optab builtin_optab = 0;
2198 enum insn_code icode = CODE_FOR_nothing;
2200 tree fndecl = get_callee_fndecl (exp);
2201 enum machine_mode mode;
2202 bool errno_set = false;
2205 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2208 arg = CALL_EXPR_ARG (exp, 0);
2210 switch (DECL_FUNCTION_CODE (fndecl))
2212 CASE_FLT_FN (BUILT_IN_ILOGB):
2213 errno_set = true; builtin_optab = ilogb_optab; break;
2214 CASE_FLT_FN (BUILT_IN_ISINF):
2215 builtin_optab = isinf_optab; break;
2216 case BUILT_IN_ISNORMAL:
2217 case BUILT_IN_ISFINITE:
2218 CASE_FLT_FN (BUILT_IN_FINITE):
2219 /* These builtins have no optabs (yet). */
2225 /* There's no easy way to detect the case we need to set EDOM. */
2226 if (flag_errno_math && errno_set)
2229 /* Optab mode depends on the mode of the input argument. */
2230 mode = TYPE_MODE (TREE_TYPE (arg));
2233 icode = optab_handler (builtin_optab, mode)->insn_code;
2235 /* Before working hard, check whether the instruction is available. */
2236 if (icode != CODE_FOR_nothing)
2238 /* Make a suitable register to place result in. */
2240 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2241 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2243 gcc_assert (insn_data[icode].operand[0].predicate
2244 (target, GET_MODE (target)));
2246 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2247 need to expand the argument again. This way, we will not perform
2248 side-effects more the once. */
2249 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2251 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2253 if (mode != GET_MODE (op0))
2254 op0 = convert_to_mode (mode, op0, 0);
2256 /* Compute into TARGET.
2257 Set TARGET to wherever the result comes back. */
2258 emit_unop_insn (icode, target, op0, UNKNOWN);
2262 /* If there is no optab, try generic code. */
2263 switch (DECL_FUNCTION_CODE (fndecl))
2267 CASE_FLT_FN (BUILT_IN_ISINF):
2269 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2270 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2271 tree const type = TREE_TYPE (arg);
2275 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2276 real_from_string (&r, buf);
2277 result = build_call_expr (isgr_fn, 2,
2278 fold_build1 (ABS_EXPR, type, arg),
2279 build_real (type, r));
2280 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2282 CASE_FLT_FN (BUILT_IN_FINITE):
2283 case BUILT_IN_ISFINITE:
2285 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2286 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2287 tree const type = TREE_TYPE (arg);
2291 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2292 real_from_string (&r, buf);
2293 result = build_call_expr (isle_fn, 2,
2294 fold_build1 (ABS_EXPR, type, arg),
2295 build_real (type, r));
2296 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2298 case BUILT_IN_ISNORMAL:
2300 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2301 islessequal(fabs(x),DBL_MAX). */
2302 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2303 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2304 tree const type = TREE_TYPE (arg);
2305 REAL_VALUE_TYPE rmax, rmin;
2308 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2309 real_from_string (&rmax, buf);
2310 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2311 real_from_string (&rmin, buf);
2312 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2313 result = build_call_expr (isle_fn, 2, arg,
2314 build_real (type, rmax));
2315 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2316 build_call_expr (isge_fn, 2, arg,
2317 build_real (type, rmin)));
2318 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2324 target = expand_call (exp, target, target == const0_rtx);
2329 /* Expand a call to the builtin sincos math function.
2330 Return NULL_RTX if a normal call should be emitted rather than expanding the
2331 function in-line. EXP is the expression that is a call to the builtin
2335 expand_builtin_sincos (tree exp)
2337 rtx op0, op1, op2, target1, target2;
2338 enum machine_mode mode;
2339 tree arg, sinp, cosp;
2342 if (!validate_arglist (exp, REAL_TYPE,
2343 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2346 arg = CALL_EXPR_ARG (exp, 0);
2347 sinp = CALL_EXPR_ARG (exp, 1);
2348 cosp = CALL_EXPR_ARG (exp, 2);
2350 /* Make a suitable register to place result in. */
2351 mode = TYPE_MODE (TREE_TYPE (arg));
2353 /* Check if sincos insn is available, otherwise emit the call. */
2354 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2357 target1 = gen_reg_rtx (mode);
2358 target2 = gen_reg_rtx (mode);
2360 op0 = expand_normal (arg);
2361 op1 = expand_normal (build_fold_indirect_ref (sinp));
2362 op2 = expand_normal (build_fold_indirect_ref (cosp));
2364 /* Compute into target1 and target2.
2365 Set TARGET to wherever the result comes back. */
2366 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2367 gcc_assert (result);
2369 /* Move target1 and target2 to the memory locations indicated
2371 emit_move_insn (op1, target1);
2372 emit_move_insn (op2, target2);
2377 /* Expand a call to the internal cexpi builtin to the sincos math function.
2378 EXP is the expression that is a call to the builtin function; if convenient,
2379 the result should be placed in TARGET. SUBTARGET may be used as the target
2380 for computing one of EXP's operands. */
2383 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2385 tree fndecl = get_callee_fndecl (exp);
2387 enum machine_mode mode;
2390 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2393 arg = CALL_EXPR_ARG (exp, 0);
2394 type = TREE_TYPE (arg);
2395 mode = TYPE_MODE (TREE_TYPE (arg));
2397 /* Try expanding via a sincos optab, fall back to emitting a libcall
2398 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2399 is only generated from sincos, cexp or if we have either of them. */
2400 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2402 op1 = gen_reg_rtx (mode);
2403 op2 = gen_reg_rtx (mode);
2405 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2407 /* Compute into op1 and op2. */
2408 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2410 else if (TARGET_HAS_SINCOS)
2412 tree call, fn = NULL_TREE;
2416 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2417 fn = built_in_decls[BUILT_IN_SINCOSF];
2418 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2419 fn = built_in_decls[BUILT_IN_SINCOS];
2420 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2421 fn = built_in_decls[BUILT_IN_SINCOSL];
2425 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2426 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2427 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2428 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2429 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2430 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2432 /* Make sure not to fold the sincos call again. */
2433 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2434 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2435 call, 3, arg, top1, top2));
2439 tree call, fn = NULL_TREE, narg;
2440 tree ctype = build_complex_type (type);
2442 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2443 fn = built_in_decls[BUILT_IN_CEXPF];
2444 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2445 fn = built_in_decls[BUILT_IN_CEXP];
2446 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2447 fn = built_in_decls[BUILT_IN_CEXPL];
2451 /* If we don't have a decl for cexp create one. This is the
2452 friendliest fallback if the user calls __builtin_cexpi
2453 without full target C99 function support. */
2454 if (fn == NULL_TREE)
2457 const char *name = NULL;
2459 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2461 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2463 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2466 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2467 fn = build_fn_decl (name, fntype);
2470 narg = fold_build2 (COMPLEX_EXPR, ctype,
2471 build_real (type, dconst0), arg);
2473 /* Make sure not to fold the cexp call again. */
2474 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2475 return expand_expr (build_call_nary (ctype, call, 1, narg),
2476 target, VOIDmode, EXPAND_NORMAL);
2479 /* Now build the proper return type. */
2480 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2481 make_tree (TREE_TYPE (arg), op2),
2482 make_tree (TREE_TYPE (arg), op1)),
2483 target, VOIDmode, EXPAND_NORMAL);
2486 /* Expand a call to one of the builtin rounding functions gcc defines
2487 as an extension (lfloor and lceil). As these are gcc extensions we
2488 do not need to worry about setting errno to EDOM.
2489 If expanding via optab fails, lower expression to (int)(floor(x)).
2490 EXP is the expression that is a call to the builtin function;
2491 if convenient, the result should be placed in TARGET. SUBTARGET may
2492 be used as the target for computing one of EXP's operands. */
2495 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2497 convert_optab builtin_optab;
2498 rtx op0, insns, tmp;
2499 tree fndecl = get_callee_fndecl (exp);
2500 enum built_in_function fallback_fn;
2501 tree fallback_fndecl;
2502 enum machine_mode mode;
2505 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2508 arg = CALL_EXPR_ARG (exp, 0);
2510 switch (DECL_FUNCTION_CODE (fndecl))
2512 CASE_FLT_FN (BUILT_IN_LCEIL):
2513 CASE_FLT_FN (BUILT_IN_LLCEIL):
2514 builtin_optab = lceil_optab;
2515 fallback_fn = BUILT_IN_CEIL;
2518 CASE_FLT_FN (BUILT_IN_LFLOOR):
2519 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2520 builtin_optab = lfloor_optab;
2521 fallback_fn = BUILT_IN_FLOOR;
2528 /* Make a suitable register to place result in. */
2529 mode = TYPE_MODE (TREE_TYPE (exp));
2531 target = gen_reg_rtx (mode);
2533 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2534 need to expand the argument again. This way, we will not perform
2535 side-effects more the once. */
2536 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2538 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2542 /* Compute into TARGET. */
2543 if (expand_sfix_optab (target, op0, builtin_optab))
2545 /* Output the entire sequence. */
2546 insns = get_insns ();
2552 /* If we were unable to expand via the builtin, stop the sequence
2553 (without outputting the insns). */
2556 /* Fall back to floating point rounding optab. */
2557 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2559 /* For non-C99 targets we may end up without a fallback fndecl here
2560 if the user called __builtin_lfloor directly. In this case emit
2561 a call to the floor/ceil variants nevertheless. This should result
2562 in the best user experience for not full C99 targets. */
2563 if (fallback_fndecl == NULL_TREE)
2566 const char *name = NULL;
2568 switch (DECL_FUNCTION_CODE (fndecl))
2570 case BUILT_IN_LCEIL:
2571 case BUILT_IN_LLCEIL:
2574 case BUILT_IN_LCEILF:
2575 case BUILT_IN_LLCEILF:
2578 case BUILT_IN_LCEILL:
2579 case BUILT_IN_LLCEILL:
2582 case BUILT_IN_LFLOOR:
2583 case BUILT_IN_LLFLOOR:
2586 case BUILT_IN_LFLOORF:
2587 case BUILT_IN_LLFLOORF:
2590 case BUILT_IN_LFLOORL:
2591 case BUILT_IN_LLFLOORL:
2598 fntype = build_function_type_list (TREE_TYPE (arg),
2599 TREE_TYPE (arg), NULL_TREE);
2600 fallback_fndecl = build_fn_decl (name, fntype);
2603 exp = build_call_expr (fallback_fndecl, 1, arg);
2605 tmp = expand_normal (exp);
2607 /* Truncate the result of floating point optab to integer
2608 via expand_fix (). */
2609 target = gen_reg_rtx (mode);
2610 expand_fix (target, tmp, 0);
2615 /* Expand a call to one of the builtin math functions doing integer
2617 Return 0 if a normal call should be emitted rather than expanding the
2618 function in-line. EXP is the expression that is a call to the builtin
2619 function; if convenient, the result should be placed in TARGET.
2620 SUBTARGET may be used as the target for computing one of EXP's operands. */
2623 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2625 convert_optab builtin_optab;
2627 tree fndecl = get_callee_fndecl (exp);
2629 enum machine_mode mode;
2631 /* There's no easy way to detect the case we need to set EDOM. */
2632 if (flag_errno_math)
2635 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2638 arg = CALL_EXPR_ARG (exp, 0);
2640 switch (DECL_FUNCTION_CODE (fndecl))
2642 CASE_FLT_FN (BUILT_IN_LRINT):
2643 CASE_FLT_FN (BUILT_IN_LLRINT):
2644 builtin_optab = lrint_optab; break;
2645 CASE_FLT_FN (BUILT_IN_LROUND):
2646 CASE_FLT_FN (BUILT_IN_LLROUND):
2647 builtin_optab = lround_optab; break;
2652 /* Make a suitable register to place result in. */
2653 mode = TYPE_MODE (TREE_TYPE (exp));
2655 target = gen_reg_rtx (mode);
2657 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2658 need to expand the argument again. This way, we will not perform
2659 side-effects more the once. */
2660 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2662 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2666 if (expand_sfix_optab (target, op0, builtin_optab))
2668 /* Output the entire sequence. */
2669 insns = get_insns ();
2675 /* If we were unable to expand via the builtin, stop the sequence
2676 (without outputting the insns) and call to the library function
2677 with the stabilized argument list. */
2680 target = expand_call (exp, target, target == const0_rtx);
2685 /* To evaluate powi(x,n), the floating point value x raised to the
2686 constant integer exponent n, we use a hybrid algorithm that
2687 combines the "window method" with look-up tables. For an
2688 introduction to exponentiation algorithms and "addition chains",
2689 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2690 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2691 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2692 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2694 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2695 multiplications to inline before calling the system library's pow
2696 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2697 so this default never requires calling pow, powf or powl. */
2699 #ifndef POWI_MAX_MULTS
2700 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2703 /* The size of the "optimal power tree" lookup table. All
2704 exponents less than this value are simply looked up in the
2705 powi_table below. This threshold is also used to size the
2706 cache of pseudo registers that hold intermediate results. */
2707 #define POWI_TABLE_SIZE 256
2709 /* The size, in bits of the window, used in the "window method"
2710 exponentiation algorithm. This is equivalent to a radix of
2711 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2712 #define POWI_WINDOW_SIZE 3
2714 /* The following table is an efficient representation of an
2715 "optimal power tree". For each value, i, the corresponding
2716 value, j, in the table states than an optimal evaluation
2717 sequence for calculating pow(x,i) can be found by evaluating
2718 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2719 100 integers is given in Knuth's "Seminumerical algorithms". */
2721 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2723 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2724 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2725 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2726 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2727 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2728 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2729 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2730 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2731 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2732 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2733 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2734 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2735 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2736 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2737 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2738 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2739 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2740 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2741 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2742 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2743 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2744 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2745 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2746 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2747 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2748 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2749 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2750 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2751 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2752 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2753 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2754 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2758 /* Return the number of multiplications required to calculate
2759 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2760 subroutine of powi_cost. CACHE is an array indicating
2761 which exponents have already been calculated. */
2764 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2766 /* If we've already calculated this exponent, then this evaluation
2767 doesn't require any additional multiplications. */
2772 return powi_lookup_cost (n - powi_table[n], cache)
2773 + powi_lookup_cost (powi_table[n], cache) + 1;
2776 /* Return the number of multiplications required to calculate
2777 powi(x,n) for an arbitrary x, given the exponent N. This
2778 function needs to be kept in sync with expand_powi below. */
2781 powi_cost (HOST_WIDE_INT n)
2783 bool cache[POWI_TABLE_SIZE];
2784 unsigned HOST_WIDE_INT digit;
2785 unsigned HOST_WIDE_INT val;
2791 /* Ignore the reciprocal when calculating the cost. */
2792 val = (n < 0) ? -n : n;
2794 /* Initialize the exponent cache. */
2795 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2800 while (val >= POWI_TABLE_SIZE)
2804 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2805 result += powi_lookup_cost (digit, cache)
2806 + POWI_WINDOW_SIZE + 1;
2807 val >>= POWI_WINDOW_SIZE;
2816 return result + powi_lookup_cost (val, cache);
2819 /* Recursive subroutine of expand_powi. This function takes the array,
2820 CACHE, of already calculated exponents and an exponent N and returns
2821 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2824 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2826 unsigned HOST_WIDE_INT digit;
2830 if (n < POWI_TABLE_SIZE)
2835 target = gen_reg_rtx (mode);
2838 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2839 op1 = expand_powi_1 (mode, powi_table[n], cache);
2843 target = gen_reg_rtx (mode);
2844 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2845 op0 = expand_powi_1 (mode, n - digit, cache);
2846 op1 = expand_powi_1 (mode, digit, cache);
2850 target = gen_reg_rtx (mode);
2851 op0 = expand_powi_1 (mode, n >> 1, cache);
2855 result = expand_mult (mode, op0, op1, target, 0);
2856 if (result != target)
2857 emit_move_insn (target, result);
2861 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2862 floating point operand in mode MODE, and N is the exponent. This
2863 function needs to be kept in sync with powi_cost above. */
2866 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2868 unsigned HOST_WIDE_INT val;
2869 rtx cache[POWI_TABLE_SIZE];
2873 return CONST1_RTX (mode);
2875 val = (n < 0) ? -n : n;
2877 memset (cache, 0, sizeof (cache));
2880 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2882 /* If the original exponent was negative, reciprocate the result. */
2884 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2885 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2890 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2891 a normal call should be emitted rather than expanding the function
2892 in-line. EXP is the expression that is a call to the builtin
2893 function; if convenient, the result should be placed in TARGET. */
2896 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2900 tree type = TREE_TYPE (exp);
2901 REAL_VALUE_TYPE cint, c, c2;
2904 enum machine_mode mode = TYPE_MODE (type);
2906 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2909 arg0 = CALL_EXPR_ARG (exp, 0);
2910 arg1 = CALL_EXPR_ARG (exp, 1);
2912 if (TREE_CODE (arg1) != REAL_CST
2913 || TREE_OVERFLOW (arg1))
2914 return expand_builtin_mathfn_2 (exp, target, subtarget);
2916 /* Handle constant exponents. */
2918 /* For integer valued exponents we can expand to an optimal multiplication
2919 sequence using expand_powi. */
2920 c = TREE_REAL_CST (arg1);
2921 n = real_to_integer (&c);
2922 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2923 if (real_identical (&c, &cint)
2924 && ((n >= -1 && n <= 2)
2925 || (flag_unsafe_math_optimizations
2927 && powi_cost (n) <= POWI_MAX_MULTS)))
2929 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2932 op = force_reg (mode, op);
2933 op = expand_powi (op, mode, n);
2938 narg0 = builtin_save_expr (arg0);
2940 /* If the exponent is not integer valued, check if it is half of an integer.
2941 In this case we can expand to sqrt (x) * x**(n/2). */
2942 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2943 if (fn != NULL_TREE)
2945 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2946 n = real_to_integer (&c2);
2947 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2948 if (real_identical (&c2, &cint)
2949 && ((flag_unsafe_math_optimizations
2951 && powi_cost (n/2) <= POWI_MAX_MULTS)
2954 tree call_expr = build_call_expr (fn, 1, narg0);
2955 /* Use expand_expr in case the newly built call expression
2956 was folded to a non-call. */
2957 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2960 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2961 op2 = force_reg (mode, op2);
2962 op2 = expand_powi (op2, mode, abs (n / 2));
2963 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2964 0, OPTAB_LIB_WIDEN);
2965 /* If the original exponent was negative, reciprocate the
2968 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2969 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2975 /* Try if the exponent is a third of an integer. In this case
2976 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2977 different from pow (x, 1./3.) due to rounding and behavior
2978 with negative x we need to constrain this transformation to
2979 unsafe math and positive x or finite math. */
2980 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2982 && flag_unsafe_math_optimizations
2983 && (tree_expr_nonnegative_p (arg0)
2984 || !HONOR_NANS (mode)))
2986 REAL_VALUE_TYPE dconst3;
2987 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
2988 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2989 real_round (&c2, mode, &c2);
2990 n = real_to_integer (&c2);
2991 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2992 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
2993 real_convert (&c2, mode, &c2);
2994 if (real_identical (&c2, &c)
2996 && powi_cost (n/3) <= POWI_MAX_MULTS)
2999 tree call_expr = build_call_expr (fn, 1,narg0);
3000 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3001 if (abs (n) % 3 == 2)
3002 op = expand_simple_binop (mode, MULT, op, op, op,
3003 0, OPTAB_LIB_WIDEN);
3006 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3007 op2 = force_reg (mode, op2);
3008 op2 = expand_powi (op2, mode, abs (n / 3));
3009 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3010 0, OPTAB_LIB_WIDEN);
3011 /* If the original exponent was negative, reciprocate the
3014 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3015 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3021 /* Fall back to optab expansion. */
3022 return expand_builtin_mathfn_2 (exp, target, subtarget);
3025 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3026 a normal call should be emitted rather than expanding the function
3027 in-line. EXP is the expression that is a call to the builtin
3028 function; if convenient, the result should be placed in TARGET. */
3031 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3035 enum machine_mode mode;
3036 enum machine_mode mode2;
3038 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3041 arg0 = CALL_EXPR_ARG (exp, 0);
3042 arg1 = CALL_EXPR_ARG (exp, 1);
3043 mode = TYPE_MODE (TREE_TYPE (exp));
3045 /* Handle constant power. */
3047 if (TREE_CODE (arg1) == INTEGER_CST
3048 && !TREE_OVERFLOW (arg1))
3050 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3052 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3053 Otherwise, check the number of multiplications required. */
3054 if ((TREE_INT_CST_HIGH (arg1) == 0
3055 || TREE_INT_CST_HIGH (arg1) == -1)
3056 && ((n >= -1 && n <= 2)
3058 && powi_cost (n) <= POWI_MAX_MULTS)))
3060 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3061 op0 = force_reg (mode, op0);
3062 return expand_powi (op0, mode, n);
3066 /* Emit a libcall to libgcc. */
3068 /* Mode of the 2nd argument must match that of an int. */
3069 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3071 if (target == NULL_RTX)
3072 target = gen_reg_rtx (mode);
3074 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3075 if (GET_MODE (op0) != mode)
3076 op0 = convert_to_mode (mode, op0, 0);
3077 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3078 if (GET_MODE (op1) != mode2)
3079 op1 = convert_to_mode (mode2, op1, 0);
3081 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3082 target, LCT_CONST_MAKE_BLOCK, mode, 2,
3083 op0, mode, op1, mode2);
3088 /* Expand expression EXP which is a call to the strlen builtin. Return
3089 NULL_RTX if we failed the caller should emit a normal call, otherwise
3090 try to get the result in TARGET, if convenient. */
3093 expand_builtin_strlen (tree exp, rtx target,
3094 enum machine_mode target_mode)
3096 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3102 tree src = CALL_EXPR_ARG (exp, 0);
3103 rtx result, src_reg, char_rtx, before_strlen;
3104 enum machine_mode insn_mode = target_mode, char_mode;
3105 enum insn_code icode = CODE_FOR_nothing;
3108 /* If the length can be computed at compile-time, return it. */
3109 len = c_strlen (src, 0);
3111 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3113 /* If the length can be computed at compile-time and is constant
3114 integer, but there are side-effects in src, evaluate
3115 src for side-effects, then return len.
3116 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3117 can be optimized into: i++; x = 3; */
3118 len = c_strlen (src, 1);
3119 if (len && TREE_CODE (len) == INTEGER_CST)
3121 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3122 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3125 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3127 /* If SRC is not a pointer type, don't do this operation inline. */
3131 /* Bail out if we can't compute strlen in the right mode. */
3132 while (insn_mode != VOIDmode)
3134 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3135 if (icode != CODE_FOR_nothing)
3138 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3140 if (insn_mode == VOIDmode)
3143 /* Make a place to write the result of the instruction. */
3147 && GET_MODE (result) == insn_mode
3148 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3149 result = gen_reg_rtx (insn_mode);
3151 /* Make a place to hold the source address. We will not expand
3152 the actual source until we are sure that the expansion will
3153 not fail -- there are trees that cannot be expanded twice. */
3154 src_reg = gen_reg_rtx (Pmode);
3156 /* Mark the beginning of the strlen sequence so we can emit the
3157 source operand later. */
3158 before_strlen = get_last_insn ();
3160 char_rtx = const0_rtx;
3161 char_mode = insn_data[(int) icode].operand[2].mode;
3162 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3164 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3166 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3167 char_rtx, GEN_INT (align));
3172 /* Now that we are assured of success, expand the source. */
3174 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3176 emit_move_insn (src_reg, pat);
3181 emit_insn_after (pat, before_strlen);
3183 emit_insn_before (pat, get_insns ());
3185 /* Return the value in the proper mode for this function. */
3186 if (GET_MODE (result) == target_mode)
3188 else if (target != 0)
3189 convert_move (target, result, 0);
3191 target = convert_to_mode (target_mode, result, 0);
3197 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3198 caller should emit a normal call, otherwise try to get the result
3199 in TARGET, if convenient (and in mode MODE if that's convenient). */
3202 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3204 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3206 tree type = TREE_TYPE (exp);
3207 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3208 CALL_EXPR_ARG (exp, 1), type);
3210 return expand_expr (result, target, mode, EXPAND_NORMAL);
3215 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3216 caller should emit a normal call, otherwise try to get the result
3217 in TARGET, if convenient (and in mode MODE if that's convenient). */
3220 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3222 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3224 tree type = TREE_TYPE (exp);
3225 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3226 CALL_EXPR_ARG (exp, 1), type);
3228 return expand_expr (result, target, mode, EXPAND_NORMAL);
3230 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3235 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3236 caller should emit a normal call, otherwise try to get the result
3237 in TARGET, if convenient (and in mode MODE if that's convenient). */
3240 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3242 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3244 tree type = TREE_TYPE (exp);
3245 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3246 CALL_EXPR_ARG (exp, 1), type);
3248 return expand_expr (result, target, mode, EXPAND_NORMAL);
3253 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3254 caller should emit a normal call, otherwise try to get the result
3255 in TARGET, if convenient (and in mode MODE if that's convenient). */
3258 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3260 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3262 tree type = TREE_TYPE (exp);
3263 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3264 CALL_EXPR_ARG (exp, 1), type);
3266 return expand_expr (result, target, mode, EXPAND_NORMAL);
3271 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3272 bytes from constant string DATA + OFFSET and return it as target
3276 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3277 enum machine_mode mode)
3279 const char *str = (const char *) data;
3281 gcc_assert (offset >= 0
3282 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3283 <= strlen (str) + 1));
3285 return c_readstr (str + offset, mode);
3288 /* Expand a call EXP to the memcpy builtin.
3289 Return NULL_RTX if we failed, the caller should emit a normal call,
3290 otherwise try to get the result in TARGET, if convenient (and in
3291 mode MODE if that's convenient). */
3294 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3296 tree fndecl = get_callee_fndecl (exp);
3298 if (!validate_arglist (exp,
3299 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3303 tree dest = CALL_EXPR_ARG (exp, 0);
3304 tree src = CALL_EXPR_ARG (exp, 1);
3305 tree len = CALL_EXPR_ARG (exp, 2);
3306 const char *src_str;
3307 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3308 unsigned int dest_align
3309 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3310 rtx dest_mem, src_mem, dest_addr, len_rtx;
3311 tree result = fold_builtin_memory_op (dest, src, len,
3312 TREE_TYPE (TREE_TYPE (fndecl)),
3314 HOST_WIDE_INT expected_size = -1;
3315 unsigned int expected_align = 0;
3319 while (TREE_CODE (result) == COMPOUND_EXPR)
3321 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3323 result = TREE_OPERAND (result, 1);
3325 return expand_expr (result, target, mode, EXPAND_NORMAL);
3328 /* If DEST is not a pointer type, call the normal function. */
3329 if (dest_align == 0)
3332 /* If either SRC is not a pointer type, don't do this
3333 operation in-line. */
3337 stringop_block_profile (exp, &expected_align, &expected_size);
3338 if (expected_align < dest_align)
3339 expected_align = dest_align;
3340 dest_mem = get_memory_rtx (dest, len);
3341 set_mem_align (dest_mem, dest_align);
3342 len_rtx = expand_normal (len);
3343 src_str = c_getstr (src);
3345 /* If SRC is a string constant and block move would be done
3346 by pieces, we can avoid loading the string from memory
3347 and only stored the computed constants. */
3349 && GET_CODE (len_rtx) == CONST_INT
3350 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3351 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3352 (void *) src_str, dest_align, false))
3354 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3355 builtin_memcpy_read_str,
3356 (void *) src_str, dest_align, false, 0);
3357 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3358 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3362 src_mem = get_memory_rtx (src, len);
3363 set_mem_align (src_mem, src_align);
3365 /* Copy word part most expediently. */
3366 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3367 CALL_EXPR_TAILCALL (exp)
3368 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3369 expected_align, expected_size);
3373 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3374 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3380 /* Expand a call EXP to the mempcpy builtin.
3381 Return NULL_RTX if we failed; the caller should emit a normal call,
3382 otherwise try to get the result in TARGET, if convenient (and in
3383 mode MODE if that's convenient). If ENDP is 0 return the
3384 destination pointer, if ENDP is 1 return the end pointer ala
3385 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3389 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3391 if (!validate_arglist (exp,
3392 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3396 tree dest = CALL_EXPR_ARG (exp, 0);
3397 tree src = CALL_EXPR_ARG (exp, 1);
3398 tree len = CALL_EXPR_ARG (exp, 2);
3399 return expand_builtin_mempcpy_args (dest, src, len,
3401 target, mode, /*endp=*/ 1);
3405 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3406 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3407 so that this can also be called without constructing an actual CALL_EXPR.
3408 TYPE is the return type of the call. The other arguments and return value
3409 are the same as for expand_builtin_mempcpy. */
3412 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3413 rtx target, enum machine_mode mode, int endp)
3415 /* If return value is ignored, transform mempcpy into memcpy. */
3416 if (target == const0_rtx)
3418 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3423 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3424 target, mode, EXPAND_NORMAL);
3428 const char *src_str;
3429 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3430 unsigned int dest_align
3431 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3432 rtx dest_mem, src_mem, len_rtx;
3433 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3437 while (TREE_CODE (result) == COMPOUND_EXPR)
3439 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3441 result = TREE_OPERAND (result, 1);
3443 return expand_expr (result, target, mode, EXPAND_NORMAL);
3446 /* If either SRC or DEST is not a pointer type, don't do this
3447 operation in-line. */
3448 if (dest_align == 0 || src_align == 0)
3451 /* If LEN is not constant, call the normal function. */
3452 if (! host_integerp (len, 1))
3455 len_rtx = expand_normal (len);
3456 src_str = c_getstr (src);
3458 /* If SRC is a string constant and block move would be done
3459 by pieces, we can avoid loading the string from memory
3460 and only stored the computed constants. */
3462 && GET_CODE (len_rtx) == CONST_INT
3463 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3464 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3465 (void *) src_str, dest_align, false))
3467 dest_mem = get_memory_rtx (dest, len);
3468 set_mem_align (dest_mem, dest_align);
3469 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3470 builtin_memcpy_read_str,
3471 (void *) src_str, dest_align,
3473 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3474 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3478 if (GET_CODE (len_rtx) == CONST_INT
3479 && can_move_by_pieces (INTVAL (len_rtx),
3480 MIN (dest_align, src_align)))
3482 dest_mem = get_memory_rtx (dest, len);
3483 set_mem_align (dest_mem, dest_align);
3484 src_mem = get_memory_rtx (src, len);
3485 set_mem_align (src_mem, src_align);
3486 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3487 MIN (dest_align, src_align), endp);
3488 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3489 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3497 /* Expand expression EXP, which is a call to the memmove builtin. Return
3498 NULL_RTX if we failed; the caller should emit a normal call. */
3501 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3503 if (!validate_arglist (exp,
3504 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3508 tree dest = CALL_EXPR_ARG (exp, 0);
3509 tree src = CALL_EXPR_ARG (exp, 1);
3510 tree len = CALL_EXPR_ARG (exp, 2);
3511 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3512 target, mode, ignore);
3516 /* Helper function to do the actual work for expand_builtin_memmove. The
3517 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3518 so that this can also be called without constructing an actual CALL_EXPR.
3519 TYPE is the return type of the call. The other arguments and return value
3520 are the same as for expand_builtin_memmove. */
3523 expand_builtin_memmove_args (tree dest, tree src, tree len,
3524 tree type, rtx target, enum machine_mode mode,
3527 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3531 STRIP_TYPE_NOPS (result);
3532 while (TREE_CODE (result) == COMPOUND_EXPR)
3534 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3536 result = TREE_OPERAND (result, 1);
3538 return expand_expr (result, target, mode, EXPAND_NORMAL);
3541 /* Otherwise, call the normal function. */
3545 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3546 NULL_RTX if we failed the caller should emit a normal call. */
3549 expand_builtin_bcopy (tree exp, int ignore)
3551 tree type = TREE_TYPE (exp);
3552 tree src, dest, size;
3554 if (!validate_arglist (exp,
3555 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3558 src = CALL_EXPR_ARG (exp, 0);
3559 dest = CALL_EXPR_ARG (exp, 1);
3560 size = CALL_EXPR_ARG (exp, 2);
3562 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3563 This is done this way so that if it isn't expanded inline, we fall
3564 back to calling bcopy instead of memmove. */
3565 return expand_builtin_memmove_args (dest, src,
3566 fold_convert (sizetype, size),
3567 type, const0_rtx, VOIDmode,
3572 # define HAVE_movstr 0
3573 # define CODE_FOR_movstr CODE_FOR_nothing
3576 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3577 we failed, the caller should emit a normal call, otherwise try to
3578 get the result in TARGET, if convenient. If ENDP is 0 return the
3579 destination pointer, if ENDP is 1 return the end pointer ala
3580 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3584 expand_movstr (tree dest, tree src, rtx target, int endp)
3590 const struct insn_data * data;
3595 dest_mem = get_memory_rtx (dest, NULL);
3596 src_mem = get_memory_rtx (src, NULL);
3599 target = force_reg (Pmode, XEXP (dest_mem, 0));
3600 dest_mem = replace_equiv_address (dest_mem, target);
3601 end = gen_reg_rtx (Pmode);
3605 if (target == 0 || target == const0_rtx)
3607 end = gen_reg_rtx (Pmode);
3615 data = insn_data + CODE_FOR_movstr;
3617 if (data->operand[0].mode != VOIDmode)
3618 end = gen_lowpart (data->operand[0].mode, end);
3620 insn = data->genfun (end, dest_mem, src_mem);
3626 /* movstr is supposed to set end to the address of the NUL
3627 terminator. If the caller requested a mempcpy-like return value,
3629 if (endp == 1 && target != const0_rtx)
3631 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3632 emit_move_insn (target, force_operand (tem, NULL_RTX));
3638 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3639 NULL_RTX if we failed the caller should emit a normal call, otherwise
3640 try to get the result in TARGET, if convenient (and in mode MODE if that's
3644 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3646 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3648 tree dest = CALL_EXPR_ARG (exp, 0);
3649 tree src = CALL_EXPR_ARG (exp, 1);
3650 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3655 /* Helper function to do the actual work for expand_builtin_strcpy. The
3656 arguments to the builtin_strcpy call DEST and SRC are broken out
3657 so that this can also be called without constructing an actual CALL_EXPR.
3658 The other arguments and return value are the same as for
3659 expand_builtin_strcpy. */
3662 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3663 rtx target, enum machine_mode mode)
3665 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3667 return expand_expr (result, target, mode, EXPAND_NORMAL);
3668 return expand_movstr (dest, src, target, /*endp=*/0);
3672 /* Expand a call EXP to the stpcpy builtin.
3673 Return NULL_RTX if we failed the caller should emit a normal call,
3674 otherwise try to get the result in TARGET, if convenient (and in
3675 mode MODE if that's convenient). */
3678 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3682 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3685 dst = CALL_EXPR_ARG (exp, 0);
3686 src = CALL_EXPR_ARG (exp, 1);
3688 /* If return value is ignored, transform stpcpy into strcpy. */
3689 if (target == const0_rtx)
3691 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3695 return expand_expr (build_call_expr (fn, 2, dst, src),
3696 target, mode, EXPAND_NORMAL);
3703 /* Ensure we get an actual string whose length can be evaluated at
3704 compile-time, not an expression containing a string. This is
3705 because the latter will potentially produce pessimized code
3706 when used to produce the return value. */
3707 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3708 return expand_movstr (dst, src, target, /*endp=*/2);
3710 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3711 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3712 target, mode, /*endp=*/2);
3717 if (TREE_CODE (len) == INTEGER_CST)
3719 rtx len_rtx = expand_normal (len);
3721 if (GET_CODE (len_rtx) == CONST_INT)
3723 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3724 dst, src, target, mode);
3730 if (mode != VOIDmode)
3731 target = gen_reg_rtx (mode);
3733 target = gen_reg_rtx (GET_MODE (ret));
3735 if (GET_MODE (target) != GET_MODE (ret))
3736 ret = gen_lowpart (GET_MODE (target), ret);
3738 ret = plus_constant (ret, INTVAL (len_rtx));
3739 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3747 return expand_movstr (dst, src, target, /*endp=*/2);
3751 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3752 bytes from constant string DATA + OFFSET and return it as target
3756 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3757 enum machine_mode mode)
3759 const char *str = (const char *) data;
3761 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3764 return c_readstr (str + offset, mode);
3767 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3768 NULL_RTX if we failed the caller should emit a normal call. */
3771 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3773 tree fndecl = get_callee_fndecl (exp);
3775 if (validate_arglist (exp,
3776 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3778 tree dest = CALL_EXPR_ARG (exp, 0);
3779 tree src = CALL_EXPR_ARG (exp, 1);
3780 tree len = CALL_EXPR_ARG (exp, 2);
3781 tree slen = c_strlen (src, 1);
3782 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3786 while (TREE_CODE (result) == COMPOUND_EXPR)
3788 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3790 result = TREE_OPERAND (result, 1);
3792 return expand_expr (result, target, mode, EXPAND_NORMAL);
3795 /* We must be passed a constant len and src parameter. */
3796 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3799 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3801 /* We're required to pad with trailing zeros if the requested
3802 len is greater than strlen(s2)+1. In that case try to
3803 use store_by_pieces, if it fails, punt. */
3804 if (tree_int_cst_lt (slen, len))
3806 unsigned int dest_align
3807 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3808 const char *p = c_getstr (src);
3811 if (!p || dest_align == 0 || !host_integerp (len, 1)
3812 || !can_store_by_pieces (tree_low_cst (len, 1),
3813 builtin_strncpy_read_str,
3814 (void *) p, dest_align, false))
3817 dest_mem = get_memory_rtx (dest, len);
3818 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3819 builtin_strncpy_read_str,
3820 (void *) p, dest_align, false, 0);
3821 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3822 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3829 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3830 bytes from constant string DATA + OFFSET and return it as target
3834 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3835 enum machine_mode mode)
3837 const char *c = (const char *) data;
3838 char *p = alloca (GET_MODE_SIZE (mode));
3840 memset (p, *c, GET_MODE_SIZE (mode));
3842 return c_readstr (p, mode);
3845 /* Callback routine for store_by_pieces. Return the RTL of a register
3846 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3847 char value given in the RTL register data. For example, if mode is
3848 4 bytes wide, return the RTL for 0x01010101*data. */
3851 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3852 enum machine_mode mode)
3858 size = GET_MODE_SIZE (mode);
3863 memset (p, 1, size);
3864 coeff = c_readstr (p, mode);
3866 target = convert_to_mode (mode, (rtx) data, 1);
3867 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3868 return force_reg (mode, target);
3871 /* Expand expression EXP, which is a call to the memset builtin. Return
3872 NULL_RTX if we failed the caller should emit a normal call, otherwise
3873 try to get the result in TARGET, if convenient (and in mode MODE if that's
3877 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3879 if (!validate_arglist (exp,
3880 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3884 tree dest = CALL_EXPR_ARG (exp, 0);
3885 tree val = CALL_EXPR_ARG (exp, 1);
3886 tree len = CALL_EXPR_ARG (exp, 2);
3887 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3891 /* Helper function to do the actual work for expand_builtin_memset. The
3892 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3893 so that this can also be called without constructing an actual CALL_EXPR.
3894 The other arguments and return value are the same as for
3895 expand_builtin_memset. */
3898 expand_builtin_memset_args (tree dest, tree val, tree len,
3899 rtx target, enum machine_mode mode, tree orig_exp)
3902 enum built_in_function fcode;
3904 unsigned int dest_align;
3905 rtx dest_mem, dest_addr, len_rtx;
3906 HOST_WIDE_INT expected_size = -1;
3907 unsigned int expected_align = 0;
3909 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3911 /* If DEST is not a pointer type, don't do this operation in-line. */
3912 if (dest_align == 0)
3915 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3916 if (expected_align < dest_align)
3917 expected_align = dest_align;
3919 /* If the LEN parameter is zero, return DEST. */
3920 if (integer_zerop (len))
3922 /* Evaluate and ignore VAL in case it has side-effects. */
3923 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3924 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3927 /* Stabilize the arguments in case we fail. */
3928 dest = builtin_save_expr (dest);
3929 val = builtin_save_expr (val);
3930 len = builtin_save_expr (len);
3932 len_rtx = expand_normal (len);
3933 dest_mem = get_memory_rtx (dest, len);
3935 if (TREE_CODE (val) != INTEGER_CST)
3939 val_rtx = expand_normal (val);
3940 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3943 /* Assume that we can memset by pieces if we can store
3944 * the coefficients by pieces (in the required modes).
3945 * We can't pass builtin_memset_gen_str as that emits RTL. */
3947 if (host_integerp (len, 1)
3948 && can_store_by_pieces (tree_low_cst (len, 1),
3949 builtin_memset_read_str, &c, dest_align,
3952 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3954 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3955 builtin_memset_gen_str, val_rtx, dest_align,
3958 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3959 dest_align, expected_align,
3963 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3964 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3968 if (target_char_cast (val, &c))
3973 if (host_integerp (len, 1)
3974 && can_store_by_pieces (tree_low_cst (len, 1),
3975 builtin_memset_read_str, &c, dest_align,
3977 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3978 builtin_memset_read_str, &c, dest_align, true, 0);
3979 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3980 dest_align, expected_align,
3984 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3985 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3989 set_mem_align (dest_mem, dest_align);
3990 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3991 CALL_EXPR_TAILCALL (orig_exp)
3992 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3993 expected_align, expected_size);
3997 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3998 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4004 fndecl = get_callee_fndecl (orig_exp);
4005 fcode = DECL_FUNCTION_CODE (fndecl);
4006 if (fcode == BUILT_IN_MEMSET)
4007 fn = build_call_expr (fndecl, 3, dest, val, len);
4008 else if (fcode == BUILT_IN_BZERO)
4009 fn = build_call_expr (fndecl, 2, dest, len);
4012 if (TREE_CODE (fn) == CALL_EXPR)
4013 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4014 return expand_call (fn, target, target == const0_rtx);
4017 /* Expand expression EXP, which is a call to the bzero builtin. Return
4018 NULL_RTX if we failed the caller should emit a normal call. */
4021 expand_builtin_bzero (tree exp)
4025 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4028 dest = CALL_EXPR_ARG (exp, 0);
4029 size = CALL_EXPR_ARG (exp, 1);
4031 /* New argument list transforming bzero(ptr x, int y) to
4032 memset(ptr x, int 0, size_t y). This is done this way
4033 so that if it isn't expanded inline, we fallback to
4034 calling bzero instead of memset. */
4036 return expand_builtin_memset_args (dest, integer_zero_node,
4037 fold_convert (sizetype, size),
4038 const0_rtx, VOIDmode, exp);
4041 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4042 caller should emit a normal call, otherwise try to get the result
4043 in TARGET, if convenient (and in mode MODE if that's convenient). */
4046 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4048 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4049 INTEGER_TYPE, VOID_TYPE))
4051 tree type = TREE_TYPE (exp);
4052 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4053 CALL_EXPR_ARG (exp, 1),
4054 CALL_EXPR_ARG (exp, 2), type);
4056 return expand_expr (result, target, mode, EXPAND_NORMAL);
4061 /* Expand expression EXP, which is a call to the memcmp built-in function.
4062 Return NULL_RTX if we failed and the
4063 caller should emit a normal call, otherwise try to get the result in
4064 TARGET, if convenient (and in mode MODE, if that's convenient). */
4067 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4069 if (!validate_arglist (exp,
4070 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4074 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4075 CALL_EXPR_ARG (exp, 1),
4076 CALL_EXPR_ARG (exp, 2));
4078 return expand_expr (result, target, mode, EXPAND_NORMAL);
4081 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4083 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4086 tree arg1 = CALL_EXPR_ARG (exp, 0);
4087 tree arg2 = CALL_EXPR_ARG (exp, 1);
4088 tree len = CALL_EXPR_ARG (exp, 2);
4091 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4093 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4094 enum machine_mode insn_mode;
4096 #ifdef HAVE_cmpmemsi
4098 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4101 #ifdef HAVE_cmpstrnsi
4103 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4108 /* If we don't have POINTER_TYPE, call the function. */
4109 if (arg1_align == 0 || arg2_align == 0)
4112 /* Make a place to write the result of the instruction. */
4115 && REG_P (result) && GET_MODE (result) == insn_mode
4116 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4117 result = gen_reg_rtx (insn_mode);
4119 arg1_rtx = get_memory_rtx (arg1, len);
4120 arg2_rtx = get_memory_rtx (arg2, len);
4121 arg3_rtx = expand_normal (len);
4123 /* Set MEM_SIZE as appropriate. */
4124 if (GET_CODE (arg3_rtx) == CONST_INT)
4126 set_mem_size (arg1_rtx, arg3_rtx);
4127 set_mem_size (arg2_rtx, arg3_rtx);
4130 #ifdef HAVE_cmpmemsi
4132 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4133 GEN_INT (MIN (arg1_align, arg2_align)));
4136 #ifdef HAVE_cmpstrnsi
4138 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4139 GEN_INT (MIN (arg1_align, arg2_align)));
4147 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
4148 TYPE_MODE (integer_type_node), 3,
4149 XEXP (arg1_rtx, 0), Pmode,
4150 XEXP (arg2_rtx, 0), Pmode,
4151 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4152 TYPE_UNSIGNED (sizetype)),
4153 TYPE_MODE (sizetype));
4155 /* Return the value in the proper mode for this function. */
4156 mode = TYPE_MODE (TREE_TYPE (exp));
4157 if (GET_MODE (result) == mode)
4159 else if (target != 0)
4161 convert_move (target, result, 0);
4165 return convert_to_mode (mode, result, 0);
4172 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4173 if we failed the caller should emit a normal call, otherwise try to get
4174 the result in TARGET, if convenient. */
4177 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4179 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4183 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4184 CALL_EXPR_ARG (exp, 1));
4186 return expand_expr (result, target, mode, EXPAND_NORMAL);
4189 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4190 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4191 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4193 rtx arg1_rtx, arg2_rtx;
4194 rtx result, insn = NULL_RTX;
4196 tree arg1 = CALL_EXPR_ARG (exp, 0);
4197 tree arg2 = CALL_EXPR_ARG (exp, 1);
4200 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4202 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4204 /* If we don't have POINTER_TYPE, call the function. */
4205 if (arg1_align == 0 || arg2_align == 0)
4208 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4209 arg1 = builtin_save_expr (arg1);
4210 arg2 = builtin_save_expr (arg2);
4212 arg1_rtx = get_memory_rtx (arg1, NULL);
4213 arg2_rtx = get_memory_rtx (arg2, NULL);
4215 #ifdef HAVE_cmpstrsi
4216 /* Try to call cmpstrsi. */
4219 enum machine_mode insn_mode
4220 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4222 /* Make a place to write the result of the instruction. */
4225 && REG_P (result) && GET_MODE (result) == insn_mode
4226 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4227 result = gen_reg_rtx (insn_mode);
4229 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4230 GEN_INT (MIN (arg1_align, arg2_align)));
4233 #ifdef HAVE_cmpstrnsi
4234 /* Try to determine at least one length and call cmpstrnsi. */
4235 if (!insn && HAVE_cmpstrnsi)
4240 enum machine_mode insn_mode
4241 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4242 tree len1 = c_strlen (arg1, 1);
4243 tree len2 = c_strlen (arg2, 1);
4246 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4248 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4250 /* If we don't have a constant length for the first, use the length
4251 of the second, if we know it. We don't require a constant for
4252 this case; some cost analysis could be done if both are available
4253 but neither is constant. For now, assume they're equally cheap,
4254 unless one has side effects. If both strings have constant lengths,
4261 else if (TREE_SIDE_EFFECTS (len1))
4263 else if (TREE_SIDE_EFFECTS (len2))
4265 else if (TREE_CODE (len1) != INTEGER_CST)
4267 else if (TREE_CODE (len2) != INTEGER_CST)
4269 else if (tree_int_cst_lt (len1, len2))
4274 /* If both arguments have side effects, we cannot optimize. */
4275 if (!len || TREE_SIDE_EFFECTS (len))
4278 arg3_rtx = expand_normal (len);
4280 /* Make a place to write the result of the instruction. */
4283 && REG_P (result) && GET_MODE (result) == insn_mode
4284 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4285 result = gen_reg_rtx (insn_mode);
4287 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4288 GEN_INT (MIN (arg1_align, arg2_align)));
4296 /* Return the value in the proper mode for this function. */
4297 mode = TYPE_MODE (TREE_TYPE (exp));
4298 if (GET_MODE (result) == mode)
4301 return convert_to_mode (mode, result, 0);
4302 convert_move (target, result, 0);
4306 /* Expand the library call ourselves using a stabilized argument
4307 list to avoid re-evaluating the function's arguments twice. */
4308 #ifdef HAVE_cmpstrnsi
4311 fndecl = get_callee_fndecl (exp);
4312 fn = build_call_expr (fndecl, 2, arg1, arg2);
4313 if (TREE_CODE (fn) == CALL_EXPR)
4314 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4315 return expand_call (fn, target, target == const0_rtx);
4321 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4322 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4323 the result in TARGET, if convenient. */
4326 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4328 if (!validate_arglist (exp,
4329 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4333 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4334 CALL_EXPR_ARG (exp, 1),
4335 CALL_EXPR_ARG (exp, 2));
4337 return expand_expr (result, target, mode, EXPAND_NORMAL);
4340 /* If c_strlen can determine an expression for one of the string
4341 lengths, and it doesn't have side effects, then emit cmpstrnsi
4342 using length MIN(strlen(string)+1, arg3). */
4343 #ifdef HAVE_cmpstrnsi
4346 tree len, len1, len2;
4347 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4350 tree arg1 = CALL_EXPR_ARG (exp, 0);
4351 tree arg2 = CALL_EXPR_ARG (exp, 1);
4352 tree arg3 = CALL_EXPR_ARG (exp, 2);
4355 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4357 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4358 enum machine_mode insn_mode
4359 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4361 len1 = c_strlen (arg1, 1);
4362 len2 = c_strlen (arg2, 1);
4365 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4367 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4369 /* If we don't have a constant length for the first, use the length
4370 of the second, if we know it. We don't require a constant for
4371 this case; some cost analysis could be done if both are available
4372 but neither is constant. For now, assume they're equally cheap,
4373 unless one has side effects. If both strings have constant lengths,
4380 else if (TREE_SIDE_EFFECTS (len1))
4382 else if (TREE_SIDE_EFFECTS (len2))
4384 else if (TREE_CODE (len1) != INTEGER_CST)
4386 else if (TREE_CODE (len2) != INTEGER_CST)
4388 else if (tree_int_cst_lt (len1, len2))
4393 /* If both arguments have side effects, we cannot optimize. */
4394 if (!len || TREE_SIDE_EFFECTS (len))
4397 /* The actual new length parameter is MIN(len,arg3). */
4398 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4399 fold_convert (TREE_TYPE (len), arg3));
4401 /* If we don't have POINTER_TYPE, call the function. */
4402 if (arg1_align == 0 || arg2_align == 0)
4405 /* Make a place to write the result of the instruction. */
4408 && REG_P (result) && GET_MODE (result) == insn_mode
4409 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4410 result = gen_reg_rtx (insn_mode);
4412 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4413 arg1 = builtin_save_expr (arg1);
4414 arg2 = builtin_save_expr (arg2);
4415 len = builtin_save_expr (len);
4417 arg1_rtx = get_memory_rtx (arg1, len);
4418 arg2_rtx = get_memory_rtx (arg2, len);
4419 arg3_rtx = expand_normal (len);
4420 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4421 GEN_INT (MIN (arg1_align, arg2_align)));
4426 /* Return the value in the proper mode for this function. */
4427 mode = TYPE_MODE (TREE_TYPE (exp));
4428 if (GET_MODE (result) == mode)
4431 return convert_to_mode (mode, result, 0);
4432 convert_move (target, result, 0);
4436 /* Expand the library call ourselves using a stabilized argument
4437 list to avoid re-evaluating the function's arguments twice. */
4438 fndecl = get_callee_fndecl (exp);
4439 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4440 if (TREE_CODE (fn) == CALL_EXPR)
4441 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4442 return expand_call (fn, target, target == const0_rtx);
4448 /* Expand expression EXP, which is a call to the strcat builtin.
4449 Return NULL_RTX if we failed the caller should emit a normal call,
4450 otherwise try to get the result in TARGET, if convenient. */
4453 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4455 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4459 tree dst = CALL_EXPR_ARG (exp, 0);
4460 tree src = CALL_EXPR_ARG (exp, 1);
4461 const char *p = c_getstr (src);
4463 /* If the string length is zero, return the dst parameter. */
4464 if (p && *p == '\0')
4465 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4469 /* See if we can store by pieces into (dst + strlen(dst)). */
4470 tree newsrc, newdst,
4471 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4474 /* Stabilize the argument list. */
4475 newsrc = builtin_save_expr (src);
4476 dst = builtin_save_expr (dst);
4480 /* Create strlen (dst). */
4481 newdst = build_call_expr (strlen_fn, 1, dst);
4482 /* Create (dst p+ strlen (dst)). */
4484 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4485 newdst = builtin_save_expr (newdst);
4487 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4489 end_sequence (); /* Stop sequence. */
4493 /* Output the entire sequence. */
4494 insns = get_insns ();
4498 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4505 /* Expand expression EXP, which is a call to the strncat builtin.
4506 Return NULL_RTX if we failed the caller should emit a normal call,
4507 otherwise try to get the result in TARGET, if convenient. */
4510 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4512 if (validate_arglist (exp,
4513 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4515 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4516 CALL_EXPR_ARG (exp, 1),
4517 CALL_EXPR_ARG (exp, 2));
4519 return expand_expr (result, target, mode, EXPAND_NORMAL);
4524 /* Expand expression EXP, which is a call to the strspn builtin.
4525 Return NULL_RTX if we failed the caller should emit a normal call,
4526 otherwise try to get the result in TARGET, if convenient. */
4529 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4531 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4533 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4534 CALL_EXPR_ARG (exp, 1));
4536 return expand_expr (result, target, mode, EXPAND_NORMAL);
4541 /* Expand expression EXP, which is a call to the strcspn builtin.
4542 Return NULL_RTX if we failed the caller should emit a normal call,
4543 otherwise try to get the result in TARGET, if convenient. */
4546 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4548 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4550 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4551 CALL_EXPR_ARG (exp, 1));
4553 return expand_expr (result, target, mode, EXPAND_NORMAL);
4558 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4559 if that's convenient. */
4562 expand_builtin_saveregs (void)
4566 /* Don't do __builtin_saveregs more than once in a function.
4567 Save the result of the first call and reuse it. */
4568 if (saveregs_value != 0)
4569 return saveregs_value;
4571 /* When this function is called, it means that registers must be
4572 saved on entry to this function. So we migrate the call to the
4573 first insn of this function. */
4577 /* Do whatever the machine needs done in this case. */
4578 val = targetm.calls.expand_builtin_saveregs ();
4583 saveregs_value = val;
4585 /* Put the insns after the NOTE that starts the function. If this
4586 is inside a start_sequence, make the outer-level insn chain current, so
4587 the code is placed at the start of the function. */
4588 push_topmost_sequence ();
4589 emit_insn_after (seq, entry_of_function ());
4590 pop_topmost_sequence ();
4595 /* __builtin_args_info (N) returns word N of the arg space info
4596 for the current function. The number and meanings of words
4597 is controlled by the definition of CUMULATIVE_ARGS. */
4600 expand_builtin_args_info (tree exp)
4602 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4603 int *word_ptr = (int *) ¤t_function_args_info;
4605 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4607 if (call_expr_nargs (exp) != 0)
4609 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4610 error ("argument of %<__builtin_args_info%> must be constant");
4613 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4615 if (wordnum < 0 || wordnum >= nwords)
4616 error ("argument of %<__builtin_args_info%> out of range");
4618 return GEN_INT (word_ptr[wordnum]);
4622 error ("missing argument in %<__builtin_args_info%>");
4627 /* Expand a call to __builtin_next_arg. */
4630 expand_builtin_next_arg (void)
4632 /* Checking arguments is already done in fold_builtin_next_arg
4633 that must be called before this function. */
4634 return expand_binop (ptr_mode, add_optab,
4635 current_function_internal_arg_pointer,
4636 current_function_arg_offset_rtx,
4637 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4640 /* Make it easier for the backends by protecting the valist argument
4641 from multiple evaluations. */
4644 stabilize_va_list (tree valist, int needs_lvalue)
4646 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4648 if (TREE_SIDE_EFFECTS (valist))
4649 valist = save_expr (valist);
4651 /* For this case, the backends will be expecting a pointer to
4652 TREE_TYPE (va_list_type_node), but it's possible we've
4653 actually been given an array (an actual va_list_type_node).
4655 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4657 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4658 valist = build_fold_addr_expr_with_type (valist, p1);
4667 if (! TREE_SIDE_EFFECTS (valist))
4670 pt = build_pointer_type (va_list_type_node);
4671 valist = fold_build1 (ADDR_EXPR, pt, valist);
4672 TREE_SIDE_EFFECTS (valist) = 1;
4675 if (TREE_SIDE_EFFECTS (valist))
4676 valist = save_expr (valist);
4677 valist = build_fold_indirect_ref (valist);
4683 /* The "standard" definition of va_list is void*. */
4686 std_build_builtin_va_list (void)
4688 return ptr_type_node;
4691 /* The "standard" implementation of va_start: just assign `nextarg' to
4695 std_expand_builtin_va_start (tree valist, rtx nextarg)
4697 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4698 convert_move (va_r, nextarg, 0);
4701 /* Expand EXP, a call to __builtin_va_start. */
4704 expand_builtin_va_start (tree exp)
4709 if (call_expr_nargs (exp) < 2)
4711 error ("too few arguments to function %<va_start%>");
4715 if (fold_builtin_next_arg (exp, true))
4718 nextarg = expand_builtin_next_arg ();
4719 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4721 if (targetm.expand_builtin_va_start)
4722 targetm.expand_builtin_va_start (valist, nextarg);
4724 std_expand_builtin_va_start (valist, nextarg);
4729 /* The "standard" implementation of va_arg: read the value from the
4730 current (padded) address and increment by the (padded) size. */
4733 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4735 tree addr, t, type_size, rounded_size, valist_tmp;
4736 unsigned HOST_WIDE_INT align, boundary;
4739 #ifdef ARGS_GROW_DOWNWARD
4740 /* All of the alignment and movement below is for args-grow-up machines.
4741 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4742 implement their own specialized gimplify_va_arg_expr routines. */
4746 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4748 type = build_pointer_type (type);
4750 align = PARM_BOUNDARY / BITS_PER_UNIT;
4751 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4753 /* Hoist the valist value into a temporary for the moment. */
4754 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4756 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4757 requires greater alignment, we must perform dynamic alignment. */
4758 if (boundary > align
4759 && !integer_zerop (TYPE_SIZE (type)))
4761 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4762 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4763 valist_tmp, size_int (boundary - 1)));
4764 gimplify_and_add (t, pre_p);
4766 t = fold_convert (sizetype, valist_tmp);
4767 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4768 fold_convert (TREE_TYPE (valist),
4769 fold_build2 (BIT_AND_EXPR, sizetype, t,
4770 size_int (-boundary))));
4771 gimplify_and_add (t, pre_p);
4776 /* If the actual alignment is less than the alignment of the type,
4777 adjust the type accordingly so that we don't assume strict alignment
4778 when deferencing the pointer. */
4779 boundary *= BITS_PER_UNIT;
4780 if (boundary < TYPE_ALIGN (type))
4782 type = build_variant_type_copy (type);
4783 TYPE_ALIGN (type) = boundary;
4786 /* Compute the rounded size of the type. */
4787 type_size = size_in_bytes (type);
4788 rounded_size = round_up (type_size, align);
4790 /* Reduce rounded_size so it's sharable with the postqueue. */
4791 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4795 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4797 /* Small args are padded downward. */
4798 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4799 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4800 size_binop (MINUS_EXPR, rounded_size, type_size));
4801 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4804 /* Compute new value for AP. */
4805 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4806 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4807 gimplify_and_add (t, pre_p);
4809 addr = fold_convert (build_pointer_type (type), addr);
4812 addr = build_va_arg_indirect_ref (addr);
4814 return build_va_arg_indirect_ref (addr);
4817 /* Build an indirect-ref expression over the given TREE, which represents a
4818 piece of a va_arg() expansion. */
4820 build_va_arg_indirect_ref (tree addr)
4822 addr = build_fold_indirect_ref (addr);
4824 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4830 /* Return a dummy expression of type TYPE in order to keep going after an
4834 dummy_object (tree type)
4836 tree t = build_int_cst (build_pointer_type (type), 0);
4837 return build1 (INDIRECT_REF, type, t);
4840 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4841 builtin function, but a very special sort of operator. */
4843 enum gimplify_status
4844 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4846 tree promoted_type, want_va_type, have_va_type;
4847 tree valist = TREE_OPERAND (*expr_p, 0);
4848 tree type = TREE_TYPE (*expr_p);
4851 /* Verify that valist is of the proper type. */
4852 want_va_type = va_list_type_node;
4853 have_va_type = TREE_TYPE (valist);
4855 if (have_va_type == error_mark_node)
4858 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4860 /* If va_list is an array type, the argument may have decayed
4861 to a pointer type, e.g. by being passed to another function.
4862 In that case, unwrap both types so that we can compare the
4863 underlying records. */
4864 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4865 || POINTER_TYPE_P (have_va_type))
4867 want_va_type = TREE_TYPE (want_va_type);
4868 have_va_type = TREE_TYPE (have_va_type);
4872 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4874 error ("first argument to %<va_arg%> not of type %<va_list%>");
4878 /* Generate a diagnostic for requesting data of a type that cannot
4879 be passed through `...' due to type promotion at the call site. */
4880 else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4883 static bool gave_help;
4885 /* Unfortunately, this is merely undefined, rather than a constraint
4886 violation, so we cannot make this an error. If this call is never
4887 executed, the program is still strictly conforming. */
4888 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4889 type, promoted_type);
4893 inform ("(so you should pass %qT not %qT to %<va_arg%>)",
4894 promoted_type, type);
4897 /* We can, however, treat "undefined" any way we please.
4898 Call abort to encourage the user to fix the program. */
4899 inform ("if this code is reached, the program will abort");
4900 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4901 append_to_statement_list (t, pre_p);
4903 /* This is dead code, but go ahead and finish so that the
4904 mode of the result comes out right. */
4905 *expr_p = dummy_object (type);
4910 /* Make it easier for the backends by protecting the valist argument
4911 from multiple evaluations. */
4912 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4914 /* For this case, the backends will be expecting a pointer to
4915 TREE_TYPE (va_list_type_node), but it's possible we've
4916 actually been given an array (an actual va_list_type_node).
4918 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4920 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4921 valist = build_fold_addr_expr_with_type (valist, p1);
4923 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4926 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4928 if (!targetm.gimplify_va_arg_expr)
4929 /* FIXME:Once most targets are converted we should merely
4930 assert this is non-null. */
4933 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4938 /* Expand EXP, a call to __builtin_va_end. */
4941 expand_builtin_va_end (tree exp)
4943 tree valist = CALL_EXPR_ARG (exp, 0);
4945 /* Evaluate for side effects, if needed. I hate macros that don't
4947 if (TREE_SIDE_EFFECTS (valist))
4948 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4953 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4954 builtin rather than just as an assignment in stdarg.h because of the
4955 nastiness of array-type va_list types. */
4958 expand_builtin_va_copy (tree exp)
4962 dst = CALL_EXPR_ARG (exp, 0);
4963 src = CALL_EXPR_ARG (exp, 1);
4965 dst = stabilize_va_list (dst, 1);
4966 src = stabilize_va_list (src, 0);
4968 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4970 t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4971 TREE_SIDE_EFFECTS (t) = 1;
4972 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4976 rtx dstb, srcb, size;
4978 /* Evaluate to pointers. */
4979 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4980 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4981 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4982 VOIDmode, EXPAND_NORMAL);
4984 dstb = convert_memory_address (Pmode, dstb);
4985 srcb = convert_memory_address (Pmode, srcb);
4987 /* "Dereference" to BLKmode memories. */
4988 dstb = gen_rtx_MEM (BLKmode, dstb);
4989 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4990 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
4991 srcb = gen_rtx_MEM (BLKmode, srcb);
4992 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4993 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
4996 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5002 /* Expand a call to one of the builtin functions __builtin_frame_address or
5003 __builtin_return_address. */
5006 expand_builtin_frame_address (tree fndecl, tree exp)
5008 /* The argument must be a nonnegative integer constant.
5009 It counts the number of frames to scan up the stack.
5010 The value is the return address saved in that frame. */
5011 if (call_expr_nargs (exp) == 0)
5012 /* Warning about missing arg was already issued. */
5014 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5016 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5017 error ("invalid argument to %<__builtin_frame_address%>");
5019 error ("invalid argument to %<__builtin_return_address%>");
5025 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5026 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5028 /* Some ports cannot access arbitrary stack frames. */
5031 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5032 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5034 warning (0, "unsupported argument to %<__builtin_return_address%>");
5038 /* For __builtin_frame_address, return what we've got. */
5039 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5043 && ! CONSTANT_P (tem))
5044 tem = copy_to_mode_reg (Pmode, tem);
5049 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5050 we failed and the caller should emit a normal call, otherwise try to get
5051 the result in TARGET, if convenient. */
5054 expand_builtin_alloca (tree exp, rtx target)
5059 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5060 should always expand to function calls. These can be intercepted
5065 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5068 /* Compute the argument. */
5069 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5071 /* Allocate the desired space. */
5072 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5073 result = convert_memory_address (ptr_mode, result);
5078 /* Expand a call to a bswap builtin with argument ARG0. MODE
5079 is the mode to expand with. */
5082 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5084 enum machine_mode mode;
5088 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5091 arg = CALL_EXPR_ARG (exp, 0);
5092 mode = TYPE_MODE (TREE_TYPE (arg));
5093 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5095 target = expand_unop (mode, bswap_optab, op0, target, 1);
5097 gcc_assert (target);
5099 return convert_to_mode (mode, target, 0);
5102 /* Expand a call to a unary builtin in EXP.
5103 Return NULL_RTX if a normal call should be emitted rather than expanding the
5104 function in-line. If convenient, the result should be placed in TARGET.
5105 SUBTARGET may be used as the target for computing one of EXP's operands. */
5108 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5109 rtx subtarget, optab op_optab)
5113 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5116 /* Compute the argument. */
5117 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5118 VOIDmode, EXPAND_NORMAL);
5119 /* Compute op, into TARGET if possible.
5120 Set TARGET to wherever the result comes back. */
5121 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5122 op_optab, op0, target, 1);
5123 gcc_assert (target);
5125 return convert_to_mode (target_mode, target, 0);
5128 /* If the string passed to fputs is a constant and is one character
5129 long, we attempt to transform this call into __builtin_fputc(). */
5132 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5134 /* Verify the arguments in the original call. */
5135 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5137 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5138 CALL_EXPR_ARG (exp, 1),
5139 (target == const0_rtx),
5140 unlocked, NULL_TREE);
5142 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5147 /* Expand a call to __builtin_expect. We just return our argument
5148 as the builtin_expect semantic should've been already executed by
5149 tree branch prediction pass. */
5152 expand_builtin_expect (tree exp, rtx target)
5156 if (call_expr_nargs (exp) < 2)
5158 arg = CALL_EXPR_ARG (exp, 0);
5159 c = CALL_EXPR_ARG (exp, 1);
5161 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5162 /* When guessing was done, the hints should be already stripped away. */
5163 gcc_assert (!flag_guess_branch_prob
5164 || optimize == 0 || errorcount || sorrycount);
5169 expand_builtin_trap (void)
5173 emit_insn (gen_trap ());
5176 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5180 /* Expand EXP, a call to fabs, fabsf or fabsl.
5181 Return NULL_RTX if a normal call should be emitted rather than expanding
5182 the function inline. If convenient, the result should be placed
5183 in TARGET. SUBTARGET may be used as the target for computing
5187 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5189 enum machine_mode mode;
5193 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5196 arg = CALL_EXPR_ARG (exp, 0);
5197 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5198 mode = TYPE_MODE (TREE_TYPE (arg));
5199 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5200 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5203 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5204 Return NULL is a normal call should be emitted rather than expanding the
5205 function inline. If convenient, the result should be placed in TARGET.
5206 SUBTARGET may be used as the target for computing the operand. */
5209 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5214 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5217 arg = CALL_EXPR_ARG (exp, 0);
5218 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5220 arg = CALL_EXPR_ARG (exp, 1);
5221 op1 = expand_normal (arg);
5223 return expand_copysign (op0, op1, target);
5226 /* Create a new constant string literal and return a char* pointer to it.
5227 The STRING_CST value is the LEN characters at STR. */
5229 build_string_literal (int len, const char *str)
5231 tree t, elem, index, type;
5233 t = build_string (len, str);
5234 elem = build_type_variant (char_type_node, 1, 0);
5235 index = build_index_type (build_int_cst (NULL_TREE, len - 1));
5236 type = build_array_type (elem, index);
5237 TREE_TYPE (t) = type;
5238 TREE_CONSTANT (t) = 1;
5239 TREE_INVARIANT (t) = 1;
5240 TREE_READONLY (t) = 1;
5241 TREE_STATIC (t) = 1;
5243 type = build_pointer_type (type);
5244 t = build1 (ADDR_EXPR, type, t);
5246 type = build_pointer_type (elem);
5247 t = build1 (NOP_EXPR, type, t);
5251 /* Expand EXP, a call to printf or printf_unlocked.
5252 Return NULL_RTX if a normal call should be emitted rather than transforming
5253 the function inline. If convenient, the result should be placed in
5254 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5257 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5260 /* If we're using an unlocked function, assume the other unlocked
5261 functions exist explicitly. */
5262 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5263 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5264 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5265 : implicit_built_in_decls[BUILT_IN_PUTS];
5266 const char *fmt_str;
5269 int nargs = call_expr_nargs (exp);
5271 /* If the return value is used, don't do the transformation. */
5272 if (target != const0_rtx)
5275 /* Verify the required arguments in the original call. */
5278 fmt = CALL_EXPR_ARG (exp, 0);
5279 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5282 /* Check whether the format is a literal string constant. */
5283 fmt_str = c_getstr (fmt);
5284 if (fmt_str == NULL)
5287 if (!init_target_chars ())
5290 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5291 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5294 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5297 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5299 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5300 else if (strcmp (fmt_str, target_percent_c) == 0)
5303 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5306 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5310 /* We can't handle anything else with % args or %% ... yet. */
5311 if (strchr (fmt_str, target_percent))
5317 /* If the format specifier was "", printf does nothing. */
5318 if (fmt_str[0] == '\0')
5320 /* If the format specifier has length of 1, call putchar. */
5321 if (fmt_str[1] == '\0')
5323 /* Given printf("c"), (where c is any one character,)
5324 convert "c"[0] to an int and pass that to the replacement
5326 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5328 fn = build_call_expr (fn_putchar, 1, arg);
5332 /* If the format specifier was "string\n", call puts("string"). */
5333 size_t len = strlen (fmt_str);
5334 if ((unsigned char)fmt_str[len - 1] == target_newline)
5336 /* Create a NUL-terminated string that's one char shorter
5337 than the original, stripping off the trailing '\n'. */
5338 char *newstr = alloca (len);
5339 memcpy (newstr, fmt_str, len - 1);
5340 newstr[len - 1] = 0;
5341 arg = build_string_literal (len, newstr);
5343 fn = build_call_expr (fn_puts, 1, arg);
5346 /* We'd like to arrange to call fputs(string,stdout) here,
5347 but we need stdout and don't have a way to get it yet. */
5354 if (TREE_CODE (fn) == CALL_EXPR)
5355 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5356 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5359 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5360 Return NULL_RTX if a normal call should be emitted rather than transforming
5361 the function inline. If convenient, the result should be placed in
5362 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5365 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5368 /* If we're using an unlocked function, assume the other unlocked
5369 functions exist explicitly. */
5370 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5371 : implicit_built_in_decls[BUILT_IN_FPUTC];
5372 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5373 : implicit_built_in_decls[BUILT_IN_FPUTS];
5374 const char *fmt_str;
5377 int nargs = call_expr_nargs (exp);
5379 /* If the return value is used, don't do the transformation. */
5380 if (target != const0_rtx)
5383 /* Verify the required arguments in the original call. */
5386 fp = CALL_EXPR_ARG (exp, 0);
5387 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5389 fmt = CALL_EXPR_ARG (exp, 1);
5390 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5393 /* Check whether the format is a literal string constant. */
5394 fmt_str = c_getstr (fmt);
5395 if (fmt_str == NULL)
5398 if (!init_target_chars ())
5401 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5402 if (strcmp (fmt_str, target_percent_s) == 0)
5405 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5407 arg = CALL_EXPR_ARG (exp, 2);
5409 fn = build_call_expr (fn_fputs, 2, arg, fp);
5411 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5412 else if (strcmp (fmt_str, target_percent_c) == 0)
5415 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5417 arg = CALL_EXPR_ARG (exp, 2);
5419 fn = build_call_expr (fn_fputc, 2, arg, fp);
5423 /* We can't handle anything else with % args or %% ... yet. */
5424 if (strchr (fmt_str, target_percent))
5430 /* If the format specifier was "", fprintf does nothing. */
5431 if (fmt_str[0] == '\0')
5433 /* Evaluate and ignore FILE* argument for side-effects. */
5434 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5438 /* When "string" doesn't contain %, replace all cases of
5439 fprintf(stream,string) with fputs(string,stream). The fputs
5440 builtin will take care of special cases like length == 1. */
5442 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5447 if (TREE_CODE (fn) == CALL_EXPR)
5448 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5449 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5452 /* Expand a call EXP to sprintf. Return NULL_RTX if
5453 a normal call should be emitted rather than expanding the function
5454 inline. If convenient, the result should be placed in TARGET with
5458 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5461 const char *fmt_str;
5462 int nargs = call_expr_nargs (exp);
5464 /* Verify the required arguments in the original call. */
5467 dest = CALL_EXPR_ARG (exp, 0);
5468 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5470 fmt = CALL_EXPR_ARG (exp, 0);
5471 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5474 /* Check whether the format is a literal string constant. */
5475 fmt_str = c_getstr (fmt);
5476 if (fmt_str == NULL)
5479 if (!init_target_chars ())
5482 /* If the format doesn't contain % args or %%, use strcpy. */
5483 if (strchr (fmt_str, target_percent) == 0)
5485 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5488 if ((nargs > 2) || ! fn)
5490 expand_expr (build_call_expr (fn, 2, dest, fmt),
5491 const0_rtx, VOIDmode, EXPAND_NORMAL);
5492 if (target == const0_rtx)
5494 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5495 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5497 /* If the format is "%s", use strcpy if the result isn't used. */
5498 else if (strcmp (fmt_str, target_percent_s) == 0)
5501 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5507 arg = CALL_EXPR_ARG (exp, 2);
5508 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5511 if (target != const0_rtx)
5513 len = c_strlen (arg, 1);
5514 if (! len || TREE_CODE (len) != INTEGER_CST)
5520 expand_expr (build_call_expr (fn, 2, dest, arg),
5521 const0_rtx, VOIDmode, EXPAND_NORMAL);
5523 if (target == const0_rtx)
5525 return expand_expr (len, target, mode, EXPAND_NORMAL);
5531 /* Expand a call to either the entry or exit function profiler. */
5534 expand_builtin_profile_func (bool exitp)
5538 this = DECL_RTL (current_function_decl);
5539 gcc_assert (MEM_P (this));
5540 this = XEXP (this, 0);
5543 which = profile_function_exit_libfunc;
5545 which = profile_function_entry_libfunc;
5547 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5548 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5555 /* Expand a call to __builtin___clear_cache. */
5558 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5560 #ifndef HAVE_clear_cache
5561 #ifdef CLEAR_INSN_CACHE
5562 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5563 does something. Just do the default expansion to a call to
5567 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5568 does nothing. There is no need to call it. Do nothing. */
5570 #endif /* CLEAR_INSN_CACHE */
5572 /* We have a "clear_cache" insn, and it will handle everything. */
5574 rtx begin_rtx, end_rtx;
5575 enum insn_code icode;
5577 /* We must not expand to a library call. If we did, any
5578 fallback library function in libgcc that might contain a call to
5579 __builtin___clear_cache() would recurse infinitely. */
5580 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5582 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5586 if (HAVE_clear_cache)
5588 icode = CODE_FOR_clear_cache;
5590 begin = CALL_EXPR_ARG (exp, 0);
5591 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5592 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5593 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5594 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5596 end = CALL_EXPR_ARG (exp, 1);
5597 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5598 end_rtx = convert_memory_address (Pmode, end_rtx);
5599 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5600 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5602 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5605 #endif /* HAVE_clear_cache */
5608 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5611 round_trampoline_addr (rtx tramp)
5613 rtx temp, addend, mask;
5615 /* If we don't need too much alignment, we'll have been guaranteed
5616 proper alignment by get_trampoline_type. */
5617 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5620 /* Round address up to desired boundary. */
5621 temp = gen_reg_rtx (Pmode);
5622 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5623 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5625 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5626 temp, 0, OPTAB_LIB_WIDEN);
5627 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5628 temp, 0, OPTAB_LIB_WIDEN);
5634 expand_builtin_init_trampoline (tree exp)
5636 tree t_tramp, t_func, t_chain;
5637 rtx r_tramp, r_func, r_chain;
5638 #ifdef TRAMPOLINE_TEMPLATE
5642 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5643 POINTER_TYPE, VOID_TYPE))
5646 t_tramp = CALL_EXPR_ARG (exp, 0);
5647 t_func = CALL_EXPR_ARG (exp, 1);
5648 t_chain = CALL_EXPR_ARG (exp, 2);
5650 r_tramp = expand_normal (t_tramp);
5651 r_func = expand_normal (t_func);
5652 r_chain = expand_normal (t_chain);
5654 /* Generate insns to initialize the trampoline. */
5655 r_tramp = round_trampoline_addr (r_tramp);
5656 #ifdef TRAMPOLINE_TEMPLATE
5657 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5658 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5659 emit_block_move (blktramp, assemble_trampoline_template (),
5660 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5662 trampolines_created = 1;
5663 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5669 expand_builtin_adjust_trampoline (tree exp)
5673 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5676 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5677 tramp = round_trampoline_addr (tramp);
5678 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5679 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5685 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5686 function. The function first checks whether the back end provides
5687 an insn to implement signbit for the respective mode. If not, it
5688 checks whether the floating point format of the value is such that
5689 the sign bit can be extracted. If that is not the case, the
5690 function returns NULL_RTX to indicate that a normal call should be
5691 emitted rather than expanding the function in-line. EXP is the
5692 expression that is a call to the builtin function; if convenient,
5693 the result should be placed in TARGET. */
5695 expand_builtin_signbit (tree exp, rtx target)
5697 const struct real_format *fmt;
5698 enum machine_mode fmode, imode, rmode;
5699 HOST_WIDE_INT hi, lo;
5702 enum insn_code icode;
5705 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5708 arg = CALL_EXPR_ARG (exp, 0);
5709 fmode = TYPE_MODE (TREE_TYPE (arg));
5710 rmode = TYPE_MODE (TREE_TYPE (exp));
5711 fmt = REAL_MODE_FORMAT (fmode);
5713 arg = builtin_save_expr (arg);
5715 /* Expand the argument yielding a RTX expression. */
5716 temp = expand_normal (arg);
5718 /* Check if the back end provides an insn that handles signbit for the
5720 icode = signbit_optab->handlers [(int) fmode].insn_code;
5721 if (icode != CODE_FOR_nothing)
5723 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5724 emit_unop_insn (icode, target, temp, UNKNOWN);
5728 /* For floating point formats without a sign bit, implement signbit
5730 bitpos = fmt->signbit_ro;
5733 /* But we can't do this if the format supports signed zero. */
5734 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5737 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5738 build_real (TREE_TYPE (arg), dconst0));
5739 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5742 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5744 imode = int_mode_for_mode (fmode);
5745 if (imode == BLKmode)
5747 temp = gen_lowpart (imode, temp);
5752 /* Handle targets with different FP word orders. */
5753 if (FLOAT_WORDS_BIG_ENDIAN)
5754 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5756 word = bitpos / BITS_PER_WORD;
5757 temp = operand_subword_force (temp, word, fmode);
5758 bitpos = bitpos % BITS_PER_WORD;
5761 /* Force the intermediate word_mode (or narrower) result into a
5762 register. This avoids attempting to create paradoxical SUBREGs
5763 of floating point modes below. */
5764 temp = force_reg (imode, temp);
5766 /* If the bitpos is within the "result mode" lowpart, the operation
5767 can be implement with a single bitwise AND. Otherwise, we need
5768 a right shift and an AND. */
5770 if (bitpos < GET_MODE_BITSIZE (rmode))
5772 if (bitpos < HOST_BITS_PER_WIDE_INT)
5775 lo = (HOST_WIDE_INT) 1 << bitpos;
5779 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5784 temp = gen_lowpart (rmode, temp);
5785 temp = expand_binop (rmode, and_optab, temp,
5786 immed_double_const (lo, hi, rmode),
5787 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5791 /* Perform a logical right shift to place the signbit in the least
5792 significant bit, then truncate the result to the desired mode
5793 and mask just this bit. */
5794 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5795 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5796 temp = gen_lowpart (rmode, temp);
5797 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5798 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5804 /* Expand fork or exec calls. TARGET is the desired target of the
5805 call. EXP is the call. FN is the
5806 identificator of the actual function. IGNORE is nonzero if the
5807 value is to be ignored. */
5810 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5815 /* If we are not profiling, just call the function. */
5816 if (!profile_arc_flag)
5819 /* Otherwise call the wrapper. This should be equivalent for the rest of
5820 compiler, so the code does not diverge, and the wrapper may run the
5821 code necessary for keeping the profiling sane. */
5823 switch (DECL_FUNCTION_CODE (fn))
5826 id = get_identifier ("__gcov_fork");
5829 case BUILT_IN_EXECL:
5830 id = get_identifier ("__gcov_execl");
5833 case BUILT_IN_EXECV:
5834 id = get_identifier ("__gcov_execv");
5837 case BUILT_IN_EXECLP:
5838 id = get_identifier ("__gcov_execlp");
5841 case BUILT_IN_EXECLE:
5842 id = get_identifier ("__gcov_execle");
5845 case BUILT_IN_EXECVP:
5846 id = get_identifier ("__gcov_execvp");
5849 case BUILT_IN_EXECVE:
5850 id = get_identifier ("__gcov_execve");
5857 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5858 DECL_EXTERNAL (decl) = 1;
5859 TREE_PUBLIC (decl) = 1;
5860 DECL_ARTIFICIAL (decl) = 1;
5861 TREE_NOTHROW (decl) = 1;
5862 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5863 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5864 call = rewrite_call_expr (exp, 0, decl, 0);
5865 return expand_call (call, target, ignore);
5870 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5871 the pointer in these functions is void*, the tree optimizers may remove
5872 casts. The mode computed in expand_builtin isn't reliable either, due
5873 to __sync_bool_compare_and_swap.
5875 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5876 group of builtins. This gives us log2 of the mode size. */
5878 static inline enum machine_mode
5879 get_builtin_sync_mode (int fcode_diff)
5881 /* The size is not negotiable, so ask not to get BLKmode in return
5882 if the target indicates that a smaller size would be better. */
5883 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5886 /* Expand the memory expression LOC and return the appropriate memory operand
5887 for the builtin_sync operations. */
5890 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5894 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5896 /* Note that we explicitly do not want any alias information for this
5897 memory, so that we kill all other live memories. Otherwise we don't
5898 satisfy the full barrier semantics of the intrinsic. */
5899 mem = validize_mem (gen_rtx_MEM (mode, addr));
5901 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5902 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5903 MEM_VOLATILE_P (mem) = 1;
5908 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5909 EXP is the CALL_EXPR. CODE is the rtx code
5910 that corresponds to the arithmetic or logical operation from the name;
5911 an exception here is that NOT actually means NAND. TARGET is an optional
5912 place for us to store the results; AFTER is true if this is the
5913 fetch_and_xxx form. IGNORE is true if we don't actually care about
5914 the result of the operation at all. */
5917 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5918 enum rtx_code code, bool after,
5919 rtx target, bool ignore)
5922 enum machine_mode old_mode;
5924 /* Expand the operands. */
5925 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5927 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5928 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5929 of CONST_INTs, where we know the old_mode only from the call argument. */
5930 old_mode = GET_MODE (val);
5931 if (old_mode == VOIDmode)
5932 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5933 val = convert_modes (mode, old_mode, val, 1);
5936 return expand_sync_operation (mem, val, code);
5938 return expand_sync_fetch_operation (mem, val, code, after, target);
5941 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5942 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5943 true if this is the boolean form. TARGET is a place for us to store the
5944 results; this is NOT optional if IS_BOOL is true. */
5947 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5948 bool is_bool, rtx target)
5950 rtx old_val, new_val, mem;
5951 enum machine_mode old_mode;
5953 /* Expand the operands. */
5954 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5957 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5958 mode, EXPAND_NORMAL);
5959 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5960 of CONST_INTs, where we know the old_mode only from the call argument. */
5961 old_mode = GET_MODE (old_val);
5962 if (old_mode == VOIDmode)
5963 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5964 old_val = convert_modes (mode, old_mode, old_val, 1);
5966 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5967 mode, EXPAND_NORMAL);
5968 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5969 of CONST_INTs, where we know the old_mode only from the call argument. */
5970 old_mode = GET_MODE (new_val);
5971 if (old_mode == VOIDmode)
5972 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5973 new_val = convert_modes (mode, old_mode, new_val, 1);
5976 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5978 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5981 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5982 general form is actually an atomic exchange, and some targets only
5983 support a reduced form with the second argument being a constant 1.
5984 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5988 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5992 enum machine_mode old_mode;
5994 /* Expand the operands. */
5995 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5996 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5997 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5998 of CONST_INTs, where we know the old_mode only from the call argument. */
5999 old_mode = GET_MODE (val);
6000 if (old_mode == VOIDmode)
6001 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6002 val = convert_modes (mode, old_mode, val, 1);
6004 return expand_sync_lock_test_and_set (mem, val, target);
6007 /* Expand the __sync_synchronize intrinsic. */
6010 expand_builtin_synchronize (void)
6014 #ifdef HAVE_memory_barrier
6015 if (HAVE_memory_barrier)
6017 emit_insn (gen_memory_barrier ());
6022 /* If no explicit memory barrier instruction is available, create an
6023 empty asm stmt with a memory clobber. */
6024 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6025 tree_cons (NULL, build_string (6, "memory"), NULL));
6026 ASM_VOLATILE_P (x) = 1;
6027 expand_asm_expr (x);
6030 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6033 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6035 enum insn_code icode;
6037 rtx val = const0_rtx;
6039 /* Expand the operands. */
6040 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6042 /* If there is an explicit operation in the md file, use it. */
6043 icode = sync_lock_release[mode];
6044 if (icode != CODE_FOR_nothing)
6046 if (!insn_data[icode].operand[1].predicate (val, mode))
6047 val = force_reg (mode, val);
6049 insn = GEN_FCN (icode) (mem, val);
6057 /* Otherwise we can implement this operation by emitting a barrier
6058 followed by a store of zero. */
6059 expand_builtin_synchronize ();
6060 emit_move_insn (mem, val);
6063 /* Expand an expression EXP that calls a built-in function,
6064 with result going to TARGET if that's convenient
6065 (and in mode MODE if that's convenient).
6066 SUBTARGET may be used as the target for computing one of EXP's operands.
6067 IGNORE is nonzero if the value is to be ignored. */
6070 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6073 tree fndecl = get_callee_fndecl (exp);
6074 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6075 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6077 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6078 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6080 /* When not optimizing, generate calls to library functions for a certain
6083 && !called_as_built_in (fndecl)
6084 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6085 && fcode != BUILT_IN_ALLOCA)
6086 return expand_call (exp, target, ignore);
6088 /* The built-in function expanders test for target == const0_rtx
6089 to determine whether the function's result will be ignored. */
6091 target = const0_rtx;
6093 /* If the result of a pure or const built-in function is ignored, and
6094 none of its arguments are volatile, we can avoid expanding the
6095 built-in call and just evaluate the arguments for side-effects. */
6096 if (target == const0_rtx
6097 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
6099 bool volatilep = false;
6101 call_expr_arg_iterator iter;
6103 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6104 if (TREE_THIS_VOLATILE (arg))
6112 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6113 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6120 CASE_FLT_FN (BUILT_IN_FABS):
6121 target = expand_builtin_fabs (exp, target, subtarget);
6126 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6127 target = expand_builtin_copysign (exp, target, subtarget);
6132 /* Just do a normal library call if we were unable to fold
6134 CASE_FLT_FN (BUILT_IN_CABS):
6137 CASE_FLT_FN (BUILT_IN_EXP):
6138 CASE_FLT_FN (BUILT_IN_EXP10):
6139 CASE_FLT_FN (BUILT_IN_POW10):
6140 CASE_FLT_FN (BUILT_IN_EXP2):
6141 CASE_FLT_FN (BUILT_IN_EXPM1):
6142 CASE_FLT_FN (BUILT_IN_LOGB):
6143 CASE_FLT_FN (BUILT_IN_LOG):
6144 CASE_FLT_FN (BUILT_IN_LOG10):
6145 CASE_FLT_FN (BUILT_IN_LOG2):
6146 CASE_FLT_FN (BUILT_IN_LOG1P):
6147 CASE_FLT_FN (BUILT_IN_TAN):
6148 CASE_FLT_FN (BUILT_IN_ASIN):
6149 CASE_FLT_FN (BUILT_IN_ACOS):
6150 CASE_FLT_FN (BUILT_IN_ATAN):
6151 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6152 because of possible accuracy problems. */
6153 if (! flag_unsafe_math_optimizations)
6155 CASE_FLT_FN (BUILT_IN_SQRT):
6156 CASE_FLT_FN (BUILT_IN_FLOOR):
6157 CASE_FLT_FN (BUILT_IN_CEIL):
6158 CASE_FLT_FN (BUILT_IN_TRUNC):
6159 CASE_FLT_FN (BUILT_IN_ROUND):
6160 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6161 CASE_FLT_FN (BUILT_IN_RINT):
6162 target = expand_builtin_mathfn (exp, target, subtarget);
6167 CASE_FLT_FN (BUILT_IN_ILOGB):
6168 if (! flag_unsafe_math_optimizations)
6170 CASE_FLT_FN (BUILT_IN_ISINF):
6171 CASE_FLT_FN (BUILT_IN_FINITE):
6172 case BUILT_IN_ISFINITE:
6173 case BUILT_IN_ISNORMAL:
6174 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6179 CASE_FLT_FN (BUILT_IN_LCEIL):
6180 CASE_FLT_FN (BUILT_IN_LLCEIL):
6181 CASE_FLT_FN (BUILT_IN_LFLOOR):
6182 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6183 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6188 CASE_FLT_FN (BUILT_IN_LRINT):
6189 CASE_FLT_FN (BUILT_IN_LLRINT):
6190 CASE_FLT_FN (BUILT_IN_LROUND):
6191 CASE_FLT_FN (BUILT_IN_LLROUND):
6192 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6197 CASE_FLT_FN (BUILT_IN_POW):
6198 target = expand_builtin_pow (exp, target, subtarget);
6203 CASE_FLT_FN (BUILT_IN_POWI):
6204 target = expand_builtin_powi (exp, target, subtarget);
6209 CASE_FLT_FN (BUILT_IN_ATAN2):
6210 CASE_FLT_FN (BUILT_IN_LDEXP):
6211 CASE_FLT_FN (BUILT_IN_SCALB):
6212 CASE_FLT_FN (BUILT_IN_SCALBN):
6213 CASE_FLT_FN (BUILT_IN_SCALBLN):
6214 if (! flag_unsafe_math_optimizations)
6217 CASE_FLT_FN (BUILT_IN_FMOD):
6218 CASE_FLT_FN (BUILT_IN_REMAINDER):
6219 CASE_FLT_FN (BUILT_IN_DREM):
6220 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6225 CASE_FLT_FN (BUILT_IN_CEXPI):
6226 target = expand_builtin_cexpi (exp, target, subtarget);
6227 gcc_assert (target);
6230 CASE_FLT_FN (BUILT_IN_SIN):
6231 CASE_FLT_FN (BUILT_IN_COS):
6232 if (! flag_unsafe_math_optimizations)
6234 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6239 CASE_FLT_FN (BUILT_IN_SINCOS):
6240 if (! flag_unsafe_math_optimizations)
6242 target = expand_builtin_sincos (exp);
6247 case BUILT_IN_APPLY_ARGS:
6248 return expand_builtin_apply_args ();
6250 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6251 FUNCTION with a copy of the parameters described by
6252 ARGUMENTS, and ARGSIZE. It returns a block of memory
6253 allocated on the stack into which is stored all the registers
6254 that might possibly be used for returning the result of a
6255 function. ARGUMENTS is the value returned by
6256 __builtin_apply_args. ARGSIZE is the number of bytes of
6257 arguments that must be copied. ??? How should this value be
6258 computed? We'll also need a safe worst case value for varargs
6260 case BUILT_IN_APPLY:
6261 if (!validate_arglist (exp, POINTER_TYPE,
6262 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6263 && !validate_arglist (exp, REFERENCE_TYPE,
6264 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6270 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6271 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6272 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6274 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6277 /* __builtin_return (RESULT) causes the function to return the
6278 value described by RESULT. RESULT is address of the block of
6279 memory returned by __builtin_apply. */
6280 case BUILT_IN_RETURN:
6281 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6282 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6285 case BUILT_IN_SAVEREGS:
6286 return expand_builtin_saveregs ();
6288 case BUILT_IN_ARGS_INFO:
6289 return expand_builtin_args_info (exp);
6291 case BUILT_IN_VA_ARG_PACK:
6292 /* All valid uses of __builtin_va_arg_pack () are removed during
6294 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6297 case BUILT_IN_VA_ARG_PACK_LEN:
6298 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6300 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6303 /* Return the address of the first anonymous stack arg. */
6304 case BUILT_IN_NEXT_ARG:
6305 if (fold_builtin_next_arg (exp, false))
6307 return expand_builtin_next_arg ();
6309 case BUILT_IN_CLEAR_CACHE:
6310 target = expand_builtin___clear_cache (exp);
6315 case BUILT_IN_CLASSIFY_TYPE:
6316 return expand_builtin_classify_type (exp);
6318 case BUILT_IN_CONSTANT_P:
6321 case BUILT_IN_FRAME_ADDRESS:
6322 case BUILT_IN_RETURN_ADDRESS:
6323 return expand_builtin_frame_address (fndecl, exp);
6325 /* Returns the address of the area where the structure is returned.
6327 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6328 if (call_expr_nargs (exp) != 0
6329 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6330 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6333 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6335 case BUILT_IN_ALLOCA:
6336 target = expand_builtin_alloca (exp, target);
6341 case BUILT_IN_STACK_SAVE:
6342 return expand_stack_save ();
6344 case BUILT_IN_STACK_RESTORE:
6345 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6348 case BUILT_IN_BSWAP32:
6349 case BUILT_IN_BSWAP64:
6350 target = expand_builtin_bswap (exp, target, subtarget);
6356 CASE_INT_FN (BUILT_IN_FFS):
6357 case BUILT_IN_FFSIMAX:
6358 target = expand_builtin_unop (target_mode, exp, target,
6359 subtarget, ffs_optab);
6364 CASE_INT_FN (BUILT_IN_CLZ):
6365 case BUILT_IN_CLZIMAX:
6366 target = expand_builtin_unop (target_mode, exp, target,
6367 subtarget, clz_optab);
6372 CASE_INT_FN (BUILT_IN_CTZ):
6373 case BUILT_IN_CTZIMAX:
6374 target = expand_builtin_unop (target_mode, exp, target,
6375 subtarget, ctz_optab);
6380 CASE_INT_FN (BUILT_IN_POPCOUNT):
6381 case BUILT_IN_POPCOUNTIMAX:
6382 target = expand_builtin_unop (target_mode, exp, target,
6383 subtarget, popcount_optab);
6388 CASE_INT_FN (BUILT_IN_PARITY):
6389 case BUILT_IN_PARITYIMAX:
6390 target = expand_builtin_unop (target_mode, exp, target,
6391 subtarget, parity_optab);
6396 case BUILT_IN_STRLEN:
6397 target = expand_builtin_strlen (exp, target, target_mode);
6402 case BUILT_IN_STRCPY:
6403 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6408 case BUILT_IN_STRNCPY:
6409 target = expand_builtin_strncpy (exp, target, mode);
6414 case BUILT_IN_STPCPY:
6415 target = expand_builtin_stpcpy (exp, target, mode);
6420 case BUILT_IN_STRCAT:
6421 target = expand_builtin_strcat (fndecl, exp, target, mode);
6426 case BUILT_IN_STRNCAT:
6427 target = expand_builtin_strncat (exp, target, mode);
6432 case BUILT_IN_STRSPN:
6433 target = expand_builtin_strspn (exp, target, mode);
6438 case BUILT_IN_STRCSPN:
6439 target = expand_builtin_strcspn (exp, target, mode);
6444 case BUILT_IN_STRSTR:
6445 target = expand_builtin_strstr (exp, target, mode);
6450 case BUILT_IN_STRPBRK:
6451 target = expand_builtin_strpbrk (exp, target, mode);
6456 case BUILT_IN_INDEX:
6457 case BUILT_IN_STRCHR:
6458 target = expand_builtin_strchr (exp, target, mode);
6463 case BUILT_IN_RINDEX:
6464 case BUILT_IN_STRRCHR:
6465 target = expand_builtin_strrchr (exp, target, mode);
6470 case BUILT_IN_MEMCPY:
6471 target = expand_builtin_memcpy (exp, target, mode);
6476 case BUILT_IN_MEMPCPY:
6477 target = expand_builtin_mempcpy (exp, target, mode);
6482 case BUILT_IN_MEMMOVE:
6483 target = expand_builtin_memmove (exp, target, mode, ignore);
6488 case BUILT_IN_BCOPY:
6489 target = expand_builtin_bcopy (exp, ignore);
6494 case BUILT_IN_MEMSET:
6495 target = expand_builtin_memset (exp, target, mode);
6500 case BUILT_IN_BZERO:
6501 target = expand_builtin_bzero (exp);
6506 case BUILT_IN_STRCMP:
6507 target = expand_builtin_strcmp (exp, target, mode);
6512 case BUILT_IN_STRNCMP:
6513 target = expand_builtin_strncmp (exp, target, mode);
6518 case BUILT_IN_MEMCHR:
6519 target = expand_builtin_memchr (exp, target, mode);
6525 case BUILT_IN_MEMCMP:
6526 target = expand_builtin_memcmp (exp, target, mode);
6531 case BUILT_IN_SETJMP:
6532 /* This should have been lowered to the builtins below. */
6535 case BUILT_IN_SETJMP_SETUP:
6536 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6537 and the receiver label. */
6538 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6540 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6541 VOIDmode, EXPAND_NORMAL);
6542 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6543 rtx label_r = label_rtx (label);
6545 /* This is copied from the handling of non-local gotos. */
6546 expand_builtin_setjmp_setup (buf_addr, label_r);
6547 nonlocal_goto_handler_labels
6548 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6549 nonlocal_goto_handler_labels);
6550 /* ??? Do not let expand_label treat us as such since we would
6551 not want to be both on the list of non-local labels and on
6552 the list of forced labels. */
6553 FORCED_LABEL (label) = 0;
6558 case BUILT_IN_SETJMP_DISPATCHER:
6559 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6560 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6562 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6563 rtx label_r = label_rtx (label);
6565 /* Remove the dispatcher label from the list of non-local labels
6566 since the receiver labels have been added to it above. */
6567 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6572 case BUILT_IN_SETJMP_RECEIVER:
6573 /* __builtin_setjmp_receiver is passed the receiver label. */
6574 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6576 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6577 rtx label_r = label_rtx (label);
6579 expand_builtin_setjmp_receiver (label_r);
6584 /* __builtin_longjmp is passed a pointer to an array of five words.
6585 It's similar to the C library longjmp function but works with
6586 __builtin_setjmp above. */
6587 case BUILT_IN_LONGJMP:
6588 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6590 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6591 VOIDmode, EXPAND_NORMAL);
6592 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6594 if (value != const1_rtx)
6596 error ("%<__builtin_longjmp%> second argument must be 1");
6600 expand_builtin_longjmp (buf_addr, value);
6605 case BUILT_IN_NONLOCAL_GOTO:
6606 target = expand_builtin_nonlocal_goto (exp);
6611 /* This updates the setjmp buffer that is its argument with the value
6612 of the current stack pointer. */
6613 case BUILT_IN_UPDATE_SETJMP_BUF:
6614 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6617 = expand_normal (CALL_EXPR_ARG (exp, 0));
6619 expand_builtin_update_setjmp_buf (buf_addr);
6625 expand_builtin_trap ();
6628 case BUILT_IN_PRINTF:
6629 target = expand_builtin_printf (exp, target, mode, false);
6634 case BUILT_IN_PRINTF_UNLOCKED:
6635 target = expand_builtin_printf (exp, target, mode, true);
6640 case BUILT_IN_FPUTS:
6641 target = expand_builtin_fputs (exp, target, false);
6645 case BUILT_IN_FPUTS_UNLOCKED:
6646 target = expand_builtin_fputs (exp, target, true);
6651 case BUILT_IN_FPRINTF:
6652 target = expand_builtin_fprintf (exp, target, mode, false);
6657 case BUILT_IN_FPRINTF_UNLOCKED:
6658 target = expand_builtin_fprintf (exp, target, mode, true);
6663 case BUILT_IN_SPRINTF:
6664 target = expand_builtin_sprintf (exp, target, mode);
6669 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6670 case BUILT_IN_SIGNBITD32:
6671 case BUILT_IN_SIGNBITD64:
6672 case BUILT_IN_SIGNBITD128:
6673 target = expand_builtin_signbit (exp, target);
6678 /* Various hooks for the DWARF 2 __throw routine. */
6679 case BUILT_IN_UNWIND_INIT:
6680 expand_builtin_unwind_init ();
6682 case BUILT_IN_DWARF_CFA:
6683 return virtual_cfa_rtx;
6684 #ifdef DWARF2_UNWIND_INFO
6685 case BUILT_IN_DWARF_SP_COLUMN:
6686 return expand_builtin_dwarf_sp_column ();
6687 case BUILT_IN_INIT_DWARF_REG_SIZES:
6688 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6691 case BUILT_IN_FROB_RETURN_ADDR:
6692 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6693 case BUILT_IN_EXTRACT_RETURN_ADDR:
6694 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6695 case BUILT_IN_EH_RETURN:
6696 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6697 CALL_EXPR_ARG (exp, 1));
6699 #ifdef EH_RETURN_DATA_REGNO
6700 case BUILT_IN_EH_RETURN_DATA_REGNO:
6701 return expand_builtin_eh_return_data_regno (exp);
6703 case BUILT_IN_EXTEND_POINTER:
6704 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6706 case BUILT_IN_VA_START:
6707 return expand_builtin_va_start (exp);
6708 case BUILT_IN_VA_END:
6709 return expand_builtin_va_end (exp);
6710 case BUILT_IN_VA_COPY:
6711 return expand_builtin_va_copy (exp);
6712 case BUILT_IN_EXPECT:
6713 return expand_builtin_expect (exp, target);
6714 case BUILT_IN_PREFETCH:
6715 expand_builtin_prefetch (exp);
6718 case BUILT_IN_PROFILE_FUNC_ENTER:
6719 return expand_builtin_profile_func (false);
6720 case BUILT_IN_PROFILE_FUNC_EXIT:
6721 return expand_builtin_profile_func (true);
6723 case BUILT_IN_INIT_TRAMPOLINE:
6724 return expand_builtin_init_trampoline (exp);
6725 case BUILT_IN_ADJUST_TRAMPOLINE:
6726 return expand_builtin_adjust_trampoline (exp);
6729 case BUILT_IN_EXECL:
6730 case BUILT_IN_EXECV:
6731 case BUILT_IN_EXECLP:
6732 case BUILT_IN_EXECLE:
6733 case BUILT_IN_EXECVP:
6734 case BUILT_IN_EXECVE:
6735 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6740 case BUILT_IN_FETCH_AND_ADD_1:
6741 case BUILT_IN_FETCH_AND_ADD_2:
6742 case BUILT_IN_FETCH_AND_ADD_4:
6743 case BUILT_IN_FETCH_AND_ADD_8:
6744 case BUILT_IN_FETCH_AND_ADD_16:
6745 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6746 target = expand_builtin_sync_operation (mode, exp, PLUS,
6747 false, target, ignore);
6752 case BUILT_IN_FETCH_AND_SUB_1:
6753 case BUILT_IN_FETCH_AND_SUB_2:
6754 case BUILT_IN_FETCH_AND_SUB_4:
6755 case BUILT_IN_FETCH_AND_SUB_8:
6756 case BUILT_IN_FETCH_AND_SUB_16:
6757 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6758 target = expand_builtin_sync_operation (mode, exp, MINUS,
6759 false, target, ignore);
6764 case BUILT_IN_FETCH_AND_OR_1:
6765 case BUILT_IN_FETCH_AND_OR_2:
6766 case BUILT_IN_FETCH_AND_OR_4:
6767 case BUILT_IN_FETCH_AND_OR_8:
6768 case BUILT_IN_FETCH_AND_OR_16:
6769 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6770 target = expand_builtin_sync_operation (mode, exp, IOR,
6771 false, target, ignore);
6776 case BUILT_IN_FETCH_AND_AND_1:
6777 case BUILT_IN_FETCH_AND_AND_2:
6778 case BUILT_IN_FETCH_AND_AND_4:
6779 case BUILT_IN_FETCH_AND_AND_8:
6780 case BUILT_IN_FETCH_AND_AND_16:
6781 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6782 target = expand_builtin_sync_operation (mode, exp, AND,
6783 false, target, ignore);
6788 case BUILT_IN_FETCH_AND_XOR_1:
6789 case BUILT_IN_FETCH_AND_XOR_2:
6790 case BUILT_IN_FETCH_AND_XOR_4:
6791 case BUILT_IN_FETCH_AND_XOR_8:
6792 case BUILT_IN_FETCH_AND_XOR_16:
6793 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6794 target = expand_builtin_sync_operation (mode, exp, XOR,
6795 false, target, ignore);
6800 case BUILT_IN_FETCH_AND_NAND_1:
6801 case BUILT_IN_FETCH_AND_NAND_2:
6802 case BUILT_IN_FETCH_AND_NAND_4:
6803 case BUILT_IN_FETCH_AND_NAND_8:
6804 case BUILT_IN_FETCH_AND_NAND_16:
6805 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6806 target = expand_builtin_sync_operation (mode, exp, NOT,
6807 false, target, ignore);
6812 case BUILT_IN_ADD_AND_FETCH_1:
6813 case BUILT_IN_ADD_AND_FETCH_2:
6814 case BUILT_IN_ADD_AND_FETCH_4:
6815 case BUILT_IN_ADD_AND_FETCH_8:
6816 case BUILT_IN_ADD_AND_FETCH_16:
6817 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6818 target = expand_builtin_sync_operation (mode, exp, PLUS,
6819 true, target, ignore);
6824 case BUILT_IN_SUB_AND_FETCH_1:
6825 case BUILT_IN_SUB_AND_FETCH_2:
6826 case BUILT_IN_SUB_AND_FETCH_4:
6827 case BUILT_IN_SUB_AND_FETCH_8:
6828 case BUILT_IN_SUB_AND_FETCH_16:
6829 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6830 target = expand_builtin_sync_operation (mode, exp, MINUS,
6831 true, target, ignore);
6836 case BUILT_IN_OR_AND_FETCH_1:
6837 case BUILT_IN_OR_AND_FETCH_2:
6838 case BUILT_IN_OR_AND_FETCH_4:
6839 case BUILT_IN_OR_AND_FETCH_8:
6840 case BUILT_IN_OR_AND_FETCH_16:
6841 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6842 target = expand_builtin_sync_operation (mode, exp, IOR,
6843 true, target, ignore);
6848 case BUILT_IN_AND_AND_FETCH_1:
6849 case BUILT_IN_AND_AND_FETCH_2:
6850 case BUILT_IN_AND_AND_FETCH_4:
6851 case BUILT_IN_AND_AND_FETCH_8:
6852 case BUILT_IN_AND_AND_FETCH_16:
6853 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6854 target = expand_builtin_sync_operation (mode, exp, AND,
6855 true, target, ignore);
6860 case BUILT_IN_XOR_AND_FETCH_1:
6861 case BUILT_IN_XOR_AND_FETCH_2:
6862 case BUILT_IN_XOR_AND_FETCH_4:
6863 case BUILT_IN_XOR_AND_FETCH_8:
6864 case BUILT_IN_XOR_AND_FETCH_16:
6865 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6866 target = expand_builtin_sync_operation (mode, exp, XOR,
6867 true, target, ignore);
6872 case BUILT_IN_NAND_AND_FETCH_1:
6873 case BUILT_IN_NAND_AND_FETCH_2:
6874 case BUILT_IN_NAND_AND_FETCH_4:
6875 case BUILT_IN_NAND_AND_FETCH_8:
6876 case BUILT_IN_NAND_AND_FETCH_16:
6877 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6878 target = expand_builtin_sync_operation (mode, exp, NOT,
6879 true, target, ignore);
6884 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6885 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6886 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6887 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6888 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6889 if (mode == VOIDmode)
6890 mode = TYPE_MODE (boolean_type_node);
6891 if (!target || !register_operand (target, mode))
6892 target = gen_reg_rtx (mode);
6894 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6895 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6900 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6901 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6902 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6903 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6904 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6905 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6906 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6911 case BUILT_IN_LOCK_TEST_AND_SET_1:
6912 case BUILT_IN_LOCK_TEST_AND_SET_2:
6913 case BUILT_IN_LOCK_TEST_AND_SET_4:
6914 case BUILT_IN_LOCK_TEST_AND_SET_8:
6915 case BUILT_IN_LOCK_TEST_AND_SET_16:
6916 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6917 target = expand_builtin_lock_test_and_set (mode, exp, target);
6922 case BUILT_IN_LOCK_RELEASE_1:
6923 case BUILT_IN_LOCK_RELEASE_2:
6924 case BUILT_IN_LOCK_RELEASE_4:
6925 case BUILT_IN_LOCK_RELEASE_8:
6926 case BUILT_IN_LOCK_RELEASE_16:
6927 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6928 expand_builtin_lock_release (mode, exp);
6931 case BUILT_IN_SYNCHRONIZE:
6932 expand_builtin_synchronize ();
6935 case BUILT_IN_OBJECT_SIZE:
6936 return expand_builtin_object_size (exp);
6938 case BUILT_IN_MEMCPY_CHK:
6939 case BUILT_IN_MEMPCPY_CHK:
6940 case BUILT_IN_MEMMOVE_CHK:
6941 case BUILT_IN_MEMSET_CHK:
6942 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6947 case BUILT_IN_STRCPY_CHK:
6948 case BUILT_IN_STPCPY_CHK:
6949 case BUILT_IN_STRNCPY_CHK:
6950 case BUILT_IN_STRCAT_CHK:
6951 case BUILT_IN_STRNCAT_CHK:
6952 case BUILT_IN_SNPRINTF_CHK:
6953 case BUILT_IN_VSNPRINTF_CHK:
6954 maybe_emit_chk_warning (exp, fcode);
6957 case BUILT_IN_SPRINTF_CHK:
6958 case BUILT_IN_VSPRINTF_CHK:
6959 maybe_emit_sprintf_chk_warning (exp, fcode);
6962 default: /* just do library call, if unknown builtin */
6966 /* The switch statement above can drop through to cause the function
6967 to be called normally. */
6968 return expand_call (exp, target, ignore);
6971 /* Determine whether a tree node represents a call to a built-in
6972 function. If the tree T is a call to a built-in function with
6973 the right number of arguments of the appropriate types, return
6974 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6975 Otherwise the return value is END_BUILTINS. */
6977 enum built_in_function
6978 builtin_mathfn_code (const_tree t)
6980 const_tree fndecl, arg, parmlist;
6981 const_tree argtype, parmtype;
6982 const_call_expr_arg_iterator iter;
6984 if (TREE_CODE (t) != CALL_EXPR
6985 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6986 return END_BUILTINS;
6988 fndecl = get_callee_fndecl (t);
6989 if (fndecl == NULL_TREE
6990 || TREE_CODE (fndecl) != FUNCTION_DECL
6991 || ! DECL_BUILT_IN (fndecl)
6992 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6993 return END_BUILTINS;
6995 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6996 init_const_call_expr_arg_iterator (t, &iter);
6997 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6999 /* If a function doesn't take a variable number of arguments,
7000 the last element in the list will have type `void'. */
7001 parmtype = TREE_VALUE (parmlist);
7002 if (VOID_TYPE_P (parmtype))
7004 if (more_const_call_expr_args_p (&iter))
7005 return END_BUILTINS;
7006 return DECL_FUNCTION_CODE (fndecl);
7009 if (! more_const_call_expr_args_p (&iter))
7010 return END_BUILTINS;
7012 arg = next_const_call_expr_arg (&iter);
7013 argtype = TREE_TYPE (arg);
7015 if (SCALAR_FLOAT_TYPE_P (parmtype))
7017 if (! SCALAR_FLOAT_TYPE_P (argtype))
7018 return END_BUILTINS;
7020 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7022 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7023 return END_BUILTINS;
7025 else if (POINTER_TYPE_P (parmtype))
7027 if (! POINTER_TYPE_P (argtype))
7028 return END_BUILTINS;
7030 else if (INTEGRAL_TYPE_P (parmtype))
7032 if (! INTEGRAL_TYPE_P (argtype))
7033 return END_BUILTINS;
7036 return END_BUILTINS;
7039 /* Variable-length argument list. */
7040 return DECL_FUNCTION_CODE (fndecl);
7043 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7044 evaluate to a constant. */
7047 fold_builtin_constant_p (tree arg)
7049 /* We return 1 for a numeric type that's known to be a constant
7050 value at compile-time or for an aggregate type that's a
7051 literal constant. */
7054 /* If we know this is a constant, emit the constant of one. */
7055 if (CONSTANT_CLASS_P (arg)
7056 || (TREE_CODE (arg) == CONSTRUCTOR
7057 && TREE_CONSTANT (arg)))
7058 return integer_one_node;
7059 if (TREE_CODE (arg) == ADDR_EXPR)
7061 tree op = TREE_OPERAND (arg, 0);
7062 if (TREE_CODE (op) == STRING_CST
7063 || (TREE_CODE (op) == ARRAY_REF
7064 && integer_zerop (TREE_OPERAND (op, 1))
7065 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7066 return integer_one_node;
7069 /* If this expression has side effects, show we don't know it to be a
7070 constant. Likewise if it's a pointer or aggregate type since in
7071 those case we only want literals, since those are only optimized
7072 when generating RTL, not later.
7073 And finally, if we are compiling an initializer, not code, we
7074 need to return a definite result now; there's not going to be any
7075 more optimization done. */
7076 if (TREE_SIDE_EFFECTS (arg)
7077 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7078 || POINTER_TYPE_P (TREE_TYPE (arg))
7080 || folding_initializer)
7081 return integer_zero_node;
7086 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7087 return it as a truthvalue. */
7090 build_builtin_expect_predicate (tree pred, tree expected)
7092 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7094 fn = built_in_decls[BUILT_IN_EXPECT];
7095 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7096 ret_type = TREE_TYPE (TREE_TYPE (fn));
7097 pred_type = TREE_VALUE (arg_types);
7098 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7100 pred = fold_convert (pred_type, pred);
7101 expected = fold_convert (expected_type, expected);
7102 call_expr = build_call_expr (fn, 2, pred, expected);
7104 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7105 build_int_cst (ret_type, 0));
7108 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7109 NULL_TREE if no simplification is possible. */
7112 fold_builtin_expect (tree arg0, tree arg1)
7115 enum tree_code code;
7117 /* If this is a builtin_expect within a builtin_expect keep the
7118 inner one. See through a comparison against a constant. It
7119 might have been added to create a thruthvalue. */
7121 if (COMPARISON_CLASS_P (inner)
7122 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7123 inner = TREE_OPERAND (inner, 0);
7125 if (TREE_CODE (inner) == CALL_EXPR
7126 && (fndecl = get_callee_fndecl (inner))
7127 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7128 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7131 /* Distribute the expected value over short-circuiting operators.
7132 See through the cast from truthvalue_type_node to long. */
7134 while (TREE_CODE (inner) == NOP_EXPR
7135 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7136 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7137 inner = TREE_OPERAND (inner, 0);
7139 code = TREE_CODE (inner);
7140 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7142 tree op0 = TREE_OPERAND (inner, 0);
7143 tree op1 = TREE_OPERAND (inner, 1);
7145 op0 = build_builtin_expect_predicate (op0, arg1);
7146 op1 = build_builtin_expect_predicate (op1, arg1);
7147 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7149 return fold_convert (TREE_TYPE (arg0), inner);
7152 /* If the argument isn't invariant then there's nothing else we can do. */
7153 if (!TREE_INVARIANT (arg0))
7156 /* If we expect that a comparison against the argument will fold to
7157 a constant return the constant. In practice, this means a true
7158 constant or the address of a non-weak symbol. */
7161 if (TREE_CODE (inner) == ADDR_EXPR)
7165 inner = TREE_OPERAND (inner, 0);
7167 while (TREE_CODE (inner) == COMPONENT_REF
7168 || TREE_CODE (inner) == ARRAY_REF);
7169 if (DECL_P (inner) && DECL_WEAK (inner))
7173 /* Otherwise, ARG0 already has the proper type for the return value. */
7177 /* Fold a call to __builtin_classify_type with argument ARG. */
7180 fold_builtin_classify_type (tree arg)
7183 return build_int_cst (NULL_TREE, no_type_class);
7185 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7188 /* Fold a call to __builtin_strlen with argument ARG. */
7191 fold_builtin_strlen (tree arg)
7193 if (!validate_arg (arg, POINTER_TYPE))
7197 tree len = c_strlen (arg, 0);
7201 /* Convert from the internal "sizetype" type to "size_t". */
7203 len = fold_convert (size_type_node, len);
7211 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7214 fold_builtin_inf (tree type, int warn)
7216 REAL_VALUE_TYPE real;
7218 /* __builtin_inff is intended to be usable to define INFINITY on all
7219 targets. If an infinity is not available, INFINITY expands "to a
7220 positive constant of type float that overflows at translation
7221 time", footnote "In this case, using INFINITY will violate the
7222 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7223 Thus we pedwarn to ensure this constraint violation is
7225 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7226 pedwarn ("target format does not support infinity");
7229 return build_real (type, real);
7232 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7235 fold_builtin_nan (tree arg, tree type, int quiet)
7237 REAL_VALUE_TYPE real;
7240 if (!validate_arg (arg, POINTER_TYPE))
7242 str = c_getstr (arg);
7246 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7249 return build_real (type, real);
7252 /* Return true if the floating point expression T has an integer value.
7253 We also allow +Inf, -Inf and NaN to be considered integer values. */
7256 integer_valued_real_p (tree t)
7258 switch (TREE_CODE (t))
7265 case NON_LVALUE_EXPR:
7266 return integer_valued_real_p (TREE_OPERAND (t, 0));
7271 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
7278 return integer_valued_real_p (TREE_OPERAND (t, 0))
7279 && integer_valued_real_p (TREE_OPERAND (t, 1));
7282 return integer_valued_real_p (TREE_OPERAND (t, 1))
7283 && integer_valued_real_p (TREE_OPERAND (t, 2));
7286 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7290 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7291 if (TREE_CODE (type) == INTEGER_TYPE)
7293 if (TREE_CODE (type) == REAL_TYPE)
7294 return integer_valued_real_p (TREE_OPERAND (t, 0));
7299 switch (builtin_mathfn_code (t))
7301 CASE_FLT_FN (BUILT_IN_CEIL):
7302 CASE_FLT_FN (BUILT_IN_FLOOR):
7303 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7304 CASE_FLT_FN (BUILT_IN_RINT):
7305 CASE_FLT_FN (BUILT_IN_ROUND):
7306 CASE_FLT_FN (BUILT_IN_TRUNC):
7309 CASE_FLT_FN (BUILT_IN_FMIN):
7310 CASE_FLT_FN (BUILT_IN_FMAX):
7311 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7312 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7325 /* FNDECL is assumed to be a builtin where truncation can be propagated
7326 across (for instance floor((double)f) == (double)floorf (f).
7327 Do the transformation for a call with argument ARG. */
7330 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7332 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7334 if (!validate_arg (arg, REAL_TYPE))
7337 /* Integer rounding functions are idempotent. */
7338 if (fcode == builtin_mathfn_code (arg))
7341 /* If argument is already integer valued, and we don't need to worry
7342 about setting errno, there's no need to perform rounding. */
7343 if (! flag_errno_math && integer_valued_real_p (arg))
7348 tree arg0 = strip_float_extensions (arg);
7349 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7350 tree newtype = TREE_TYPE (arg0);
7353 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7354 && (decl = mathfn_built_in (newtype, fcode)))
7355 return fold_convert (ftype,
7356 build_call_expr (decl, 1,
7357 fold_convert (newtype, arg0)));
7362 /* FNDECL is assumed to be builtin which can narrow the FP type of
7363 the argument, for instance lround((double)f) -> lroundf (f).
7364 Do the transformation for a call with argument ARG. */
7367 fold_fixed_mathfn (tree fndecl, tree arg)
7369 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7371 if (!validate_arg (arg, REAL_TYPE))
7374 /* If argument is already integer valued, and we don't need to worry
7375 about setting errno, there's no need to perform rounding. */
7376 if (! flag_errno_math && integer_valued_real_p (arg))
7377 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7381 tree ftype = TREE_TYPE (arg);
7382 tree arg0 = strip_float_extensions (arg);
7383 tree newtype = TREE_TYPE (arg0);
7386 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7387 && (decl = mathfn_built_in (newtype, fcode)))
7388 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7391 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7392 sizeof (long long) == sizeof (long). */
7393 if (TYPE_PRECISION (long_long_integer_type_node)
7394 == TYPE_PRECISION (long_integer_type_node))
7396 tree newfn = NULL_TREE;
7399 CASE_FLT_FN (BUILT_IN_LLCEIL):
7400 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7403 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7404 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7407 CASE_FLT_FN (BUILT_IN_LLROUND):
7408 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7411 CASE_FLT_FN (BUILT_IN_LLRINT):
7412 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7421 tree newcall = build_call_expr(newfn, 1, arg);
7422 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7429 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7430 return type. Return NULL_TREE if no simplification can be made. */
7433 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7437 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7438 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7441 /* Calculate the result when the argument is a constant. */
7442 if (TREE_CODE (arg) == COMPLEX_CST
7443 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7447 if (TREE_CODE (arg) == COMPLEX_EXPR)
7449 tree real = TREE_OPERAND (arg, 0);
7450 tree imag = TREE_OPERAND (arg, 1);
7452 /* If either part is zero, cabs is fabs of the other. */
7453 if (real_zerop (real))
7454 return fold_build1 (ABS_EXPR, type, imag);
7455 if (real_zerop (imag))
7456 return fold_build1 (ABS_EXPR, type, real);
7458 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7459 if (flag_unsafe_math_optimizations
7460 && operand_equal_p (real, imag, OEP_PURE_SAME))
7462 const REAL_VALUE_TYPE sqrt2_trunc
7463 = real_value_truncate (TYPE_MODE (type),
7464 *get_real_const (rv_sqrt2));
7466 return fold_build2 (MULT_EXPR, type,
7467 fold_build1 (ABS_EXPR, type, real),
7468 build_real (type, sqrt2_trunc));
7472 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7473 if (TREE_CODE (arg) == NEGATE_EXPR
7474 || TREE_CODE (arg) == CONJ_EXPR)
7475 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7477 /* Don't do this when optimizing for size. */
7478 if (flag_unsafe_math_optimizations
7479 && optimize && !optimize_size)
7481 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7483 if (sqrtfn != NULL_TREE)
7485 tree rpart, ipart, result;
7487 arg = builtin_save_expr (arg);
7489 rpart = fold_build1 (REALPART_EXPR, type, arg);
7490 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7492 rpart = builtin_save_expr (rpart);
7493 ipart = builtin_save_expr (ipart);
7495 result = fold_build2 (PLUS_EXPR, type,
7496 fold_build2 (MULT_EXPR, type,
7498 fold_build2 (MULT_EXPR, type,
7501 return build_call_expr (sqrtfn, 1, result);
7508 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7509 Return NULL_TREE if no simplification can be made. */
7512 fold_builtin_sqrt (tree arg, tree type)
7515 enum built_in_function fcode;
7518 if (!validate_arg (arg, REAL_TYPE))
7521 /* Calculate the result when the argument is a constant. */
7522 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7525 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7526 fcode = builtin_mathfn_code (arg);
7527 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7529 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7530 arg = fold_build2 (MULT_EXPR, type,
7531 CALL_EXPR_ARG (arg, 0),
7532 build_real (type, dconsthalf));
7533 return build_call_expr (expfn, 1, arg);
7536 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7537 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7539 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7543 tree arg0 = CALL_EXPR_ARG (arg, 0);
7545 /* The inner root was either sqrt or cbrt. */
7546 REAL_VALUE_TYPE dconstroot =
7547 BUILTIN_SQRT_P (fcode) ? dconsthalf : *get_real_const (rv_third);
7549 /* Adjust for the outer root. */
7550 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7551 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7552 tree_root = build_real (type, dconstroot);
7553 return build_call_expr (powfn, 2, arg0, tree_root);
7557 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7558 if (flag_unsafe_math_optimizations
7559 && (fcode == BUILT_IN_POW
7560 || fcode == BUILT_IN_POWF
7561 || fcode == BUILT_IN_POWL))
7563 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7564 tree arg0 = CALL_EXPR_ARG (arg, 0);
7565 tree arg1 = CALL_EXPR_ARG (arg, 1);
7567 if (!tree_expr_nonnegative_p (arg0))
7568 arg0 = build1 (ABS_EXPR, type, arg0);
7569 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7570 build_real (type, dconsthalf));
7571 return build_call_expr (powfn, 2, arg0, narg1);
7577 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7578 Return NULL_TREE if no simplification can be made. */
7581 fold_builtin_cbrt (tree arg, tree type)
7583 const enum built_in_function fcode = builtin_mathfn_code (arg);
7586 if (!validate_arg (arg, REAL_TYPE))
7589 /* Calculate the result when the argument is a constant. */
7590 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7593 if (flag_unsafe_math_optimizations)
7595 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7596 if (BUILTIN_EXPONENT_P (fcode))
7598 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7599 const REAL_VALUE_TYPE third_trunc =
7600 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_third));
7601 arg = fold_build2 (MULT_EXPR, type,
7602 CALL_EXPR_ARG (arg, 0),
7603 build_real (type, third_trunc));
7604 return build_call_expr (expfn, 1, arg);
7607 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7608 if (BUILTIN_SQRT_P (fcode))
7610 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7614 tree arg0 = CALL_EXPR_ARG (arg, 0);
7616 REAL_VALUE_TYPE dconstroot = *get_real_const (rv_third);
7618 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7619 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7620 tree_root = build_real (type, dconstroot);
7621 return build_call_expr (powfn, 2, arg0, tree_root);
7625 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7626 if (BUILTIN_CBRT_P (fcode))
7628 tree arg0 = CALL_EXPR_ARG (arg, 0);
7629 if (tree_expr_nonnegative_p (arg0))
7631 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7636 REAL_VALUE_TYPE dconstroot;
7638 real_arithmetic (&dconstroot, MULT_EXPR,
7639 get_real_const (rv_third),
7640 get_real_const (rv_third));
7641 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7642 tree_root = build_real (type, dconstroot);
7643 return build_call_expr (powfn, 2, arg0, tree_root);
7648 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7649 if (fcode == BUILT_IN_POW
7650 || fcode == BUILT_IN_POWF
7651 || fcode == BUILT_IN_POWL)
7653 tree arg00 = CALL_EXPR_ARG (arg, 0);
7654 tree arg01 = CALL_EXPR_ARG (arg, 1);
7655 if (tree_expr_nonnegative_p (arg00))
7657 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7658 const REAL_VALUE_TYPE dconstroot
7659 = real_value_truncate (TYPE_MODE (type),
7660 *get_real_const (rv_third));
7661 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7662 build_real (type, dconstroot));
7663 return build_call_expr (powfn, 2, arg00, narg01);
7670 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7671 TYPE is the type of the return value. Return NULL_TREE if no
7672 simplification can be made. */
7675 fold_builtin_cos (tree arg, tree type, tree fndecl)
7679 if (!validate_arg (arg, REAL_TYPE))
7682 /* Calculate the result when the argument is a constant. */
7683 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7686 /* Optimize cos(-x) into cos (x). */
7687 if ((narg = fold_strip_sign_ops (arg)))
7688 return build_call_expr (fndecl, 1, narg);
7693 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7694 Return NULL_TREE if no simplification can be made. */
7697 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7699 if (validate_arg (arg, REAL_TYPE))
7703 /* Calculate the result when the argument is a constant. */
7704 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7707 /* Optimize cosh(-x) into cosh (x). */
7708 if ((narg = fold_strip_sign_ops (arg)))
7709 return build_call_expr (fndecl, 1, narg);
7715 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7716 Return NULL_TREE if no simplification can be made. */
7719 fold_builtin_tan (tree arg, tree type)
7721 enum built_in_function fcode;
7724 if (!validate_arg (arg, REAL_TYPE))
7727 /* Calculate the result when the argument is a constant. */
7728 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7731 /* Optimize tan(atan(x)) = x. */
7732 fcode = builtin_mathfn_code (arg);
7733 if (flag_unsafe_math_optimizations
7734 && (fcode == BUILT_IN_ATAN
7735 || fcode == BUILT_IN_ATANF
7736 || fcode == BUILT_IN_ATANL))
7737 return CALL_EXPR_ARG (arg, 0);
7742 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7743 NULL_TREE if no simplification can be made. */
7746 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7751 if (!validate_arg (arg0, REAL_TYPE)
7752 || !validate_arg (arg1, POINTER_TYPE)
7753 || !validate_arg (arg2, POINTER_TYPE))
7756 type = TREE_TYPE (arg0);
7758 /* Calculate the result when the argument is a constant. */
7759 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7762 /* Canonicalize sincos to cexpi. */
7763 if (!TARGET_C99_FUNCTIONS)
7765 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7769 call = build_call_expr (fn, 1, arg0);
7770 call = builtin_save_expr (call);
7772 return build2 (COMPOUND_EXPR, type,
7773 build2 (MODIFY_EXPR, void_type_node,
7774 build_fold_indirect_ref (arg1),
7775 build1 (IMAGPART_EXPR, type, call)),
7776 build2 (MODIFY_EXPR, void_type_node,
7777 build_fold_indirect_ref (arg2),
7778 build1 (REALPART_EXPR, type, call)));
7781 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7782 NULL_TREE if no simplification can be made. */
7785 fold_builtin_cexp (tree arg0, tree type)
7788 tree realp, imagp, ifn;
7790 if (!validate_arg (arg0, COMPLEX_TYPE))
7793 rtype = TREE_TYPE (TREE_TYPE (arg0));
7795 /* In case we can figure out the real part of arg0 and it is constant zero
7797 if (!TARGET_C99_FUNCTIONS)
7799 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7803 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7804 && real_zerop (realp))
7806 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7807 return build_call_expr (ifn, 1, narg);
7810 /* In case we can easily decompose real and imaginary parts split cexp
7811 to exp (r) * cexpi (i). */
7812 if (flag_unsafe_math_optimizations
7815 tree rfn, rcall, icall;
7817 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7821 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7825 icall = build_call_expr (ifn, 1, imagp);
7826 icall = builtin_save_expr (icall);
7827 rcall = build_call_expr (rfn, 1, realp);
7828 rcall = builtin_save_expr (rcall);
7829 return fold_build2 (COMPLEX_EXPR, type,
7830 fold_build2 (MULT_EXPR, rtype,
7832 fold_build1 (REALPART_EXPR, rtype, icall)),
7833 fold_build2 (MULT_EXPR, rtype,
7835 fold_build1 (IMAGPART_EXPR, rtype, icall)));
7841 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7842 Return NULL_TREE if no simplification can be made. */
7845 fold_builtin_trunc (tree fndecl, tree arg)
7847 if (!validate_arg (arg, REAL_TYPE))
7850 /* Optimize trunc of constant value. */
7851 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7853 REAL_VALUE_TYPE r, x;
7854 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7856 x = TREE_REAL_CST (arg);
7857 real_trunc (&r, TYPE_MODE (type), &x);
7858 return build_real (type, r);
7861 return fold_trunc_transparent_mathfn (fndecl, arg);
7864 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7865 Return NULL_TREE if no simplification can be made. */
7868 fold_builtin_floor (tree fndecl, tree arg)
7870 if (!validate_arg (arg, REAL_TYPE))
7873 /* Optimize floor of constant value. */
7874 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7878 x = TREE_REAL_CST (arg);
7879 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7881 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7884 real_floor (&r, TYPE_MODE (type), &x);
7885 return build_real (type, r);
7889 /* Fold floor (x) where x is nonnegative to trunc (x). */
7890 if (tree_expr_nonnegative_p (arg))
7892 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7894 return build_call_expr (truncfn, 1, arg);
7897 return fold_trunc_transparent_mathfn (fndecl, arg);
7900 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7901 Return NULL_TREE if no simplification can be made. */
7904 fold_builtin_ceil (tree fndecl, tree arg)
7906 if (!validate_arg (arg, REAL_TYPE))
7909 /* Optimize ceil of constant value. */
7910 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7914 x = TREE_REAL_CST (arg);
7915 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7917 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7920 real_ceil (&r, TYPE_MODE (type), &x);
7921 return build_real (type, r);
7925 return fold_trunc_transparent_mathfn (fndecl, arg);
7928 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7929 Return NULL_TREE if no simplification can be made. */
7932 fold_builtin_round (tree fndecl, tree arg)
7934 if (!validate_arg (arg, REAL_TYPE))
7937 /* Optimize round of constant value. */
7938 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7942 x = TREE_REAL_CST (arg);
7943 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7945 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7948 real_round (&r, TYPE_MODE (type), &x);
7949 return build_real (type, r);
7953 return fold_trunc_transparent_mathfn (fndecl, arg);
7956 /* Fold function call to builtin lround, lroundf or lroundl (or the
7957 corresponding long long versions) and other rounding functions. ARG
7958 is the argument to the call. Return NULL_TREE if no simplification
7962 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7964 if (!validate_arg (arg, REAL_TYPE))
7967 /* Optimize lround of constant value. */
7968 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7970 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7972 if (real_isfinite (&x))
7974 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7975 tree ftype = TREE_TYPE (arg);
7976 unsigned HOST_WIDE_INT lo2;
7977 HOST_WIDE_INT hi, lo;
7980 switch (DECL_FUNCTION_CODE (fndecl))
7982 CASE_FLT_FN (BUILT_IN_LFLOOR):
7983 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7984 real_floor (&r, TYPE_MODE (ftype), &x);
7987 CASE_FLT_FN (BUILT_IN_LCEIL):
7988 CASE_FLT_FN (BUILT_IN_LLCEIL):
7989 real_ceil (&r, TYPE_MODE (ftype), &x);
7992 CASE_FLT_FN (BUILT_IN_LROUND):
7993 CASE_FLT_FN (BUILT_IN_LLROUND):
7994 real_round (&r, TYPE_MODE (ftype), &x);
8001 REAL_VALUE_TO_INT (&lo, &hi, r);
8002 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8003 return build_int_cst_wide (itype, lo2, hi);
8007 switch (DECL_FUNCTION_CODE (fndecl))
8009 CASE_FLT_FN (BUILT_IN_LFLOOR):
8010 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8011 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8012 if (tree_expr_nonnegative_p (arg))
8013 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8019 return fold_fixed_mathfn (fndecl, arg);
8022 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8023 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8024 the argument to the call. Return NULL_TREE if no simplification can
8028 fold_builtin_bitop (tree fndecl, tree arg)
8030 if (!validate_arg (arg, INTEGER_TYPE))
8033 /* Optimize for constant argument. */
8034 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8036 HOST_WIDE_INT hi, width, result;
8037 unsigned HOST_WIDE_INT lo;
8040 type = TREE_TYPE (arg);
8041 width = TYPE_PRECISION (type);
8042 lo = TREE_INT_CST_LOW (arg);
8044 /* Clear all the bits that are beyond the type's precision. */
8045 if (width > HOST_BITS_PER_WIDE_INT)
8047 hi = TREE_INT_CST_HIGH (arg);
8048 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8049 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8054 if (width < HOST_BITS_PER_WIDE_INT)
8055 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8058 switch (DECL_FUNCTION_CODE (fndecl))
8060 CASE_INT_FN (BUILT_IN_FFS):
8062 result = exact_log2 (lo & -lo) + 1;
8064 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8069 CASE_INT_FN (BUILT_IN_CLZ):
8071 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8073 result = width - floor_log2 (lo) - 1;
8074 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8078 CASE_INT_FN (BUILT_IN_CTZ):
8080 result = exact_log2 (lo & -lo);
8082 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8083 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8087 CASE_INT_FN (BUILT_IN_POPCOUNT):
8090 result++, lo &= lo - 1;
8092 result++, hi &= hi - 1;
8095 CASE_INT_FN (BUILT_IN_PARITY):
8098 result++, lo &= lo - 1;
8100 result++, hi &= hi - 1;
8108 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8114 /* Fold function call to builtin_bswap and the long and long long
8115 variants. Return NULL_TREE if no simplification can be made. */
8117 fold_builtin_bswap (tree fndecl, tree arg)
8119 if (! validate_arg (arg, INTEGER_TYPE))
8122 /* Optimize constant value. */
8123 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8125 HOST_WIDE_INT hi, width, r_hi = 0;
8126 unsigned HOST_WIDE_INT lo, r_lo = 0;
8129 type = TREE_TYPE (arg);
8130 width = TYPE_PRECISION (type);
8131 lo = TREE_INT_CST_LOW (arg);
8132 hi = TREE_INT_CST_HIGH (arg);
8134 switch (DECL_FUNCTION_CODE (fndecl))
8136 case BUILT_IN_BSWAP32:
8137 case BUILT_IN_BSWAP64:
8141 for (s = 0; s < width; s += 8)
8143 int d = width - s - 8;
8144 unsigned HOST_WIDE_INT byte;
8146 if (s < HOST_BITS_PER_WIDE_INT)
8147 byte = (lo >> s) & 0xff;
8149 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8151 if (d < HOST_BITS_PER_WIDE_INT)
8154 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8164 if (width < HOST_BITS_PER_WIDE_INT)
8165 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8167 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8173 /* Return true if EXPR is the real constant contained in VALUE. */
8176 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8180 return ((TREE_CODE (expr) == REAL_CST
8181 && !TREE_OVERFLOW (expr)
8182 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8183 || (TREE_CODE (expr) == COMPLEX_CST
8184 && real_dconstp (TREE_REALPART (expr), value)
8185 && real_zerop (TREE_IMAGPART (expr))));
8188 /* A subroutine of fold_builtin to fold the various logarithmic
8189 functions. Return NULL_TREE if no simplification can me made.
8190 FUNC is the corresponding MPFR logarithm function. */
8193 fold_builtin_logarithm (tree fndecl, tree arg,
8194 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8196 if (validate_arg (arg, REAL_TYPE))
8198 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8200 const enum built_in_function fcode = builtin_mathfn_code (arg);
8202 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8203 instead we'll look for 'e' truncated to MODE. So only do
8204 this if flag_unsafe_math_optimizations is set. */
8205 if (flag_unsafe_math_optimizations && func == mpfr_log)
8207 const REAL_VALUE_TYPE e_truncated =
8208 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_e));
8209 if (real_dconstp (arg, &e_truncated))
8210 return build_real (type, dconst1);
8213 /* Calculate the result when the argument is a constant. */
8214 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8217 /* Special case, optimize logN(expN(x)) = x. */
8218 if (flag_unsafe_math_optimizations
8219 && ((func == mpfr_log
8220 && (fcode == BUILT_IN_EXP
8221 || fcode == BUILT_IN_EXPF
8222 || fcode == BUILT_IN_EXPL))
8223 || (func == mpfr_log2
8224 && (fcode == BUILT_IN_EXP2
8225 || fcode == BUILT_IN_EXP2F
8226 || fcode == BUILT_IN_EXP2L))
8227 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8228 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8230 /* Optimize logN(func()) for various exponential functions. We
8231 want to determine the value "x" and the power "exponent" in
8232 order to transform logN(x**exponent) into exponent*logN(x). */
8233 if (flag_unsafe_math_optimizations)
8235 tree exponent = 0, x = 0;
8239 CASE_FLT_FN (BUILT_IN_EXP):
8240 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8241 x = build_real (type,
8242 real_value_truncate (TYPE_MODE (type),
8243 *get_real_const (rv_e)));
8244 exponent = CALL_EXPR_ARG (arg, 0);
8246 CASE_FLT_FN (BUILT_IN_EXP2):
8247 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8248 x = build_real (type, dconst2);
8249 exponent = CALL_EXPR_ARG (arg, 0);
8251 CASE_FLT_FN (BUILT_IN_EXP10):
8252 CASE_FLT_FN (BUILT_IN_POW10):
8253 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8255 REAL_VALUE_TYPE dconst10;
8256 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8257 x = build_real (type, dconst10);
8259 exponent = CALL_EXPR_ARG (arg, 0);
8261 CASE_FLT_FN (BUILT_IN_SQRT):
8262 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8263 x = CALL_EXPR_ARG (arg, 0);
8264 exponent = build_real (type, dconsthalf);
8266 CASE_FLT_FN (BUILT_IN_CBRT):
8267 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8268 x = CALL_EXPR_ARG (arg, 0);
8269 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8270 *get_real_const (rv_third)));
8272 CASE_FLT_FN (BUILT_IN_POW):
8273 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8274 x = CALL_EXPR_ARG (arg, 0);
8275 exponent = CALL_EXPR_ARG (arg, 1);
8281 /* Now perform the optimization. */
8284 tree logfn = build_call_expr (fndecl, 1, x);
8285 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8293 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8294 NULL_TREE if no simplification can be made. */
8297 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8299 tree res, narg0, narg1;
8301 if (!validate_arg (arg0, REAL_TYPE)
8302 || !validate_arg (arg1, REAL_TYPE))
8305 /* Calculate the result when the argument is a constant. */
8306 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8309 /* If either argument to hypot has a negate or abs, strip that off.
8310 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8311 narg0 = fold_strip_sign_ops (arg0);
8312 narg1 = fold_strip_sign_ops (arg1);
8315 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8316 narg1 ? narg1 : arg1);
8319 /* If either argument is zero, hypot is fabs of the other. */
8320 if (real_zerop (arg0))
8321 return fold_build1 (ABS_EXPR, type, arg1);
8322 else if (real_zerop (arg1))
8323 return fold_build1 (ABS_EXPR, type, arg0);
8325 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8326 if (flag_unsafe_math_optimizations
8327 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8329 const REAL_VALUE_TYPE sqrt2_trunc
8330 = real_value_truncate (TYPE_MODE (type), *get_real_const (rv_sqrt2));
8331 return fold_build2 (MULT_EXPR, type,
8332 fold_build1 (ABS_EXPR, type, arg0),
8333 build_real (type, sqrt2_trunc));
8340 /* Fold a builtin function call to pow, powf, or powl. Return
8341 NULL_TREE if no simplification can be made. */
8343 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8347 if (!validate_arg (arg0, REAL_TYPE)
8348 || !validate_arg (arg1, REAL_TYPE))
8351 /* Calculate the result when the argument is a constant. */
8352 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8355 /* Optimize pow(1.0,y) = 1.0. */
8356 if (real_onep (arg0))
8357 return omit_one_operand (type, build_real (type, dconst1), arg1);
8359 if (TREE_CODE (arg1) == REAL_CST
8360 && !TREE_OVERFLOW (arg1))
8362 REAL_VALUE_TYPE cint;
8366 c = TREE_REAL_CST (arg1);
8368 /* Optimize pow(x,0.0) = 1.0. */
8369 if (REAL_VALUES_EQUAL (c, dconst0))
8370 return omit_one_operand (type, build_real (type, dconst1),
8373 /* Optimize pow(x,1.0) = x. */
8374 if (REAL_VALUES_EQUAL (c, dconst1))
8377 /* Optimize pow(x,-1.0) = 1.0/x. */
8378 if (REAL_VALUES_EQUAL (c, dconstm1))
8379 return fold_build2 (RDIV_EXPR, type,
8380 build_real (type, dconst1), arg0);
8382 /* Optimize pow(x,0.5) = sqrt(x). */
8383 if (flag_unsafe_math_optimizations
8384 && REAL_VALUES_EQUAL (c, dconsthalf))
8386 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8388 if (sqrtfn != NULL_TREE)
8389 return build_call_expr (sqrtfn, 1, arg0);
8392 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8393 if (flag_unsafe_math_optimizations)
8395 const REAL_VALUE_TYPE dconstroot
8396 = real_value_truncate (TYPE_MODE (type),
8397 *get_real_const (rv_third));
8399 if (REAL_VALUES_EQUAL (c, dconstroot))
8401 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8402 if (cbrtfn != NULL_TREE)
8403 return build_call_expr (cbrtfn, 1, arg0);
8407 /* Check for an integer exponent. */
8408 n = real_to_integer (&c);
8409 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8410 if (real_identical (&c, &cint))
8412 /* Attempt to evaluate pow at compile-time. */
8413 if (TREE_CODE (arg0) == REAL_CST
8414 && !TREE_OVERFLOW (arg0))
8419 x = TREE_REAL_CST (arg0);
8420 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8421 if (flag_unsafe_math_optimizations || !inexact)
8422 return build_real (type, x);
8425 /* Strip sign ops from even integer powers. */
8426 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8428 tree narg0 = fold_strip_sign_ops (arg0);
8430 return build_call_expr (fndecl, 2, narg0, arg1);
8435 if (flag_unsafe_math_optimizations)
8437 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8439 /* Optimize pow(expN(x),y) = expN(x*y). */
8440 if (BUILTIN_EXPONENT_P (fcode))
8442 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8443 tree arg = CALL_EXPR_ARG (arg0, 0);
8444 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8445 return build_call_expr (expfn, 1, arg);
8448 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8449 if (BUILTIN_SQRT_P (fcode))
8451 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8452 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8453 build_real (type, dconsthalf));
8454 return build_call_expr (fndecl, 2, narg0, narg1);
8457 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8458 if (BUILTIN_CBRT_P (fcode))
8460 tree arg = CALL_EXPR_ARG (arg0, 0);
8461 if (tree_expr_nonnegative_p (arg))
8463 const REAL_VALUE_TYPE dconstroot
8464 = real_value_truncate (TYPE_MODE (type),
8465 *get_real_const (rv_third));
8466 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8467 build_real (type, dconstroot));
8468 return build_call_expr (fndecl, 2, arg, narg1);
8472 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8473 if (fcode == BUILT_IN_POW
8474 || fcode == BUILT_IN_POWF
8475 || fcode == BUILT_IN_POWL)
8477 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8478 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8479 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8480 return build_call_expr (fndecl, 2, arg00, narg1);
8487 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8488 Return NULL_TREE if no simplification can be made. */
8490 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8491 tree arg0, tree arg1, tree type)
8493 if (!validate_arg (arg0, REAL_TYPE)
8494 || !validate_arg (arg1, INTEGER_TYPE))
8497 /* Optimize pow(1.0,y) = 1.0. */
8498 if (real_onep (arg0))
8499 return omit_one_operand (type, build_real (type, dconst1), arg1);
8501 if (host_integerp (arg1, 0))
8503 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8505 /* Evaluate powi at compile-time. */
8506 if (TREE_CODE (arg0) == REAL_CST
8507 && !TREE_OVERFLOW (arg0))
8510 x = TREE_REAL_CST (arg0);
8511 real_powi (&x, TYPE_MODE (type), &x, c);
8512 return build_real (type, x);
8515 /* Optimize pow(x,0) = 1.0. */
8517 return omit_one_operand (type, build_real (type, dconst1),
8520 /* Optimize pow(x,1) = x. */
8524 /* Optimize pow(x,-1) = 1.0/x. */
8526 return fold_build2 (RDIV_EXPR, type,
8527 build_real (type, dconst1), arg0);
8533 /* A subroutine of fold_builtin to fold the various exponent
8534 functions. Return NULL_TREE if no simplification can be made.
8535 FUNC is the corresponding MPFR exponent function. */
8538 fold_builtin_exponent (tree fndecl, tree arg,
8539 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8541 if (validate_arg (arg, REAL_TYPE))
8543 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8546 /* Calculate the result when the argument is a constant. */
8547 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8550 /* Optimize expN(logN(x)) = x. */
8551 if (flag_unsafe_math_optimizations)
8553 const enum built_in_function fcode = builtin_mathfn_code (arg);
8555 if ((func == mpfr_exp
8556 && (fcode == BUILT_IN_LOG
8557 || fcode == BUILT_IN_LOGF
8558 || fcode == BUILT_IN_LOGL))
8559 || (func == mpfr_exp2
8560 && (fcode == BUILT_IN_LOG2
8561 || fcode == BUILT_IN_LOG2F
8562 || fcode == BUILT_IN_LOG2L))
8563 || (func == mpfr_exp10
8564 && (fcode == BUILT_IN_LOG10
8565 || fcode == BUILT_IN_LOG10F
8566 || fcode == BUILT_IN_LOG10L)))
8567 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8574 /* Return true if VAR is a VAR_DECL or a component thereof. */
8577 var_decl_component_p (tree var)
8580 while (handled_component_p (inner))
8581 inner = TREE_OPERAND (inner, 0);
8582 return SSA_VAR_P (inner);
8585 /* Fold function call to builtin memset. Return
8586 NULL_TREE if no simplification can be made. */
8589 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8592 unsigned HOST_WIDE_INT length, cval;
8594 if (! validate_arg (dest, POINTER_TYPE)
8595 || ! validate_arg (c, INTEGER_TYPE)
8596 || ! validate_arg (len, INTEGER_TYPE))
8599 if (! host_integerp (len, 1))
8602 /* If the LEN parameter is zero, return DEST. */
8603 if (integer_zerop (len))
8604 return omit_one_operand (type, dest, c);
8606 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8611 if (TREE_CODE (var) != ADDR_EXPR)
8614 var = TREE_OPERAND (var, 0);
8615 if (TREE_THIS_VOLATILE (var))
8618 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8619 && !POINTER_TYPE_P (TREE_TYPE (var)))
8622 if (! var_decl_component_p (var))
8625 length = tree_low_cst (len, 1);
8626 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8627 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8631 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8634 if (integer_zerop (c))
8638 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8641 cval = tree_low_cst (c, 1);
8645 cval |= (cval << 31) << 1;
8648 ret = build_int_cst_type (TREE_TYPE (var), cval);
8649 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8653 return omit_one_operand (type, dest, ret);
8656 /* Fold function call to builtin memset. Return
8657 NULL_TREE if no simplification can be made. */
8660 fold_builtin_bzero (tree dest, tree size, bool ignore)
8662 if (! validate_arg (dest, POINTER_TYPE)
8663 || ! validate_arg (size, INTEGER_TYPE))
8669 /* New argument list transforming bzero(ptr x, int y) to
8670 memset(ptr x, int 0, size_t y). This is done this way
8671 so that if it isn't expanded inline, we fallback to
8672 calling bzero instead of memset. */
8674 return fold_builtin_memset (dest, integer_zero_node,
8675 fold_convert (sizetype, size),
8676 void_type_node, ignore);
8679 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8680 NULL_TREE if no simplification can be made.
8681 If ENDP is 0, return DEST (like memcpy).
8682 If ENDP is 1, return DEST+LEN (like mempcpy).
8683 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8684 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8688 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8690 tree destvar, srcvar, expr;
8692 if (! validate_arg (dest, POINTER_TYPE)
8693 || ! validate_arg (src, POINTER_TYPE)
8694 || ! validate_arg (len, INTEGER_TYPE))
8697 /* If the LEN parameter is zero, return DEST. */
8698 if (integer_zerop (len))
8699 return omit_one_operand (type, dest, src);
8701 /* If SRC and DEST are the same (and not volatile), return
8702 DEST{,+LEN,+LEN-1}. */
8703 if (operand_equal_p (src, dest, 0))
8707 tree srctype, desttype;
8710 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8711 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8713 /* Both DEST and SRC must be pointer types.
8714 ??? This is what old code did. Is the testing for pointer types
8717 If either SRC is readonly or length is 1, we can use memcpy. */
8718 if (dest_align && src_align
8719 && (readonly_data_expr (src)
8720 || (host_integerp (len, 1)
8721 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8722 tree_low_cst (len, 1)))))
8724 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8727 return build_call_expr (fn, 3, dest, src, len);
8732 if (!host_integerp (len, 0))
8735 This logic lose for arguments like (type *)malloc (sizeof (type)),
8736 since we strip the casts of up to VOID return value from malloc.
8737 Perhaps we ought to inherit type from non-VOID argument here? */
8740 srctype = TREE_TYPE (TREE_TYPE (src));
8741 desttype = TREE_TYPE (TREE_TYPE (dest));
8742 if (!srctype || !desttype
8743 || !TYPE_SIZE_UNIT (srctype)
8744 || !TYPE_SIZE_UNIT (desttype)
8745 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8746 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8747 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8748 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8751 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8752 < (int) TYPE_ALIGN (desttype)
8753 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8754 < (int) TYPE_ALIGN (srctype)))
8758 dest = builtin_save_expr (dest);
8760 srcvar = build_fold_indirect_ref (src);
8761 if (TREE_THIS_VOLATILE (srcvar))
8763 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8765 /* With memcpy, it is possible to bypass aliasing rules, so without
8766 this check i. e. execute/20060930-2.c would be misoptimized, because
8767 it use conflicting alias set to hold argument for the memcpy call.
8768 This check is probably unnecesary with -fno-strict-aliasing.
8769 Similarly for destvar. See also PR29286. */
8770 if (!var_decl_component_p (srcvar)
8771 /* Accept: memcpy (*char_var, "test", 1); that simplify
8773 || is_gimple_min_invariant (srcvar)
8774 || readonly_data_expr (src))
8777 destvar = build_fold_indirect_ref (dest);
8778 if (TREE_THIS_VOLATILE (destvar))
8780 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8782 if (!var_decl_component_p (destvar))
8785 if (srctype == desttype
8786 || (gimple_in_ssa_p (cfun)
8787 && useless_type_conversion_p (desttype, srctype)))
8789 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8790 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8791 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8792 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8793 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8795 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8796 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8802 if (endp == 0 || endp == 3)
8803 return omit_one_operand (type, dest, expr);
8809 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8812 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8813 dest = fold_convert (type, dest);
8815 dest = omit_one_operand (type, dest, expr);
8819 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8820 If LEN is not NULL, it represents the length of the string to be
8821 copied. Return NULL_TREE if no simplification can be made. */
8824 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8828 if (!validate_arg (dest, POINTER_TYPE)
8829 || !validate_arg (src, POINTER_TYPE))
8832 /* If SRC and DEST are the same (and not volatile), return DEST. */
8833 if (operand_equal_p (src, dest, 0))
8834 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8839 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8845 len = c_strlen (src, 1);
8846 if (! len || TREE_SIDE_EFFECTS (len))
8850 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8851 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8852 build_call_expr (fn, 3, dest, src, len));
8855 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8856 If SLEN is not NULL, it represents the length of the source string.
8857 Return NULL_TREE if no simplification can be made. */
8860 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8864 if (!validate_arg (dest, POINTER_TYPE)
8865 || !validate_arg (src, POINTER_TYPE)
8866 || !validate_arg (len, INTEGER_TYPE))
8869 /* If the LEN parameter is zero, return DEST. */
8870 if (integer_zerop (len))
8871 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8873 /* We can't compare slen with len as constants below if len is not a
8875 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8879 slen = c_strlen (src, 1);
8881 /* Now, we must be passed a constant src ptr parameter. */
8882 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8885 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8887 /* We do not support simplification of this case, though we do
8888 support it when expanding trees into RTL. */
8889 /* FIXME: generate a call to __builtin_memset. */
8890 if (tree_int_cst_lt (slen, len))
8893 /* OK transform into builtin memcpy. */
8894 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8897 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8898 build_call_expr (fn, 3, dest, src, len));
8901 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8902 arguments to the call, and TYPE is its return type.
8903 Return NULL_TREE if no simplification can be made. */
8906 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8908 if (!validate_arg (arg1, POINTER_TYPE)
8909 || !validate_arg (arg2, INTEGER_TYPE)
8910 || !validate_arg (len, INTEGER_TYPE))
8916 if (TREE_CODE (arg2) != INTEGER_CST
8917 || !host_integerp (len, 1))
8920 p1 = c_getstr (arg1);
8921 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8927 if (target_char_cast (arg2, &c))
8930 r = memchr (p1, c, tree_low_cst (len, 1));
8933 return build_int_cst (TREE_TYPE (arg1), 0);
8935 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8937 return fold_convert (type, tem);
8943 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8944 Return NULL_TREE if no simplification can be made. */
8947 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8949 const char *p1, *p2;
8951 if (!validate_arg (arg1, POINTER_TYPE)
8952 || !validate_arg (arg2, POINTER_TYPE)
8953 || !validate_arg (len, INTEGER_TYPE))
8956 /* If the LEN parameter is zero, return zero. */
8957 if (integer_zerop (len))
8958 return omit_two_operands (integer_type_node, integer_zero_node,
8961 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8962 if (operand_equal_p (arg1, arg2, 0))
8963 return omit_one_operand (integer_type_node, integer_zero_node, len);
8965 p1 = c_getstr (arg1);
8966 p2 = c_getstr (arg2);
8968 /* If all arguments are constant, and the value of len is not greater
8969 than the lengths of arg1 and arg2, evaluate at compile-time. */
8970 if (host_integerp (len, 1) && p1 && p2
8971 && compare_tree_int (len, strlen (p1) + 1) <= 0
8972 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8974 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8977 return integer_one_node;
8979 return integer_minus_one_node;
8981 return integer_zero_node;
8984 /* If len parameter is one, return an expression corresponding to
8985 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8986 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8988 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8989 tree cst_uchar_ptr_node
8990 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8992 tree ind1 = fold_convert (integer_type_node,
8993 build1 (INDIRECT_REF, cst_uchar_node,
8994 fold_convert (cst_uchar_ptr_node,
8996 tree ind2 = fold_convert (integer_type_node,
8997 build1 (INDIRECT_REF, cst_uchar_node,
8998 fold_convert (cst_uchar_ptr_node,
9000 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9006 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9007 Return NULL_TREE if no simplification can be made. */
9010 fold_builtin_strcmp (tree arg1, tree arg2)
9012 const char *p1, *p2;
9014 if (!validate_arg (arg1, POINTER_TYPE)
9015 || !validate_arg (arg2, POINTER_TYPE))
9018 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9019 if (operand_equal_p (arg1, arg2, 0))
9020 return integer_zero_node;
9022 p1 = c_getstr (arg1);
9023 p2 = c_getstr (arg2);
9027 const int i = strcmp (p1, p2);
9029 return integer_minus_one_node;
9031 return integer_one_node;
9033 return integer_zero_node;
9036 /* If the second arg is "", return *(const unsigned char*)arg1. */
9037 if (p2 && *p2 == '\0')
9039 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9040 tree cst_uchar_ptr_node
9041 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9043 return fold_convert (integer_type_node,
9044 build1 (INDIRECT_REF, cst_uchar_node,
9045 fold_convert (cst_uchar_ptr_node,
9049 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9050 if (p1 && *p1 == '\0')
9052 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9053 tree cst_uchar_ptr_node
9054 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9056 tree temp = fold_convert (integer_type_node,
9057 build1 (INDIRECT_REF, cst_uchar_node,
9058 fold_convert (cst_uchar_ptr_node,
9060 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9066 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9067 Return NULL_TREE if no simplification can be made. */
9070 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9072 const char *p1, *p2;
9074 if (!validate_arg (arg1, POINTER_TYPE)
9075 || !validate_arg (arg2, POINTER_TYPE)
9076 || !validate_arg (len, INTEGER_TYPE))
9079 /* If the LEN parameter is zero, return zero. */
9080 if (integer_zerop (len))
9081 return omit_two_operands (integer_type_node, integer_zero_node,
9084 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9085 if (operand_equal_p (arg1, arg2, 0))
9086 return omit_one_operand (integer_type_node, integer_zero_node, len);
9088 p1 = c_getstr (arg1);
9089 p2 = c_getstr (arg2);
9091 if (host_integerp (len, 1) && p1 && p2)
9093 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9095 return integer_one_node;
9097 return integer_minus_one_node;
9099 return integer_zero_node;
9102 /* If the second arg is "", and the length is greater than zero,
9103 return *(const unsigned char*)arg1. */
9104 if (p2 && *p2 == '\0'
9105 && TREE_CODE (len) == INTEGER_CST
9106 && tree_int_cst_sgn (len) == 1)
9108 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9109 tree cst_uchar_ptr_node
9110 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9112 return fold_convert (integer_type_node,
9113 build1 (INDIRECT_REF, cst_uchar_node,
9114 fold_convert (cst_uchar_ptr_node,
9118 /* If the first arg is "", and the length is greater than zero,
9119 return -*(const unsigned char*)arg2. */
9120 if (p1 && *p1 == '\0'
9121 && TREE_CODE (len) == INTEGER_CST
9122 && tree_int_cst_sgn (len) == 1)
9124 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9125 tree cst_uchar_ptr_node
9126 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9128 tree temp = fold_convert (integer_type_node,
9129 build1 (INDIRECT_REF, cst_uchar_node,
9130 fold_convert (cst_uchar_ptr_node,
9132 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9135 /* If len parameter is one, return an expression corresponding to
9136 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9137 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9139 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9140 tree cst_uchar_ptr_node
9141 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9143 tree ind1 = fold_convert (integer_type_node,
9144 build1 (INDIRECT_REF, cst_uchar_node,
9145 fold_convert (cst_uchar_ptr_node,
9147 tree ind2 = fold_convert (integer_type_node,
9148 build1 (INDIRECT_REF, cst_uchar_node,
9149 fold_convert (cst_uchar_ptr_node,
9151 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9157 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9158 ARG. Return NULL_TREE if no simplification can be made. */
9161 fold_builtin_signbit (tree arg, tree type)
9165 if (!validate_arg (arg, REAL_TYPE))
9168 /* If ARG is a compile-time constant, determine the result. */
9169 if (TREE_CODE (arg) == REAL_CST
9170 && !TREE_OVERFLOW (arg))
9174 c = TREE_REAL_CST (arg);
9175 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9176 return fold_convert (type, temp);
9179 /* If ARG is non-negative, the result is always zero. */
9180 if (tree_expr_nonnegative_p (arg))
9181 return omit_one_operand (type, integer_zero_node, arg);
9183 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9184 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9185 return fold_build2 (LT_EXPR, type, arg,
9186 build_real (TREE_TYPE (arg), dconst0));
9191 /* Fold function call to builtin copysign, copysignf or copysignl with
9192 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9196 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9200 if (!validate_arg (arg1, REAL_TYPE)
9201 || !validate_arg (arg2, REAL_TYPE))
9204 /* copysign(X,X) is X. */
9205 if (operand_equal_p (arg1, arg2, 0))
9206 return fold_convert (type, arg1);
9208 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9209 if (TREE_CODE (arg1) == REAL_CST
9210 && TREE_CODE (arg2) == REAL_CST
9211 && !TREE_OVERFLOW (arg1)
9212 && !TREE_OVERFLOW (arg2))
9214 REAL_VALUE_TYPE c1, c2;
9216 c1 = TREE_REAL_CST (arg1);
9217 c2 = TREE_REAL_CST (arg2);
9218 /* c1.sign := c2.sign. */
9219 real_copysign (&c1, &c2);
9220 return build_real (type, c1);
9223 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9224 Remember to evaluate Y for side-effects. */
9225 if (tree_expr_nonnegative_p (arg2))
9226 return omit_one_operand (type,
9227 fold_build1 (ABS_EXPR, type, arg1),
9230 /* Strip sign changing operations for the first argument. */
9231 tem = fold_strip_sign_ops (arg1);
9233 return build_call_expr (fndecl, 2, tem, arg2);
9238 /* Fold a call to builtin isascii with argument ARG. */
9241 fold_builtin_isascii (tree arg)
9243 if (!validate_arg (arg, INTEGER_TYPE))
9247 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9248 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9249 build_int_cst (NULL_TREE,
9250 ~ (unsigned HOST_WIDE_INT) 0x7f));
9251 return fold_build2 (EQ_EXPR, integer_type_node,
9252 arg, integer_zero_node);
9256 /* Fold a call to builtin toascii with argument ARG. */
9259 fold_builtin_toascii (tree arg)
9261 if (!validate_arg (arg, INTEGER_TYPE))
9264 /* Transform toascii(c) -> (c & 0x7f). */
9265 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9266 build_int_cst (NULL_TREE, 0x7f));
9269 /* Fold a call to builtin isdigit with argument ARG. */
9272 fold_builtin_isdigit (tree arg)
9274 if (!validate_arg (arg, INTEGER_TYPE))
9278 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9279 /* According to the C standard, isdigit is unaffected by locale.
9280 However, it definitely is affected by the target character set. */
9281 unsigned HOST_WIDE_INT target_digit0
9282 = lang_hooks.to_target_charset ('0');
9284 if (target_digit0 == 0)
9287 arg = fold_convert (unsigned_type_node, arg);
9288 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9289 build_int_cst (unsigned_type_node, target_digit0));
9290 return fold_build2 (LE_EXPR, integer_type_node, arg,
9291 build_int_cst (unsigned_type_node, 9));
9295 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9298 fold_builtin_fabs (tree arg, tree type)
9300 if (!validate_arg (arg, REAL_TYPE))
9303 arg = fold_convert (type, arg);
9304 if (TREE_CODE (arg) == REAL_CST)
9305 return fold_abs_const (arg, type);
9306 return fold_build1 (ABS_EXPR, type, arg);
9309 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9312 fold_builtin_abs (tree arg, tree type)
9314 if (!validate_arg (arg, INTEGER_TYPE))
9317 arg = fold_convert (type, arg);
9318 if (TREE_CODE (arg) == INTEGER_CST)
9319 return fold_abs_const (arg, type);
9320 return fold_build1 (ABS_EXPR, type, arg);
9323 /* Fold a call to builtin fmin or fmax. */
9326 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9328 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9330 /* Calculate the result when the argument is a constant. */
9331 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9336 /* If either argument is NaN, return the other one. Avoid the
9337 transformation if we get (and honor) a signalling NaN. Using
9338 omit_one_operand() ensures we create a non-lvalue. */
9339 if (TREE_CODE (arg0) == REAL_CST
9340 && real_isnan (&TREE_REAL_CST (arg0))
9341 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9342 || ! TREE_REAL_CST (arg0).signalling))
9343 return omit_one_operand (type, arg1, arg0);
9344 if (TREE_CODE (arg1) == REAL_CST
9345 && real_isnan (&TREE_REAL_CST (arg1))
9346 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9347 || ! TREE_REAL_CST (arg1).signalling))
9348 return omit_one_operand (type, arg0, arg1);
9350 /* Transform fmin/fmax(x,x) -> x. */
9351 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9352 return omit_one_operand (type, arg0, arg1);
9354 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9355 functions to return the numeric arg if the other one is NaN.
9356 These tree codes don't honor that, so only transform if
9357 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9358 handled, so we don't have to worry about it either. */
9359 if (flag_finite_math_only)
9360 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9361 fold_convert (type, arg0),
9362 fold_convert (type, arg1));
9367 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9370 fold_builtin_carg (tree arg, tree type)
9372 if (validate_arg (arg, COMPLEX_TYPE))
9374 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9378 tree new_arg = builtin_save_expr (arg);
9379 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9380 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9381 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9388 /* Fold a call to builtin logb/ilogb. */
9391 fold_builtin_logb (tree arg, tree rettype)
9393 if (! validate_arg (arg, REAL_TYPE))
9398 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9400 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9406 /* If arg is Inf or NaN and we're logb, return it. */
9407 if (TREE_CODE (rettype) == REAL_TYPE)
9408 return fold_convert (rettype, arg);
9409 /* Fall through... */
9411 /* Zero may set errno and/or raise an exception for logb, also
9412 for ilogb we don't know FP_ILOGB0. */
9415 /* For normal numbers, proceed iff radix == 2. In GCC,
9416 normalized significands are in the range [0.5, 1.0). We
9417 want the exponent as if they were [1.0, 2.0) so get the
9418 exponent and subtract 1. */
9419 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9420 return fold_convert (rettype, build_int_cst (NULL_TREE,
9421 REAL_EXP (value)-1));
9429 /* Fold a call to builtin significand, if radix == 2. */
9432 fold_builtin_significand (tree arg, tree rettype)
9434 if (! validate_arg (arg, REAL_TYPE))
9439 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9441 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9448 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9449 return fold_convert (rettype, arg);
9451 /* For normal numbers, proceed iff radix == 2. */
9452 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9454 REAL_VALUE_TYPE result = *value;
9455 /* In GCC, normalized significands are in the range [0.5,
9456 1.0). We want them to be [1.0, 2.0) so set the
9458 SET_REAL_EXP (&result, 1);
9459 return build_real (rettype, result);
9468 /* Fold a call to builtin frexp, we can assume the base is 2. */
9471 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9473 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9478 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9481 arg1 = build_fold_indirect_ref (arg1);
9483 /* Proceed if a valid pointer type was passed in. */
9484 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9486 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9492 /* For +-0, return (*exp = 0, +-0). */
9493 exp = integer_zero_node;
9498 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9499 return omit_one_operand (rettype, arg0, arg1);
9502 /* Since the frexp function always expects base 2, and in
9503 GCC normalized significands are already in the range
9504 [0.5, 1.0), we have exactly what frexp wants. */
9505 REAL_VALUE_TYPE frac_rvt = *value;
9506 SET_REAL_EXP (&frac_rvt, 0);
9507 frac = build_real (rettype, frac_rvt);
9508 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9515 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9516 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9517 TREE_SIDE_EFFECTS (arg1) = 1;
9518 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9524 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9525 then we can assume the base is two. If it's false, then we have to
9526 check the mode of the TYPE parameter in certain cases. */
9529 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9531 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9536 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9537 if (real_zerop (arg0) || integer_zerop (arg1)
9538 || (TREE_CODE (arg0) == REAL_CST
9539 && !real_isfinite (&TREE_REAL_CST (arg0))))
9540 return omit_one_operand (type, arg0, arg1);
9542 /* If both arguments are constant, then try to evaluate it. */
9543 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9544 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9545 && host_integerp (arg1, 0))
9547 /* Bound the maximum adjustment to twice the range of the
9548 mode's valid exponents. Use abs to ensure the range is
9549 positive as a sanity check. */
9550 const long max_exp_adj = 2 *
9551 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9552 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9554 /* Get the user-requested adjustment. */
9555 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9557 /* The requested adjustment must be inside this range. This
9558 is a preliminary cap to avoid things like overflow, we
9559 may still fail to compute the result for other reasons. */
9560 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9562 REAL_VALUE_TYPE initial_result;
9564 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9566 /* Ensure we didn't overflow. */
9567 if (! real_isinf (&initial_result))
9569 const REAL_VALUE_TYPE trunc_result
9570 = real_value_truncate (TYPE_MODE (type), initial_result);
9572 /* Only proceed if the target mode can hold the
9574 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9575 return build_real (type, trunc_result);
9584 /* Fold a call to builtin modf. */
9587 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9589 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9594 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9597 arg1 = build_fold_indirect_ref (arg1);
9599 /* Proceed if a valid pointer type was passed in. */
9600 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9602 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9603 REAL_VALUE_TYPE trunc, frac;
9609 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9610 trunc = frac = *value;
9613 /* For +-Inf, return (*arg1 = arg0, +-0). */
9615 frac.sign = value->sign;
9619 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9620 real_trunc (&trunc, VOIDmode, value);
9621 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9622 /* If the original number was negative and already
9623 integral, then the fractional part is -0.0. */
9624 if (value->sign && frac.cl == rvc_zero)
9625 frac.sign = value->sign;
9629 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9630 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9631 build_real (rettype, trunc));
9632 TREE_SIDE_EFFECTS (arg1) = 1;
9633 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9634 build_real (rettype, frac));
9640 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9641 ARG is the argument for the call. */
9644 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9646 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9649 if (!validate_arg (arg, REAL_TYPE))
9651 error ("non-floating-point argument to function %qs",
9652 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9653 return error_mark_node;
9656 switch (builtin_index)
9658 case BUILT_IN_ISINF:
9659 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9660 return omit_one_operand (type, integer_zero_node, arg);
9662 if (TREE_CODE (arg) == REAL_CST)
9664 r = TREE_REAL_CST (arg);
9665 if (real_isinf (&r))
9666 return real_compare (GT_EXPR, &r, &dconst0)
9667 ? integer_one_node : integer_minus_one_node;
9669 return integer_zero_node;
9674 case BUILT_IN_ISFINITE:
9675 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9676 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9677 return omit_one_operand (type, integer_one_node, arg);
9679 if (TREE_CODE (arg) == REAL_CST)
9681 r = TREE_REAL_CST (arg);
9682 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9687 case BUILT_IN_ISNAN:
9688 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9689 return omit_one_operand (type, integer_zero_node, arg);
9691 if (TREE_CODE (arg) == REAL_CST)
9693 r = TREE_REAL_CST (arg);
9694 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9697 arg = builtin_save_expr (arg);
9698 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9705 /* Fold a call to an unordered comparison function such as
9706 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9707 being called and ARG0 and ARG1 are the arguments for the call.
9708 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9709 the opposite of the desired result. UNORDERED_CODE is used
9710 for modes that can hold NaNs and ORDERED_CODE is used for
9714 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9715 enum tree_code unordered_code,
9716 enum tree_code ordered_code)
9718 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9719 enum tree_code code;
9721 enum tree_code code0, code1;
9722 tree cmp_type = NULL_TREE;
9724 type0 = TREE_TYPE (arg0);
9725 type1 = TREE_TYPE (arg1);
9727 code0 = TREE_CODE (type0);
9728 code1 = TREE_CODE (type1);
9730 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9731 /* Choose the wider of two real types. */
9732 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9734 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9736 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9740 error ("non-floating-point argument to function %qs",
9741 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9742 return error_mark_node;
9745 arg0 = fold_convert (cmp_type, arg0);
9746 arg1 = fold_convert (cmp_type, arg1);
9748 if (unordered_code == UNORDERED_EXPR)
9750 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9751 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9752 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9755 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9757 return fold_build1 (TRUTH_NOT_EXPR, type,
9758 fold_build2 (code, type, arg0, arg1));
9761 /* Fold a call to built-in function FNDECL with 0 arguments.
9762 IGNORE is true if the result of the function call is ignored. This
9763 function returns NULL_TREE if no simplification was possible. */
9766 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9768 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9769 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9772 CASE_FLT_FN (BUILT_IN_INF):
9773 case BUILT_IN_INFD32:
9774 case BUILT_IN_INFD64:
9775 case BUILT_IN_INFD128:
9776 return fold_builtin_inf (type, true);
9778 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9779 return fold_builtin_inf (type, false);
9781 case BUILT_IN_CLASSIFY_TYPE:
9782 return fold_builtin_classify_type (NULL_TREE);
9790 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9791 IGNORE is true if the result of the function call is ignored. This
9792 function returns NULL_TREE if no simplification was possible. */
9795 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9797 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9798 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9802 case BUILT_IN_CONSTANT_P:
9804 tree val = fold_builtin_constant_p (arg0);
9806 /* Gimplification will pull the CALL_EXPR for the builtin out of
9807 an if condition. When not optimizing, we'll not CSE it back.
9808 To avoid link error types of regressions, return false now. */
9809 if (!val && !optimize)
9810 val = integer_zero_node;
9815 case BUILT_IN_CLASSIFY_TYPE:
9816 return fold_builtin_classify_type (arg0);
9818 case BUILT_IN_STRLEN:
9819 return fold_builtin_strlen (arg0);
9821 CASE_FLT_FN (BUILT_IN_FABS):
9822 return fold_builtin_fabs (arg0, type);
9826 case BUILT_IN_LLABS:
9827 case BUILT_IN_IMAXABS:
9828 return fold_builtin_abs (arg0, type);
9830 CASE_FLT_FN (BUILT_IN_CONJ):
9831 if (validate_arg (arg0, COMPLEX_TYPE))
9832 return fold_build1 (CONJ_EXPR, type, arg0);
9835 CASE_FLT_FN (BUILT_IN_CREAL):
9836 if (validate_arg (arg0, COMPLEX_TYPE))
9837 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
9840 CASE_FLT_FN (BUILT_IN_CIMAG):
9841 if (validate_arg (arg0, COMPLEX_TYPE))
9842 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9845 CASE_FLT_FN (BUILT_IN_CCOS):
9846 CASE_FLT_FN (BUILT_IN_CCOSH):
9847 /* These functions are "even", i.e. f(x) == f(-x). */
9848 if (validate_arg (arg0, COMPLEX_TYPE))
9850 tree narg = fold_strip_sign_ops (arg0);
9852 return build_call_expr (fndecl, 1, narg);
9856 CASE_FLT_FN (BUILT_IN_CABS):
9857 return fold_builtin_cabs (arg0, type, fndecl);
9859 CASE_FLT_FN (BUILT_IN_CARG):
9860 return fold_builtin_carg (arg0, type);
9862 CASE_FLT_FN (BUILT_IN_SQRT):
9863 return fold_builtin_sqrt (arg0, type);
9865 CASE_FLT_FN (BUILT_IN_CBRT):
9866 return fold_builtin_cbrt (arg0, type);
9868 CASE_FLT_FN (BUILT_IN_ASIN):
9869 if (validate_arg (arg0, REAL_TYPE))
9870 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9871 &dconstm1, &dconst1, true);
9874 CASE_FLT_FN (BUILT_IN_ACOS):
9875 if (validate_arg (arg0, REAL_TYPE))
9876 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9877 &dconstm1, &dconst1, true);
9880 CASE_FLT_FN (BUILT_IN_ATAN):
9881 if (validate_arg (arg0, REAL_TYPE))
9882 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9885 CASE_FLT_FN (BUILT_IN_ASINH):
9886 if (validate_arg (arg0, REAL_TYPE))
9887 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9890 CASE_FLT_FN (BUILT_IN_ACOSH):
9891 if (validate_arg (arg0, REAL_TYPE))
9892 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9893 &dconst1, NULL, true);
9896 CASE_FLT_FN (BUILT_IN_ATANH):
9897 if (validate_arg (arg0, REAL_TYPE))
9898 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9899 &dconstm1, &dconst1, false);
9902 CASE_FLT_FN (BUILT_IN_SIN):
9903 if (validate_arg (arg0, REAL_TYPE))
9904 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9907 CASE_FLT_FN (BUILT_IN_COS):
9908 return fold_builtin_cos (arg0, type, fndecl);
9911 CASE_FLT_FN (BUILT_IN_TAN):
9912 return fold_builtin_tan (arg0, type);
9914 CASE_FLT_FN (BUILT_IN_CEXP):
9915 return fold_builtin_cexp (arg0, type);
9917 CASE_FLT_FN (BUILT_IN_CEXPI):
9918 if (validate_arg (arg0, REAL_TYPE))
9919 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9922 CASE_FLT_FN (BUILT_IN_SINH):
9923 if (validate_arg (arg0, REAL_TYPE))
9924 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9927 CASE_FLT_FN (BUILT_IN_COSH):
9928 return fold_builtin_cosh (arg0, type, fndecl);
9930 CASE_FLT_FN (BUILT_IN_TANH):
9931 if (validate_arg (arg0, REAL_TYPE))
9932 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9935 CASE_FLT_FN (BUILT_IN_ERF):
9936 if (validate_arg (arg0, REAL_TYPE))
9937 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9940 CASE_FLT_FN (BUILT_IN_ERFC):
9941 if (validate_arg (arg0, REAL_TYPE))
9942 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9945 CASE_FLT_FN (BUILT_IN_TGAMMA):
9946 if (validate_arg (arg0, REAL_TYPE))
9947 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9950 CASE_FLT_FN (BUILT_IN_EXP):
9951 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
9953 CASE_FLT_FN (BUILT_IN_EXP2):
9954 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
9956 CASE_FLT_FN (BUILT_IN_EXP10):
9957 CASE_FLT_FN (BUILT_IN_POW10):
9958 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
9960 CASE_FLT_FN (BUILT_IN_EXPM1):
9961 if (validate_arg (arg0, REAL_TYPE))
9962 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9965 CASE_FLT_FN (BUILT_IN_LOG):
9966 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
9968 CASE_FLT_FN (BUILT_IN_LOG2):
9969 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
9971 CASE_FLT_FN (BUILT_IN_LOG10):
9972 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
9974 CASE_FLT_FN (BUILT_IN_LOG1P):
9975 if (validate_arg (arg0, REAL_TYPE))
9976 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9977 &dconstm1, NULL, false);
9980 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9981 CASE_FLT_FN (BUILT_IN_J0):
9982 if (validate_arg (arg0, REAL_TYPE))
9983 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9987 CASE_FLT_FN (BUILT_IN_J1):
9988 if (validate_arg (arg0, REAL_TYPE))
9989 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9993 CASE_FLT_FN (BUILT_IN_Y0):
9994 if (validate_arg (arg0, REAL_TYPE))
9995 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9996 &dconst0, NULL, false);
9999 CASE_FLT_FN (BUILT_IN_Y1):
10000 if (validate_arg (arg0, REAL_TYPE))
10001 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10002 &dconst0, NULL, false);
10006 CASE_FLT_FN (BUILT_IN_NAN):
10007 case BUILT_IN_NAND32:
10008 case BUILT_IN_NAND64:
10009 case BUILT_IN_NAND128:
10010 return fold_builtin_nan (arg0, type, true);
10012 CASE_FLT_FN (BUILT_IN_NANS):
10013 return fold_builtin_nan (arg0, type, false);
10015 CASE_FLT_FN (BUILT_IN_FLOOR):
10016 return fold_builtin_floor (fndecl, arg0);
10018 CASE_FLT_FN (BUILT_IN_CEIL):
10019 return fold_builtin_ceil (fndecl, arg0);
10021 CASE_FLT_FN (BUILT_IN_TRUNC):
10022 return fold_builtin_trunc (fndecl, arg0);
10024 CASE_FLT_FN (BUILT_IN_ROUND):
10025 return fold_builtin_round (fndecl, arg0);
10027 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10028 CASE_FLT_FN (BUILT_IN_RINT):
10029 return fold_trunc_transparent_mathfn (fndecl, arg0);
10031 CASE_FLT_FN (BUILT_IN_LCEIL):
10032 CASE_FLT_FN (BUILT_IN_LLCEIL):
10033 CASE_FLT_FN (BUILT_IN_LFLOOR):
10034 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10035 CASE_FLT_FN (BUILT_IN_LROUND):
10036 CASE_FLT_FN (BUILT_IN_LLROUND):
10037 return fold_builtin_int_roundingfn (fndecl, arg0);
10039 CASE_FLT_FN (BUILT_IN_LRINT):
10040 CASE_FLT_FN (BUILT_IN_LLRINT):
10041 return fold_fixed_mathfn (fndecl, arg0);
10043 case BUILT_IN_BSWAP32:
10044 case BUILT_IN_BSWAP64:
10045 return fold_builtin_bswap (fndecl, arg0);
10047 CASE_INT_FN (BUILT_IN_FFS):
10048 CASE_INT_FN (BUILT_IN_CLZ):
10049 CASE_INT_FN (BUILT_IN_CTZ):
10050 CASE_INT_FN (BUILT_IN_POPCOUNT):
10051 CASE_INT_FN (BUILT_IN_PARITY):
10052 return fold_builtin_bitop (fndecl, arg0);
10054 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10055 return fold_builtin_signbit (arg0, type);
10057 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10058 return fold_builtin_significand (arg0, type);
10060 CASE_FLT_FN (BUILT_IN_ILOGB):
10061 CASE_FLT_FN (BUILT_IN_LOGB):
10062 return fold_builtin_logb (arg0, type);
10064 case BUILT_IN_ISASCII:
10065 return fold_builtin_isascii (arg0);
10067 case BUILT_IN_TOASCII:
10068 return fold_builtin_toascii (arg0);
10070 case BUILT_IN_ISDIGIT:
10071 return fold_builtin_isdigit (arg0);
10073 CASE_FLT_FN (BUILT_IN_FINITE):
10074 case BUILT_IN_FINITED32:
10075 case BUILT_IN_FINITED64:
10076 case BUILT_IN_FINITED128:
10077 case BUILT_IN_ISFINITE:
10078 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10080 CASE_FLT_FN (BUILT_IN_ISINF):
10081 case BUILT_IN_ISINFD32:
10082 case BUILT_IN_ISINFD64:
10083 case BUILT_IN_ISINFD128:
10084 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10086 CASE_FLT_FN (BUILT_IN_ISNAN):
10087 case BUILT_IN_ISNAND32:
10088 case BUILT_IN_ISNAND64:
10089 case BUILT_IN_ISNAND128:
10090 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10092 case BUILT_IN_ISNORMAL:
10093 if (!validate_arg (arg0, REAL_TYPE))
10095 error ("non-floating-point argument to function %qs",
10096 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
10097 return error_mark_node;
10101 case BUILT_IN_PRINTF:
10102 case BUILT_IN_PRINTF_UNLOCKED:
10103 case BUILT_IN_VPRINTF:
10104 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10114 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10115 IGNORE is true if the result of the function call is ignored. This
10116 function returns NULL_TREE if no simplification was possible. */
10119 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10121 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10122 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10126 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10127 CASE_FLT_FN (BUILT_IN_JN):
10128 if (validate_arg (arg0, INTEGER_TYPE)
10129 && validate_arg (arg1, REAL_TYPE))
10130 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10133 CASE_FLT_FN (BUILT_IN_YN):
10134 if (validate_arg (arg0, INTEGER_TYPE)
10135 && validate_arg (arg1, REAL_TYPE))
10136 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10140 CASE_FLT_FN (BUILT_IN_DREM):
10141 CASE_FLT_FN (BUILT_IN_REMAINDER):
10142 if (validate_arg (arg0, REAL_TYPE)
10143 && validate_arg(arg1, REAL_TYPE))
10144 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10147 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10148 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10149 if (validate_arg (arg0, REAL_TYPE)
10150 && validate_arg(arg1, POINTER_TYPE))
10151 return do_mpfr_lgamma_r (arg0, arg1, type);
10155 CASE_FLT_FN (BUILT_IN_ATAN2):
10156 if (validate_arg (arg0, REAL_TYPE)
10157 && validate_arg(arg1, REAL_TYPE))
10158 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10161 CASE_FLT_FN (BUILT_IN_FDIM):
10162 if (validate_arg (arg0, REAL_TYPE)
10163 && validate_arg(arg1, REAL_TYPE))
10164 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10167 CASE_FLT_FN (BUILT_IN_HYPOT):
10168 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10170 CASE_FLT_FN (BUILT_IN_LDEXP):
10171 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10172 CASE_FLT_FN (BUILT_IN_SCALBN):
10173 CASE_FLT_FN (BUILT_IN_SCALBLN):
10174 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10176 CASE_FLT_FN (BUILT_IN_FREXP):
10177 return fold_builtin_frexp (arg0, arg1, type);
10179 CASE_FLT_FN (BUILT_IN_MODF):
10180 return fold_builtin_modf (arg0, arg1, type);
10182 case BUILT_IN_BZERO:
10183 return fold_builtin_bzero (arg0, arg1, ignore);
10185 case BUILT_IN_FPUTS:
10186 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10188 case BUILT_IN_FPUTS_UNLOCKED:
10189 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10191 case BUILT_IN_STRSTR:
10192 return fold_builtin_strstr (arg0, arg1, type);
10194 case BUILT_IN_STRCAT:
10195 return fold_builtin_strcat (arg0, arg1);
10197 case BUILT_IN_STRSPN:
10198 return fold_builtin_strspn (arg0, arg1);
10200 case BUILT_IN_STRCSPN:
10201 return fold_builtin_strcspn (arg0, arg1);
10203 case BUILT_IN_STRCHR:
10204 case BUILT_IN_INDEX:
10205 return fold_builtin_strchr (arg0, arg1, type);
10207 case BUILT_IN_STRRCHR:
10208 case BUILT_IN_RINDEX:
10209 return fold_builtin_strrchr (arg0, arg1, type);
10211 case BUILT_IN_STRCPY:
10212 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10214 case BUILT_IN_STRCMP:
10215 return fold_builtin_strcmp (arg0, arg1);
10217 case BUILT_IN_STRPBRK:
10218 return fold_builtin_strpbrk (arg0, arg1, type);
10220 case BUILT_IN_EXPECT:
10221 return fold_builtin_expect (arg0, arg1);
10223 CASE_FLT_FN (BUILT_IN_POW):
10224 return fold_builtin_pow (fndecl, arg0, arg1, type);
10226 CASE_FLT_FN (BUILT_IN_POWI):
10227 return fold_builtin_powi (fndecl, arg0, arg1, type);
10229 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10230 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10232 CASE_FLT_FN (BUILT_IN_FMIN):
10233 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10235 CASE_FLT_FN (BUILT_IN_FMAX):
10236 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10238 case BUILT_IN_ISGREATER:
10239 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10240 case BUILT_IN_ISGREATEREQUAL:
10241 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10242 case BUILT_IN_ISLESS:
10243 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10244 case BUILT_IN_ISLESSEQUAL:
10245 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10246 case BUILT_IN_ISLESSGREATER:
10247 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10248 case BUILT_IN_ISUNORDERED:
10249 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10252 /* We do the folding for va_start in the expander. */
10253 case BUILT_IN_VA_START:
10256 case BUILT_IN_SPRINTF:
10257 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10259 case BUILT_IN_OBJECT_SIZE:
10260 return fold_builtin_object_size (arg0, arg1);
10262 case BUILT_IN_PRINTF:
10263 case BUILT_IN_PRINTF_UNLOCKED:
10264 case BUILT_IN_VPRINTF:
10265 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10267 case BUILT_IN_PRINTF_CHK:
10268 case BUILT_IN_VPRINTF_CHK:
10269 if (!validate_arg (arg0, INTEGER_TYPE)
10270 || TREE_SIDE_EFFECTS (arg0))
10273 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10276 case BUILT_IN_FPRINTF:
10277 case BUILT_IN_FPRINTF_UNLOCKED:
10278 case BUILT_IN_VFPRINTF:
10279 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10288 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10289 and ARG2. IGNORE is true if the result of the function call is ignored.
10290 This function returns NULL_TREE if no simplification was possible. */
10293 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10295 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10296 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10300 CASE_FLT_FN (BUILT_IN_SINCOS):
10301 return fold_builtin_sincos (arg0, arg1, arg2);
10303 CASE_FLT_FN (BUILT_IN_FMA):
10304 if (validate_arg (arg0, REAL_TYPE)
10305 && validate_arg(arg1, REAL_TYPE)
10306 && validate_arg(arg2, REAL_TYPE))
10307 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10310 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10311 CASE_FLT_FN (BUILT_IN_REMQUO):
10312 if (validate_arg (arg0, REAL_TYPE)
10313 && validate_arg(arg1, REAL_TYPE)
10314 && validate_arg(arg2, POINTER_TYPE))
10315 return do_mpfr_remquo (arg0, arg1, arg2);
10319 case BUILT_IN_MEMSET:
10320 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10322 case BUILT_IN_BCOPY:
10323 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10325 case BUILT_IN_MEMCPY:
10326 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10328 case BUILT_IN_MEMPCPY:
10329 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10331 case BUILT_IN_MEMMOVE:
10332 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10334 case BUILT_IN_STRNCAT:
10335 return fold_builtin_strncat (arg0, arg1, arg2);
10337 case BUILT_IN_STRNCPY:
10338 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10340 case BUILT_IN_STRNCMP:
10341 return fold_builtin_strncmp (arg0, arg1, arg2);
10343 case BUILT_IN_MEMCHR:
10344 return fold_builtin_memchr (arg0, arg1, arg2, type);
10346 case BUILT_IN_BCMP:
10347 case BUILT_IN_MEMCMP:
10348 return fold_builtin_memcmp (arg0, arg1, arg2);;
10350 case BUILT_IN_SPRINTF:
10351 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10353 case BUILT_IN_STRCPY_CHK:
10354 case BUILT_IN_STPCPY_CHK:
10355 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10358 case BUILT_IN_STRCAT_CHK:
10359 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10361 case BUILT_IN_PRINTF_CHK:
10362 case BUILT_IN_VPRINTF_CHK:
10363 if (!validate_arg (arg0, INTEGER_TYPE)
10364 || TREE_SIDE_EFFECTS (arg0))
10367 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10370 case BUILT_IN_FPRINTF:
10371 case BUILT_IN_FPRINTF_UNLOCKED:
10372 case BUILT_IN_VFPRINTF:
10373 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10375 case BUILT_IN_FPRINTF_CHK:
10376 case BUILT_IN_VFPRINTF_CHK:
10377 if (!validate_arg (arg1, INTEGER_TYPE)
10378 || TREE_SIDE_EFFECTS (arg1))
10381 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10390 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10391 ARG2, and ARG3. IGNORE is true if the result of the function call is
10392 ignored. This function returns NULL_TREE if no simplification was
10396 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10399 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10403 case BUILT_IN_MEMCPY_CHK:
10404 case BUILT_IN_MEMPCPY_CHK:
10405 case BUILT_IN_MEMMOVE_CHK:
10406 case BUILT_IN_MEMSET_CHK:
10407 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10409 DECL_FUNCTION_CODE (fndecl));
10411 case BUILT_IN_STRNCPY_CHK:
10412 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10414 case BUILT_IN_STRNCAT_CHK:
10415 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10417 case BUILT_IN_FPRINTF_CHK:
10418 case BUILT_IN_VFPRINTF_CHK:
10419 if (!validate_arg (arg1, INTEGER_TYPE)
10420 || TREE_SIDE_EFFECTS (arg1))
10423 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10433 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10434 arguments, where NARGS <= 4. IGNORE is true if the result of the
10435 function call is ignored. This function returns NULL_TREE if no
10436 simplification was possible. Note that this only folds builtins with
10437 fixed argument patterns. Foldings that do varargs-to-varargs
10438 transformations, or that match calls with more than 4 arguments,
10439 need to be handled with fold_builtin_varargs instead. */
10441 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10444 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10446 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10447 tree ret = NULL_TREE;
10449 /* Verify the number of arguments for type-generic and thus variadic
10453 case BUILT_IN_ISFINITE:
10454 case BUILT_IN_ISINF:
10455 case BUILT_IN_ISNAN:
10456 case BUILT_IN_ISNORMAL:
10459 error ("too few arguments to function %qs",
10460 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
10461 return error_mark_node;
10463 else if (nargs > 1)
10465 error ("too many arguments to function %qs",
10466 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
10467 return error_mark_node;
10471 case BUILT_IN_ISGREATER:
10472 case BUILT_IN_ISGREATEREQUAL:
10473 case BUILT_IN_ISLESS:
10474 case BUILT_IN_ISLESSEQUAL:
10475 case BUILT_IN_ISLESSGREATER:
10476 case BUILT_IN_ISUNORDERED:
10479 error ("too few arguments to function %qs",
10480 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
10481 return error_mark_node;
10483 else if (nargs > 2)
10485 error ("too many arguments to function %qs",
10486 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
10487 return error_mark_node;
10498 ret = fold_builtin_0 (fndecl, ignore);
10501 ret = fold_builtin_1 (fndecl, args[0], ignore);
10504 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10507 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10510 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10518 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10519 TREE_NO_WARNING (ret) = 1;
10525 /* Builtins with folding operations that operate on "..." arguments
10526 need special handling; we need to store the arguments in a convenient
10527 data structure before attempting any folding. Fortunately there are
10528 only a few builtins that fall into this category. FNDECL is the
10529 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10530 result of the function call is ignored. */
10533 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10535 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10536 tree ret = NULL_TREE;
10540 case BUILT_IN_SPRINTF_CHK:
10541 case BUILT_IN_VSPRINTF_CHK:
10542 ret = fold_builtin_sprintf_chk (exp, fcode);
10545 case BUILT_IN_SNPRINTF_CHK:
10546 case BUILT_IN_VSNPRINTF_CHK:
10547 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10554 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10555 TREE_NO_WARNING (ret) = 1;
10561 /* A wrapper function for builtin folding that prevents warnings for
10562 "statement without effect" and the like, caused by removing the
10563 call node earlier than the warning is generated. */
10566 fold_call_expr (tree exp, bool ignore)
10568 tree ret = NULL_TREE;
10569 tree fndecl = get_callee_fndecl (exp);
10571 && TREE_CODE (fndecl) == FUNCTION_DECL
10572 && DECL_BUILT_IN (fndecl)
10573 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10574 yet. Defer folding until we see all the arguments
10575 (after inlining). */
10576 && !CALL_EXPR_VA_ARG_PACK (exp))
10578 int nargs = call_expr_nargs (exp);
10580 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10581 instead last argument is __builtin_va_arg_pack (). Defer folding
10582 even in that case, until arguments are finalized. */
10583 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10585 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10587 && TREE_CODE (fndecl2) == FUNCTION_DECL
10588 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10589 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10593 /* FIXME: Don't use a list in this interface. */
10594 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10595 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10598 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10600 tree *args = CALL_EXPR_ARGP (exp);
10601 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10604 ret = fold_builtin_varargs (fndecl, exp, ignore);
10607 /* Propagate location information from original call to
10608 expansion of builtin. Otherwise things like
10609 maybe_emit_chk_warning, that operate on the expansion
10610 of a builtin, will use the wrong location information. */
10611 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10613 tree realret = ret;
10614 if (TREE_CODE (ret) == NOP_EXPR)
10615 realret = TREE_OPERAND (ret, 0);
10616 if (CAN_HAVE_LOCATION_P (realret)
10617 && !EXPR_HAS_LOCATION (realret))
10618 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10627 /* Conveniently construct a function call expression. FNDECL names the
10628 function to be called and ARGLIST is a TREE_LIST of arguments. */
10631 build_function_call_expr (tree fndecl, tree arglist)
10633 tree fntype = TREE_TYPE (fndecl);
10634 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10635 int n = list_length (arglist);
10636 tree *argarray = (tree *) alloca (n * sizeof (tree));
10639 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10640 argarray[i] = TREE_VALUE (arglist);
10641 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10644 /* Conveniently construct a function call expression. FNDECL names the
10645 function to be called, N is the number of arguments, and the "..."
10646 parameters are the argument expressions. */
10649 build_call_expr (tree fndecl, int n, ...)
10652 tree fntype = TREE_TYPE (fndecl);
10653 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10654 tree *argarray = (tree *) alloca (n * sizeof (tree));
10658 for (i = 0; i < n; i++)
10659 argarray[i] = va_arg (ap, tree);
10661 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10664 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10665 N arguments are passed in the array ARGARRAY. */
10668 fold_builtin_call_array (tree type,
10673 tree ret = NULL_TREE;
10677 if (TREE_CODE (fn) == ADDR_EXPR)
10679 tree fndecl = TREE_OPERAND (fn, 0);
10680 if (TREE_CODE (fndecl) == FUNCTION_DECL
10681 && DECL_BUILT_IN (fndecl))
10683 /* If last argument is __builtin_va_arg_pack (), arguments to this
10684 function are not finalized yet. Defer folding until they are. */
10685 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10687 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10689 && TREE_CODE (fndecl2) == FUNCTION_DECL
10690 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10691 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10692 return build_call_array (type, fn, n, argarray);
10694 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10696 tree arglist = NULL_TREE;
10697 for (i = n - 1; i >= 0; i--)
10698 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10699 ret = targetm.fold_builtin (fndecl, arglist, false);
10703 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10705 /* First try the transformations that don't require consing up
10707 ret = fold_builtin_n (fndecl, argarray, n, false);
10712 /* If we got this far, we need to build an exp. */
10713 exp = build_call_array (type, fn, n, argarray);
10714 ret = fold_builtin_varargs (fndecl, exp, false);
10715 return ret ? ret : exp;
10719 return build_call_array (type, fn, n, argarray);
10722 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10723 along with N new arguments specified as the "..." parameters. SKIP
10724 is the number of arguments in EXP to be omitted. This function is used
10725 to do varargs-to-varargs transformations. */
10728 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10730 int oldnargs = call_expr_nargs (exp);
10731 int nargs = oldnargs - skip + n;
10732 tree fntype = TREE_TYPE (fndecl);
10733 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10741 buffer = alloca (nargs * sizeof (tree));
10743 for (i = 0; i < n; i++)
10744 buffer[i] = va_arg (ap, tree);
10746 for (j = skip; j < oldnargs; j++, i++)
10747 buffer[i] = CALL_EXPR_ARG (exp, j);
10750 buffer = CALL_EXPR_ARGP (exp) + skip;
10752 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10755 /* Validate a single argument ARG against a tree code CODE representing
10759 validate_arg (const_tree arg, enum tree_code code)
10763 else if (code == POINTER_TYPE)
10764 return POINTER_TYPE_P (TREE_TYPE (arg));
10765 else if (code == INTEGER_TYPE)
10766 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10767 return code == TREE_CODE (TREE_TYPE (arg));
10770 /* This function validates the types of a function call argument list
10771 against a specified list of tree_codes. If the last specifier is a 0,
10772 that represents an ellipses, otherwise the last specifier must be a
10776 validate_arglist (const_tree callexpr, ...)
10778 enum tree_code code;
10781 const_call_expr_arg_iterator iter;
10784 va_start (ap, callexpr);
10785 init_const_call_expr_arg_iterator (callexpr, &iter);
10789 code = va_arg (ap, enum tree_code);
10793 /* This signifies an ellipses, any further arguments are all ok. */
10797 /* This signifies an endlink, if no arguments remain, return
10798 true, otherwise return false. */
10799 res = !more_const_call_expr_args_p (&iter);
10802 /* If no parameters remain or the parameter's code does not
10803 match the specified code, return false. Otherwise continue
10804 checking any remaining arguments. */
10805 arg = next_const_call_expr_arg (&iter);
10806 if (!validate_arg (arg, code))
10813 /* We need gotos here since we can only have one VA_CLOSE in a
10821 /* Default target-specific builtin expander that does nothing. */
10824 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10825 rtx target ATTRIBUTE_UNUSED,
10826 rtx subtarget ATTRIBUTE_UNUSED,
10827 enum machine_mode mode ATTRIBUTE_UNUSED,
10828 int ignore ATTRIBUTE_UNUSED)
10833 /* Returns true is EXP represents data that would potentially reside
10834 in a readonly section. */
10837 readonly_data_expr (tree exp)
10841 if (TREE_CODE (exp) != ADDR_EXPR)
10844 exp = get_base_address (TREE_OPERAND (exp, 0));
10848 /* Make sure we call decl_readonly_section only for trees it
10849 can handle (since it returns true for everything it doesn't
10851 if (TREE_CODE (exp) == STRING_CST
10852 || TREE_CODE (exp) == CONSTRUCTOR
10853 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10854 return decl_readonly_section (exp, 0);
10859 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10860 to the call, and TYPE is its return type.
10862 Return NULL_TREE if no simplification was possible, otherwise return the
10863 simplified form of the call as a tree.
10865 The simplified form may be a constant or other expression which
10866 computes the same value, but in a more efficient manner (including
10867 calls to other builtin functions).
10869 The call may contain arguments which need to be evaluated, but
10870 which are not useful to determine the result of the call. In
10871 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10872 COMPOUND_EXPR will be an argument which must be evaluated.
10873 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10874 COMPOUND_EXPR in the chain will contain the tree for the simplified
10875 form of the builtin function call. */
10878 fold_builtin_strstr (tree s1, tree s2, tree type)
10880 if (!validate_arg (s1, POINTER_TYPE)
10881 || !validate_arg (s2, POINTER_TYPE))
10886 const char *p1, *p2;
10888 p2 = c_getstr (s2);
10892 p1 = c_getstr (s1);
10895 const char *r = strstr (p1, p2);
10899 return build_int_cst (TREE_TYPE (s1), 0);
10901 /* Return an offset into the constant string argument. */
10902 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10903 s1, size_int (r - p1));
10904 return fold_convert (type, tem);
10907 /* The argument is const char *, and the result is char *, so we need
10908 a type conversion here to avoid a warning. */
10910 return fold_convert (type, s1);
10915 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10919 /* New argument list transforming strstr(s1, s2) to
10920 strchr(s1, s2[0]). */
10921 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10925 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10926 the call, and TYPE is its return type.
10928 Return NULL_TREE if no simplification was possible, otherwise return the
10929 simplified form of the call as a tree.
10931 The simplified form may be a constant or other expression which
10932 computes the same value, but in a more efficient manner (including
10933 calls to other builtin functions).
10935 The call may contain arguments which need to be evaluated, but
10936 which are not useful to determine the result of the call. In
10937 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10938 COMPOUND_EXPR will be an argument which must be evaluated.
10939 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10940 COMPOUND_EXPR in the chain will contain the tree for the simplified
10941 form of the builtin function call. */
10944 fold_builtin_strchr (tree s1, tree s2, tree type)
10946 if (!validate_arg (s1, POINTER_TYPE)
10947 || !validate_arg (s2, INTEGER_TYPE))
10953 if (TREE_CODE (s2) != INTEGER_CST)
10956 p1 = c_getstr (s1);
10963 if (target_char_cast (s2, &c))
10966 r = strchr (p1, c);
10969 return build_int_cst (TREE_TYPE (s1), 0);
10971 /* Return an offset into the constant string argument. */
10972 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10973 s1, size_int (r - p1));
10974 return fold_convert (type, tem);
10980 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10981 the call, and TYPE is its return type.
10983 Return NULL_TREE if no simplification was possible, otherwise return the
10984 simplified form of the call as a tree.
10986 The simplified form may be a constant or other expression which
10987 computes the same value, but in a more efficient manner (including
10988 calls to other builtin functions).
10990 The call may contain arguments which need to be evaluated, but
10991 which are not useful to determine the result of the call. In
10992 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10993 COMPOUND_EXPR will be an argument which must be evaluated.
10994 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10995 COMPOUND_EXPR in the chain will contain the tree for the simplified
10996 form of the builtin function call. */
10999 fold_builtin_strrchr (tree s1, tree s2, tree type)
11001 if (!validate_arg (s1, POINTER_TYPE)
11002 || !validate_arg (s2, INTEGER_TYPE))
11009 if (TREE_CODE (s2) != INTEGER_CST)
11012 p1 = c_getstr (s1);
11019 if (target_char_cast (s2, &c))
11022 r = strrchr (p1, c);
11025 return build_int_cst (TREE_TYPE (s1), 0);
11027 /* Return an offset into the constant string argument. */
11028 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11029 s1, size_int (r - p1));
11030 return fold_convert (type, tem);
11033 if (! integer_zerop (s2))
11036 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11040 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11041 return build_call_expr (fn, 2, s1, s2);
11045 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11046 to the call, and TYPE is its return type.
11048 Return NULL_TREE if no simplification was possible, otherwise return the
11049 simplified form of the call as a tree.
11051 The simplified form may be a constant or other expression which
11052 computes the same value, but in a more efficient manner (including
11053 calls to other builtin functions).
11055 The call may contain arguments which need to be evaluated, but
11056 which are not useful to determine the result of the call. In
11057 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11058 COMPOUND_EXPR will be an argument which must be evaluated.
11059 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11060 COMPOUND_EXPR in the chain will contain the tree for the simplified
11061 form of the builtin function call. */
11064 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11066 if (!validate_arg (s1, POINTER_TYPE)
11067 || !validate_arg (s2, POINTER_TYPE))
11072 const char *p1, *p2;
11074 p2 = c_getstr (s2);
11078 p1 = c_getstr (s1);
11081 const char *r = strpbrk (p1, p2);
11085 return build_int_cst (TREE_TYPE (s1), 0);
11087 /* Return an offset into the constant string argument. */
11088 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11089 s1, size_int (r - p1));
11090 return fold_convert (type, tem);
11094 /* strpbrk(x, "") == NULL.
11095 Evaluate and ignore s1 in case it had side-effects. */
11096 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11099 return NULL_TREE; /* Really call strpbrk. */
11101 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11105 /* New argument list transforming strpbrk(s1, s2) to
11106 strchr(s1, s2[0]). */
11107 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11111 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11114 Return NULL_TREE if no simplification was possible, otherwise return the
11115 simplified form of the call as a tree.
11117 The simplified form may be a constant or other expression which
11118 computes the same value, but in a more efficient manner (including
11119 calls to other builtin functions).
11121 The call may contain arguments which need to be evaluated, but
11122 which are not useful to determine the result of the call. In
11123 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11124 COMPOUND_EXPR will be an argument which must be evaluated.
11125 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11126 COMPOUND_EXPR in the chain will contain the tree for the simplified
11127 form of the builtin function call. */
11130 fold_builtin_strcat (tree dst, tree src)
11132 if (!validate_arg (dst, POINTER_TYPE)
11133 || !validate_arg (src, POINTER_TYPE))
11137 const char *p = c_getstr (src);
11139 /* If the string length is zero, return the dst parameter. */
11140 if (p && *p == '\0')
11147 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11148 arguments to the call.
11150 Return NULL_TREE if no simplification was possible, otherwise return the
11151 simplified form of the call as a tree.
11153 The simplified form may be a constant or other expression which
11154 computes the same value, but in a more efficient manner (including
11155 calls to other builtin functions).
11157 The call may contain arguments which need to be evaluated, but
11158 which are not useful to determine the result of the call. In
11159 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11160 COMPOUND_EXPR will be an argument which must be evaluated.
11161 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11162 COMPOUND_EXPR in the chain will contain the tree for the simplified
11163 form of the builtin function call. */
11166 fold_builtin_strncat (tree dst, tree src, tree len)
11168 if (!validate_arg (dst, POINTER_TYPE)
11169 || !validate_arg (src, POINTER_TYPE)
11170 || !validate_arg (len, INTEGER_TYPE))
11174 const char *p = c_getstr (src);
11176 /* If the requested length is zero, or the src parameter string
11177 length is zero, return the dst parameter. */
11178 if (integer_zerop (len) || (p && *p == '\0'))
11179 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11181 /* If the requested len is greater than or equal to the string
11182 length, call strcat. */
11183 if (TREE_CODE (len) == INTEGER_CST && p
11184 && compare_tree_int (len, strlen (p)) >= 0)
11186 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11188 /* If the replacement _DECL isn't initialized, don't do the
11193 return build_call_expr (fn, 2, dst, src);
11199 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11202 Return NULL_TREE if no simplification was possible, otherwise return the
11203 simplified form of the call as a tree.
11205 The simplified form may be a constant or other expression which
11206 computes the same value, but in a more efficient manner (including
11207 calls to other builtin functions).
11209 The call may contain arguments which need to be evaluated, but
11210 which are not useful to determine the result of the call. In
11211 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11212 COMPOUND_EXPR will be an argument which must be evaluated.
11213 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11214 COMPOUND_EXPR in the chain will contain the tree for the simplified
11215 form of the builtin function call. */
11218 fold_builtin_strspn (tree s1, tree s2)
11220 if (!validate_arg (s1, POINTER_TYPE)
11221 || !validate_arg (s2, POINTER_TYPE))
11225 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11227 /* If both arguments are constants, evaluate at compile-time. */
11230 const size_t r = strspn (p1, p2);
11231 return size_int (r);
11234 /* If either argument is "", return NULL_TREE. */
11235 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11236 /* Evaluate and ignore both arguments in case either one has
11238 return omit_two_operands (integer_type_node, integer_zero_node,
11244 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11247 Return NULL_TREE if no simplification was possible, otherwise return the
11248 simplified form of the call as a tree.
11250 The simplified form may be a constant or other expression which
11251 computes the same value, but in a more efficient manner (including
11252 calls to other builtin functions).
11254 The call may contain arguments which need to be evaluated, but
11255 which are not useful to determine the result of the call. In
11256 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11257 COMPOUND_EXPR will be an argument which must be evaluated.
11258 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11259 COMPOUND_EXPR in the chain will contain the tree for the simplified
11260 form of the builtin function call. */
11263 fold_builtin_strcspn (tree s1, tree s2)
11265 if (!validate_arg (s1, POINTER_TYPE)
11266 || !validate_arg (s2, POINTER_TYPE))
11270 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11272 /* If both arguments are constants, evaluate at compile-time. */
11275 const size_t r = strcspn (p1, p2);
11276 return size_int (r);
11279 /* If the first argument is "", return NULL_TREE. */
11280 if (p1 && *p1 == '\0')
11282 /* Evaluate and ignore argument s2 in case it has
11284 return omit_one_operand (integer_type_node,
11285 integer_zero_node, s2);
11288 /* If the second argument is "", return __builtin_strlen(s1). */
11289 if (p2 && *p2 == '\0')
11291 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11293 /* If the replacement _DECL isn't initialized, don't do the
11298 return build_call_expr (fn, 1, s1);
11304 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11305 to the call. IGNORE is true if the value returned
11306 by the builtin will be ignored. UNLOCKED is true is true if this
11307 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11308 the known length of the string. Return NULL_TREE if no simplification
11312 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11314 /* If we're using an unlocked function, assume the other unlocked
11315 functions exist explicitly. */
11316 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11317 : implicit_built_in_decls[BUILT_IN_FPUTC];
11318 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11319 : implicit_built_in_decls[BUILT_IN_FWRITE];
11321 /* If the return value is used, don't do the transformation. */
11325 /* Verify the arguments in the original call. */
11326 if (!validate_arg (arg0, POINTER_TYPE)
11327 || !validate_arg (arg1, POINTER_TYPE))
11331 len = c_strlen (arg0, 0);
11333 /* Get the length of the string passed to fputs. If the length
11334 can't be determined, punt. */
11336 || TREE_CODE (len) != INTEGER_CST)
11339 switch (compare_tree_int (len, 1))
11341 case -1: /* length is 0, delete the call entirely . */
11342 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11344 case 0: /* length is 1, call fputc. */
11346 const char *p = c_getstr (arg0);
11351 return build_call_expr (fn_fputc, 2,
11352 build_int_cst (NULL_TREE, p[0]), arg1);
11358 case 1: /* length is greater than 1, call fwrite. */
11360 /* If optimizing for size keep fputs. */
11363 /* New argument list transforming fputs(string, stream) to
11364 fwrite(string, 1, len, stream). */
11366 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11371 gcc_unreachable ();
11376 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11377 produced. False otherwise. This is done so that we don't output the error
11378 or warning twice or three times. */
11380 fold_builtin_next_arg (tree exp, bool va_start_p)
11382 tree fntype = TREE_TYPE (current_function_decl);
11383 int nargs = call_expr_nargs (exp);
11386 if (TYPE_ARG_TYPES (fntype) == 0
11387 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11388 == void_type_node))
11390 error ("%<va_start%> used in function with fixed args");
11396 if (va_start_p && (nargs != 2))
11398 error ("wrong number of arguments to function %<va_start%>");
11401 arg = CALL_EXPR_ARG (exp, 1);
11403 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11404 when we checked the arguments and if needed issued a warning. */
11409 /* Evidently an out of date version of <stdarg.h>; can't validate
11410 va_start's second argument, but can still work as intended. */
11411 warning (0, "%<__builtin_next_arg%> called without an argument");
11414 else if (nargs > 1)
11416 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11419 arg = CALL_EXPR_ARG (exp, 0);
11422 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11423 or __builtin_next_arg (0) the first time we see it, after checking
11424 the arguments and if needed issuing a warning. */
11425 if (!integer_zerop (arg))
11427 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11429 /* Strip off all nops for the sake of the comparison. This
11430 is not quite the same as STRIP_NOPS. It does more.
11431 We must also strip off INDIRECT_EXPR for C++ reference
11433 while (TREE_CODE (arg) == NOP_EXPR
11434 || TREE_CODE (arg) == CONVERT_EXPR
11435 || TREE_CODE (arg) == NON_LVALUE_EXPR
11436 || TREE_CODE (arg) == INDIRECT_REF)
11437 arg = TREE_OPERAND (arg, 0);
11438 if (arg != last_parm)
11440 /* FIXME: Sometimes with the tree optimizers we can get the
11441 not the last argument even though the user used the last
11442 argument. We just warn and set the arg to be the last
11443 argument so that we will get wrong-code because of
11445 warning (0, "second parameter of %<va_start%> not last named argument");
11447 /* We want to verify the second parameter just once before the tree
11448 optimizers are run and then avoid keeping it in the tree,
11449 as otherwise we could warn even for correct code like:
11450 void foo (int i, ...)
11451 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11453 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11455 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11461 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11462 ORIG may be null if this is a 2-argument call. We don't attempt to
11463 simplify calls with more than 3 arguments.
11465 Return NULL_TREE if no simplification was possible, otherwise return the
11466 simplified form of the call as a tree. If IGNORED is true, it means that
11467 the caller does not use the returned value of the function. */
11470 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11473 const char *fmt_str = NULL;
11475 /* Verify the required arguments in the original call. We deal with two
11476 types of sprintf() calls: 'sprintf (str, fmt)' and
11477 'sprintf (dest, "%s", orig)'. */
11478 if (!validate_arg (dest, POINTER_TYPE)
11479 || !validate_arg (fmt, POINTER_TYPE))
11481 if (orig && !validate_arg (orig, POINTER_TYPE))
11484 /* Check whether the format is a literal string constant. */
11485 fmt_str = c_getstr (fmt);
11486 if (fmt_str == NULL)
11490 retval = NULL_TREE;
11492 if (!init_target_chars ())
11495 /* If the format doesn't contain % args or %%, use strcpy. */
11496 if (strchr (fmt_str, target_percent) == NULL)
11498 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11503 /* Don't optimize sprintf (buf, "abc", ptr++). */
11507 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11508 'format' is known to contain no % formats. */
11509 call = build_call_expr (fn, 2, dest, fmt);
11511 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11514 /* If the format is "%s", use strcpy if the result isn't used. */
11515 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11518 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11523 /* Don't crash on sprintf (str1, "%s"). */
11527 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11530 retval = c_strlen (orig, 1);
11531 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11534 call = build_call_expr (fn, 2, dest, orig);
11537 if (call && retval)
11539 retval = fold_convert
11540 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11542 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11548 /* Expand a call EXP to __builtin_object_size. */
11551 expand_builtin_object_size (tree exp)
11554 int object_size_type;
11555 tree fndecl = get_callee_fndecl (exp);
11557 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11559 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11561 expand_builtin_trap ();
11565 ost = CALL_EXPR_ARG (exp, 1);
11568 if (TREE_CODE (ost) != INTEGER_CST
11569 || tree_int_cst_sgn (ost) < 0
11570 || compare_tree_int (ost, 3) > 0)
11572 error ("%Klast argument of %D is not integer constant between 0 and 3",
11574 expand_builtin_trap ();
11578 object_size_type = tree_low_cst (ost, 0);
11580 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11583 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11584 FCODE is the BUILT_IN_* to use.
11585 Return NULL_RTX if we failed; the caller should emit a normal call,
11586 otherwise try to get the result in TARGET, if convenient (and in
11587 mode MODE if that's convenient). */
11590 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11591 enum built_in_function fcode)
11593 tree dest, src, len, size;
11595 if (!validate_arglist (exp,
11597 fcode == BUILT_IN_MEMSET_CHK
11598 ? INTEGER_TYPE : POINTER_TYPE,
11599 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11602 dest = CALL_EXPR_ARG (exp, 0);
11603 src = CALL_EXPR_ARG (exp, 1);
11604 len = CALL_EXPR_ARG (exp, 2);
11605 size = CALL_EXPR_ARG (exp, 3);
11607 if (! host_integerp (size, 1))
11610 if (host_integerp (len, 1) || integer_all_onesp (size))
11614 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11616 warning (0, "%Kcall to %D will always overflow destination buffer",
11617 exp, get_callee_fndecl (exp));
11622 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11623 mem{cpy,pcpy,move,set} is available. */
11626 case BUILT_IN_MEMCPY_CHK:
11627 fn = built_in_decls[BUILT_IN_MEMCPY];
11629 case BUILT_IN_MEMPCPY_CHK:
11630 fn = built_in_decls[BUILT_IN_MEMPCPY];
11632 case BUILT_IN_MEMMOVE_CHK:
11633 fn = built_in_decls[BUILT_IN_MEMMOVE];
11635 case BUILT_IN_MEMSET_CHK:
11636 fn = built_in_decls[BUILT_IN_MEMSET];
11645 fn = build_call_expr (fn, 3, dest, src, len);
11646 STRIP_TYPE_NOPS (fn);
11647 while (TREE_CODE (fn) == COMPOUND_EXPR)
11649 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11651 fn = TREE_OPERAND (fn, 1);
11653 if (TREE_CODE (fn) == CALL_EXPR)
11654 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11655 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11657 else if (fcode == BUILT_IN_MEMSET_CHK)
11661 unsigned int dest_align
11662 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11664 /* If DEST is not a pointer type, call the normal function. */
11665 if (dest_align == 0)
11668 /* If SRC and DEST are the same (and not volatile), do nothing. */
11669 if (operand_equal_p (src, dest, 0))
11673 if (fcode != BUILT_IN_MEMPCPY_CHK)
11675 /* Evaluate and ignore LEN in case it has side-effects. */
11676 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11677 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11680 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11681 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11684 /* __memmove_chk special case. */
11685 if (fcode == BUILT_IN_MEMMOVE_CHK)
11687 unsigned int src_align
11688 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11690 if (src_align == 0)
11693 /* If src is categorized for a readonly section we can use
11694 normal __memcpy_chk. */
11695 if (readonly_data_expr (src))
11697 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11700 fn = build_call_expr (fn, 4, dest, src, len, size);
11701 STRIP_TYPE_NOPS (fn);
11702 while (TREE_CODE (fn) == COMPOUND_EXPR)
11704 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11706 fn = TREE_OPERAND (fn, 1);
11708 if (TREE_CODE (fn) == CALL_EXPR)
11709 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11710 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11717 /* Emit warning if a buffer overflow is detected at compile time. */
11720 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11727 case BUILT_IN_STRCPY_CHK:
11728 case BUILT_IN_STPCPY_CHK:
11729 /* For __strcat_chk the warning will be emitted only if overflowing
11730 by at least strlen (dest) + 1 bytes. */
11731 case BUILT_IN_STRCAT_CHK:
11732 len = CALL_EXPR_ARG (exp, 1);
11733 size = CALL_EXPR_ARG (exp, 2);
11736 case BUILT_IN_STRNCAT_CHK:
11737 case BUILT_IN_STRNCPY_CHK:
11738 len = CALL_EXPR_ARG (exp, 2);
11739 size = CALL_EXPR_ARG (exp, 3);
11741 case BUILT_IN_SNPRINTF_CHK:
11742 case BUILT_IN_VSNPRINTF_CHK:
11743 len = CALL_EXPR_ARG (exp, 1);
11744 size = CALL_EXPR_ARG (exp, 3);
11747 gcc_unreachable ();
11753 if (! host_integerp (size, 1) || integer_all_onesp (size))
11758 len = c_strlen (len, 1);
11759 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11762 else if (fcode == BUILT_IN_STRNCAT_CHK)
11764 tree src = CALL_EXPR_ARG (exp, 1);
11765 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11767 src = c_strlen (src, 1);
11768 if (! src || ! host_integerp (src, 1))
11770 warning (0, "%Kcall to %D might overflow destination buffer",
11771 exp, get_callee_fndecl (exp));
11774 else if (tree_int_cst_lt (src, size))
11777 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11780 warning (0, "%Kcall to %D will always overflow destination buffer",
11781 exp, get_callee_fndecl (exp));
11784 /* Emit warning if a buffer overflow is detected at compile time
11785 in __sprintf_chk/__vsprintf_chk calls. */
11788 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11790 tree dest, size, len, fmt, flag;
11791 const char *fmt_str;
11792 int nargs = call_expr_nargs (exp);
11794 /* Verify the required arguments in the original call. */
11798 dest = CALL_EXPR_ARG (exp, 0);
11799 flag = CALL_EXPR_ARG (exp, 1);
11800 size = CALL_EXPR_ARG (exp, 2);
11801 fmt = CALL_EXPR_ARG (exp, 3);
11803 if (! host_integerp (size, 1) || integer_all_onesp (size))
11806 /* Check whether the format is a literal string constant. */
11807 fmt_str = c_getstr (fmt);
11808 if (fmt_str == NULL)
11811 if (!init_target_chars ())
11814 /* If the format doesn't contain % args or %%, we know its size. */
11815 if (strchr (fmt_str, target_percent) == 0)
11816 len = build_int_cstu (size_type_node, strlen (fmt_str));
11817 /* If the format is "%s" and first ... argument is a string literal,
11819 else if (fcode == BUILT_IN_SPRINTF_CHK
11820 && strcmp (fmt_str, target_percent_s) == 0)
11826 arg = CALL_EXPR_ARG (exp, 4);
11827 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11830 len = c_strlen (arg, 1);
11831 if (!len || ! host_integerp (len, 1))
11837 if (! tree_int_cst_lt (len, size))
11839 warning (0, "%Kcall to %D will always overflow destination buffer",
11840 exp, get_callee_fndecl (exp));
11844 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11848 fold_builtin_object_size (tree ptr, tree ost)
11850 tree ret = NULL_TREE;
11851 int object_size_type;
11853 if (!validate_arg (ptr, POINTER_TYPE)
11854 || !validate_arg (ost, INTEGER_TYPE))
11859 if (TREE_CODE (ost) != INTEGER_CST
11860 || tree_int_cst_sgn (ost) < 0
11861 || compare_tree_int (ost, 3) > 0)
11864 object_size_type = tree_low_cst (ost, 0);
11866 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11867 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11868 and (size_t) 0 for types 2 and 3. */
11869 if (TREE_SIDE_EFFECTS (ptr))
11870 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11872 if (TREE_CODE (ptr) == ADDR_EXPR)
11873 ret = build_int_cstu (size_type_node,
11874 compute_builtin_object_size (ptr, object_size_type));
11876 else if (TREE_CODE (ptr) == SSA_NAME)
11878 unsigned HOST_WIDE_INT bytes;
11880 /* If object size is not known yet, delay folding until
11881 later. Maybe subsequent passes will help determining
11883 bytes = compute_builtin_object_size (ptr, object_size_type);
11884 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11886 ret = build_int_cstu (size_type_node, bytes);
11891 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11892 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11893 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11900 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11901 DEST, SRC, LEN, and SIZE are the arguments to the call.
11902 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11903 code of the builtin. If MAXLEN is not NULL, it is maximum length
11904 passed as third argument. */
11907 fold_builtin_memory_chk (tree fndecl,
11908 tree dest, tree src, tree len, tree size,
11909 tree maxlen, bool ignore,
11910 enum built_in_function fcode)
11914 if (!validate_arg (dest, POINTER_TYPE)
11915 || !validate_arg (src,
11916 (fcode == BUILT_IN_MEMSET_CHK
11917 ? INTEGER_TYPE : POINTER_TYPE))
11918 || !validate_arg (len, INTEGER_TYPE)
11919 || !validate_arg (size, INTEGER_TYPE))
11922 /* If SRC and DEST are the same (and not volatile), return DEST
11923 (resp. DEST+LEN for __mempcpy_chk). */
11924 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11926 if (fcode != BUILT_IN_MEMPCPY_CHK)
11927 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11930 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11931 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
11935 if (! host_integerp (size, 1))
11938 if (! integer_all_onesp (size))
11940 if (! host_integerp (len, 1))
11942 /* If LEN is not constant, try MAXLEN too.
11943 For MAXLEN only allow optimizing into non-_ocs function
11944 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11945 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11947 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11949 /* (void) __mempcpy_chk () can be optimized into
11950 (void) __memcpy_chk (). */
11951 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11955 return build_call_expr (fn, 4, dest, src, len, size);
11963 if (tree_int_cst_lt (size, maxlen))
11968 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11969 mem{cpy,pcpy,move,set} is available. */
11972 case BUILT_IN_MEMCPY_CHK:
11973 fn = built_in_decls[BUILT_IN_MEMCPY];
11975 case BUILT_IN_MEMPCPY_CHK:
11976 fn = built_in_decls[BUILT_IN_MEMPCPY];
11978 case BUILT_IN_MEMMOVE_CHK:
11979 fn = built_in_decls[BUILT_IN_MEMMOVE];
11981 case BUILT_IN_MEMSET_CHK:
11982 fn = built_in_decls[BUILT_IN_MEMSET];
11991 return build_call_expr (fn, 3, dest, src, len);
11994 /* Fold a call to the __st[rp]cpy_chk builtin.
11995 DEST, SRC, and SIZE are the arguments to the call.
11996 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
11997 code of the builtin. If MAXLEN is not NULL, it is maximum length of
11998 strings passed as second argument. */
12001 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12002 tree maxlen, bool ignore,
12003 enum built_in_function fcode)
12007 if (!validate_arg (dest, POINTER_TYPE)
12008 || !validate_arg (src, POINTER_TYPE)
12009 || !validate_arg (size, INTEGER_TYPE))
12012 /* If SRC and DEST are the same (and not volatile), return DEST. */
12013 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12014 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12016 if (! host_integerp (size, 1))
12019 if (! integer_all_onesp (size))
12021 len = c_strlen (src, 1);
12022 if (! len || ! host_integerp (len, 1))
12024 /* If LEN is not constant, try MAXLEN too.
12025 For MAXLEN only allow optimizing into non-_ocs function
12026 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12027 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12029 if (fcode == BUILT_IN_STPCPY_CHK)
12034 /* If return value of __stpcpy_chk is ignored,
12035 optimize into __strcpy_chk. */
12036 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12040 return build_call_expr (fn, 3, dest, src, size);
12043 if (! len || TREE_SIDE_EFFECTS (len))
12046 /* If c_strlen returned something, but not a constant,
12047 transform __strcpy_chk into __memcpy_chk. */
12048 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12052 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12053 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12054 build_call_expr (fn, 4,
12055 dest, src, len, size));
12061 if (! tree_int_cst_lt (maxlen, size))
12065 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12066 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12067 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12071 return build_call_expr (fn, 2, dest, src);
12074 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12075 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12076 length passed as third argument. */
12079 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12084 if (!validate_arg (dest, POINTER_TYPE)
12085 || !validate_arg (src, POINTER_TYPE)
12086 || !validate_arg (len, INTEGER_TYPE)
12087 || !validate_arg (size, INTEGER_TYPE))
12090 if (! host_integerp (size, 1))
12093 if (! integer_all_onesp (size))
12095 if (! host_integerp (len, 1))
12097 /* If LEN is not constant, try MAXLEN too.
12098 For MAXLEN only allow optimizing into non-_ocs function
12099 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12100 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12106 if (tree_int_cst_lt (size, maxlen))
12110 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12111 fn = built_in_decls[BUILT_IN_STRNCPY];
12115 return build_call_expr (fn, 3, dest, src, len);
12118 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12119 are the arguments to the call. */
12122 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12127 if (!validate_arg (dest, POINTER_TYPE)
12128 || !validate_arg (src, POINTER_TYPE)
12129 || !validate_arg (size, INTEGER_TYPE))
12132 p = c_getstr (src);
12133 /* If the SRC parameter is "", return DEST. */
12134 if (p && *p == '\0')
12135 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12137 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12140 /* If __builtin_strcat_chk is used, assume strcat is available. */
12141 fn = built_in_decls[BUILT_IN_STRCAT];
12145 return build_call_expr (fn, 2, dest, src);
12148 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12152 fold_builtin_strncat_chk (tree fndecl,
12153 tree dest, tree src, tree len, tree size)
12158 if (!validate_arg (dest, POINTER_TYPE)
12159 || !validate_arg (src, POINTER_TYPE)
12160 || !validate_arg (size, INTEGER_TYPE)
12161 || !validate_arg (size, INTEGER_TYPE))
12164 p = c_getstr (src);
12165 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12166 if (p && *p == '\0')
12167 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12168 else if (integer_zerop (len))
12169 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12171 if (! host_integerp (size, 1))
12174 if (! integer_all_onesp (size))
12176 tree src_len = c_strlen (src, 1);
12178 && host_integerp (src_len, 1)
12179 && host_integerp (len, 1)
12180 && ! tree_int_cst_lt (len, src_len))
12182 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12183 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12187 return build_call_expr (fn, 3, dest, src, size);
12192 /* If __builtin_strncat_chk is used, assume strncat is available. */
12193 fn = built_in_decls[BUILT_IN_STRNCAT];
12197 return build_call_expr (fn, 3, dest, src, len);
12200 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12201 a normal call should be emitted rather than expanding the function
12202 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12205 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12207 tree dest, size, len, fn, fmt, flag;
12208 const char *fmt_str;
12209 int nargs = call_expr_nargs (exp);
12211 /* Verify the required arguments in the original call. */
12214 dest = CALL_EXPR_ARG (exp, 0);
12215 if (!validate_arg (dest, POINTER_TYPE))
12217 flag = CALL_EXPR_ARG (exp, 1);
12218 if (!validate_arg (flag, INTEGER_TYPE))
12220 size = CALL_EXPR_ARG (exp, 2);
12221 if (!validate_arg (size, INTEGER_TYPE))
12223 fmt = CALL_EXPR_ARG (exp, 3);
12224 if (!validate_arg (fmt, POINTER_TYPE))
12227 if (! host_integerp (size, 1))
12232 if (!init_target_chars ())
12235 /* Check whether the format is a literal string constant. */
12236 fmt_str = c_getstr (fmt);
12237 if (fmt_str != NULL)
12239 /* If the format doesn't contain % args or %%, we know the size. */
12240 if (strchr (fmt_str, target_percent) == 0)
12242 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12243 len = build_int_cstu (size_type_node, strlen (fmt_str));
12245 /* If the format is "%s" and first ... argument is a string literal,
12246 we know the size too. */
12247 else if (fcode == BUILT_IN_SPRINTF_CHK
12248 && strcmp (fmt_str, target_percent_s) == 0)
12254 arg = CALL_EXPR_ARG (exp, 4);
12255 if (validate_arg (arg, POINTER_TYPE))
12257 len = c_strlen (arg, 1);
12258 if (! len || ! host_integerp (len, 1))
12265 if (! integer_all_onesp (size))
12267 if (! len || ! tree_int_cst_lt (len, size))
12271 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12272 or if format doesn't contain % chars or is "%s". */
12273 if (! integer_zerop (flag))
12275 if (fmt_str == NULL)
12277 if (strchr (fmt_str, target_percent) != NULL
12278 && strcmp (fmt_str, target_percent_s))
12282 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12283 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12284 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12288 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12291 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12292 a normal call should be emitted rather than expanding the function
12293 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12294 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12295 passed as second argument. */
12298 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12299 enum built_in_function fcode)
12301 tree dest, size, len, fn, fmt, flag;
12302 const char *fmt_str;
12304 /* Verify the required arguments in the original call. */
12305 if (call_expr_nargs (exp) < 5)
12307 dest = CALL_EXPR_ARG (exp, 0);
12308 if (!validate_arg (dest, POINTER_TYPE))
12310 len = CALL_EXPR_ARG (exp, 1);
12311 if (!validate_arg (len, INTEGER_TYPE))
12313 flag = CALL_EXPR_ARG (exp, 2);
12314 if (!validate_arg (flag, INTEGER_TYPE))
12316 size = CALL_EXPR_ARG (exp, 3);
12317 if (!validate_arg (size, INTEGER_TYPE))
12319 fmt = CALL_EXPR_ARG (exp, 4);
12320 if (!validate_arg (fmt, POINTER_TYPE))
12323 if (! host_integerp (size, 1))
12326 if (! integer_all_onesp (size))
12328 if (! host_integerp (len, 1))
12330 /* If LEN is not constant, try MAXLEN too.
12331 For MAXLEN only allow optimizing into non-_ocs function
12332 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12333 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12339 if (tree_int_cst_lt (size, maxlen))
12343 if (!init_target_chars ())
12346 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12347 or if format doesn't contain % chars or is "%s". */
12348 if (! integer_zerop (flag))
12350 fmt_str = c_getstr (fmt);
12351 if (fmt_str == NULL)
12353 if (strchr (fmt_str, target_percent) != NULL
12354 && strcmp (fmt_str, target_percent_s))
12358 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12360 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12361 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12365 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12368 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12369 FMT and ARG are the arguments to the call; we don't fold cases with
12370 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12372 Return NULL_TREE if no simplification was possible, otherwise return the
12373 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12374 code of the function to be simplified. */
12377 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12378 enum built_in_function fcode)
12380 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12381 const char *fmt_str = NULL;
12383 /* If the return value is used, don't do the transformation. */
12387 /* Verify the required arguments in the original call. */
12388 if (!validate_arg (fmt, POINTER_TYPE))
12391 /* Check whether the format is a literal string constant. */
12392 fmt_str = c_getstr (fmt);
12393 if (fmt_str == NULL)
12396 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12398 /* If we're using an unlocked function, assume the other
12399 unlocked functions exist explicitly. */
12400 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12401 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12405 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12406 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12409 if (!init_target_chars ())
12412 if (strcmp (fmt_str, target_percent_s) == 0
12413 || strchr (fmt_str, target_percent) == NULL)
12417 if (strcmp (fmt_str, target_percent_s) == 0)
12419 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12422 if (!arg || !validate_arg (arg, POINTER_TYPE))
12425 str = c_getstr (arg);
12431 /* The format specifier doesn't contain any '%' characters. */
12432 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12438 /* If the string was "", printf does nothing. */
12439 if (str[0] == '\0')
12440 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12442 /* If the string has length of 1, call putchar. */
12443 if (str[1] == '\0')
12445 /* Given printf("c"), (where c is any one character,)
12446 convert "c"[0] to an int and pass that to the replacement
12448 newarg = build_int_cst (NULL_TREE, str[0]);
12450 call = build_call_expr (fn_putchar, 1, newarg);
12454 /* If the string was "string\n", call puts("string"). */
12455 size_t len = strlen (str);
12456 if ((unsigned char)str[len - 1] == target_newline)
12458 /* Create a NUL-terminated string that's one char shorter
12459 than the original, stripping off the trailing '\n'. */
12460 char *newstr = alloca (len);
12461 memcpy (newstr, str, len - 1);
12462 newstr[len - 1] = 0;
12464 newarg = build_string_literal (len, newstr);
12466 call = build_call_expr (fn_puts, 1, newarg);
12469 /* We'd like to arrange to call fputs(string,stdout) here,
12470 but we need stdout and don't have a way to get it yet. */
12475 /* The other optimizations can be done only on the non-va_list variants. */
12476 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12479 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12480 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12482 if (!arg || !validate_arg (arg, POINTER_TYPE))
12485 call = build_call_expr (fn_puts, 1, arg);
12488 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12489 else if (strcmp (fmt_str, target_percent_c) == 0)
12491 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12494 call = build_call_expr (fn_putchar, 1, arg);
12500 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12503 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12504 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12505 more than 3 arguments, and ARG may be null in the 2-argument case.
12507 Return NULL_TREE if no simplification was possible, otherwise return the
12508 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12509 code of the function to be simplified. */
12512 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12513 enum built_in_function fcode)
12515 tree fn_fputc, fn_fputs, call = NULL_TREE;
12516 const char *fmt_str = NULL;
12518 /* If the return value is used, don't do the transformation. */
12522 /* Verify the required arguments in the original call. */
12523 if (!validate_arg (fp, POINTER_TYPE))
12525 if (!validate_arg (fmt, POINTER_TYPE))
12528 /* Check whether the format is a literal string constant. */
12529 fmt_str = c_getstr (fmt);
12530 if (fmt_str == NULL)
12533 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12535 /* If we're using an unlocked function, assume the other
12536 unlocked functions exist explicitly. */
12537 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12538 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12542 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12543 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12546 if (!init_target_chars ())
12549 /* If the format doesn't contain % args or %%, use strcpy. */
12550 if (strchr (fmt_str, target_percent) == NULL)
12552 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12556 /* If the format specifier was "", fprintf does nothing. */
12557 if (fmt_str[0] == '\0')
12559 /* If FP has side-effects, just wait until gimplification is
12561 if (TREE_SIDE_EFFECTS (fp))
12564 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12567 /* When "string" doesn't contain %, replace all cases of
12568 fprintf (fp, string) with fputs (string, fp). The fputs
12569 builtin will take care of special cases like length == 1. */
12571 call = build_call_expr (fn_fputs, 2, fmt, fp);
12574 /* The other optimizations can be done only on the non-va_list variants. */
12575 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12578 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12579 else if (strcmp (fmt_str, target_percent_s) == 0)
12581 if (!arg || !validate_arg (arg, POINTER_TYPE))
12584 call = build_call_expr (fn_fputs, 2, arg, fp);
12587 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12588 else if (strcmp (fmt_str, target_percent_c) == 0)
12590 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12593 call = build_call_expr (fn_fputc, 2, arg, fp);
12598 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12601 /* Initialize format string characters in the target charset. */
12604 init_target_chars (void)
12609 target_newline = lang_hooks.to_target_charset ('\n');
12610 target_percent = lang_hooks.to_target_charset ('%');
12611 target_c = lang_hooks.to_target_charset ('c');
12612 target_s = lang_hooks.to_target_charset ('s');
12613 if (target_newline == 0 || target_percent == 0 || target_c == 0
12617 target_percent_c[0] = target_percent;
12618 target_percent_c[1] = target_c;
12619 target_percent_c[2] = '\0';
12621 target_percent_s[0] = target_percent;
12622 target_percent_s[1] = target_s;
12623 target_percent_s[2] = '\0';
12625 target_percent_s_newline[0] = target_percent;
12626 target_percent_s_newline[1] = target_s;
12627 target_percent_s_newline[2] = target_newline;
12628 target_percent_s_newline[3] = '\0';
12635 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12636 and no overflow/underflow occurred. INEXACT is true if M was not
12637 exactly calculated. TYPE is the tree type for the result. This
12638 function assumes that you cleared the MPFR flags and then
12639 calculated M to see if anything subsequently set a flag prior to
12640 entering this function. Return NULL_TREE if any checks fail. */
12643 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12645 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12646 overflow/underflow occurred. If -frounding-math, proceed iff the
12647 result of calling FUNC was exact. */
12648 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12649 && (!flag_rounding_math || !inexact))
12651 REAL_VALUE_TYPE rr;
12653 real_from_mpfr (&rr, m, type, GMP_RNDN);
12654 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12655 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12656 but the mpft_t is not, then we underflowed in the
12658 if (real_isfinite (&rr)
12659 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12661 REAL_VALUE_TYPE rmode;
12663 real_convert (&rmode, TYPE_MODE (type), &rr);
12664 /* Proceed iff the specified mode can hold the value. */
12665 if (real_identical (&rmode, &rr))
12666 return build_real (type, rmode);
12672 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12673 FUNC on it and return the resulting value as a tree with type TYPE.
12674 If MIN and/or MAX are not NULL, then the supplied ARG must be
12675 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12676 acceptable values, otherwise they are not. The mpfr precision is
12677 set to the precision of TYPE. We assume that function FUNC returns
12678 zero if the result could be calculated exactly within the requested
12682 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12683 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12686 tree result = NULL_TREE;
12690 /* To proceed, MPFR must exactly represent the target floating point
12691 format, which only happens when the target base equals two. */
12692 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12693 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12695 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12697 if (real_isfinite (ra)
12698 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12699 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12701 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12705 mpfr_init2 (m, prec);
12706 mpfr_from_real (m, ra, GMP_RNDN);
12707 mpfr_clear_flags ();
12708 inexact = func (m, m, GMP_RNDN);
12709 result = do_mpfr_ckconv (m, type, inexact);
12717 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12718 FUNC on it and return the resulting value as a tree with type TYPE.
12719 The mpfr precision is set to the precision of TYPE. We assume that
12720 function FUNC returns zero if the result could be calculated
12721 exactly within the requested precision. */
12724 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12725 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12727 tree result = NULL_TREE;
12732 /* To proceed, MPFR must exactly represent the target floating point
12733 format, which only happens when the target base equals two. */
12734 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12735 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12736 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12738 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12739 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12741 if (real_isfinite (ra1) && real_isfinite (ra2))
12743 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12747 mpfr_inits2 (prec, m1, m2, NULL);
12748 mpfr_from_real (m1, ra1, GMP_RNDN);
12749 mpfr_from_real (m2, ra2, GMP_RNDN);
12750 mpfr_clear_flags ();
12751 inexact = func (m1, m1, m2, GMP_RNDN);
12752 result = do_mpfr_ckconv (m1, type, inexact);
12753 mpfr_clears (m1, m2, NULL);
12760 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12761 FUNC on it and return the resulting value as a tree with type TYPE.
12762 The mpfr precision is set to the precision of TYPE. We assume that
12763 function FUNC returns zero if the result could be calculated
12764 exactly within the requested precision. */
12767 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12768 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12770 tree result = NULL_TREE;
12776 /* To proceed, MPFR must exactly represent the target floating point
12777 format, which only happens when the target base equals two. */
12778 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12779 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12780 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12781 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12783 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12784 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12785 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12787 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12789 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12793 mpfr_inits2 (prec, m1, m2, m3, NULL);
12794 mpfr_from_real (m1, ra1, GMP_RNDN);
12795 mpfr_from_real (m2, ra2, GMP_RNDN);
12796 mpfr_from_real (m3, ra3, GMP_RNDN);
12797 mpfr_clear_flags ();
12798 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12799 result = do_mpfr_ckconv (m1, type, inexact);
12800 mpfr_clears (m1, m2, m3, NULL);
12807 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12808 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12809 If ARG_SINP and ARG_COSP are NULL then the result is returned
12810 as a complex value.
12811 The type is taken from the type of ARG and is used for setting the
12812 precision of the calculation and results. */
12815 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12817 tree const type = TREE_TYPE (arg);
12818 tree result = NULL_TREE;
12822 /* To proceed, MPFR must exactly represent the target floating point
12823 format, which only happens when the target base equals two. */
12824 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12825 && TREE_CODE (arg) == REAL_CST
12826 && !TREE_OVERFLOW (arg))
12828 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12830 if (real_isfinite (ra))
12832 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12833 tree result_s, result_c;
12837 mpfr_inits2 (prec, m, ms, mc, NULL);
12838 mpfr_from_real (m, ra, GMP_RNDN);
12839 mpfr_clear_flags ();
12840 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12841 result_s = do_mpfr_ckconv (ms, type, inexact);
12842 result_c = do_mpfr_ckconv (mc, type, inexact);
12843 mpfr_clears (m, ms, mc, NULL);
12844 if (result_s && result_c)
12846 /* If we are to return in a complex value do so. */
12847 if (!arg_sinp && !arg_cosp)
12848 return build_complex (build_complex_type (type),
12849 result_c, result_s);
12851 /* Dereference the sin/cos pointer arguments. */
12852 arg_sinp = build_fold_indirect_ref (arg_sinp);
12853 arg_cosp = build_fold_indirect_ref (arg_cosp);
12854 /* Proceed if valid pointer type were passed in. */
12855 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12856 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12858 /* Set the values. */
12859 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12861 TREE_SIDE_EFFECTS (result_s) = 1;
12862 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12864 TREE_SIDE_EFFECTS (result_c) = 1;
12865 /* Combine the assignments into a compound expr. */
12866 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12867 result_s, result_c));
12875 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
12876 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12877 two-argument mpfr order N Bessel function FUNC on them and return
12878 the resulting value as a tree with type TYPE. The mpfr precision
12879 is set to the precision of TYPE. We assume that function FUNC
12880 returns zero if the result could be calculated exactly within the
12881 requested precision. */
12883 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12884 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12885 const REAL_VALUE_TYPE *min, bool inclusive)
12887 tree result = NULL_TREE;
12892 /* To proceed, MPFR must exactly represent the target floating point
12893 format, which only happens when the target base equals two. */
12894 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12895 && host_integerp (arg1, 0)
12896 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12898 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12899 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12902 && real_isfinite (ra)
12903 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12905 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12909 mpfr_init2 (m, prec);
12910 mpfr_from_real (m, ra, GMP_RNDN);
12911 mpfr_clear_flags ();
12912 inexact = func (m, n, m, GMP_RNDN);
12913 result = do_mpfr_ckconv (m, type, inexact);
12921 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12922 the pointer *(ARG_QUO) and return the result. The type is taken
12923 from the type of ARG0 and is used for setting the precision of the
12924 calculation and results. */
12927 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12929 tree const type = TREE_TYPE (arg0);
12930 tree result = NULL_TREE;
12935 /* To proceed, MPFR must exactly represent the target floating point
12936 format, which only happens when the target base equals two. */
12937 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12938 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12939 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12941 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12942 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12944 if (real_isfinite (ra0) && real_isfinite (ra1))
12946 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12951 mpfr_inits2 (prec, m0, m1, NULL);
12952 mpfr_from_real (m0, ra0, GMP_RNDN);
12953 mpfr_from_real (m1, ra1, GMP_RNDN);
12954 mpfr_clear_flags ();
12955 mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
12956 /* Remquo is independent of the rounding mode, so pass
12957 inexact=0 to do_mpfr_ckconv(). */
12958 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12959 mpfr_clears (m0, m1, NULL);
12962 /* MPFR calculates quo in the host's long so it may
12963 return more bits in quo than the target int can hold
12964 if sizeof(host long) > sizeof(target int). This can
12965 happen even for native compilers in LP64 mode. In
12966 these cases, modulo the quo value with the largest
12967 number that the target int can hold while leaving one
12968 bit for the sign. */
12969 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12970 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12972 /* Dereference the quo pointer argument. */
12973 arg_quo = build_fold_indirect_ref (arg_quo);
12974 /* Proceed iff a valid pointer type was passed in. */
12975 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12977 /* Set the value. */
12978 tree result_quo = fold_build2 (MODIFY_EXPR,
12979 TREE_TYPE (arg_quo), arg_quo,
12980 build_int_cst (NULL, integer_quo));
12981 TREE_SIDE_EFFECTS (result_quo) = 1;
12982 /* Combine the quo assignment with the rem. */
12983 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12984 result_quo, result_rem));
12992 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12993 resulting value as a tree with type TYPE. The mpfr precision is
12994 set to the precision of TYPE. We assume that this mpfr function
12995 returns zero if the result could be calculated exactly within the
12996 requested precision. In addition, the integer pointer represented
12997 by ARG_SG will be dereferenced and set to the appropriate signgam
13001 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13003 tree result = NULL_TREE;
13007 /* To proceed, MPFR must exactly represent the target floating point
13008 format, which only happens when the target base equals two. Also
13009 verify ARG is a constant and that ARG_SG is an int pointer. */
13010 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13011 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13012 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13013 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13015 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13017 /* In addition to NaN and Inf, the argument cannot be zero or a
13018 negative integer. */
13019 if (real_isfinite (ra)
13020 && ra->cl != rvc_zero
13021 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13023 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
13028 mpfr_init2 (m, prec);
13029 mpfr_from_real (m, ra, GMP_RNDN);
13030 mpfr_clear_flags ();
13031 inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
13032 result_lg = do_mpfr_ckconv (m, type, inexact);
13038 /* Dereference the arg_sg pointer argument. */
13039 arg_sg = build_fold_indirect_ref (arg_sg);
13040 /* Assign the signgam value into *arg_sg. */
13041 result_sg = fold_build2 (MODIFY_EXPR,
13042 TREE_TYPE (arg_sg), arg_sg,
13043 build_int_cst (NULL, sg));
13044 TREE_SIDE_EFFECTS (result_sg) = 1;
13045 /* Combine the signgam assignment with the lgamma result. */
13046 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13047 result_sg, result_lg));