1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names[(int) END_BUILTINS] =
65 #include "builtins.def"
69 /* Setup an array of _DECL trees, make sure each element is
70 initialized to NULL_TREE. */
71 tree built_in_decls[(int) END_BUILTINS];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 It may be NULL_TREE when this is invalid (for instance runtime is not
74 required to implement the function call in all cases). */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
129 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
132 static rtx expand_builtin_bzero (tree);
133 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_alloca (tree, rtx);
139 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static rtx expand_builtin_fputs (tree, rtx, bool);
142 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
143 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_expect (tree, tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (tree);
151 static tree fold_builtin_inf (tree, int);
152 static tree fold_builtin_nan (tree, tree, int);
153 static tree rewrite_call_expr (tree, int, tree, int, ...);
154 static bool validate_arg (const_tree, enum tree_code code);
155 static bool integer_valued_real_p (tree);
156 static tree fold_trunc_transparent_mathfn (tree, tree);
157 static bool readonly_data_expr (tree);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_signbit (tree, rtx);
160 static tree fold_builtin_sqrt (tree, tree);
161 static tree fold_builtin_cbrt (tree, tree);
162 static tree fold_builtin_pow (tree, tree, tree, tree);
163 static tree fold_builtin_powi (tree, tree, tree, tree);
164 static tree fold_builtin_cos (tree, tree, tree);
165 static tree fold_builtin_cosh (tree, tree, tree);
166 static tree fold_builtin_tan (tree, tree);
167 static tree fold_builtin_trunc (tree, tree);
168 static tree fold_builtin_floor (tree, tree);
169 static tree fold_builtin_ceil (tree, tree);
170 static tree fold_builtin_round (tree, tree);
171 static tree fold_builtin_int_roundingfn (tree, tree);
172 static tree fold_builtin_bitop (tree, tree);
173 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
174 static tree fold_builtin_strchr (tree, tree, tree);
175 static tree fold_builtin_memchr (tree, tree, tree, tree);
176 static tree fold_builtin_memcmp (tree, tree, tree);
177 static tree fold_builtin_strcmp (tree, tree);
178 static tree fold_builtin_strncmp (tree, tree, tree);
179 static tree fold_builtin_signbit (tree, tree);
180 static tree fold_builtin_copysign (tree, tree, tree, tree);
181 static tree fold_builtin_isascii (tree);
182 static tree fold_builtin_toascii (tree);
183 static tree fold_builtin_isdigit (tree);
184 static tree fold_builtin_fabs (tree, tree);
185 static tree fold_builtin_abs (tree, tree);
186 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
188 static tree fold_builtin_n (tree, tree *, int, bool);
189 static tree fold_builtin_0 (tree, bool);
190 static tree fold_builtin_1 (tree, tree, bool);
191 static tree fold_builtin_2 (tree, tree, tree, bool);
192 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
193 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
194 static tree fold_builtin_varargs (tree, tree, bool);
196 static tree fold_builtin_strpbrk (tree, tree, tree);
197 static tree fold_builtin_strstr (tree, tree, tree);
198 static tree fold_builtin_strrchr (tree, tree, tree);
199 static tree fold_builtin_strcat (tree, tree);
200 static tree fold_builtin_strncat (tree, tree, tree);
201 static tree fold_builtin_strspn (tree, tree);
202 static tree fold_builtin_strcspn (tree, tree);
203 static tree fold_builtin_sprintf (tree, tree, tree, int);
205 static rtx expand_builtin_object_size (tree);
206 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
207 enum built_in_function);
208 static void maybe_emit_chk_warning (tree, enum built_in_function);
209 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
210 static void maybe_emit_free_warning (tree);
211 static tree fold_builtin_object_size (tree, tree);
212 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
213 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
214 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
215 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
216 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
217 enum built_in_function);
218 static bool init_target_chars (void);
220 static unsigned HOST_WIDE_INT target_newline;
221 static unsigned HOST_WIDE_INT target_percent;
222 static unsigned HOST_WIDE_INT target_c;
223 static unsigned HOST_WIDE_INT target_s;
224 static char target_percent_c[3];
225 static char target_percent_s[3];
226 static char target_percent_s_newline[4];
227 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
228 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
229 static tree do_mpfr_arg2 (tree, tree, tree,
230 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
231 static tree do_mpfr_arg3 (tree, tree, tree, tree,
232 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
233 static tree do_mpfr_sincos (tree, tree, tree);
234 static tree do_mpfr_bessel_n (tree, tree, tree,
235 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
236 const REAL_VALUE_TYPE *, bool);
237 static tree do_mpfr_remquo (tree, tree, tree);
238 static tree do_mpfr_lgamma_r (tree, tree, tree);
240 /* Return true if NODE should be considered for inline expansion regardless
241 of the optimization level. This means whenever a function is invoked with
242 its "internal" name, which normally contains the prefix "__builtin". */
244 static bool called_as_built_in (tree node)
246 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
247 if (strncmp (name, "__builtin_", 10) == 0)
249 if (strncmp (name, "__sync_", 7) == 0)
254 /* Return the alignment in bits of EXP, an object.
255 Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
256 guessed alignment e.g. from type alignment. */
259 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
264 if (handled_component_p (exp))
266 HOST_WIDE_INT bitsize, bitpos;
268 enum machine_mode mode;
269 int unsignedp, volatilep;
271 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
272 &mode, &unsignedp, &volatilep, true);
274 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
279 if (TREE_CODE (offset) == PLUS_EXPR)
281 next_offset = TREE_OPERAND (offset, 0);
282 offset = TREE_OPERAND (offset, 1);
286 if (host_integerp (offset, 1))
288 /* Any overflow in calculating offset_bits won't change
291 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
294 inner = MIN (inner, (offset_bits & -offset_bits));
296 else if (TREE_CODE (offset) == MULT_EXPR
297 && host_integerp (TREE_OPERAND (offset, 1), 1))
299 /* Any overflow in calculating offset_factor won't change
301 unsigned offset_factor
302 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
306 inner = MIN (inner, (offset_factor & -offset_factor));
310 inner = MIN (inner, BITS_PER_UNIT);
313 offset = next_offset;
317 align = MIN (inner, DECL_ALIGN (exp));
318 #ifdef CONSTANT_ALIGNMENT
319 else if (CONSTANT_CLASS_P (exp))
320 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
322 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
323 || TREE_CODE (exp) == INDIRECT_REF)
324 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
326 align = MIN (align, inner);
327 return MIN (align, max_align);
330 /* Return the alignment in bits of EXP, a pointer valued expression.
331 But don't return more than MAX_ALIGN no matter what.
332 The alignment returned is, by default, the alignment of the thing that
333 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
335 Otherwise, look at the expression to see if we can do better, i.e., if the
336 expression is actually pointing at an object whose alignment is tighter. */
339 get_pointer_alignment (tree exp, unsigned int max_align)
341 unsigned int align, inner;
343 /* We rely on TER to compute accurate alignment information. */
344 if (!(optimize && flag_tree_ter))
347 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
350 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
351 align = MIN (align, max_align);
355 switch (TREE_CODE (exp))
358 exp = TREE_OPERAND (exp, 0);
359 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
362 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
363 align = MIN (inner, max_align);
366 case POINTER_PLUS_EXPR:
367 /* If sum of pointer + int, restrict our maximum alignment to that
368 imposed by the integer. If not, we can't do any better than
370 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
373 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
374 & (max_align / BITS_PER_UNIT - 1))
378 exp = TREE_OPERAND (exp, 0);
382 /* See what we are pointing at and look at its alignment. */
383 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
391 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
392 way, because it could contain a zero byte in the middle.
393 TREE_STRING_LENGTH is the size of the character array, not the string.
395 ONLY_VALUE should be nonzero if the result is not going to be emitted
396 into the instruction stream and zero if it is going to be expanded.
397 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
398 is returned, otherwise NULL, since
399 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
400 evaluate the side-effects.
402 The value returned is of type `ssizetype'.
404 Unfortunately, string_constant can't access the values of const char
405 arrays with initializers, so neither can we do so here. */
408 c_strlen (tree src, int only_value)
411 HOST_WIDE_INT offset;
416 if (TREE_CODE (src) == COND_EXPR
417 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
421 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
422 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
423 if (tree_int_cst_equal (len1, len2))
427 if (TREE_CODE (src) == COMPOUND_EXPR
428 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
429 return c_strlen (TREE_OPERAND (src, 1), only_value);
431 src = string_constant (src, &offset_node);
435 max = TREE_STRING_LENGTH (src) - 1;
436 ptr = TREE_STRING_POINTER (src);
438 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
440 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
441 compute the offset to the following null if we don't know where to
442 start searching for it. */
445 for (i = 0; i < max; i++)
449 /* We don't know the starting offset, but we do know that the string
450 has no internal zero bytes. We can assume that the offset falls
451 within the bounds of the string; otherwise, the programmer deserves
452 what he gets. Subtract the offset from the length of the string,
453 and return that. This would perhaps not be valid if we were dealing
454 with named arrays in addition to literal string constants. */
456 return size_diffop (size_int (max), offset_node);
459 /* We have a known offset into the string. Start searching there for
460 a null character if we can represent it as a single HOST_WIDE_INT. */
461 if (offset_node == 0)
463 else if (! host_integerp (offset_node, 0))
466 offset = tree_low_cst (offset_node, 0);
468 /* If the offset is known to be out of bounds, warn, and call strlen at
470 if (offset < 0 || offset > max)
472 /* Suppress multiple warnings for propagated constant strings. */
473 if (! TREE_NO_WARNING (src))
475 warning (0, "offset outside bounds of constant string");
476 TREE_NO_WARNING (src) = 1;
481 /* Use strlen to search for the first zero byte. Since any strings
482 constructed with build_string will have nulls appended, we win even
483 if we get handed something like (char[4])"abcd".
485 Since OFFSET is our starting index into the string, no further
486 calculation is needed. */
487 return ssize_int (strlen (ptr + offset));
490 /* Return a char pointer for a C string if it is a string constant
491 or sum of string constant and integer constant. */
498 src = string_constant (src, &offset_node);
502 if (offset_node == 0)
503 return TREE_STRING_POINTER (src);
504 else if (!host_integerp (offset_node, 1)
505 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
508 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
511 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
512 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
515 c_readstr (const char *str, enum machine_mode mode)
521 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
526 for (i = 0; i < GET_MODE_SIZE (mode); i++)
529 if (WORDS_BIG_ENDIAN)
530 j = GET_MODE_SIZE (mode) - i - 1;
531 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
532 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
533 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
535 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
538 ch = (unsigned char) str[i];
539 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
541 return immed_double_const (c[0], c[1], mode);
544 /* Cast a target constant CST to target CHAR and if that value fits into
545 host char type, return zero and put that value into variable pointed to by
549 target_char_cast (tree cst, char *p)
551 unsigned HOST_WIDE_INT val, hostval;
553 if (!host_integerp (cst, 1)
554 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
557 val = tree_low_cst (cst, 1);
558 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
559 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
562 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
563 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
572 /* Similar to save_expr, but assumes that arbitrary code is not executed
573 in between the multiple evaluations. In particular, we assume that a
574 non-addressable local variable will not be modified. */
577 builtin_save_expr (tree exp)
579 if (TREE_ADDRESSABLE (exp) == 0
580 && (TREE_CODE (exp) == PARM_DECL
581 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
584 return save_expr (exp);
587 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
588 times to get the address of either a higher stack frame, or a return
589 address located within it (depending on FNDECL_CODE). */
592 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
596 #ifdef INITIAL_FRAME_ADDRESS_RTX
597 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
601 /* For a zero count with __builtin_return_address, we don't care what
602 frame address we return, because target-specific definitions will
603 override us. Therefore frame pointer elimination is OK, and using
604 the soft frame pointer is OK.
606 For a nonzero count, or a zero count with __builtin_frame_address,
607 we require a stable offset from the current frame pointer to the
608 previous one, so we must use the hard frame pointer, and
609 we must disable frame pointer elimination. */
610 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
611 tem = frame_pointer_rtx;
614 tem = hard_frame_pointer_rtx;
616 /* Tell reload not to eliminate the frame pointer. */
617 crtl->accesses_prior_frames = 1;
621 /* Some machines need special handling before we can access
622 arbitrary frames. For example, on the SPARC, we must first flush
623 all register windows to the stack. */
624 #ifdef SETUP_FRAME_ADDRESSES
626 SETUP_FRAME_ADDRESSES ();
629 /* On the SPARC, the return address is not in the frame, it is in a
630 register. There is no way to access it off of the current frame
631 pointer, but it can be accessed off the previous frame pointer by
632 reading the value from the register window save area. */
633 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
634 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
638 /* Scan back COUNT frames to the specified frame. */
639 for (i = 0; i < count; i++)
641 /* Assume the dynamic chain pointer is in the word that the
642 frame address points to, unless otherwise specified. */
643 #ifdef DYNAMIC_CHAIN_ADDRESS
644 tem = DYNAMIC_CHAIN_ADDRESS (tem);
646 tem = memory_address (Pmode, tem);
647 tem = gen_frame_mem (Pmode, tem);
648 tem = copy_to_reg (tem);
651 /* For __builtin_frame_address, return what we've got. But, on
652 the SPARC for example, we may have to add a bias. */
653 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
654 #ifdef FRAME_ADDR_RTX
655 return FRAME_ADDR_RTX (tem);
660 /* For __builtin_return_address, get the return address from that frame. */
661 #ifdef RETURN_ADDR_RTX
662 tem = RETURN_ADDR_RTX (count, tem);
664 tem = memory_address (Pmode,
665 plus_constant (tem, GET_MODE_SIZE (Pmode)));
666 tem = gen_frame_mem (Pmode, tem);
671 /* Alias set used for setjmp buffer. */
672 static alias_set_type setjmp_alias_set = -1;
674 /* Construct the leading half of a __builtin_setjmp call. Control will
675 return to RECEIVER_LABEL. This is also called directly by the SJLJ
676 exception handling code. */
679 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
681 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
685 if (setjmp_alias_set == -1)
686 setjmp_alias_set = new_alias_set ();
688 buf_addr = convert_memory_address (Pmode, buf_addr);
690 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
692 /* We store the frame pointer and the address of receiver_label in
693 the buffer and use the rest of it for the stack save area, which
694 is machine-dependent. */
696 mem = gen_rtx_MEM (Pmode, buf_addr);
697 set_mem_alias_set (mem, setjmp_alias_set);
698 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
700 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
701 set_mem_alias_set (mem, setjmp_alias_set);
703 emit_move_insn (validize_mem (mem),
704 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
706 stack_save = gen_rtx_MEM (sa_mode,
707 plus_constant (buf_addr,
708 2 * GET_MODE_SIZE (Pmode)));
709 set_mem_alias_set (stack_save, setjmp_alias_set);
710 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
712 /* If there is further processing to do, do it. */
713 #ifdef HAVE_builtin_setjmp_setup
714 if (HAVE_builtin_setjmp_setup)
715 emit_insn (gen_builtin_setjmp_setup (buf_addr));
718 /* Tell optimize_save_area_alloca that extra work is going to
719 need to go on during alloca. */
720 cfun->calls_setjmp = 1;
722 /* We have a nonlocal label. */
723 cfun->has_nonlocal_label = 1;
726 /* Construct the trailing part of a __builtin_setjmp call. This is
727 also called directly by the SJLJ exception handling code. */
730 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
732 /* Clobber the FP when we get here, so we have to make sure it's
733 marked as used by this function. */
734 emit_use (hard_frame_pointer_rtx);
736 /* Mark the static chain as clobbered here so life information
737 doesn't get messed up for it. */
738 emit_clobber (static_chain_rtx);
740 /* Now put in the code to restore the frame pointer, and argument
741 pointer, if needed. */
742 #ifdef HAVE_nonlocal_goto
743 if (! HAVE_nonlocal_goto)
746 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
747 /* This might change the hard frame pointer in ways that aren't
748 apparent to early optimization passes, so force a clobber. */
749 emit_clobber (hard_frame_pointer_rtx);
752 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
753 if (fixed_regs[ARG_POINTER_REGNUM])
755 #ifdef ELIMINABLE_REGS
757 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
759 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
760 if (elim_regs[i].from == ARG_POINTER_REGNUM
761 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
764 if (i == ARRAY_SIZE (elim_regs))
767 /* Now restore our arg pointer from the address at which it
768 was saved in our stack frame. */
769 emit_move_insn (crtl->args.internal_arg_pointer,
770 copy_to_reg (get_arg_pointer_save_area ()));
775 #ifdef HAVE_builtin_setjmp_receiver
776 if (HAVE_builtin_setjmp_receiver)
777 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
780 #ifdef HAVE_nonlocal_goto_receiver
781 if (HAVE_nonlocal_goto_receiver)
782 emit_insn (gen_nonlocal_goto_receiver ());
787 /* We must not allow the code we just generated to be reordered by
788 scheduling. Specifically, the update of the frame pointer must
789 happen immediately, not later. */
790 emit_insn (gen_blockage ());
793 /* __builtin_longjmp is passed a pointer to an array of five words (not
794 all will be used on all machines). It operates similarly to the C
795 library function of the same name, but is more efficient. Much of
796 the code below is copied from the handling of non-local gotos. */
799 expand_builtin_longjmp (rtx buf_addr, rtx value)
801 rtx fp, lab, stack, insn, last;
802 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
804 /* DRAP is needed for stack realign if longjmp is expanded to current
806 if (SUPPORTS_STACK_ALIGNMENT)
807 crtl->need_drap = true;
809 if (setjmp_alias_set == -1)
810 setjmp_alias_set = new_alias_set ();
812 buf_addr = convert_memory_address (Pmode, buf_addr);
814 buf_addr = force_reg (Pmode, buf_addr);
816 /* We used to store value in static_chain_rtx, but that fails if pointers
817 are smaller than integers. We instead require that the user must pass
818 a second argument of 1, because that is what builtin_setjmp will
819 return. This also makes EH slightly more efficient, since we are no
820 longer copying around a value that we don't care about. */
821 gcc_assert (value == const1_rtx);
823 last = get_last_insn ();
824 #ifdef HAVE_builtin_longjmp
825 if (HAVE_builtin_longjmp)
826 emit_insn (gen_builtin_longjmp (buf_addr));
830 fp = gen_rtx_MEM (Pmode, buf_addr);
831 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
832 GET_MODE_SIZE (Pmode)));
834 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
835 2 * GET_MODE_SIZE (Pmode)));
836 set_mem_alias_set (fp, setjmp_alias_set);
837 set_mem_alias_set (lab, setjmp_alias_set);
838 set_mem_alias_set (stack, setjmp_alias_set);
840 /* Pick up FP, label, and SP from the block and jump. This code is
841 from expand_goto in stmt.c; see there for detailed comments. */
842 #ifdef HAVE_nonlocal_goto
843 if (HAVE_nonlocal_goto)
844 /* We have to pass a value to the nonlocal_goto pattern that will
845 get copied into the static_chain pointer, but it does not matter
846 what that value is, because builtin_setjmp does not use it. */
847 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
851 lab = copy_to_reg (lab);
853 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
854 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
856 emit_move_insn (hard_frame_pointer_rtx, fp);
857 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
859 emit_use (hard_frame_pointer_rtx);
860 emit_use (stack_pointer_rtx);
861 emit_indirect_jump (lab);
865 /* Search backwards and mark the jump insn as a non-local goto.
866 Note that this precludes the use of __builtin_longjmp to a
867 __builtin_setjmp target in the same function. However, we've
868 already cautioned the user that these functions are for
869 internal exception handling use only. */
870 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
872 gcc_assert (insn != last);
876 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
879 else if (CALL_P (insn))
884 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
885 and the address of the save area. */
888 expand_builtin_nonlocal_goto (tree exp)
890 tree t_label, t_save_area;
891 rtx r_label, r_save_area, r_fp, r_sp, insn;
893 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
896 t_label = CALL_EXPR_ARG (exp, 0);
897 t_save_area = CALL_EXPR_ARG (exp, 1);
899 r_label = expand_normal (t_label);
900 r_label = convert_memory_address (Pmode, r_label);
901 r_save_area = expand_normal (t_save_area);
902 r_save_area = convert_memory_address (Pmode, r_save_area);
903 /* Copy the address of the save location to a register just in case it was based
904 on the frame pointer. */
905 r_save_area = copy_to_reg (r_save_area);
906 r_fp = gen_rtx_MEM (Pmode, r_save_area);
907 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
908 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
910 crtl->has_nonlocal_goto = 1;
912 #ifdef HAVE_nonlocal_goto
913 /* ??? We no longer need to pass the static chain value, afaik. */
914 if (HAVE_nonlocal_goto)
915 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
919 r_label = copy_to_reg (r_label);
921 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
922 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
924 /* Restore frame pointer for containing function.
925 This sets the actual hard register used for the frame pointer
926 to the location of the function's incoming static chain info.
927 The non-local goto handler will then adjust it to contain the
928 proper value and reload the argument pointer, if needed. */
929 emit_move_insn (hard_frame_pointer_rtx, r_fp);
930 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
932 /* USE of hard_frame_pointer_rtx added for consistency;
933 not clear if really needed. */
934 emit_use (hard_frame_pointer_rtx);
935 emit_use (stack_pointer_rtx);
937 /* If the architecture is using a GP register, we must
938 conservatively assume that the target function makes use of it.
939 The prologue of functions with nonlocal gotos must therefore
940 initialize the GP register to the appropriate value, and we
941 must then make sure that this value is live at the point
942 of the jump. (Note that this doesn't necessarily apply
943 to targets with a nonlocal_goto pattern; they are free
944 to implement it in their own way. Note also that this is
945 a no-op if the GP register is a global invariant.) */
946 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
947 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
948 emit_use (pic_offset_table_rtx);
950 emit_indirect_jump (r_label);
953 /* Search backwards to the jump insn and mark it as a
955 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
959 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
962 else if (CALL_P (insn))
969 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
970 (not all will be used on all machines) that was passed to __builtin_setjmp.
971 It updates the stack pointer in that block to correspond to the current
975 expand_builtin_update_setjmp_buf (rtx buf_addr)
977 enum machine_mode sa_mode = Pmode;
981 #ifdef HAVE_save_stack_nonlocal
982 if (HAVE_save_stack_nonlocal)
983 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
985 #ifdef STACK_SAVEAREA_MODE
986 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
990 = gen_rtx_MEM (sa_mode,
993 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
997 emit_insn (gen_setjmp ());
1000 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1003 /* Expand a call to __builtin_prefetch. For a target that does not support
1004 data prefetch, evaluate the memory address argument in case it has side
1008 expand_builtin_prefetch (tree exp)
1010 tree arg0, arg1, arg2;
1014 if (!validate_arglist (exp, POINTER_TYPE, 0))
1017 arg0 = CALL_EXPR_ARG (exp, 0);
1019 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1020 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1022 nargs = call_expr_nargs (exp);
1024 arg1 = CALL_EXPR_ARG (exp, 1);
1026 arg1 = integer_zero_node;
1028 arg2 = CALL_EXPR_ARG (exp, 2);
1030 arg2 = build_int_cst (NULL_TREE, 3);
1032 /* Argument 0 is an address. */
1033 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1035 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1036 if (TREE_CODE (arg1) != INTEGER_CST)
1038 error ("second argument to %<__builtin_prefetch%> must be a constant");
1039 arg1 = integer_zero_node;
1041 op1 = expand_normal (arg1);
1042 /* Argument 1 must be either zero or one. */
1043 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1045 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1050 /* Argument 2 (locality) must be a compile-time constant int. */
1051 if (TREE_CODE (arg2) != INTEGER_CST)
1053 error ("third argument to %<__builtin_prefetch%> must be a constant");
1054 arg2 = integer_zero_node;
1056 op2 = expand_normal (arg2);
1057 /* Argument 2 must be 0, 1, 2, or 3. */
1058 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1060 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1064 #ifdef HAVE_prefetch
1067 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1069 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1070 || (GET_MODE (op0) != Pmode))
1072 op0 = convert_memory_address (Pmode, op0);
1073 op0 = force_reg (Pmode, op0);
1075 emit_insn (gen_prefetch (op0, op1, op2));
1079 /* Don't do anything with direct references to volatile memory, but
1080 generate code to handle other side effects. */
1081 if (!MEM_P (op0) && side_effects_p (op0))
1085 /* Get a MEM rtx for expression EXP which is the address of an operand
1086 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1087 the maximum length of the block of memory that might be accessed or
1091 get_memory_rtx (tree exp, tree len)
1093 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1094 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1096 /* Get an expression we can use to find the attributes to assign to MEM.
1097 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1098 we can. First remove any nops. */
1099 while (CONVERT_EXPR_P (exp)
1100 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1101 exp = TREE_OPERAND (exp, 0);
1103 if (TREE_CODE (exp) == ADDR_EXPR)
1104 exp = TREE_OPERAND (exp, 0);
1105 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1106 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1110 /* Honor attributes derived from exp, except for the alias set
1111 (as builtin stringops may alias with anything) and the size
1112 (as stringops may access multiple array elements). */
1115 set_mem_attributes (mem, exp, 0);
1117 /* Allow the string and memory builtins to overflow from one
1118 field into another, see http://gcc.gnu.org/PR23561.
1119 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1120 memory accessed by the string or memory builtin will fit
1121 within the field. */
1122 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1124 tree mem_expr = MEM_EXPR (mem);
1125 HOST_WIDE_INT offset = -1, length = -1;
1128 while (TREE_CODE (inner) == ARRAY_REF
1129 || CONVERT_EXPR_P (inner)
1130 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1131 || TREE_CODE (inner) == SAVE_EXPR)
1132 inner = TREE_OPERAND (inner, 0);
1134 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1136 if (MEM_OFFSET (mem)
1137 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1138 offset = INTVAL (MEM_OFFSET (mem));
1140 if (offset >= 0 && len && host_integerp (len, 0))
1141 length = tree_low_cst (len, 0);
1143 while (TREE_CODE (inner) == COMPONENT_REF)
1145 tree field = TREE_OPERAND (inner, 1);
1146 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1147 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1149 /* Bitfields are generally not byte-addressable. */
1150 gcc_assert (!DECL_BIT_FIELD (field)
1151 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1152 % BITS_PER_UNIT) == 0
1153 && host_integerp (DECL_SIZE (field), 0)
1154 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1155 % BITS_PER_UNIT) == 0));
1157 /* If we can prove that the memory starting at XEXP (mem, 0) and
1158 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1159 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1160 fields without DECL_SIZE_UNIT like flexible array members. */
1162 && DECL_SIZE_UNIT (field)
1163 && host_integerp (DECL_SIZE_UNIT (field), 0))
1166 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1169 && offset + length <= size)
1174 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1175 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1176 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1184 mem_expr = TREE_OPERAND (mem_expr, 0);
1185 inner = TREE_OPERAND (inner, 0);
1188 if (mem_expr == NULL)
1190 if (mem_expr != MEM_EXPR (mem))
1192 set_mem_expr (mem, mem_expr);
1193 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1196 set_mem_alias_set (mem, 0);
1197 set_mem_size (mem, NULL_RTX);
1203 /* Built-in functions to perform an untyped call and return. */
1205 /* For each register that may be used for calling a function, this
1206 gives a mode used to copy the register's value. VOIDmode indicates
1207 the register is not used for calling a function. If the machine
1208 has register windows, this gives only the outbound registers.
1209 INCOMING_REGNO gives the corresponding inbound register. */
1210 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1212 /* For each register that may be used for returning values, this gives
1213 a mode used to copy the register's value. VOIDmode indicates the
1214 register is not used for returning values. If the machine has
1215 register windows, this gives only the outbound registers.
1216 INCOMING_REGNO gives the corresponding inbound register. */
1217 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1219 /* For each register that may be used for calling a function, this
1220 gives the offset of that register into the block returned by
1221 __builtin_apply_args. 0 indicates that the register is not
1222 used for calling a function. */
1223 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1225 /* Return the size required for the block returned by __builtin_apply_args,
1226 and initialize apply_args_mode. */
1229 apply_args_size (void)
1231 static int size = -1;
1234 enum machine_mode mode;
1236 /* The values computed by this function never change. */
1239 /* The first value is the incoming arg-pointer. */
1240 size = GET_MODE_SIZE (Pmode);
1242 /* The second value is the structure value address unless this is
1243 passed as an "invisible" first argument. */
1244 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1245 size += GET_MODE_SIZE (Pmode);
1247 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1248 if (FUNCTION_ARG_REGNO_P (regno))
1250 mode = reg_raw_mode[regno];
1252 gcc_assert (mode != VOIDmode);
1254 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1255 if (size % align != 0)
1256 size = CEIL (size, align) * align;
1257 apply_args_reg_offset[regno] = size;
1258 size += GET_MODE_SIZE (mode);
1259 apply_args_mode[regno] = mode;
1263 apply_args_mode[regno] = VOIDmode;
1264 apply_args_reg_offset[regno] = 0;
1270 /* Return the size required for the block returned by __builtin_apply,
1271 and initialize apply_result_mode. */
1274 apply_result_size (void)
1276 static int size = -1;
1278 enum machine_mode mode;
1280 /* The values computed by this function never change. */
1285 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1286 if (FUNCTION_VALUE_REGNO_P (regno))
1288 mode = reg_raw_mode[regno];
1290 gcc_assert (mode != VOIDmode);
1292 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1293 if (size % align != 0)
1294 size = CEIL (size, align) * align;
1295 size += GET_MODE_SIZE (mode);
1296 apply_result_mode[regno] = mode;
1299 apply_result_mode[regno] = VOIDmode;
1301 /* Allow targets that use untyped_call and untyped_return to override
1302 the size so that machine-specific information can be stored here. */
1303 #ifdef APPLY_RESULT_SIZE
1304 size = APPLY_RESULT_SIZE;
1310 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1311 /* Create a vector describing the result block RESULT. If SAVEP is true,
1312 the result block is used to save the values; otherwise it is used to
1313 restore the values. */
1316 result_vector (int savep, rtx result)
1318 int regno, size, align, nelts;
1319 enum machine_mode mode;
1321 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1324 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1325 if ((mode = apply_result_mode[regno]) != VOIDmode)
1327 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1328 if (size % align != 0)
1329 size = CEIL (size, align) * align;
1330 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1331 mem = adjust_address (result, mode, size);
1332 savevec[nelts++] = (savep
1333 ? gen_rtx_SET (VOIDmode, mem, reg)
1334 : gen_rtx_SET (VOIDmode, reg, mem));
1335 size += GET_MODE_SIZE (mode);
1337 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1339 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1341 /* Save the state required to perform an untyped call with the same
1342 arguments as were passed to the current function. */
1345 expand_builtin_apply_args_1 (void)
1348 int size, align, regno;
1349 enum machine_mode mode;
1350 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1352 /* Create a block where the arg-pointer, structure value address,
1353 and argument registers can be saved. */
1354 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1356 /* Walk past the arg-pointer and structure value address. */
1357 size = GET_MODE_SIZE (Pmode);
1358 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1359 size += GET_MODE_SIZE (Pmode);
1361 /* Save each register used in calling a function to the block. */
1362 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1363 if ((mode = apply_args_mode[regno]) != VOIDmode)
1365 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1366 if (size % align != 0)
1367 size = CEIL (size, align) * align;
1369 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1371 emit_move_insn (adjust_address (registers, mode, size), tem);
1372 size += GET_MODE_SIZE (mode);
1375 /* Save the arg pointer to the block. */
1376 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1377 #ifdef STACK_GROWS_DOWNWARD
1378 /* We need the pointer as the caller actually passed them to us, not
1379 as we might have pretended they were passed. Make sure it's a valid
1380 operand, as emit_move_insn isn't expected to handle a PLUS. */
1382 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1385 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1387 size = GET_MODE_SIZE (Pmode);
1389 /* Save the structure value address unless this is passed as an
1390 "invisible" first argument. */
1391 if (struct_incoming_value)
1393 emit_move_insn (adjust_address (registers, Pmode, size),
1394 copy_to_reg (struct_incoming_value));
1395 size += GET_MODE_SIZE (Pmode);
1398 /* Return the address of the block. */
1399 return copy_addr_to_reg (XEXP (registers, 0));
1402 /* __builtin_apply_args returns block of memory allocated on
1403 the stack into which is stored the arg pointer, structure
1404 value address, static chain, and all the registers that might
1405 possibly be used in performing a function call. The code is
1406 moved to the start of the function so the incoming values are
1410 expand_builtin_apply_args (void)
1412 /* Don't do __builtin_apply_args more than once in a function.
1413 Save the result of the first call and reuse it. */
1414 if (apply_args_value != 0)
1415 return apply_args_value;
1417 /* When this function is called, it means that registers must be
1418 saved on entry to this function. So we migrate the
1419 call to the first insn of this function. */
1424 temp = expand_builtin_apply_args_1 ();
1428 apply_args_value = temp;
1430 /* Put the insns after the NOTE that starts the function.
1431 If this is inside a start_sequence, make the outer-level insn
1432 chain current, so the code is placed at the start of the
1434 push_topmost_sequence ();
1435 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1436 pop_topmost_sequence ();
1441 /* Perform an untyped call and save the state required to perform an
1442 untyped return of whatever value was returned by the given function. */
1445 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1447 int size, align, regno;
1448 enum machine_mode mode;
1449 rtx incoming_args, result, reg, dest, src, call_insn;
1450 rtx old_stack_level = 0;
1451 rtx call_fusage = 0;
1452 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1454 arguments = convert_memory_address (Pmode, arguments);
1456 /* Create a block where the return registers can be saved. */
1457 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1459 /* Fetch the arg pointer from the ARGUMENTS block. */
1460 incoming_args = gen_reg_rtx (Pmode);
1461 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1462 #ifndef STACK_GROWS_DOWNWARD
1463 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1464 incoming_args, 0, OPTAB_LIB_WIDEN);
1467 /* Push a new argument block and copy the arguments. Do not allow
1468 the (potential) memcpy call below to interfere with our stack
1470 do_pending_stack_adjust ();
1473 /* Save the stack with nonlocal if available. */
1474 #ifdef HAVE_save_stack_nonlocal
1475 if (HAVE_save_stack_nonlocal)
1476 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1479 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1481 /* Allocate a block of memory onto the stack and copy the memory
1482 arguments to the outgoing arguments address. */
1483 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1485 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1486 may have already set current_function_calls_alloca to true.
1487 current_function_calls_alloca won't be set if argsize is zero,
1488 so we have to guarantee need_drap is true here. */
1489 if (SUPPORTS_STACK_ALIGNMENT)
1490 crtl->need_drap = true;
1492 dest = virtual_outgoing_args_rtx;
1493 #ifndef STACK_GROWS_DOWNWARD
1494 if (GET_CODE (argsize) == CONST_INT)
1495 dest = plus_constant (dest, -INTVAL (argsize));
1497 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1499 dest = gen_rtx_MEM (BLKmode, dest);
1500 set_mem_align (dest, PARM_BOUNDARY);
1501 src = gen_rtx_MEM (BLKmode, incoming_args);
1502 set_mem_align (src, PARM_BOUNDARY);
1503 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1505 /* Refer to the argument block. */
1507 arguments = gen_rtx_MEM (BLKmode, arguments);
1508 set_mem_align (arguments, PARM_BOUNDARY);
1510 /* Walk past the arg-pointer and structure value address. */
1511 size = GET_MODE_SIZE (Pmode);
1513 size += GET_MODE_SIZE (Pmode);
1515 /* Restore each of the registers previously saved. Make USE insns
1516 for each of these registers for use in making the call. */
1517 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1518 if ((mode = apply_args_mode[regno]) != VOIDmode)
1520 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1521 if (size % align != 0)
1522 size = CEIL (size, align) * align;
1523 reg = gen_rtx_REG (mode, regno);
1524 emit_move_insn (reg, adjust_address (arguments, mode, size));
1525 use_reg (&call_fusage, reg);
1526 size += GET_MODE_SIZE (mode);
1529 /* Restore the structure value address unless this is passed as an
1530 "invisible" first argument. */
1531 size = GET_MODE_SIZE (Pmode);
1534 rtx value = gen_reg_rtx (Pmode);
1535 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1536 emit_move_insn (struct_value, value);
1537 if (REG_P (struct_value))
1538 use_reg (&call_fusage, struct_value);
1539 size += GET_MODE_SIZE (Pmode);
1542 /* All arguments and registers used for the call are set up by now! */
1543 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1545 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1546 and we don't want to load it into a register as an optimization,
1547 because prepare_call_address already did it if it should be done. */
1548 if (GET_CODE (function) != SYMBOL_REF)
1549 function = memory_address (FUNCTION_MODE, function);
1551 /* Generate the actual call instruction and save the return value. */
1552 #ifdef HAVE_untyped_call
1553 if (HAVE_untyped_call)
1554 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1555 result, result_vector (1, result)));
1558 #ifdef HAVE_call_value
1559 if (HAVE_call_value)
1563 /* Locate the unique return register. It is not possible to
1564 express a call that sets more than one return register using
1565 call_value; use untyped_call for that. In fact, untyped_call
1566 only needs to save the return registers in the given block. */
1567 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1568 if ((mode = apply_result_mode[regno]) != VOIDmode)
1570 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1572 valreg = gen_rtx_REG (mode, regno);
1575 emit_call_insn (GEN_CALL_VALUE (valreg,
1576 gen_rtx_MEM (FUNCTION_MODE, function),
1577 const0_rtx, NULL_RTX, const0_rtx));
1579 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1585 /* Find the CALL insn we just emitted, and attach the register usage
1587 call_insn = last_call_insn ();
1588 add_function_usage_to (call_insn, call_fusage);
1590 /* Restore the stack. */
1591 #ifdef HAVE_save_stack_nonlocal
1592 if (HAVE_save_stack_nonlocal)
1593 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1596 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1600 /* Return the address of the result block. */
1601 result = copy_addr_to_reg (XEXP (result, 0));
1602 return convert_memory_address (ptr_mode, result);
1605 /* Perform an untyped return. */
1608 expand_builtin_return (rtx result)
1610 int size, align, regno;
1611 enum machine_mode mode;
1613 rtx call_fusage = 0;
1615 result = convert_memory_address (Pmode, result);
1617 apply_result_size ();
1618 result = gen_rtx_MEM (BLKmode, result);
1620 #ifdef HAVE_untyped_return
1621 if (HAVE_untyped_return)
1623 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1629 /* Restore the return value and note that each value is used. */
1631 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1632 if ((mode = apply_result_mode[regno]) != VOIDmode)
1634 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1635 if (size % align != 0)
1636 size = CEIL (size, align) * align;
1637 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1638 emit_move_insn (reg, adjust_address (result, mode, size));
1640 push_to_sequence (call_fusage);
1642 call_fusage = get_insns ();
1644 size += GET_MODE_SIZE (mode);
1647 /* Put the USE insns before the return. */
1648 emit_insn (call_fusage);
1650 /* Return whatever values was restored by jumping directly to the end
1652 expand_naked_return ();
1655 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1657 static enum type_class
1658 type_to_class (tree type)
1660 switch (TREE_CODE (type))
1662 case VOID_TYPE: return void_type_class;
1663 case INTEGER_TYPE: return integer_type_class;
1664 case ENUMERAL_TYPE: return enumeral_type_class;
1665 case BOOLEAN_TYPE: return boolean_type_class;
1666 case POINTER_TYPE: return pointer_type_class;
1667 case REFERENCE_TYPE: return reference_type_class;
1668 case OFFSET_TYPE: return offset_type_class;
1669 case REAL_TYPE: return real_type_class;
1670 case COMPLEX_TYPE: return complex_type_class;
1671 case FUNCTION_TYPE: return function_type_class;
1672 case METHOD_TYPE: return method_type_class;
1673 case RECORD_TYPE: return record_type_class;
1675 case QUAL_UNION_TYPE: return union_type_class;
1676 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1677 ? string_type_class : array_type_class);
1678 case LANG_TYPE: return lang_type_class;
1679 default: return no_type_class;
1683 /* Expand a call EXP to __builtin_classify_type. */
1686 expand_builtin_classify_type (tree exp)
1688 if (call_expr_nargs (exp))
1689 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1690 return GEN_INT (no_type_class);
1693 /* This helper macro, meant to be used in mathfn_built_in below,
1694 determines which among a set of three builtin math functions is
1695 appropriate for a given type mode. The `F' and `L' cases are
1696 automatically generated from the `double' case. */
1697 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1698 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1699 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1700 fcodel = BUILT_IN_MATHFN##L ; break;
1701 /* Similar to above, but appends _R after any F/L suffix. */
1702 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1703 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1704 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1705 fcodel = BUILT_IN_MATHFN##L_R ; break;
1707 /* Return mathematic function equivalent to FN but operating directly
1708 on TYPE, if available. If IMPLICIT is true find the function in
1709 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1710 can't do the conversion, return zero. */
1713 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1715 tree const *const fn_arr
1716 = implicit ? implicit_built_in_decls : built_in_decls;
1717 enum built_in_function fcode, fcodef, fcodel;
1721 CASE_MATHFN (BUILT_IN_ACOS)
1722 CASE_MATHFN (BUILT_IN_ACOSH)
1723 CASE_MATHFN (BUILT_IN_ASIN)
1724 CASE_MATHFN (BUILT_IN_ASINH)
1725 CASE_MATHFN (BUILT_IN_ATAN)
1726 CASE_MATHFN (BUILT_IN_ATAN2)
1727 CASE_MATHFN (BUILT_IN_ATANH)
1728 CASE_MATHFN (BUILT_IN_CBRT)
1729 CASE_MATHFN (BUILT_IN_CEIL)
1730 CASE_MATHFN (BUILT_IN_CEXPI)
1731 CASE_MATHFN (BUILT_IN_COPYSIGN)
1732 CASE_MATHFN (BUILT_IN_COS)
1733 CASE_MATHFN (BUILT_IN_COSH)
1734 CASE_MATHFN (BUILT_IN_DREM)
1735 CASE_MATHFN (BUILT_IN_ERF)
1736 CASE_MATHFN (BUILT_IN_ERFC)
1737 CASE_MATHFN (BUILT_IN_EXP)
1738 CASE_MATHFN (BUILT_IN_EXP10)
1739 CASE_MATHFN (BUILT_IN_EXP2)
1740 CASE_MATHFN (BUILT_IN_EXPM1)
1741 CASE_MATHFN (BUILT_IN_FABS)
1742 CASE_MATHFN (BUILT_IN_FDIM)
1743 CASE_MATHFN (BUILT_IN_FLOOR)
1744 CASE_MATHFN (BUILT_IN_FMA)
1745 CASE_MATHFN (BUILT_IN_FMAX)
1746 CASE_MATHFN (BUILT_IN_FMIN)
1747 CASE_MATHFN (BUILT_IN_FMOD)
1748 CASE_MATHFN (BUILT_IN_FREXP)
1749 CASE_MATHFN (BUILT_IN_GAMMA)
1750 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1751 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1752 CASE_MATHFN (BUILT_IN_HYPOT)
1753 CASE_MATHFN (BUILT_IN_ILOGB)
1754 CASE_MATHFN (BUILT_IN_INF)
1755 CASE_MATHFN (BUILT_IN_ISINF)
1756 CASE_MATHFN (BUILT_IN_J0)
1757 CASE_MATHFN (BUILT_IN_J1)
1758 CASE_MATHFN (BUILT_IN_JN)
1759 CASE_MATHFN (BUILT_IN_LCEIL)
1760 CASE_MATHFN (BUILT_IN_LDEXP)
1761 CASE_MATHFN (BUILT_IN_LFLOOR)
1762 CASE_MATHFN (BUILT_IN_LGAMMA)
1763 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1764 CASE_MATHFN (BUILT_IN_LLCEIL)
1765 CASE_MATHFN (BUILT_IN_LLFLOOR)
1766 CASE_MATHFN (BUILT_IN_LLRINT)
1767 CASE_MATHFN (BUILT_IN_LLROUND)
1768 CASE_MATHFN (BUILT_IN_LOG)
1769 CASE_MATHFN (BUILT_IN_LOG10)
1770 CASE_MATHFN (BUILT_IN_LOG1P)
1771 CASE_MATHFN (BUILT_IN_LOG2)
1772 CASE_MATHFN (BUILT_IN_LOGB)
1773 CASE_MATHFN (BUILT_IN_LRINT)
1774 CASE_MATHFN (BUILT_IN_LROUND)
1775 CASE_MATHFN (BUILT_IN_MODF)
1776 CASE_MATHFN (BUILT_IN_NAN)
1777 CASE_MATHFN (BUILT_IN_NANS)
1778 CASE_MATHFN (BUILT_IN_NEARBYINT)
1779 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1780 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1781 CASE_MATHFN (BUILT_IN_POW)
1782 CASE_MATHFN (BUILT_IN_POWI)
1783 CASE_MATHFN (BUILT_IN_POW10)
1784 CASE_MATHFN (BUILT_IN_REMAINDER)
1785 CASE_MATHFN (BUILT_IN_REMQUO)
1786 CASE_MATHFN (BUILT_IN_RINT)
1787 CASE_MATHFN (BUILT_IN_ROUND)
1788 CASE_MATHFN (BUILT_IN_SCALB)
1789 CASE_MATHFN (BUILT_IN_SCALBLN)
1790 CASE_MATHFN (BUILT_IN_SCALBN)
1791 CASE_MATHFN (BUILT_IN_SIGNBIT)
1792 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1793 CASE_MATHFN (BUILT_IN_SIN)
1794 CASE_MATHFN (BUILT_IN_SINCOS)
1795 CASE_MATHFN (BUILT_IN_SINH)
1796 CASE_MATHFN (BUILT_IN_SQRT)
1797 CASE_MATHFN (BUILT_IN_TAN)
1798 CASE_MATHFN (BUILT_IN_TANH)
1799 CASE_MATHFN (BUILT_IN_TGAMMA)
1800 CASE_MATHFN (BUILT_IN_TRUNC)
1801 CASE_MATHFN (BUILT_IN_Y0)
1802 CASE_MATHFN (BUILT_IN_Y1)
1803 CASE_MATHFN (BUILT_IN_YN)
1809 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1810 return fn_arr[fcode];
1811 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1812 return fn_arr[fcodef];
1813 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1814 return fn_arr[fcodel];
1819 /* Like mathfn_built_in_1(), but always use the implicit array. */
1822 mathfn_built_in (tree type, enum built_in_function fn)
1824 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1827 /* If errno must be maintained, expand the RTL to check if the result,
1828 TARGET, of a built-in function call, EXP, is NaN, and if so set
1832 expand_errno_check (tree exp, rtx target)
1834 rtx lab = gen_label_rtx ();
1836 /* Test the result; if it is NaN, set errno=EDOM because
1837 the argument was not in the domain. */
1838 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1842 /* If this built-in doesn't throw an exception, set errno directly. */
1843 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1845 #ifdef GEN_ERRNO_RTX
1846 rtx errno_rtx = GEN_ERRNO_RTX;
1849 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1851 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1857 /* Make sure the library call isn't expanded as a tail call. */
1858 CALL_EXPR_TAILCALL (exp) = 0;
1860 /* We can't set errno=EDOM directly; let the library call do it.
1861 Pop the arguments right away in case the call gets deleted. */
1863 expand_call (exp, target, 0);
1868 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1869 Return NULL_RTX if a normal call should be emitted rather than expanding
1870 the function in-line. EXP is the expression that is a call to the builtin
1871 function; if convenient, the result should be placed in TARGET.
1872 SUBTARGET may be used as the target for computing one of EXP's operands. */
1875 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1877 optab builtin_optab;
1878 rtx op0, insns, before_call;
1879 tree fndecl = get_callee_fndecl (exp);
1880 enum machine_mode mode;
1881 bool errno_set = false;
1884 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1887 arg = CALL_EXPR_ARG (exp, 0);
1889 switch (DECL_FUNCTION_CODE (fndecl))
1891 CASE_FLT_FN (BUILT_IN_SQRT):
1892 errno_set = ! tree_expr_nonnegative_p (arg);
1893 builtin_optab = sqrt_optab;
1895 CASE_FLT_FN (BUILT_IN_EXP):
1896 errno_set = true; builtin_optab = exp_optab; break;
1897 CASE_FLT_FN (BUILT_IN_EXP10):
1898 CASE_FLT_FN (BUILT_IN_POW10):
1899 errno_set = true; builtin_optab = exp10_optab; break;
1900 CASE_FLT_FN (BUILT_IN_EXP2):
1901 errno_set = true; builtin_optab = exp2_optab; break;
1902 CASE_FLT_FN (BUILT_IN_EXPM1):
1903 errno_set = true; builtin_optab = expm1_optab; break;
1904 CASE_FLT_FN (BUILT_IN_LOGB):
1905 errno_set = true; builtin_optab = logb_optab; break;
1906 CASE_FLT_FN (BUILT_IN_LOG):
1907 errno_set = true; builtin_optab = log_optab; break;
1908 CASE_FLT_FN (BUILT_IN_LOG10):
1909 errno_set = true; builtin_optab = log10_optab; break;
1910 CASE_FLT_FN (BUILT_IN_LOG2):
1911 errno_set = true; builtin_optab = log2_optab; break;
1912 CASE_FLT_FN (BUILT_IN_LOG1P):
1913 errno_set = true; builtin_optab = log1p_optab; break;
1914 CASE_FLT_FN (BUILT_IN_ASIN):
1915 builtin_optab = asin_optab; break;
1916 CASE_FLT_FN (BUILT_IN_ACOS):
1917 builtin_optab = acos_optab; break;
1918 CASE_FLT_FN (BUILT_IN_TAN):
1919 builtin_optab = tan_optab; break;
1920 CASE_FLT_FN (BUILT_IN_ATAN):
1921 builtin_optab = atan_optab; break;
1922 CASE_FLT_FN (BUILT_IN_FLOOR):
1923 builtin_optab = floor_optab; break;
1924 CASE_FLT_FN (BUILT_IN_CEIL):
1925 builtin_optab = ceil_optab; break;
1926 CASE_FLT_FN (BUILT_IN_TRUNC):
1927 builtin_optab = btrunc_optab; break;
1928 CASE_FLT_FN (BUILT_IN_ROUND):
1929 builtin_optab = round_optab; break;
1930 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1931 builtin_optab = nearbyint_optab;
1932 if (flag_trapping_math)
1934 /* Else fallthrough and expand as rint. */
1935 CASE_FLT_FN (BUILT_IN_RINT):
1936 builtin_optab = rint_optab; break;
1941 /* Make a suitable register to place result in. */
1942 mode = TYPE_MODE (TREE_TYPE (exp));
1944 if (! flag_errno_math || ! HONOR_NANS (mode))
1947 /* Before working hard, check whether the instruction is available. */
1948 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1950 target = gen_reg_rtx (mode);
1952 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1953 need to expand the argument again. This way, we will not perform
1954 side-effects more the once. */
1955 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1957 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1961 /* Compute into TARGET.
1962 Set TARGET to wherever the result comes back. */
1963 target = expand_unop (mode, builtin_optab, op0, target, 0);
1968 expand_errno_check (exp, target);
1970 /* Output the entire sequence. */
1971 insns = get_insns ();
1977 /* If we were unable to expand via the builtin, stop the sequence
1978 (without outputting the insns) and call to the library function
1979 with the stabilized argument list. */
1983 before_call = get_last_insn ();
1985 return expand_call (exp, target, target == const0_rtx);
1988 /* Expand a call to the builtin binary math functions (pow and atan2).
1989 Return NULL_RTX if a normal call should be emitted rather than expanding the
1990 function in-line. EXP is the expression that is a call to the builtin
1991 function; if convenient, the result should be placed in TARGET.
1992 SUBTARGET may be used as the target for computing one of EXP's
1996 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1998 optab builtin_optab;
1999 rtx op0, op1, insns;
2000 int op1_type = REAL_TYPE;
2001 tree fndecl = get_callee_fndecl (exp);
2003 enum machine_mode mode;
2004 bool errno_set = true;
2006 switch (DECL_FUNCTION_CODE (fndecl))
2008 CASE_FLT_FN (BUILT_IN_SCALBN):
2009 CASE_FLT_FN (BUILT_IN_SCALBLN):
2010 CASE_FLT_FN (BUILT_IN_LDEXP):
2011 op1_type = INTEGER_TYPE;
2016 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2019 arg0 = CALL_EXPR_ARG (exp, 0);
2020 arg1 = CALL_EXPR_ARG (exp, 1);
2022 switch (DECL_FUNCTION_CODE (fndecl))
2024 CASE_FLT_FN (BUILT_IN_POW):
2025 builtin_optab = pow_optab; break;
2026 CASE_FLT_FN (BUILT_IN_ATAN2):
2027 builtin_optab = atan2_optab; break;
2028 CASE_FLT_FN (BUILT_IN_SCALB):
2029 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2031 builtin_optab = scalb_optab; break;
2032 CASE_FLT_FN (BUILT_IN_SCALBN):
2033 CASE_FLT_FN (BUILT_IN_SCALBLN):
2034 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2036 /* Fall through... */
2037 CASE_FLT_FN (BUILT_IN_LDEXP):
2038 builtin_optab = ldexp_optab; break;
2039 CASE_FLT_FN (BUILT_IN_FMOD):
2040 builtin_optab = fmod_optab; break;
2041 CASE_FLT_FN (BUILT_IN_REMAINDER):
2042 CASE_FLT_FN (BUILT_IN_DREM):
2043 builtin_optab = remainder_optab; break;
2048 /* Make a suitable register to place result in. */
2049 mode = TYPE_MODE (TREE_TYPE (exp));
2051 /* Before working hard, check whether the instruction is available. */
2052 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2055 target = gen_reg_rtx (mode);
2057 if (! flag_errno_math || ! HONOR_NANS (mode))
2060 /* Always stabilize the argument list. */
2061 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2062 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2064 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2065 op1 = expand_normal (arg1);
2069 /* Compute into TARGET.
2070 Set TARGET to wherever the result comes back. */
2071 target = expand_binop (mode, builtin_optab, op0, op1,
2072 target, 0, OPTAB_DIRECT);
2074 /* If we were unable to expand via the builtin, stop the sequence
2075 (without outputting the insns) and call to the library function
2076 with the stabilized argument list. */
2080 return expand_call (exp, target, target == const0_rtx);
2084 expand_errno_check (exp, target);
2086 /* Output the entire sequence. */
2087 insns = get_insns ();
2094 /* Expand a call to the builtin sin and cos math functions.
2095 Return NULL_RTX if a normal call should be emitted rather than expanding the
2096 function in-line. EXP is the expression that is a call to the builtin
2097 function; if convenient, the result should be placed in TARGET.
2098 SUBTARGET may be used as the target for computing one of EXP's
2102 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2104 optab builtin_optab;
2106 tree fndecl = get_callee_fndecl (exp);
2107 enum machine_mode mode;
2110 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2113 arg = CALL_EXPR_ARG (exp, 0);
2115 switch (DECL_FUNCTION_CODE (fndecl))
2117 CASE_FLT_FN (BUILT_IN_SIN):
2118 CASE_FLT_FN (BUILT_IN_COS):
2119 builtin_optab = sincos_optab; break;
2124 /* Make a suitable register to place result in. */
2125 mode = TYPE_MODE (TREE_TYPE (exp));
2127 /* Check if sincos insn is available, otherwise fallback
2128 to sin or cos insn. */
2129 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2130 switch (DECL_FUNCTION_CODE (fndecl))
2132 CASE_FLT_FN (BUILT_IN_SIN):
2133 builtin_optab = sin_optab; break;
2134 CASE_FLT_FN (BUILT_IN_COS):
2135 builtin_optab = cos_optab; break;
2140 /* Before working hard, check whether the instruction is available. */
2141 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2143 target = gen_reg_rtx (mode);
2145 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2146 need to expand the argument again. This way, we will not perform
2147 side-effects more the once. */
2148 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2150 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2154 /* Compute into TARGET.
2155 Set TARGET to wherever the result comes back. */
2156 if (builtin_optab == sincos_optab)
2160 switch (DECL_FUNCTION_CODE (fndecl))
2162 CASE_FLT_FN (BUILT_IN_SIN):
2163 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2165 CASE_FLT_FN (BUILT_IN_COS):
2166 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2171 gcc_assert (result);
2175 target = expand_unop (mode, builtin_optab, op0, target, 0);
2180 /* Output the entire sequence. */
2181 insns = get_insns ();
2187 /* If we were unable to expand via the builtin, stop the sequence
2188 (without outputting the insns) and call to the library function
2189 with the stabilized argument list. */
2193 target = expand_call (exp, target, target == const0_rtx);
2198 /* Expand a call to one of the builtin math functions that operate on
2199 floating point argument and output an integer result (ilogb, isinf,
2201 Return 0 if a normal call should be emitted rather than expanding the
2202 function in-line. EXP is the expression that is a call to the builtin
2203 function; if convenient, the result should be placed in TARGET.
2204 SUBTARGET may be used as the target for computing one of EXP's operands. */
2207 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2209 optab builtin_optab = 0;
2210 enum insn_code icode = CODE_FOR_nothing;
2212 tree fndecl = get_callee_fndecl (exp);
2213 enum machine_mode mode;
2214 bool errno_set = false;
2217 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2220 arg = CALL_EXPR_ARG (exp, 0);
2222 switch (DECL_FUNCTION_CODE (fndecl))
2224 CASE_FLT_FN (BUILT_IN_ILOGB):
2225 errno_set = true; builtin_optab = ilogb_optab; break;
2226 CASE_FLT_FN (BUILT_IN_ISINF):
2227 builtin_optab = isinf_optab; break;
2228 case BUILT_IN_ISNORMAL:
2229 case BUILT_IN_ISFINITE:
2230 CASE_FLT_FN (BUILT_IN_FINITE):
2231 /* These builtins have no optabs (yet). */
2237 /* There's no easy way to detect the case we need to set EDOM. */
2238 if (flag_errno_math && errno_set)
2241 /* Optab mode depends on the mode of the input argument. */
2242 mode = TYPE_MODE (TREE_TYPE (arg));
2245 icode = optab_handler (builtin_optab, mode)->insn_code;
2247 /* Before working hard, check whether the instruction is available. */
2248 if (icode != CODE_FOR_nothing)
2250 /* Make a suitable register to place result in. */
2252 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2253 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2255 gcc_assert (insn_data[icode].operand[0].predicate
2256 (target, GET_MODE (target)));
2258 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2259 need to expand the argument again. This way, we will not perform
2260 side-effects more the once. */
2261 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2263 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2265 if (mode != GET_MODE (op0))
2266 op0 = convert_to_mode (mode, op0, 0);
2268 /* Compute into TARGET.
2269 Set TARGET to wherever the result comes back. */
2270 emit_unop_insn (icode, target, op0, UNKNOWN);
2274 /* If there is no optab, try generic code. */
2275 switch (DECL_FUNCTION_CODE (fndecl))
2279 CASE_FLT_FN (BUILT_IN_ISINF):
2281 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2282 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2283 tree const type = TREE_TYPE (arg);
2287 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2288 real_from_string (&r, buf);
2289 result = build_call_expr (isgr_fn, 2,
2290 fold_build1 (ABS_EXPR, type, arg),
2291 build_real (type, r));
2292 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2294 CASE_FLT_FN (BUILT_IN_FINITE):
2295 case BUILT_IN_ISFINITE:
2297 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2298 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2299 tree const type = TREE_TYPE (arg);
2303 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2304 real_from_string (&r, buf);
2305 result = build_call_expr (isle_fn, 2,
2306 fold_build1 (ABS_EXPR, type, arg),
2307 build_real (type, r));
2308 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2310 case BUILT_IN_ISNORMAL:
2312 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2313 islessequal(fabs(x),DBL_MAX). */
2314 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2315 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2316 tree const type = TREE_TYPE (arg);
2317 REAL_VALUE_TYPE rmax, rmin;
2320 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2321 real_from_string (&rmax, buf);
2322 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2323 real_from_string (&rmin, buf);
2324 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2325 result = build_call_expr (isle_fn, 2, arg,
2326 build_real (type, rmax));
2327 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2328 build_call_expr (isge_fn, 2, arg,
2329 build_real (type, rmin)));
2330 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2336 target = expand_call (exp, target, target == const0_rtx);
2341 /* Expand a call to the builtin sincos math function.
2342 Return NULL_RTX if a normal call should be emitted rather than expanding the
2343 function in-line. EXP is the expression that is a call to the builtin
2347 expand_builtin_sincos (tree exp)
2349 rtx op0, op1, op2, target1, target2;
2350 enum machine_mode mode;
2351 tree arg, sinp, cosp;
2354 if (!validate_arglist (exp, REAL_TYPE,
2355 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2358 arg = CALL_EXPR_ARG (exp, 0);
2359 sinp = CALL_EXPR_ARG (exp, 1);
2360 cosp = CALL_EXPR_ARG (exp, 2);
2362 /* Make a suitable register to place result in. */
2363 mode = TYPE_MODE (TREE_TYPE (arg));
2365 /* Check if sincos insn is available, otherwise emit the call. */
2366 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2369 target1 = gen_reg_rtx (mode);
2370 target2 = gen_reg_rtx (mode);
2372 op0 = expand_normal (arg);
2373 op1 = expand_normal (build_fold_indirect_ref (sinp));
2374 op2 = expand_normal (build_fold_indirect_ref (cosp));
2376 /* Compute into target1 and target2.
2377 Set TARGET to wherever the result comes back. */
2378 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2379 gcc_assert (result);
2381 /* Move target1 and target2 to the memory locations indicated
2383 emit_move_insn (op1, target1);
2384 emit_move_insn (op2, target2);
2389 /* Expand a call to the internal cexpi builtin to the sincos math function.
2390 EXP is the expression that is a call to the builtin function; if convenient,
2391 the result should be placed in TARGET. SUBTARGET may be used as the target
2392 for computing one of EXP's operands. */
2395 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2397 tree fndecl = get_callee_fndecl (exp);
2399 enum machine_mode mode;
2402 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2405 arg = CALL_EXPR_ARG (exp, 0);
2406 type = TREE_TYPE (arg);
2407 mode = TYPE_MODE (TREE_TYPE (arg));
2409 /* Try expanding via a sincos optab, fall back to emitting a libcall
2410 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2411 is only generated from sincos, cexp or if we have either of them. */
2412 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2414 op1 = gen_reg_rtx (mode);
2415 op2 = gen_reg_rtx (mode);
2417 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2419 /* Compute into op1 and op2. */
2420 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2422 else if (TARGET_HAS_SINCOS)
2424 tree call, fn = NULL_TREE;
2428 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2429 fn = built_in_decls[BUILT_IN_SINCOSF];
2430 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2431 fn = built_in_decls[BUILT_IN_SINCOS];
2432 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2433 fn = built_in_decls[BUILT_IN_SINCOSL];
2437 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2438 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2439 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2440 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2441 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2442 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2444 /* Make sure not to fold the sincos call again. */
2445 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2446 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2447 call, 3, arg, top1, top2));
2451 tree call, fn = NULL_TREE, narg;
2452 tree ctype = build_complex_type (type);
2454 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2455 fn = built_in_decls[BUILT_IN_CEXPF];
2456 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2457 fn = built_in_decls[BUILT_IN_CEXP];
2458 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2459 fn = built_in_decls[BUILT_IN_CEXPL];
2463 /* If we don't have a decl for cexp create one. This is the
2464 friendliest fallback if the user calls __builtin_cexpi
2465 without full target C99 function support. */
2466 if (fn == NULL_TREE)
2469 const char *name = NULL;
2471 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2473 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2475 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2478 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2479 fn = build_fn_decl (name, fntype);
2482 narg = fold_build2 (COMPLEX_EXPR, ctype,
2483 build_real (type, dconst0), arg);
2485 /* Make sure not to fold the cexp call again. */
2486 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2487 return expand_expr (build_call_nary (ctype, call, 1, narg),
2488 target, VOIDmode, EXPAND_NORMAL);
2491 /* Now build the proper return type. */
2492 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2493 make_tree (TREE_TYPE (arg), op2),
2494 make_tree (TREE_TYPE (arg), op1)),
2495 target, VOIDmode, EXPAND_NORMAL);
2498 /* Expand a call to one of the builtin rounding functions gcc defines
2499 as an extension (lfloor and lceil). As these are gcc extensions we
2500 do not need to worry about setting errno to EDOM.
2501 If expanding via optab fails, lower expression to (int)(floor(x)).
2502 EXP is the expression that is a call to the builtin function;
2503 if convenient, the result should be placed in TARGET. */
2506 expand_builtin_int_roundingfn (tree exp, rtx target)
2508 convert_optab builtin_optab;
2509 rtx op0, insns, tmp;
2510 tree fndecl = get_callee_fndecl (exp);
2511 enum built_in_function fallback_fn;
2512 tree fallback_fndecl;
2513 enum machine_mode mode;
2516 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2519 arg = CALL_EXPR_ARG (exp, 0);
2521 switch (DECL_FUNCTION_CODE (fndecl))
2523 CASE_FLT_FN (BUILT_IN_LCEIL):
2524 CASE_FLT_FN (BUILT_IN_LLCEIL):
2525 builtin_optab = lceil_optab;
2526 fallback_fn = BUILT_IN_CEIL;
2529 CASE_FLT_FN (BUILT_IN_LFLOOR):
2530 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2531 builtin_optab = lfloor_optab;
2532 fallback_fn = BUILT_IN_FLOOR;
2539 /* Make a suitable register to place result in. */
2540 mode = TYPE_MODE (TREE_TYPE (exp));
2542 target = gen_reg_rtx (mode);
2544 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2545 need to expand the argument again. This way, we will not perform
2546 side-effects more the once. */
2547 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2549 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2553 /* Compute into TARGET. */
2554 if (expand_sfix_optab (target, op0, builtin_optab))
2556 /* Output the entire sequence. */
2557 insns = get_insns ();
2563 /* If we were unable to expand via the builtin, stop the sequence
2564 (without outputting the insns). */
2567 /* Fall back to floating point rounding optab. */
2568 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2570 /* For non-C99 targets we may end up without a fallback fndecl here
2571 if the user called __builtin_lfloor directly. In this case emit
2572 a call to the floor/ceil variants nevertheless. This should result
2573 in the best user experience for not full C99 targets. */
2574 if (fallback_fndecl == NULL_TREE)
2577 const char *name = NULL;
2579 switch (DECL_FUNCTION_CODE (fndecl))
2581 case BUILT_IN_LCEIL:
2582 case BUILT_IN_LLCEIL:
2585 case BUILT_IN_LCEILF:
2586 case BUILT_IN_LLCEILF:
2589 case BUILT_IN_LCEILL:
2590 case BUILT_IN_LLCEILL:
2593 case BUILT_IN_LFLOOR:
2594 case BUILT_IN_LLFLOOR:
2597 case BUILT_IN_LFLOORF:
2598 case BUILT_IN_LLFLOORF:
2601 case BUILT_IN_LFLOORL:
2602 case BUILT_IN_LLFLOORL:
2609 fntype = build_function_type_list (TREE_TYPE (arg),
2610 TREE_TYPE (arg), NULL_TREE);
2611 fallback_fndecl = build_fn_decl (name, fntype);
2614 exp = build_call_expr (fallback_fndecl, 1, arg);
2616 tmp = expand_normal (exp);
2618 /* Truncate the result of floating point optab to integer
2619 via expand_fix (). */
2620 target = gen_reg_rtx (mode);
2621 expand_fix (target, tmp, 0);
2626 /* Expand a call to one of the builtin math functions doing integer
2628 Return 0 if a normal call should be emitted rather than expanding the
2629 function in-line. EXP is the expression that is a call to the builtin
2630 function; if convenient, the result should be placed in TARGET. */
2633 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2635 convert_optab builtin_optab;
2637 tree fndecl = get_callee_fndecl (exp);
2639 enum machine_mode mode;
2641 /* There's no easy way to detect the case we need to set EDOM. */
2642 if (flag_errno_math)
2645 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2648 arg = CALL_EXPR_ARG (exp, 0);
2650 switch (DECL_FUNCTION_CODE (fndecl))
2652 CASE_FLT_FN (BUILT_IN_LRINT):
2653 CASE_FLT_FN (BUILT_IN_LLRINT):
2654 builtin_optab = lrint_optab; break;
2655 CASE_FLT_FN (BUILT_IN_LROUND):
2656 CASE_FLT_FN (BUILT_IN_LLROUND):
2657 builtin_optab = lround_optab; break;
2662 /* Make a suitable register to place result in. */
2663 mode = TYPE_MODE (TREE_TYPE (exp));
2665 target = gen_reg_rtx (mode);
2667 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2668 need to expand the argument again. This way, we will not perform
2669 side-effects more the once. */
2670 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2672 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2676 if (expand_sfix_optab (target, op0, builtin_optab))
2678 /* Output the entire sequence. */
2679 insns = get_insns ();
2685 /* If we were unable to expand via the builtin, stop the sequence
2686 (without outputting the insns) and call to the library function
2687 with the stabilized argument list. */
2690 target = expand_call (exp, target, target == const0_rtx);
2695 /* To evaluate powi(x,n), the floating point value x raised to the
2696 constant integer exponent n, we use a hybrid algorithm that
2697 combines the "window method" with look-up tables. For an
2698 introduction to exponentiation algorithms and "addition chains",
2699 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2700 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2701 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2702 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2704 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2705 multiplications to inline before calling the system library's pow
2706 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2707 so this default never requires calling pow, powf or powl. */
2709 #ifndef POWI_MAX_MULTS
2710 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2713 /* The size of the "optimal power tree" lookup table. All
2714 exponents less than this value are simply looked up in the
2715 powi_table below. This threshold is also used to size the
2716 cache of pseudo registers that hold intermediate results. */
2717 #define POWI_TABLE_SIZE 256
2719 /* The size, in bits of the window, used in the "window method"
2720 exponentiation algorithm. This is equivalent to a radix of
2721 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2722 #define POWI_WINDOW_SIZE 3
2724 /* The following table is an efficient representation of an
2725 "optimal power tree". For each value, i, the corresponding
2726 value, j, in the table states than an optimal evaluation
2727 sequence for calculating pow(x,i) can be found by evaluating
2728 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2729 100 integers is given in Knuth's "Seminumerical algorithms". */
2731 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2733 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2734 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2735 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2736 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2737 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2738 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2739 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2740 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2741 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2742 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2743 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2744 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2745 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2746 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2747 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2748 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2749 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2750 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2751 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2752 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2753 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2754 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2755 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2756 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2757 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2758 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2759 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2760 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2761 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2762 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2763 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2764 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2768 /* Return the number of multiplications required to calculate
2769 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2770 subroutine of powi_cost. CACHE is an array indicating
2771 which exponents have already been calculated. */
2774 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2776 /* If we've already calculated this exponent, then this evaluation
2777 doesn't require any additional multiplications. */
2782 return powi_lookup_cost (n - powi_table[n], cache)
2783 + powi_lookup_cost (powi_table[n], cache) + 1;
2786 /* Return the number of multiplications required to calculate
2787 powi(x,n) for an arbitrary x, given the exponent N. This
2788 function needs to be kept in sync with expand_powi below. */
2791 powi_cost (HOST_WIDE_INT n)
2793 bool cache[POWI_TABLE_SIZE];
2794 unsigned HOST_WIDE_INT digit;
2795 unsigned HOST_WIDE_INT val;
2801 /* Ignore the reciprocal when calculating the cost. */
2802 val = (n < 0) ? -n : n;
2804 /* Initialize the exponent cache. */
2805 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2810 while (val >= POWI_TABLE_SIZE)
2814 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2815 result += powi_lookup_cost (digit, cache)
2816 + POWI_WINDOW_SIZE + 1;
2817 val >>= POWI_WINDOW_SIZE;
2826 return result + powi_lookup_cost (val, cache);
2829 /* Recursive subroutine of expand_powi. This function takes the array,
2830 CACHE, of already calculated exponents and an exponent N and returns
2831 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2834 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2836 unsigned HOST_WIDE_INT digit;
2840 if (n < POWI_TABLE_SIZE)
2845 target = gen_reg_rtx (mode);
2848 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2849 op1 = expand_powi_1 (mode, powi_table[n], cache);
2853 target = gen_reg_rtx (mode);
2854 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2855 op0 = expand_powi_1 (mode, n - digit, cache);
2856 op1 = expand_powi_1 (mode, digit, cache);
2860 target = gen_reg_rtx (mode);
2861 op0 = expand_powi_1 (mode, n >> 1, cache);
2865 result = expand_mult (mode, op0, op1, target, 0);
2866 if (result != target)
2867 emit_move_insn (target, result);
2871 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2872 floating point operand in mode MODE, and N is the exponent. This
2873 function needs to be kept in sync with powi_cost above. */
2876 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2878 unsigned HOST_WIDE_INT val;
2879 rtx cache[POWI_TABLE_SIZE];
2883 return CONST1_RTX (mode);
2885 val = (n < 0) ? -n : n;
2887 memset (cache, 0, sizeof (cache));
2890 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2892 /* If the original exponent was negative, reciprocate the result. */
2894 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2895 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2900 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2901 a normal call should be emitted rather than expanding the function
2902 in-line. EXP is the expression that is a call to the builtin
2903 function; if convenient, the result should be placed in TARGET. */
2906 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2910 tree type = TREE_TYPE (exp);
2911 REAL_VALUE_TYPE cint, c, c2;
2914 enum machine_mode mode = TYPE_MODE (type);
2916 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2919 arg0 = CALL_EXPR_ARG (exp, 0);
2920 arg1 = CALL_EXPR_ARG (exp, 1);
2922 if (TREE_CODE (arg1) != REAL_CST
2923 || TREE_OVERFLOW (arg1))
2924 return expand_builtin_mathfn_2 (exp, target, subtarget);
2926 /* Handle constant exponents. */
2928 /* For integer valued exponents we can expand to an optimal multiplication
2929 sequence using expand_powi. */
2930 c = TREE_REAL_CST (arg1);
2931 n = real_to_integer (&c);
2932 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2933 if (real_identical (&c, &cint)
2934 && ((n >= -1 && n <= 2)
2935 || (flag_unsafe_math_optimizations
2936 && optimize_insn_for_speed_p ()
2937 && powi_cost (n) <= POWI_MAX_MULTS)))
2939 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2942 op = force_reg (mode, op);
2943 op = expand_powi (op, mode, n);
2948 narg0 = builtin_save_expr (arg0);
2950 /* If the exponent is not integer valued, check if it is half of an integer.
2951 In this case we can expand to sqrt (x) * x**(n/2). */
2952 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2953 if (fn != NULL_TREE)
2955 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2956 n = real_to_integer (&c2);
2957 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2958 if (real_identical (&c2, &cint)
2959 && ((flag_unsafe_math_optimizations
2960 && optimize_insn_for_speed_p ()
2961 && powi_cost (n/2) <= POWI_MAX_MULTS)
2964 tree call_expr = build_call_expr (fn, 1, narg0);
2965 /* Use expand_expr in case the newly built call expression
2966 was folded to a non-call. */
2967 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2970 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2971 op2 = force_reg (mode, op2);
2972 op2 = expand_powi (op2, mode, abs (n / 2));
2973 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2974 0, OPTAB_LIB_WIDEN);
2975 /* If the original exponent was negative, reciprocate the
2978 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2979 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2985 /* Try if the exponent is a third of an integer. In this case
2986 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2987 different from pow (x, 1./3.) due to rounding and behavior
2988 with negative x we need to constrain this transformation to
2989 unsafe math and positive x or finite math. */
2990 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2992 && flag_unsafe_math_optimizations
2993 && (tree_expr_nonnegative_p (arg0)
2994 || !HONOR_NANS (mode)))
2996 REAL_VALUE_TYPE dconst3;
2997 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
2998 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2999 real_round (&c2, mode, &c2);
3000 n = real_to_integer (&c2);
3001 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3002 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3003 real_convert (&c2, mode, &c2);
3004 if (real_identical (&c2, &c)
3005 && ((optimize_insn_for_speed_p ()
3006 && powi_cost (n/3) <= POWI_MAX_MULTS)
3009 tree call_expr = build_call_expr (fn, 1,narg0);
3010 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3011 if (abs (n) % 3 == 2)
3012 op = expand_simple_binop (mode, MULT, op, op, op,
3013 0, OPTAB_LIB_WIDEN);
3016 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3017 op2 = force_reg (mode, op2);
3018 op2 = expand_powi (op2, mode, abs (n / 3));
3019 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3020 0, OPTAB_LIB_WIDEN);
3021 /* If the original exponent was negative, reciprocate the
3024 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3025 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3031 /* Fall back to optab expansion. */
3032 return expand_builtin_mathfn_2 (exp, target, subtarget);
3035 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3036 a normal call should be emitted rather than expanding the function
3037 in-line. EXP is the expression that is a call to the builtin
3038 function; if convenient, the result should be placed in TARGET. */
3041 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3045 enum machine_mode mode;
3046 enum machine_mode mode2;
3048 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3051 arg0 = CALL_EXPR_ARG (exp, 0);
3052 arg1 = CALL_EXPR_ARG (exp, 1);
3053 mode = TYPE_MODE (TREE_TYPE (exp));
3055 /* Handle constant power. */
3057 if (TREE_CODE (arg1) == INTEGER_CST
3058 && !TREE_OVERFLOW (arg1))
3060 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3062 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3063 Otherwise, check the number of multiplications required. */
3064 if ((TREE_INT_CST_HIGH (arg1) == 0
3065 || TREE_INT_CST_HIGH (arg1) == -1)
3066 && ((n >= -1 && n <= 2)
3067 || (optimize_insn_for_speed_p ()
3068 && powi_cost (n) <= POWI_MAX_MULTS)))
3070 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3071 op0 = force_reg (mode, op0);
3072 return expand_powi (op0, mode, n);
3076 /* Emit a libcall to libgcc. */
3078 /* Mode of the 2nd argument must match that of an int. */
3079 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3081 if (target == NULL_RTX)
3082 target = gen_reg_rtx (mode);
3084 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3085 if (GET_MODE (op0) != mode)
3086 op0 = convert_to_mode (mode, op0, 0);
3087 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3088 if (GET_MODE (op1) != mode2)
3089 op1 = convert_to_mode (mode2, op1, 0);
3091 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3092 target, LCT_CONST, mode, 2,
3093 op0, mode, op1, mode2);
3098 /* Expand expression EXP which is a call to the strlen builtin. Return
3099 NULL_RTX if we failed the caller should emit a normal call, otherwise
3100 try to get the result in TARGET, if convenient. */
3103 expand_builtin_strlen (tree exp, rtx target,
3104 enum machine_mode target_mode)
3106 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3112 tree src = CALL_EXPR_ARG (exp, 0);
3113 rtx result, src_reg, char_rtx, before_strlen;
3114 enum machine_mode insn_mode = target_mode, char_mode;
3115 enum insn_code icode = CODE_FOR_nothing;
3118 /* If the length can be computed at compile-time, return it. */
3119 len = c_strlen (src, 0);
3121 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3123 /* If the length can be computed at compile-time and is constant
3124 integer, but there are side-effects in src, evaluate
3125 src for side-effects, then return len.
3126 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3127 can be optimized into: i++; x = 3; */
3128 len = c_strlen (src, 1);
3129 if (len && TREE_CODE (len) == INTEGER_CST)
3131 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3132 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3135 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3137 /* If SRC is not a pointer type, don't do this operation inline. */
3141 /* Bail out if we can't compute strlen in the right mode. */
3142 while (insn_mode != VOIDmode)
3144 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3145 if (icode != CODE_FOR_nothing)
3148 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3150 if (insn_mode == VOIDmode)
3153 /* Make a place to write the result of the instruction. */
3157 && GET_MODE (result) == insn_mode
3158 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3159 result = gen_reg_rtx (insn_mode);
3161 /* Make a place to hold the source address. We will not expand
3162 the actual source until we are sure that the expansion will
3163 not fail -- there are trees that cannot be expanded twice. */
3164 src_reg = gen_reg_rtx (Pmode);
3166 /* Mark the beginning of the strlen sequence so we can emit the
3167 source operand later. */
3168 before_strlen = get_last_insn ();
3170 char_rtx = const0_rtx;
3171 char_mode = insn_data[(int) icode].operand[2].mode;
3172 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3174 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3176 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3177 char_rtx, GEN_INT (align));
3182 /* Now that we are assured of success, expand the source. */
3184 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3186 emit_move_insn (src_reg, pat);
3191 emit_insn_after (pat, before_strlen);
3193 emit_insn_before (pat, get_insns ());
3195 /* Return the value in the proper mode for this function. */
3196 if (GET_MODE (result) == target_mode)
3198 else if (target != 0)
3199 convert_move (target, result, 0);
3201 target = convert_to_mode (target_mode, result, 0);
3207 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3208 caller should emit a normal call, otherwise try to get the result
3209 in TARGET, if convenient (and in mode MODE if that's convenient). */
3212 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3214 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3216 tree type = TREE_TYPE (exp);
3217 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3218 CALL_EXPR_ARG (exp, 1), type);
3220 return expand_expr (result, target, mode, EXPAND_NORMAL);
3225 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3226 caller should emit a normal call, otherwise try to get the result
3227 in TARGET, if convenient (and in mode MODE if that's convenient). */
3230 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3232 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3234 tree type = TREE_TYPE (exp);
3235 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3236 CALL_EXPR_ARG (exp, 1), type);
3238 return expand_expr (result, target, mode, EXPAND_NORMAL);
3240 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3245 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3246 caller should emit a normal call, otherwise try to get the result
3247 in TARGET, if convenient (and in mode MODE if that's convenient). */
3250 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3252 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3254 tree type = TREE_TYPE (exp);
3255 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3256 CALL_EXPR_ARG (exp, 1), type);
3258 return expand_expr (result, target, mode, EXPAND_NORMAL);
3263 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3264 caller should emit a normal call, otherwise try to get the result
3265 in TARGET, if convenient (and in mode MODE if that's convenient). */
3268 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3270 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3272 tree type = TREE_TYPE (exp);
3273 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3274 CALL_EXPR_ARG (exp, 1), type);
3276 return expand_expr (result, target, mode, EXPAND_NORMAL);
3281 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3282 bytes from constant string DATA + OFFSET and return it as target
3286 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3287 enum machine_mode mode)
3289 const char *str = (const char *) data;
3291 gcc_assert (offset >= 0
3292 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3293 <= strlen (str) + 1));
3295 return c_readstr (str + offset, mode);
3298 /* Expand a call EXP to the memcpy builtin.
3299 Return NULL_RTX if we failed, the caller should emit a normal call,
3300 otherwise try to get the result in TARGET, if convenient (and in
3301 mode MODE if that's convenient). */
3304 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3306 tree fndecl = get_callee_fndecl (exp);
3308 if (!validate_arglist (exp,
3309 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3313 tree dest = CALL_EXPR_ARG (exp, 0);
3314 tree src = CALL_EXPR_ARG (exp, 1);
3315 tree len = CALL_EXPR_ARG (exp, 2);
3316 const char *src_str;
3317 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3318 unsigned int dest_align
3319 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3320 rtx dest_mem, src_mem, dest_addr, len_rtx;
3321 tree result = fold_builtin_memory_op (dest, src, len,
3322 TREE_TYPE (TREE_TYPE (fndecl)),
3324 HOST_WIDE_INT expected_size = -1;
3325 unsigned int expected_align = 0;
3326 tree_ann_common_t ann;
3330 while (TREE_CODE (result) == COMPOUND_EXPR)
3332 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3334 result = TREE_OPERAND (result, 1);
3336 return expand_expr (result, target, mode, EXPAND_NORMAL);
3339 /* If DEST is not a pointer type, call the normal function. */
3340 if (dest_align == 0)
3343 /* If either SRC is not a pointer type, don't do this
3344 operation in-line. */
3348 ann = tree_common_ann (exp);
3350 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3352 if (expected_align < dest_align)
3353 expected_align = dest_align;
3354 dest_mem = get_memory_rtx (dest, len);
3355 set_mem_align (dest_mem, dest_align);
3356 len_rtx = expand_normal (len);
3357 src_str = c_getstr (src);
3359 /* If SRC is a string constant and block move would be done
3360 by pieces, we can avoid loading the string from memory
3361 and only stored the computed constants. */
3363 && GET_CODE (len_rtx) == CONST_INT
3364 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3365 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3366 CONST_CAST (char *, src_str),
3369 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3370 builtin_memcpy_read_str,
3371 CONST_CAST (char *, src_str),
3372 dest_align, false, 0);
3373 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3374 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3378 src_mem = get_memory_rtx (src, len);
3379 set_mem_align (src_mem, src_align);
3381 /* Copy word part most expediently. */
3382 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3383 CALL_EXPR_TAILCALL (exp)
3384 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3385 expected_align, expected_size);
3389 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3390 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3396 /* Expand a call EXP to the mempcpy builtin.
3397 Return NULL_RTX if we failed; the caller should emit a normal call,
3398 otherwise try to get the result in TARGET, if convenient (and in
3399 mode MODE if that's convenient). If ENDP is 0 return the
3400 destination pointer, if ENDP is 1 return the end pointer ala
3401 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3405 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3407 if (!validate_arglist (exp,
3408 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3412 tree dest = CALL_EXPR_ARG (exp, 0);
3413 tree src = CALL_EXPR_ARG (exp, 1);
3414 tree len = CALL_EXPR_ARG (exp, 2);
3415 return expand_builtin_mempcpy_args (dest, src, len,
3417 target, mode, /*endp=*/ 1);
3421 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3422 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3423 so that this can also be called without constructing an actual CALL_EXPR.
3424 TYPE is the return type of the call. The other arguments and return value
3425 are the same as for expand_builtin_mempcpy. */
3428 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3429 rtx target, enum machine_mode mode, int endp)
3431 /* If return value is ignored, transform mempcpy into memcpy. */
3432 if (target == const0_rtx)
3434 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3439 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3440 target, mode, EXPAND_NORMAL);
3444 const char *src_str;
3445 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3446 unsigned int dest_align
3447 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3448 rtx dest_mem, src_mem, len_rtx;
3449 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3453 while (TREE_CODE (result) == COMPOUND_EXPR)
3455 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3457 result = TREE_OPERAND (result, 1);
3459 return expand_expr (result, target, mode, EXPAND_NORMAL);
3462 /* If either SRC or DEST is not a pointer type, don't do this
3463 operation in-line. */
3464 if (dest_align == 0 || src_align == 0)
3467 /* If LEN is not constant, call the normal function. */
3468 if (! host_integerp (len, 1))
3471 len_rtx = expand_normal (len);
3472 src_str = c_getstr (src);
3474 /* If SRC is a string constant and block move would be done
3475 by pieces, we can avoid loading the string from memory
3476 and only stored the computed constants. */
3478 && GET_CODE (len_rtx) == CONST_INT
3479 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3480 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3481 CONST_CAST (char *, src_str),
3484 dest_mem = get_memory_rtx (dest, len);
3485 set_mem_align (dest_mem, dest_align);
3486 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3487 builtin_memcpy_read_str,
3488 CONST_CAST (char *, src_str),
3489 dest_align, false, endp);
3490 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3491 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3495 if (GET_CODE (len_rtx) == CONST_INT
3496 && can_move_by_pieces (INTVAL (len_rtx),
3497 MIN (dest_align, src_align)))
3499 dest_mem = get_memory_rtx (dest, len);
3500 set_mem_align (dest_mem, dest_align);
3501 src_mem = get_memory_rtx (src, len);
3502 set_mem_align (src_mem, src_align);
3503 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3504 MIN (dest_align, src_align), endp);
3505 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3506 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3514 /* Expand expression EXP, which is a call to the memmove builtin. Return
3515 NULL_RTX if we failed; the caller should emit a normal call. */
3518 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3520 if (!validate_arglist (exp,
3521 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3525 tree dest = CALL_EXPR_ARG (exp, 0);
3526 tree src = CALL_EXPR_ARG (exp, 1);
3527 tree len = CALL_EXPR_ARG (exp, 2);
3528 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3529 target, mode, ignore);
3533 /* Helper function to do the actual work for expand_builtin_memmove. The
3534 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3535 so that this can also be called without constructing an actual CALL_EXPR.
3536 TYPE is the return type of the call. The other arguments and return value
3537 are the same as for expand_builtin_memmove. */
3540 expand_builtin_memmove_args (tree dest, tree src, tree len,
3541 tree type, rtx target, enum machine_mode mode,
3544 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3548 STRIP_TYPE_NOPS (result);
3549 while (TREE_CODE (result) == COMPOUND_EXPR)
3551 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3553 result = TREE_OPERAND (result, 1);
3555 return expand_expr (result, target, mode, EXPAND_NORMAL);
3558 /* Otherwise, call the normal function. */
3562 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3563 NULL_RTX if we failed the caller should emit a normal call. */
3566 expand_builtin_bcopy (tree exp, int ignore)
3568 tree type = TREE_TYPE (exp);
3569 tree src, dest, size;
3571 if (!validate_arglist (exp,
3572 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3575 src = CALL_EXPR_ARG (exp, 0);
3576 dest = CALL_EXPR_ARG (exp, 1);
3577 size = CALL_EXPR_ARG (exp, 2);
3579 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3580 This is done this way so that if it isn't expanded inline, we fall
3581 back to calling bcopy instead of memmove. */
3582 return expand_builtin_memmove_args (dest, src,
3583 fold_convert (sizetype, size),
3584 type, const0_rtx, VOIDmode,
3589 # define HAVE_movstr 0
3590 # define CODE_FOR_movstr CODE_FOR_nothing
3593 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3594 we failed, the caller should emit a normal call, otherwise try to
3595 get the result in TARGET, if convenient. If ENDP is 0 return the
3596 destination pointer, if ENDP is 1 return the end pointer ala
3597 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3601 expand_movstr (tree dest, tree src, rtx target, int endp)
3607 const struct insn_data * data;
3612 dest_mem = get_memory_rtx (dest, NULL);
3613 src_mem = get_memory_rtx (src, NULL);
3616 target = force_reg (Pmode, XEXP (dest_mem, 0));
3617 dest_mem = replace_equiv_address (dest_mem, target);
3618 end = gen_reg_rtx (Pmode);
3622 if (target == 0 || target == const0_rtx)
3624 end = gen_reg_rtx (Pmode);
3632 data = insn_data + CODE_FOR_movstr;
3634 if (data->operand[0].mode != VOIDmode)
3635 end = gen_lowpart (data->operand[0].mode, end);
3637 insn = data->genfun (end, dest_mem, src_mem);
3643 /* movstr is supposed to set end to the address of the NUL
3644 terminator. If the caller requested a mempcpy-like return value,
3646 if (endp == 1 && target != const0_rtx)
3648 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3649 emit_move_insn (target, force_operand (tem, NULL_RTX));
3655 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3656 NULL_RTX if we failed the caller should emit a normal call, otherwise
3657 try to get the result in TARGET, if convenient (and in mode MODE if that's
3661 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3663 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3665 tree dest = CALL_EXPR_ARG (exp, 0);
3666 tree src = CALL_EXPR_ARG (exp, 1);
3667 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3672 /* Helper function to do the actual work for expand_builtin_strcpy. The
3673 arguments to the builtin_strcpy call DEST and SRC are broken out
3674 so that this can also be called without constructing an actual CALL_EXPR.
3675 The other arguments and return value are the same as for
3676 expand_builtin_strcpy. */
3679 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3680 rtx target, enum machine_mode mode)
3682 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3684 return expand_expr (result, target, mode, EXPAND_NORMAL);
3685 return expand_movstr (dest, src, target, /*endp=*/0);
3689 /* Expand a call EXP to the stpcpy builtin.
3690 Return NULL_RTX if we failed the caller should emit a normal call,
3691 otherwise try to get the result in TARGET, if convenient (and in
3692 mode MODE if that's convenient). */
3695 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3699 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3702 dst = CALL_EXPR_ARG (exp, 0);
3703 src = CALL_EXPR_ARG (exp, 1);
3705 /* If return value is ignored, transform stpcpy into strcpy. */
3706 if (target == const0_rtx)
3708 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3712 return expand_expr (build_call_expr (fn, 2, dst, src),
3713 target, mode, EXPAND_NORMAL);
3720 /* Ensure we get an actual string whose length can be evaluated at
3721 compile-time, not an expression containing a string. This is
3722 because the latter will potentially produce pessimized code
3723 when used to produce the return value. */
3724 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3725 return expand_movstr (dst, src, target, /*endp=*/2);
3727 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3728 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3729 target, mode, /*endp=*/2);
3734 if (TREE_CODE (len) == INTEGER_CST)
3736 rtx len_rtx = expand_normal (len);
3738 if (GET_CODE (len_rtx) == CONST_INT)
3740 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3741 dst, src, target, mode);
3747 if (mode != VOIDmode)
3748 target = gen_reg_rtx (mode);
3750 target = gen_reg_rtx (GET_MODE (ret));
3752 if (GET_MODE (target) != GET_MODE (ret))
3753 ret = gen_lowpart (GET_MODE (target), ret);
3755 ret = plus_constant (ret, INTVAL (len_rtx));
3756 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3764 return expand_movstr (dst, src, target, /*endp=*/2);
3768 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3769 bytes from constant string DATA + OFFSET and return it as target
3773 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3774 enum machine_mode mode)
3776 const char *str = (const char *) data;
3778 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3781 return c_readstr (str + offset, mode);
3784 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3785 NULL_RTX if we failed the caller should emit a normal call. */
3788 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3790 tree fndecl = get_callee_fndecl (exp);
3792 if (validate_arglist (exp,
3793 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3795 tree dest = CALL_EXPR_ARG (exp, 0);
3796 tree src = CALL_EXPR_ARG (exp, 1);
3797 tree len = CALL_EXPR_ARG (exp, 2);
3798 tree slen = c_strlen (src, 1);
3799 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3803 while (TREE_CODE (result) == COMPOUND_EXPR)
3805 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3807 result = TREE_OPERAND (result, 1);
3809 return expand_expr (result, target, mode, EXPAND_NORMAL);
3812 /* We must be passed a constant len and src parameter. */
3813 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3816 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3818 /* We're required to pad with trailing zeros if the requested
3819 len is greater than strlen(s2)+1. In that case try to
3820 use store_by_pieces, if it fails, punt. */
3821 if (tree_int_cst_lt (slen, len))
3823 unsigned int dest_align
3824 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3825 const char *p = c_getstr (src);
3828 if (!p || dest_align == 0 || !host_integerp (len, 1)
3829 || !can_store_by_pieces (tree_low_cst (len, 1),
3830 builtin_strncpy_read_str,
3831 CONST_CAST (char *, p),
3835 dest_mem = get_memory_rtx (dest, len);
3836 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3837 builtin_strncpy_read_str,
3838 CONST_CAST (char *, p), dest_align, false, 0);
3839 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3840 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3847 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3848 bytes from constant string DATA + OFFSET and return it as target
3852 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3853 enum machine_mode mode)
3855 const char *c = (const char *) data;
3856 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3858 memset (p, *c, GET_MODE_SIZE (mode));
3860 return c_readstr (p, mode);
3863 /* Callback routine for store_by_pieces. Return the RTL of a register
3864 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3865 char value given in the RTL register data. For example, if mode is
3866 4 bytes wide, return the RTL for 0x01010101*data. */
3869 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3870 enum machine_mode mode)
3876 size = GET_MODE_SIZE (mode);
3880 p = XALLOCAVEC (char, size);
3881 memset (p, 1, size);
3882 coeff = c_readstr (p, mode);
3884 target = convert_to_mode (mode, (rtx) data, 1);
3885 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3886 return force_reg (mode, target);
3889 /* Expand expression EXP, which is a call to the memset builtin. Return
3890 NULL_RTX if we failed the caller should emit a normal call, otherwise
3891 try to get the result in TARGET, if convenient (and in mode MODE if that's
3895 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3897 if (!validate_arglist (exp,
3898 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3902 tree dest = CALL_EXPR_ARG (exp, 0);
3903 tree val = CALL_EXPR_ARG (exp, 1);
3904 tree len = CALL_EXPR_ARG (exp, 2);
3905 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3909 /* Helper function to do the actual work for expand_builtin_memset. The
3910 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3911 so that this can also be called without constructing an actual CALL_EXPR.
3912 The other arguments and return value are the same as for
3913 expand_builtin_memset. */
3916 expand_builtin_memset_args (tree dest, tree val, tree len,
3917 rtx target, enum machine_mode mode, tree orig_exp)
3920 enum built_in_function fcode;
3922 unsigned int dest_align;
3923 rtx dest_mem, dest_addr, len_rtx;
3924 HOST_WIDE_INT expected_size = -1;
3925 unsigned int expected_align = 0;
3926 tree_ann_common_t ann;
3928 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3930 /* If DEST is not a pointer type, don't do this operation in-line. */
3931 if (dest_align == 0)
3934 ann = tree_common_ann (orig_exp);
3936 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3938 if (expected_align < dest_align)
3939 expected_align = dest_align;
3941 /* If the LEN parameter is zero, return DEST. */
3942 if (integer_zerop (len))
3944 /* Evaluate and ignore VAL in case it has side-effects. */
3945 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3946 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3949 /* Stabilize the arguments in case we fail. */
3950 dest = builtin_save_expr (dest);
3951 val = builtin_save_expr (val);
3952 len = builtin_save_expr (len);
3954 len_rtx = expand_normal (len);
3955 dest_mem = get_memory_rtx (dest, len);
3957 if (TREE_CODE (val) != INTEGER_CST)
3961 val_rtx = expand_normal (val);
3962 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3965 /* Assume that we can memset by pieces if we can store
3966 * the coefficients by pieces (in the required modes).
3967 * We can't pass builtin_memset_gen_str as that emits RTL. */
3969 if (host_integerp (len, 1)
3970 && can_store_by_pieces (tree_low_cst (len, 1),
3971 builtin_memset_read_str, &c, dest_align,
3974 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3976 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3977 builtin_memset_gen_str, val_rtx, dest_align,
3980 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3981 dest_align, expected_align,
3985 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3986 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3990 if (target_char_cast (val, &c))
3995 if (host_integerp (len, 1)
3996 && can_store_by_pieces (tree_low_cst (len, 1),
3997 builtin_memset_read_str, &c, dest_align,
3999 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4000 builtin_memset_read_str, &c, dest_align, true, 0);
4001 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
4002 dest_align, expected_align,
4006 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4007 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4011 set_mem_align (dest_mem, dest_align);
4012 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4013 CALL_EXPR_TAILCALL (orig_exp)
4014 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4015 expected_align, expected_size);
4019 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4020 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4026 fndecl = get_callee_fndecl (orig_exp);
4027 fcode = DECL_FUNCTION_CODE (fndecl);
4028 if (fcode == BUILT_IN_MEMSET)
4029 fn = build_call_expr (fndecl, 3, dest, val, len);
4030 else if (fcode == BUILT_IN_BZERO)
4031 fn = build_call_expr (fndecl, 2, dest, len);
4034 if (TREE_CODE (fn) == CALL_EXPR)
4035 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4036 return expand_call (fn, target, target == const0_rtx);
4039 /* Expand expression EXP, which is a call to the bzero builtin. Return
4040 NULL_RTX if we failed the caller should emit a normal call. */
4043 expand_builtin_bzero (tree exp)
4047 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4050 dest = CALL_EXPR_ARG (exp, 0);
4051 size = CALL_EXPR_ARG (exp, 1);
4053 /* New argument list transforming bzero(ptr x, int y) to
4054 memset(ptr x, int 0, size_t y). This is done this way
4055 so that if it isn't expanded inline, we fallback to
4056 calling bzero instead of memset. */
4058 return expand_builtin_memset_args (dest, integer_zero_node,
4059 fold_convert (sizetype, size),
4060 const0_rtx, VOIDmode, exp);
4063 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4064 caller should emit a normal call, otherwise try to get the result
4065 in TARGET, if convenient (and in mode MODE if that's convenient). */
4068 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4070 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4071 INTEGER_TYPE, VOID_TYPE))
4073 tree type = TREE_TYPE (exp);
4074 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4075 CALL_EXPR_ARG (exp, 1),
4076 CALL_EXPR_ARG (exp, 2), type);
4078 return expand_expr (result, target, mode, EXPAND_NORMAL);
4083 /* Expand expression EXP, which is a call to the memcmp built-in function.
4084 Return NULL_RTX if we failed and the
4085 caller should emit a normal call, otherwise try to get the result in
4086 TARGET, if convenient (and in mode MODE, if that's convenient). */
4089 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4091 if (!validate_arglist (exp,
4092 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4096 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4097 CALL_EXPR_ARG (exp, 1),
4098 CALL_EXPR_ARG (exp, 2));
4100 return expand_expr (result, target, mode, EXPAND_NORMAL);
4103 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4105 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4108 tree arg1 = CALL_EXPR_ARG (exp, 0);
4109 tree arg2 = CALL_EXPR_ARG (exp, 1);
4110 tree len = CALL_EXPR_ARG (exp, 2);
4113 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4115 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4116 enum machine_mode insn_mode;
4118 #ifdef HAVE_cmpmemsi
4120 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4123 #ifdef HAVE_cmpstrnsi
4125 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4130 /* If we don't have POINTER_TYPE, call the function. */
4131 if (arg1_align == 0 || arg2_align == 0)
4134 /* Make a place to write the result of the instruction. */
4137 && REG_P (result) && GET_MODE (result) == insn_mode
4138 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4139 result = gen_reg_rtx (insn_mode);
4141 arg1_rtx = get_memory_rtx (arg1, len);
4142 arg2_rtx = get_memory_rtx (arg2, len);
4143 arg3_rtx = expand_normal (len);
4145 /* Set MEM_SIZE as appropriate. */
4146 if (GET_CODE (arg3_rtx) == CONST_INT)
4148 set_mem_size (arg1_rtx, arg3_rtx);
4149 set_mem_size (arg2_rtx, arg3_rtx);
4152 #ifdef HAVE_cmpmemsi
4154 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4155 GEN_INT (MIN (arg1_align, arg2_align)));
4158 #ifdef HAVE_cmpstrnsi
4160 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4161 GEN_INT (MIN (arg1_align, arg2_align)));
4169 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4170 TYPE_MODE (integer_type_node), 3,
4171 XEXP (arg1_rtx, 0), Pmode,
4172 XEXP (arg2_rtx, 0), Pmode,
4173 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4174 TYPE_UNSIGNED (sizetype)),
4175 TYPE_MODE (sizetype));
4177 /* Return the value in the proper mode for this function. */
4178 mode = TYPE_MODE (TREE_TYPE (exp));
4179 if (GET_MODE (result) == mode)
4181 else if (target != 0)
4183 convert_move (target, result, 0);
4187 return convert_to_mode (mode, result, 0);
4194 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4195 if we failed the caller should emit a normal call, otherwise try to get
4196 the result in TARGET, if convenient. */
4199 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4201 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4205 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4206 CALL_EXPR_ARG (exp, 1));
4208 return expand_expr (result, target, mode, EXPAND_NORMAL);
4211 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4212 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4213 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4215 rtx arg1_rtx, arg2_rtx;
4216 rtx result, insn = NULL_RTX;
4218 tree arg1 = CALL_EXPR_ARG (exp, 0);
4219 tree arg2 = CALL_EXPR_ARG (exp, 1);
4222 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4224 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4226 /* If we don't have POINTER_TYPE, call the function. */
4227 if (arg1_align == 0 || arg2_align == 0)
4230 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4231 arg1 = builtin_save_expr (arg1);
4232 arg2 = builtin_save_expr (arg2);
4234 arg1_rtx = get_memory_rtx (arg1, NULL);
4235 arg2_rtx = get_memory_rtx (arg2, NULL);
4237 #ifdef HAVE_cmpstrsi
4238 /* Try to call cmpstrsi. */
4241 enum machine_mode insn_mode
4242 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4244 /* Make a place to write the result of the instruction. */
4247 && REG_P (result) && GET_MODE (result) == insn_mode
4248 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4249 result = gen_reg_rtx (insn_mode);
4251 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4252 GEN_INT (MIN (arg1_align, arg2_align)));
4255 #ifdef HAVE_cmpstrnsi
4256 /* Try to determine at least one length and call cmpstrnsi. */
4257 if (!insn && HAVE_cmpstrnsi)
4262 enum machine_mode insn_mode
4263 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4264 tree len1 = c_strlen (arg1, 1);
4265 tree len2 = c_strlen (arg2, 1);
4268 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4270 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4272 /* If we don't have a constant length for the first, use the length
4273 of the second, if we know it. We don't require a constant for
4274 this case; some cost analysis could be done if both are available
4275 but neither is constant. For now, assume they're equally cheap,
4276 unless one has side effects. If both strings have constant lengths,
4283 else if (TREE_SIDE_EFFECTS (len1))
4285 else if (TREE_SIDE_EFFECTS (len2))
4287 else if (TREE_CODE (len1) != INTEGER_CST)
4289 else if (TREE_CODE (len2) != INTEGER_CST)
4291 else if (tree_int_cst_lt (len1, len2))
4296 /* If both arguments have side effects, we cannot optimize. */
4297 if (!len || TREE_SIDE_EFFECTS (len))
4300 arg3_rtx = expand_normal (len);
4302 /* Make a place to write the result of the instruction. */
4305 && REG_P (result) && GET_MODE (result) == insn_mode
4306 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4307 result = gen_reg_rtx (insn_mode);
4309 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4310 GEN_INT (MIN (arg1_align, arg2_align)));
4318 /* Return the value in the proper mode for this function. */
4319 mode = TYPE_MODE (TREE_TYPE (exp));
4320 if (GET_MODE (result) == mode)
4323 return convert_to_mode (mode, result, 0);
4324 convert_move (target, result, 0);
4328 /* Expand the library call ourselves using a stabilized argument
4329 list to avoid re-evaluating the function's arguments twice. */
4330 #ifdef HAVE_cmpstrnsi
4333 fndecl = get_callee_fndecl (exp);
4334 fn = build_call_expr (fndecl, 2, arg1, arg2);
4335 if (TREE_CODE (fn) == CALL_EXPR)
4336 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4337 return expand_call (fn, target, target == const0_rtx);
4343 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4344 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4345 the result in TARGET, if convenient. */
4348 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4350 if (!validate_arglist (exp,
4351 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4355 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4356 CALL_EXPR_ARG (exp, 1),
4357 CALL_EXPR_ARG (exp, 2));
4359 return expand_expr (result, target, mode, EXPAND_NORMAL);
4362 /* If c_strlen can determine an expression for one of the string
4363 lengths, and it doesn't have side effects, then emit cmpstrnsi
4364 using length MIN(strlen(string)+1, arg3). */
4365 #ifdef HAVE_cmpstrnsi
4368 tree len, len1, len2;
4369 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4372 tree arg1 = CALL_EXPR_ARG (exp, 0);
4373 tree arg2 = CALL_EXPR_ARG (exp, 1);
4374 tree arg3 = CALL_EXPR_ARG (exp, 2);
4377 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4379 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4380 enum machine_mode insn_mode
4381 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4383 len1 = c_strlen (arg1, 1);
4384 len2 = c_strlen (arg2, 1);
4387 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4389 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4391 /* If we don't have a constant length for the first, use the length
4392 of the second, if we know it. We don't require a constant for
4393 this case; some cost analysis could be done if both are available
4394 but neither is constant. For now, assume they're equally cheap,
4395 unless one has side effects. If both strings have constant lengths,
4402 else if (TREE_SIDE_EFFECTS (len1))
4404 else if (TREE_SIDE_EFFECTS (len2))
4406 else if (TREE_CODE (len1) != INTEGER_CST)
4408 else if (TREE_CODE (len2) != INTEGER_CST)
4410 else if (tree_int_cst_lt (len1, len2))
4415 /* If both arguments have side effects, we cannot optimize. */
4416 if (!len || TREE_SIDE_EFFECTS (len))
4419 /* The actual new length parameter is MIN(len,arg3). */
4420 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4421 fold_convert (TREE_TYPE (len), arg3));
4423 /* If we don't have POINTER_TYPE, call the function. */
4424 if (arg1_align == 0 || arg2_align == 0)
4427 /* Make a place to write the result of the instruction. */
4430 && REG_P (result) && GET_MODE (result) == insn_mode
4431 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4432 result = gen_reg_rtx (insn_mode);
4434 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4435 arg1 = builtin_save_expr (arg1);
4436 arg2 = builtin_save_expr (arg2);
4437 len = builtin_save_expr (len);
4439 arg1_rtx = get_memory_rtx (arg1, len);
4440 arg2_rtx = get_memory_rtx (arg2, len);
4441 arg3_rtx = expand_normal (len);
4442 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4443 GEN_INT (MIN (arg1_align, arg2_align)));
4448 /* Return the value in the proper mode for this function. */
4449 mode = TYPE_MODE (TREE_TYPE (exp));
4450 if (GET_MODE (result) == mode)
4453 return convert_to_mode (mode, result, 0);
4454 convert_move (target, result, 0);
4458 /* Expand the library call ourselves using a stabilized argument
4459 list to avoid re-evaluating the function's arguments twice. */
4460 fndecl = get_callee_fndecl (exp);
4461 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4462 if (TREE_CODE (fn) == CALL_EXPR)
4463 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4464 return expand_call (fn, target, target == const0_rtx);
4470 /* Expand expression EXP, which is a call to the strcat builtin.
4471 Return NULL_RTX if we failed the caller should emit a normal call,
4472 otherwise try to get the result in TARGET, if convenient. */
4475 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4477 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4481 tree dst = CALL_EXPR_ARG (exp, 0);
4482 tree src = CALL_EXPR_ARG (exp, 1);
4483 const char *p = c_getstr (src);
4485 /* If the string length is zero, return the dst parameter. */
4486 if (p && *p == '\0')
4487 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4489 if (optimize_insn_for_speed_p ())
4491 /* See if we can store by pieces into (dst + strlen(dst)). */
4492 tree newsrc, newdst,
4493 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4496 /* Stabilize the argument list. */
4497 newsrc = builtin_save_expr (src);
4498 dst = builtin_save_expr (dst);
4502 /* Create strlen (dst). */
4503 newdst = build_call_expr (strlen_fn, 1, dst);
4504 /* Create (dst p+ strlen (dst)). */
4506 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4507 newdst = builtin_save_expr (newdst);
4509 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4511 end_sequence (); /* Stop sequence. */
4515 /* Output the entire sequence. */
4516 insns = get_insns ();
4520 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4527 /* Expand expression EXP, which is a call to the strncat builtin.
4528 Return NULL_RTX if we failed the caller should emit a normal call,
4529 otherwise try to get the result in TARGET, if convenient. */
4532 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4534 if (validate_arglist (exp,
4535 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4537 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4538 CALL_EXPR_ARG (exp, 1),
4539 CALL_EXPR_ARG (exp, 2));
4541 return expand_expr (result, target, mode, EXPAND_NORMAL);
4546 /* Expand expression EXP, which is a call to the strspn builtin.
4547 Return NULL_RTX if we failed the caller should emit a normal call,
4548 otherwise try to get the result in TARGET, if convenient. */
4551 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4553 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4555 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4556 CALL_EXPR_ARG (exp, 1));
4558 return expand_expr (result, target, mode, EXPAND_NORMAL);
4563 /* Expand expression EXP, which is a call to the strcspn builtin.
4564 Return NULL_RTX if we failed the caller should emit a normal call,
4565 otherwise try to get the result in TARGET, if convenient. */
4568 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4570 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4572 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4573 CALL_EXPR_ARG (exp, 1));
4575 return expand_expr (result, target, mode, EXPAND_NORMAL);
4580 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4581 if that's convenient. */
4584 expand_builtin_saveregs (void)
4588 /* Don't do __builtin_saveregs more than once in a function.
4589 Save the result of the first call and reuse it. */
4590 if (saveregs_value != 0)
4591 return saveregs_value;
4593 /* When this function is called, it means that registers must be
4594 saved on entry to this function. So we migrate the call to the
4595 first insn of this function. */
4599 /* Do whatever the machine needs done in this case. */
4600 val = targetm.calls.expand_builtin_saveregs ();
4605 saveregs_value = val;
4607 /* Put the insns after the NOTE that starts the function. If this
4608 is inside a start_sequence, make the outer-level insn chain current, so
4609 the code is placed at the start of the function. */
4610 push_topmost_sequence ();
4611 emit_insn_after (seq, entry_of_function ());
4612 pop_topmost_sequence ();
4617 /* __builtin_args_info (N) returns word N of the arg space info
4618 for the current function. The number and meanings of words
4619 is controlled by the definition of CUMULATIVE_ARGS. */
4622 expand_builtin_args_info (tree exp)
4624 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4625 int *word_ptr = (int *) &crtl->args.info;
4627 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4629 if (call_expr_nargs (exp) != 0)
4631 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4632 error ("argument of %<__builtin_args_info%> must be constant");
4635 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4637 if (wordnum < 0 || wordnum >= nwords)
4638 error ("argument of %<__builtin_args_info%> out of range");
4640 return GEN_INT (word_ptr[wordnum]);
4644 error ("missing argument in %<__builtin_args_info%>");
4649 /* Expand a call to __builtin_next_arg. */
4652 expand_builtin_next_arg (void)
4654 /* Checking arguments is already done in fold_builtin_next_arg
4655 that must be called before this function. */
4656 return expand_binop (ptr_mode, add_optab,
4657 crtl->args.internal_arg_pointer,
4658 crtl->args.arg_offset_rtx,
4659 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4662 /* Make it easier for the backends by protecting the valist argument
4663 from multiple evaluations. */
4666 stabilize_va_list (tree valist, int needs_lvalue)
4668 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4670 gcc_assert (vatype != NULL_TREE);
4672 if (TREE_CODE (vatype) == ARRAY_TYPE)
4674 if (TREE_SIDE_EFFECTS (valist))
4675 valist = save_expr (valist);
4677 /* For this case, the backends will be expecting a pointer to
4678 vatype, but it's possible we've actually been given an array
4679 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4681 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4683 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4684 valist = build_fold_addr_expr_with_type (valist, p1);
4693 if (! TREE_SIDE_EFFECTS (valist))
4696 pt = build_pointer_type (vatype);
4697 valist = fold_build1 (ADDR_EXPR, pt, valist);
4698 TREE_SIDE_EFFECTS (valist) = 1;
4701 if (TREE_SIDE_EFFECTS (valist))
4702 valist = save_expr (valist);
4703 valist = build_fold_indirect_ref (valist);
4709 /* The "standard" definition of va_list is void*. */
4712 std_build_builtin_va_list (void)
4714 return ptr_type_node;
4717 /* The "standard" abi va_list is va_list_type_node. */
4720 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4722 return va_list_type_node;
4725 /* The "standard" type of va_list is va_list_type_node. */
4728 std_canonical_va_list_type (tree type)
4732 if (INDIRECT_REF_P (type))
4733 type = TREE_TYPE (type);
4734 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4735 type = TREE_TYPE (type);
4736 wtype = va_list_type_node;
4738 /* Treat structure va_list types. */
4739 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4740 htype = TREE_TYPE (htype);
4741 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4743 /* If va_list is an array type, the argument may have decayed
4744 to a pointer type, e.g. by being passed to another function.
4745 In that case, unwrap both types so that we can compare the
4746 underlying records. */
4747 if (TREE_CODE (htype) == ARRAY_TYPE
4748 || POINTER_TYPE_P (htype))
4750 wtype = TREE_TYPE (wtype);
4751 htype = TREE_TYPE (htype);
4754 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4755 return va_list_type_node;
4760 /* The "standard" implementation of va_start: just assign `nextarg' to
4764 std_expand_builtin_va_start (tree valist, rtx nextarg)
4766 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4767 convert_move (va_r, nextarg, 0);
4770 /* Expand EXP, a call to __builtin_va_start. */
4773 expand_builtin_va_start (tree exp)
4778 if (call_expr_nargs (exp) < 2)
4780 error ("too few arguments to function %<va_start%>");
4784 if (fold_builtin_next_arg (exp, true))
4787 nextarg = expand_builtin_next_arg ();
4788 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4790 if (targetm.expand_builtin_va_start)
4791 targetm.expand_builtin_va_start (valist, nextarg);
4793 std_expand_builtin_va_start (valist, nextarg);
4798 /* The "standard" implementation of va_arg: read the value from the
4799 current (padded) address and increment by the (padded) size. */
4802 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4805 tree addr, t, type_size, rounded_size, valist_tmp;
4806 unsigned HOST_WIDE_INT align, boundary;
4809 #ifdef ARGS_GROW_DOWNWARD
4810 /* All of the alignment and movement below is for args-grow-up machines.
4811 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4812 implement their own specialized gimplify_va_arg_expr routines. */
4816 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4818 type = build_pointer_type (type);
4820 align = PARM_BOUNDARY / BITS_PER_UNIT;
4821 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4823 /* When we align parameter on stack for caller, if the parameter
4824 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4825 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4826 here with caller. */
4827 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4828 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4830 boundary /= BITS_PER_UNIT;
4832 /* Hoist the valist value into a temporary for the moment. */
4833 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4835 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4836 requires greater alignment, we must perform dynamic alignment. */
4837 if (boundary > align
4838 && !integer_zerop (TYPE_SIZE (type)))
4840 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4841 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4842 valist_tmp, size_int (boundary - 1)));
4843 gimplify_and_add (t, pre_p);
4845 t = fold_convert (sizetype, valist_tmp);
4846 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4847 fold_convert (TREE_TYPE (valist),
4848 fold_build2 (BIT_AND_EXPR, sizetype, t,
4849 size_int (-boundary))));
4850 gimplify_and_add (t, pre_p);
4855 /* If the actual alignment is less than the alignment of the type,
4856 adjust the type accordingly so that we don't assume strict alignment
4857 when dereferencing the pointer. */
4858 boundary *= BITS_PER_UNIT;
4859 if (boundary < TYPE_ALIGN (type))
4861 type = build_variant_type_copy (type);
4862 TYPE_ALIGN (type) = boundary;
4865 /* Compute the rounded size of the type. */
4866 type_size = size_in_bytes (type);
4867 rounded_size = round_up (type_size, align);
4869 /* Reduce rounded_size so it's sharable with the postqueue. */
4870 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4874 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4876 /* Small args are padded downward. */
4877 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4878 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4879 size_binop (MINUS_EXPR, rounded_size, type_size));
4880 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4883 /* Compute new value for AP. */
4884 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4885 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4886 gimplify_and_add (t, pre_p);
4888 addr = fold_convert (build_pointer_type (type), addr);
4891 addr = build_va_arg_indirect_ref (addr);
4893 return build_va_arg_indirect_ref (addr);
4896 /* Build an indirect-ref expression over the given TREE, which represents a
4897 piece of a va_arg() expansion. */
4899 build_va_arg_indirect_ref (tree addr)
4901 addr = build_fold_indirect_ref (addr);
4903 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4909 /* Return a dummy expression of type TYPE in order to keep going after an
4913 dummy_object (tree type)
4915 tree t = build_int_cst (build_pointer_type (type), 0);
4916 return build1 (INDIRECT_REF, type, t);
4919 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4920 builtin function, but a very special sort of operator. */
4922 enum gimplify_status
4923 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4925 tree promoted_type, have_va_type;
4926 tree valist = TREE_OPERAND (*expr_p, 0);
4927 tree type = TREE_TYPE (*expr_p);
4930 /* Verify that valist is of the proper type. */
4931 have_va_type = TREE_TYPE (valist);
4932 if (have_va_type == error_mark_node)
4934 have_va_type = targetm.canonical_va_list_type (have_va_type);
4936 if (have_va_type == NULL_TREE)
4938 error ("first argument to %<va_arg%> not of type %<va_list%>");
4942 /* Generate a diagnostic for requesting data of a type that cannot
4943 be passed through `...' due to type promotion at the call site. */
4944 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4947 static bool gave_help;
4950 /* Unfortunately, this is merely undefined, rather than a constraint
4951 violation, so we cannot make this an error. If this call is never
4952 executed, the program is still strictly conforming. */
4953 warned = warning (0, "%qT is promoted to %qT when passed through %<...%>",
4954 type, promoted_type);
4955 if (!gave_help && warned)
4958 inform (input_location, "(so you should pass %qT not %qT to %<va_arg%>)",
4959 promoted_type, type);
4962 /* We can, however, treat "undefined" any way we please.
4963 Call abort to encourage the user to fix the program. */
4965 inform (input_location, "if this code is reached, the program will abort");
4966 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4967 gimplify_and_add (t, pre_p);
4969 /* This is dead code, but go ahead and finish so that the
4970 mode of the result comes out right. */
4971 *expr_p = dummy_object (type);
4976 /* Make it easier for the backends by protecting the valist argument
4977 from multiple evaluations. */
4978 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4980 /* For this case, the backends will be expecting a pointer to
4981 TREE_TYPE (abi), but it's possible we've
4982 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4984 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4986 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4987 valist = build_fold_addr_expr_with_type (valist, p1);
4990 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4993 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4995 if (!targetm.gimplify_va_arg_expr)
4996 /* FIXME: Once most targets are converted we should merely
4997 assert this is non-null. */
5000 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
5005 /* Expand EXP, a call to __builtin_va_end. */
5008 expand_builtin_va_end (tree exp)
5010 tree valist = CALL_EXPR_ARG (exp, 0);
5012 /* Evaluate for side effects, if needed. I hate macros that don't
5014 if (TREE_SIDE_EFFECTS (valist))
5015 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5020 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5021 builtin rather than just as an assignment in stdarg.h because of the
5022 nastiness of array-type va_list types. */
5025 expand_builtin_va_copy (tree exp)
5029 dst = CALL_EXPR_ARG (exp, 0);
5030 src = CALL_EXPR_ARG (exp, 1);
5032 dst = stabilize_va_list (dst, 1);
5033 src = stabilize_va_list (src, 0);
5035 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5037 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5039 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5040 TREE_SIDE_EFFECTS (t) = 1;
5041 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5045 rtx dstb, srcb, size;
5047 /* Evaluate to pointers. */
5048 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5049 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5050 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5051 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5053 dstb = convert_memory_address (Pmode, dstb);
5054 srcb = convert_memory_address (Pmode, srcb);
5056 /* "Dereference" to BLKmode memories. */
5057 dstb = gen_rtx_MEM (BLKmode, dstb);
5058 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5059 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5060 srcb = gen_rtx_MEM (BLKmode, srcb);
5061 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5062 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5065 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5071 /* Expand a call to one of the builtin functions __builtin_frame_address or
5072 __builtin_return_address. */
5075 expand_builtin_frame_address (tree fndecl, tree exp)
5077 /* The argument must be a nonnegative integer constant.
5078 It counts the number of frames to scan up the stack.
5079 The value is the return address saved in that frame. */
5080 if (call_expr_nargs (exp) == 0)
5081 /* Warning about missing arg was already issued. */
5083 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5085 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5086 error ("invalid argument to %<__builtin_frame_address%>");
5088 error ("invalid argument to %<__builtin_return_address%>");
5094 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5095 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5097 /* Some ports cannot access arbitrary stack frames. */
5100 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5101 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5103 warning (0, "unsupported argument to %<__builtin_return_address%>");
5107 /* For __builtin_frame_address, return what we've got. */
5108 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5112 && ! CONSTANT_P (tem))
5113 tem = copy_to_mode_reg (Pmode, tem);
5118 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5119 we failed and the caller should emit a normal call, otherwise try to get
5120 the result in TARGET, if convenient. */
5123 expand_builtin_alloca (tree exp, rtx target)
5128 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5129 should always expand to function calls. These can be intercepted
5134 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5137 /* Compute the argument. */
5138 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5140 /* Allocate the desired space. */
5141 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5142 result = convert_memory_address (ptr_mode, result);
5147 /* Expand a call to a bswap builtin with argument ARG0. MODE
5148 is the mode to expand with. */
5151 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5153 enum machine_mode mode;
5157 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5160 arg = CALL_EXPR_ARG (exp, 0);
5161 mode = TYPE_MODE (TREE_TYPE (arg));
5162 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5164 target = expand_unop (mode, bswap_optab, op0, target, 1);
5166 gcc_assert (target);
5168 return convert_to_mode (mode, target, 0);
5171 /* Expand a call to a unary builtin in EXP.
5172 Return NULL_RTX if a normal call should be emitted rather than expanding the
5173 function in-line. If convenient, the result should be placed in TARGET.
5174 SUBTARGET may be used as the target for computing one of EXP's operands. */
5177 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5178 rtx subtarget, optab op_optab)
5182 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5185 /* Compute the argument. */
5186 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5187 VOIDmode, EXPAND_NORMAL);
5188 /* Compute op, into TARGET if possible.
5189 Set TARGET to wherever the result comes back. */
5190 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5191 op_optab, op0, target, 1);
5192 gcc_assert (target);
5194 return convert_to_mode (target_mode, target, 0);
5197 /* If the string passed to fputs is a constant and is one character
5198 long, we attempt to transform this call into __builtin_fputc(). */
5201 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5203 /* Verify the arguments in the original call. */
5204 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5206 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5207 CALL_EXPR_ARG (exp, 1),
5208 (target == const0_rtx),
5209 unlocked, NULL_TREE);
5211 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5216 /* Expand a call to __builtin_expect. We just return our argument
5217 as the builtin_expect semantic should've been already executed by
5218 tree branch prediction pass. */
5221 expand_builtin_expect (tree exp, rtx target)
5225 if (call_expr_nargs (exp) < 2)
5227 arg = CALL_EXPR_ARG (exp, 0);
5228 c = CALL_EXPR_ARG (exp, 1);
5230 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5231 /* When guessing was done, the hints should be already stripped away. */
5232 gcc_assert (!flag_guess_branch_prob
5233 || optimize == 0 || errorcount || sorrycount);
5238 expand_builtin_trap (void)
5242 emit_insn (gen_trap ());
5245 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5249 /* Expand EXP, a call to fabs, fabsf or fabsl.
5250 Return NULL_RTX if a normal call should be emitted rather than expanding
5251 the function inline. If convenient, the result should be placed
5252 in TARGET. SUBTARGET may be used as the target for computing
5256 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5258 enum machine_mode mode;
5262 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5265 arg = CALL_EXPR_ARG (exp, 0);
5266 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5267 mode = TYPE_MODE (TREE_TYPE (arg));
5268 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5269 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5272 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5273 Return NULL is a normal call should be emitted rather than expanding the
5274 function inline. If convenient, the result should be placed in TARGET.
5275 SUBTARGET may be used as the target for computing the operand. */
5278 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5283 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5286 arg = CALL_EXPR_ARG (exp, 0);
5287 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5289 arg = CALL_EXPR_ARG (exp, 1);
5290 op1 = expand_normal (arg);
5292 return expand_copysign (op0, op1, target);
5295 /* Create a new constant string literal and return a char* pointer to it.
5296 The STRING_CST value is the LEN characters at STR. */
5298 build_string_literal (int len, const char *str)
5300 tree t, elem, index, type;
5302 t = build_string (len, str);
5303 elem = build_type_variant (char_type_node, 1, 0);
5304 index = build_index_type (size_int (len - 1));
5305 type = build_array_type (elem, index);
5306 TREE_TYPE (t) = type;
5307 TREE_CONSTANT (t) = 1;
5308 TREE_READONLY (t) = 1;
5309 TREE_STATIC (t) = 1;
5311 type = build_pointer_type (elem);
5312 t = build1 (ADDR_EXPR, type,
5313 build4 (ARRAY_REF, elem,
5314 t, integer_zero_node, NULL_TREE, NULL_TREE));
5318 /* Expand EXP, a call to printf or printf_unlocked.
5319 Return NULL_RTX if a normal call should be emitted rather than transforming
5320 the function inline. If convenient, the result should be placed in
5321 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5324 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5327 /* If we're using an unlocked function, assume the other unlocked
5328 functions exist explicitly. */
5329 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5330 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5331 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5332 : implicit_built_in_decls[BUILT_IN_PUTS];
5333 const char *fmt_str;
5336 int nargs = call_expr_nargs (exp);
5338 /* If the return value is used, don't do the transformation. */
5339 if (target != const0_rtx)
5342 /* Verify the required arguments in the original call. */
5345 fmt = CALL_EXPR_ARG (exp, 0);
5346 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5349 /* Check whether the format is a literal string constant. */
5350 fmt_str = c_getstr (fmt);
5351 if (fmt_str == NULL)
5354 if (!init_target_chars ())
5357 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5358 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5361 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5364 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5366 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5367 else if (strcmp (fmt_str, target_percent_c) == 0)
5370 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5373 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5377 /* We can't handle anything else with % args or %% ... yet. */
5378 if (strchr (fmt_str, target_percent))
5384 /* If the format specifier was "", printf does nothing. */
5385 if (fmt_str[0] == '\0')
5387 /* If the format specifier has length of 1, call putchar. */
5388 if (fmt_str[1] == '\0')
5390 /* Given printf("c"), (where c is any one character,)
5391 convert "c"[0] to an int and pass that to the replacement
5393 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5395 fn = build_call_expr (fn_putchar, 1, arg);
5399 /* If the format specifier was "string\n", call puts("string"). */
5400 size_t len = strlen (fmt_str);
5401 if ((unsigned char)fmt_str[len - 1] == target_newline)
5403 /* Create a NUL-terminated string that's one char shorter
5404 than the original, stripping off the trailing '\n'. */
5405 char *newstr = XALLOCAVEC (char, len);
5406 memcpy (newstr, fmt_str, len - 1);
5407 newstr[len - 1] = 0;
5408 arg = build_string_literal (len, newstr);
5410 fn = build_call_expr (fn_puts, 1, arg);
5413 /* We'd like to arrange to call fputs(string,stdout) here,
5414 but we need stdout and don't have a way to get it yet. */
5421 if (TREE_CODE (fn) == CALL_EXPR)
5422 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5423 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5426 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5427 Return NULL_RTX if a normal call should be emitted rather than transforming
5428 the function inline. If convenient, the result should be placed in
5429 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5432 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5435 /* If we're using an unlocked function, assume the other unlocked
5436 functions exist explicitly. */
5437 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5438 : implicit_built_in_decls[BUILT_IN_FPUTC];
5439 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5440 : implicit_built_in_decls[BUILT_IN_FPUTS];
5441 const char *fmt_str;
5444 int nargs = call_expr_nargs (exp);
5446 /* If the return value is used, don't do the transformation. */
5447 if (target != const0_rtx)
5450 /* Verify the required arguments in the original call. */
5453 fp = CALL_EXPR_ARG (exp, 0);
5454 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5456 fmt = CALL_EXPR_ARG (exp, 1);
5457 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5460 /* Check whether the format is a literal string constant. */
5461 fmt_str = c_getstr (fmt);
5462 if (fmt_str == NULL)
5465 if (!init_target_chars ())
5468 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5469 if (strcmp (fmt_str, target_percent_s) == 0)
5472 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5474 arg = CALL_EXPR_ARG (exp, 2);
5476 fn = build_call_expr (fn_fputs, 2, arg, fp);
5478 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5479 else if (strcmp (fmt_str, target_percent_c) == 0)
5482 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5484 arg = CALL_EXPR_ARG (exp, 2);
5486 fn = build_call_expr (fn_fputc, 2, arg, fp);
5490 /* We can't handle anything else with % args or %% ... yet. */
5491 if (strchr (fmt_str, target_percent))
5497 /* If the format specifier was "", fprintf does nothing. */
5498 if (fmt_str[0] == '\0')
5500 /* Evaluate and ignore FILE* argument for side-effects. */
5501 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5505 /* When "string" doesn't contain %, replace all cases of
5506 fprintf(stream,string) with fputs(string,stream). The fputs
5507 builtin will take care of special cases like length == 1. */
5509 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5514 if (TREE_CODE (fn) == CALL_EXPR)
5515 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5516 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5519 /* Expand a call EXP to sprintf. Return NULL_RTX if
5520 a normal call should be emitted rather than expanding the function
5521 inline. If convenient, the result should be placed in TARGET with
5525 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5528 const char *fmt_str;
5529 int nargs = call_expr_nargs (exp);
5531 /* Verify the required arguments in the original call. */
5534 dest = CALL_EXPR_ARG (exp, 0);
5535 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5537 fmt = CALL_EXPR_ARG (exp, 0);
5538 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5541 /* Check whether the format is a literal string constant. */
5542 fmt_str = c_getstr (fmt);
5543 if (fmt_str == NULL)
5546 if (!init_target_chars ())
5549 /* If the format doesn't contain % args or %%, use strcpy. */
5550 if (strchr (fmt_str, target_percent) == 0)
5552 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5555 if ((nargs > 2) || ! fn)
5557 expand_expr (build_call_expr (fn, 2, dest, fmt),
5558 const0_rtx, VOIDmode, EXPAND_NORMAL);
5559 if (target == const0_rtx)
5561 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5562 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5564 /* If the format is "%s", use strcpy if the result isn't used. */
5565 else if (strcmp (fmt_str, target_percent_s) == 0)
5568 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5574 arg = CALL_EXPR_ARG (exp, 2);
5575 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5578 if (target != const0_rtx)
5580 len = c_strlen (arg, 1);
5581 if (! len || TREE_CODE (len) != INTEGER_CST)
5587 expand_expr (build_call_expr (fn, 2, dest, arg),
5588 const0_rtx, VOIDmode, EXPAND_NORMAL);
5590 if (target == const0_rtx)
5592 return expand_expr (len, target, mode, EXPAND_NORMAL);
5598 /* Expand a call to either the entry or exit function profiler. */
5601 expand_builtin_profile_func (bool exitp)
5603 rtx this_rtx, which;
5605 this_rtx = DECL_RTL (current_function_decl);
5606 gcc_assert (MEM_P (this_rtx));
5607 this_rtx = XEXP (this_rtx, 0);
5610 which = profile_function_exit_libfunc;
5612 which = profile_function_entry_libfunc;
5614 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5615 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5622 /* Expand a call to __builtin___clear_cache. */
5625 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5627 #ifndef HAVE_clear_cache
5628 #ifdef CLEAR_INSN_CACHE
5629 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5630 does something. Just do the default expansion to a call to
5634 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5635 does nothing. There is no need to call it. Do nothing. */
5637 #endif /* CLEAR_INSN_CACHE */
5639 /* We have a "clear_cache" insn, and it will handle everything. */
5641 rtx begin_rtx, end_rtx;
5642 enum insn_code icode;
5644 /* We must not expand to a library call. If we did, any
5645 fallback library function in libgcc that might contain a call to
5646 __builtin___clear_cache() would recurse infinitely. */
5647 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5649 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5653 if (HAVE_clear_cache)
5655 icode = CODE_FOR_clear_cache;
5657 begin = CALL_EXPR_ARG (exp, 0);
5658 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5659 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5660 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5661 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5663 end = CALL_EXPR_ARG (exp, 1);
5664 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5665 end_rtx = convert_memory_address (Pmode, end_rtx);
5666 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5667 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5669 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5672 #endif /* HAVE_clear_cache */
5675 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5678 round_trampoline_addr (rtx tramp)
5680 rtx temp, addend, mask;
5682 /* If we don't need too much alignment, we'll have been guaranteed
5683 proper alignment by get_trampoline_type. */
5684 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5687 /* Round address up to desired boundary. */
5688 temp = gen_reg_rtx (Pmode);
5689 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5690 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5692 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5693 temp, 0, OPTAB_LIB_WIDEN);
5694 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5695 temp, 0, OPTAB_LIB_WIDEN);
5701 expand_builtin_init_trampoline (tree exp)
5703 tree t_tramp, t_func, t_chain;
5704 rtx r_tramp, r_func, r_chain;
5705 #ifdef TRAMPOLINE_TEMPLATE
5709 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5710 POINTER_TYPE, VOID_TYPE))
5713 t_tramp = CALL_EXPR_ARG (exp, 0);
5714 t_func = CALL_EXPR_ARG (exp, 1);
5715 t_chain = CALL_EXPR_ARG (exp, 2);
5717 r_tramp = expand_normal (t_tramp);
5718 r_func = expand_normal (t_func);
5719 r_chain = expand_normal (t_chain);
5721 /* Generate insns to initialize the trampoline. */
5722 r_tramp = round_trampoline_addr (r_tramp);
5723 #ifdef TRAMPOLINE_TEMPLATE
5724 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5725 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5726 emit_block_move (blktramp, assemble_trampoline_template (),
5727 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5729 trampolines_created = 1;
5730 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5736 expand_builtin_adjust_trampoline (tree exp)
5740 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5743 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5744 tramp = round_trampoline_addr (tramp);
5745 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5746 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5752 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5753 function. The function first checks whether the back end provides
5754 an insn to implement signbit for the respective mode. If not, it
5755 checks whether the floating point format of the value is such that
5756 the sign bit can be extracted. If that is not the case, the
5757 function returns NULL_RTX to indicate that a normal call should be
5758 emitted rather than expanding the function in-line. EXP is the
5759 expression that is a call to the builtin function; if convenient,
5760 the result should be placed in TARGET. */
5762 expand_builtin_signbit (tree exp, rtx target)
5764 const struct real_format *fmt;
5765 enum machine_mode fmode, imode, rmode;
5766 HOST_WIDE_INT hi, lo;
5769 enum insn_code icode;
5772 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5775 arg = CALL_EXPR_ARG (exp, 0);
5776 fmode = TYPE_MODE (TREE_TYPE (arg));
5777 rmode = TYPE_MODE (TREE_TYPE (exp));
5778 fmt = REAL_MODE_FORMAT (fmode);
5780 arg = builtin_save_expr (arg);
5782 /* Expand the argument yielding a RTX expression. */
5783 temp = expand_normal (arg);
5785 /* Check if the back end provides an insn that handles signbit for the
5787 icode = signbit_optab->handlers [(int) fmode].insn_code;
5788 if (icode != CODE_FOR_nothing)
5790 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5791 emit_unop_insn (icode, target, temp, UNKNOWN);
5795 /* For floating point formats without a sign bit, implement signbit
5797 bitpos = fmt->signbit_ro;
5800 /* But we can't do this if the format supports signed zero. */
5801 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5804 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5805 build_real (TREE_TYPE (arg), dconst0));
5806 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5809 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5811 imode = int_mode_for_mode (fmode);
5812 if (imode == BLKmode)
5814 temp = gen_lowpart (imode, temp);
5819 /* Handle targets with different FP word orders. */
5820 if (FLOAT_WORDS_BIG_ENDIAN)
5821 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5823 word = bitpos / BITS_PER_WORD;
5824 temp = operand_subword_force (temp, word, fmode);
5825 bitpos = bitpos % BITS_PER_WORD;
5828 /* Force the intermediate word_mode (or narrower) result into a
5829 register. This avoids attempting to create paradoxical SUBREGs
5830 of floating point modes below. */
5831 temp = force_reg (imode, temp);
5833 /* If the bitpos is within the "result mode" lowpart, the operation
5834 can be implement with a single bitwise AND. Otherwise, we need
5835 a right shift and an AND. */
5837 if (bitpos < GET_MODE_BITSIZE (rmode))
5839 if (bitpos < HOST_BITS_PER_WIDE_INT)
5842 lo = (HOST_WIDE_INT) 1 << bitpos;
5846 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5850 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5851 temp = gen_lowpart (rmode, temp);
5852 temp = expand_binop (rmode, and_optab, temp,
5853 immed_double_const (lo, hi, rmode),
5854 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5858 /* Perform a logical right shift to place the signbit in the least
5859 significant bit, then truncate the result to the desired mode
5860 and mask just this bit. */
5861 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5862 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5863 temp = gen_lowpart (rmode, temp);
5864 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5865 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5871 /* Expand fork or exec calls. TARGET is the desired target of the
5872 call. EXP is the call. FN is the
5873 identificator of the actual function. IGNORE is nonzero if the
5874 value is to be ignored. */
5877 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5882 /* If we are not profiling, just call the function. */
5883 if (!profile_arc_flag)
5886 /* Otherwise call the wrapper. This should be equivalent for the rest of
5887 compiler, so the code does not diverge, and the wrapper may run the
5888 code necessary for keeping the profiling sane. */
5890 switch (DECL_FUNCTION_CODE (fn))
5893 id = get_identifier ("__gcov_fork");
5896 case BUILT_IN_EXECL:
5897 id = get_identifier ("__gcov_execl");
5900 case BUILT_IN_EXECV:
5901 id = get_identifier ("__gcov_execv");
5904 case BUILT_IN_EXECLP:
5905 id = get_identifier ("__gcov_execlp");
5908 case BUILT_IN_EXECLE:
5909 id = get_identifier ("__gcov_execle");
5912 case BUILT_IN_EXECVP:
5913 id = get_identifier ("__gcov_execvp");
5916 case BUILT_IN_EXECVE:
5917 id = get_identifier ("__gcov_execve");
5924 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5925 DECL_EXTERNAL (decl) = 1;
5926 TREE_PUBLIC (decl) = 1;
5927 DECL_ARTIFICIAL (decl) = 1;
5928 TREE_NOTHROW (decl) = 1;
5929 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5930 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5931 call = rewrite_call_expr (exp, 0, decl, 0);
5932 return expand_call (call, target, ignore);
5937 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5938 the pointer in these functions is void*, the tree optimizers may remove
5939 casts. The mode computed in expand_builtin isn't reliable either, due
5940 to __sync_bool_compare_and_swap.
5942 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5943 group of builtins. This gives us log2 of the mode size. */
5945 static inline enum machine_mode
5946 get_builtin_sync_mode (int fcode_diff)
5948 /* The size is not negotiable, so ask not to get BLKmode in return
5949 if the target indicates that a smaller size would be better. */
5950 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5953 /* Expand the memory expression LOC and return the appropriate memory operand
5954 for the builtin_sync operations. */
5957 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5961 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5963 /* Note that we explicitly do not want any alias information for this
5964 memory, so that we kill all other live memories. Otherwise we don't
5965 satisfy the full barrier semantics of the intrinsic. */
5966 mem = validize_mem (gen_rtx_MEM (mode, addr));
5968 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5969 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5970 MEM_VOLATILE_P (mem) = 1;
5975 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5976 EXP is the CALL_EXPR. CODE is the rtx code
5977 that corresponds to the arithmetic or logical operation from the name;
5978 an exception here is that NOT actually means NAND. TARGET is an optional
5979 place for us to store the results; AFTER is true if this is the
5980 fetch_and_xxx form. IGNORE is true if we don't actually care about
5981 the result of the operation at all. */
5984 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5985 enum rtx_code code, bool after,
5986 rtx target, bool ignore)
5989 enum machine_mode old_mode;
5991 if (code == NOT && warn_sync_nand)
5993 tree fndecl = get_callee_fndecl (exp);
5994 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5996 static bool warned_f_a_n, warned_n_a_f;
6000 case BUILT_IN_FETCH_AND_NAND_1:
6001 case BUILT_IN_FETCH_AND_NAND_2:
6002 case BUILT_IN_FETCH_AND_NAND_4:
6003 case BUILT_IN_FETCH_AND_NAND_8:
6004 case BUILT_IN_FETCH_AND_NAND_16:
6009 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
6010 inform (input_location,
6011 "%qD changed semantics in GCC 4.4", fndecl);
6012 warned_f_a_n = true;
6015 case BUILT_IN_NAND_AND_FETCH_1:
6016 case BUILT_IN_NAND_AND_FETCH_2:
6017 case BUILT_IN_NAND_AND_FETCH_4:
6018 case BUILT_IN_NAND_AND_FETCH_8:
6019 case BUILT_IN_NAND_AND_FETCH_16:
6024 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
6025 inform (input_location,
6026 "%qD changed semantics in GCC 4.4", fndecl);
6027 warned_n_a_f = true;
6035 /* Expand the operands. */
6036 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6038 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6039 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6040 of CONST_INTs, where we know the old_mode only from the call argument. */
6041 old_mode = GET_MODE (val);
6042 if (old_mode == VOIDmode)
6043 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6044 val = convert_modes (mode, old_mode, val, 1);
6047 return expand_sync_operation (mem, val, code);
6049 return expand_sync_fetch_operation (mem, val, code, after, target);
6052 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6053 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6054 true if this is the boolean form. TARGET is a place for us to store the
6055 results; this is NOT optional if IS_BOOL is true. */
6058 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
6059 bool is_bool, rtx target)
6061 rtx old_val, new_val, mem;
6062 enum machine_mode old_mode;
6064 /* Expand the operands. */
6065 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6068 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
6069 mode, EXPAND_NORMAL);
6070 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6071 of CONST_INTs, where we know the old_mode only from the call argument. */
6072 old_mode = GET_MODE (old_val);
6073 if (old_mode == VOIDmode)
6074 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6075 old_val = convert_modes (mode, old_mode, old_val, 1);
6077 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
6078 mode, EXPAND_NORMAL);
6079 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6080 of CONST_INTs, where we know the old_mode only from the call argument. */
6081 old_mode = GET_MODE (new_val);
6082 if (old_mode == VOIDmode)
6083 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6084 new_val = convert_modes (mode, old_mode, new_val, 1);
6087 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6089 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6092 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6093 general form is actually an atomic exchange, and some targets only
6094 support a reduced form with the second argument being a constant 1.
6095 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6099 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6103 enum machine_mode old_mode;
6105 /* Expand the operands. */
6106 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6107 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6108 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6109 of CONST_INTs, where we know the old_mode only from the call argument. */
6110 old_mode = GET_MODE (val);
6111 if (old_mode == VOIDmode)
6112 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6113 val = convert_modes (mode, old_mode, val, 1);
6115 return expand_sync_lock_test_and_set (mem, val, target);
6118 /* Expand the __sync_synchronize intrinsic. */
6121 expand_builtin_synchronize (void)
6125 #ifdef HAVE_memory_barrier
6126 if (HAVE_memory_barrier)
6128 emit_insn (gen_memory_barrier ());
6133 if (synchronize_libfunc != NULL_RTX)
6135 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6139 /* If no explicit memory barrier instruction is available, create an
6140 empty asm stmt with a memory clobber. */
6141 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6142 tree_cons (NULL, build_string (6, "memory"), NULL));
6143 ASM_VOLATILE_P (x) = 1;
6144 expand_asm_expr (x);
6147 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6150 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6152 enum insn_code icode;
6154 rtx val = const0_rtx;
6156 /* Expand the operands. */
6157 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6159 /* If there is an explicit operation in the md file, use it. */
6160 icode = sync_lock_release[mode];
6161 if (icode != CODE_FOR_nothing)
6163 if (!insn_data[icode].operand[1].predicate (val, mode))
6164 val = force_reg (mode, val);
6166 insn = GEN_FCN (icode) (mem, val);
6174 /* Otherwise we can implement this operation by emitting a barrier
6175 followed by a store of zero. */
6176 expand_builtin_synchronize ();
6177 emit_move_insn (mem, val);
6180 /* Expand an expression EXP that calls a built-in function,
6181 with result going to TARGET if that's convenient
6182 (and in mode MODE if that's convenient).
6183 SUBTARGET may be used as the target for computing one of EXP's operands.
6184 IGNORE is nonzero if the value is to be ignored. */
6187 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6190 tree fndecl = get_callee_fndecl (exp);
6191 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6192 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6194 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6195 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6197 /* When not optimizing, generate calls to library functions for a certain
6200 && !called_as_built_in (fndecl)
6201 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6202 && fcode != BUILT_IN_ALLOCA
6203 && fcode != BUILT_IN_FREE)
6204 return expand_call (exp, target, ignore);
6206 /* The built-in function expanders test for target == const0_rtx
6207 to determine whether the function's result will be ignored. */
6209 target = const0_rtx;
6211 /* If the result of a pure or const built-in function is ignored, and
6212 none of its arguments are volatile, we can avoid expanding the
6213 built-in call and just evaluate the arguments for side-effects. */
6214 if (target == const0_rtx
6215 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6217 bool volatilep = false;
6219 call_expr_arg_iterator iter;
6221 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6222 if (TREE_THIS_VOLATILE (arg))
6230 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6231 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6238 CASE_FLT_FN (BUILT_IN_FABS):
6239 target = expand_builtin_fabs (exp, target, subtarget);
6244 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6245 target = expand_builtin_copysign (exp, target, subtarget);
6250 /* Just do a normal library call if we were unable to fold
6252 CASE_FLT_FN (BUILT_IN_CABS):
6255 CASE_FLT_FN (BUILT_IN_EXP):
6256 CASE_FLT_FN (BUILT_IN_EXP10):
6257 CASE_FLT_FN (BUILT_IN_POW10):
6258 CASE_FLT_FN (BUILT_IN_EXP2):
6259 CASE_FLT_FN (BUILT_IN_EXPM1):
6260 CASE_FLT_FN (BUILT_IN_LOGB):
6261 CASE_FLT_FN (BUILT_IN_LOG):
6262 CASE_FLT_FN (BUILT_IN_LOG10):
6263 CASE_FLT_FN (BUILT_IN_LOG2):
6264 CASE_FLT_FN (BUILT_IN_LOG1P):
6265 CASE_FLT_FN (BUILT_IN_TAN):
6266 CASE_FLT_FN (BUILT_IN_ASIN):
6267 CASE_FLT_FN (BUILT_IN_ACOS):
6268 CASE_FLT_FN (BUILT_IN_ATAN):
6269 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6270 because of possible accuracy problems. */
6271 if (! flag_unsafe_math_optimizations)
6273 CASE_FLT_FN (BUILT_IN_SQRT):
6274 CASE_FLT_FN (BUILT_IN_FLOOR):
6275 CASE_FLT_FN (BUILT_IN_CEIL):
6276 CASE_FLT_FN (BUILT_IN_TRUNC):
6277 CASE_FLT_FN (BUILT_IN_ROUND):
6278 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6279 CASE_FLT_FN (BUILT_IN_RINT):
6280 target = expand_builtin_mathfn (exp, target, subtarget);
6285 CASE_FLT_FN (BUILT_IN_ILOGB):
6286 if (! flag_unsafe_math_optimizations)
6288 CASE_FLT_FN (BUILT_IN_ISINF):
6289 CASE_FLT_FN (BUILT_IN_FINITE):
6290 case BUILT_IN_ISFINITE:
6291 case BUILT_IN_ISNORMAL:
6292 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6297 CASE_FLT_FN (BUILT_IN_LCEIL):
6298 CASE_FLT_FN (BUILT_IN_LLCEIL):
6299 CASE_FLT_FN (BUILT_IN_LFLOOR):
6300 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6301 target = expand_builtin_int_roundingfn (exp, target);
6306 CASE_FLT_FN (BUILT_IN_LRINT):
6307 CASE_FLT_FN (BUILT_IN_LLRINT):
6308 CASE_FLT_FN (BUILT_IN_LROUND):
6309 CASE_FLT_FN (BUILT_IN_LLROUND):
6310 target = expand_builtin_int_roundingfn_2 (exp, target);
6315 CASE_FLT_FN (BUILT_IN_POW):
6316 target = expand_builtin_pow (exp, target, subtarget);
6321 CASE_FLT_FN (BUILT_IN_POWI):
6322 target = expand_builtin_powi (exp, target, subtarget);
6327 CASE_FLT_FN (BUILT_IN_ATAN2):
6328 CASE_FLT_FN (BUILT_IN_LDEXP):
6329 CASE_FLT_FN (BUILT_IN_SCALB):
6330 CASE_FLT_FN (BUILT_IN_SCALBN):
6331 CASE_FLT_FN (BUILT_IN_SCALBLN):
6332 if (! flag_unsafe_math_optimizations)
6335 CASE_FLT_FN (BUILT_IN_FMOD):
6336 CASE_FLT_FN (BUILT_IN_REMAINDER):
6337 CASE_FLT_FN (BUILT_IN_DREM):
6338 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6343 CASE_FLT_FN (BUILT_IN_CEXPI):
6344 target = expand_builtin_cexpi (exp, target, subtarget);
6345 gcc_assert (target);
6348 CASE_FLT_FN (BUILT_IN_SIN):
6349 CASE_FLT_FN (BUILT_IN_COS):
6350 if (! flag_unsafe_math_optimizations)
6352 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6357 CASE_FLT_FN (BUILT_IN_SINCOS):
6358 if (! flag_unsafe_math_optimizations)
6360 target = expand_builtin_sincos (exp);
6365 case BUILT_IN_APPLY_ARGS:
6366 return expand_builtin_apply_args ();
6368 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6369 FUNCTION with a copy of the parameters described by
6370 ARGUMENTS, and ARGSIZE. It returns a block of memory
6371 allocated on the stack into which is stored all the registers
6372 that might possibly be used for returning the result of a
6373 function. ARGUMENTS is the value returned by
6374 __builtin_apply_args. ARGSIZE is the number of bytes of
6375 arguments that must be copied. ??? How should this value be
6376 computed? We'll also need a safe worst case value for varargs
6378 case BUILT_IN_APPLY:
6379 if (!validate_arglist (exp, POINTER_TYPE,
6380 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6381 && !validate_arglist (exp, REFERENCE_TYPE,
6382 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6388 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6389 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6390 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6392 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6395 /* __builtin_return (RESULT) causes the function to return the
6396 value described by RESULT. RESULT is address of the block of
6397 memory returned by __builtin_apply. */
6398 case BUILT_IN_RETURN:
6399 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6400 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6403 case BUILT_IN_SAVEREGS:
6404 return expand_builtin_saveregs ();
6406 case BUILT_IN_ARGS_INFO:
6407 return expand_builtin_args_info (exp);
6409 case BUILT_IN_VA_ARG_PACK:
6410 /* All valid uses of __builtin_va_arg_pack () are removed during
6412 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6415 case BUILT_IN_VA_ARG_PACK_LEN:
6416 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6418 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6421 /* Return the address of the first anonymous stack arg. */
6422 case BUILT_IN_NEXT_ARG:
6423 if (fold_builtin_next_arg (exp, false))
6425 return expand_builtin_next_arg ();
6427 case BUILT_IN_CLEAR_CACHE:
6428 target = expand_builtin___clear_cache (exp);
6433 case BUILT_IN_CLASSIFY_TYPE:
6434 return expand_builtin_classify_type (exp);
6436 case BUILT_IN_CONSTANT_P:
6439 case BUILT_IN_FRAME_ADDRESS:
6440 case BUILT_IN_RETURN_ADDRESS:
6441 return expand_builtin_frame_address (fndecl, exp);
6443 /* Returns the address of the area where the structure is returned.
6445 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6446 if (call_expr_nargs (exp) != 0
6447 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6448 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6451 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6453 case BUILT_IN_ALLOCA:
6454 target = expand_builtin_alloca (exp, target);
6459 case BUILT_IN_STACK_SAVE:
6460 return expand_stack_save ();
6462 case BUILT_IN_STACK_RESTORE:
6463 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6466 case BUILT_IN_BSWAP32:
6467 case BUILT_IN_BSWAP64:
6468 target = expand_builtin_bswap (exp, target, subtarget);
6474 CASE_INT_FN (BUILT_IN_FFS):
6475 case BUILT_IN_FFSIMAX:
6476 target = expand_builtin_unop (target_mode, exp, target,
6477 subtarget, ffs_optab);
6482 CASE_INT_FN (BUILT_IN_CLZ):
6483 case BUILT_IN_CLZIMAX:
6484 target = expand_builtin_unop (target_mode, exp, target,
6485 subtarget, clz_optab);
6490 CASE_INT_FN (BUILT_IN_CTZ):
6491 case BUILT_IN_CTZIMAX:
6492 target = expand_builtin_unop (target_mode, exp, target,
6493 subtarget, ctz_optab);
6498 CASE_INT_FN (BUILT_IN_POPCOUNT):
6499 case BUILT_IN_POPCOUNTIMAX:
6500 target = expand_builtin_unop (target_mode, exp, target,
6501 subtarget, popcount_optab);
6506 CASE_INT_FN (BUILT_IN_PARITY):
6507 case BUILT_IN_PARITYIMAX:
6508 target = expand_builtin_unop (target_mode, exp, target,
6509 subtarget, parity_optab);
6514 case BUILT_IN_STRLEN:
6515 target = expand_builtin_strlen (exp, target, target_mode);
6520 case BUILT_IN_STRCPY:
6521 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6526 case BUILT_IN_STRNCPY:
6527 target = expand_builtin_strncpy (exp, target, mode);
6532 case BUILT_IN_STPCPY:
6533 target = expand_builtin_stpcpy (exp, target, mode);
6538 case BUILT_IN_STRCAT:
6539 target = expand_builtin_strcat (fndecl, exp, target, mode);
6544 case BUILT_IN_STRNCAT:
6545 target = expand_builtin_strncat (exp, target, mode);
6550 case BUILT_IN_STRSPN:
6551 target = expand_builtin_strspn (exp, target, mode);
6556 case BUILT_IN_STRCSPN:
6557 target = expand_builtin_strcspn (exp, target, mode);
6562 case BUILT_IN_STRSTR:
6563 target = expand_builtin_strstr (exp, target, mode);
6568 case BUILT_IN_STRPBRK:
6569 target = expand_builtin_strpbrk (exp, target, mode);
6574 case BUILT_IN_INDEX:
6575 case BUILT_IN_STRCHR:
6576 target = expand_builtin_strchr (exp, target, mode);
6581 case BUILT_IN_RINDEX:
6582 case BUILT_IN_STRRCHR:
6583 target = expand_builtin_strrchr (exp, target, mode);
6588 case BUILT_IN_MEMCPY:
6589 target = expand_builtin_memcpy (exp, target, mode);
6594 case BUILT_IN_MEMPCPY:
6595 target = expand_builtin_mempcpy (exp, target, mode);
6600 case BUILT_IN_MEMMOVE:
6601 target = expand_builtin_memmove (exp, target, mode, ignore);
6606 case BUILT_IN_BCOPY:
6607 target = expand_builtin_bcopy (exp, ignore);
6612 case BUILT_IN_MEMSET:
6613 target = expand_builtin_memset (exp, target, mode);
6618 case BUILT_IN_BZERO:
6619 target = expand_builtin_bzero (exp);
6624 case BUILT_IN_STRCMP:
6625 target = expand_builtin_strcmp (exp, target, mode);
6630 case BUILT_IN_STRNCMP:
6631 target = expand_builtin_strncmp (exp, target, mode);
6636 case BUILT_IN_MEMCHR:
6637 target = expand_builtin_memchr (exp, target, mode);
6643 case BUILT_IN_MEMCMP:
6644 target = expand_builtin_memcmp (exp, target, mode);
6649 case BUILT_IN_SETJMP:
6650 /* This should have been lowered to the builtins below. */
6653 case BUILT_IN_SETJMP_SETUP:
6654 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6655 and the receiver label. */
6656 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6658 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6659 VOIDmode, EXPAND_NORMAL);
6660 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6661 rtx label_r = label_rtx (label);
6663 /* This is copied from the handling of non-local gotos. */
6664 expand_builtin_setjmp_setup (buf_addr, label_r);
6665 nonlocal_goto_handler_labels
6666 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6667 nonlocal_goto_handler_labels);
6668 /* ??? Do not let expand_label treat us as such since we would
6669 not want to be both on the list of non-local labels and on
6670 the list of forced labels. */
6671 FORCED_LABEL (label) = 0;
6676 case BUILT_IN_SETJMP_DISPATCHER:
6677 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6678 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6680 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6681 rtx label_r = label_rtx (label);
6683 /* Remove the dispatcher label from the list of non-local labels
6684 since the receiver labels have been added to it above. */
6685 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6690 case BUILT_IN_SETJMP_RECEIVER:
6691 /* __builtin_setjmp_receiver is passed the receiver label. */
6692 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6694 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6695 rtx label_r = label_rtx (label);
6697 expand_builtin_setjmp_receiver (label_r);
6702 /* __builtin_longjmp is passed a pointer to an array of five words.
6703 It's similar to the C library longjmp function but works with
6704 __builtin_setjmp above. */
6705 case BUILT_IN_LONGJMP:
6706 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6708 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6709 VOIDmode, EXPAND_NORMAL);
6710 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6712 if (value != const1_rtx)
6714 error ("%<__builtin_longjmp%> second argument must be 1");
6718 expand_builtin_longjmp (buf_addr, value);
6723 case BUILT_IN_NONLOCAL_GOTO:
6724 target = expand_builtin_nonlocal_goto (exp);
6729 /* This updates the setjmp buffer that is its argument with the value
6730 of the current stack pointer. */
6731 case BUILT_IN_UPDATE_SETJMP_BUF:
6732 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6735 = expand_normal (CALL_EXPR_ARG (exp, 0));
6737 expand_builtin_update_setjmp_buf (buf_addr);
6743 expand_builtin_trap ();
6746 case BUILT_IN_PRINTF:
6747 target = expand_builtin_printf (exp, target, mode, false);
6752 case BUILT_IN_PRINTF_UNLOCKED:
6753 target = expand_builtin_printf (exp, target, mode, true);
6758 case BUILT_IN_FPUTS:
6759 target = expand_builtin_fputs (exp, target, false);
6763 case BUILT_IN_FPUTS_UNLOCKED:
6764 target = expand_builtin_fputs (exp, target, true);
6769 case BUILT_IN_FPRINTF:
6770 target = expand_builtin_fprintf (exp, target, mode, false);
6775 case BUILT_IN_FPRINTF_UNLOCKED:
6776 target = expand_builtin_fprintf (exp, target, mode, true);
6781 case BUILT_IN_SPRINTF:
6782 target = expand_builtin_sprintf (exp, target, mode);
6787 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6788 case BUILT_IN_SIGNBITD32:
6789 case BUILT_IN_SIGNBITD64:
6790 case BUILT_IN_SIGNBITD128:
6791 target = expand_builtin_signbit (exp, target);
6796 /* Various hooks for the DWARF 2 __throw routine. */
6797 case BUILT_IN_UNWIND_INIT:
6798 expand_builtin_unwind_init ();
6800 case BUILT_IN_DWARF_CFA:
6801 return virtual_cfa_rtx;
6802 #ifdef DWARF2_UNWIND_INFO
6803 case BUILT_IN_DWARF_SP_COLUMN:
6804 return expand_builtin_dwarf_sp_column ();
6805 case BUILT_IN_INIT_DWARF_REG_SIZES:
6806 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6809 case BUILT_IN_FROB_RETURN_ADDR:
6810 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6811 case BUILT_IN_EXTRACT_RETURN_ADDR:
6812 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6813 case BUILT_IN_EH_RETURN:
6814 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6815 CALL_EXPR_ARG (exp, 1));
6817 #ifdef EH_RETURN_DATA_REGNO
6818 case BUILT_IN_EH_RETURN_DATA_REGNO:
6819 return expand_builtin_eh_return_data_regno (exp);
6821 case BUILT_IN_EXTEND_POINTER:
6822 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6824 case BUILT_IN_VA_START:
6825 return expand_builtin_va_start (exp);
6826 case BUILT_IN_VA_END:
6827 return expand_builtin_va_end (exp);
6828 case BUILT_IN_VA_COPY:
6829 return expand_builtin_va_copy (exp);
6830 case BUILT_IN_EXPECT:
6831 return expand_builtin_expect (exp, target);
6832 case BUILT_IN_PREFETCH:
6833 expand_builtin_prefetch (exp);
6836 case BUILT_IN_PROFILE_FUNC_ENTER:
6837 return expand_builtin_profile_func (false);
6838 case BUILT_IN_PROFILE_FUNC_EXIT:
6839 return expand_builtin_profile_func (true);
6841 case BUILT_IN_INIT_TRAMPOLINE:
6842 return expand_builtin_init_trampoline (exp);
6843 case BUILT_IN_ADJUST_TRAMPOLINE:
6844 return expand_builtin_adjust_trampoline (exp);
6847 case BUILT_IN_EXECL:
6848 case BUILT_IN_EXECV:
6849 case BUILT_IN_EXECLP:
6850 case BUILT_IN_EXECLE:
6851 case BUILT_IN_EXECVP:
6852 case BUILT_IN_EXECVE:
6853 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6858 case BUILT_IN_FETCH_AND_ADD_1:
6859 case BUILT_IN_FETCH_AND_ADD_2:
6860 case BUILT_IN_FETCH_AND_ADD_4:
6861 case BUILT_IN_FETCH_AND_ADD_8:
6862 case BUILT_IN_FETCH_AND_ADD_16:
6863 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6864 target = expand_builtin_sync_operation (mode, exp, PLUS,
6865 false, target, ignore);
6870 case BUILT_IN_FETCH_AND_SUB_1:
6871 case BUILT_IN_FETCH_AND_SUB_2:
6872 case BUILT_IN_FETCH_AND_SUB_4:
6873 case BUILT_IN_FETCH_AND_SUB_8:
6874 case BUILT_IN_FETCH_AND_SUB_16:
6875 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6876 target = expand_builtin_sync_operation (mode, exp, MINUS,
6877 false, target, ignore);
6882 case BUILT_IN_FETCH_AND_OR_1:
6883 case BUILT_IN_FETCH_AND_OR_2:
6884 case BUILT_IN_FETCH_AND_OR_4:
6885 case BUILT_IN_FETCH_AND_OR_8:
6886 case BUILT_IN_FETCH_AND_OR_16:
6887 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6888 target = expand_builtin_sync_operation (mode, exp, IOR,
6889 false, target, ignore);
6894 case BUILT_IN_FETCH_AND_AND_1:
6895 case BUILT_IN_FETCH_AND_AND_2:
6896 case BUILT_IN_FETCH_AND_AND_4:
6897 case BUILT_IN_FETCH_AND_AND_8:
6898 case BUILT_IN_FETCH_AND_AND_16:
6899 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6900 target = expand_builtin_sync_operation (mode, exp, AND,
6901 false, target, ignore);
6906 case BUILT_IN_FETCH_AND_XOR_1:
6907 case BUILT_IN_FETCH_AND_XOR_2:
6908 case BUILT_IN_FETCH_AND_XOR_4:
6909 case BUILT_IN_FETCH_AND_XOR_8:
6910 case BUILT_IN_FETCH_AND_XOR_16:
6911 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6912 target = expand_builtin_sync_operation (mode, exp, XOR,
6913 false, target, ignore);
6918 case BUILT_IN_FETCH_AND_NAND_1:
6919 case BUILT_IN_FETCH_AND_NAND_2:
6920 case BUILT_IN_FETCH_AND_NAND_4:
6921 case BUILT_IN_FETCH_AND_NAND_8:
6922 case BUILT_IN_FETCH_AND_NAND_16:
6923 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6924 target = expand_builtin_sync_operation (mode, exp, NOT,
6925 false, target, ignore);
6930 case BUILT_IN_ADD_AND_FETCH_1:
6931 case BUILT_IN_ADD_AND_FETCH_2:
6932 case BUILT_IN_ADD_AND_FETCH_4:
6933 case BUILT_IN_ADD_AND_FETCH_8:
6934 case BUILT_IN_ADD_AND_FETCH_16:
6935 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6936 target = expand_builtin_sync_operation (mode, exp, PLUS,
6937 true, target, ignore);
6942 case BUILT_IN_SUB_AND_FETCH_1:
6943 case BUILT_IN_SUB_AND_FETCH_2:
6944 case BUILT_IN_SUB_AND_FETCH_4:
6945 case BUILT_IN_SUB_AND_FETCH_8:
6946 case BUILT_IN_SUB_AND_FETCH_16:
6947 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6948 target = expand_builtin_sync_operation (mode, exp, MINUS,
6949 true, target, ignore);
6954 case BUILT_IN_OR_AND_FETCH_1:
6955 case BUILT_IN_OR_AND_FETCH_2:
6956 case BUILT_IN_OR_AND_FETCH_4:
6957 case BUILT_IN_OR_AND_FETCH_8:
6958 case BUILT_IN_OR_AND_FETCH_16:
6959 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6960 target = expand_builtin_sync_operation (mode, exp, IOR,
6961 true, target, ignore);
6966 case BUILT_IN_AND_AND_FETCH_1:
6967 case BUILT_IN_AND_AND_FETCH_2:
6968 case BUILT_IN_AND_AND_FETCH_4:
6969 case BUILT_IN_AND_AND_FETCH_8:
6970 case BUILT_IN_AND_AND_FETCH_16:
6971 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6972 target = expand_builtin_sync_operation (mode, exp, AND,
6973 true, target, ignore);
6978 case BUILT_IN_XOR_AND_FETCH_1:
6979 case BUILT_IN_XOR_AND_FETCH_2:
6980 case BUILT_IN_XOR_AND_FETCH_4:
6981 case BUILT_IN_XOR_AND_FETCH_8:
6982 case BUILT_IN_XOR_AND_FETCH_16:
6983 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6984 target = expand_builtin_sync_operation (mode, exp, XOR,
6985 true, target, ignore);
6990 case BUILT_IN_NAND_AND_FETCH_1:
6991 case BUILT_IN_NAND_AND_FETCH_2:
6992 case BUILT_IN_NAND_AND_FETCH_4:
6993 case BUILT_IN_NAND_AND_FETCH_8:
6994 case BUILT_IN_NAND_AND_FETCH_16:
6995 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6996 target = expand_builtin_sync_operation (mode, exp, NOT,
6997 true, target, ignore);
7002 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
7003 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
7004 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
7005 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
7006 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
7007 if (mode == VOIDmode)
7008 mode = TYPE_MODE (boolean_type_node);
7009 if (!target || !register_operand (target, mode))
7010 target = gen_reg_rtx (mode);
7012 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
7013 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7018 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
7019 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
7020 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
7021 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
7022 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
7023 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
7024 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7029 case BUILT_IN_LOCK_TEST_AND_SET_1:
7030 case BUILT_IN_LOCK_TEST_AND_SET_2:
7031 case BUILT_IN_LOCK_TEST_AND_SET_4:
7032 case BUILT_IN_LOCK_TEST_AND_SET_8:
7033 case BUILT_IN_LOCK_TEST_AND_SET_16:
7034 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
7035 target = expand_builtin_lock_test_and_set (mode, exp, target);
7040 case BUILT_IN_LOCK_RELEASE_1:
7041 case BUILT_IN_LOCK_RELEASE_2:
7042 case BUILT_IN_LOCK_RELEASE_4:
7043 case BUILT_IN_LOCK_RELEASE_8:
7044 case BUILT_IN_LOCK_RELEASE_16:
7045 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
7046 expand_builtin_lock_release (mode, exp);
7049 case BUILT_IN_SYNCHRONIZE:
7050 expand_builtin_synchronize ();
7053 case BUILT_IN_OBJECT_SIZE:
7054 return expand_builtin_object_size (exp);
7056 case BUILT_IN_MEMCPY_CHK:
7057 case BUILT_IN_MEMPCPY_CHK:
7058 case BUILT_IN_MEMMOVE_CHK:
7059 case BUILT_IN_MEMSET_CHK:
7060 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7065 case BUILT_IN_STRCPY_CHK:
7066 case BUILT_IN_STPCPY_CHK:
7067 case BUILT_IN_STRNCPY_CHK:
7068 case BUILT_IN_STRCAT_CHK:
7069 case BUILT_IN_STRNCAT_CHK:
7070 case BUILT_IN_SNPRINTF_CHK:
7071 case BUILT_IN_VSNPRINTF_CHK:
7072 maybe_emit_chk_warning (exp, fcode);
7075 case BUILT_IN_SPRINTF_CHK:
7076 case BUILT_IN_VSPRINTF_CHK:
7077 maybe_emit_sprintf_chk_warning (exp, fcode);
7081 maybe_emit_free_warning (exp);
7084 default: /* just do library call, if unknown builtin */
7088 /* The switch statement above can drop through to cause the function
7089 to be called normally. */
7090 return expand_call (exp, target, ignore);
7093 /* Determine whether a tree node represents a call to a built-in
7094 function. If the tree T is a call to a built-in function with
7095 the right number of arguments of the appropriate types, return
7096 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7097 Otherwise the return value is END_BUILTINS. */
7099 enum built_in_function
7100 builtin_mathfn_code (const_tree t)
7102 const_tree fndecl, arg, parmlist;
7103 const_tree argtype, parmtype;
7104 const_call_expr_arg_iterator iter;
7106 if (TREE_CODE (t) != CALL_EXPR
7107 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7108 return END_BUILTINS;
7110 fndecl = get_callee_fndecl (t);
7111 if (fndecl == NULL_TREE
7112 || TREE_CODE (fndecl) != FUNCTION_DECL
7113 || ! DECL_BUILT_IN (fndecl)
7114 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7115 return END_BUILTINS;
7117 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7118 init_const_call_expr_arg_iterator (t, &iter);
7119 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7121 /* If a function doesn't take a variable number of arguments,
7122 the last element in the list will have type `void'. */
7123 parmtype = TREE_VALUE (parmlist);
7124 if (VOID_TYPE_P (parmtype))
7126 if (more_const_call_expr_args_p (&iter))
7127 return END_BUILTINS;
7128 return DECL_FUNCTION_CODE (fndecl);
7131 if (! more_const_call_expr_args_p (&iter))
7132 return END_BUILTINS;
7134 arg = next_const_call_expr_arg (&iter);
7135 argtype = TREE_TYPE (arg);
7137 if (SCALAR_FLOAT_TYPE_P (parmtype))
7139 if (! SCALAR_FLOAT_TYPE_P (argtype))
7140 return END_BUILTINS;
7142 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7144 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7145 return END_BUILTINS;
7147 else if (POINTER_TYPE_P (parmtype))
7149 if (! POINTER_TYPE_P (argtype))
7150 return END_BUILTINS;
7152 else if (INTEGRAL_TYPE_P (parmtype))
7154 if (! INTEGRAL_TYPE_P (argtype))
7155 return END_BUILTINS;
7158 return END_BUILTINS;
7161 /* Variable-length argument list. */
7162 return DECL_FUNCTION_CODE (fndecl);
7165 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7166 evaluate to a constant. */
7169 fold_builtin_constant_p (tree arg)
7171 /* We return 1 for a numeric type that's known to be a constant
7172 value at compile-time or for an aggregate type that's a
7173 literal constant. */
7176 /* If we know this is a constant, emit the constant of one. */
7177 if (CONSTANT_CLASS_P (arg)
7178 || (TREE_CODE (arg) == CONSTRUCTOR
7179 && TREE_CONSTANT (arg)))
7180 return integer_one_node;
7181 if (TREE_CODE (arg) == ADDR_EXPR)
7183 tree op = TREE_OPERAND (arg, 0);
7184 if (TREE_CODE (op) == STRING_CST
7185 || (TREE_CODE (op) == ARRAY_REF
7186 && integer_zerop (TREE_OPERAND (op, 1))
7187 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7188 return integer_one_node;
7191 /* If this expression has side effects, show we don't know it to be a
7192 constant. Likewise if it's a pointer or aggregate type since in
7193 those case we only want literals, since those are only optimized
7194 when generating RTL, not later.
7195 And finally, if we are compiling an initializer, not code, we
7196 need to return a definite result now; there's not going to be any
7197 more optimization done. */
7198 if (TREE_SIDE_EFFECTS (arg)
7199 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7200 || POINTER_TYPE_P (TREE_TYPE (arg))
7202 || folding_initializer)
7203 return integer_zero_node;
7208 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7209 return it as a truthvalue. */
7212 build_builtin_expect_predicate (tree pred, tree expected)
7214 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7216 fn = built_in_decls[BUILT_IN_EXPECT];
7217 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7218 ret_type = TREE_TYPE (TREE_TYPE (fn));
7219 pred_type = TREE_VALUE (arg_types);
7220 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7222 pred = fold_convert (pred_type, pred);
7223 expected = fold_convert (expected_type, expected);
7224 call_expr = build_call_expr (fn, 2, pred, expected);
7226 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7227 build_int_cst (ret_type, 0));
7230 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7231 NULL_TREE if no simplification is possible. */
7234 fold_builtin_expect (tree arg0, tree arg1)
7237 enum tree_code code;
7239 /* If this is a builtin_expect within a builtin_expect keep the
7240 inner one. See through a comparison against a constant. It
7241 might have been added to create a thruthvalue. */
7243 if (COMPARISON_CLASS_P (inner)
7244 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7245 inner = TREE_OPERAND (inner, 0);
7247 if (TREE_CODE (inner) == CALL_EXPR
7248 && (fndecl = get_callee_fndecl (inner))
7249 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7250 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7253 /* Distribute the expected value over short-circuiting operators.
7254 See through the cast from truthvalue_type_node to long. */
7256 while (TREE_CODE (inner) == NOP_EXPR
7257 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7258 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7259 inner = TREE_OPERAND (inner, 0);
7261 code = TREE_CODE (inner);
7262 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7264 tree op0 = TREE_OPERAND (inner, 0);
7265 tree op1 = TREE_OPERAND (inner, 1);
7267 op0 = build_builtin_expect_predicate (op0, arg1);
7268 op1 = build_builtin_expect_predicate (op1, arg1);
7269 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7271 return fold_convert (TREE_TYPE (arg0), inner);
7274 /* If the argument isn't invariant then there's nothing else we can do. */
7275 if (!TREE_CONSTANT (arg0))
7278 /* If we expect that a comparison against the argument will fold to
7279 a constant return the constant. In practice, this means a true
7280 constant or the address of a non-weak symbol. */
7283 if (TREE_CODE (inner) == ADDR_EXPR)
7287 inner = TREE_OPERAND (inner, 0);
7289 while (TREE_CODE (inner) == COMPONENT_REF
7290 || TREE_CODE (inner) == ARRAY_REF);
7291 if (DECL_P (inner) && DECL_WEAK (inner))
7295 /* Otherwise, ARG0 already has the proper type for the return value. */
7299 /* Fold a call to __builtin_classify_type with argument ARG. */
7302 fold_builtin_classify_type (tree arg)
7305 return build_int_cst (NULL_TREE, no_type_class);
7307 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7310 /* Fold a call to __builtin_strlen with argument ARG. */
7313 fold_builtin_strlen (tree arg)
7315 if (!validate_arg (arg, POINTER_TYPE))
7319 tree len = c_strlen (arg, 0);
7323 /* Convert from the internal "sizetype" type to "size_t". */
7325 len = fold_convert (size_type_node, len);
7333 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7336 fold_builtin_inf (tree type, int warn)
7338 REAL_VALUE_TYPE real;
7340 /* __builtin_inff is intended to be usable to define INFINITY on all
7341 targets. If an infinity is not available, INFINITY expands "to a
7342 positive constant of type float that overflows at translation
7343 time", footnote "In this case, using INFINITY will violate the
7344 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7345 Thus we pedwarn to ensure this constraint violation is
7347 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7348 pedwarn (input_location, 0, "target format does not support infinity");
7351 return build_real (type, real);
7354 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7357 fold_builtin_nan (tree arg, tree type, int quiet)
7359 REAL_VALUE_TYPE real;
7362 if (!validate_arg (arg, POINTER_TYPE))
7364 str = c_getstr (arg);
7368 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7371 return build_real (type, real);
7374 /* Return true if the floating point expression T has an integer value.
7375 We also allow +Inf, -Inf and NaN to be considered integer values. */
7378 integer_valued_real_p (tree t)
7380 switch (TREE_CODE (t))
7387 return integer_valued_real_p (TREE_OPERAND (t, 0));
7392 return integer_valued_real_p (TREE_OPERAND (t, 1));
7399 return integer_valued_real_p (TREE_OPERAND (t, 0))
7400 && integer_valued_real_p (TREE_OPERAND (t, 1));
7403 return integer_valued_real_p (TREE_OPERAND (t, 1))
7404 && integer_valued_real_p (TREE_OPERAND (t, 2));
7407 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7411 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7412 if (TREE_CODE (type) == INTEGER_TYPE)
7414 if (TREE_CODE (type) == REAL_TYPE)
7415 return integer_valued_real_p (TREE_OPERAND (t, 0));
7420 switch (builtin_mathfn_code (t))
7422 CASE_FLT_FN (BUILT_IN_CEIL):
7423 CASE_FLT_FN (BUILT_IN_FLOOR):
7424 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7425 CASE_FLT_FN (BUILT_IN_RINT):
7426 CASE_FLT_FN (BUILT_IN_ROUND):
7427 CASE_FLT_FN (BUILT_IN_TRUNC):
7430 CASE_FLT_FN (BUILT_IN_FMIN):
7431 CASE_FLT_FN (BUILT_IN_FMAX):
7432 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7433 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7446 /* FNDECL is assumed to be a builtin where truncation can be propagated
7447 across (for instance floor((double)f) == (double)floorf (f).
7448 Do the transformation for a call with argument ARG. */
7451 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7453 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7455 if (!validate_arg (arg, REAL_TYPE))
7458 /* Integer rounding functions are idempotent. */
7459 if (fcode == builtin_mathfn_code (arg))
7462 /* If argument is already integer valued, and we don't need to worry
7463 about setting errno, there's no need to perform rounding. */
7464 if (! flag_errno_math && integer_valued_real_p (arg))
7469 tree arg0 = strip_float_extensions (arg);
7470 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7471 tree newtype = TREE_TYPE (arg0);
7474 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7475 && (decl = mathfn_built_in (newtype, fcode)))
7476 return fold_convert (ftype,
7477 build_call_expr (decl, 1,
7478 fold_convert (newtype, arg0)));
7483 /* FNDECL is assumed to be builtin which can narrow the FP type of
7484 the argument, for instance lround((double)f) -> lroundf (f).
7485 Do the transformation for a call with argument ARG. */
7488 fold_fixed_mathfn (tree fndecl, tree arg)
7490 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7492 if (!validate_arg (arg, REAL_TYPE))
7495 /* If argument is already integer valued, and we don't need to worry
7496 about setting errno, there's no need to perform rounding. */
7497 if (! flag_errno_math && integer_valued_real_p (arg))
7498 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7502 tree ftype = TREE_TYPE (arg);
7503 tree arg0 = strip_float_extensions (arg);
7504 tree newtype = TREE_TYPE (arg0);
7507 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7508 && (decl = mathfn_built_in (newtype, fcode)))
7509 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7512 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7513 sizeof (long long) == sizeof (long). */
7514 if (TYPE_PRECISION (long_long_integer_type_node)
7515 == TYPE_PRECISION (long_integer_type_node))
7517 tree newfn = NULL_TREE;
7520 CASE_FLT_FN (BUILT_IN_LLCEIL):
7521 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7524 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7525 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7528 CASE_FLT_FN (BUILT_IN_LLROUND):
7529 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7532 CASE_FLT_FN (BUILT_IN_LLRINT):
7533 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7542 tree newcall = build_call_expr(newfn, 1, arg);
7543 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7550 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7551 return type. Return NULL_TREE if no simplification can be made. */
7554 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7558 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7559 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7562 /* Calculate the result when the argument is a constant. */
7563 if (TREE_CODE (arg) == COMPLEX_CST
7564 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7568 if (TREE_CODE (arg) == COMPLEX_EXPR)
7570 tree real = TREE_OPERAND (arg, 0);
7571 tree imag = TREE_OPERAND (arg, 1);
7573 /* If either part is zero, cabs is fabs of the other. */
7574 if (real_zerop (real))
7575 return fold_build1 (ABS_EXPR, type, imag);
7576 if (real_zerop (imag))
7577 return fold_build1 (ABS_EXPR, type, real);
7579 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7580 if (flag_unsafe_math_optimizations
7581 && operand_equal_p (real, imag, OEP_PURE_SAME))
7583 const REAL_VALUE_TYPE sqrt2_trunc
7584 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7586 return fold_build2 (MULT_EXPR, type,
7587 fold_build1 (ABS_EXPR, type, real),
7588 build_real (type, sqrt2_trunc));
7592 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7593 if (TREE_CODE (arg) == NEGATE_EXPR
7594 || TREE_CODE (arg) == CONJ_EXPR)
7595 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7597 /* Don't do this when optimizing for size. */
7598 if (flag_unsafe_math_optimizations
7599 && optimize && optimize_function_for_speed_p (cfun))
7601 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7603 if (sqrtfn != NULL_TREE)
7605 tree rpart, ipart, result;
7607 arg = builtin_save_expr (arg);
7609 rpart = fold_build1 (REALPART_EXPR, type, arg);
7610 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7612 rpart = builtin_save_expr (rpart);
7613 ipart = builtin_save_expr (ipart);
7615 result = fold_build2 (PLUS_EXPR, type,
7616 fold_build2 (MULT_EXPR, type,
7618 fold_build2 (MULT_EXPR, type,
7621 return build_call_expr (sqrtfn, 1, result);
7628 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7629 Return NULL_TREE if no simplification can be made. */
7632 fold_builtin_sqrt (tree arg, tree type)
7635 enum built_in_function fcode;
7638 if (!validate_arg (arg, REAL_TYPE))
7641 /* Calculate the result when the argument is a constant. */
7642 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7645 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7646 fcode = builtin_mathfn_code (arg);
7647 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7649 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7650 arg = fold_build2 (MULT_EXPR, type,
7651 CALL_EXPR_ARG (arg, 0),
7652 build_real (type, dconsthalf));
7653 return build_call_expr (expfn, 1, arg);
7656 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7657 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7659 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7663 tree arg0 = CALL_EXPR_ARG (arg, 0);
7665 /* The inner root was either sqrt or cbrt. */
7666 REAL_VALUE_TYPE dconstroot =
7667 BUILTIN_SQRT_P (fcode) ? dconsthalf : dconst_third ();
7669 /* Adjust for the outer root. */
7670 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7671 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7672 tree_root = build_real (type, dconstroot);
7673 return build_call_expr (powfn, 2, arg0, tree_root);
7677 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7678 if (flag_unsafe_math_optimizations
7679 && (fcode == BUILT_IN_POW
7680 || fcode == BUILT_IN_POWF
7681 || fcode == BUILT_IN_POWL))
7683 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7684 tree arg0 = CALL_EXPR_ARG (arg, 0);
7685 tree arg1 = CALL_EXPR_ARG (arg, 1);
7687 if (!tree_expr_nonnegative_p (arg0))
7688 arg0 = build1 (ABS_EXPR, type, arg0);
7689 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7690 build_real (type, dconsthalf));
7691 return build_call_expr (powfn, 2, arg0, narg1);
7697 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7698 Return NULL_TREE if no simplification can be made. */
7701 fold_builtin_cbrt (tree arg, tree type)
7703 const enum built_in_function fcode = builtin_mathfn_code (arg);
7706 if (!validate_arg (arg, REAL_TYPE))
7709 /* Calculate the result when the argument is a constant. */
7710 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7713 if (flag_unsafe_math_optimizations)
7715 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7716 if (BUILTIN_EXPONENT_P (fcode))
7718 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7719 const REAL_VALUE_TYPE third_trunc =
7720 real_value_truncate (TYPE_MODE (type), dconst_third ());
7721 arg = fold_build2 (MULT_EXPR, type,
7722 CALL_EXPR_ARG (arg, 0),
7723 build_real (type, third_trunc));
7724 return build_call_expr (expfn, 1, arg);
7727 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7728 if (BUILTIN_SQRT_P (fcode))
7730 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7734 tree arg0 = CALL_EXPR_ARG (arg, 0);
7736 REAL_VALUE_TYPE dconstroot = dconst_third ();
7738 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7739 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7740 tree_root = build_real (type, dconstroot);
7741 return build_call_expr (powfn, 2, arg0, tree_root);
7745 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7746 if (BUILTIN_CBRT_P (fcode))
7748 tree arg0 = CALL_EXPR_ARG (arg, 0);
7749 if (tree_expr_nonnegative_p (arg0))
7751 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7756 REAL_VALUE_TYPE dconstroot;
7758 real_arithmetic (&dconstroot, MULT_EXPR,
7759 dconst_third_ptr (), dconst_third_ptr ());
7760 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7761 tree_root = build_real (type, dconstroot);
7762 return build_call_expr (powfn, 2, arg0, tree_root);
7767 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7768 if (fcode == BUILT_IN_POW
7769 || fcode == BUILT_IN_POWF
7770 || fcode == BUILT_IN_POWL)
7772 tree arg00 = CALL_EXPR_ARG (arg, 0);
7773 tree arg01 = CALL_EXPR_ARG (arg, 1);
7774 if (tree_expr_nonnegative_p (arg00))
7776 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7777 const REAL_VALUE_TYPE dconstroot
7778 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7779 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7780 build_real (type, dconstroot));
7781 return build_call_expr (powfn, 2, arg00, narg01);
7788 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7789 TYPE is the type of the return value. Return NULL_TREE if no
7790 simplification can be made. */
7793 fold_builtin_cos (tree arg, tree type, tree fndecl)
7797 if (!validate_arg (arg, REAL_TYPE))
7800 /* Calculate the result when the argument is a constant. */
7801 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7804 /* Optimize cos(-x) into cos (x). */
7805 if ((narg = fold_strip_sign_ops (arg)))
7806 return build_call_expr (fndecl, 1, narg);
7811 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7812 Return NULL_TREE if no simplification can be made. */
7815 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7817 if (validate_arg (arg, REAL_TYPE))
7821 /* Calculate the result when the argument is a constant. */
7822 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7825 /* Optimize cosh(-x) into cosh (x). */
7826 if ((narg = fold_strip_sign_ops (arg)))
7827 return build_call_expr (fndecl, 1, narg);
7833 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7834 Return NULL_TREE if no simplification can be made. */
7837 fold_builtin_tan (tree arg, tree type)
7839 enum built_in_function fcode;
7842 if (!validate_arg (arg, REAL_TYPE))
7845 /* Calculate the result when the argument is a constant. */
7846 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7849 /* Optimize tan(atan(x)) = x. */
7850 fcode = builtin_mathfn_code (arg);
7851 if (flag_unsafe_math_optimizations
7852 && (fcode == BUILT_IN_ATAN
7853 || fcode == BUILT_IN_ATANF
7854 || fcode == BUILT_IN_ATANL))
7855 return CALL_EXPR_ARG (arg, 0);
7860 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7861 NULL_TREE if no simplification can be made. */
7864 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7869 if (!validate_arg (arg0, REAL_TYPE)
7870 || !validate_arg (arg1, POINTER_TYPE)
7871 || !validate_arg (arg2, POINTER_TYPE))
7874 type = TREE_TYPE (arg0);
7876 /* Calculate the result when the argument is a constant. */
7877 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7880 /* Canonicalize sincos to cexpi. */
7881 if (!TARGET_C99_FUNCTIONS)
7883 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7887 call = build_call_expr (fn, 1, arg0);
7888 call = builtin_save_expr (call);
7890 return build2 (COMPOUND_EXPR, type,
7891 build2 (MODIFY_EXPR, void_type_node,
7892 build_fold_indirect_ref (arg1),
7893 build1 (IMAGPART_EXPR, type, call)),
7894 build2 (MODIFY_EXPR, void_type_node,
7895 build_fold_indirect_ref (arg2),
7896 build1 (REALPART_EXPR, type, call)));
7899 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7900 NULL_TREE if no simplification can be made. */
7903 fold_builtin_cexp (tree arg0, tree type)
7906 tree realp, imagp, ifn;
7908 if (!validate_arg (arg0, COMPLEX_TYPE))
7911 rtype = TREE_TYPE (TREE_TYPE (arg0));
7913 /* In case we can figure out the real part of arg0 and it is constant zero
7915 if (!TARGET_C99_FUNCTIONS)
7917 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7921 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7922 && real_zerop (realp))
7924 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7925 return build_call_expr (ifn, 1, narg);
7928 /* In case we can easily decompose real and imaginary parts split cexp
7929 to exp (r) * cexpi (i). */
7930 if (flag_unsafe_math_optimizations
7933 tree rfn, rcall, icall;
7935 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7939 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7943 icall = build_call_expr (ifn, 1, imagp);
7944 icall = builtin_save_expr (icall);
7945 rcall = build_call_expr (rfn, 1, realp);
7946 rcall = builtin_save_expr (rcall);
7947 return fold_build2 (COMPLEX_EXPR, type,
7948 fold_build2 (MULT_EXPR, rtype,
7950 fold_build1 (REALPART_EXPR, rtype, icall)),
7951 fold_build2 (MULT_EXPR, rtype,
7953 fold_build1 (IMAGPART_EXPR, rtype, icall)));
7959 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7960 Return NULL_TREE if no simplification can be made. */
7963 fold_builtin_trunc (tree fndecl, tree arg)
7965 if (!validate_arg (arg, REAL_TYPE))
7968 /* Optimize trunc of constant value. */
7969 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7971 REAL_VALUE_TYPE r, x;
7972 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7974 x = TREE_REAL_CST (arg);
7975 real_trunc (&r, TYPE_MODE (type), &x);
7976 return build_real (type, r);
7979 return fold_trunc_transparent_mathfn (fndecl, arg);
7982 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7983 Return NULL_TREE if no simplification can be made. */
7986 fold_builtin_floor (tree fndecl, tree arg)
7988 if (!validate_arg (arg, REAL_TYPE))
7991 /* Optimize floor of constant value. */
7992 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7996 x = TREE_REAL_CST (arg);
7997 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7999 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8002 real_floor (&r, TYPE_MODE (type), &x);
8003 return build_real (type, r);
8007 /* Fold floor (x) where x is nonnegative to trunc (x). */
8008 if (tree_expr_nonnegative_p (arg))
8010 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8012 return build_call_expr (truncfn, 1, arg);
8015 return fold_trunc_transparent_mathfn (fndecl, arg);
8018 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8019 Return NULL_TREE if no simplification can be made. */
8022 fold_builtin_ceil (tree fndecl, tree arg)
8024 if (!validate_arg (arg, REAL_TYPE))
8027 /* Optimize ceil of constant value. */
8028 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8032 x = TREE_REAL_CST (arg);
8033 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8035 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8038 real_ceil (&r, TYPE_MODE (type), &x);
8039 return build_real (type, r);
8043 return fold_trunc_transparent_mathfn (fndecl, arg);
8046 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8047 Return NULL_TREE if no simplification can be made. */
8050 fold_builtin_round (tree fndecl, tree arg)
8052 if (!validate_arg (arg, REAL_TYPE))
8055 /* Optimize round of constant value. */
8056 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8060 x = TREE_REAL_CST (arg);
8061 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8063 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8066 real_round (&r, TYPE_MODE (type), &x);
8067 return build_real (type, r);
8071 return fold_trunc_transparent_mathfn (fndecl, arg);
8074 /* Fold function call to builtin lround, lroundf or lroundl (or the
8075 corresponding long long versions) and other rounding functions. ARG
8076 is the argument to the call. Return NULL_TREE if no simplification
8080 fold_builtin_int_roundingfn (tree fndecl, tree arg)
8082 if (!validate_arg (arg, REAL_TYPE))
8085 /* Optimize lround of constant value. */
8086 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8088 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8090 if (real_isfinite (&x))
8092 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8093 tree ftype = TREE_TYPE (arg);
8094 unsigned HOST_WIDE_INT lo2;
8095 HOST_WIDE_INT hi, lo;
8098 switch (DECL_FUNCTION_CODE (fndecl))
8100 CASE_FLT_FN (BUILT_IN_LFLOOR):
8101 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8102 real_floor (&r, TYPE_MODE (ftype), &x);
8105 CASE_FLT_FN (BUILT_IN_LCEIL):
8106 CASE_FLT_FN (BUILT_IN_LLCEIL):
8107 real_ceil (&r, TYPE_MODE (ftype), &x);
8110 CASE_FLT_FN (BUILT_IN_LROUND):
8111 CASE_FLT_FN (BUILT_IN_LLROUND):
8112 real_round (&r, TYPE_MODE (ftype), &x);
8119 REAL_VALUE_TO_INT (&lo, &hi, r);
8120 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8121 return build_int_cst_wide (itype, lo2, hi);
8125 switch (DECL_FUNCTION_CODE (fndecl))
8127 CASE_FLT_FN (BUILT_IN_LFLOOR):
8128 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8129 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8130 if (tree_expr_nonnegative_p (arg))
8131 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8137 return fold_fixed_mathfn (fndecl, arg);
8140 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8141 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8142 the argument to the call. Return NULL_TREE if no simplification can
8146 fold_builtin_bitop (tree fndecl, tree arg)
8148 if (!validate_arg (arg, INTEGER_TYPE))
8151 /* Optimize for constant argument. */
8152 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8154 HOST_WIDE_INT hi, width, result;
8155 unsigned HOST_WIDE_INT lo;
8158 type = TREE_TYPE (arg);
8159 width = TYPE_PRECISION (type);
8160 lo = TREE_INT_CST_LOW (arg);
8162 /* Clear all the bits that are beyond the type's precision. */
8163 if (width > HOST_BITS_PER_WIDE_INT)
8165 hi = TREE_INT_CST_HIGH (arg);
8166 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8167 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8172 if (width < HOST_BITS_PER_WIDE_INT)
8173 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8176 switch (DECL_FUNCTION_CODE (fndecl))
8178 CASE_INT_FN (BUILT_IN_FFS):
8180 result = exact_log2 (lo & -lo) + 1;
8182 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8187 CASE_INT_FN (BUILT_IN_CLZ):
8189 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8191 result = width - floor_log2 (lo) - 1;
8192 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8196 CASE_INT_FN (BUILT_IN_CTZ):
8198 result = exact_log2 (lo & -lo);
8200 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8201 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8205 CASE_INT_FN (BUILT_IN_POPCOUNT):
8208 result++, lo &= lo - 1;
8210 result++, hi &= hi - 1;
8213 CASE_INT_FN (BUILT_IN_PARITY):
8216 result++, lo &= lo - 1;
8218 result++, hi &= hi - 1;
8226 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8232 /* Fold function call to builtin_bswap and the long and long long
8233 variants. Return NULL_TREE if no simplification can be made. */
8235 fold_builtin_bswap (tree fndecl, tree arg)
8237 if (! validate_arg (arg, INTEGER_TYPE))
8240 /* Optimize constant value. */
8241 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8243 HOST_WIDE_INT hi, width, r_hi = 0;
8244 unsigned HOST_WIDE_INT lo, r_lo = 0;
8247 type = TREE_TYPE (arg);
8248 width = TYPE_PRECISION (type);
8249 lo = TREE_INT_CST_LOW (arg);
8250 hi = TREE_INT_CST_HIGH (arg);
8252 switch (DECL_FUNCTION_CODE (fndecl))
8254 case BUILT_IN_BSWAP32:
8255 case BUILT_IN_BSWAP64:
8259 for (s = 0; s < width; s += 8)
8261 int d = width - s - 8;
8262 unsigned HOST_WIDE_INT byte;
8264 if (s < HOST_BITS_PER_WIDE_INT)
8265 byte = (lo >> s) & 0xff;
8267 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8269 if (d < HOST_BITS_PER_WIDE_INT)
8272 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8282 if (width < HOST_BITS_PER_WIDE_INT)
8283 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8285 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8291 /* Return true if EXPR is the real constant contained in VALUE. */
8294 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8298 return ((TREE_CODE (expr) == REAL_CST
8299 && !TREE_OVERFLOW (expr)
8300 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8301 || (TREE_CODE (expr) == COMPLEX_CST
8302 && real_dconstp (TREE_REALPART (expr), value)
8303 && real_zerop (TREE_IMAGPART (expr))));
8306 /* A subroutine of fold_builtin to fold the various logarithmic
8307 functions. Return NULL_TREE if no simplification can me made.
8308 FUNC is the corresponding MPFR logarithm function. */
8311 fold_builtin_logarithm (tree fndecl, tree arg,
8312 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8314 if (validate_arg (arg, REAL_TYPE))
8316 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8318 const enum built_in_function fcode = builtin_mathfn_code (arg);
8320 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8321 instead we'll look for 'e' truncated to MODE. So only do
8322 this if flag_unsafe_math_optimizations is set. */
8323 if (flag_unsafe_math_optimizations && func == mpfr_log)
8325 const REAL_VALUE_TYPE e_truncated =
8326 real_value_truncate (TYPE_MODE (type), dconst_e ());
8327 if (real_dconstp (arg, &e_truncated))
8328 return build_real (type, dconst1);
8331 /* Calculate the result when the argument is a constant. */
8332 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8335 /* Special case, optimize logN(expN(x)) = x. */
8336 if (flag_unsafe_math_optimizations
8337 && ((func == mpfr_log
8338 && (fcode == BUILT_IN_EXP
8339 || fcode == BUILT_IN_EXPF
8340 || fcode == BUILT_IN_EXPL))
8341 || (func == mpfr_log2
8342 && (fcode == BUILT_IN_EXP2
8343 || fcode == BUILT_IN_EXP2F
8344 || fcode == BUILT_IN_EXP2L))
8345 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8346 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8348 /* Optimize logN(func()) for various exponential functions. We
8349 want to determine the value "x" and the power "exponent" in
8350 order to transform logN(x**exponent) into exponent*logN(x). */
8351 if (flag_unsafe_math_optimizations)
8353 tree exponent = 0, x = 0;
8357 CASE_FLT_FN (BUILT_IN_EXP):
8358 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8359 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8361 exponent = CALL_EXPR_ARG (arg, 0);
8363 CASE_FLT_FN (BUILT_IN_EXP2):
8364 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8365 x = build_real (type, dconst2);
8366 exponent = CALL_EXPR_ARG (arg, 0);
8368 CASE_FLT_FN (BUILT_IN_EXP10):
8369 CASE_FLT_FN (BUILT_IN_POW10):
8370 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8372 REAL_VALUE_TYPE dconst10;
8373 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8374 x = build_real (type, dconst10);
8376 exponent = CALL_EXPR_ARG (arg, 0);
8378 CASE_FLT_FN (BUILT_IN_SQRT):
8379 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8380 x = CALL_EXPR_ARG (arg, 0);
8381 exponent = build_real (type, dconsthalf);
8383 CASE_FLT_FN (BUILT_IN_CBRT):
8384 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8385 x = CALL_EXPR_ARG (arg, 0);
8386 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8389 CASE_FLT_FN (BUILT_IN_POW):
8390 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8391 x = CALL_EXPR_ARG (arg, 0);
8392 exponent = CALL_EXPR_ARG (arg, 1);
8398 /* Now perform the optimization. */
8401 tree logfn = build_call_expr (fndecl, 1, x);
8402 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8410 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8411 NULL_TREE if no simplification can be made. */
8414 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8416 tree res, narg0, narg1;
8418 if (!validate_arg (arg0, REAL_TYPE)
8419 || !validate_arg (arg1, REAL_TYPE))
8422 /* Calculate the result when the argument is a constant. */
8423 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8426 /* If either argument to hypot has a negate or abs, strip that off.
8427 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8428 narg0 = fold_strip_sign_ops (arg0);
8429 narg1 = fold_strip_sign_ops (arg1);
8432 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8433 narg1 ? narg1 : arg1);
8436 /* If either argument is zero, hypot is fabs of the other. */
8437 if (real_zerop (arg0))
8438 return fold_build1 (ABS_EXPR, type, arg1);
8439 else if (real_zerop (arg1))
8440 return fold_build1 (ABS_EXPR, type, arg0);
8442 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8443 if (flag_unsafe_math_optimizations
8444 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8446 const REAL_VALUE_TYPE sqrt2_trunc
8447 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8448 return fold_build2 (MULT_EXPR, type,
8449 fold_build1 (ABS_EXPR, type, arg0),
8450 build_real (type, sqrt2_trunc));
8457 /* Fold a builtin function call to pow, powf, or powl. Return
8458 NULL_TREE if no simplification can be made. */
8460 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8464 if (!validate_arg (arg0, REAL_TYPE)
8465 || !validate_arg (arg1, REAL_TYPE))
8468 /* Calculate the result when the argument is a constant. */
8469 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8472 /* Optimize pow(1.0,y) = 1.0. */
8473 if (real_onep (arg0))
8474 return omit_one_operand (type, build_real (type, dconst1), arg1);
8476 if (TREE_CODE (arg1) == REAL_CST
8477 && !TREE_OVERFLOW (arg1))
8479 REAL_VALUE_TYPE cint;
8483 c = TREE_REAL_CST (arg1);
8485 /* Optimize pow(x,0.0) = 1.0. */
8486 if (REAL_VALUES_EQUAL (c, dconst0))
8487 return omit_one_operand (type, build_real (type, dconst1),
8490 /* Optimize pow(x,1.0) = x. */
8491 if (REAL_VALUES_EQUAL (c, dconst1))
8494 /* Optimize pow(x,-1.0) = 1.0/x. */
8495 if (REAL_VALUES_EQUAL (c, dconstm1))
8496 return fold_build2 (RDIV_EXPR, type,
8497 build_real (type, dconst1), arg0);
8499 /* Optimize pow(x,0.5) = sqrt(x). */
8500 if (flag_unsafe_math_optimizations
8501 && REAL_VALUES_EQUAL (c, dconsthalf))
8503 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8505 if (sqrtfn != NULL_TREE)
8506 return build_call_expr (sqrtfn, 1, arg0);
8509 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8510 if (flag_unsafe_math_optimizations)
8512 const REAL_VALUE_TYPE dconstroot
8513 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8515 if (REAL_VALUES_EQUAL (c, dconstroot))
8517 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8518 if (cbrtfn != NULL_TREE)
8519 return build_call_expr (cbrtfn, 1, arg0);
8523 /* Check for an integer exponent. */
8524 n = real_to_integer (&c);
8525 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8526 if (real_identical (&c, &cint))
8528 /* Attempt to evaluate pow at compile-time, unless this should
8529 raise an exception. */
8530 if (TREE_CODE (arg0) == REAL_CST
8531 && !TREE_OVERFLOW (arg0)
8533 || (!flag_trapping_math && !flag_errno_math)
8534 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8539 x = TREE_REAL_CST (arg0);
8540 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8541 if (flag_unsafe_math_optimizations || !inexact)
8542 return build_real (type, x);
8545 /* Strip sign ops from even integer powers. */
8546 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8548 tree narg0 = fold_strip_sign_ops (arg0);
8550 return build_call_expr (fndecl, 2, narg0, arg1);
8555 if (flag_unsafe_math_optimizations)
8557 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8559 /* Optimize pow(expN(x),y) = expN(x*y). */
8560 if (BUILTIN_EXPONENT_P (fcode))
8562 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8563 tree arg = CALL_EXPR_ARG (arg0, 0);
8564 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8565 return build_call_expr (expfn, 1, arg);
8568 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8569 if (BUILTIN_SQRT_P (fcode))
8571 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8572 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8573 build_real (type, dconsthalf));
8574 return build_call_expr (fndecl, 2, narg0, narg1);
8577 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8578 if (BUILTIN_CBRT_P (fcode))
8580 tree arg = CALL_EXPR_ARG (arg0, 0);
8581 if (tree_expr_nonnegative_p (arg))
8583 const REAL_VALUE_TYPE dconstroot
8584 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8585 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8586 build_real (type, dconstroot));
8587 return build_call_expr (fndecl, 2, arg, narg1);
8591 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8592 if (fcode == BUILT_IN_POW
8593 || fcode == BUILT_IN_POWF
8594 || fcode == BUILT_IN_POWL)
8596 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8597 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8598 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8599 return build_call_expr (fndecl, 2, arg00, narg1);
8606 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8607 Return NULL_TREE if no simplification can be made. */
8609 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8610 tree arg0, tree arg1, tree type)
8612 if (!validate_arg (arg0, REAL_TYPE)
8613 || !validate_arg (arg1, INTEGER_TYPE))
8616 /* Optimize pow(1.0,y) = 1.0. */
8617 if (real_onep (arg0))
8618 return omit_one_operand (type, build_real (type, dconst1), arg1);
8620 if (host_integerp (arg1, 0))
8622 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8624 /* Evaluate powi at compile-time. */
8625 if (TREE_CODE (arg0) == REAL_CST
8626 && !TREE_OVERFLOW (arg0))
8629 x = TREE_REAL_CST (arg0);
8630 real_powi (&x, TYPE_MODE (type), &x, c);
8631 return build_real (type, x);
8634 /* Optimize pow(x,0) = 1.0. */
8636 return omit_one_operand (type, build_real (type, dconst1),
8639 /* Optimize pow(x,1) = x. */
8643 /* Optimize pow(x,-1) = 1.0/x. */
8645 return fold_build2 (RDIV_EXPR, type,
8646 build_real (type, dconst1), arg0);
8652 /* A subroutine of fold_builtin to fold the various exponent
8653 functions. Return NULL_TREE if no simplification can be made.
8654 FUNC is the corresponding MPFR exponent function. */
8657 fold_builtin_exponent (tree fndecl, tree arg,
8658 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8660 if (validate_arg (arg, REAL_TYPE))
8662 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8665 /* Calculate the result when the argument is a constant. */
8666 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8669 /* Optimize expN(logN(x)) = x. */
8670 if (flag_unsafe_math_optimizations)
8672 const enum built_in_function fcode = builtin_mathfn_code (arg);
8674 if ((func == mpfr_exp
8675 && (fcode == BUILT_IN_LOG
8676 || fcode == BUILT_IN_LOGF
8677 || fcode == BUILT_IN_LOGL))
8678 || (func == mpfr_exp2
8679 && (fcode == BUILT_IN_LOG2
8680 || fcode == BUILT_IN_LOG2F
8681 || fcode == BUILT_IN_LOG2L))
8682 || (func == mpfr_exp10
8683 && (fcode == BUILT_IN_LOG10
8684 || fcode == BUILT_IN_LOG10F
8685 || fcode == BUILT_IN_LOG10L)))
8686 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8693 /* Return true if VAR is a VAR_DECL or a component thereof. */
8696 var_decl_component_p (tree var)
8699 while (handled_component_p (inner))
8700 inner = TREE_OPERAND (inner, 0);
8701 return SSA_VAR_P (inner);
8704 /* Fold function call to builtin memset. Return
8705 NULL_TREE if no simplification can be made. */
8708 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8711 unsigned HOST_WIDE_INT length, cval;
8713 if (! validate_arg (dest, POINTER_TYPE)
8714 || ! validate_arg (c, INTEGER_TYPE)
8715 || ! validate_arg (len, INTEGER_TYPE))
8718 if (! host_integerp (len, 1))
8721 /* If the LEN parameter is zero, return DEST. */
8722 if (integer_zerop (len))
8723 return omit_one_operand (type, dest, c);
8725 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8730 if (TREE_CODE (var) != ADDR_EXPR)
8733 var = TREE_OPERAND (var, 0);
8734 if (TREE_THIS_VOLATILE (var))
8737 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8738 && !POINTER_TYPE_P (TREE_TYPE (var)))
8741 if (! var_decl_component_p (var))
8744 length = tree_low_cst (len, 1);
8745 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8746 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8750 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8753 if (integer_zerop (c))
8757 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8760 cval = tree_low_cst (c, 1);
8764 cval |= (cval << 31) << 1;
8767 ret = build_int_cst_type (TREE_TYPE (var), cval);
8768 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8772 return omit_one_operand (type, dest, ret);
8775 /* Fold function call to builtin memset. Return
8776 NULL_TREE if no simplification can be made. */
8779 fold_builtin_bzero (tree dest, tree size, bool ignore)
8781 if (! validate_arg (dest, POINTER_TYPE)
8782 || ! validate_arg (size, INTEGER_TYPE))
8788 /* New argument list transforming bzero(ptr x, int y) to
8789 memset(ptr x, int 0, size_t y). This is done this way
8790 so that if it isn't expanded inline, we fallback to
8791 calling bzero instead of memset. */
8793 return fold_builtin_memset (dest, integer_zero_node,
8794 fold_convert (sizetype, size),
8795 void_type_node, ignore);
8798 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8799 NULL_TREE if no simplification can be made.
8800 If ENDP is 0, return DEST (like memcpy).
8801 If ENDP is 1, return DEST+LEN (like mempcpy).
8802 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8803 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8807 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8809 tree destvar, srcvar, expr;
8811 if (! validate_arg (dest, POINTER_TYPE)
8812 || ! validate_arg (src, POINTER_TYPE)
8813 || ! validate_arg (len, INTEGER_TYPE))
8816 /* If the LEN parameter is zero, return DEST. */
8817 if (integer_zerop (len))
8818 return omit_one_operand (type, dest, src);
8820 /* If SRC and DEST are the same (and not volatile), return
8821 DEST{,+LEN,+LEN-1}. */
8822 if (operand_equal_p (src, dest, 0))
8826 tree srctype, desttype;
8829 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8830 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8832 /* Both DEST and SRC must be pointer types.
8833 ??? This is what old code did. Is the testing for pointer types
8836 If either SRC is readonly or length is 1, we can use memcpy. */
8837 if (dest_align && src_align
8838 && (readonly_data_expr (src)
8839 || (host_integerp (len, 1)
8840 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8841 tree_low_cst (len, 1)))))
8843 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8846 return build_call_expr (fn, 3, dest, src, len);
8851 if (!host_integerp (len, 0))
8854 This logic lose for arguments like (type *)malloc (sizeof (type)),
8855 since we strip the casts of up to VOID return value from malloc.
8856 Perhaps we ought to inherit type from non-VOID argument here? */
8859 srctype = TREE_TYPE (TREE_TYPE (src));
8860 desttype = TREE_TYPE (TREE_TYPE (dest));
8861 if (!srctype || !desttype
8862 || !TYPE_SIZE_UNIT (srctype)
8863 || !TYPE_SIZE_UNIT (desttype)
8864 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8865 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8866 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8867 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8870 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8871 < (int) TYPE_ALIGN (desttype)
8872 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8873 < (int) TYPE_ALIGN (srctype)))
8877 dest = builtin_save_expr (dest);
8879 srcvar = build_fold_indirect_ref (src);
8880 if (TREE_THIS_VOLATILE (srcvar))
8882 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8884 /* With memcpy, it is possible to bypass aliasing rules, so without
8885 this check i.e. execute/20060930-2.c would be misoptimized, because
8886 it use conflicting alias set to hold argument for the memcpy call.
8887 This check is probably unnecessary with -fno-strict-aliasing.
8888 Similarly for destvar. See also PR29286. */
8889 if (!var_decl_component_p (srcvar)
8890 /* Accept: memcpy (*char_var, "test", 1); that simplify
8892 || is_gimple_min_invariant (srcvar)
8893 || readonly_data_expr (src))
8896 destvar = build_fold_indirect_ref (dest);
8897 if (TREE_THIS_VOLATILE (destvar))
8899 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8901 if (!var_decl_component_p (destvar))
8904 if (srctype == desttype
8905 || (gimple_in_ssa_p (cfun)
8906 && useless_type_conversion_p (desttype, srctype)))
8908 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8909 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8910 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8911 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8912 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8914 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8915 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8921 if (endp == 0 || endp == 3)
8922 return omit_one_operand (type, dest, expr);
8928 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8931 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8932 dest = fold_convert (type, dest);
8934 dest = omit_one_operand (type, dest, expr);
8938 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8939 If LEN is not NULL, it represents the length of the string to be
8940 copied. Return NULL_TREE if no simplification can be made. */
8943 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8947 if (!validate_arg (dest, POINTER_TYPE)
8948 || !validate_arg (src, POINTER_TYPE))
8951 /* If SRC and DEST are the same (and not volatile), return DEST. */
8952 if (operand_equal_p (src, dest, 0))
8953 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8955 if (optimize_function_for_size_p (cfun))
8958 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8964 len = c_strlen (src, 1);
8965 if (! len || TREE_SIDE_EFFECTS (len))
8969 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8970 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8971 build_call_expr (fn, 3, dest, src, len));
8974 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8975 If SLEN is not NULL, it represents the length of the source string.
8976 Return NULL_TREE if no simplification can be made. */
8979 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8983 if (!validate_arg (dest, POINTER_TYPE)
8984 || !validate_arg (src, POINTER_TYPE)
8985 || !validate_arg (len, INTEGER_TYPE))
8988 /* If the LEN parameter is zero, return DEST. */
8989 if (integer_zerop (len))
8990 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8992 /* We can't compare slen with len as constants below if len is not a
8994 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8998 slen = c_strlen (src, 1);
9000 /* Now, we must be passed a constant src ptr parameter. */
9001 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9004 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
9006 /* We do not support simplification of this case, though we do
9007 support it when expanding trees into RTL. */
9008 /* FIXME: generate a call to __builtin_memset. */
9009 if (tree_int_cst_lt (slen, len))
9012 /* OK transform into builtin memcpy. */
9013 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9016 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9017 build_call_expr (fn, 3, dest, src, len));
9020 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9021 arguments to the call, and TYPE is its return type.
9022 Return NULL_TREE if no simplification can be made. */
9025 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
9027 if (!validate_arg (arg1, POINTER_TYPE)
9028 || !validate_arg (arg2, INTEGER_TYPE)
9029 || !validate_arg (len, INTEGER_TYPE))
9035 if (TREE_CODE (arg2) != INTEGER_CST
9036 || !host_integerp (len, 1))
9039 p1 = c_getstr (arg1);
9040 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9046 if (target_char_cast (arg2, &c))
9049 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
9052 return build_int_cst (TREE_TYPE (arg1), 0);
9054 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
9056 return fold_convert (type, tem);
9062 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9063 Return NULL_TREE if no simplification can be made. */
9066 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
9068 const char *p1, *p2;
9070 if (!validate_arg (arg1, POINTER_TYPE)
9071 || !validate_arg (arg2, POINTER_TYPE)
9072 || !validate_arg (len, INTEGER_TYPE))
9075 /* If the LEN parameter is zero, return zero. */
9076 if (integer_zerop (len))
9077 return omit_two_operands (integer_type_node, integer_zero_node,
9080 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9081 if (operand_equal_p (arg1, arg2, 0))
9082 return omit_one_operand (integer_type_node, integer_zero_node, len);
9084 p1 = c_getstr (arg1);
9085 p2 = c_getstr (arg2);
9087 /* If all arguments are constant, and the value of len is not greater
9088 than the lengths of arg1 and arg2, evaluate at compile-time. */
9089 if (host_integerp (len, 1) && p1 && p2
9090 && compare_tree_int (len, strlen (p1) + 1) <= 0
9091 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9093 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9096 return integer_one_node;
9098 return integer_minus_one_node;
9100 return integer_zero_node;
9103 /* If len parameter is one, return an expression corresponding to
9104 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9105 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9107 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9108 tree cst_uchar_ptr_node
9109 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9111 tree ind1 = fold_convert (integer_type_node,
9112 build1 (INDIRECT_REF, cst_uchar_node,
9113 fold_convert (cst_uchar_ptr_node,
9115 tree ind2 = fold_convert (integer_type_node,
9116 build1 (INDIRECT_REF, cst_uchar_node,
9117 fold_convert (cst_uchar_ptr_node,
9119 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9125 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9126 Return NULL_TREE if no simplification can be made. */
9129 fold_builtin_strcmp (tree arg1, tree arg2)
9131 const char *p1, *p2;
9133 if (!validate_arg (arg1, POINTER_TYPE)
9134 || !validate_arg (arg2, POINTER_TYPE))
9137 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9138 if (operand_equal_p (arg1, arg2, 0))
9139 return integer_zero_node;
9141 p1 = c_getstr (arg1);
9142 p2 = c_getstr (arg2);
9146 const int i = strcmp (p1, p2);
9148 return integer_minus_one_node;
9150 return integer_one_node;
9152 return integer_zero_node;
9155 /* If the second arg is "", return *(const unsigned char*)arg1. */
9156 if (p2 && *p2 == '\0')
9158 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9159 tree cst_uchar_ptr_node
9160 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9162 return fold_convert (integer_type_node,
9163 build1 (INDIRECT_REF, cst_uchar_node,
9164 fold_convert (cst_uchar_ptr_node,
9168 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9169 if (p1 && *p1 == '\0')
9171 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9172 tree cst_uchar_ptr_node
9173 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9175 tree temp = fold_convert (integer_type_node,
9176 build1 (INDIRECT_REF, cst_uchar_node,
9177 fold_convert (cst_uchar_ptr_node,
9179 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9185 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9186 Return NULL_TREE if no simplification can be made. */
9189 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9191 const char *p1, *p2;
9193 if (!validate_arg (arg1, POINTER_TYPE)
9194 || !validate_arg (arg2, POINTER_TYPE)
9195 || !validate_arg (len, INTEGER_TYPE))
9198 /* If the LEN parameter is zero, return zero. */
9199 if (integer_zerop (len))
9200 return omit_two_operands (integer_type_node, integer_zero_node,
9203 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9204 if (operand_equal_p (arg1, arg2, 0))
9205 return omit_one_operand (integer_type_node, integer_zero_node, len);
9207 p1 = c_getstr (arg1);
9208 p2 = c_getstr (arg2);
9210 if (host_integerp (len, 1) && p1 && p2)
9212 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9214 return integer_one_node;
9216 return integer_minus_one_node;
9218 return integer_zero_node;
9221 /* If the second arg is "", and the length is greater than zero,
9222 return *(const unsigned char*)arg1. */
9223 if (p2 && *p2 == '\0'
9224 && TREE_CODE (len) == INTEGER_CST
9225 && tree_int_cst_sgn (len) == 1)
9227 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9228 tree cst_uchar_ptr_node
9229 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9231 return fold_convert (integer_type_node,
9232 build1 (INDIRECT_REF, cst_uchar_node,
9233 fold_convert (cst_uchar_ptr_node,
9237 /* If the first arg is "", and the length is greater than zero,
9238 return -*(const unsigned char*)arg2. */
9239 if (p1 && *p1 == '\0'
9240 && TREE_CODE (len) == INTEGER_CST
9241 && tree_int_cst_sgn (len) == 1)
9243 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9244 tree cst_uchar_ptr_node
9245 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9247 tree temp = fold_convert (integer_type_node,
9248 build1 (INDIRECT_REF, cst_uchar_node,
9249 fold_convert (cst_uchar_ptr_node,
9251 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9254 /* If len parameter is one, return an expression corresponding to
9255 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9256 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9258 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9259 tree cst_uchar_ptr_node
9260 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9262 tree ind1 = fold_convert (integer_type_node,
9263 build1 (INDIRECT_REF, cst_uchar_node,
9264 fold_convert (cst_uchar_ptr_node,
9266 tree ind2 = fold_convert (integer_type_node,
9267 build1 (INDIRECT_REF, cst_uchar_node,
9268 fold_convert (cst_uchar_ptr_node,
9270 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9276 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9277 ARG. Return NULL_TREE if no simplification can be made. */
9280 fold_builtin_signbit (tree arg, tree type)
9284 if (!validate_arg (arg, REAL_TYPE))
9287 /* If ARG is a compile-time constant, determine the result. */
9288 if (TREE_CODE (arg) == REAL_CST
9289 && !TREE_OVERFLOW (arg))
9293 c = TREE_REAL_CST (arg);
9294 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9295 return fold_convert (type, temp);
9298 /* If ARG is non-negative, the result is always zero. */
9299 if (tree_expr_nonnegative_p (arg))
9300 return omit_one_operand (type, integer_zero_node, arg);
9302 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9303 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9304 return fold_build2 (LT_EXPR, type, arg,
9305 build_real (TREE_TYPE (arg), dconst0));
9310 /* Fold function call to builtin copysign, copysignf or copysignl with
9311 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9315 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9319 if (!validate_arg (arg1, REAL_TYPE)
9320 || !validate_arg (arg2, REAL_TYPE))
9323 /* copysign(X,X) is X. */
9324 if (operand_equal_p (arg1, arg2, 0))
9325 return fold_convert (type, arg1);
9327 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9328 if (TREE_CODE (arg1) == REAL_CST
9329 && TREE_CODE (arg2) == REAL_CST
9330 && !TREE_OVERFLOW (arg1)
9331 && !TREE_OVERFLOW (arg2))
9333 REAL_VALUE_TYPE c1, c2;
9335 c1 = TREE_REAL_CST (arg1);
9336 c2 = TREE_REAL_CST (arg2);
9337 /* c1.sign := c2.sign. */
9338 real_copysign (&c1, &c2);
9339 return build_real (type, c1);
9342 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9343 Remember to evaluate Y for side-effects. */
9344 if (tree_expr_nonnegative_p (arg2))
9345 return omit_one_operand (type,
9346 fold_build1 (ABS_EXPR, type, arg1),
9349 /* Strip sign changing operations for the first argument. */
9350 tem = fold_strip_sign_ops (arg1);
9352 return build_call_expr (fndecl, 2, tem, arg2);
9357 /* Fold a call to builtin isascii with argument ARG. */
9360 fold_builtin_isascii (tree arg)
9362 if (!validate_arg (arg, INTEGER_TYPE))
9366 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9367 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9368 build_int_cst (NULL_TREE,
9369 ~ (unsigned HOST_WIDE_INT) 0x7f));
9370 return fold_build2 (EQ_EXPR, integer_type_node,
9371 arg, integer_zero_node);
9375 /* Fold a call to builtin toascii with argument ARG. */
9378 fold_builtin_toascii (tree arg)
9380 if (!validate_arg (arg, INTEGER_TYPE))
9383 /* Transform toascii(c) -> (c & 0x7f). */
9384 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9385 build_int_cst (NULL_TREE, 0x7f));
9388 /* Fold a call to builtin isdigit with argument ARG. */
9391 fold_builtin_isdigit (tree arg)
9393 if (!validate_arg (arg, INTEGER_TYPE))
9397 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9398 /* According to the C standard, isdigit is unaffected by locale.
9399 However, it definitely is affected by the target character set. */
9400 unsigned HOST_WIDE_INT target_digit0
9401 = lang_hooks.to_target_charset ('0');
9403 if (target_digit0 == 0)
9406 arg = fold_convert (unsigned_type_node, arg);
9407 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9408 build_int_cst (unsigned_type_node, target_digit0));
9409 return fold_build2 (LE_EXPR, integer_type_node, arg,
9410 build_int_cst (unsigned_type_node, 9));
9414 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9417 fold_builtin_fabs (tree arg, tree type)
9419 if (!validate_arg (arg, REAL_TYPE))
9422 arg = fold_convert (type, arg);
9423 if (TREE_CODE (arg) == REAL_CST)
9424 return fold_abs_const (arg, type);
9425 return fold_build1 (ABS_EXPR, type, arg);
9428 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9431 fold_builtin_abs (tree arg, tree type)
9433 if (!validate_arg (arg, INTEGER_TYPE))
9436 arg = fold_convert (type, arg);
9437 if (TREE_CODE (arg) == INTEGER_CST)
9438 return fold_abs_const (arg, type);
9439 return fold_build1 (ABS_EXPR, type, arg);
9442 /* Fold a call to builtin fmin or fmax. */
9445 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9447 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9449 /* Calculate the result when the argument is a constant. */
9450 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9455 /* If either argument is NaN, return the other one. Avoid the
9456 transformation if we get (and honor) a signalling NaN. Using
9457 omit_one_operand() ensures we create a non-lvalue. */
9458 if (TREE_CODE (arg0) == REAL_CST
9459 && real_isnan (&TREE_REAL_CST (arg0))
9460 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9461 || ! TREE_REAL_CST (arg0).signalling))
9462 return omit_one_operand (type, arg1, arg0);
9463 if (TREE_CODE (arg1) == REAL_CST
9464 && real_isnan (&TREE_REAL_CST (arg1))
9465 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9466 || ! TREE_REAL_CST (arg1).signalling))
9467 return omit_one_operand (type, arg0, arg1);
9469 /* Transform fmin/fmax(x,x) -> x. */
9470 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9471 return omit_one_operand (type, arg0, arg1);
9473 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9474 functions to return the numeric arg if the other one is NaN.
9475 These tree codes don't honor that, so only transform if
9476 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9477 handled, so we don't have to worry about it either. */
9478 if (flag_finite_math_only)
9479 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9480 fold_convert (type, arg0),
9481 fold_convert (type, arg1));
9486 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9489 fold_builtin_carg (tree arg, tree type)
9491 if (validate_arg (arg, COMPLEX_TYPE))
9493 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9497 tree new_arg = builtin_save_expr (arg);
9498 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9499 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9500 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9507 /* Fold a call to builtin logb/ilogb. */
9510 fold_builtin_logb (tree arg, tree rettype)
9512 if (! validate_arg (arg, REAL_TYPE))
9517 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9519 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9525 /* If arg is Inf or NaN and we're logb, return it. */
9526 if (TREE_CODE (rettype) == REAL_TYPE)
9527 return fold_convert (rettype, arg);
9528 /* Fall through... */
9530 /* Zero may set errno and/or raise an exception for logb, also
9531 for ilogb we don't know FP_ILOGB0. */
9534 /* For normal numbers, proceed iff radix == 2. In GCC,
9535 normalized significands are in the range [0.5, 1.0). We
9536 want the exponent as if they were [1.0, 2.0) so get the
9537 exponent and subtract 1. */
9538 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9539 return fold_convert (rettype, build_int_cst (NULL_TREE,
9540 REAL_EXP (value)-1));
9548 /* Fold a call to builtin significand, if radix == 2. */
9551 fold_builtin_significand (tree arg, tree rettype)
9553 if (! validate_arg (arg, REAL_TYPE))
9558 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9560 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9567 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9568 return fold_convert (rettype, arg);
9570 /* For normal numbers, proceed iff radix == 2. */
9571 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9573 REAL_VALUE_TYPE result = *value;
9574 /* In GCC, normalized significands are in the range [0.5,
9575 1.0). We want them to be [1.0, 2.0) so set the
9577 SET_REAL_EXP (&result, 1);
9578 return build_real (rettype, result);
9587 /* Fold a call to builtin frexp, we can assume the base is 2. */
9590 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9592 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9597 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9600 arg1 = build_fold_indirect_ref (arg1);
9602 /* Proceed if a valid pointer type was passed in. */
9603 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9605 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9611 /* For +-0, return (*exp = 0, +-0). */
9612 exp = integer_zero_node;
9617 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9618 return omit_one_operand (rettype, arg0, arg1);
9621 /* Since the frexp function always expects base 2, and in
9622 GCC normalized significands are already in the range
9623 [0.5, 1.0), we have exactly what frexp wants. */
9624 REAL_VALUE_TYPE frac_rvt = *value;
9625 SET_REAL_EXP (&frac_rvt, 0);
9626 frac = build_real (rettype, frac_rvt);
9627 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9634 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9635 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9636 TREE_SIDE_EFFECTS (arg1) = 1;
9637 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9643 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9644 then we can assume the base is two. If it's false, then we have to
9645 check the mode of the TYPE parameter in certain cases. */
9648 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9650 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9655 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9656 if (real_zerop (arg0) || integer_zerop (arg1)
9657 || (TREE_CODE (arg0) == REAL_CST
9658 && !real_isfinite (&TREE_REAL_CST (arg0))))
9659 return omit_one_operand (type, arg0, arg1);
9661 /* If both arguments are constant, then try to evaluate it. */
9662 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9663 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9664 && host_integerp (arg1, 0))
9666 /* Bound the maximum adjustment to twice the range of the
9667 mode's valid exponents. Use abs to ensure the range is
9668 positive as a sanity check. */
9669 const long max_exp_adj = 2 *
9670 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9671 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9673 /* Get the user-requested adjustment. */
9674 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9676 /* The requested adjustment must be inside this range. This
9677 is a preliminary cap to avoid things like overflow, we
9678 may still fail to compute the result for other reasons. */
9679 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9681 REAL_VALUE_TYPE initial_result;
9683 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9685 /* Ensure we didn't overflow. */
9686 if (! real_isinf (&initial_result))
9688 const REAL_VALUE_TYPE trunc_result
9689 = real_value_truncate (TYPE_MODE (type), initial_result);
9691 /* Only proceed if the target mode can hold the
9693 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9694 return build_real (type, trunc_result);
9703 /* Fold a call to builtin modf. */
9706 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9708 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9713 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9716 arg1 = build_fold_indirect_ref (arg1);
9718 /* Proceed if a valid pointer type was passed in. */
9719 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9721 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9722 REAL_VALUE_TYPE trunc, frac;
9728 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9729 trunc = frac = *value;
9732 /* For +-Inf, return (*arg1 = arg0, +-0). */
9734 frac.sign = value->sign;
9738 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9739 real_trunc (&trunc, VOIDmode, value);
9740 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9741 /* If the original number was negative and already
9742 integral, then the fractional part is -0.0. */
9743 if (value->sign && frac.cl == rvc_zero)
9744 frac.sign = value->sign;
9748 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9749 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9750 build_real (rettype, trunc));
9751 TREE_SIDE_EFFECTS (arg1) = 1;
9752 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9753 build_real (rettype, frac));
9759 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9760 ARG is the argument for the call. */
9763 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9765 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9768 if (!validate_arg (arg, REAL_TYPE))
9771 switch (builtin_index)
9773 case BUILT_IN_ISINF:
9774 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9775 return omit_one_operand (type, integer_zero_node, arg);
9777 if (TREE_CODE (arg) == REAL_CST)
9779 r = TREE_REAL_CST (arg);
9780 if (real_isinf (&r))
9781 return real_compare (GT_EXPR, &r, &dconst0)
9782 ? integer_one_node : integer_minus_one_node;
9784 return integer_zero_node;
9789 case BUILT_IN_ISINF_SIGN:
9791 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9792 /* In a boolean context, GCC will fold the inner COND_EXPR to
9793 1. So e.g. "if (isinf_sign(x))" would be folded to just
9794 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9795 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9796 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9797 tree tmp = NULL_TREE;
9799 arg = builtin_save_expr (arg);
9801 if (signbit_fn && isinf_fn)
9803 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
9804 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
9806 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
9807 signbit_call, integer_zero_node);
9808 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
9809 isinf_call, integer_zero_node);
9811 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
9812 integer_minus_one_node, integer_one_node);
9813 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
9820 case BUILT_IN_ISFINITE:
9821 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9822 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9823 return omit_one_operand (type, integer_one_node, arg);
9825 if (TREE_CODE (arg) == REAL_CST)
9827 r = TREE_REAL_CST (arg);
9828 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9833 case BUILT_IN_ISNAN:
9834 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9835 return omit_one_operand (type, integer_zero_node, arg);
9837 if (TREE_CODE (arg) == REAL_CST)
9839 r = TREE_REAL_CST (arg);
9840 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9843 arg = builtin_save_expr (arg);
9844 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9851 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9852 This builtin will generate code to return the appropriate floating
9853 point classification depending on the value of the floating point
9854 number passed in. The possible return values must be supplied as
9855 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9856 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9857 one floating point argument which is "type generic". */
9860 fold_builtin_fpclassify (tree exp)
9862 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9863 arg, type, res, tmp;
9864 enum machine_mode mode;
9868 /* Verify the required arguments in the original call. */
9869 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9870 INTEGER_TYPE, INTEGER_TYPE,
9871 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9874 fp_nan = CALL_EXPR_ARG (exp, 0);
9875 fp_infinite = CALL_EXPR_ARG (exp, 1);
9876 fp_normal = CALL_EXPR_ARG (exp, 2);
9877 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9878 fp_zero = CALL_EXPR_ARG (exp, 4);
9879 arg = CALL_EXPR_ARG (exp, 5);
9880 type = TREE_TYPE (arg);
9881 mode = TYPE_MODE (type);
9882 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
9886 (fabs(x) == Inf ? FP_INFINITE :
9887 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9888 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9890 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9891 build_real (type, dconst0));
9892 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
9894 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9895 real_from_string (&r, buf);
9896 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
9897 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
9899 if (HONOR_INFINITIES (mode))
9902 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9903 build_real (type, r));
9904 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
9907 if (HONOR_NANS (mode))
9909 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
9910 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
9916 /* Fold a call to an unordered comparison function such as
9917 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9918 being called and ARG0 and ARG1 are the arguments for the call.
9919 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9920 the opposite of the desired result. UNORDERED_CODE is used
9921 for modes that can hold NaNs and ORDERED_CODE is used for
9925 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9926 enum tree_code unordered_code,
9927 enum tree_code ordered_code)
9929 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9930 enum tree_code code;
9932 enum tree_code code0, code1;
9933 tree cmp_type = NULL_TREE;
9935 type0 = TREE_TYPE (arg0);
9936 type1 = TREE_TYPE (arg1);
9938 code0 = TREE_CODE (type0);
9939 code1 = TREE_CODE (type1);
9941 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9942 /* Choose the wider of two real types. */
9943 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9945 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9947 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9950 arg0 = fold_convert (cmp_type, arg0);
9951 arg1 = fold_convert (cmp_type, arg1);
9953 if (unordered_code == UNORDERED_EXPR)
9955 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9956 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9957 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9960 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9962 return fold_build1 (TRUTH_NOT_EXPR, type,
9963 fold_build2 (code, type, arg0, arg1));
9966 /* Fold a call to built-in function FNDECL with 0 arguments.
9967 IGNORE is true if the result of the function call is ignored. This
9968 function returns NULL_TREE if no simplification was possible. */
9971 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9973 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9974 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9977 CASE_FLT_FN (BUILT_IN_INF):
9978 case BUILT_IN_INFD32:
9979 case BUILT_IN_INFD64:
9980 case BUILT_IN_INFD128:
9981 return fold_builtin_inf (type, true);
9983 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9984 return fold_builtin_inf (type, false);
9986 case BUILT_IN_CLASSIFY_TYPE:
9987 return fold_builtin_classify_type (NULL_TREE);
9995 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9996 IGNORE is true if the result of the function call is ignored. This
9997 function returns NULL_TREE if no simplification was possible. */
10000 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
10002 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10003 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10007 case BUILT_IN_CONSTANT_P:
10009 tree val = fold_builtin_constant_p (arg0);
10011 /* Gimplification will pull the CALL_EXPR for the builtin out of
10012 an if condition. When not optimizing, we'll not CSE it back.
10013 To avoid link error types of regressions, return false now. */
10014 if (!val && !optimize)
10015 val = integer_zero_node;
10020 case BUILT_IN_CLASSIFY_TYPE:
10021 return fold_builtin_classify_type (arg0);
10023 case BUILT_IN_STRLEN:
10024 return fold_builtin_strlen (arg0);
10026 CASE_FLT_FN (BUILT_IN_FABS):
10027 return fold_builtin_fabs (arg0, type);
10030 case BUILT_IN_LABS:
10031 case BUILT_IN_LLABS:
10032 case BUILT_IN_IMAXABS:
10033 return fold_builtin_abs (arg0, type);
10035 CASE_FLT_FN (BUILT_IN_CONJ):
10036 if (validate_arg (arg0, COMPLEX_TYPE))
10037 return fold_build1 (CONJ_EXPR, type, arg0);
10040 CASE_FLT_FN (BUILT_IN_CREAL):
10041 if (validate_arg (arg0, COMPLEX_TYPE))
10042 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
10045 CASE_FLT_FN (BUILT_IN_CIMAG):
10046 if (validate_arg (arg0, COMPLEX_TYPE))
10047 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
10050 CASE_FLT_FN (BUILT_IN_CCOS):
10051 CASE_FLT_FN (BUILT_IN_CCOSH):
10052 /* These functions are "even", i.e. f(x) == f(-x). */
10053 if (validate_arg (arg0, COMPLEX_TYPE))
10055 tree narg = fold_strip_sign_ops (arg0);
10057 return build_call_expr (fndecl, 1, narg);
10061 CASE_FLT_FN (BUILT_IN_CABS):
10062 return fold_builtin_cabs (arg0, type, fndecl);
10064 CASE_FLT_FN (BUILT_IN_CARG):
10065 return fold_builtin_carg (arg0, type);
10067 CASE_FLT_FN (BUILT_IN_SQRT):
10068 return fold_builtin_sqrt (arg0, type);
10070 CASE_FLT_FN (BUILT_IN_CBRT):
10071 return fold_builtin_cbrt (arg0, type);
10073 CASE_FLT_FN (BUILT_IN_ASIN):
10074 if (validate_arg (arg0, REAL_TYPE))
10075 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10076 &dconstm1, &dconst1, true);
10079 CASE_FLT_FN (BUILT_IN_ACOS):
10080 if (validate_arg (arg0, REAL_TYPE))
10081 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10082 &dconstm1, &dconst1, true);
10085 CASE_FLT_FN (BUILT_IN_ATAN):
10086 if (validate_arg (arg0, REAL_TYPE))
10087 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10090 CASE_FLT_FN (BUILT_IN_ASINH):
10091 if (validate_arg (arg0, REAL_TYPE))
10092 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10095 CASE_FLT_FN (BUILT_IN_ACOSH):
10096 if (validate_arg (arg0, REAL_TYPE))
10097 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10098 &dconst1, NULL, true);
10101 CASE_FLT_FN (BUILT_IN_ATANH):
10102 if (validate_arg (arg0, REAL_TYPE))
10103 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10104 &dconstm1, &dconst1, false);
10107 CASE_FLT_FN (BUILT_IN_SIN):
10108 if (validate_arg (arg0, REAL_TYPE))
10109 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10112 CASE_FLT_FN (BUILT_IN_COS):
10113 return fold_builtin_cos (arg0, type, fndecl);
10116 CASE_FLT_FN (BUILT_IN_TAN):
10117 return fold_builtin_tan (arg0, type);
10119 CASE_FLT_FN (BUILT_IN_CEXP):
10120 return fold_builtin_cexp (arg0, type);
10122 CASE_FLT_FN (BUILT_IN_CEXPI):
10123 if (validate_arg (arg0, REAL_TYPE))
10124 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10127 CASE_FLT_FN (BUILT_IN_SINH):
10128 if (validate_arg (arg0, REAL_TYPE))
10129 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10132 CASE_FLT_FN (BUILT_IN_COSH):
10133 return fold_builtin_cosh (arg0, type, fndecl);
10135 CASE_FLT_FN (BUILT_IN_TANH):
10136 if (validate_arg (arg0, REAL_TYPE))
10137 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10140 CASE_FLT_FN (BUILT_IN_ERF):
10141 if (validate_arg (arg0, REAL_TYPE))
10142 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10145 CASE_FLT_FN (BUILT_IN_ERFC):
10146 if (validate_arg (arg0, REAL_TYPE))
10147 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10150 CASE_FLT_FN (BUILT_IN_TGAMMA):
10151 if (validate_arg (arg0, REAL_TYPE))
10152 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10155 CASE_FLT_FN (BUILT_IN_EXP):
10156 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10158 CASE_FLT_FN (BUILT_IN_EXP2):
10159 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10161 CASE_FLT_FN (BUILT_IN_EXP10):
10162 CASE_FLT_FN (BUILT_IN_POW10):
10163 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10165 CASE_FLT_FN (BUILT_IN_EXPM1):
10166 if (validate_arg (arg0, REAL_TYPE))
10167 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10170 CASE_FLT_FN (BUILT_IN_LOG):
10171 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10173 CASE_FLT_FN (BUILT_IN_LOG2):
10174 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10176 CASE_FLT_FN (BUILT_IN_LOG10):
10177 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10179 CASE_FLT_FN (BUILT_IN_LOG1P):
10180 if (validate_arg (arg0, REAL_TYPE))
10181 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10182 &dconstm1, NULL, false);
10185 CASE_FLT_FN (BUILT_IN_J0):
10186 if (validate_arg (arg0, REAL_TYPE))
10187 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10191 CASE_FLT_FN (BUILT_IN_J1):
10192 if (validate_arg (arg0, REAL_TYPE))
10193 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10197 CASE_FLT_FN (BUILT_IN_Y0):
10198 if (validate_arg (arg0, REAL_TYPE))
10199 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10200 &dconst0, NULL, false);
10203 CASE_FLT_FN (BUILT_IN_Y1):
10204 if (validate_arg (arg0, REAL_TYPE))
10205 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10206 &dconst0, NULL, false);
10209 CASE_FLT_FN (BUILT_IN_NAN):
10210 case BUILT_IN_NAND32:
10211 case BUILT_IN_NAND64:
10212 case BUILT_IN_NAND128:
10213 return fold_builtin_nan (arg0, type, true);
10215 CASE_FLT_FN (BUILT_IN_NANS):
10216 return fold_builtin_nan (arg0, type, false);
10218 CASE_FLT_FN (BUILT_IN_FLOOR):
10219 return fold_builtin_floor (fndecl, arg0);
10221 CASE_FLT_FN (BUILT_IN_CEIL):
10222 return fold_builtin_ceil (fndecl, arg0);
10224 CASE_FLT_FN (BUILT_IN_TRUNC):
10225 return fold_builtin_trunc (fndecl, arg0);
10227 CASE_FLT_FN (BUILT_IN_ROUND):
10228 return fold_builtin_round (fndecl, arg0);
10230 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10231 CASE_FLT_FN (BUILT_IN_RINT):
10232 return fold_trunc_transparent_mathfn (fndecl, arg0);
10234 CASE_FLT_FN (BUILT_IN_LCEIL):
10235 CASE_FLT_FN (BUILT_IN_LLCEIL):
10236 CASE_FLT_FN (BUILT_IN_LFLOOR):
10237 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10238 CASE_FLT_FN (BUILT_IN_LROUND):
10239 CASE_FLT_FN (BUILT_IN_LLROUND):
10240 return fold_builtin_int_roundingfn (fndecl, arg0);
10242 CASE_FLT_FN (BUILT_IN_LRINT):
10243 CASE_FLT_FN (BUILT_IN_LLRINT):
10244 return fold_fixed_mathfn (fndecl, arg0);
10246 case BUILT_IN_BSWAP32:
10247 case BUILT_IN_BSWAP64:
10248 return fold_builtin_bswap (fndecl, arg0);
10250 CASE_INT_FN (BUILT_IN_FFS):
10251 CASE_INT_FN (BUILT_IN_CLZ):
10252 CASE_INT_FN (BUILT_IN_CTZ):
10253 CASE_INT_FN (BUILT_IN_POPCOUNT):
10254 CASE_INT_FN (BUILT_IN_PARITY):
10255 return fold_builtin_bitop (fndecl, arg0);
10257 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10258 return fold_builtin_signbit (arg0, type);
10260 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10261 return fold_builtin_significand (arg0, type);
10263 CASE_FLT_FN (BUILT_IN_ILOGB):
10264 CASE_FLT_FN (BUILT_IN_LOGB):
10265 return fold_builtin_logb (arg0, type);
10267 case BUILT_IN_ISASCII:
10268 return fold_builtin_isascii (arg0);
10270 case BUILT_IN_TOASCII:
10271 return fold_builtin_toascii (arg0);
10273 case BUILT_IN_ISDIGIT:
10274 return fold_builtin_isdigit (arg0);
10276 CASE_FLT_FN (BUILT_IN_FINITE):
10277 case BUILT_IN_FINITED32:
10278 case BUILT_IN_FINITED64:
10279 case BUILT_IN_FINITED128:
10280 case BUILT_IN_ISFINITE:
10281 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10283 CASE_FLT_FN (BUILT_IN_ISINF):
10284 case BUILT_IN_ISINFD32:
10285 case BUILT_IN_ISINFD64:
10286 case BUILT_IN_ISINFD128:
10287 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10289 case BUILT_IN_ISINF_SIGN:
10290 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10292 CASE_FLT_FN (BUILT_IN_ISNAN):
10293 case BUILT_IN_ISNAND32:
10294 case BUILT_IN_ISNAND64:
10295 case BUILT_IN_ISNAND128:
10296 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10298 case BUILT_IN_PRINTF:
10299 case BUILT_IN_PRINTF_UNLOCKED:
10300 case BUILT_IN_VPRINTF:
10301 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10311 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10312 IGNORE is true if the result of the function call is ignored. This
10313 function returns NULL_TREE if no simplification was possible. */
10316 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10318 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10319 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10323 CASE_FLT_FN (BUILT_IN_JN):
10324 if (validate_arg (arg0, INTEGER_TYPE)
10325 && validate_arg (arg1, REAL_TYPE))
10326 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10329 CASE_FLT_FN (BUILT_IN_YN):
10330 if (validate_arg (arg0, INTEGER_TYPE)
10331 && validate_arg (arg1, REAL_TYPE))
10332 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10336 CASE_FLT_FN (BUILT_IN_DREM):
10337 CASE_FLT_FN (BUILT_IN_REMAINDER):
10338 if (validate_arg (arg0, REAL_TYPE)
10339 && validate_arg(arg1, REAL_TYPE))
10340 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10343 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10344 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10345 if (validate_arg (arg0, REAL_TYPE)
10346 && validate_arg(arg1, POINTER_TYPE))
10347 return do_mpfr_lgamma_r (arg0, arg1, type);
10350 CASE_FLT_FN (BUILT_IN_ATAN2):
10351 if (validate_arg (arg0, REAL_TYPE)
10352 && validate_arg(arg1, REAL_TYPE))
10353 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10356 CASE_FLT_FN (BUILT_IN_FDIM):
10357 if (validate_arg (arg0, REAL_TYPE)
10358 && validate_arg(arg1, REAL_TYPE))
10359 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10362 CASE_FLT_FN (BUILT_IN_HYPOT):
10363 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10365 CASE_FLT_FN (BUILT_IN_LDEXP):
10366 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10367 CASE_FLT_FN (BUILT_IN_SCALBN):
10368 CASE_FLT_FN (BUILT_IN_SCALBLN):
10369 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10371 CASE_FLT_FN (BUILT_IN_FREXP):
10372 return fold_builtin_frexp (arg0, arg1, type);
10374 CASE_FLT_FN (BUILT_IN_MODF):
10375 return fold_builtin_modf (arg0, arg1, type);
10377 case BUILT_IN_BZERO:
10378 return fold_builtin_bzero (arg0, arg1, ignore);
10380 case BUILT_IN_FPUTS:
10381 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10383 case BUILT_IN_FPUTS_UNLOCKED:
10384 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10386 case BUILT_IN_STRSTR:
10387 return fold_builtin_strstr (arg0, arg1, type);
10389 case BUILT_IN_STRCAT:
10390 return fold_builtin_strcat (arg0, arg1);
10392 case BUILT_IN_STRSPN:
10393 return fold_builtin_strspn (arg0, arg1);
10395 case BUILT_IN_STRCSPN:
10396 return fold_builtin_strcspn (arg0, arg1);
10398 case BUILT_IN_STRCHR:
10399 case BUILT_IN_INDEX:
10400 return fold_builtin_strchr (arg0, arg1, type);
10402 case BUILT_IN_STRRCHR:
10403 case BUILT_IN_RINDEX:
10404 return fold_builtin_strrchr (arg0, arg1, type);
10406 case BUILT_IN_STRCPY:
10407 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10409 case BUILT_IN_STRCMP:
10410 return fold_builtin_strcmp (arg0, arg1);
10412 case BUILT_IN_STRPBRK:
10413 return fold_builtin_strpbrk (arg0, arg1, type);
10415 case BUILT_IN_EXPECT:
10416 return fold_builtin_expect (arg0, arg1);
10418 CASE_FLT_FN (BUILT_IN_POW):
10419 return fold_builtin_pow (fndecl, arg0, arg1, type);
10421 CASE_FLT_FN (BUILT_IN_POWI):
10422 return fold_builtin_powi (fndecl, arg0, arg1, type);
10424 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10425 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10427 CASE_FLT_FN (BUILT_IN_FMIN):
10428 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10430 CASE_FLT_FN (BUILT_IN_FMAX):
10431 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10433 case BUILT_IN_ISGREATER:
10434 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10435 case BUILT_IN_ISGREATEREQUAL:
10436 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10437 case BUILT_IN_ISLESS:
10438 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10439 case BUILT_IN_ISLESSEQUAL:
10440 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10441 case BUILT_IN_ISLESSGREATER:
10442 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10443 case BUILT_IN_ISUNORDERED:
10444 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10447 /* We do the folding for va_start in the expander. */
10448 case BUILT_IN_VA_START:
10451 case BUILT_IN_SPRINTF:
10452 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10454 case BUILT_IN_OBJECT_SIZE:
10455 return fold_builtin_object_size (arg0, arg1);
10457 case BUILT_IN_PRINTF:
10458 case BUILT_IN_PRINTF_UNLOCKED:
10459 case BUILT_IN_VPRINTF:
10460 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10462 case BUILT_IN_PRINTF_CHK:
10463 case BUILT_IN_VPRINTF_CHK:
10464 if (!validate_arg (arg0, INTEGER_TYPE)
10465 || TREE_SIDE_EFFECTS (arg0))
10468 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10471 case BUILT_IN_FPRINTF:
10472 case BUILT_IN_FPRINTF_UNLOCKED:
10473 case BUILT_IN_VFPRINTF:
10474 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10483 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10484 and ARG2. IGNORE is true if the result of the function call is ignored.
10485 This function returns NULL_TREE if no simplification was possible. */
10488 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10490 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10491 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10495 CASE_FLT_FN (BUILT_IN_SINCOS):
10496 return fold_builtin_sincos (arg0, arg1, arg2);
10498 CASE_FLT_FN (BUILT_IN_FMA):
10499 if (validate_arg (arg0, REAL_TYPE)
10500 && validate_arg(arg1, REAL_TYPE)
10501 && validate_arg(arg2, REAL_TYPE))
10502 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10505 CASE_FLT_FN (BUILT_IN_REMQUO):
10506 if (validate_arg (arg0, REAL_TYPE)
10507 && validate_arg(arg1, REAL_TYPE)
10508 && validate_arg(arg2, POINTER_TYPE))
10509 return do_mpfr_remquo (arg0, arg1, arg2);
10512 case BUILT_IN_MEMSET:
10513 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10515 case BUILT_IN_BCOPY:
10516 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10518 case BUILT_IN_MEMCPY:
10519 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10521 case BUILT_IN_MEMPCPY:
10522 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10524 case BUILT_IN_MEMMOVE:
10525 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10527 case BUILT_IN_STRNCAT:
10528 return fold_builtin_strncat (arg0, arg1, arg2);
10530 case BUILT_IN_STRNCPY:
10531 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10533 case BUILT_IN_STRNCMP:
10534 return fold_builtin_strncmp (arg0, arg1, arg2);
10536 case BUILT_IN_MEMCHR:
10537 return fold_builtin_memchr (arg0, arg1, arg2, type);
10539 case BUILT_IN_BCMP:
10540 case BUILT_IN_MEMCMP:
10541 return fold_builtin_memcmp (arg0, arg1, arg2);;
10543 case BUILT_IN_SPRINTF:
10544 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10546 case BUILT_IN_STRCPY_CHK:
10547 case BUILT_IN_STPCPY_CHK:
10548 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10551 case BUILT_IN_STRCAT_CHK:
10552 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10554 case BUILT_IN_PRINTF_CHK:
10555 case BUILT_IN_VPRINTF_CHK:
10556 if (!validate_arg (arg0, INTEGER_TYPE)
10557 || TREE_SIDE_EFFECTS (arg0))
10560 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10563 case BUILT_IN_FPRINTF:
10564 case BUILT_IN_FPRINTF_UNLOCKED:
10565 case BUILT_IN_VFPRINTF:
10566 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10568 case BUILT_IN_FPRINTF_CHK:
10569 case BUILT_IN_VFPRINTF_CHK:
10570 if (!validate_arg (arg1, INTEGER_TYPE)
10571 || TREE_SIDE_EFFECTS (arg1))
10574 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10583 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10584 ARG2, and ARG3. IGNORE is true if the result of the function call is
10585 ignored. This function returns NULL_TREE if no simplification was
10589 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10592 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10596 case BUILT_IN_MEMCPY_CHK:
10597 case BUILT_IN_MEMPCPY_CHK:
10598 case BUILT_IN_MEMMOVE_CHK:
10599 case BUILT_IN_MEMSET_CHK:
10600 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10602 DECL_FUNCTION_CODE (fndecl));
10604 case BUILT_IN_STRNCPY_CHK:
10605 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10607 case BUILT_IN_STRNCAT_CHK:
10608 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10610 case BUILT_IN_FPRINTF_CHK:
10611 case BUILT_IN_VFPRINTF_CHK:
10612 if (!validate_arg (arg1, INTEGER_TYPE)
10613 || TREE_SIDE_EFFECTS (arg1))
10616 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10626 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10627 arguments, where NARGS <= 4. IGNORE is true if the result of the
10628 function call is ignored. This function returns NULL_TREE if no
10629 simplification was possible. Note that this only folds builtins with
10630 fixed argument patterns. Foldings that do varargs-to-varargs
10631 transformations, or that match calls with more than 4 arguments,
10632 need to be handled with fold_builtin_varargs instead. */
10634 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10637 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10639 tree ret = NULL_TREE;
10644 ret = fold_builtin_0 (fndecl, ignore);
10647 ret = fold_builtin_1 (fndecl, args[0], ignore);
10650 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10653 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10656 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10664 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10665 TREE_NO_WARNING (ret) = 1;
10671 /* Builtins with folding operations that operate on "..." arguments
10672 need special handling; we need to store the arguments in a convenient
10673 data structure before attempting any folding. Fortunately there are
10674 only a few builtins that fall into this category. FNDECL is the
10675 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10676 result of the function call is ignored. */
10679 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10681 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10682 tree ret = NULL_TREE;
10686 case BUILT_IN_SPRINTF_CHK:
10687 case BUILT_IN_VSPRINTF_CHK:
10688 ret = fold_builtin_sprintf_chk (exp, fcode);
10691 case BUILT_IN_SNPRINTF_CHK:
10692 case BUILT_IN_VSNPRINTF_CHK:
10693 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10696 case BUILT_IN_FPCLASSIFY:
10697 ret = fold_builtin_fpclassify (exp);
10705 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10706 TREE_NO_WARNING (ret) = 1;
10712 /* A wrapper function for builtin folding that prevents warnings for
10713 "statement without effect" and the like, caused by removing the
10714 call node earlier than the warning is generated. */
10717 fold_call_expr (tree exp, bool ignore)
10719 tree ret = NULL_TREE;
10720 tree fndecl = get_callee_fndecl (exp);
10722 && TREE_CODE (fndecl) == FUNCTION_DECL
10723 && DECL_BUILT_IN (fndecl)
10724 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10725 yet. Defer folding until we see all the arguments
10726 (after inlining). */
10727 && !CALL_EXPR_VA_ARG_PACK (exp))
10729 int nargs = call_expr_nargs (exp);
10731 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10732 instead last argument is __builtin_va_arg_pack (). Defer folding
10733 even in that case, until arguments are finalized. */
10734 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10736 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10738 && TREE_CODE (fndecl2) == FUNCTION_DECL
10739 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10740 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10744 /* FIXME: Don't use a list in this interface. */
10745 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10746 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10749 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10751 tree *args = CALL_EXPR_ARGP (exp);
10752 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10755 ret = fold_builtin_varargs (fndecl, exp, ignore);
10758 /* Propagate location information from original call to
10759 expansion of builtin. Otherwise things like
10760 maybe_emit_chk_warning, that operate on the expansion
10761 of a builtin, will use the wrong location information. */
10762 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10764 tree realret = ret;
10765 if (TREE_CODE (ret) == NOP_EXPR)
10766 realret = TREE_OPERAND (ret, 0);
10767 if (CAN_HAVE_LOCATION_P (realret)
10768 && !EXPR_HAS_LOCATION (realret))
10769 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10779 /* Conveniently construct a function call expression. FNDECL names the
10780 function to be called and ARGLIST is a TREE_LIST of arguments. */
10783 build_function_call_expr (tree fndecl, tree arglist)
10785 tree fntype = TREE_TYPE (fndecl);
10786 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10787 int n = list_length (arglist);
10788 tree *argarray = (tree *) alloca (n * sizeof (tree));
10791 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10792 argarray[i] = TREE_VALUE (arglist);
10793 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10796 /* Conveniently construct a function call expression. FNDECL names the
10797 function to be called, N is the number of arguments, and the "..."
10798 parameters are the argument expressions. */
10801 build_call_expr (tree fndecl, int n, ...)
10804 tree fntype = TREE_TYPE (fndecl);
10805 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10806 tree *argarray = (tree *) alloca (n * sizeof (tree));
10810 for (i = 0; i < n; i++)
10811 argarray[i] = va_arg (ap, tree);
10813 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10816 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10817 N arguments are passed in the array ARGARRAY. */
10820 fold_builtin_call_array (tree type,
10825 tree ret = NULL_TREE;
10829 if (TREE_CODE (fn) == ADDR_EXPR)
10831 tree fndecl = TREE_OPERAND (fn, 0);
10832 if (TREE_CODE (fndecl) == FUNCTION_DECL
10833 && DECL_BUILT_IN (fndecl))
10835 /* If last argument is __builtin_va_arg_pack (), arguments to this
10836 function are not finalized yet. Defer folding until they are. */
10837 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10839 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10841 && TREE_CODE (fndecl2) == FUNCTION_DECL
10842 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10843 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10844 return build_call_array (type, fn, n, argarray);
10846 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10848 tree arglist = NULL_TREE;
10849 for (i = n - 1; i >= 0; i--)
10850 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10851 ret = targetm.fold_builtin (fndecl, arglist, false);
10855 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10857 /* First try the transformations that don't require consing up
10859 ret = fold_builtin_n (fndecl, argarray, n, false);
10864 /* If we got this far, we need to build an exp. */
10865 exp = build_call_array (type, fn, n, argarray);
10866 ret = fold_builtin_varargs (fndecl, exp, false);
10867 return ret ? ret : exp;
10871 return build_call_array (type, fn, n, argarray);
10874 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10875 along with N new arguments specified as the "..." parameters. SKIP
10876 is the number of arguments in EXP to be omitted. This function is used
10877 to do varargs-to-varargs transformations. */
10880 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10882 int oldnargs = call_expr_nargs (exp);
10883 int nargs = oldnargs - skip + n;
10884 tree fntype = TREE_TYPE (fndecl);
10885 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10893 buffer = XALLOCAVEC (tree, nargs);
10895 for (i = 0; i < n; i++)
10896 buffer[i] = va_arg (ap, tree);
10898 for (j = skip; j < oldnargs; j++, i++)
10899 buffer[i] = CALL_EXPR_ARG (exp, j);
10902 buffer = CALL_EXPR_ARGP (exp) + skip;
10904 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10907 /* Validate a single argument ARG against a tree code CODE representing
10911 validate_arg (const_tree arg, enum tree_code code)
10915 else if (code == POINTER_TYPE)
10916 return POINTER_TYPE_P (TREE_TYPE (arg));
10917 else if (code == INTEGER_TYPE)
10918 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10919 return code == TREE_CODE (TREE_TYPE (arg));
10922 /* This function validates the types of a function call argument list
10923 against a specified list of tree_codes. If the last specifier is a 0,
10924 that represents an ellipses, otherwise the last specifier must be a
10927 This is the GIMPLE version of validate_arglist. Eventually we want to
10928 completely convert builtins.c to work from GIMPLEs and the tree based
10929 validate_arglist will then be removed. */
10932 validate_gimple_arglist (const_gimple call, ...)
10934 enum tree_code code;
10940 va_start (ap, call);
10945 code = va_arg (ap, enum tree_code);
10949 /* This signifies an ellipses, any further arguments are all ok. */
10953 /* This signifies an endlink, if no arguments remain, return
10954 true, otherwise return false. */
10955 res = (i == gimple_call_num_args (call));
10958 /* If no parameters remain or the parameter's code does not
10959 match the specified code, return false. Otherwise continue
10960 checking any remaining arguments. */
10961 arg = gimple_call_arg (call, i++);
10962 if (!validate_arg (arg, code))
10969 /* We need gotos here since we can only have one VA_CLOSE in a
10977 /* This function validates the types of a function call argument list
10978 against a specified list of tree_codes. If the last specifier is a 0,
10979 that represents an ellipses, otherwise the last specifier must be a
10983 validate_arglist (const_tree callexpr, ...)
10985 enum tree_code code;
10988 const_call_expr_arg_iterator iter;
10991 va_start (ap, callexpr);
10992 init_const_call_expr_arg_iterator (callexpr, &iter);
10996 code = va_arg (ap, enum tree_code);
11000 /* This signifies an ellipses, any further arguments are all ok. */
11004 /* This signifies an endlink, if no arguments remain, return
11005 true, otherwise return false. */
11006 res = !more_const_call_expr_args_p (&iter);
11009 /* If no parameters remain or the parameter's code does not
11010 match the specified code, return false. Otherwise continue
11011 checking any remaining arguments. */
11012 arg = next_const_call_expr_arg (&iter);
11013 if (!validate_arg (arg, code))
11020 /* We need gotos here since we can only have one VA_CLOSE in a
11028 /* Default target-specific builtin expander that does nothing. */
11031 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11032 rtx target ATTRIBUTE_UNUSED,
11033 rtx subtarget ATTRIBUTE_UNUSED,
11034 enum machine_mode mode ATTRIBUTE_UNUSED,
11035 int ignore ATTRIBUTE_UNUSED)
11040 /* Returns true is EXP represents data that would potentially reside
11041 in a readonly section. */
11044 readonly_data_expr (tree exp)
11048 if (TREE_CODE (exp) != ADDR_EXPR)
11051 exp = get_base_address (TREE_OPERAND (exp, 0));
11055 /* Make sure we call decl_readonly_section only for trees it
11056 can handle (since it returns true for everything it doesn't
11058 if (TREE_CODE (exp) == STRING_CST
11059 || TREE_CODE (exp) == CONSTRUCTOR
11060 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11061 return decl_readonly_section (exp, 0);
11066 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11067 to the call, and TYPE is its return type.
11069 Return NULL_TREE if no simplification was possible, otherwise return the
11070 simplified form of the call as a tree.
11072 The simplified form may be a constant or other expression which
11073 computes the same value, but in a more efficient manner (including
11074 calls to other builtin functions).
11076 The call may contain arguments which need to be evaluated, but
11077 which are not useful to determine the result of the call. In
11078 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11079 COMPOUND_EXPR will be an argument which must be evaluated.
11080 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11081 COMPOUND_EXPR in the chain will contain the tree for the simplified
11082 form of the builtin function call. */
11085 fold_builtin_strstr (tree s1, tree s2, tree type)
11087 if (!validate_arg (s1, POINTER_TYPE)
11088 || !validate_arg (s2, POINTER_TYPE))
11093 const char *p1, *p2;
11095 p2 = c_getstr (s2);
11099 p1 = c_getstr (s1);
11102 const char *r = strstr (p1, p2);
11106 return build_int_cst (TREE_TYPE (s1), 0);
11108 /* Return an offset into the constant string argument. */
11109 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11110 s1, size_int (r - p1));
11111 return fold_convert (type, tem);
11114 /* The argument is const char *, and the result is char *, so we need
11115 a type conversion here to avoid a warning. */
11117 return fold_convert (type, s1);
11122 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11126 /* New argument list transforming strstr(s1, s2) to
11127 strchr(s1, s2[0]). */
11128 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11132 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11133 the call, and TYPE is its return type.
11135 Return NULL_TREE if no simplification was possible, otherwise return the
11136 simplified form of the call as a tree.
11138 The simplified form may be a constant or other expression which
11139 computes the same value, but in a more efficient manner (including
11140 calls to other builtin functions).
11142 The call may contain arguments which need to be evaluated, but
11143 which are not useful to determine the result of the call. In
11144 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11145 COMPOUND_EXPR will be an argument which must be evaluated.
11146 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11147 COMPOUND_EXPR in the chain will contain the tree for the simplified
11148 form of the builtin function call. */
11151 fold_builtin_strchr (tree s1, tree s2, tree type)
11153 if (!validate_arg (s1, POINTER_TYPE)
11154 || !validate_arg (s2, INTEGER_TYPE))
11160 if (TREE_CODE (s2) != INTEGER_CST)
11163 p1 = c_getstr (s1);
11170 if (target_char_cast (s2, &c))
11173 r = strchr (p1, c);
11176 return build_int_cst (TREE_TYPE (s1), 0);
11178 /* Return an offset into the constant string argument. */
11179 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11180 s1, size_int (r - p1));
11181 return fold_convert (type, tem);
11187 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11188 the call, and TYPE is its return type.
11190 Return NULL_TREE if no simplification was possible, otherwise return the
11191 simplified form of the call as a tree.
11193 The simplified form may be a constant or other expression which
11194 computes the same value, but in a more efficient manner (including
11195 calls to other builtin functions).
11197 The call may contain arguments which need to be evaluated, but
11198 which are not useful to determine the result of the call. In
11199 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11200 COMPOUND_EXPR will be an argument which must be evaluated.
11201 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11202 COMPOUND_EXPR in the chain will contain the tree for the simplified
11203 form of the builtin function call. */
11206 fold_builtin_strrchr (tree s1, tree s2, tree type)
11208 if (!validate_arg (s1, POINTER_TYPE)
11209 || !validate_arg (s2, INTEGER_TYPE))
11216 if (TREE_CODE (s2) != INTEGER_CST)
11219 p1 = c_getstr (s1);
11226 if (target_char_cast (s2, &c))
11229 r = strrchr (p1, c);
11232 return build_int_cst (TREE_TYPE (s1), 0);
11234 /* Return an offset into the constant string argument. */
11235 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11236 s1, size_int (r - p1));
11237 return fold_convert (type, tem);
11240 if (! integer_zerop (s2))
11243 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11247 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11248 return build_call_expr (fn, 2, s1, s2);
11252 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11253 to the call, and TYPE is its return type.
11255 Return NULL_TREE if no simplification was possible, otherwise return the
11256 simplified form of the call as a tree.
11258 The simplified form may be a constant or other expression which
11259 computes the same value, but in a more efficient manner (including
11260 calls to other builtin functions).
11262 The call may contain arguments which need to be evaluated, but
11263 which are not useful to determine the result of the call. In
11264 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11265 COMPOUND_EXPR will be an argument which must be evaluated.
11266 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11267 COMPOUND_EXPR in the chain will contain the tree for the simplified
11268 form of the builtin function call. */
11271 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11273 if (!validate_arg (s1, POINTER_TYPE)
11274 || !validate_arg (s2, POINTER_TYPE))
11279 const char *p1, *p2;
11281 p2 = c_getstr (s2);
11285 p1 = c_getstr (s1);
11288 const char *r = strpbrk (p1, p2);
11292 return build_int_cst (TREE_TYPE (s1), 0);
11294 /* Return an offset into the constant string argument. */
11295 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11296 s1, size_int (r - p1));
11297 return fold_convert (type, tem);
11301 /* strpbrk(x, "") == NULL.
11302 Evaluate and ignore s1 in case it had side-effects. */
11303 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11306 return NULL_TREE; /* Really call strpbrk. */
11308 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11312 /* New argument list transforming strpbrk(s1, s2) to
11313 strchr(s1, s2[0]). */
11314 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11318 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11321 Return NULL_TREE if no simplification was possible, otherwise return the
11322 simplified form of the call as a tree.
11324 The simplified form may be a constant or other expression which
11325 computes the same value, but in a more efficient manner (including
11326 calls to other builtin functions).
11328 The call may contain arguments which need to be evaluated, but
11329 which are not useful to determine the result of the call. In
11330 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11331 COMPOUND_EXPR will be an argument which must be evaluated.
11332 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11333 COMPOUND_EXPR in the chain will contain the tree for the simplified
11334 form of the builtin function call. */
11337 fold_builtin_strcat (tree dst, tree src)
11339 if (!validate_arg (dst, POINTER_TYPE)
11340 || !validate_arg (src, POINTER_TYPE))
11344 const char *p = c_getstr (src);
11346 /* If the string length is zero, return the dst parameter. */
11347 if (p && *p == '\0')
11354 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11355 arguments to the call.
11357 Return NULL_TREE if no simplification was possible, otherwise return the
11358 simplified form of the call as a tree.
11360 The simplified form may be a constant or other expression which
11361 computes the same value, but in a more efficient manner (including
11362 calls to other builtin functions).
11364 The call may contain arguments which need to be evaluated, but
11365 which are not useful to determine the result of the call. In
11366 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11367 COMPOUND_EXPR will be an argument which must be evaluated.
11368 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11369 COMPOUND_EXPR in the chain will contain the tree for the simplified
11370 form of the builtin function call. */
11373 fold_builtin_strncat (tree dst, tree src, tree len)
11375 if (!validate_arg (dst, POINTER_TYPE)
11376 || !validate_arg (src, POINTER_TYPE)
11377 || !validate_arg (len, INTEGER_TYPE))
11381 const char *p = c_getstr (src);
11383 /* If the requested length is zero, or the src parameter string
11384 length is zero, return the dst parameter. */
11385 if (integer_zerop (len) || (p && *p == '\0'))
11386 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11388 /* If the requested len is greater than or equal to the string
11389 length, call strcat. */
11390 if (TREE_CODE (len) == INTEGER_CST && p
11391 && compare_tree_int (len, strlen (p)) >= 0)
11393 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11395 /* If the replacement _DECL isn't initialized, don't do the
11400 return build_call_expr (fn, 2, dst, src);
11406 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11409 Return NULL_TREE if no simplification was possible, otherwise return the
11410 simplified form of the call as a tree.
11412 The simplified form may be a constant or other expression which
11413 computes the same value, but in a more efficient manner (including
11414 calls to other builtin functions).
11416 The call may contain arguments which need to be evaluated, but
11417 which are not useful to determine the result of the call. In
11418 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11419 COMPOUND_EXPR will be an argument which must be evaluated.
11420 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11421 COMPOUND_EXPR in the chain will contain the tree for the simplified
11422 form of the builtin function call. */
11425 fold_builtin_strspn (tree s1, tree s2)
11427 if (!validate_arg (s1, POINTER_TYPE)
11428 || !validate_arg (s2, POINTER_TYPE))
11432 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11434 /* If both arguments are constants, evaluate at compile-time. */
11437 const size_t r = strspn (p1, p2);
11438 return size_int (r);
11441 /* If either argument is "", return NULL_TREE. */
11442 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11443 /* Evaluate and ignore both arguments in case either one has
11445 return omit_two_operands (size_type_node, size_zero_node,
11451 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11454 Return NULL_TREE if no simplification was possible, otherwise return the
11455 simplified form of the call as a tree.
11457 The simplified form may be a constant or other expression which
11458 computes the same value, but in a more efficient manner (including
11459 calls to other builtin functions).
11461 The call may contain arguments which need to be evaluated, but
11462 which are not useful to determine the result of the call. In
11463 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11464 COMPOUND_EXPR will be an argument which must be evaluated.
11465 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11466 COMPOUND_EXPR in the chain will contain the tree for the simplified
11467 form of the builtin function call. */
11470 fold_builtin_strcspn (tree s1, tree s2)
11472 if (!validate_arg (s1, POINTER_TYPE)
11473 || !validate_arg (s2, POINTER_TYPE))
11477 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11479 /* If both arguments are constants, evaluate at compile-time. */
11482 const size_t r = strcspn (p1, p2);
11483 return size_int (r);
11486 /* If the first argument is "", return NULL_TREE. */
11487 if (p1 && *p1 == '\0')
11489 /* Evaluate and ignore argument s2 in case it has
11491 return omit_one_operand (size_type_node,
11492 size_zero_node, s2);
11495 /* If the second argument is "", return __builtin_strlen(s1). */
11496 if (p2 && *p2 == '\0')
11498 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11500 /* If the replacement _DECL isn't initialized, don't do the
11505 return build_call_expr (fn, 1, s1);
11511 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11512 to the call. IGNORE is true if the value returned
11513 by the builtin will be ignored. UNLOCKED is true is true if this
11514 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11515 the known length of the string. Return NULL_TREE if no simplification
11519 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11521 /* If we're using an unlocked function, assume the other unlocked
11522 functions exist explicitly. */
11523 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11524 : implicit_built_in_decls[BUILT_IN_FPUTC];
11525 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11526 : implicit_built_in_decls[BUILT_IN_FWRITE];
11528 /* If the return value is used, don't do the transformation. */
11532 /* Verify the arguments in the original call. */
11533 if (!validate_arg (arg0, POINTER_TYPE)
11534 || !validate_arg (arg1, POINTER_TYPE))
11538 len = c_strlen (arg0, 0);
11540 /* Get the length of the string passed to fputs. If the length
11541 can't be determined, punt. */
11543 || TREE_CODE (len) != INTEGER_CST)
11546 switch (compare_tree_int (len, 1))
11548 case -1: /* length is 0, delete the call entirely . */
11549 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11551 case 0: /* length is 1, call fputc. */
11553 const char *p = c_getstr (arg0);
11558 return build_call_expr (fn_fputc, 2,
11559 build_int_cst (NULL_TREE, p[0]), arg1);
11565 case 1: /* length is greater than 1, call fwrite. */
11567 /* If optimizing for size keep fputs. */
11568 if (optimize_function_for_size_p (cfun))
11570 /* New argument list transforming fputs(string, stream) to
11571 fwrite(string, 1, len, stream). */
11573 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11578 gcc_unreachable ();
11583 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11584 produced. False otherwise. This is done so that we don't output the error
11585 or warning twice or three times. */
11588 fold_builtin_next_arg (tree exp, bool va_start_p)
11590 tree fntype = TREE_TYPE (current_function_decl);
11591 int nargs = call_expr_nargs (exp);
11594 if (TYPE_ARG_TYPES (fntype) == 0
11595 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11596 == void_type_node))
11598 error ("%<va_start%> used in function with fixed args");
11604 if (va_start_p && (nargs != 2))
11606 error ("wrong number of arguments to function %<va_start%>");
11609 arg = CALL_EXPR_ARG (exp, 1);
11611 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11612 when we checked the arguments and if needed issued a warning. */
11617 /* Evidently an out of date version of <stdarg.h>; can't validate
11618 va_start's second argument, but can still work as intended. */
11619 warning (0, "%<__builtin_next_arg%> called without an argument");
11622 else if (nargs > 1)
11624 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11627 arg = CALL_EXPR_ARG (exp, 0);
11630 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11631 or __builtin_next_arg (0) the first time we see it, after checking
11632 the arguments and if needed issuing a warning. */
11633 if (!integer_zerop (arg))
11635 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11637 /* Strip off all nops for the sake of the comparison. This
11638 is not quite the same as STRIP_NOPS. It does more.
11639 We must also strip off INDIRECT_EXPR for C++ reference
11641 while (CONVERT_EXPR_P (arg)
11642 || TREE_CODE (arg) == INDIRECT_REF)
11643 arg = TREE_OPERAND (arg, 0);
11644 if (arg != last_parm)
11646 /* FIXME: Sometimes with the tree optimizers we can get the
11647 not the last argument even though the user used the last
11648 argument. We just warn and set the arg to be the last
11649 argument so that we will get wrong-code because of
11651 warning (0, "second parameter of %<va_start%> not last named argument");
11654 /* Undefined by C99 7.15.1.4p4 (va_start):
11655 "If the parameter parmN is declared with the register storage
11656 class, with a function or array type, or with a type that is
11657 not compatible with the type that results after application of
11658 the default argument promotions, the behavior is undefined."
11660 else if (DECL_REGISTER (arg))
11661 warning (0, "undefined behaviour when second parameter of "
11662 "%<va_start%> is declared with %<register%> storage");
11664 /* We want to verify the second parameter just once before the tree
11665 optimizers are run and then avoid keeping it in the tree,
11666 as otherwise we could warn even for correct code like:
11667 void foo (int i, ...)
11668 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11670 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11672 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11678 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11679 ORIG may be null if this is a 2-argument call. We don't attempt to
11680 simplify calls with more than 3 arguments.
11682 Return NULL_TREE if no simplification was possible, otherwise return the
11683 simplified form of the call as a tree. If IGNORED is true, it means that
11684 the caller does not use the returned value of the function. */
11687 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11690 const char *fmt_str = NULL;
11692 /* Verify the required arguments in the original call. We deal with two
11693 types of sprintf() calls: 'sprintf (str, fmt)' and
11694 'sprintf (dest, "%s", orig)'. */
11695 if (!validate_arg (dest, POINTER_TYPE)
11696 || !validate_arg (fmt, POINTER_TYPE))
11698 if (orig && !validate_arg (orig, POINTER_TYPE))
11701 /* Check whether the format is a literal string constant. */
11702 fmt_str = c_getstr (fmt);
11703 if (fmt_str == NULL)
11707 retval = NULL_TREE;
11709 if (!init_target_chars ())
11712 /* If the format doesn't contain % args or %%, use strcpy. */
11713 if (strchr (fmt_str, target_percent) == NULL)
11715 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11720 /* Don't optimize sprintf (buf, "abc", ptr++). */
11724 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11725 'format' is known to contain no % formats. */
11726 call = build_call_expr (fn, 2, dest, fmt);
11728 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11731 /* If the format is "%s", use strcpy if the result isn't used. */
11732 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11735 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11740 /* Don't crash on sprintf (str1, "%s"). */
11744 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11747 retval = c_strlen (orig, 1);
11748 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11751 call = build_call_expr (fn, 2, dest, orig);
11754 if (call && retval)
11756 retval = fold_convert
11757 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11759 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11765 /* Expand a call EXP to __builtin_object_size. */
11768 expand_builtin_object_size (tree exp)
11771 int object_size_type;
11772 tree fndecl = get_callee_fndecl (exp);
11774 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11776 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11778 expand_builtin_trap ();
11782 ost = CALL_EXPR_ARG (exp, 1);
11785 if (TREE_CODE (ost) != INTEGER_CST
11786 || tree_int_cst_sgn (ost) < 0
11787 || compare_tree_int (ost, 3) > 0)
11789 error ("%Klast argument of %D is not integer constant between 0 and 3",
11791 expand_builtin_trap ();
11795 object_size_type = tree_low_cst (ost, 0);
11797 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11800 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11801 FCODE is the BUILT_IN_* to use.
11802 Return NULL_RTX if we failed; the caller should emit a normal call,
11803 otherwise try to get the result in TARGET, if convenient (and in
11804 mode MODE if that's convenient). */
11807 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11808 enum built_in_function fcode)
11810 tree dest, src, len, size;
11812 if (!validate_arglist (exp,
11814 fcode == BUILT_IN_MEMSET_CHK
11815 ? INTEGER_TYPE : POINTER_TYPE,
11816 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11819 dest = CALL_EXPR_ARG (exp, 0);
11820 src = CALL_EXPR_ARG (exp, 1);
11821 len = CALL_EXPR_ARG (exp, 2);
11822 size = CALL_EXPR_ARG (exp, 3);
11824 if (! host_integerp (size, 1))
11827 if (host_integerp (len, 1) || integer_all_onesp (size))
11831 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11833 warning (0, "%Kcall to %D will always overflow destination buffer",
11834 exp, get_callee_fndecl (exp));
11839 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11840 mem{cpy,pcpy,move,set} is available. */
11843 case BUILT_IN_MEMCPY_CHK:
11844 fn = built_in_decls[BUILT_IN_MEMCPY];
11846 case BUILT_IN_MEMPCPY_CHK:
11847 fn = built_in_decls[BUILT_IN_MEMPCPY];
11849 case BUILT_IN_MEMMOVE_CHK:
11850 fn = built_in_decls[BUILT_IN_MEMMOVE];
11852 case BUILT_IN_MEMSET_CHK:
11853 fn = built_in_decls[BUILT_IN_MEMSET];
11862 fn = build_call_expr (fn, 3, dest, src, len);
11863 STRIP_TYPE_NOPS (fn);
11864 while (TREE_CODE (fn) == COMPOUND_EXPR)
11866 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11868 fn = TREE_OPERAND (fn, 1);
11870 if (TREE_CODE (fn) == CALL_EXPR)
11871 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11872 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11874 else if (fcode == BUILT_IN_MEMSET_CHK)
11878 unsigned int dest_align
11879 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11881 /* If DEST is not a pointer type, call the normal function. */
11882 if (dest_align == 0)
11885 /* If SRC and DEST are the same (and not volatile), do nothing. */
11886 if (operand_equal_p (src, dest, 0))
11890 if (fcode != BUILT_IN_MEMPCPY_CHK)
11892 /* Evaluate and ignore LEN in case it has side-effects. */
11893 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11894 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11897 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11898 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11901 /* __memmove_chk special case. */
11902 if (fcode == BUILT_IN_MEMMOVE_CHK)
11904 unsigned int src_align
11905 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11907 if (src_align == 0)
11910 /* If src is categorized for a readonly section we can use
11911 normal __memcpy_chk. */
11912 if (readonly_data_expr (src))
11914 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11917 fn = build_call_expr (fn, 4, dest, src, len, size);
11918 STRIP_TYPE_NOPS (fn);
11919 while (TREE_CODE (fn) == COMPOUND_EXPR)
11921 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11923 fn = TREE_OPERAND (fn, 1);
11925 if (TREE_CODE (fn) == CALL_EXPR)
11926 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11927 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11934 /* Emit warning if a buffer overflow is detected at compile time. */
11937 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11944 case BUILT_IN_STRCPY_CHK:
11945 case BUILT_IN_STPCPY_CHK:
11946 /* For __strcat_chk the warning will be emitted only if overflowing
11947 by at least strlen (dest) + 1 bytes. */
11948 case BUILT_IN_STRCAT_CHK:
11949 len = CALL_EXPR_ARG (exp, 1);
11950 size = CALL_EXPR_ARG (exp, 2);
11953 case BUILT_IN_STRNCAT_CHK:
11954 case BUILT_IN_STRNCPY_CHK:
11955 len = CALL_EXPR_ARG (exp, 2);
11956 size = CALL_EXPR_ARG (exp, 3);
11958 case BUILT_IN_SNPRINTF_CHK:
11959 case BUILT_IN_VSNPRINTF_CHK:
11960 len = CALL_EXPR_ARG (exp, 1);
11961 size = CALL_EXPR_ARG (exp, 3);
11964 gcc_unreachable ();
11970 if (! host_integerp (size, 1) || integer_all_onesp (size))
11975 len = c_strlen (len, 1);
11976 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11979 else if (fcode == BUILT_IN_STRNCAT_CHK)
11981 tree src = CALL_EXPR_ARG (exp, 1);
11982 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11984 src = c_strlen (src, 1);
11985 if (! src || ! host_integerp (src, 1))
11987 warning (0, "%Kcall to %D might overflow destination buffer",
11988 exp, get_callee_fndecl (exp));
11991 else if (tree_int_cst_lt (src, size))
11994 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11997 warning (0, "%Kcall to %D will always overflow destination buffer",
11998 exp, get_callee_fndecl (exp));
12001 /* Emit warning if a buffer overflow is detected at compile time
12002 in __sprintf_chk/__vsprintf_chk calls. */
12005 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12007 tree dest, size, len, fmt, flag;
12008 const char *fmt_str;
12009 int nargs = call_expr_nargs (exp);
12011 /* Verify the required arguments in the original call. */
12015 dest = CALL_EXPR_ARG (exp, 0);
12016 flag = CALL_EXPR_ARG (exp, 1);
12017 size = CALL_EXPR_ARG (exp, 2);
12018 fmt = CALL_EXPR_ARG (exp, 3);
12020 if (! host_integerp (size, 1) || integer_all_onesp (size))
12023 /* Check whether the format is a literal string constant. */
12024 fmt_str = c_getstr (fmt);
12025 if (fmt_str == NULL)
12028 if (!init_target_chars ())
12031 /* If the format doesn't contain % args or %%, we know its size. */
12032 if (strchr (fmt_str, target_percent) == 0)
12033 len = build_int_cstu (size_type_node, strlen (fmt_str));
12034 /* If the format is "%s" and first ... argument is a string literal,
12036 else if (fcode == BUILT_IN_SPRINTF_CHK
12037 && strcmp (fmt_str, target_percent_s) == 0)
12043 arg = CALL_EXPR_ARG (exp, 4);
12044 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12047 len = c_strlen (arg, 1);
12048 if (!len || ! host_integerp (len, 1))
12054 if (! tree_int_cst_lt (len, size))
12056 warning (0, "%Kcall to %D will always overflow destination buffer",
12057 exp, get_callee_fndecl (exp));
12061 /* Emit warning if a free is called with address of a variable. */
12064 maybe_emit_free_warning (tree exp)
12066 tree arg = CALL_EXPR_ARG (exp, 0);
12069 if (TREE_CODE (arg) != ADDR_EXPR)
12072 arg = get_base_address (TREE_OPERAND (arg, 0));
12073 if (arg == NULL || INDIRECT_REF_P (arg))
12076 if (SSA_VAR_P (arg))
12077 warning (0, "%Kattempt to free a non-heap object %qD", exp, arg);
12079 warning (0, "%Kattempt to free a non-heap object", exp);
12082 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12086 fold_builtin_object_size (tree ptr, tree ost)
12088 tree ret = NULL_TREE;
12089 int object_size_type;
12091 if (!validate_arg (ptr, POINTER_TYPE)
12092 || !validate_arg (ost, INTEGER_TYPE))
12097 if (TREE_CODE (ost) != INTEGER_CST
12098 || tree_int_cst_sgn (ost) < 0
12099 || compare_tree_int (ost, 3) > 0)
12102 object_size_type = tree_low_cst (ost, 0);
12104 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12105 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12106 and (size_t) 0 for types 2 and 3. */
12107 if (TREE_SIDE_EFFECTS (ptr))
12108 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12110 if (TREE_CODE (ptr) == ADDR_EXPR)
12111 ret = build_int_cstu (size_type_node,
12112 compute_builtin_object_size (ptr, object_size_type));
12114 else if (TREE_CODE (ptr) == SSA_NAME)
12116 unsigned HOST_WIDE_INT bytes;
12118 /* If object size is not known yet, delay folding until
12119 later. Maybe subsequent passes will help determining
12121 bytes = compute_builtin_object_size (ptr, object_size_type);
12122 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12124 ret = build_int_cstu (size_type_node, bytes);
12129 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12130 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12131 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12138 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12139 DEST, SRC, LEN, and SIZE are the arguments to the call.
12140 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12141 code of the builtin. If MAXLEN is not NULL, it is maximum length
12142 passed as third argument. */
12145 fold_builtin_memory_chk (tree fndecl,
12146 tree dest, tree src, tree len, tree size,
12147 tree maxlen, bool ignore,
12148 enum built_in_function fcode)
12152 if (!validate_arg (dest, POINTER_TYPE)
12153 || !validate_arg (src,
12154 (fcode == BUILT_IN_MEMSET_CHK
12155 ? INTEGER_TYPE : POINTER_TYPE))
12156 || !validate_arg (len, INTEGER_TYPE)
12157 || !validate_arg (size, INTEGER_TYPE))
12160 /* If SRC and DEST are the same (and not volatile), return DEST
12161 (resp. DEST+LEN for __mempcpy_chk). */
12162 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12164 if (fcode != BUILT_IN_MEMPCPY_CHK)
12165 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12168 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12169 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
12173 if (! host_integerp (size, 1))
12176 if (! integer_all_onesp (size))
12178 if (! host_integerp (len, 1))
12180 /* If LEN is not constant, try MAXLEN too.
12181 For MAXLEN only allow optimizing into non-_ocs function
12182 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12183 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12185 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12187 /* (void) __mempcpy_chk () can be optimized into
12188 (void) __memcpy_chk (). */
12189 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12193 return build_call_expr (fn, 4, dest, src, len, size);
12201 if (tree_int_cst_lt (size, maxlen))
12206 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12207 mem{cpy,pcpy,move,set} is available. */
12210 case BUILT_IN_MEMCPY_CHK:
12211 fn = built_in_decls[BUILT_IN_MEMCPY];
12213 case BUILT_IN_MEMPCPY_CHK:
12214 fn = built_in_decls[BUILT_IN_MEMPCPY];
12216 case BUILT_IN_MEMMOVE_CHK:
12217 fn = built_in_decls[BUILT_IN_MEMMOVE];
12219 case BUILT_IN_MEMSET_CHK:
12220 fn = built_in_decls[BUILT_IN_MEMSET];
12229 return build_call_expr (fn, 3, dest, src, len);
12232 /* Fold a call to the __st[rp]cpy_chk builtin.
12233 DEST, SRC, and SIZE are the arguments to the call.
12234 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12235 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12236 strings passed as second argument. */
12239 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12240 tree maxlen, bool ignore,
12241 enum built_in_function fcode)
12245 if (!validate_arg (dest, POINTER_TYPE)
12246 || !validate_arg (src, POINTER_TYPE)
12247 || !validate_arg (size, INTEGER_TYPE))
12250 /* If SRC and DEST are the same (and not volatile), return DEST. */
12251 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12252 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12254 if (! host_integerp (size, 1))
12257 if (! integer_all_onesp (size))
12259 len = c_strlen (src, 1);
12260 if (! len || ! host_integerp (len, 1))
12262 /* If LEN is not constant, try MAXLEN too.
12263 For MAXLEN only allow optimizing into non-_ocs function
12264 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12265 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12267 if (fcode == BUILT_IN_STPCPY_CHK)
12272 /* If return value of __stpcpy_chk is ignored,
12273 optimize into __strcpy_chk. */
12274 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12278 return build_call_expr (fn, 3, dest, src, size);
12281 if (! len || TREE_SIDE_EFFECTS (len))
12284 /* If c_strlen returned something, but not a constant,
12285 transform __strcpy_chk into __memcpy_chk. */
12286 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12290 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12291 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12292 build_call_expr (fn, 4,
12293 dest, src, len, size));
12299 if (! tree_int_cst_lt (maxlen, size))
12303 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12304 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12305 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12309 return build_call_expr (fn, 2, dest, src);
12312 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12313 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12314 length passed as third argument. */
12317 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12322 if (!validate_arg (dest, POINTER_TYPE)
12323 || !validate_arg (src, POINTER_TYPE)
12324 || !validate_arg (len, INTEGER_TYPE)
12325 || !validate_arg (size, INTEGER_TYPE))
12328 if (! host_integerp (size, 1))
12331 if (! integer_all_onesp (size))
12333 if (! host_integerp (len, 1))
12335 /* If LEN is not constant, try MAXLEN too.
12336 For MAXLEN only allow optimizing into non-_ocs function
12337 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12338 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12344 if (tree_int_cst_lt (size, maxlen))
12348 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12349 fn = built_in_decls[BUILT_IN_STRNCPY];
12353 return build_call_expr (fn, 3, dest, src, len);
12356 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12357 are the arguments to the call. */
12360 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12365 if (!validate_arg (dest, POINTER_TYPE)
12366 || !validate_arg (src, POINTER_TYPE)
12367 || !validate_arg (size, INTEGER_TYPE))
12370 p = c_getstr (src);
12371 /* If the SRC parameter is "", return DEST. */
12372 if (p && *p == '\0')
12373 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12375 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12378 /* If __builtin_strcat_chk is used, assume strcat is available. */
12379 fn = built_in_decls[BUILT_IN_STRCAT];
12383 return build_call_expr (fn, 2, dest, src);
12386 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12390 fold_builtin_strncat_chk (tree fndecl,
12391 tree dest, tree src, tree len, tree size)
12396 if (!validate_arg (dest, POINTER_TYPE)
12397 || !validate_arg (src, POINTER_TYPE)
12398 || !validate_arg (size, INTEGER_TYPE)
12399 || !validate_arg (size, INTEGER_TYPE))
12402 p = c_getstr (src);
12403 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12404 if (p && *p == '\0')
12405 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12406 else if (integer_zerop (len))
12407 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12409 if (! host_integerp (size, 1))
12412 if (! integer_all_onesp (size))
12414 tree src_len = c_strlen (src, 1);
12416 && host_integerp (src_len, 1)
12417 && host_integerp (len, 1)
12418 && ! tree_int_cst_lt (len, src_len))
12420 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12421 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12425 return build_call_expr (fn, 3, dest, src, size);
12430 /* If __builtin_strncat_chk is used, assume strncat is available. */
12431 fn = built_in_decls[BUILT_IN_STRNCAT];
12435 return build_call_expr (fn, 3, dest, src, len);
12438 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12439 a normal call should be emitted rather than expanding the function
12440 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12443 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12445 tree dest, size, len, fn, fmt, flag;
12446 const char *fmt_str;
12447 int nargs = call_expr_nargs (exp);
12449 /* Verify the required arguments in the original call. */
12452 dest = CALL_EXPR_ARG (exp, 0);
12453 if (!validate_arg (dest, POINTER_TYPE))
12455 flag = CALL_EXPR_ARG (exp, 1);
12456 if (!validate_arg (flag, INTEGER_TYPE))
12458 size = CALL_EXPR_ARG (exp, 2);
12459 if (!validate_arg (size, INTEGER_TYPE))
12461 fmt = CALL_EXPR_ARG (exp, 3);
12462 if (!validate_arg (fmt, POINTER_TYPE))
12465 if (! host_integerp (size, 1))
12470 if (!init_target_chars ())
12473 /* Check whether the format is a literal string constant. */
12474 fmt_str = c_getstr (fmt);
12475 if (fmt_str != NULL)
12477 /* If the format doesn't contain % args or %%, we know the size. */
12478 if (strchr (fmt_str, target_percent) == 0)
12480 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12481 len = build_int_cstu (size_type_node, strlen (fmt_str));
12483 /* If the format is "%s" and first ... argument is a string literal,
12484 we know the size too. */
12485 else if (fcode == BUILT_IN_SPRINTF_CHK
12486 && strcmp (fmt_str, target_percent_s) == 0)
12492 arg = CALL_EXPR_ARG (exp, 4);
12493 if (validate_arg (arg, POINTER_TYPE))
12495 len = c_strlen (arg, 1);
12496 if (! len || ! host_integerp (len, 1))
12503 if (! integer_all_onesp (size))
12505 if (! len || ! tree_int_cst_lt (len, size))
12509 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12510 or if format doesn't contain % chars or is "%s". */
12511 if (! integer_zerop (flag))
12513 if (fmt_str == NULL)
12515 if (strchr (fmt_str, target_percent) != NULL
12516 && strcmp (fmt_str, target_percent_s))
12520 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12521 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12522 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12526 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12529 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12530 a normal call should be emitted rather than expanding the function
12531 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12532 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12533 passed as second argument. */
12536 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12537 enum built_in_function fcode)
12539 tree dest, size, len, fn, fmt, flag;
12540 const char *fmt_str;
12542 /* Verify the required arguments in the original call. */
12543 if (call_expr_nargs (exp) < 5)
12545 dest = CALL_EXPR_ARG (exp, 0);
12546 if (!validate_arg (dest, POINTER_TYPE))
12548 len = CALL_EXPR_ARG (exp, 1);
12549 if (!validate_arg (len, INTEGER_TYPE))
12551 flag = CALL_EXPR_ARG (exp, 2);
12552 if (!validate_arg (flag, INTEGER_TYPE))
12554 size = CALL_EXPR_ARG (exp, 3);
12555 if (!validate_arg (size, INTEGER_TYPE))
12557 fmt = CALL_EXPR_ARG (exp, 4);
12558 if (!validate_arg (fmt, POINTER_TYPE))
12561 if (! host_integerp (size, 1))
12564 if (! integer_all_onesp (size))
12566 if (! host_integerp (len, 1))
12568 /* If LEN is not constant, try MAXLEN too.
12569 For MAXLEN only allow optimizing into non-_ocs function
12570 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12571 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12577 if (tree_int_cst_lt (size, maxlen))
12581 if (!init_target_chars ())
12584 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12585 or if format doesn't contain % chars or is "%s". */
12586 if (! integer_zerop (flag))
12588 fmt_str = c_getstr (fmt);
12589 if (fmt_str == NULL)
12591 if (strchr (fmt_str, target_percent) != NULL
12592 && strcmp (fmt_str, target_percent_s))
12596 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12598 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12599 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12603 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12606 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12607 FMT and ARG are the arguments to the call; we don't fold cases with
12608 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12610 Return NULL_TREE if no simplification was possible, otherwise return the
12611 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12612 code of the function to be simplified. */
12615 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12616 enum built_in_function fcode)
12618 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12619 const char *fmt_str = NULL;
12621 /* If the return value is used, don't do the transformation. */
12625 /* Verify the required arguments in the original call. */
12626 if (!validate_arg (fmt, POINTER_TYPE))
12629 /* Check whether the format is a literal string constant. */
12630 fmt_str = c_getstr (fmt);
12631 if (fmt_str == NULL)
12634 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12636 /* If we're using an unlocked function, assume the other
12637 unlocked functions exist explicitly. */
12638 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12639 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12643 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12644 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12647 if (!init_target_chars ())
12650 if (strcmp (fmt_str, target_percent_s) == 0
12651 || strchr (fmt_str, target_percent) == NULL)
12655 if (strcmp (fmt_str, target_percent_s) == 0)
12657 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12660 if (!arg || !validate_arg (arg, POINTER_TYPE))
12663 str = c_getstr (arg);
12669 /* The format specifier doesn't contain any '%' characters. */
12670 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12676 /* If the string was "", printf does nothing. */
12677 if (str[0] == '\0')
12678 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12680 /* If the string has length of 1, call putchar. */
12681 if (str[1] == '\0')
12683 /* Given printf("c"), (where c is any one character,)
12684 convert "c"[0] to an int and pass that to the replacement
12686 newarg = build_int_cst (NULL_TREE, str[0]);
12688 call = build_call_expr (fn_putchar, 1, newarg);
12692 /* If the string was "string\n", call puts("string"). */
12693 size_t len = strlen (str);
12694 if ((unsigned char)str[len - 1] == target_newline)
12696 /* Create a NUL-terminated string that's one char shorter
12697 than the original, stripping off the trailing '\n'. */
12698 char *newstr = XALLOCAVEC (char, len);
12699 memcpy (newstr, str, len - 1);
12700 newstr[len - 1] = 0;
12702 newarg = build_string_literal (len, newstr);
12704 call = build_call_expr (fn_puts, 1, newarg);
12707 /* We'd like to arrange to call fputs(string,stdout) here,
12708 but we need stdout and don't have a way to get it yet. */
12713 /* The other optimizations can be done only on the non-va_list variants. */
12714 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12717 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12718 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12720 if (!arg || !validate_arg (arg, POINTER_TYPE))
12723 call = build_call_expr (fn_puts, 1, arg);
12726 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12727 else if (strcmp (fmt_str, target_percent_c) == 0)
12729 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12732 call = build_call_expr (fn_putchar, 1, arg);
12738 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12741 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12742 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12743 more than 3 arguments, and ARG may be null in the 2-argument case.
12745 Return NULL_TREE if no simplification was possible, otherwise return the
12746 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12747 code of the function to be simplified. */
12750 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12751 enum built_in_function fcode)
12753 tree fn_fputc, fn_fputs, call = NULL_TREE;
12754 const char *fmt_str = NULL;
12756 /* If the return value is used, don't do the transformation. */
12760 /* Verify the required arguments in the original call. */
12761 if (!validate_arg (fp, POINTER_TYPE))
12763 if (!validate_arg (fmt, POINTER_TYPE))
12766 /* Check whether the format is a literal string constant. */
12767 fmt_str = c_getstr (fmt);
12768 if (fmt_str == NULL)
12771 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12773 /* If we're using an unlocked function, assume the other
12774 unlocked functions exist explicitly. */
12775 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12776 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12780 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12781 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12784 if (!init_target_chars ())
12787 /* If the format doesn't contain % args or %%, use strcpy. */
12788 if (strchr (fmt_str, target_percent) == NULL)
12790 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12794 /* If the format specifier was "", fprintf does nothing. */
12795 if (fmt_str[0] == '\0')
12797 /* If FP has side-effects, just wait until gimplification is
12799 if (TREE_SIDE_EFFECTS (fp))
12802 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12805 /* When "string" doesn't contain %, replace all cases of
12806 fprintf (fp, string) with fputs (string, fp). The fputs
12807 builtin will take care of special cases like length == 1. */
12809 call = build_call_expr (fn_fputs, 2, fmt, fp);
12812 /* The other optimizations can be done only on the non-va_list variants. */
12813 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12816 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12817 else if (strcmp (fmt_str, target_percent_s) == 0)
12819 if (!arg || !validate_arg (arg, POINTER_TYPE))
12822 call = build_call_expr (fn_fputs, 2, arg, fp);
12825 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12826 else if (strcmp (fmt_str, target_percent_c) == 0)
12828 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12831 call = build_call_expr (fn_fputc, 2, arg, fp);
12836 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12839 /* Initialize format string characters in the target charset. */
12842 init_target_chars (void)
12847 target_newline = lang_hooks.to_target_charset ('\n');
12848 target_percent = lang_hooks.to_target_charset ('%');
12849 target_c = lang_hooks.to_target_charset ('c');
12850 target_s = lang_hooks.to_target_charset ('s');
12851 if (target_newline == 0 || target_percent == 0 || target_c == 0
12855 target_percent_c[0] = target_percent;
12856 target_percent_c[1] = target_c;
12857 target_percent_c[2] = '\0';
12859 target_percent_s[0] = target_percent;
12860 target_percent_s[1] = target_s;
12861 target_percent_s[2] = '\0';
12863 target_percent_s_newline[0] = target_percent;
12864 target_percent_s_newline[1] = target_s;
12865 target_percent_s_newline[2] = target_newline;
12866 target_percent_s_newline[3] = '\0';
12873 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12874 and no overflow/underflow occurred. INEXACT is true if M was not
12875 exactly calculated. TYPE is the tree type for the result. This
12876 function assumes that you cleared the MPFR flags and then
12877 calculated M to see if anything subsequently set a flag prior to
12878 entering this function. Return NULL_TREE if any checks fail. */
12881 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12883 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12884 overflow/underflow occurred. If -frounding-math, proceed iff the
12885 result of calling FUNC was exact. */
12886 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12887 && (!flag_rounding_math || !inexact))
12889 REAL_VALUE_TYPE rr;
12891 real_from_mpfr (&rr, m, type, GMP_RNDN);
12892 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12893 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12894 but the mpft_t is not, then we underflowed in the
12896 if (real_isfinite (&rr)
12897 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12899 REAL_VALUE_TYPE rmode;
12901 real_convert (&rmode, TYPE_MODE (type), &rr);
12902 /* Proceed iff the specified mode can hold the value. */
12903 if (real_identical (&rmode, &rr))
12904 return build_real (type, rmode);
12910 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12911 FUNC on it and return the resulting value as a tree with type TYPE.
12912 If MIN and/or MAX are not NULL, then the supplied ARG must be
12913 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12914 acceptable values, otherwise they are not. The mpfr precision is
12915 set to the precision of TYPE. We assume that function FUNC returns
12916 zero if the result could be calculated exactly within the requested
12920 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12921 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12924 tree result = NULL_TREE;
12928 /* To proceed, MPFR must exactly represent the target floating point
12929 format, which only happens when the target base equals two. */
12930 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12931 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12933 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12935 if (real_isfinite (ra)
12936 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12937 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12939 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12940 const int prec = fmt->p;
12941 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12945 mpfr_init2 (m, prec);
12946 mpfr_from_real (m, ra, GMP_RNDN);
12947 mpfr_clear_flags ();
12948 inexact = func (m, m, rnd);
12949 result = do_mpfr_ckconv (m, type, inexact);
12957 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12958 FUNC on it and return the resulting value as a tree with type TYPE.
12959 The mpfr precision is set to the precision of TYPE. We assume that
12960 function FUNC returns zero if the result could be calculated
12961 exactly within the requested precision. */
12964 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12965 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12967 tree result = NULL_TREE;
12972 /* To proceed, MPFR must exactly represent the target floating point
12973 format, which only happens when the target base equals two. */
12974 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12975 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12976 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12978 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12979 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12981 if (real_isfinite (ra1) && real_isfinite (ra2))
12983 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12984 const int prec = fmt->p;
12985 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12989 mpfr_inits2 (prec, m1, m2, NULL);
12990 mpfr_from_real (m1, ra1, GMP_RNDN);
12991 mpfr_from_real (m2, ra2, GMP_RNDN);
12992 mpfr_clear_flags ();
12993 inexact = func (m1, m1, m2, rnd);
12994 result = do_mpfr_ckconv (m1, type, inexact);
12995 mpfr_clears (m1, m2, NULL);
13002 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13003 FUNC on it and return the resulting value as a tree with type TYPE.
13004 The mpfr precision is set to the precision of TYPE. We assume that
13005 function FUNC returns zero if the result could be calculated
13006 exactly within the requested precision. */
13009 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13010 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13012 tree result = NULL_TREE;
13018 /* To proceed, MPFR must exactly represent the target floating point
13019 format, which only happens when the target base equals two. */
13020 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13021 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13022 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13023 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13025 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13026 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13027 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13029 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13031 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13032 const int prec = fmt->p;
13033 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13037 mpfr_inits2 (prec, m1, m2, m3, NULL);
13038 mpfr_from_real (m1, ra1, GMP_RNDN);
13039 mpfr_from_real (m2, ra2, GMP_RNDN);
13040 mpfr_from_real (m3, ra3, GMP_RNDN);
13041 mpfr_clear_flags ();
13042 inexact = func (m1, m1, m2, m3, rnd);
13043 result = do_mpfr_ckconv (m1, type, inexact);
13044 mpfr_clears (m1, m2, m3, NULL);
13051 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13052 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13053 If ARG_SINP and ARG_COSP are NULL then the result is returned
13054 as a complex value.
13055 The type is taken from the type of ARG and is used for setting the
13056 precision of the calculation and results. */
13059 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13061 tree const type = TREE_TYPE (arg);
13062 tree result = NULL_TREE;
13066 /* To proceed, MPFR must exactly represent the target floating point
13067 format, which only happens when the target base equals two. */
13068 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13069 && TREE_CODE (arg) == REAL_CST
13070 && !TREE_OVERFLOW (arg))
13072 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13074 if (real_isfinite (ra))
13076 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13077 const int prec = fmt->p;
13078 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13079 tree result_s, result_c;
13083 mpfr_inits2 (prec, m, ms, mc, NULL);
13084 mpfr_from_real (m, ra, GMP_RNDN);
13085 mpfr_clear_flags ();
13086 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13087 result_s = do_mpfr_ckconv (ms, type, inexact);
13088 result_c = do_mpfr_ckconv (mc, type, inexact);
13089 mpfr_clears (m, ms, mc, NULL);
13090 if (result_s && result_c)
13092 /* If we are to return in a complex value do so. */
13093 if (!arg_sinp && !arg_cosp)
13094 return build_complex (build_complex_type (type),
13095 result_c, result_s);
13097 /* Dereference the sin/cos pointer arguments. */
13098 arg_sinp = build_fold_indirect_ref (arg_sinp);
13099 arg_cosp = build_fold_indirect_ref (arg_cosp);
13100 /* Proceed if valid pointer type were passed in. */
13101 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13102 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13104 /* Set the values. */
13105 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13107 TREE_SIDE_EFFECTS (result_s) = 1;
13108 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13110 TREE_SIDE_EFFECTS (result_c) = 1;
13111 /* Combine the assignments into a compound expr. */
13112 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13113 result_s, result_c));
13121 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13122 two-argument mpfr order N Bessel function FUNC on them and return
13123 the resulting value as a tree with type TYPE. The mpfr precision
13124 is set to the precision of TYPE. We assume that function FUNC
13125 returns zero if the result could be calculated exactly within the
13126 requested precision. */
13128 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13129 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13130 const REAL_VALUE_TYPE *min, bool inclusive)
13132 tree result = NULL_TREE;
13137 /* To proceed, MPFR must exactly represent the target floating point
13138 format, which only happens when the target base equals two. */
13139 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13140 && host_integerp (arg1, 0)
13141 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13143 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13144 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13147 && real_isfinite (ra)
13148 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13150 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13151 const int prec = fmt->p;
13152 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13156 mpfr_init2 (m, prec);
13157 mpfr_from_real (m, ra, GMP_RNDN);
13158 mpfr_clear_flags ();
13159 inexact = func (m, n, m, rnd);
13160 result = do_mpfr_ckconv (m, type, inexact);
13168 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13169 the pointer *(ARG_QUO) and return the result. The type is taken
13170 from the type of ARG0 and is used for setting the precision of the
13171 calculation and results. */
13174 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13176 tree const type = TREE_TYPE (arg0);
13177 tree result = NULL_TREE;
13182 /* To proceed, MPFR must exactly represent the target floating point
13183 format, which only happens when the target base equals two. */
13184 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13185 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13186 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13188 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13189 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13191 if (real_isfinite (ra0) && real_isfinite (ra1))
13193 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13194 const int prec = fmt->p;
13195 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13200 mpfr_inits2 (prec, m0, m1, NULL);
13201 mpfr_from_real (m0, ra0, GMP_RNDN);
13202 mpfr_from_real (m1, ra1, GMP_RNDN);
13203 mpfr_clear_flags ();
13204 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13205 /* Remquo is independent of the rounding mode, so pass
13206 inexact=0 to do_mpfr_ckconv(). */
13207 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13208 mpfr_clears (m0, m1, NULL);
13211 /* MPFR calculates quo in the host's long so it may
13212 return more bits in quo than the target int can hold
13213 if sizeof(host long) > sizeof(target int). This can
13214 happen even for native compilers in LP64 mode. In
13215 these cases, modulo the quo value with the largest
13216 number that the target int can hold while leaving one
13217 bit for the sign. */
13218 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13219 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13221 /* Dereference the quo pointer argument. */
13222 arg_quo = build_fold_indirect_ref (arg_quo);
13223 /* Proceed iff a valid pointer type was passed in. */
13224 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13226 /* Set the value. */
13227 tree result_quo = fold_build2 (MODIFY_EXPR,
13228 TREE_TYPE (arg_quo), arg_quo,
13229 build_int_cst (NULL, integer_quo));
13230 TREE_SIDE_EFFECTS (result_quo) = 1;
13231 /* Combine the quo assignment with the rem. */
13232 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13233 result_quo, result_rem));
13241 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13242 resulting value as a tree with type TYPE. The mpfr precision is
13243 set to the precision of TYPE. We assume that this mpfr function
13244 returns zero if the result could be calculated exactly within the
13245 requested precision. In addition, the integer pointer represented
13246 by ARG_SG will be dereferenced and set to the appropriate signgam
13250 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13252 tree result = NULL_TREE;
13256 /* To proceed, MPFR must exactly represent the target floating point
13257 format, which only happens when the target base equals two. Also
13258 verify ARG is a constant and that ARG_SG is an int pointer. */
13259 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13260 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13261 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13262 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13264 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13266 /* In addition to NaN and Inf, the argument cannot be zero or a
13267 negative integer. */
13268 if (real_isfinite (ra)
13269 && ra->cl != rvc_zero
13270 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13272 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13273 const int prec = fmt->p;
13274 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13279 mpfr_init2 (m, prec);
13280 mpfr_from_real (m, ra, GMP_RNDN);
13281 mpfr_clear_flags ();
13282 inexact = mpfr_lgamma (m, &sg, m, rnd);
13283 result_lg = do_mpfr_ckconv (m, type, inexact);
13289 /* Dereference the arg_sg pointer argument. */
13290 arg_sg = build_fold_indirect_ref (arg_sg);
13291 /* Assign the signgam value into *arg_sg. */
13292 result_sg = fold_build2 (MODIFY_EXPR,
13293 TREE_TYPE (arg_sg), arg_sg,
13294 build_int_cst (NULL, sg));
13295 TREE_SIDE_EFFECTS (result_sg) = 1;
13296 /* Combine the signgam assignment with the lgamma result. */
13297 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13298 result_sg, result_lg));
13307 The functions below provide an alternate interface for folding
13308 builtin function calls presented as GIMPLE_CALL statements rather
13309 than as CALL_EXPRs. The folded result is still expressed as a
13310 tree. There is too much code duplication in the handling of
13311 varargs functions, and a more intrusive re-factoring would permit
13312 better sharing of code between the tree and statement-based
13313 versions of these functions. */
13315 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13316 along with N new arguments specified as the "..." parameters. SKIP
13317 is the number of arguments in STMT to be omitted. This function is used
13318 to do varargs-to-varargs transformations. */
13321 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13323 int oldnargs = gimple_call_num_args (stmt);
13324 int nargs = oldnargs - skip + n;
13325 tree fntype = TREE_TYPE (fndecl);
13326 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13331 buffer = XALLOCAVEC (tree, nargs);
13333 for (i = 0; i < n; i++)
13334 buffer[i] = va_arg (ap, tree);
13336 for (j = skip; j < oldnargs; j++, i++)
13337 buffer[i] = gimple_call_arg (stmt, j);
13339 return fold (build_call_array (TREE_TYPE (fntype), fn, nargs, buffer));
13342 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13343 a normal call should be emitted rather than expanding the function
13344 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13347 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13349 tree dest, size, len, fn, fmt, flag;
13350 const char *fmt_str;
13351 int nargs = gimple_call_num_args (stmt);
13353 /* Verify the required arguments in the original call. */
13356 dest = gimple_call_arg (stmt, 0);
13357 if (!validate_arg (dest, POINTER_TYPE))
13359 flag = gimple_call_arg (stmt, 1);
13360 if (!validate_arg (flag, INTEGER_TYPE))
13362 size = gimple_call_arg (stmt, 2);
13363 if (!validate_arg (size, INTEGER_TYPE))
13365 fmt = gimple_call_arg (stmt, 3);
13366 if (!validate_arg (fmt, POINTER_TYPE))
13369 if (! host_integerp (size, 1))
13374 if (!init_target_chars ())
13377 /* Check whether the format is a literal string constant. */
13378 fmt_str = c_getstr (fmt);
13379 if (fmt_str != NULL)
13381 /* If the format doesn't contain % args or %%, we know the size. */
13382 if (strchr (fmt_str, target_percent) == 0)
13384 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13385 len = build_int_cstu (size_type_node, strlen (fmt_str));
13387 /* If the format is "%s" and first ... argument is a string literal,
13388 we know the size too. */
13389 else if (fcode == BUILT_IN_SPRINTF_CHK
13390 && strcmp (fmt_str, target_percent_s) == 0)
13396 arg = gimple_call_arg (stmt, 4);
13397 if (validate_arg (arg, POINTER_TYPE))
13399 len = c_strlen (arg, 1);
13400 if (! len || ! host_integerp (len, 1))
13407 if (! integer_all_onesp (size))
13409 if (! len || ! tree_int_cst_lt (len, size))
13413 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13414 or if format doesn't contain % chars or is "%s". */
13415 if (! integer_zerop (flag))
13417 if (fmt_str == NULL)
13419 if (strchr (fmt_str, target_percent) != NULL
13420 && strcmp (fmt_str, target_percent_s))
13424 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13425 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13426 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13430 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13433 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13434 a normal call should be emitted rather than expanding the function
13435 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13436 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13437 passed as second argument. */
13440 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13441 enum built_in_function fcode)
13443 tree dest, size, len, fn, fmt, flag;
13444 const char *fmt_str;
13446 /* Verify the required arguments in the original call. */
13447 if (gimple_call_num_args (stmt) < 5)
13449 dest = gimple_call_arg (stmt, 0);
13450 if (!validate_arg (dest, POINTER_TYPE))
13452 len = gimple_call_arg (stmt, 1);
13453 if (!validate_arg (len, INTEGER_TYPE))
13455 flag = gimple_call_arg (stmt, 2);
13456 if (!validate_arg (flag, INTEGER_TYPE))
13458 size = gimple_call_arg (stmt, 3);
13459 if (!validate_arg (size, INTEGER_TYPE))
13461 fmt = gimple_call_arg (stmt, 4);
13462 if (!validate_arg (fmt, POINTER_TYPE))
13465 if (! host_integerp (size, 1))
13468 if (! integer_all_onesp (size))
13470 if (! host_integerp (len, 1))
13472 /* If LEN is not constant, try MAXLEN too.
13473 For MAXLEN only allow optimizing into non-_ocs function
13474 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13475 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13481 if (tree_int_cst_lt (size, maxlen))
13485 if (!init_target_chars ())
13488 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13489 or if format doesn't contain % chars or is "%s". */
13490 if (! integer_zerop (flag))
13492 fmt_str = c_getstr (fmt);
13493 if (fmt_str == NULL)
13495 if (strchr (fmt_str, target_percent) != NULL
13496 && strcmp (fmt_str, target_percent_s))
13500 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13502 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13503 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13507 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13510 /* Builtins with folding operations that operate on "..." arguments
13511 need special handling; we need to store the arguments in a convenient
13512 data structure before attempting any folding. Fortunately there are
13513 only a few builtins that fall into this category. FNDECL is the
13514 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13515 result of the function call is ignored. */
13518 gimple_fold_builtin_varargs (tree fndecl, gimple stmt, bool ignore ATTRIBUTE_UNUSED)
13520 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13521 tree ret = NULL_TREE;
13525 case BUILT_IN_SPRINTF_CHK:
13526 case BUILT_IN_VSPRINTF_CHK:
13527 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13530 case BUILT_IN_SNPRINTF_CHK:
13531 case BUILT_IN_VSNPRINTF_CHK:
13532 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13539 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13540 TREE_NO_WARNING (ret) = 1;
13546 /* A wrapper function for builtin folding that prevents warnings for
13547 "statement without effect" and the like, caused by removing the
13548 call node earlier than the warning is generated. */
13551 fold_call_stmt (gimple stmt, bool ignore)
13553 tree ret = NULL_TREE;
13554 tree fndecl = gimple_call_fndecl (stmt);
13556 && TREE_CODE (fndecl) == FUNCTION_DECL
13557 && DECL_BUILT_IN (fndecl)
13558 && !gimple_call_va_arg_pack_p (stmt))
13560 int nargs = gimple_call_num_args (stmt);
13562 /* FIXME: Don't use a list in this interface. */
13563 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13565 tree arglist = NULL_TREE;
13567 for (i = nargs - 1; i >= 0; i--)
13568 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13569 return targetm.fold_builtin (fndecl, arglist, ignore);
13573 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13575 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13577 for (i = 0; i < nargs; i++)
13578 args[i] = gimple_call_arg (stmt, i);
13579 ret = fold_builtin_n (fndecl, args, nargs, ignore);
13582 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13585 /* Propagate location information from original call to
13586 expansion of builtin. Otherwise things like
13587 maybe_emit_chk_warning, that operate on the expansion
13588 of a builtin, will use the wrong location information. */
13589 if (gimple_has_location (stmt))
13591 tree realret = ret;
13592 if (TREE_CODE (ret) == NOP_EXPR)
13593 realret = TREE_OPERAND (ret, 0);
13594 if (CAN_HAVE_LOCATION_P (realret)
13595 && !EXPR_HAS_LOCATION (realret))
13596 SET_EXPR_LOCATION (realret, gimple_location (stmt));