1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
62 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
65 /* Define the names of the builtin function types and codes. */
66 const char *const built_in_class_names[4]
67 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
69 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
70 const char * built_in_names[(int) END_BUILTINS] =
72 #include "builtins.def"
76 /* Setup an array of _DECL trees, make sure each element is
77 initialized to NULL_TREE. */
78 tree built_in_decls[(int) END_BUILTINS];
79 /* Declarations used when constructing the builtin implicitly in the compiler.
80 It may be NULL_TREE when this is invalid (for instance runtime is not
81 required to implement the function call in all cases). */
82 tree implicit_built_in_decls[(int) END_BUILTINS];
84 static const char *c_getstr (tree);
85 static rtx c_readstr (const char *, enum machine_mode);
86 static int target_char_cast (tree, char *);
87 static rtx get_memory_rtx (tree, tree);
88 static int apply_args_size (void);
89 static int apply_result_size (void);
90 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
91 static rtx result_vector (int, rtx);
93 static void expand_builtin_update_setjmp_buf (rtx);
94 static void expand_builtin_prefetch (tree);
95 static rtx expand_builtin_apply_args (void);
96 static rtx expand_builtin_apply_args_1 (void);
97 static rtx expand_builtin_apply (rtx, rtx, rtx);
98 static void expand_builtin_return (rtx);
99 static enum type_class type_to_class (tree);
100 static rtx expand_builtin_classify_type (tree);
101 static void expand_errno_check (tree, rtx);
102 static rtx expand_builtin_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
104 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
105 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
106 static rtx expand_builtin_sincos (tree);
107 static rtx expand_builtin_cexpi (tree, rtx, rtx);
108 static rtx expand_builtin_int_roundingfn (tree, rtx);
109 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
110 static rtx expand_builtin_args_info (tree);
111 static rtx expand_builtin_next_arg (void);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
119 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
120 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
121 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
127 enum machine_mode, int);
128 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
129 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
130 enum machine_mode, int);
131 static rtx expand_builtin_bcopy (tree, int);
132 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
133 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
134 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
136 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
137 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
139 static rtx expand_builtin_bzero (tree);
140 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
141 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
142 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
143 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
144 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
145 static rtx expand_builtin_alloca (tree, rtx);
146 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
147 static rtx expand_builtin_frame_address (tree, tree);
148 static rtx expand_builtin_fputs (tree, rtx, bool);
149 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
150 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
151 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
152 static tree stabilize_va_list (tree, int);
153 static rtx expand_builtin_expect (tree, rtx);
154 static tree fold_builtin_constant_p (tree);
155 static tree fold_builtin_expect (tree, tree);
156 static tree fold_builtin_classify_type (tree);
157 static tree fold_builtin_strlen (tree);
158 static tree fold_builtin_inf (tree, int);
159 static tree fold_builtin_nan (tree, tree, int);
160 static tree rewrite_call_expr (tree, int, tree, int, ...);
161 static bool validate_arg (const_tree, enum tree_code code);
162 static bool integer_valued_real_p (tree);
163 static tree fold_trunc_transparent_mathfn (tree, tree);
164 static bool readonly_data_expr (tree);
165 static rtx expand_builtin_fabs (tree, rtx, rtx);
166 static rtx expand_builtin_signbit (tree, rtx);
167 static tree fold_builtin_sqrt (tree, tree);
168 static tree fold_builtin_cbrt (tree, tree);
169 static tree fold_builtin_pow (tree, tree, tree, tree);
170 static tree fold_builtin_powi (tree, tree, tree, tree);
171 static tree fold_builtin_cos (tree, tree, tree);
172 static tree fold_builtin_cosh (tree, tree, tree);
173 static tree fold_builtin_tan (tree, tree);
174 static tree fold_builtin_trunc (tree, tree);
175 static tree fold_builtin_floor (tree, tree);
176 static tree fold_builtin_ceil (tree, tree);
177 static tree fold_builtin_round (tree, tree);
178 static tree fold_builtin_int_roundingfn (tree, tree);
179 static tree fold_builtin_bitop (tree, tree);
180 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
181 static tree fold_builtin_strchr (tree, tree, tree);
182 static tree fold_builtin_memchr (tree, tree, tree, tree);
183 static tree fold_builtin_memcmp (tree, tree, tree);
184 static tree fold_builtin_strcmp (tree, tree);
185 static tree fold_builtin_strncmp (tree, tree, tree);
186 static tree fold_builtin_signbit (tree, tree);
187 static tree fold_builtin_copysign (tree, tree, tree, tree);
188 static tree fold_builtin_isascii (tree);
189 static tree fold_builtin_toascii (tree);
190 static tree fold_builtin_isdigit (tree);
191 static tree fold_builtin_fabs (tree, tree);
192 static tree fold_builtin_abs (tree, tree);
193 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
195 static tree fold_builtin_n (tree, tree *, int, bool);
196 static tree fold_builtin_0 (tree, bool);
197 static tree fold_builtin_1 (tree, tree, bool);
198 static tree fold_builtin_2 (tree, tree, tree, bool);
199 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
200 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
201 static tree fold_builtin_varargs (tree, tree, bool);
203 static tree fold_builtin_strpbrk (tree, tree, tree);
204 static tree fold_builtin_strstr (tree, tree, tree);
205 static tree fold_builtin_strrchr (tree, tree, tree);
206 static tree fold_builtin_strcat (tree, tree);
207 static tree fold_builtin_strncat (tree, tree, tree);
208 static tree fold_builtin_strspn (tree, tree);
209 static tree fold_builtin_strcspn (tree, tree);
210 static tree fold_builtin_sprintf (tree, tree, tree, int);
212 static rtx expand_builtin_object_size (tree);
213 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
214 enum built_in_function);
215 static void maybe_emit_chk_warning (tree, enum built_in_function);
216 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
217 static void maybe_emit_free_warning (tree);
218 static tree fold_builtin_object_size (tree, tree);
219 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
220 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
221 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
222 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
223 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
224 enum built_in_function);
225 static bool init_target_chars (void);
227 static unsigned HOST_WIDE_INT target_newline;
228 static unsigned HOST_WIDE_INT target_percent;
229 static unsigned HOST_WIDE_INT target_c;
230 static unsigned HOST_WIDE_INT target_s;
231 static char target_percent_c[3];
232 static char target_percent_s[3];
233 static char target_percent_s_newline[4];
234 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
235 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
236 static tree do_mpfr_arg2 (tree, tree, tree,
237 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
238 static tree do_mpfr_arg3 (tree, tree, tree, tree,
239 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
240 static tree do_mpfr_sincos (tree, tree, tree);
241 static tree do_mpfr_bessel_n (tree, tree, tree,
242 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
243 const REAL_VALUE_TYPE *, bool);
244 static tree do_mpfr_remquo (tree, tree, tree);
245 static tree do_mpfr_lgamma_r (tree, tree, tree);
248 is_builtin_name (const char *name)
250 if (strncmp (name, "__builtin_", 10) == 0)
252 if (strncmp (name, "__sync_", 7) == 0)
257 /* Return true if NODE should be considered for inline expansion regardless
258 of the optimization level. This means whenever a function is invoked with
259 its "internal" name, which normally contains the prefix "__builtin". */
262 called_as_built_in (tree node)
264 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
265 we want the name used to call the function, not the name it
267 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
268 return is_builtin_name (name);
271 /* Return the alignment in bits of EXP, an object.
272 Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
273 guessed alignment e.g. from type alignment. */
276 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
281 if (handled_component_p (exp))
283 HOST_WIDE_INT bitsize, bitpos;
285 enum machine_mode mode;
286 int unsignedp, volatilep;
288 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
289 &mode, &unsignedp, &volatilep, true);
291 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
296 if (TREE_CODE (offset) == PLUS_EXPR)
298 next_offset = TREE_OPERAND (offset, 0);
299 offset = TREE_OPERAND (offset, 1);
303 if (host_integerp (offset, 1))
305 /* Any overflow in calculating offset_bits won't change
308 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
311 inner = MIN (inner, (offset_bits & -offset_bits));
313 else if (TREE_CODE (offset) == MULT_EXPR
314 && host_integerp (TREE_OPERAND (offset, 1), 1))
316 /* Any overflow in calculating offset_factor won't change
318 unsigned offset_factor
319 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
323 inner = MIN (inner, (offset_factor & -offset_factor));
327 inner = MIN (inner, BITS_PER_UNIT);
330 offset = next_offset;
334 align = MIN (inner, DECL_ALIGN (exp));
335 #ifdef CONSTANT_ALIGNMENT
336 else if (CONSTANT_CLASS_P (exp))
337 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
339 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
340 || TREE_CODE (exp) == INDIRECT_REF)
341 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
343 align = MIN (align, inner);
344 return MIN (align, max_align);
347 /* Returns true iff we can trust that alignment information has been
348 calculated properly. */
351 can_trust_pointer_alignment (void)
353 /* We rely on TER to compute accurate alignment information. */
354 return (optimize && flag_tree_ter);
357 /* Return the alignment in bits of EXP, a pointer valued expression.
358 But don't return more than MAX_ALIGN no matter what.
359 The alignment returned is, by default, the alignment of the thing that
360 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
362 Otherwise, look at the expression to see if we can do better, i.e., if the
363 expression is actually pointing at an object whose alignment is tighter. */
366 get_pointer_alignment (tree exp, unsigned int max_align)
368 unsigned int align, inner;
370 if (!can_trust_pointer_alignment ())
373 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
376 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
377 align = MIN (align, max_align);
381 switch (TREE_CODE (exp))
384 exp = TREE_OPERAND (exp, 0);
385 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
388 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
389 align = MIN (inner, max_align);
392 case POINTER_PLUS_EXPR:
393 /* If sum of pointer + int, restrict our maximum alignment to that
394 imposed by the integer. If not, we can't do any better than
396 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
399 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
400 & (max_align / BITS_PER_UNIT - 1))
404 exp = TREE_OPERAND (exp, 0);
408 /* See what we are pointing at and look at its alignment. */
409 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
417 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
418 way, because it could contain a zero byte in the middle.
419 TREE_STRING_LENGTH is the size of the character array, not the string.
421 ONLY_VALUE should be nonzero if the result is not going to be emitted
422 into the instruction stream and zero if it is going to be expanded.
423 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
424 is returned, otherwise NULL, since
425 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
426 evaluate the side-effects.
428 The value returned is of type `ssizetype'.
430 Unfortunately, string_constant can't access the values of const char
431 arrays with initializers, so neither can we do so here. */
434 c_strlen (tree src, int only_value)
437 HOST_WIDE_INT offset;
442 if (TREE_CODE (src) == COND_EXPR
443 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
447 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
448 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
449 if (tree_int_cst_equal (len1, len2))
453 if (TREE_CODE (src) == COMPOUND_EXPR
454 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
455 return c_strlen (TREE_OPERAND (src, 1), only_value);
457 src = string_constant (src, &offset_node);
461 max = TREE_STRING_LENGTH (src) - 1;
462 ptr = TREE_STRING_POINTER (src);
464 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
466 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
467 compute the offset to the following null if we don't know where to
468 start searching for it. */
471 for (i = 0; i < max; i++)
475 /* We don't know the starting offset, but we do know that the string
476 has no internal zero bytes. We can assume that the offset falls
477 within the bounds of the string; otherwise, the programmer deserves
478 what he gets. Subtract the offset from the length of the string,
479 and return that. This would perhaps not be valid if we were dealing
480 with named arrays in addition to literal string constants. */
482 return size_diffop (size_int (max), offset_node);
485 /* We have a known offset into the string. Start searching there for
486 a null character if we can represent it as a single HOST_WIDE_INT. */
487 if (offset_node == 0)
489 else if (! host_integerp (offset_node, 0))
492 offset = tree_low_cst (offset_node, 0);
494 /* If the offset is known to be out of bounds, warn, and call strlen at
496 if (offset < 0 || offset > max)
498 /* Suppress multiple warnings for propagated constant strings. */
499 if (! TREE_NO_WARNING (src))
501 warning (0, "offset outside bounds of constant string");
502 TREE_NO_WARNING (src) = 1;
507 /* Use strlen to search for the first zero byte. Since any strings
508 constructed with build_string will have nulls appended, we win even
509 if we get handed something like (char[4])"abcd".
511 Since OFFSET is our starting index into the string, no further
512 calculation is needed. */
513 return ssize_int (strlen (ptr + offset));
516 /* Return a char pointer for a C string if it is a string constant
517 or sum of string constant and integer constant. */
524 src = string_constant (src, &offset_node);
528 if (offset_node == 0)
529 return TREE_STRING_POINTER (src);
530 else if (!host_integerp (offset_node, 1)
531 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
534 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
537 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
538 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
541 c_readstr (const char *str, enum machine_mode mode)
547 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
552 for (i = 0; i < GET_MODE_SIZE (mode); i++)
555 if (WORDS_BIG_ENDIAN)
556 j = GET_MODE_SIZE (mode) - i - 1;
557 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
558 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
559 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
561 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
564 ch = (unsigned char) str[i];
565 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
567 return immed_double_const (c[0], c[1], mode);
570 /* Cast a target constant CST to target CHAR and if that value fits into
571 host char type, return zero and put that value into variable pointed to by
575 target_char_cast (tree cst, char *p)
577 unsigned HOST_WIDE_INT val, hostval;
579 if (!host_integerp (cst, 1)
580 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
583 val = tree_low_cst (cst, 1);
584 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
585 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
588 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
589 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
598 /* Similar to save_expr, but assumes that arbitrary code is not executed
599 in between the multiple evaluations. In particular, we assume that a
600 non-addressable local variable will not be modified. */
603 builtin_save_expr (tree exp)
605 if (TREE_ADDRESSABLE (exp) == 0
606 && (TREE_CODE (exp) == PARM_DECL
607 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
610 return save_expr (exp);
613 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
614 times to get the address of either a higher stack frame, or a return
615 address located within it (depending on FNDECL_CODE). */
618 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
622 #ifdef INITIAL_FRAME_ADDRESS_RTX
623 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
627 /* For a zero count with __builtin_return_address, we don't care what
628 frame address we return, because target-specific definitions will
629 override us. Therefore frame pointer elimination is OK, and using
630 the soft frame pointer is OK.
632 For a nonzero count, or a zero count with __builtin_frame_address,
633 we require a stable offset from the current frame pointer to the
634 previous one, so we must use the hard frame pointer, and
635 we must disable frame pointer elimination. */
636 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
637 tem = frame_pointer_rtx;
640 tem = hard_frame_pointer_rtx;
642 /* Tell reload not to eliminate the frame pointer. */
643 crtl->accesses_prior_frames = 1;
647 /* Some machines need special handling before we can access
648 arbitrary frames. For example, on the SPARC, we must first flush
649 all register windows to the stack. */
650 #ifdef SETUP_FRAME_ADDRESSES
652 SETUP_FRAME_ADDRESSES ();
655 /* On the SPARC, the return address is not in the frame, it is in a
656 register. There is no way to access it off of the current frame
657 pointer, but it can be accessed off the previous frame pointer by
658 reading the value from the register window save area. */
659 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
660 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
664 /* Scan back COUNT frames to the specified frame. */
665 for (i = 0; i < count; i++)
667 /* Assume the dynamic chain pointer is in the word that the
668 frame address points to, unless otherwise specified. */
669 #ifdef DYNAMIC_CHAIN_ADDRESS
670 tem = DYNAMIC_CHAIN_ADDRESS (tem);
672 tem = memory_address (Pmode, tem);
673 tem = gen_frame_mem (Pmode, tem);
674 tem = copy_to_reg (tem);
677 /* For __builtin_frame_address, return what we've got. But, on
678 the SPARC for example, we may have to add a bias. */
679 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
680 #ifdef FRAME_ADDR_RTX
681 return FRAME_ADDR_RTX (tem);
686 /* For __builtin_return_address, get the return address from that frame. */
687 #ifdef RETURN_ADDR_RTX
688 tem = RETURN_ADDR_RTX (count, tem);
690 tem = memory_address (Pmode,
691 plus_constant (tem, GET_MODE_SIZE (Pmode)));
692 tem = gen_frame_mem (Pmode, tem);
697 /* Alias set used for setjmp buffer. */
698 static alias_set_type setjmp_alias_set = -1;
700 /* Construct the leading half of a __builtin_setjmp call. Control will
701 return to RECEIVER_LABEL. This is also called directly by the SJLJ
702 exception handling code. */
705 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
707 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
711 if (setjmp_alias_set == -1)
712 setjmp_alias_set = new_alias_set ();
714 buf_addr = convert_memory_address (Pmode, buf_addr);
716 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
718 /* We store the frame pointer and the address of receiver_label in
719 the buffer and use the rest of it for the stack save area, which
720 is machine-dependent. */
722 mem = gen_rtx_MEM (Pmode, buf_addr);
723 set_mem_alias_set (mem, setjmp_alias_set);
724 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
726 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
727 set_mem_alias_set (mem, setjmp_alias_set);
729 emit_move_insn (validize_mem (mem),
730 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
732 stack_save = gen_rtx_MEM (sa_mode,
733 plus_constant (buf_addr,
734 2 * GET_MODE_SIZE (Pmode)));
735 set_mem_alias_set (stack_save, setjmp_alias_set);
736 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
738 /* If there is further processing to do, do it. */
739 #ifdef HAVE_builtin_setjmp_setup
740 if (HAVE_builtin_setjmp_setup)
741 emit_insn (gen_builtin_setjmp_setup (buf_addr));
744 /* Tell optimize_save_area_alloca that extra work is going to
745 need to go on during alloca. */
746 cfun->calls_setjmp = 1;
748 /* We have a nonlocal label. */
749 cfun->has_nonlocal_label = 1;
752 /* Construct the trailing part of a __builtin_setjmp call. This is
753 also called directly by the SJLJ exception handling code. */
756 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
758 /* Clobber the FP when we get here, so we have to make sure it's
759 marked as used by this function. */
760 emit_use (hard_frame_pointer_rtx);
762 /* Mark the static chain as clobbered here so life information
763 doesn't get messed up for it. */
764 emit_clobber (static_chain_rtx);
766 /* Now put in the code to restore the frame pointer, and argument
767 pointer, if needed. */
768 #ifdef HAVE_nonlocal_goto
769 if (! HAVE_nonlocal_goto)
772 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
773 /* This might change the hard frame pointer in ways that aren't
774 apparent to early optimization passes, so force a clobber. */
775 emit_clobber (hard_frame_pointer_rtx);
778 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
779 if (fixed_regs[ARG_POINTER_REGNUM])
781 #ifdef ELIMINABLE_REGS
783 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
785 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
786 if (elim_regs[i].from == ARG_POINTER_REGNUM
787 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
790 if (i == ARRAY_SIZE (elim_regs))
793 /* Now restore our arg pointer from the address at which it
794 was saved in our stack frame. */
795 emit_move_insn (crtl->args.internal_arg_pointer,
796 copy_to_reg (get_arg_pointer_save_area ()));
801 #ifdef HAVE_builtin_setjmp_receiver
802 if (HAVE_builtin_setjmp_receiver)
803 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
806 #ifdef HAVE_nonlocal_goto_receiver
807 if (HAVE_nonlocal_goto_receiver)
808 emit_insn (gen_nonlocal_goto_receiver ());
813 /* We must not allow the code we just generated to be reordered by
814 scheduling. Specifically, the update of the frame pointer must
815 happen immediately, not later. */
816 emit_insn (gen_blockage ());
819 /* __builtin_longjmp is passed a pointer to an array of five words (not
820 all will be used on all machines). It operates similarly to the C
821 library function of the same name, but is more efficient. Much of
822 the code below is copied from the handling of non-local gotos. */
825 expand_builtin_longjmp (rtx buf_addr, rtx value)
827 rtx fp, lab, stack, insn, last;
828 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
830 /* DRAP is needed for stack realign if longjmp is expanded to current
832 if (SUPPORTS_STACK_ALIGNMENT)
833 crtl->need_drap = true;
835 if (setjmp_alias_set == -1)
836 setjmp_alias_set = new_alias_set ();
838 buf_addr = convert_memory_address (Pmode, buf_addr);
840 buf_addr = force_reg (Pmode, buf_addr);
842 /* We used to store value in static_chain_rtx, but that fails if pointers
843 are smaller than integers. We instead require that the user must pass
844 a second argument of 1, because that is what builtin_setjmp will
845 return. This also makes EH slightly more efficient, since we are no
846 longer copying around a value that we don't care about. */
847 gcc_assert (value == const1_rtx);
849 last = get_last_insn ();
850 #ifdef HAVE_builtin_longjmp
851 if (HAVE_builtin_longjmp)
852 emit_insn (gen_builtin_longjmp (buf_addr));
856 fp = gen_rtx_MEM (Pmode, buf_addr);
857 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
858 GET_MODE_SIZE (Pmode)));
860 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
861 2 * GET_MODE_SIZE (Pmode)));
862 set_mem_alias_set (fp, setjmp_alias_set);
863 set_mem_alias_set (lab, setjmp_alias_set);
864 set_mem_alias_set (stack, setjmp_alias_set);
866 /* Pick up FP, label, and SP from the block and jump. This code is
867 from expand_goto in stmt.c; see there for detailed comments. */
868 #ifdef HAVE_nonlocal_goto
869 if (HAVE_nonlocal_goto)
870 /* We have to pass a value to the nonlocal_goto pattern that will
871 get copied into the static_chain pointer, but it does not matter
872 what that value is, because builtin_setjmp does not use it. */
873 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
877 lab = copy_to_reg (lab);
879 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
880 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
882 emit_move_insn (hard_frame_pointer_rtx, fp);
883 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
885 emit_use (hard_frame_pointer_rtx);
886 emit_use (stack_pointer_rtx);
887 emit_indirect_jump (lab);
891 /* Search backwards and mark the jump insn as a non-local goto.
892 Note that this precludes the use of __builtin_longjmp to a
893 __builtin_setjmp target in the same function. However, we've
894 already cautioned the user that these functions are for
895 internal exception handling use only. */
896 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
898 gcc_assert (insn != last);
902 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
905 else if (CALL_P (insn))
910 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
911 and the address of the save area. */
914 expand_builtin_nonlocal_goto (tree exp)
916 tree t_label, t_save_area;
917 rtx r_label, r_save_area, r_fp, r_sp, insn;
919 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
922 t_label = CALL_EXPR_ARG (exp, 0);
923 t_save_area = CALL_EXPR_ARG (exp, 1);
925 r_label = expand_normal (t_label);
926 r_label = convert_memory_address (Pmode, r_label);
927 r_save_area = expand_normal (t_save_area);
928 r_save_area = convert_memory_address (Pmode, r_save_area);
929 /* Copy the address of the save location to a register just in case it was based
930 on the frame pointer. */
931 r_save_area = copy_to_reg (r_save_area);
932 r_fp = gen_rtx_MEM (Pmode, r_save_area);
933 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
934 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
936 crtl->has_nonlocal_goto = 1;
938 #ifdef HAVE_nonlocal_goto
939 /* ??? We no longer need to pass the static chain value, afaik. */
940 if (HAVE_nonlocal_goto)
941 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
945 r_label = copy_to_reg (r_label);
947 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
948 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
950 /* Restore frame pointer for containing function.
951 This sets the actual hard register used for the frame pointer
952 to the location of the function's incoming static chain info.
953 The non-local goto handler will then adjust it to contain the
954 proper value and reload the argument pointer, if needed. */
955 emit_move_insn (hard_frame_pointer_rtx, r_fp);
956 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
958 /* USE of hard_frame_pointer_rtx added for consistency;
959 not clear if really needed. */
960 emit_use (hard_frame_pointer_rtx);
961 emit_use (stack_pointer_rtx);
963 /* If the architecture is using a GP register, we must
964 conservatively assume that the target function makes use of it.
965 The prologue of functions with nonlocal gotos must therefore
966 initialize the GP register to the appropriate value, and we
967 must then make sure that this value is live at the point
968 of the jump. (Note that this doesn't necessarily apply
969 to targets with a nonlocal_goto pattern; they are free
970 to implement it in their own way. Note also that this is
971 a no-op if the GP register is a global invariant.) */
972 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
973 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
974 emit_use (pic_offset_table_rtx);
976 emit_indirect_jump (r_label);
979 /* Search backwards to the jump insn and mark it as a
981 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
985 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
988 else if (CALL_P (insn))
995 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
996 (not all will be used on all machines) that was passed to __builtin_setjmp.
997 It updates the stack pointer in that block to correspond to the current
1001 expand_builtin_update_setjmp_buf (rtx buf_addr)
1003 enum machine_mode sa_mode = Pmode;
1007 #ifdef HAVE_save_stack_nonlocal
1008 if (HAVE_save_stack_nonlocal)
1009 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1011 #ifdef STACK_SAVEAREA_MODE
1012 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1016 = gen_rtx_MEM (sa_mode,
1019 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1023 emit_insn (gen_setjmp ());
1026 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1029 /* Expand a call to __builtin_prefetch. For a target that does not support
1030 data prefetch, evaluate the memory address argument in case it has side
1034 expand_builtin_prefetch (tree exp)
1036 tree arg0, arg1, arg2;
1040 if (!validate_arglist (exp, POINTER_TYPE, 0))
1043 arg0 = CALL_EXPR_ARG (exp, 0);
1045 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1046 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1048 nargs = call_expr_nargs (exp);
1050 arg1 = CALL_EXPR_ARG (exp, 1);
1052 arg1 = integer_zero_node;
1054 arg2 = CALL_EXPR_ARG (exp, 2);
1056 arg2 = build_int_cst (NULL_TREE, 3);
1058 /* Argument 0 is an address. */
1059 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1061 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1062 if (TREE_CODE (arg1) != INTEGER_CST)
1064 error ("second argument to %<__builtin_prefetch%> must be a constant");
1065 arg1 = integer_zero_node;
1067 op1 = expand_normal (arg1);
1068 /* Argument 1 must be either zero or one. */
1069 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1071 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1076 /* Argument 2 (locality) must be a compile-time constant int. */
1077 if (TREE_CODE (arg2) != INTEGER_CST)
1079 error ("third argument to %<__builtin_prefetch%> must be a constant");
1080 arg2 = integer_zero_node;
1082 op2 = expand_normal (arg2);
1083 /* Argument 2 must be 0, 1, 2, or 3. */
1084 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1086 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1090 #ifdef HAVE_prefetch
1093 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1095 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1096 || (GET_MODE (op0) != Pmode))
1098 op0 = convert_memory_address (Pmode, op0);
1099 op0 = force_reg (Pmode, op0);
1101 emit_insn (gen_prefetch (op0, op1, op2));
1105 /* Don't do anything with direct references to volatile memory, but
1106 generate code to handle other side effects. */
1107 if (!MEM_P (op0) && side_effects_p (op0))
1111 /* Get a MEM rtx for expression EXP which is the address of an operand
1112 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1113 the maximum length of the block of memory that might be accessed or
1117 get_memory_rtx (tree exp, tree len)
1119 tree orig_exp = exp;
1123 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1124 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1125 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1126 exp = TREE_OPERAND (exp, 0);
1128 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1129 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1131 /* Get an expression we can use to find the attributes to assign to MEM.
1132 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1133 we can. First remove any nops. */
1134 while (CONVERT_EXPR_P (exp)
1135 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1136 exp = TREE_OPERAND (exp, 0);
1139 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1140 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1141 && host_integerp (TREE_OPERAND (exp, 1), 0)
1142 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1143 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1144 else if (TREE_CODE (exp) == ADDR_EXPR)
1145 exp = TREE_OPERAND (exp, 0);
1146 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1147 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1151 /* Honor attributes derived from exp, except for the alias set
1152 (as builtin stringops may alias with anything) and the size
1153 (as stringops may access multiple array elements). */
1156 set_mem_attributes (mem, exp, 0);
1159 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1161 /* Allow the string and memory builtins to overflow from one
1162 field into another, see http://gcc.gnu.org/PR23561.
1163 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1164 memory accessed by the string or memory builtin will fit
1165 within the field. */
1166 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1168 tree mem_expr = MEM_EXPR (mem);
1169 HOST_WIDE_INT offset = -1, length = -1;
1172 while (TREE_CODE (inner) == ARRAY_REF
1173 || CONVERT_EXPR_P (inner)
1174 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1175 || TREE_CODE (inner) == SAVE_EXPR)
1176 inner = TREE_OPERAND (inner, 0);
1178 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1180 if (MEM_OFFSET (mem)
1181 && CONST_INT_P (MEM_OFFSET (mem)))
1182 offset = INTVAL (MEM_OFFSET (mem));
1184 if (offset >= 0 && len && host_integerp (len, 0))
1185 length = tree_low_cst (len, 0);
1187 while (TREE_CODE (inner) == COMPONENT_REF)
1189 tree field = TREE_OPERAND (inner, 1);
1190 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1191 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1193 /* Bitfields are generally not byte-addressable. */
1194 gcc_assert (!DECL_BIT_FIELD (field)
1195 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1196 % BITS_PER_UNIT) == 0
1197 && host_integerp (DECL_SIZE (field), 0)
1198 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1199 % BITS_PER_UNIT) == 0));
1201 /* If we can prove that the memory starting at XEXP (mem, 0) and
1202 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1203 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1204 fields without DECL_SIZE_UNIT like flexible array members. */
1206 && DECL_SIZE_UNIT (field)
1207 && host_integerp (DECL_SIZE_UNIT (field), 0))
1210 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1213 && offset + length <= size)
1218 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1219 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1220 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1228 mem_expr = TREE_OPERAND (mem_expr, 0);
1229 inner = TREE_OPERAND (inner, 0);
1232 if (mem_expr == NULL)
1234 if (mem_expr != MEM_EXPR (mem))
1236 set_mem_expr (mem, mem_expr);
1237 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1240 set_mem_alias_set (mem, 0);
1241 set_mem_size (mem, NULL_RTX);
1247 /* Built-in functions to perform an untyped call and return. */
1249 /* For each register that may be used for calling a function, this
1250 gives a mode used to copy the register's value. VOIDmode indicates
1251 the register is not used for calling a function. If the machine
1252 has register windows, this gives only the outbound registers.
1253 INCOMING_REGNO gives the corresponding inbound register. */
1254 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1256 /* For each register that may be used for returning values, this gives
1257 a mode used to copy the register's value. VOIDmode indicates the
1258 register is not used for returning values. If the machine has
1259 register windows, this gives only the outbound registers.
1260 INCOMING_REGNO gives the corresponding inbound register. */
1261 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1263 /* For each register that may be used for calling a function, this
1264 gives the offset of that register into the block returned by
1265 __builtin_apply_args. 0 indicates that the register is not
1266 used for calling a function. */
1267 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1269 /* Return the size required for the block returned by __builtin_apply_args,
1270 and initialize apply_args_mode. */
1273 apply_args_size (void)
1275 static int size = -1;
1278 enum machine_mode mode;
1280 /* The values computed by this function never change. */
1283 /* The first value is the incoming arg-pointer. */
1284 size = GET_MODE_SIZE (Pmode);
1286 /* The second value is the structure value address unless this is
1287 passed as an "invisible" first argument. */
1288 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1289 size += GET_MODE_SIZE (Pmode);
1291 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1292 if (FUNCTION_ARG_REGNO_P (regno))
1294 mode = reg_raw_mode[regno];
1296 gcc_assert (mode != VOIDmode);
1298 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1299 if (size % align != 0)
1300 size = CEIL (size, align) * align;
1301 apply_args_reg_offset[regno] = size;
1302 size += GET_MODE_SIZE (mode);
1303 apply_args_mode[regno] = mode;
1307 apply_args_mode[regno] = VOIDmode;
1308 apply_args_reg_offset[regno] = 0;
1314 /* Return the size required for the block returned by __builtin_apply,
1315 and initialize apply_result_mode. */
1318 apply_result_size (void)
1320 static int size = -1;
1322 enum machine_mode mode;
1324 /* The values computed by this function never change. */
1329 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1330 if (FUNCTION_VALUE_REGNO_P (regno))
1332 mode = reg_raw_mode[regno];
1334 gcc_assert (mode != VOIDmode);
1336 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1337 if (size % align != 0)
1338 size = CEIL (size, align) * align;
1339 size += GET_MODE_SIZE (mode);
1340 apply_result_mode[regno] = mode;
1343 apply_result_mode[regno] = VOIDmode;
1345 /* Allow targets that use untyped_call and untyped_return to override
1346 the size so that machine-specific information can be stored here. */
1347 #ifdef APPLY_RESULT_SIZE
1348 size = APPLY_RESULT_SIZE;
1354 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1355 /* Create a vector describing the result block RESULT. If SAVEP is true,
1356 the result block is used to save the values; otherwise it is used to
1357 restore the values. */
1360 result_vector (int savep, rtx result)
1362 int regno, size, align, nelts;
1363 enum machine_mode mode;
1365 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1368 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1369 if ((mode = apply_result_mode[regno]) != VOIDmode)
1371 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1372 if (size % align != 0)
1373 size = CEIL (size, align) * align;
1374 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1375 mem = adjust_address (result, mode, size);
1376 savevec[nelts++] = (savep
1377 ? gen_rtx_SET (VOIDmode, mem, reg)
1378 : gen_rtx_SET (VOIDmode, reg, mem));
1379 size += GET_MODE_SIZE (mode);
1381 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1383 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1385 /* Save the state required to perform an untyped call with the same
1386 arguments as were passed to the current function. */
1389 expand_builtin_apply_args_1 (void)
1392 int size, align, regno;
1393 enum machine_mode mode;
1394 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1396 /* Create a block where the arg-pointer, structure value address,
1397 and argument registers can be saved. */
1398 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1400 /* Walk past the arg-pointer and structure value address. */
1401 size = GET_MODE_SIZE (Pmode);
1402 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1403 size += GET_MODE_SIZE (Pmode);
1405 /* Save each register used in calling a function to the block. */
1406 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1407 if ((mode = apply_args_mode[regno]) != VOIDmode)
1409 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1410 if (size % align != 0)
1411 size = CEIL (size, align) * align;
1413 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1415 emit_move_insn (adjust_address (registers, mode, size), tem);
1416 size += GET_MODE_SIZE (mode);
1419 /* Save the arg pointer to the block. */
1420 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1421 #ifdef STACK_GROWS_DOWNWARD
1422 /* We need the pointer as the caller actually passed them to us, not
1423 as we might have pretended they were passed. Make sure it's a valid
1424 operand, as emit_move_insn isn't expected to handle a PLUS. */
1426 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1429 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1431 size = GET_MODE_SIZE (Pmode);
1433 /* Save the structure value address unless this is passed as an
1434 "invisible" first argument. */
1435 if (struct_incoming_value)
1437 emit_move_insn (adjust_address (registers, Pmode, size),
1438 copy_to_reg (struct_incoming_value));
1439 size += GET_MODE_SIZE (Pmode);
1442 /* Return the address of the block. */
1443 return copy_addr_to_reg (XEXP (registers, 0));
1446 /* __builtin_apply_args returns block of memory allocated on
1447 the stack into which is stored the arg pointer, structure
1448 value address, static chain, and all the registers that might
1449 possibly be used in performing a function call. The code is
1450 moved to the start of the function so the incoming values are
1454 expand_builtin_apply_args (void)
1456 /* Don't do __builtin_apply_args more than once in a function.
1457 Save the result of the first call and reuse it. */
1458 if (apply_args_value != 0)
1459 return apply_args_value;
1461 /* When this function is called, it means that registers must be
1462 saved on entry to this function. So we migrate the
1463 call to the first insn of this function. */
1468 temp = expand_builtin_apply_args_1 ();
1472 apply_args_value = temp;
1474 /* Put the insns after the NOTE that starts the function.
1475 If this is inside a start_sequence, make the outer-level insn
1476 chain current, so the code is placed at the start of the
1477 function. If internal_arg_pointer is a non-virtual pseudo,
1478 it needs to be placed after the function that initializes
1480 push_topmost_sequence ();
1481 if (REG_P (crtl->args.internal_arg_pointer)
1482 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1483 emit_insn_before (seq, parm_birth_insn);
1485 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1486 pop_topmost_sequence ();
1491 /* Perform an untyped call and save the state required to perform an
1492 untyped return of whatever value was returned by the given function. */
1495 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1497 int size, align, regno;
1498 enum machine_mode mode;
1499 rtx incoming_args, result, reg, dest, src, call_insn;
1500 rtx old_stack_level = 0;
1501 rtx call_fusage = 0;
1502 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1504 arguments = convert_memory_address (Pmode, arguments);
1506 /* Create a block where the return registers can be saved. */
1507 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1509 /* Fetch the arg pointer from the ARGUMENTS block. */
1510 incoming_args = gen_reg_rtx (Pmode);
1511 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1512 #ifndef STACK_GROWS_DOWNWARD
1513 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1514 incoming_args, 0, OPTAB_LIB_WIDEN);
1517 /* Push a new argument block and copy the arguments. Do not allow
1518 the (potential) memcpy call below to interfere with our stack
1520 do_pending_stack_adjust ();
1523 /* Save the stack with nonlocal if available. */
1524 #ifdef HAVE_save_stack_nonlocal
1525 if (HAVE_save_stack_nonlocal)
1526 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1529 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1531 /* Allocate a block of memory onto the stack and copy the memory
1532 arguments to the outgoing arguments address. */
1533 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1535 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1536 may have already set current_function_calls_alloca to true.
1537 current_function_calls_alloca won't be set if argsize is zero,
1538 so we have to guarantee need_drap is true here. */
1539 if (SUPPORTS_STACK_ALIGNMENT)
1540 crtl->need_drap = true;
1542 dest = virtual_outgoing_args_rtx;
1543 #ifndef STACK_GROWS_DOWNWARD
1544 if (CONST_INT_P (argsize))
1545 dest = plus_constant (dest, -INTVAL (argsize));
1547 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1549 dest = gen_rtx_MEM (BLKmode, dest);
1550 set_mem_align (dest, PARM_BOUNDARY);
1551 src = gen_rtx_MEM (BLKmode, incoming_args);
1552 set_mem_align (src, PARM_BOUNDARY);
1553 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1555 /* Refer to the argument block. */
1557 arguments = gen_rtx_MEM (BLKmode, arguments);
1558 set_mem_align (arguments, PARM_BOUNDARY);
1560 /* Walk past the arg-pointer and structure value address. */
1561 size = GET_MODE_SIZE (Pmode);
1563 size += GET_MODE_SIZE (Pmode);
1565 /* Restore each of the registers previously saved. Make USE insns
1566 for each of these registers for use in making the call. */
1567 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1568 if ((mode = apply_args_mode[regno]) != VOIDmode)
1570 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1571 if (size % align != 0)
1572 size = CEIL (size, align) * align;
1573 reg = gen_rtx_REG (mode, regno);
1574 emit_move_insn (reg, adjust_address (arguments, mode, size));
1575 use_reg (&call_fusage, reg);
1576 size += GET_MODE_SIZE (mode);
1579 /* Restore the structure value address unless this is passed as an
1580 "invisible" first argument. */
1581 size = GET_MODE_SIZE (Pmode);
1584 rtx value = gen_reg_rtx (Pmode);
1585 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1586 emit_move_insn (struct_value, value);
1587 if (REG_P (struct_value))
1588 use_reg (&call_fusage, struct_value);
1589 size += GET_MODE_SIZE (Pmode);
1592 /* All arguments and registers used for the call are set up by now! */
1593 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1595 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1596 and we don't want to load it into a register as an optimization,
1597 because prepare_call_address already did it if it should be done. */
1598 if (GET_CODE (function) != SYMBOL_REF)
1599 function = memory_address (FUNCTION_MODE, function);
1601 /* Generate the actual call instruction and save the return value. */
1602 #ifdef HAVE_untyped_call
1603 if (HAVE_untyped_call)
1604 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1605 result, result_vector (1, result)));
1608 #ifdef HAVE_call_value
1609 if (HAVE_call_value)
1613 /* Locate the unique return register. It is not possible to
1614 express a call that sets more than one return register using
1615 call_value; use untyped_call for that. In fact, untyped_call
1616 only needs to save the return registers in the given block. */
1617 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1618 if ((mode = apply_result_mode[regno]) != VOIDmode)
1620 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1622 valreg = gen_rtx_REG (mode, regno);
1625 emit_call_insn (GEN_CALL_VALUE (valreg,
1626 gen_rtx_MEM (FUNCTION_MODE, function),
1627 const0_rtx, NULL_RTX, const0_rtx));
1629 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1635 /* Find the CALL insn we just emitted, and attach the register usage
1637 call_insn = last_call_insn ();
1638 add_function_usage_to (call_insn, call_fusage);
1640 /* Restore the stack. */
1641 #ifdef HAVE_save_stack_nonlocal
1642 if (HAVE_save_stack_nonlocal)
1643 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1646 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1650 /* Return the address of the result block. */
1651 result = copy_addr_to_reg (XEXP (result, 0));
1652 return convert_memory_address (ptr_mode, result);
1655 /* Perform an untyped return. */
1658 expand_builtin_return (rtx result)
1660 int size, align, regno;
1661 enum machine_mode mode;
1663 rtx call_fusage = 0;
1665 result = convert_memory_address (Pmode, result);
1667 apply_result_size ();
1668 result = gen_rtx_MEM (BLKmode, result);
1670 #ifdef HAVE_untyped_return
1671 if (HAVE_untyped_return)
1673 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1679 /* Restore the return value and note that each value is used. */
1681 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1682 if ((mode = apply_result_mode[regno]) != VOIDmode)
1684 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1685 if (size % align != 0)
1686 size = CEIL (size, align) * align;
1687 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1688 emit_move_insn (reg, adjust_address (result, mode, size));
1690 push_to_sequence (call_fusage);
1692 call_fusage = get_insns ();
1694 size += GET_MODE_SIZE (mode);
1697 /* Put the USE insns before the return. */
1698 emit_insn (call_fusage);
1700 /* Return whatever values was restored by jumping directly to the end
1702 expand_naked_return ();
1705 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1707 static enum type_class
1708 type_to_class (tree type)
1710 switch (TREE_CODE (type))
1712 case VOID_TYPE: return void_type_class;
1713 case INTEGER_TYPE: return integer_type_class;
1714 case ENUMERAL_TYPE: return enumeral_type_class;
1715 case BOOLEAN_TYPE: return boolean_type_class;
1716 case POINTER_TYPE: return pointer_type_class;
1717 case REFERENCE_TYPE: return reference_type_class;
1718 case OFFSET_TYPE: return offset_type_class;
1719 case REAL_TYPE: return real_type_class;
1720 case COMPLEX_TYPE: return complex_type_class;
1721 case FUNCTION_TYPE: return function_type_class;
1722 case METHOD_TYPE: return method_type_class;
1723 case RECORD_TYPE: return record_type_class;
1725 case QUAL_UNION_TYPE: return union_type_class;
1726 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1727 ? string_type_class : array_type_class);
1728 case LANG_TYPE: return lang_type_class;
1729 default: return no_type_class;
1733 /* Expand a call EXP to __builtin_classify_type. */
1736 expand_builtin_classify_type (tree exp)
1738 if (call_expr_nargs (exp))
1739 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1740 return GEN_INT (no_type_class);
1743 /* This helper macro, meant to be used in mathfn_built_in below,
1744 determines which among a set of three builtin math functions is
1745 appropriate for a given type mode. The `F' and `L' cases are
1746 automatically generated from the `double' case. */
1747 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1748 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1749 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1750 fcodel = BUILT_IN_MATHFN##L ; break;
1751 /* Similar to above, but appends _R after any F/L suffix. */
1752 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1753 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1754 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1755 fcodel = BUILT_IN_MATHFN##L_R ; break;
1757 /* Return mathematic function equivalent to FN but operating directly
1758 on TYPE, if available. If IMPLICIT is true find the function in
1759 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1760 can't do the conversion, return zero. */
1763 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1765 tree const *const fn_arr
1766 = implicit ? implicit_built_in_decls : built_in_decls;
1767 enum built_in_function fcode, fcodef, fcodel;
1771 CASE_MATHFN (BUILT_IN_ACOS)
1772 CASE_MATHFN (BUILT_IN_ACOSH)
1773 CASE_MATHFN (BUILT_IN_ASIN)
1774 CASE_MATHFN (BUILT_IN_ASINH)
1775 CASE_MATHFN (BUILT_IN_ATAN)
1776 CASE_MATHFN (BUILT_IN_ATAN2)
1777 CASE_MATHFN (BUILT_IN_ATANH)
1778 CASE_MATHFN (BUILT_IN_CBRT)
1779 CASE_MATHFN (BUILT_IN_CEIL)
1780 CASE_MATHFN (BUILT_IN_CEXPI)
1781 CASE_MATHFN (BUILT_IN_COPYSIGN)
1782 CASE_MATHFN (BUILT_IN_COS)
1783 CASE_MATHFN (BUILT_IN_COSH)
1784 CASE_MATHFN (BUILT_IN_DREM)
1785 CASE_MATHFN (BUILT_IN_ERF)
1786 CASE_MATHFN (BUILT_IN_ERFC)
1787 CASE_MATHFN (BUILT_IN_EXP)
1788 CASE_MATHFN (BUILT_IN_EXP10)
1789 CASE_MATHFN (BUILT_IN_EXP2)
1790 CASE_MATHFN (BUILT_IN_EXPM1)
1791 CASE_MATHFN (BUILT_IN_FABS)
1792 CASE_MATHFN (BUILT_IN_FDIM)
1793 CASE_MATHFN (BUILT_IN_FLOOR)
1794 CASE_MATHFN (BUILT_IN_FMA)
1795 CASE_MATHFN (BUILT_IN_FMAX)
1796 CASE_MATHFN (BUILT_IN_FMIN)
1797 CASE_MATHFN (BUILT_IN_FMOD)
1798 CASE_MATHFN (BUILT_IN_FREXP)
1799 CASE_MATHFN (BUILT_IN_GAMMA)
1800 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1801 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1802 CASE_MATHFN (BUILT_IN_HYPOT)
1803 CASE_MATHFN (BUILT_IN_ILOGB)
1804 CASE_MATHFN (BUILT_IN_INF)
1805 CASE_MATHFN (BUILT_IN_ISINF)
1806 CASE_MATHFN (BUILT_IN_J0)
1807 CASE_MATHFN (BUILT_IN_J1)
1808 CASE_MATHFN (BUILT_IN_JN)
1809 CASE_MATHFN (BUILT_IN_LCEIL)
1810 CASE_MATHFN (BUILT_IN_LDEXP)
1811 CASE_MATHFN (BUILT_IN_LFLOOR)
1812 CASE_MATHFN (BUILT_IN_LGAMMA)
1813 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1814 CASE_MATHFN (BUILT_IN_LLCEIL)
1815 CASE_MATHFN (BUILT_IN_LLFLOOR)
1816 CASE_MATHFN (BUILT_IN_LLRINT)
1817 CASE_MATHFN (BUILT_IN_LLROUND)
1818 CASE_MATHFN (BUILT_IN_LOG)
1819 CASE_MATHFN (BUILT_IN_LOG10)
1820 CASE_MATHFN (BUILT_IN_LOG1P)
1821 CASE_MATHFN (BUILT_IN_LOG2)
1822 CASE_MATHFN (BUILT_IN_LOGB)
1823 CASE_MATHFN (BUILT_IN_LRINT)
1824 CASE_MATHFN (BUILT_IN_LROUND)
1825 CASE_MATHFN (BUILT_IN_MODF)
1826 CASE_MATHFN (BUILT_IN_NAN)
1827 CASE_MATHFN (BUILT_IN_NANS)
1828 CASE_MATHFN (BUILT_IN_NEARBYINT)
1829 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1830 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1831 CASE_MATHFN (BUILT_IN_POW)
1832 CASE_MATHFN (BUILT_IN_POWI)
1833 CASE_MATHFN (BUILT_IN_POW10)
1834 CASE_MATHFN (BUILT_IN_REMAINDER)
1835 CASE_MATHFN (BUILT_IN_REMQUO)
1836 CASE_MATHFN (BUILT_IN_RINT)
1837 CASE_MATHFN (BUILT_IN_ROUND)
1838 CASE_MATHFN (BUILT_IN_SCALB)
1839 CASE_MATHFN (BUILT_IN_SCALBLN)
1840 CASE_MATHFN (BUILT_IN_SCALBN)
1841 CASE_MATHFN (BUILT_IN_SIGNBIT)
1842 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1843 CASE_MATHFN (BUILT_IN_SIN)
1844 CASE_MATHFN (BUILT_IN_SINCOS)
1845 CASE_MATHFN (BUILT_IN_SINH)
1846 CASE_MATHFN (BUILT_IN_SQRT)
1847 CASE_MATHFN (BUILT_IN_TAN)
1848 CASE_MATHFN (BUILT_IN_TANH)
1849 CASE_MATHFN (BUILT_IN_TGAMMA)
1850 CASE_MATHFN (BUILT_IN_TRUNC)
1851 CASE_MATHFN (BUILT_IN_Y0)
1852 CASE_MATHFN (BUILT_IN_Y1)
1853 CASE_MATHFN (BUILT_IN_YN)
1859 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1860 return fn_arr[fcode];
1861 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1862 return fn_arr[fcodef];
1863 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1864 return fn_arr[fcodel];
1869 /* Like mathfn_built_in_1(), but always use the implicit array. */
1872 mathfn_built_in (tree type, enum built_in_function fn)
1874 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1877 /* If errno must be maintained, expand the RTL to check if the result,
1878 TARGET, of a built-in function call, EXP, is NaN, and if so set
1882 expand_errno_check (tree exp, rtx target)
1884 rtx lab = gen_label_rtx ();
1886 /* Test the result; if it is NaN, set errno=EDOM because
1887 the argument was not in the domain. */
1888 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1889 NULL_RTX, NULL_RTX, lab);
1892 /* If this built-in doesn't throw an exception, set errno directly. */
1893 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1895 #ifdef GEN_ERRNO_RTX
1896 rtx errno_rtx = GEN_ERRNO_RTX;
1899 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1901 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1907 /* Make sure the library call isn't expanded as a tail call. */
1908 CALL_EXPR_TAILCALL (exp) = 0;
1910 /* We can't set errno=EDOM directly; let the library call do it.
1911 Pop the arguments right away in case the call gets deleted. */
1913 expand_call (exp, target, 0);
1918 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1919 Return NULL_RTX if a normal call should be emitted rather than expanding
1920 the function in-line. EXP is the expression that is a call to the builtin
1921 function; if convenient, the result should be placed in TARGET.
1922 SUBTARGET may be used as the target for computing one of EXP's operands. */
1925 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1927 optab builtin_optab;
1928 rtx op0, insns, before_call;
1929 tree fndecl = get_callee_fndecl (exp);
1930 enum machine_mode mode;
1931 bool errno_set = false;
1934 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1937 arg = CALL_EXPR_ARG (exp, 0);
1939 switch (DECL_FUNCTION_CODE (fndecl))
1941 CASE_FLT_FN (BUILT_IN_SQRT):
1942 errno_set = ! tree_expr_nonnegative_p (arg);
1943 builtin_optab = sqrt_optab;
1945 CASE_FLT_FN (BUILT_IN_EXP):
1946 errno_set = true; builtin_optab = exp_optab; break;
1947 CASE_FLT_FN (BUILT_IN_EXP10):
1948 CASE_FLT_FN (BUILT_IN_POW10):
1949 errno_set = true; builtin_optab = exp10_optab; break;
1950 CASE_FLT_FN (BUILT_IN_EXP2):
1951 errno_set = true; builtin_optab = exp2_optab; break;
1952 CASE_FLT_FN (BUILT_IN_EXPM1):
1953 errno_set = true; builtin_optab = expm1_optab; break;
1954 CASE_FLT_FN (BUILT_IN_LOGB):
1955 errno_set = true; builtin_optab = logb_optab; break;
1956 CASE_FLT_FN (BUILT_IN_LOG):
1957 errno_set = true; builtin_optab = log_optab; break;
1958 CASE_FLT_FN (BUILT_IN_LOG10):
1959 errno_set = true; builtin_optab = log10_optab; break;
1960 CASE_FLT_FN (BUILT_IN_LOG2):
1961 errno_set = true; builtin_optab = log2_optab; break;
1962 CASE_FLT_FN (BUILT_IN_LOG1P):
1963 errno_set = true; builtin_optab = log1p_optab; break;
1964 CASE_FLT_FN (BUILT_IN_ASIN):
1965 builtin_optab = asin_optab; break;
1966 CASE_FLT_FN (BUILT_IN_ACOS):
1967 builtin_optab = acos_optab; break;
1968 CASE_FLT_FN (BUILT_IN_TAN):
1969 builtin_optab = tan_optab; break;
1970 CASE_FLT_FN (BUILT_IN_ATAN):
1971 builtin_optab = atan_optab; break;
1972 CASE_FLT_FN (BUILT_IN_FLOOR):
1973 builtin_optab = floor_optab; break;
1974 CASE_FLT_FN (BUILT_IN_CEIL):
1975 builtin_optab = ceil_optab; break;
1976 CASE_FLT_FN (BUILT_IN_TRUNC):
1977 builtin_optab = btrunc_optab; break;
1978 CASE_FLT_FN (BUILT_IN_ROUND):
1979 builtin_optab = round_optab; break;
1980 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1981 builtin_optab = nearbyint_optab;
1982 if (flag_trapping_math)
1984 /* Else fallthrough and expand as rint. */
1985 CASE_FLT_FN (BUILT_IN_RINT):
1986 builtin_optab = rint_optab; break;
1987 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1988 builtin_optab = significand_optab; break;
1993 /* Make a suitable register to place result in. */
1994 mode = TYPE_MODE (TREE_TYPE (exp));
1996 if (! flag_errno_math || ! HONOR_NANS (mode))
1999 /* Before working hard, check whether the instruction is available. */
2000 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2002 target = gen_reg_rtx (mode);
2004 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2005 need to expand the argument again. This way, we will not perform
2006 side-effects more the once. */
2007 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2009 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2013 /* Compute into TARGET.
2014 Set TARGET to wherever the result comes back. */
2015 target = expand_unop (mode, builtin_optab, op0, target, 0);
2020 expand_errno_check (exp, target);
2022 /* Output the entire sequence. */
2023 insns = get_insns ();
2029 /* If we were unable to expand via the builtin, stop the sequence
2030 (without outputting the insns) and call to the library function
2031 with the stabilized argument list. */
2035 before_call = get_last_insn ();
2037 return expand_call (exp, target, target == const0_rtx);
2040 /* Expand a call to the builtin binary math functions (pow and atan2).
2041 Return NULL_RTX if a normal call should be emitted rather than expanding the
2042 function in-line. EXP is the expression that is a call to the builtin
2043 function; if convenient, the result should be placed in TARGET.
2044 SUBTARGET may be used as the target for computing one of EXP's
2048 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2050 optab builtin_optab;
2051 rtx op0, op1, insns;
2052 int op1_type = REAL_TYPE;
2053 tree fndecl = get_callee_fndecl (exp);
2055 enum machine_mode mode;
2056 bool errno_set = true;
2058 switch (DECL_FUNCTION_CODE (fndecl))
2060 CASE_FLT_FN (BUILT_IN_SCALBN):
2061 CASE_FLT_FN (BUILT_IN_SCALBLN):
2062 CASE_FLT_FN (BUILT_IN_LDEXP):
2063 op1_type = INTEGER_TYPE;
2068 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2071 arg0 = CALL_EXPR_ARG (exp, 0);
2072 arg1 = CALL_EXPR_ARG (exp, 1);
2074 switch (DECL_FUNCTION_CODE (fndecl))
2076 CASE_FLT_FN (BUILT_IN_POW):
2077 builtin_optab = pow_optab; break;
2078 CASE_FLT_FN (BUILT_IN_ATAN2):
2079 builtin_optab = atan2_optab; break;
2080 CASE_FLT_FN (BUILT_IN_SCALB):
2081 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2083 builtin_optab = scalb_optab; break;
2084 CASE_FLT_FN (BUILT_IN_SCALBN):
2085 CASE_FLT_FN (BUILT_IN_SCALBLN):
2086 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2088 /* Fall through... */
2089 CASE_FLT_FN (BUILT_IN_LDEXP):
2090 builtin_optab = ldexp_optab; break;
2091 CASE_FLT_FN (BUILT_IN_FMOD):
2092 builtin_optab = fmod_optab; break;
2093 CASE_FLT_FN (BUILT_IN_REMAINDER):
2094 CASE_FLT_FN (BUILT_IN_DREM):
2095 builtin_optab = remainder_optab; break;
2100 /* Make a suitable register to place result in. */
2101 mode = TYPE_MODE (TREE_TYPE (exp));
2103 /* Before working hard, check whether the instruction is available. */
2104 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2107 target = gen_reg_rtx (mode);
2109 if (! flag_errno_math || ! HONOR_NANS (mode))
2112 /* Always stabilize the argument list. */
2113 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2114 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2116 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2117 op1 = expand_normal (arg1);
2121 /* Compute into TARGET.
2122 Set TARGET to wherever the result comes back. */
2123 target = expand_binop (mode, builtin_optab, op0, op1,
2124 target, 0, OPTAB_DIRECT);
2126 /* If we were unable to expand via the builtin, stop the sequence
2127 (without outputting the insns) and call to the library function
2128 with the stabilized argument list. */
2132 return expand_call (exp, target, target == const0_rtx);
2136 expand_errno_check (exp, target);
2138 /* Output the entire sequence. */
2139 insns = get_insns ();
2146 /* Expand a call to the builtin sin and cos math functions.
2147 Return NULL_RTX if a normal call should be emitted rather than expanding the
2148 function in-line. EXP is the expression that is a call to the builtin
2149 function; if convenient, the result should be placed in TARGET.
2150 SUBTARGET may be used as the target for computing one of EXP's
2154 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2156 optab builtin_optab;
2158 tree fndecl = get_callee_fndecl (exp);
2159 enum machine_mode mode;
2162 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2165 arg = CALL_EXPR_ARG (exp, 0);
2167 switch (DECL_FUNCTION_CODE (fndecl))
2169 CASE_FLT_FN (BUILT_IN_SIN):
2170 CASE_FLT_FN (BUILT_IN_COS):
2171 builtin_optab = sincos_optab; break;
2176 /* Make a suitable register to place result in. */
2177 mode = TYPE_MODE (TREE_TYPE (exp));
2179 /* Check if sincos insn is available, otherwise fallback
2180 to sin or cos insn. */
2181 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2182 switch (DECL_FUNCTION_CODE (fndecl))
2184 CASE_FLT_FN (BUILT_IN_SIN):
2185 builtin_optab = sin_optab; break;
2186 CASE_FLT_FN (BUILT_IN_COS):
2187 builtin_optab = cos_optab; break;
2192 /* Before working hard, check whether the instruction is available. */
2193 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2195 target = gen_reg_rtx (mode);
2197 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2198 need to expand the argument again. This way, we will not perform
2199 side-effects more the once. */
2200 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2202 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2206 /* Compute into TARGET.
2207 Set TARGET to wherever the result comes back. */
2208 if (builtin_optab == sincos_optab)
2212 switch (DECL_FUNCTION_CODE (fndecl))
2214 CASE_FLT_FN (BUILT_IN_SIN):
2215 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2217 CASE_FLT_FN (BUILT_IN_COS):
2218 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2223 gcc_assert (result);
2227 target = expand_unop (mode, builtin_optab, op0, target, 0);
2232 /* Output the entire sequence. */
2233 insns = get_insns ();
2239 /* If we were unable to expand via the builtin, stop the sequence
2240 (without outputting the insns) and call to the library function
2241 with the stabilized argument list. */
2245 target = expand_call (exp, target, target == const0_rtx);
2250 /* Expand a call to one of the builtin math functions that operate on
2251 floating point argument and output an integer result (ilogb, isinf,
2253 Return 0 if a normal call should be emitted rather than expanding the
2254 function in-line. EXP is the expression that is a call to the builtin
2255 function; if convenient, the result should be placed in TARGET.
2256 SUBTARGET may be used as the target for computing one of EXP's operands. */
2259 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2261 optab builtin_optab = 0;
2262 enum insn_code icode = CODE_FOR_nothing;
2264 tree fndecl = get_callee_fndecl (exp);
2265 enum machine_mode mode;
2266 bool errno_set = false;
2269 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2272 arg = CALL_EXPR_ARG (exp, 0);
2274 switch (DECL_FUNCTION_CODE (fndecl))
2276 CASE_FLT_FN (BUILT_IN_ILOGB):
2277 errno_set = true; builtin_optab = ilogb_optab; break;
2278 CASE_FLT_FN (BUILT_IN_ISINF):
2279 builtin_optab = isinf_optab; break;
2280 case BUILT_IN_ISNORMAL:
2281 case BUILT_IN_ISFINITE:
2282 CASE_FLT_FN (BUILT_IN_FINITE):
2283 /* These builtins have no optabs (yet). */
2289 /* There's no easy way to detect the case we need to set EDOM. */
2290 if (flag_errno_math && errno_set)
2293 /* Optab mode depends on the mode of the input argument. */
2294 mode = TYPE_MODE (TREE_TYPE (arg));
2297 icode = optab_handler (builtin_optab, mode)->insn_code;
2299 /* Before working hard, check whether the instruction is available. */
2300 if (icode != CODE_FOR_nothing)
2302 /* Make a suitable register to place result in. */
2304 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2305 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2307 gcc_assert (insn_data[icode].operand[0].predicate
2308 (target, GET_MODE (target)));
2310 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2311 need to expand the argument again. This way, we will not perform
2312 side-effects more the once. */
2313 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2315 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2317 if (mode != GET_MODE (op0))
2318 op0 = convert_to_mode (mode, op0, 0);
2320 /* Compute into TARGET.
2321 Set TARGET to wherever the result comes back. */
2322 emit_unop_insn (icode, target, op0, UNKNOWN);
2326 /* If there is no optab, try generic code. */
2327 switch (DECL_FUNCTION_CODE (fndecl))
2331 CASE_FLT_FN (BUILT_IN_ISINF):
2333 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2334 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2335 tree const type = TREE_TYPE (arg);
2339 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2340 real_from_string (&r, buf);
2341 result = build_call_expr (isgr_fn, 2,
2342 fold_build1 (ABS_EXPR, type, arg),
2343 build_real (type, r));
2344 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2346 CASE_FLT_FN (BUILT_IN_FINITE):
2347 case BUILT_IN_ISFINITE:
2349 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2350 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2351 tree const type = TREE_TYPE (arg);
2355 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2356 real_from_string (&r, buf);
2357 result = build_call_expr (isle_fn, 2,
2358 fold_build1 (ABS_EXPR, type, arg),
2359 build_real (type, r));
2360 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2362 case BUILT_IN_ISNORMAL:
2364 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2365 islessequal(fabs(x),DBL_MAX). */
2366 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2367 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2368 tree const type = TREE_TYPE (arg);
2369 REAL_VALUE_TYPE rmax, rmin;
2372 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2373 real_from_string (&rmax, buf);
2374 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2375 real_from_string (&rmin, buf);
2376 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2377 result = build_call_expr (isle_fn, 2, arg,
2378 build_real (type, rmax));
2379 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2380 build_call_expr (isge_fn, 2, arg,
2381 build_real (type, rmin)));
2382 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2388 target = expand_call (exp, target, target == const0_rtx);
2393 /* Expand a call to the builtin sincos math function.
2394 Return NULL_RTX if a normal call should be emitted rather than expanding the
2395 function in-line. EXP is the expression that is a call to the builtin
2399 expand_builtin_sincos (tree exp)
2401 rtx op0, op1, op2, target1, target2;
2402 enum machine_mode mode;
2403 tree arg, sinp, cosp;
2406 if (!validate_arglist (exp, REAL_TYPE,
2407 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2410 arg = CALL_EXPR_ARG (exp, 0);
2411 sinp = CALL_EXPR_ARG (exp, 1);
2412 cosp = CALL_EXPR_ARG (exp, 2);
2414 /* Make a suitable register to place result in. */
2415 mode = TYPE_MODE (TREE_TYPE (arg));
2417 /* Check if sincos insn is available, otherwise emit the call. */
2418 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2421 target1 = gen_reg_rtx (mode);
2422 target2 = gen_reg_rtx (mode);
2424 op0 = expand_normal (arg);
2425 op1 = expand_normal (build_fold_indirect_ref (sinp));
2426 op2 = expand_normal (build_fold_indirect_ref (cosp));
2428 /* Compute into target1 and target2.
2429 Set TARGET to wherever the result comes back. */
2430 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2431 gcc_assert (result);
2433 /* Move target1 and target2 to the memory locations indicated
2435 emit_move_insn (op1, target1);
2436 emit_move_insn (op2, target2);
2441 /* Expand a call to the internal cexpi builtin to the sincos math function.
2442 EXP is the expression that is a call to the builtin function; if convenient,
2443 the result should be placed in TARGET. SUBTARGET may be used as the target
2444 for computing one of EXP's operands. */
2447 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2449 tree fndecl = get_callee_fndecl (exp);
2451 enum machine_mode mode;
2454 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2457 arg = CALL_EXPR_ARG (exp, 0);
2458 type = TREE_TYPE (arg);
2459 mode = TYPE_MODE (TREE_TYPE (arg));
2461 /* Try expanding via a sincos optab, fall back to emitting a libcall
2462 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2463 is only generated from sincos, cexp or if we have either of them. */
2464 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2466 op1 = gen_reg_rtx (mode);
2467 op2 = gen_reg_rtx (mode);
2469 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2471 /* Compute into op1 and op2. */
2472 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2474 else if (TARGET_HAS_SINCOS)
2476 tree call, fn = NULL_TREE;
2480 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2481 fn = built_in_decls[BUILT_IN_SINCOSF];
2482 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2483 fn = built_in_decls[BUILT_IN_SINCOS];
2484 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2485 fn = built_in_decls[BUILT_IN_SINCOSL];
2489 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2490 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2491 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2492 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2493 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2494 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2496 /* Make sure not to fold the sincos call again. */
2497 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2498 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2499 call, 3, arg, top1, top2));
2503 tree call, fn = NULL_TREE, narg;
2504 tree ctype = build_complex_type (type);
2506 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2507 fn = built_in_decls[BUILT_IN_CEXPF];
2508 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2509 fn = built_in_decls[BUILT_IN_CEXP];
2510 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2511 fn = built_in_decls[BUILT_IN_CEXPL];
2515 /* If we don't have a decl for cexp create one. This is the
2516 friendliest fallback if the user calls __builtin_cexpi
2517 without full target C99 function support. */
2518 if (fn == NULL_TREE)
2521 const char *name = NULL;
2523 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2525 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2527 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2530 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2531 fn = build_fn_decl (name, fntype);
2534 narg = fold_build2 (COMPLEX_EXPR, ctype,
2535 build_real (type, dconst0), arg);
2537 /* Make sure not to fold the cexp call again. */
2538 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2539 return expand_expr (build_call_nary (ctype, call, 1, narg),
2540 target, VOIDmode, EXPAND_NORMAL);
2543 /* Now build the proper return type. */
2544 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2545 make_tree (TREE_TYPE (arg), op2),
2546 make_tree (TREE_TYPE (arg), op1)),
2547 target, VOIDmode, EXPAND_NORMAL);
2550 /* Expand a call to one of the builtin rounding functions gcc defines
2551 as an extension (lfloor and lceil). As these are gcc extensions we
2552 do not need to worry about setting errno to EDOM.
2553 If expanding via optab fails, lower expression to (int)(floor(x)).
2554 EXP is the expression that is a call to the builtin function;
2555 if convenient, the result should be placed in TARGET. */
2558 expand_builtin_int_roundingfn (tree exp, rtx target)
2560 convert_optab builtin_optab;
2561 rtx op0, insns, tmp;
2562 tree fndecl = get_callee_fndecl (exp);
2563 enum built_in_function fallback_fn;
2564 tree fallback_fndecl;
2565 enum machine_mode mode;
2568 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2571 arg = CALL_EXPR_ARG (exp, 0);
2573 switch (DECL_FUNCTION_CODE (fndecl))
2575 CASE_FLT_FN (BUILT_IN_LCEIL):
2576 CASE_FLT_FN (BUILT_IN_LLCEIL):
2577 builtin_optab = lceil_optab;
2578 fallback_fn = BUILT_IN_CEIL;
2581 CASE_FLT_FN (BUILT_IN_LFLOOR):
2582 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2583 builtin_optab = lfloor_optab;
2584 fallback_fn = BUILT_IN_FLOOR;
2591 /* Make a suitable register to place result in. */
2592 mode = TYPE_MODE (TREE_TYPE (exp));
2594 target = gen_reg_rtx (mode);
2596 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2597 need to expand the argument again. This way, we will not perform
2598 side-effects more the once. */
2599 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2601 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2605 /* Compute into TARGET. */
2606 if (expand_sfix_optab (target, op0, builtin_optab))
2608 /* Output the entire sequence. */
2609 insns = get_insns ();
2615 /* If we were unable to expand via the builtin, stop the sequence
2616 (without outputting the insns). */
2619 /* Fall back to floating point rounding optab. */
2620 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2622 /* For non-C99 targets we may end up without a fallback fndecl here
2623 if the user called __builtin_lfloor directly. In this case emit
2624 a call to the floor/ceil variants nevertheless. This should result
2625 in the best user experience for not full C99 targets. */
2626 if (fallback_fndecl == NULL_TREE)
2629 const char *name = NULL;
2631 switch (DECL_FUNCTION_CODE (fndecl))
2633 case BUILT_IN_LCEIL:
2634 case BUILT_IN_LLCEIL:
2637 case BUILT_IN_LCEILF:
2638 case BUILT_IN_LLCEILF:
2641 case BUILT_IN_LCEILL:
2642 case BUILT_IN_LLCEILL:
2645 case BUILT_IN_LFLOOR:
2646 case BUILT_IN_LLFLOOR:
2649 case BUILT_IN_LFLOORF:
2650 case BUILT_IN_LLFLOORF:
2653 case BUILT_IN_LFLOORL:
2654 case BUILT_IN_LLFLOORL:
2661 fntype = build_function_type_list (TREE_TYPE (arg),
2662 TREE_TYPE (arg), NULL_TREE);
2663 fallback_fndecl = build_fn_decl (name, fntype);
2666 exp = build_call_expr (fallback_fndecl, 1, arg);
2668 tmp = expand_normal (exp);
2670 /* Truncate the result of floating point optab to integer
2671 via expand_fix (). */
2672 target = gen_reg_rtx (mode);
2673 expand_fix (target, tmp, 0);
2678 /* Expand a call to one of the builtin math functions doing integer
2680 Return 0 if a normal call should be emitted rather than expanding the
2681 function in-line. EXP is the expression that is a call to the builtin
2682 function; if convenient, the result should be placed in TARGET. */
2685 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2687 convert_optab builtin_optab;
2689 tree fndecl = get_callee_fndecl (exp);
2691 enum machine_mode mode;
2693 /* There's no easy way to detect the case we need to set EDOM. */
2694 if (flag_errno_math)
2697 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2700 arg = CALL_EXPR_ARG (exp, 0);
2702 switch (DECL_FUNCTION_CODE (fndecl))
2704 CASE_FLT_FN (BUILT_IN_LRINT):
2705 CASE_FLT_FN (BUILT_IN_LLRINT):
2706 builtin_optab = lrint_optab; break;
2707 CASE_FLT_FN (BUILT_IN_LROUND):
2708 CASE_FLT_FN (BUILT_IN_LLROUND):
2709 builtin_optab = lround_optab; break;
2714 /* Make a suitable register to place result in. */
2715 mode = TYPE_MODE (TREE_TYPE (exp));
2717 target = gen_reg_rtx (mode);
2719 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2720 need to expand the argument again. This way, we will not perform
2721 side-effects more the once. */
2722 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2724 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2728 if (expand_sfix_optab (target, op0, builtin_optab))
2730 /* Output the entire sequence. */
2731 insns = get_insns ();
2737 /* If we were unable to expand via the builtin, stop the sequence
2738 (without outputting the insns) and call to the library function
2739 with the stabilized argument list. */
2742 target = expand_call (exp, target, target == const0_rtx);
2747 /* To evaluate powi(x,n), the floating point value x raised to the
2748 constant integer exponent n, we use a hybrid algorithm that
2749 combines the "window method" with look-up tables. For an
2750 introduction to exponentiation algorithms and "addition chains",
2751 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2752 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2753 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2754 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2756 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2757 multiplications to inline before calling the system library's pow
2758 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2759 so this default never requires calling pow, powf or powl. */
2761 #ifndef POWI_MAX_MULTS
2762 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2765 /* The size of the "optimal power tree" lookup table. All
2766 exponents less than this value are simply looked up in the
2767 powi_table below. This threshold is also used to size the
2768 cache of pseudo registers that hold intermediate results. */
2769 #define POWI_TABLE_SIZE 256
2771 /* The size, in bits of the window, used in the "window method"
2772 exponentiation algorithm. This is equivalent to a radix of
2773 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2774 #define POWI_WINDOW_SIZE 3
2776 /* The following table is an efficient representation of an
2777 "optimal power tree". For each value, i, the corresponding
2778 value, j, in the table states than an optimal evaluation
2779 sequence for calculating pow(x,i) can be found by evaluating
2780 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2781 100 integers is given in Knuth's "Seminumerical algorithms". */
2783 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2785 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2786 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2787 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2788 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2789 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2790 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2791 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2792 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2793 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2794 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2795 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2796 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2797 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2798 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2799 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2800 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2801 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2802 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2803 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2804 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2805 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2806 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2807 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2808 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2809 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2810 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2811 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2812 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2813 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2814 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2815 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2816 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2820 /* Return the number of multiplications required to calculate
2821 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2822 subroutine of powi_cost. CACHE is an array indicating
2823 which exponents have already been calculated. */
2826 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2828 /* If we've already calculated this exponent, then this evaluation
2829 doesn't require any additional multiplications. */
2834 return powi_lookup_cost (n - powi_table[n], cache)
2835 + powi_lookup_cost (powi_table[n], cache) + 1;
2838 /* Return the number of multiplications required to calculate
2839 powi(x,n) for an arbitrary x, given the exponent N. This
2840 function needs to be kept in sync with expand_powi below. */
2843 powi_cost (HOST_WIDE_INT n)
2845 bool cache[POWI_TABLE_SIZE];
2846 unsigned HOST_WIDE_INT digit;
2847 unsigned HOST_WIDE_INT val;
2853 /* Ignore the reciprocal when calculating the cost. */
2854 val = (n < 0) ? -n : n;
2856 /* Initialize the exponent cache. */
2857 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2862 while (val >= POWI_TABLE_SIZE)
2866 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2867 result += powi_lookup_cost (digit, cache)
2868 + POWI_WINDOW_SIZE + 1;
2869 val >>= POWI_WINDOW_SIZE;
2878 return result + powi_lookup_cost (val, cache);
2881 /* Recursive subroutine of expand_powi. This function takes the array,
2882 CACHE, of already calculated exponents and an exponent N and returns
2883 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2886 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2888 unsigned HOST_WIDE_INT digit;
2892 if (n < POWI_TABLE_SIZE)
2897 target = gen_reg_rtx (mode);
2900 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2901 op1 = expand_powi_1 (mode, powi_table[n], cache);
2905 target = gen_reg_rtx (mode);
2906 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2907 op0 = expand_powi_1 (mode, n - digit, cache);
2908 op1 = expand_powi_1 (mode, digit, cache);
2912 target = gen_reg_rtx (mode);
2913 op0 = expand_powi_1 (mode, n >> 1, cache);
2917 result = expand_mult (mode, op0, op1, target, 0);
2918 if (result != target)
2919 emit_move_insn (target, result);
2923 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2924 floating point operand in mode MODE, and N is the exponent. This
2925 function needs to be kept in sync with powi_cost above. */
2928 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2930 unsigned HOST_WIDE_INT val;
2931 rtx cache[POWI_TABLE_SIZE];
2935 return CONST1_RTX (mode);
2937 val = (n < 0) ? -n : n;
2939 memset (cache, 0, sizeof (cache));
2942 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2944 /* If the original exponent was negative, reciprocate the result. */
2946 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2947 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2952 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2953 a normal call should be emitted rather than expanding the function
2954 in-line. EXP is the expression that is a call to the builtin
2955 function; if convenient, the result should be placed in TARGET. */
2958 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2962 tree type = TREE_TYPE (exp);
2963 REAL_VALUE_TYPE cint, c, c2;
2966 enum machine_mode mode = TYPE_MODE (type);
2968 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2971 arg0 = CALL_EXPR_ARG (exp, 0);
2972 arg1 = CALL_EXPR_ARG (exp, 1);
2974 if (TREE_CODE (arg1) != REAL_CST
2975 || TREE_OVERFLOW (arg1))
2976 return expand_builtin_mathfn_2 (exp, target, subtarget);
2978 /* Handle constant exponents. */
2980 /* For integer valued exponents we can expand to an optimal multiplication
2981 sequence using expand_powi. */
2982 c = TREE_REAL_CST (arg1);
2983 n = real_to_integer (&c);
2984 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2985 if (real_identical (&c, &cint)
2986 && ((n >= -1 && n <= 2)
2987 || (flag_unsafe_math_optimizations
2988 && optimize_insn_for_speed_p ()
2989 && powi_cost (n) <= POWI_MAX_MULTS)))
2991 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2994 op = force_reg (mode, op);
2995 op = expand_powi (op, mode, n);
3000 narg0 = builtin_save_expr (arg0);
3002 /* If the exponent is not integer valued, check if it is half of an integer.
3003 In this case we can expand to sqrt (x) * x**(n/2). */
3004 fn = mathfn_built_in (type, BUILT_IN_SQRT);
3005 if (fn != NULL_TREE)
3007 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
3008 n = real_to_integer (&c2);
3009 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3010 if (real_identical (&c2, &cint)
3011 && ((flag_unsafe_math_optimizations
3012 && optimize_insn_for_speed_p ()
3013 && powi_cost (n/2) <= POWI_MAX_MULTS)
3016 tree call_expr = build_call_expr (fn, 1, narg0);
3017 /* Use expand_expr in case the newly built call expression
3018 was folded to a non-call. */
3019 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3022 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3023 op2 = force_reg (mode, op2);
3024 op2 = expand_powi (op2, mode, abs (n / 2));
3025 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3026 0, OPTAB_LIB_WIDEN);
3027 /* If the original exponent was negative, reciprocate the
3030 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3031 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3037 /* Try if the exponent is a third of an integer. In this case
3038 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3039 different from pow (x, 1./3.) due to rounding and behavior
3040 with negative x we need to constrain this transformation to
3041 unsafe math and positive x or finite math. */
3042 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3044 && flag_unsafe_math_optimizations
3045 && (tree_expr_nonnegative_p (arg0)
3046 || !HONOR_NANS (mode)))
3048 REAL_VALUE_TYPE dconst3;
3049 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3050 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3051 real_round (&c2, mode, &c2);
3052 n = real_to_integer (&c2);
3053 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3054 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3055 real_convert (&c2, mode, &c2);
3056 if (real_identical (&c2, &c)
3057 && ((optimize_insn_for_speed_p ()
3058 && powi_cost (n/3) <= POWI_MAX_MULTS)
3061 tree call_expr = build_call_expr (fn, 1,narg0);
3062 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3063 if (abs (n) % 3 == 2)
3064 op = expand_simple_binop (mode, MULT, op, op, op,
3065 0, OPTAB_LIB_WIDEN);
3068 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3069 op2 = force_reg (mode, op2);
3070 op2 = expand_powi (op2, mode, abs (n / 3));
3071 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3072 0, OPTAB_LIB_WIDEN);
3073 /* If the original exponent was negative, reciprocate the
3076 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3077 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3083 /* Fall back to optab expansion. */
3084 return expand_builtin_mathfn_2 (exp, target, subtarget);
3087 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3088 a normal call should be emitted rather than expanding the function
3089 in-line. EXP is the expression that is a call to the builtin
3090 function; if convenient, the result should be placed in TARGET. */
3093 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3097 enum machine_mode mode;
3098 enum machine_mode mode2;
3100 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3103 arg0 = CALL_EXPR_ARG (exp, 0);
3104 arg1 = CALL_EXPR_ARG (exp, 1);
3105 mode = TYPE_MODE (TREE_TYPE (exp));
3107 /* Handle constant power. */
3109 if (TREE_CODE (arg1) == INTEGER_CST
3110 && !TREE_OVERFLOW (arg1))
3112 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3114 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3115 Otherwise, check the number of multiplications required. */
3116 if ((TREE_INT_CST_HIGH (arg1) == 0
3117 || TREE_INT_CST_HIGH (arg1) == -1)
3118 && ((n >= -1 && n <= 2)
3119 || (optimize_insn_for_speed_p ()
3120 && powi_cost (n) <= POWI_MAX_MULTS)))
3122 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3123 op0 = force_reg (mode, op0);
3124 return expand_powi (op0, mode, n);
3128 /* Emit a libcall to libgcc. */
3130 /* Mode of the 2nd argument must match that of an int. */
3131 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3133 if (target == NULL_RTX)
3134 target = gen_reg_rtx (mode);
3136 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3137 if (GET_MODE (op0) != mode)
3138 op0 = convert_to_mode (mode, op0, 0);
3139 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3140 if (GET_MODE (op1) != mode2)
3141 op1 = convert_to_mode (mode2, op1, 0);
3143 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3144 target, LCT_CONST, mode, 2,
3145 op0, mode, op1, mode2);
3150 /* Expand expression EXP which is a call to the strlen builtin. Return
3151 NULL_RTX if we failed the caller should emit a normal call, otherwise
3152 try to get the result in TARGET, if convenient. */
3155 expand_builtin_strlen (tree exp, rtx target,
3156 enum machine_mode target_mode)
3158 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3164 tree src = CALL_EXPR_ARG (exp, 0);
3165 rtx result, src_reg, char_rtx, before_strlen;
3166 enum machine_mode insn_mode = target_mode, char_mode;
3167 enum insn_code icode = CODE_FOR_nothing;
3170 /* If the length can be computed at compile-time, return it. */
3171 len = c_strlen (src, 0);
3173 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3175 /* If the length can be computed at compile-time and is constant
3176 integer, but there are side-effects in src, evaluate
3177 src for side-effects, then return len.
3178 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3179 can be optimized into: i++; x = 3; */
3180 len = c_strlen (src, 1);
3181 if (len && TREE_CODE (len) == INTEGER_CST)
3183 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3184 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3187 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3189 /* If SRC is not a pointer type, don't do this operation inline. */
3193 /* Bail out if we can't compute strlen in the right mode. */
3194 while (insn_mode != VOIDmode)
3196 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3197 if (icode != CODE_FOR_nothing)
3200 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3202 if (insn_mode == VOIDmode)
3205 /* Make a place to write the result of the instruction. */
3209 && GET_MODE (result) == insn_mode
3210 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3211 result = gen_reg_rtx (insn_mode);
3213 /* Make a place to hold the source address. We will not expand
3214 the actual source until we are sure that the expansion will
3215 not fail -- there are trees that cannot be expanded twice. */
3216 src_reg = gen_reg_rtx (Pmode);
3218 /* Mark the beginning of the strlen sequence so we can emit the
3219 source operand later. */
3220 before_strlen = get_last_insn ();
3222 char_rtx = const0_rtx;
3223 char_mode = insn_data[(int) icode].operand[2].mode;
3224 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3226 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3228 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3229 char_rtx, GEN_INT (align));
3234 /* Now that we are assured of success, expand the source. */
3236 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3238 emit_move_insn (src_reg, pat);
3243 emit_insn_after (pat, before_strlen);
3245 emit_insn_before (pat, get_insns ());
3247 /* Return the value in the proper mode for this function. */
3248 if (GET_MODE (result) == target_mode)
3250 else if (target != 0)
3251 convert_move (target, result, 0);
3253 target = convert_to_mode (target_mode, result, 0);
3259 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3260 caller should emit a normal call, otherwise try to get the result
3261 in TARGET, if convenient (and in mode MODE if that's convenient). */
3264 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3266 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3268 tree type = TREE_TYPE (exp);
3269 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3270 CALL_EXPR_ARG (exp, 1), type);
3272 return expand_expr (result, target, mode, EXPAND_NORMAL);
3277 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3278 caller should emit a normal call, otherwise try to get the result
3279 in TARGET, if convenient (and in mode MODE if that's convenient). */
3282 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3284 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3286 tree type = TREE_TYPE (exp);
3287 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3288 CALL_EXPR_ARG (exp, 1), type);
3290 return expand_expr (result, target, mode, EXPAND_NORMAL);
3292 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3297 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3298 caller should emit a normal call, otherwise try to get the result
3299 in TARGET, if convenient (and in mode MODE if that's convenient). */
3302 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3304 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3306 tree type = TREE_TYPE (exp);
3307 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3308 CALL_EXPR_ARG (exp, 1), type);
3310 return expand_expr (result, target, mode, EXPAND_NORMAL);
3315 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3316 caller should emit a normal call, otherwise try to get the result
3317 in TARGET, if convenient (and in mode MODE if that's convenient). */
3320 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3322 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3324 tree type = TREE_TYPE (exp);
3325 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3326 CALL_EXPR_ARG (exp, 1), type);
3328 return expand_expr (result, target, mode, EXPAND_NORMAL);
3333 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3334 bytes from constant string DATA + OFFSET and return it as target
3338 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3339 enum machine_mode mode)
3341 const char *str = (const char *) data;
3343 gcc_assert (offset >= 0
3344 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3345 <= strlen (str) + 1));
3347 return c_readstr (str + offset, mode);
3350 /* Expand a call EXP to the memcpy builtin.
3351 Return NULL_RTX if we failed, the caller should emit a normal call,
3352 otherwise try to get the result in TARGET, if convenient (and in
3353 mode MODE if that's convenient). */
3356 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3358 tree fndecl = get_callee_fndecl (exp);
3360 if (!validate_arglist (exp,
3361 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3365 tree dest = CALL_EXPR_ARG (exp, 0);
3366 tree src = CALL_EXPR_ARG (exp, 1);
3367 tree len = CALL_EXPR_ARG (exp, 2);
3368 const char *src_str;
3369 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3370 unsigned int dest_align
3371 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3372 rtx dest_mem, src_mem, dest_addr, len_rtx;
3373 tree result = fold_builtin_memory_op (dest, src, len,
3374 TREE_TYPE (TREE_TYPE (fndecl)),
3376 HOST_WIDE_INT expected_size = -1;
3377 unsigned int expected_align = 0;
3378 tree_ann_common_t ann;
3382 while (TREE_CODE (result) == COMPOUND_EXPR)
3384 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3386 result = TREE_OPERAND (result, 1);
3388 return expand_expr (result, target, mode, EXPAND_NORMAL);
3391 /* If DEST is not a pointer type, call the normal function. */
3392 if (dest_align == 0)
3395 /* If either SRC is not a pointer type, don't do this
3396 operation in-line. */
3400 ann = tree_common_ann (exp);
3402 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3404 if (expected_align < dest_align)
3405 expected_align = dest_align;
3406 dest_mem = get_memory_rtx (dest, len);
3407 set_mem_align (dest_mem, dest_align);
3408 len_rtx = expand_normal (len);
3409 src_str = c_getstr (src);
3411 /* If SRC is a string constant and block move would be done
3412 by pieces, we can avoid loading the string from memory
3413 and only stored the computed constants. */
3415 && CONST_INT_P (len_rtx)
3416 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3417 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3418 CONST_CAST (char *, src_str),
3421 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3422 builtin_memcpy_read_str,
3423 CONST_CAST (char *, src_str),
3424 dest_align, false, 0);
3425 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3426 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3430 src_mem = get_memory_rtx (src, len);
3431 set_mem_align (src_mem, src_align);
3433 /* Copy word part most expediently. */
3434 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3435 CALL_EXPR_TAILCALL (exp)
3436 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3437 expected_align, expected_size);
3441 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3442 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3448 /* Expand a call EXP to the mempcpy builtin.
3449 Return NULL_RTX if we failed; the caller should emit a normal call,
3450 otherwise try to get the result in TARGET, if convenient (and in
3451 mode MODE if that's convenient). If ENDP is 0 return the
3452 destination pointer, if ENDP is 1 return the end pointer ala
3453 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3457 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3459 if (!validate_arglist (exp,
3460 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3464 tree dest = CALL_EXPR_ARG (exp, 0);
3465 tree src = CALL_EXPR_ARG (exp, 1);
3466 tree len = CALL_EXPR_ARG (exp, 2);
3467 return expand_builtin_mempcpy_args (dest, src, len,
3469 target, mode, /*endp=*/ 1);
3473 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3474 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3475 so that this can also be called without constructing an actual CALL_EXPR.
3476 TYPE is the return type of the call. The other arguments and return value
3477 are the same as for expand_builtin_mempcpy. */
3480 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3481 rtx target, enum machine_mode mode, int endp)
3483 /* If return value is ignored, transform mempcpy into memcpy. */
3484 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3486 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3487 tree result = build_call_expr (fn, 3, dest, src, len);
3489 while (TREE_CODE (result) == COMPOUND_EXPR)
3491 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3493 result = TREE_OPERAND (result, 1);
3495 return expand_expr (result, target, mode, EXPAND_NORMAL);
3499 const char *src_str;
3500 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3501 unsigned int dest_align
3502 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3503 rtx dest_mem, src_mem, len_rtx;
3504 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3508 while (TREE_CODE (result) == COMPOUND_EXPR)
3510 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3512 result = TREE_OPERAND (result, 1);
3514 return expand_expr (result, target, mode, EXPAND_NORMAL);
3517 /* If either SRC or DEST is not a pointer type, don't do this
3518 operation in-line. */
3519 if (dest_align == 0 || src_align == 0)
3522 /* If LEN is not constant, call the normal function. */
3523 if (! host_integerp (len, 1))
3526 len_rtx = expand_normal (len);
3527 src_str = c_getstr (src);
3529 /* If SRC is a string constant and block move would be done
3530 by pieces, we can avoid loading the string from memory
3531 and only stored the computed constants. */
3533 && CONST_INT_P (len_rtx)
3534 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3535 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3536 CONST_CAST (char *, src_str),
3539 dest_mem = get_memory_rtx (dest, len);
3540 set_mem_align (dest_mem, dest_align);
3541 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3542 builtin_memcpy_read_str,
3543 CONST_CAST (char *, src_str),
3544 dest_align, false, endp);
3545 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3546 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3550 if (CONST_INT_P (len_rtx)
3551 && can_move_by_pieces (INTVAL (len_rtx),
3552 MIN (dest_align, src_align)))
3554 dest_mem = get_memory_rtx (dest, len);
3555 set_mem_align (dest_mem, dest_align);
3556 src_mem = get_memory_rtx (src, len);
3557 set_mem_align (src_mem, src_align);
3558 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3559 MIN (dest_align, src_align), endp);
3560 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3561 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3569 /* Expand expression EXP, which is a call to the memmove builtin. Return
3570 NULL_RTX if we failed; the caller should emit a normal call. */
3573 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3575 if (!validate_arglist (exp,
3576 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3580 tree dest = CALL_EXPR_ARG (exp, 0);
3581 tree src = CALL_EXPR_ARG (exp, 1);
3582 tree len = CALL_EXPR_ARG (exp, 2);
3583 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3584 target, mode, ignore);
3588 /* Helper function to do the actual work for expand_builtin_memmove. The
3589 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3590 so that this can also be called without constructing an actual CALL_EXPR.
3591 TYPE is the return type of the call. The other arguments and return value
3592 are the same as for expand_builtin_memmove. */
3595 expand_builtin_memmove_args (tree dest, tree src, tree len,
3596 tree type, rtx target, enum machine_mode mode,
3599 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3603 STRIP_TYPE_NOPS (result);
3604 while (TREE_CODE (result) == COMPOUND_EXPR)
3606 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3608 result = TREE_OPERAND (result, 1);
3610 return expand_expr (result, target, mode, EXPAND_NORMAL);
3613 /* Otherwise, call the normal function. */
3617 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3618 NULL_RTX if we failed the caller should emit a normal call. */
3621 expand_builtin_bcopy (tree exp, int ignore)
3623 tree type = TREE_TYPE (exp);
3624 tree src, dest, size;
3626 if (!validate_arglist (exp,
3627 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3630 src = CALL_EXPR_ARG (exp, 0);
3631 dest = CALL_EXPR_ARG (exp, 1);
3632 size = CALL_EXPR_ARG (exp, 2);
3634 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3635 This is done this way so that if it isn't expanded inline, we fall
3636 back to calling bcopy instead of memmove. */
3637 return expand_builtin_memmove_args (dest, src,
3638 fold_convert (sizetype, size),
3639 type, const0_rtx, VOIDmode,
3644 # define HAVE_movstr 0
3645 # define CODE_FOR_movstr CODE_FOR_nothing
3648 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3649 we failed, the caller should emit a normal call, otherwise try to
3650 get the result in TARGET, if convenient. If ENDP is 0 return the
3651 destination pointer, if ENDP is 1 return the end pointer ala
3652 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3656 expand_movstr (tree dest, tree src, rtx target, int endp)
3662 const struct insn_data * data;
3667 dest_mem = get_memory_rtx (dest, NULL);
3668 src_mem = get_memory_rtx (src, NULL);
3671 target = force_reg (Pmode, XEXP (dest_mem, 0));
3672 dest_mem = replace_equiv_address (dest_mem, target);
3673 end = gen_reg_rtx (Pmode);
3677 if (target == 0 || target == const0_rtx)
3679 end = gen_reg_rtx (Pmode);
3687 data = insn_data + CODE_FOR_movstr;
3689 if (data->operand[0].mode != VOIDmode)
3690 end = gen_lowpart (data->operand[0].mode, end);
3692 insn = data->genfun (end, dest_mem, src_mem);
3698 /* movstr is supposed to set end to the address of the NUL
3699 terminator. If the caller requested a mempcpy-like return value,
3701 if (endp == 1 && target != const0_rtx)
3703 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3704 emit_move_insn (target, force_operand (tem, NULL_RTX));
3710 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3711 NULL_RTX if we failed the caller should emit a normal call, otherwise
3712 try to get the result in TARGET, if convenient (and in mode MODE if that's
3716 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3718 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3720 tree dest = CALL_EXPR_ARG (exp, 0);
3721 tree src = CALL_EXPR_ARG (exp, 1);
3722 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3727 /* Helper function to do the actual work for expand_builtin_strcpy. The
3728 arguments to the builtin_strcpy call DEST and SRC are broken out
3729 so that this can also be called without constructing an actual CALL_EXPR.
3730 The other arguments and return value are the same as for
3731 expand_builtin_strcpy. */
3734 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3735 rtx target, enum machine_mode mode)
3737 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3739 return expand_expr (result, target, mode, EXPAND_NORMAL);
3740 return expand_movstr (dest, src, target, /*endp=*/0);
3744 /* Expand a call EXP to the stpcpy builtin.
3745 Return NULL_RTX if we failed the caller should emit a normal call,
3746 otherwise try to get the result in TARGET, if convenient (and in
3747 mode MODE if that's convenient). */
3750 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3754 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3757 dst = CALL_EXPR_ARG (exp, 0);
3758 src = CALL_EXPR_ARG (exp, 1);
3760 /* If return value is ignored, transform stpcpy into strcpy. */
3761 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3763 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3764 tree result = build_call_expr (fn, 2, dst, src);
3766 STRIP_NOPS (result);
3767 while (TREE_CODE (result) == COMPOUND_EXPR)
3769 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3771 result = TREE_OPERAND (result, 1);
3773 return expand_expr (result, target, mode, EXPAND_NORMAL);
3780 /* Ensure we get an actual string whose length can be evaluated at
3781 compile-time, not an expression containing a string. This is
3782 because the latter will potentially produce pessimized code
3783 when used to produce the return value. */
3784 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3785 return expand_movstr (dst, src, target, /*endp=*/2);
3787 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3788 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3789 target, mode, /*endp=*/2);
3794 if (TREE_CODE (len) == INTEGER_CST)
3796 rtx len_rtx = expand_normal (len);
3798 if (CONST_INT_P (len_rtx))
3800 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3801 dst, src, target, mode);
3807 if (mode != VOIDmode)
3808 target = gen_reg_rtx (mode);
3810 target = gen_reg_rtx (GET_MODE (ret));
3812 if (GET_MODE (target) != GET_MODE (ret))
3813 ret = gen_lowpart (GET_MODE (target), ret);
3815 ret = plus_constant (ret, INTVAL (len_rtx));
3816 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3824 return expand_movstr (dst, src, target, /*endp=*/2);
3828 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3829 bytes from constant string DATA + OFFSET and return it as target
3833 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3834 enum machine_mode mode)
3836 const char *str = (const char *) data;
3838 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3841 return c_readstr (str + offset, mode);
3844 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3845 NULL_RTX if we failed the caller should emit a normal call. */
3848 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3850 tree fndecl = get_callee_fndecl (exp);
3852 if (validate_arglist (exp,
3853 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3855 tree dest = CALL_EXPR_ARG (exp, 0);
3856 tree src = CALL_EXPR_ARG (exp, 1);
3857 tree len = CALL_EXPR_ARG (exp, 2);
3858 tree slen = c_strlen (src, 1);
3859 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3863 while (TREE_CODE (result) == COMPOUND_EXPR)
3865 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3867 result = TREE_OPERAND (result, 1);
3869 return expand_expr (result, target, mode, EXPAND_NORMAL);
3872 /* We must be passed a constant len and src parameter. */
3873 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3876 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3878 /* We're required to pad with trailing zeros if the requested
3879 len is greater than strlen(s2)+1. In that case try to
3880 use store_by_pieces, if it fails, punt. */
3881 if (tree_int_cst_lt (slen, len))
3883 unsigned int dest_align
3884 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3885 const char *p = c_getstr (src);
3888 if (!p || dest_align == 0 || !host_integerp (len, 1)
3889 || !can_store_by_pieces (tree_low_cst (len, 1),
3890 builtin_strncpy_read_str,
3891 CONST_CAST (char *, p),
3895 dest_mem = get_memory_rtx (dest, len);
3896 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3897 builtin_strncpy_read_str,
3898 CONST_CAST (char *, p), dest_align, false, 0);
3899 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3900 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3907 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3908 bytes from constant string DATA + OFFSET and return it as target
3912 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3913 enum machine_mode mode)
3915 const char *c = (const char *) data;
3916 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3918 memset (p, *c, GET_MODE_SIZE (mode));
3920 return c_readstr (p, mode);
3923 /* Callback routine for store_by_pieces. Return the RTL of a register
3924 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3925 char value given in the RTL register data. For example, if mode is
3926 4 bytes wide, return the RTL for 0x01010101*data. */
3929 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3930 enum machine_mode mode)
3936 size = GET_MODE_SIZE (mode);
3940 p = XALLOCAVEC (char, size);
3941 memset (p, 1, size);
3942 coeff = c_readstr (p, mode);
3944 target = convert_to_mode (mode, (rtx) data, 1);
3945 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3946 return force_reg (mode, target);
3949 /* Expand expression EXP, which is a call to the memset builtin. Return
3950 NULL_RTX if we failed the caller should emit a normal call, otherwise
3951 try to get the result in TARGET, if convenient (and in mode MODE if that's
3955 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3957 if (!validate_arglist (exp,
3958 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3962 tree dest = CALL_EXPR_ARG (exp, 0);
3963 tree val = CALL_EXPR_ARG (exp, 1);
3964 tree len = CALL_EXPR_ARG (exp, 2);
3965 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3969 /* Helper function to do the actual work for expand_builtin_memset. The
3970 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3971 so that this can also be called without constructing an actual CALL_EXPR.
3972 The other arguments and return value are the same as for
3973 expand_builtin_memset. */
3976 expand_builtin_memset_args (tree dest, tree val, tree len,
3977 rtx target, enum machine_mode mode, tree orig_exp)
3980 enum built_in_function fcode;
3982 unsigned int dest_align;
3983 rtx dest_mem, dest_addr, len_rtx;
3984 HOST_WIDE_INT expected_size = -1;
3985 unsigned int expected_align = 0;
3986 tree_ann_common_t ann;
3988 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3990 /* If DEST is not a pointer type, don't do this operation in-line. */
3991 if (dest_align == 0)
3994 ann = tree_common_ann (orig_exp);
3996 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3998 if (expected_align < dest_align)
3999 expected_align = dest_align;
4001 /* If the LEN parameter is zero, return DEST. */
4002 if (integer_zerop (len))
4004 /* Evaluate and ignore VAL in case it has side-effects. */
4005 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4006 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4009 /* Stabilize the arguments in case we fail. */
4010 dest = builtin_save_expr (dest);
4011 val = builtin_save_expr (val);
4012 len = builtin_save_expr (len);
4014 len_rtx = expand_normal (len);
4015 dest_mem = get_memory_rtx (dest, len);
4017 if (TREE_CODE (val) != INTEGER_CST)
4021 val_rtx = expand_normal (val);
4022 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
4025 /* Assume that we can memset by pieces if we can store
4026 * the coefficients by pieces (in the required modes).
4027 * We can't pass builtin_memset_gen_str as that emits RTL. */
4029 if (host_integerp (len, 1)
4030 && can_store_by_pieces (tree_low_cst (len, 1),
4031 builtin_memset_read_str, &c, dest_align,
4034 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
4036 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4037 builtin_memset_gen_str, val_rtx, dest_align,
4040 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4041 dest_align, expected_align,
4045 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4046 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4050 if (target_char_cast (val, &c))
4055 if (host_integerp (len, 1)
4056 && can_store_by_pieces (tree_low_cst (len, 1),
4057 builtin_memset_read_str, &c, dest_align,
4059 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4060 builtin_memset_read_str, &c, dest_align, true, 0);
4061 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
4062 dest_align, expected_align,
4066 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4067 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4071 set_mem_align (dest_mem, dest_align);
4072 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4073 CALL_EXPR_TAILCALL (orig_exp)
4074 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4075 expected_align, expected_size);
4079 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4080 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4086 fndecl = get_callee_fndecl (orig_exp);
4087 fcode = DECL_FUNCTION_CODE (fndecl);
4088 if (fcode == BUILT_IN_MEMSET)
4089 fn = build_call_expr (fndecl, 3, dest, val, len);
4090 else if (fcode == BUILT_IN_BZERO)
4091 fn = build_call_expr (fndecl, 2, dest, len);
4094 if (TREE_CODE (fn) == CALL_EXPR)
4095 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4096 return expand_call (fn, target, target == const0_rtx);
4099 /* Expand expression EXP, which is a call to the bzero builtin. Return
4100 NULL_RTX if we failed the caller should emit a normal call. */
4103 expand_builtin_bzero (tree exp)
4107 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4110 dest = CALL_EXPR_ARG (exp, 0);
4111 size = CALL_EXPR_ARG (exp, 1);
4113 /* New argument list transforming bzero(ptr x, int y) to
4114 memset(ptr x, int 0, size_t y). This is done this way
4115 so that if it isn't expanded inline, we fallback to
4116 calling bzero instead of memset. */
4118 return expand_builtin_memset_args (dest, integer_zero_node,
4119 fold_convert (sizetype, size),
4120 const0_rtx, VOIDmode, exp);
4123 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4124 caller should emit a normal call, otherwise try to get the result
4125 in TARGET, if convenient (and in mode MODE if that's convenient). */
4128 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4130 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4131 INTEGER_TYPE, VOID_TYPE))
4133 tree type = TREE_TYPE (exp);
4134 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4135 CALL_EXPR_ARG (exp, 1),
4136 CALL_EXPR_ARG (exp, 2), type);
4138 return expand_expr (result, target, mode, EXPAND_NORMAL);
4143 /* Expand expression EXP, which is a call to the memcmp built-in function.
4144 Return NULL_RTX if we failed and the
4145 caller should emit a normal call, otherwise try to get the result in
4146 TARGET, if convenient (and in mode MODE, if that's convenient). */
4149 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4151 if (!validate_arglist (exp,
4152 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4156 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4157 CALL_EXPR_ARG (exp, 1),
4158 CALL_EXPR_ARG (exp, 2));
4160 return expand_expr (result, target, mode, EXPAND_NORMAL);
4163 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4165 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4168 tree arg1 = CALL_EXPR_ARG (exp, 0);
4169 tree arg2 = CALL_EXPR_ARG (exp, 1);
4170 tree len = CALL_EXPR_ARG (exp, 2);
4173 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4175 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4176 enum machine_mode insn_mode;
4178 #ifdef HAVE_cmpmemsi
4180 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4183 #ifdef HAVE_cmpstrnsi
4185 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4190 /* If we don't have POINTER_TYPE, call the function. */
4191 if (arg1_align == 0 || arg2_align == 0)
4194 /* Make a place to write the result of the instruction. */
4197 && REG_P (result) && GET_MODE (result) == insn_mode
4198 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4199 result = gen_reg_rtx (insn_mode);
4201 arg1_rtx = get_memory_rtx (arg1, len);
4202 arg2_rtx = get_memory_rtx (arg2, len);
4203 arg3_rtx = expand_normal (fold_convert (sizetype, len));
4205 /* Set MEM_SIZE as appropriate. */
4206 if (CONST_INT_P (arg3_rtx))
4208 set_mem_size (arg1_rtx, arg3_rtx);
4209 set_mem_size (arg2_rtx, arg3_rtx);
4212 #ifdef HAVE_cmpmemsi
4214 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4215 GEN_INT (MIN (arg1_align, arg2_align)));
4218 #ifdef HAVE_cmpstrnsi
4220 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4221 GEN_INT (MIN (arg1_align, arg2_align)));
4229 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4230 TYPE_MODE (integer_type_node), 3,
4231 XEXP (arg1_rtx, 0), Pmode,
4232 XEXP (arg2_rtx, 0), Pmode,
4233 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4234 TYPE_UNSIGNED (sizetype)),
4235 TYPE_MODE (sizetype));
4237 /* Return the value in the proper mode for this function. */
4238 mode = TYPE_MODE (TREE_TYPE (exp));
4239 if (GET_MODE (result) == mode)
4241 else if (target != 0)
4243 convert_move (target, result, 0);
4247 return convert_to_mode (mode, result, 0);
4254 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4255 if we failed the caller should emit a normal call, otherwise try to get
4256 the result in TARGET, if convenient. */
4259 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4261 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4265 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4266 CALL_EXPR_ARG (exp, 1));
4268 return expand_expr (result, target, mode, EXPAND_NORMAL);
4271 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4272 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4273 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4275 rtx arg1_rtx, arg2_rtx;
4276 rtx result, insn = NULL_RTX;
4278 tree arg1 = CALL_EXPR_ARG (exp, 0);
4279 tree arg2 = CALL_EXPR_ARG (exp, 1);
4282 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4284 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4286 /* If we don't have POINTER_TYPE, call the function. */
4287 if (arg1_align == 0 || arg2_align == 0)
4290 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4291 arg1 = builtin_save_expr (arg1);
4292 arg2 = builtin_save_expr (arg2);
4294 arg1_rtx = get_memory_rtx (arg1, NULL);
4295 arg2_rtx = get_memory_rtx (arg2, NULL);
4297 #ifdef HAVE_cmpstrsi
4298 /* Try to call cmpstrsi. */
4301 enum machine_mode insn_mode
4302 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4304 /* Make a place to write the result of the instruction. */
4307 && REG_P (result) && GET_MODE (result) == insn_mode
4308 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4309 result = gen_reg_rtx (insn_mode);
4311 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4312 GEN_INT (MIN (arg1_align, arg2_align)));
4315 #ifdef HAVE_cmpstrnsi
4316 /* Try to determine at least one length and call cmpstrnsi. */
4317 if (!insn && HAVE_cmpstrnsi)
4322 enum machine_mode insn_mode
4323 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4324 tree len1 = c_strlen (arg1, 1);
4325 tree len2 = c_strlen (arg2, 1);
4328 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4330 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4332 /* If we don't have a constant length for the first, use the length
4333 of the second, if we know it. We don't require a constant for
4334 this case; some cost analysis could be done if both are available
4335 but neither is constant. For now, assume they're equally cheap,
4336 unless one has side effects. If both strings have constant lengths,
4343 else if (TREE_SIDE_EFFECTS (len1))
4345 else if (TREE_SIDE_EFFECTS (len2))
4347 else if (TREE_CODE (len1) != INTEGER_CST)
4349 else if (TREE_CODE (len2) != INTEGER_CST)
4351 else if (tree_int_cst_lt (len1, len2))
4356 /* If both arguments have side effects, we cannot optimize. */
4357 if (!len || TREE_SIDE_EFFECTS (len))
4360 arg3_rtx = expand_normal (len);
4362 /* Make a place to write the result of the instruction. */
4365 && REG_P (result) && GET_MODE (result) == insn_mode
4366 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4367 result = gen_reg_rtx (insn_mode);
4369 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4370 GEN_INT (MIN (arg1_align, arg2_align)));
4378 /* Return the value in the proper mode for this function. */
4379 mode = TYPE_MODE (TREE_TYPE (exp));
4380 if (GET_MODE (result) == mode)
4383 return convert_to_mode (mode, result, 0);
4384 convert_move (target, result, 0);
4388 /* Expand the library call ourselves using a stabilized argument
4389 list to avoid re-evaluating the function's arguments twice. */
4390 #ifdef HAVE_cmpstrnsi
4393 fndecl = get_callee_fndecl (exp);
4394 fn = build_call_expr (fndecl, 2, arg1, arg2);
4395 if (TREE_CODE (fn) == CALL_EXPR)
4396 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4397 return expand_call (fn, target, target == const0_rtx);
4403 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4404 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4405 the result in TARGET, if convenient. */
4408 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4410 if (!validate_arglist (exp,
4411 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4415 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4416 CALL_EXPR_ARG (exp, 1),
4417 CALL_EXPR_ARG (exp, 2));
4419 return expand_expr (result, target, mode, EXPAND_NORMAL);
4422 /* If c_strlen can determine an expression for one of the string
4423 lengths, and it doesn't have side effects, then emit cmpstrnsi
4424 using length MIN(strlen(string)+1, arg3). */
4425 #ifdef HAVE_cmpstrnsi
4428 tree len, len1, len2;
4429 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4432 tree arg1 = CALL_EXPR_ARG (exp, 0);
4433 tree arg2 = CALL_EXPR_ARG (exp, 1);
4434 tree arg3 = CALL_EXPR_ARG (exp, 2);
4437 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4439 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4440 enum machine_mode insn_mode
4441 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4443 len1 = c_strlen (arg1, 1);
4444 len2 = c_strlen (arg2, 1);
4447 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4449 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4451 /* If we don't have a constant length for the first, use the length
4452 of the second, if we know it. We don't require a constant for
4453 this case; some cost analysis could be done if both are available
4454 but neither is constant. For now, assume they're equally cheap,
4455 unless one has side effects. If both strings have constant lengths,
4462 else if (TREE_SIDE_EFFECTS (len1))
4464 else if (TREE_SIDE_EFFECTS (len2))
4466 else if (TREE_CODE (len1) != INTEGER_CST)
4468 else if (TREE_CODE (len2) != INTEGER_CST)
4470 else if (tree_int_cst_lt (len1, len2))
4475 /* If both arguments have side effects, we cannot optimize. */
4476 if (!len || TREE_SIDE_EFFECTS (len))
4479 /* The actual new length parameter is MIN(len,arg3). */
4480 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4481 fold_convert (TREE_TYPE (len), arg3));
4483 /* If we don't have POINTER_TYPE, call the function. */
4484 if (arg1_align == 0 || arg2_align == 0)
4487 /* Make a place to write the result of the instruction. */
4490 && REG_P (result) && GET_MODE (result) == insn_mode
4491 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4492 result = gen_reg_rtx (insn_mode);
4494 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4495 arg1 = builtin_save_expr (arg1);
4496 arg2 = builtin_save_expr (arg2);
4497 len = builtin_save_expr (len);
4499 arg1_rtx = get_memory_rtx (arg1, len);
4500 arg2_rtx = get_memory_rtx (arg2, len);
4501 arg3_rtx = expand_normal (len);
4502 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4503 GEN_INT (MIN (arg1_align, arg2_align)));
4508 /* Return the value in the proper mode for this function. */
4509 mode = TYPE_MODE (TREE_TYPE (exp));
4510 if (GET_MODE (result) == mode)
4513 return convert_to_mode (mode, result, 0);
4514 convert_move (target, result, 0);
4518 /* Expand the library call ourselves using a stabilized argument
4519 list to avoid re-evaluating the function's arguments twice. */
4520 fndecl = get_callee_fndecl (exp);
4521 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4522 if (TREE_CODE (fn) == CALL_EXPR)
4523 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4524 return expand_call (fn, target, target == const0_rtx);
4530 /* Expand expression EXP, which is a call to the strcat builtin.
4531 Return NULL_RTX if we failed the caller should emit a normal call,
4532 otherwise try to get the result in TARGET, if convenient. */
4535 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4537 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4541 tree dst = CALL_EXPR_ARG (exp, 0);
4542 tree src = CALL_EXPR_ARG (exp, 1);
4543 const char *p = c_getstr (src);
4545 /* If the string length is zero, return the dst parameter. */
4546 if (p && *p == '\0')
4547 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4549 if (optimize_insn_for_speed_p ())
4551 /* See if we can store by pieces into (dst + strlen(dst)). */
4552 tree newsrc, newdst,
4553 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4556 /* Stabilize the argument list. */
4557 newsrc = builtin_save_expr (src);
4558 dst = builtin_save_expr (dst);
4562 /* Create strlen (dst). */
4563 newdst = build_call_expr (strlen_fn, 1, dst);
4564 /* Create (dst p+ strlen (dst)). */
4566 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4567 newdst = builtin_save_expr (newdst);
4569 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4571 end_sequence (); /* Stop sequence. */
4575 /* Output the entire sequence. */
4576 insns = get_insns ();
4580 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4587 /* Expand expression EXP, which is a call to the strncat builtin.
4588 Return NULL_RTX if we failed the caller should emit a normal call,
4589 otherwise try to get the result in TARGET, if convenient. */
4592 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4594 if (validate_arglist (exp,
4595 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4597 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4598 CALL_EXPR_ARG (exp, 1),
4599 CALL_EXPR_ARG (exp, 2));
4601 return expand_expr (result, target, mode, EXPAND_NORMAL);
4606 /* Expand expression EXP, which is a call to the strspn builtin.
4607 Return NULL_RTX if we failed the caller should emit a normal call,
4608 otherwise try to get the result in TARGET, if convenient. */
4611 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4613 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4615 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4616 CALL_EXPR_ARG (exp, 1));
4618 return expand_expr (result, target, mode, EXPAND_NORMAL);
4623 /* Expand expression EXP, which is a call to the strcspn builtin.
4624 Return NULL_RTX if we failed the caller should emit a normal call,
4625 otherwise try to get the result in TARGET, if convenient. */
4628 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4630 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4632 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4633 CALL_EXPR_ARG (exp, 1));
4635 return expand_expr (result, target, mode, EXPAND_NORMAL);
4640 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4641 if that's convenient. */
4644 expand_builtin_saveregs (void)
4648 /* Don't do __builtin_saveregs more than once in a function.
4649 Save the result of the first call and reuse it. */
4650 if (saveregs_value != 0)
4651 return saveregs_value;
4653 /* When this function is called, it means that registers must be
4654 saved on entry to this function. So we migrate the call to the
4655 first insn of this function. */
4659 /* Do whatever the machine needs done in this case. */
4660 val = targetm.calls.expand_builtin_saveregs ();
4665 saveregs_value = val;
4667 /* Put the insns after the NOTE that starts the function. If this
4668 is inside a start_sequence, make the outer-level insn chain current, so
4669 the code is placed at the start of the function. */
4670 push_topmost_sequence ();
4671 emit_insn_after (seq, entry_of_function ());
4672 pop_topmost_sequence ();
4677 /* __builtin_args_info (N) returns word N of the arg space info
4678 for the current function. The number and meanings of words
4679 is controlled by the definition of CUMULATIVE_ARGS. */
4682 expand_builtin_args_info (tree exp)
4684 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4685 int *word_ptr = (int *) &crtl->args.info;
4687 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4689 if (call_expr_nargs (exp) != 0)
4691 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4692 error ("argument of %<__builtin_args_info%> must be constant");
4695 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4697 if (wordnum < 0 || wordnum >= nwords)
4698 error ("argument of %<__builtin_args_info%> out of range");
4700 return GEN_INT (word_ptr[wordnum]);
4704 error ("missing argument in %<__builtin_args_info%>");
4709 /* Expand a call to __builtin_next_arg. */
4712 expand_builtin_next_arg (void)
4714 /* Checking arguments is already done in fold_builtin_next_arg
4715 that must be called before this function. */
4716 return expand_binop (ptr_mode, add_optab,
4717 crtl->args.internal_arg_pointer,
4718 crtl->args.arg_offset_rtx,
4719 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4722 /* Make it easier for the backends by protecting the valist argument
4723 from multiple evaluations. */
4726 stabilize_va_list (tree valist, int needs_lvalue)
4728 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4730 gcc_assert (vatype != NULL_TREE);
4732 if (TREE_CODE (vatype) == ARRAY_TYPE)
4734 if (TREE_SIDE_EFFECTS (valist))
4735 valist = save_expr (valist);
4737 /* For this case, the backends will be expecting a pointer to
4738 vatype, but it's possible we've actually been given an array
4739 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4741 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4743 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4744 valist = build_fold_addr_expr_with_type (valist, p1);
4753 if (! TREE_SIDE_EFFECTS (valist))
4756 pt = build_pointer_type (vatype);
4757 valist = fold_build1 (ADDR_EXPR, pt, valist);
4758 TREE_SIDE_EFFECTS (valist) = 1;
4761 if (TREE_SIDE_EFFECTS (valist))
4762 valist = save_expr (valist);
4763 valist = build_fold_indirect_ref (valist);
4769 /* The "standard" definition of va_list is void*. */
4772 std_build_builtin_va_list (void)
4774 return ptr_type_node;
4777 /* The "standard" abi va_list is va_list_type_node. */
4780 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4782 return va_list_type_node;
4785 /* The "standard" type of va_list is va_list_type_node. */
4788 std_canonical_va_list_type (tree type)
4792 if (INDIRECT_REF_P (type))
4793 type = TREE_TYPE (type);
4794 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4795 type = TREE_TYPE (type);
4796 wtype = va_list_type_node;
4798 /* Treat structure va_list types. */
4799 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4800 htype = TREE_TYPE (htype);
4801 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4803 /* If va_list is an array type, the argument may have decayed
4804 to a pointer type, e.g. by being passed to another function.
4805 In that case, unwrap both types so that we can compare the
4806 underlying records. */
4807 if (TREE_CODE (htype) == ARRAY_TYPE
4808 || POINTER_TYPE_P (htype))
4810 wtype = TREE_TYPE (wtype);
4811 htype = TREE_TYPE (htype);
4814 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4815 return va_list_type_node;
4820 /* The "standard" implementation of va_start: just assign `nextarg' to
4824 std_expand_builtin_va_start (tree valist, rtx nextarg)
4826 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4827 convert_move (va_r, nextarg, 0);
4830 /* Expand EXP, a call to __builtin_va_start. */
4833 expand_builtin_va_start (tree exp)
4838 if (call_expr_nargs (exp) < 2)
4840 error ("too few arguments to function %<va_start%>");
4844 if (fold_builtin_next_arg (exp, true))
4847 nextarg = expand_builtin_next_arg ();
4848 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4850 if (targetm.expand_builtin_va_start)
4851 targetm.expand_builtin_va_start (valist, nextarg);
4853 std_expand_builtin_va_start (valist, nextarg);
4858 /* The "standard" implementation of va_arg: read the value from the
4859 current (padded) address and increment by the (padded) size. */
4862 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4865 tree addr, t, type_size, rounded_size, valist_tmp;
4866 unsigned HOST_WIDE_INT align, boundary;
4869 #ifdef ARGS_GROW_DOWNWARD
4870 /* All of the alignment and movement below is for args-grow-up machines.
4871 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4872 implement their own specialized gimplify_va_arg_expr routines. */
4876 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4878 type = build_pointer_type (type);
4880 align = PARM_BOUNDARY / BITS_PER_UNIT;
4881 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4883 /* When we align parameter on stack for caller, if the parameter
4884 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4885 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4886 here with caller. */
4887 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4888 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4890 boundary /= BITS_PER_UNIT;
4892 /* Hoist the valist value into a temporary for the moment. */
4893 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4895 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4896 requires greater alignment, we must perform dynamic alignment. */
4897 if (boundary > align
4898 && !integer_zerop (TYPE_SIZE (type)))
4900 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4901 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4902 valist_tmp, size_int (boundary - 1)));
4903 gimplify_and_add (t, pre_p);
4905 t = fold_convert (sizetype, valist_tmp);
4906 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4907 fold_convert (TREE_TYPE (valist),
4908 fold_build2 (BIT_AND_EXPR, sizetype, t,
4909 size_int (-boundary))));
4910 gimplify_and_add (t, pre_p);
4915 /* If the actual alignment is less than the alignment of the type,
4916 adjust the type accordingly so that we don't assume strict alignment
4917 when dereferencing the pointer. */
4918 boundary *= BITS_PER_UNIT;
4919 if (boundary < TYPE_ALIGN (type))
4921 type = build_variant_type_copy (type);
4922 TYPE_ALIGN (type) = boundary;
4925 /* Compute the rounded size of the type. */
4926 type_size = size_in_bytes (type);
4927 rounded_size = round_up (type_size, align);
4929 /* Reduce rounded_size so it's sharable with the postqueue. */
4930 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4934 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4936 /* Small args are padded downward. */
4937 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4938 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4939 size_binop (MINUS_EXPR, rounded_size, type_size));
4940 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4943 /* Compute new value for AP. */
4944 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4945 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4946 gimplify_and_add (t, pre_p);
4948 addr = fold_convert (build_pointer_type (type), addr);
4951 addr = build_va_arg_indirect_ref (addr);
4953 return build_va_arg_indirect_ref (addr);
4956 /* Build an indirect-ref expression over the given TREE, which represents a
4957 piece of a va_arg() expansion. */
4959 build_va_arg_indirect_ref (tree addr)
4961 addr = build_fold_indirect_ref (addr);
4963 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4969 /* Return a dummy expression of type TYPE in order to keep going after an
4973 dummy_object (tree type)
4975 tree t = build_int_cst (build_pointer_type (type), 0);
4976 return build1 (INDIRECT_REF, type, t);
4979 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4980 builtin function, but a very special sort of operator. */
4982 enum gimplify_status
4983 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4985 tree promoted_type, have_va_type;
4986 tree valist = TREE_OPERAND (*expr_p, 0);
4987 tree type = TREE_TYPE (*expr_p);
4989 location_t loc = EXPR_HAS_LOCATION (*expr_p) ? EXPR_LOCATION (*expr_p) :
4992 /* Verify that valist is of the proper type. */
4993 have_va_type = TREE_TYPE (valist);
4994 if (have_va_type == error_mark_node)
4996 have_va_type = targetm.canonical_va_list_type (have_va_type);
4998 if (have_va_type == NULL_TREE)
5000 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
5004 /* Generate a diagnostic for requesting data of a type that cannot
5005 be passed through `...' due to type promotion at the call site. */
5006 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
5009 static bool gave_help;
5012 /* Unfortunately, this is merely undefined, rather than a constraint
5013 violation, so we cannot make this an error. If this call is never
5014 executed, the program is still strictly conforming. */
5015 warned = warning_at (loc, 0,
5016 "%qT is promoted to %qT when passed through %<...%>",
5017 type, promoted_type);
5018 if (!gave_help && warned)
5021 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
5022 promoted_type, type);
5025 /* We can, however, treat "undefined" any way we please.
5026 Call abort to encourage the user to fix the program. */
5028 inform (loc, "if this code is reached, the program will abort");
5029 /* Before the abort, allow the evaluation of the va_list
5030 expression to exit or longjmp. */
5031 gimplify_and_add (valist, pre_p);
5032 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
5033 gimplify_and_add (t, pre_p);
5035 /* This is dead code, but go ahead and finish so that the
5036 mode of the result comes out right. */
5037 *expr_p = dummy_object (type);
5042 /* Make it easier for the backends by protecting the valist argument
5043 from multiple evaluations. */
5044 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
5046 /* For this case, the backends will be expecting a pointer to
5047 TREE_TYPE (abi), but it's possible we've
5048 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
5050 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5052 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
5053 valist = fold_convert (p1, build_fold_addr_expr (valist));
5056 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
5059 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
5061 if (!targetm.gimplify_va_arg_expr)
5062 /* FIXME: Once most targets are converted we should merely
5063 assert this is non-null. */
5066 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
5071 /* Expand EXP, a call to __builtin_va_end. */
5074 expand_builtin_va_end (tree exp)
5076 tree valist = CALL_EXPR_ARG (exp, 0);
5078 /* Evaluate for side effects, if needed. I hate macros that don't
5080 if (TREE_SIDE_EFFECTS (valist))
5081 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5086 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5087 builtin rather than just as an assignment in stdarg.h because of the
5088 nastiness of array-type va_list types. */
5091 expand_builtin_va_copy (tree exp)
5095 dst = CALL_EXPR_ARG (exp, 0);
5096 src = CALL_EXPR_ARG (exp, 1);
5098 dst = stabilize_va_list (dst, 1);
5099 src = stabilize_va_list (src, 0);
5101 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5103 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5105 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5106 TREE_SIDE_EFFECTS (t) = 1;
5107 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5111 rtx dstb, srcb, size;
5113 /* Evaluate to pointers. */
5114 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5115 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5116 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5117 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5119 dstb = convert_memory_address (Pmode, dstb);
5120 srcb = convert_memory_address (Pmode, srcb);
5122 /* "Dereference" to BLKmode memories. */
5123 dstb = gen_rtx_MEM (BLKmode, dstb);
5124 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5125 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5126 srcb = gen_rtx_MEM (BLKmode, srcb);
5127 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5128 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5131 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5137 /* Expand a call to one of the builtin functions __builtin_frame_address or
5138 __builtin_return_address. */
5141 expand_builtin_frame_address (tree fndecl, tree exp)
5143 /* The argument must be a nonnegative integer constant.
5144 It counts the number of frames to scan up the stack.
5145 The value is the return address saved in that frame. */
5146 if (call_expr_nargs (exp) == 0)
5147 /* Warning about missing arg was already issued. */
5149 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5151 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5152 error ("invalid argument to %<__builtin_frame_address%>");
5154 error ("invalid argument to %<__builtin_return_address%>");
5160 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5161 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5163 /* Some ports cannot access arbitrary stack frames. */
5166 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5167 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5169 warning (0, "unsupported argument to %<__builtin_return_address%>");
5173 /* For __builtin_frame_address, return what we've got. */
5174 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5178 && ! CONSTANT_P (tem))
5179 tem = copy_to_mode_reg (Pmode, tem);
5184 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5185 we failed and the caller should emit a normal call, otherwise try to get
5186 the result in TARGET, if convenient. */
5189 expand_builtin_alloca (tree exp, rtx target)
5194 /* Emit normal call if marked not-inlineable. */
5195 if (CALL_CANNOT_INLINE_P (exp))
5198 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5201 /* Compute the argument. */
5202 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5204 /* Allocate the desired space. */
5205 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5206 result = convert_memory_address (ptr_mode, result);
5211 /* Expand a call to a bswap builtin with argument ARG0. MODE
5212 is the mode to expand with. */
5215 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5217 enum machine_mode mode;
5221 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5224 arg = CALL_EXPR_ARG (exp, 0);
5225 mode = TYPE_MODE (TREE_TYPE (arg));
5226 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5228 target = expand_unop (mode, bswap_optab, op0, target, 1);
5230 gcc_assert (target);
5232 return convert_to_mode (mode, target, 0);
5235 /* Expand a call to a unary builtin in EXP.
5236 Return NULL_RTX if a normal call should be emitted rather than expanding the
5237 function in-line. If convenient, the result should be placed in TARGET.
5238 SUBTARGET may be used as the target for computing one of EXP's operands. */
5241 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5242 rtx subtarget, optab op_optab)
5246 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5249 /* Compute the argument. */
5250 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5251 VOIDmode, EXPAND_NORMAL);
5252 /* Compute op, into TARGET if possible.
5253 Set TARGET to wherever the result comes back. */
5254 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5255 op_optab, op0, target, 1);
5256 gcc_assert (target);
5258 return convert_to_mode (target_mode, target, 0);
5261 /* If the string passed to fputs is a constant and is one character
5262 long, we attempt to transform this call into __builtin_fputc(). */
5265 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5267 /* Verify the arguments in the original call. */
5268 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5270 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5271 CALL_EXPR_ARG (exp, 1),
5272 (target == const0_rtx),
5273 unlocked, NULL_TREE);
5275 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5280 /* Expand a call to __builtin_expect. We just return our argument
5281 as the builtin_expect semantic should've been already executed by
5282 tree branch prediction pass. */
5285 expand_builtin_expect (tree exp, rtx target)
5289 if (call_expr_nargs (exp) < 2)
5291 arg = CALL_EXPR_ARG (exp, 0);
5292 c = CALL_EXPR_ARG (exp, 1);
5294 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5295 /* When guessing was done, the hints should be already stripped away. */
5296 gcc_assert (!flag_guess_branch_prob
5297 || optimize == 0 || errorcount || sorrycount);
5302 expand_builtin_trap (void)
5306 emit_insn (gen_trap ());
5309 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5313 /* Expand a call to __builtin_unreachable. We do nothing except emit
5314 a barrier saying that control flow will not pass here.
5316 It is the responsibility of the program being compiled to ensure
5317 that control flow does never reach __builtin_unreachable. */
5319 expand_builtin_unreachable (void)
5324 /* Expand EXP, a call to fabs, fabsf or fabsl.
5325 Return NULL_RTX if a normal call should be emitted rather than expanding
5326 the function inline. If convenient, the result should be placed
5327 in TARGET. SUBTARGET may be used as the target for computing
5331 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5333 enum machine_mode mode;
5337 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5340 arg = CALL_EXPR_ARG (exp, 0);
5341 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5342 mode = TYPE_MODE (TREE_TYPE (arg));
5343 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5344 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5347 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5348 Return NULL is a normal call should be emitted rather than expanding the
5349 function inline. If convenient, the result should be placed in TARGET.
5350 SUBTARGET may be used as the target for computing the operand. */
5353 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5358 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5361 arg = CALL_EXPR_ARG (exp, 0);
5362 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5364 arg = CALL_EXPR_ARG (exp, 1);
5365 op1 = expand_normal (arg);
5367 return expand_copysign (op0, op1, target);
5370 /* Create a new constant string literal and return a char* pointer to it.
5371 The STRING_CST value is the LEN characters at STR. */
5373 build_string_literal (int len, const char *str)
5375 tree t, elem, index, type;
5377 t = build_string (len, str);
5378 elem = build_type_variant (char_type_node, 1, 0);
5379 index = build_index_type (size_int (len - 1));
5380 type = build_array_type (elem, index);
5381 TREE_TYPE (t) = type;
5382 TREE_CONSTANT (t) = 1;
5383 TREE_READONLY (t) = 1;
5384 TREE_STATIC (t) = 1;
5386 type = build_pointer_type (elem);
5387 t = build1 (ADDR_EXPR, type,
5388 build4 (ARRAY_REF, elem,
5389 t, integer_zero_node, NULL_TREE, NULL_TREE));
5393 /* Expand EXP, a call to printf or printf_unlocked.
5394 Return NULL_RTX if a normal call should be emitted rather than transforming
5395 the function inline. If convenient, the result should be placed in
5396 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5399 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5402 /* If we're using an unlocked function, assume the other unlocked
5403 functions exist explicitly. */
5404 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5405 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5406 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5407 : implicit_built_in_decls[BUILT_IN_PUTS];
5408 const char *fmt_str;
5411 int nargs = call_expr_nargs (exp);
5413 /* If the return value is used, don't do the transformation. */
5414 if (target != const0_rtx)
5417 /* Verify the required arguments in the original call. */
5420 fmt = CALL_EXPR_ARG (exp, 0);
5421 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5424 /* Check whether the format is a literal string constant. */
5425 fmt_str = c_getstr (fmt);
5426 if (fmt_str == NULL)
5429 if (!init_target_chars ())
5432 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5433 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5436 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5439 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5441 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5442 else if (strcmp (fmt_str, target_percent_c) == 0)
5445 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5448 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5452 /* We can't handle anything else with % args or %% ... yet. */
5453 if (strchr (fmt_str, target_percent))
5459 /* If the format specifier was "", printf does nothing. */
5460 if (fmt_str[0] == '\0')
5462 /* If the format specifier has length of 1, call putchar. */
5463 if (fmt_str[1] == '\0')
5465 /* Given printf("c"), (where c is any one character,)
5466 convert "c"[0] to an int and pass that to the replacement
5468 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5470 fn = build_call_expr (fn_putchar, 1, arg);
5474 /* If the format specifier was "string\n", call puts("string"). */
5475 size_t len = strlen (fmt_str);
5476 if ((unsigned char)fmt_str[len - 1] == target_newline)
5478 /* Create a NUL-terminated string that's one char shorter
5479 than the original, stripping off the trailing '\n'. */
5480 char *newstr = XALLOCAVEC (char, len);
5481 memcpy (newstr, fmt_str, len - 1);
5482 newstr[len - 1] = 0;
5483 arg = build_string_literal (len, newstr);
5485 fn = build_call_expr (fn_puts, 1, arg);
5488 /* We'd like to arrange to call fputs(string,stdout) here,
5489 but we need stdout and don't have a way to get it yet. */
5496 if (TREE_CODE (fn) == CALL_EXPR)
5497 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5498 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5501 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5502 Return NULL_RTX if a normal call should be emitted rather than transforming
5503 the function inline. If convenient, the result should be placed in
5504 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5507 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5510 /* If we're using an unlocked function, assume the other unlocked
5511 functions exist explicitly. */
5512 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5513 : implicit_built_in_decls[BUILT_IN_FPUTC];
5514 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5515 : implicit_built_in_decls[BUILT_IN_FPUTS];
5516 const char *fmt_str;
5519 int nargs = call_expr_nargs (exp);
5521 /* If the return value is used, don't do the transformation. */
5522 if (target != const0_rtx)
5525 /* Verify the required arguments in the original call. */
5528 fp = CALL_EXPR_ARG (exp, 0);
5529 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5531 fmt = CALL_EXPR_ARG (exp, 1);
5532 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5535 /* Check whether the format is a literal string constant. */
5536 fmt_str = c_getstr (fmt);
5537 if (fmt_str == NULL)
5540 if (!init_target_chars ())
5543 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5544 if (strcmp (fmt_str, target_percent_s) == 0)
5547 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5549 arg = CALL_EXPR_ARG (exp, 2);
5551 fn = build_call_expr (fn_fputs, 2, arg, fp);
5553 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5554 else if (strcmp (fmt_str, target_percent_c) == 0)
5557 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5559 arg = CALL_EXPR_ARG (exp, 2);
5561 fn = build_call_expr (fn_fputc, 2, arg, fp);
5565 /* We can't handle anything else with % args or %% ... yet. */
5566 if (strchr (fmt_str, target_percent))
5572 /* If the format specifier was "", fprintf does nothing. */
5573 if (fmt_str[0] == '\0')
5575 /* Evaluate and ignore FILE* argument for side-effects. */
5576 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5580 /* When "string" doesn't contain %, replace all cases of
5581 fprintf(stream,string) with fputs(string,stream). The fputs
5582 builtin will take care of special cases like length == 1. */
5584 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5589 if (TREE_CODE (fn) == CALL_EXPR)
5590 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5591 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5594 /* Expand a call EXP to sprintf. Return NULL_RTX if
5595 a normal call should be emitted rather than expanding the function
5596 inline. If convenient, the result should be placed in TARGET with
5600 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5603 const char *fmt_str;
5604 int nargs = call_expr_nargs (exp);
5606 /* Verify the required arguments in the original call. */
5609 dest = CALL_EXPR_ARG (exp, 0);
5610 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5612 fmt = CALL_EXPR_ARG (exp, 0);
5613 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5616 /* Check whether the format is a literal string constant. */
5617 fmt_str = c_getstr (fmt);
5618 if (fmt_str == NULL)
5621 if (!init_target_chars ())
5624 /* If the format doesn't contain % args or %%, use strcpy. */
5625 if (strchr (fmt_str, target_percent) == 0)
5627 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5630 if ((nargs > 2) || ! fn)
5632 expand_expr (build_call_expr (fn, 2, dest, fmt),
5633 const0_rtx, VOIDmode, EXPAND_NORMAL);
5634 if (target == const0_rtx)
5636 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5637 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5639 /* If the format is "%s", use strcpy if the result isn't used. */
5640 else if (strcmp (fmt_str, target_percent_s) == 0)
5643 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5649 arg = CALL_EXPR_ARG (exp, 2);
5650 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5653 if (target != const0_rtx)
5655 len = c_strlen (arg, 1);
5656 if (! len || TREE_CODE (len) != INTEGER_CST)
5662 expand_expr (build_call_expr (fn, 2, dest, arg),
5663 const0_rtx, VOIDmode, EXPAND_NORMAL);
5665 if (target == const0_rtx)
5667 return expand_expr (len, target, mode, EXPAND_NORMAL);
5673 /* Expand a call to either the entry or exit function profiler. */
5676 expand_builtin_profile_func (bool exitp)
5678 rtx this_rtx, which;
5680 this_rtx = DECL_RTL (current_function_decl);
5681 gcc_assert (MEM_P (this_rtx));
5682 this_rtx = XEXP (this_rtx, 0);
5685 which = profile_function_exit_libfunc;
5687 which = profile_function_entry_libfunc;
5689 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5690 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5697 /* Expand a call to __builtin___clear_cache. */
5700 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5702 #ifndef HAVE_clear_cache
5703 #ifdef CLEAR_INSN_CACHE
5704 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5705 does something. Just do the default expansion to a call to
5709 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5710 does nothing. There is no need to call it. Do nothing. */
5712 #endif /* CLEAR_INSN_CACHE */
5714 /* We have a "clear_cache" insn, and it will handle everything. */
5716 rtx begin_rtx, end_rtx;
5717 enum insn_code icode;
5719 /* We must not expand to a library call. If we did, any
5720 fallback library function in libgcc that might contain a call to
5721 __builtin___clear_cache() would recurse infinitely. */
5722 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5724 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5728 if (HAVE_clear_cache)
5730 icode = CODE_FOR_clear_cache;
5732 begin = CALL_EXPR_ARG (exp, 0);
5733 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5734 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5735 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5736 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5738 end = CALL_EXPR_ARG (exp, 1);
5739 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5740 end_rtx = convert_memory_address (Pmode, end_rtx);
5741 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5742 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5744 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5747 #endif /* HAVE_clear_cache */
5750 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5753 round_trampoline_addr (rtx tramp)
5755 rtx temp, addend, mask;
5757 /* If we don't need too much alignment, we'll have been guaranteed
5758 proper alignment by get_trampoline_type. */
5759 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5762 /* Round address up to desired boundary. */
5763 temp = gen_reg_rtx (Pmode);
5764 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5765 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5767 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5768 temp, 0, OPTAB_LIB_WIDEN);
5769 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5770 temp, 0, OPTAB_LIB_WIDEN);
5776 expand_builtin_init_trampoline (tree exp)
5778 tree t_tramp, t_func, t_chain;
5779 rtx r_tramp, r_func, r_chain;
5780 #ifdef TRAMPOLINE_TEMPLATE
5784 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5785 POINTER_TYPE, VOID_TYPE))
5788 t_tramp = CALL_EXPR_ARG (exp, 0);
5789 t_func = CALL_EXPR_ARG (exp, 1);
5790 t_chain = CALL_EXPR_ARG (exp, 2);
5792 r_tramp = expand_normal (t_tramp);
5793 r_func = expand_normal (t_func);
5794 r_chain = expand_normal (t_chain);
5796 /* Generate insns to initialize the trampoline. */
5797 r_tramp = round_trampoline_addr (r_tramp);
5798 #ifdef TRAMPOLINE_TEMPLATE
5799 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5800 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5801 emit_block_move (blktramp, assemble_trampoline_template (),
5802 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5804 trampolines_created = 1;
5805 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5811 expand_builtin_adjust_trampoline (tree exp)
5815 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5818 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5819 tramp = round_trampoline_addr (tramp);
5820 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5821 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5827 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5828 function. The function first checks whether the back end provides
5829 an insn to implement signbit for the respective mode. If not, it
5830 checks whether the floating point format of the value is such that
5831 the sign bit can be extracted. If that is not the case, the
5832 function returns NULL_RTX to indicate that a normal call should be
5833 emitted rather than expanding the function in-line. EXP is the
5834 expression that is a call to the builtin function; if convenient,
5835 the result should be placed in TARGET. */
5837 expand_builtin_signbit (tree exp, rtx target)
5839 const struct real_format *fmt;
5840 enum machine_mode fmode, imode, rmode;
5841 HOST_WIDE_INT hi, lo;
5844 enum insn_code icode;
5847 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5850 arg = CALL_EXPR_ARG (exp, 0);
5851 fmode = TYPE_MODE (TREE_TYPE (arg));
5852 rmode = TYPE_MODE (TREE_TYPE (exp));
5853 fmt = REAL_MODE_FORMAT (fmode);
5855 arg = builtin_save_expr (arg);
5857 /* Expand the argument yielding a RTX expression. */
5858 temp = expand_normal (arg);
5860 /* Check if the back end provides an insn that handles signbit for the
5862 icode = signbit_optab->handlers [(int) fmode].insn_code;
5863 if (icode != CODE_FOR_nothing)
5865 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5866 emit_unop_insn (icode, target, temp, UNKNOWN);
5870 /* For floating point formats without a sign bit, implement signbit
5872 bitpos = fmt->signbit_ro;
5875 /* But we can't do this if the format supports signed zero. */
5876 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5879 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5880 build_real (TREE_TYPE (arg), dconst0));
5881 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5884 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5886 imode = int_mode_for_mode (fmode);
5887 if (imode == BLKmode)
5889 temp = gen_lowpart (imode, temp);
5894 /* Handle targets with different FP word orders. */
5895 if (FLOAT_WORDS_BIG_ENDIAN)
5896 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5898 word = bitpos / BITS_PER_WORD;
5899 temp = operand_subword_force (temp, word, fmode);
5900 bitpos = bitpos % BITS_PER_WORD;
5903 /* Force the intermediate word_mode (or narrower) result into a
5904 register. This avoids attempting to create paradoxical SUBREGs
5905 of floating point modes below. */
5906 temp = force_reg (imode, temp);
5908 /* If the bitpos is within the "result mode" lowpart, the operation
5909 can be implement with a single bitwise AND. Otherwise, we need
5910 a right shift and an AND. */
5912 if (bitpos < GET_MODE_BITSIZE (rmode))
5914 if (bitpos < HOST_BITS_PER_WIDE_INT)
5917 lo = (HOST_WIDE_INT) 1 << bitpos;
5921 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5925 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5926 temp = gen_lowpart (rmode, temp);
5927 temp = expand_binop (rmode, and_optab, temp,
5928 immed_double_const (lo, hi, rmode),
5929 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5933 /* Perform a logical right shift to place the signbit in the least
5934 significant bit, then truncate the result to the desired mode
5935 and mask just this bit. */
5936 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5937 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5938 temp = gen_lowpart (rmode, temp);
5939 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5940 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5946 /* Expand fork or exec calls. TARGET is the desired target of the
5947 call. EXP is the call. FN is the
5948 identificator of the actual function. IGNORE is nonzero if the
5949 value is to be ignored. */
5952 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5957 /* If we are not profiling, just call the function. */
5958 if (!profile_arc_flag)
5961 /* Otherwise call the wrapper. This should be equivalent for the rest of
5962 compiler, so the code does not diverge, and the wrapper may run the
5963 code necessary for keeping the profiling sane. */
5965 switch (DECL_FUNCTION_CODE (fn))
5968 id = get_identifier ("__gcov_fork");
5971 case BUILT_IN_EXECL:
5972 id = get_identifier ("__gcov_execl");
5975 case BUILT_IN_EXECV:
5976 id = get_identifier ("__gcov_execv");
5979 case BUILT_IN_EXECLP:
5980 id = get_identifier ("__gcov_execlp");
5983 case BUILT_IN_EXECLE:
5984 id = get_identifier ("__gcov_execle");
5987 case BUILT_IN_EXECVP:
5988 id = get_identifier ("__gcov_execvp");
5991 case BUILT_IN_EXECVE:
5992 id = get_identifier ("__gcov_execve");
5999 decl = build_decl (DECL_SOURCE_LOCATION (fn),
6000 FUNCTION_DECL, id, TREE_TYPE (fn));
6001 DECL_EXTERNAL (decl) = 1;
6002 TREE_PUBLIC (decl) = 1;
6003 DECL_ARTIFICIAL (decl) = 1;
6004 TREE_NOTHROW (decl) = 1;
6005 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6006 DECL_VISIBILITY_SPECIFIED (decl) = 1;
6007 call = rewrite_call_expr (exp, 0, decl, 0);
6008 return expand_call (call, target, ignore);
6013 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6014 the pointer in these functions is void*, the tree optimizers may remove
6015 casts. The mode computed in expand_builtin isn't reliable either, due
6016 to __sync_bool_compare_and_swap.
6018 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6019 group of builtins. This gives us log2 of the mode size. */
6021 static inline enum machine_mode
6022 get_builtin_sync_mode (int fcode_diff)
6024 /* The size is not negotiable, so ask not to get BLKmode in return
6025 if the target indicates that a smaller size would be better. */
6026 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
6029 /* Expand the memory expression LOC and return the appropriate memory operand
6030 for the builtin_sync operations. */
6033 get_builtin_sync_mem (tree loc, enum machine_mode mode)
6037 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
6039 /* Note that we explicitly do not want any alias information for this
6040 memory, so that we kill all other live memories. Otherwise we don't
6041 satisfy the full barrier semantics of the intrinsic. */
6042 mem = validize_mem (gen_rtx_MEM (mode, addr));
6044 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
6045 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6046 MEM_VOLATILE_P (mem) = 1;
6051 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6052 EXP is the CALL_EXPR. CODE is the rtx code
6053 that corresponds to the arithmetic or logical operation from the name;
6054 an exception here is that NOT actually means NAND. TARGET is an optional
6055 place for us to store the results; AFTER is true if this is the
6056 fetch_and_xxx form. IGNORE is true if we don't actually care about
6057 the result of the operation at all. */
6060 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
6061 enum rtx_code code, bool after,
6062 rtx target, bool ignore)
6065 enum machine_mode old_mode;
6066 location_t loc = EXPR_LOCATION (exp);
6068 if (code == NOT && warn_sync_nand)
6070 tree fndecl = get_callee_fndecl (exp);
6071 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6073 static bool warned_f_a_n, warned_n_a_f;
6077 case BUILT_IN_FETCH_AND_NAND_1:
6078 case BUILT_IN_FETCH_AND_NAND_2:
6079 case BUILT_IN_FETCH_AND_NAND_4:
6080 case BUILT_IN_FETCH_AND_NAND_8:
6081 case BUILT_IN_FETCH_AND_NAND_16:
6086 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
6087 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6088 warned_f_a_n = true;
6091 case BUILT_IN_NAND_AND_FETCH_1:
6092 case BUILT_IN_NAND_AND_FETCH_2:
6093 case BUILT_IN_NAND_AND_FETCH_4:
6094 case BUILT_IN_NAND_AND_FETCH_8:
6095 case BUILT_IN_NAND_AND_FETCH_16:
6100 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
6101 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6102 warned_n_a_f = true;
6110 /* Expand the operands. */
6111 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6113 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6114 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6115 of CONST_INTs, where we know the old_mode only from the call argument. */
6116 old_mode = GET_MODE (val);
6117 if (old_mode == VOIDmode)
6118 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6119 val = convert_modes (mode, old_mode, val, 1);
6122 return expand_sync_operation (mem, val, code);
6124 return expand_sync_fetch_operation (mem, val, code, after, target);
6127 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6128 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6129 true if this is the boolean form. TARGET is a place for us to store the
6130 results; this is NOT optional if IS_BOOL is true. */
6133 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
6134 bool is_bool, rtx target)
6136 rtx old_val, new_val, mem;
6137 enum machine_mode old_mode;
6139 /* Expand the operands. */
6140 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6143 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
6144 mode, EXPAND_NORMAL);
6145 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6146 of CONST_INTs, where we know the old_mode only from the call argument. */
6147 old_mode = GET_MODE (old_val);
6148 if (old_mode == VOIDmode)
6149 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6150 old_val = convert_modes (mode, old_mode, old_val, 1);
6152 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
6153 mode, EXPAND_NORMAL);
6154 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6155 of CONST_INTs, where we know the old_mode only from the call argument. */
6156 old_mode = GET_MODE (new_val);
6157 if (old_mode == VOIDmode)
6158 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6159 new_val = convert_modes (mode, old_mode, new_val, 1);
6162 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6164 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6167 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6168 general form is actually an atomic exchange, and some targets only
6169 support a reduced form with the second argument being a constant 1.
6170 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6174 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6178 enum machine_mode old_mode;
6180 /* Expand the operands. */
6181 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6182 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6183 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6184 of CONST_INTs, where we know the old_mode only from the call argument. */
6185 old_mode = GET_MODE (val);
6186 if (old_mode == VOIDmode)
6187 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6188 val = convert_modes (mode, old_mode, val, 1);
6190 return expand_sync_lock_test_and_set (mem, val, target);
6193 /* Expand the __sync_synchronize intrinsic. */
6196 expand_builtin_synchronize (void)
6200 #ifdef HAVE_memory_barrier
6201 if (HAVE_memory_barrier)
6203 emit_insn (gen_memory_barrier ());
6208 if (synchronize_libfunc != NULL_RTX)
6210 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6214 /* If no explicit memory barrier instruction is available, create an
6215 empty asm stmt with a memory clobber. */
6216 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6217 tree_cons (NULL, build_string (6, "memory"), NULL));
6218 ASM_VOLATILE_P (x) = 1;
6219 expand_asm_expr (x);
6222 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6225 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6227 enum insn_code icode;
6229 rtx val = const0_rtx;
6231 /* Expand the operands. */
6232 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6234 /* If there is an explicit operation in the md file, use it. */
6235 icode = sync_lock_release[mode];
6236 if (icode != CODE_FOR_nothing)
6238 if (!insn_data[icode].operand[1].predicate (val, mode))
6239 val = force_reg (mode, val);
6241 insn = GEN_FCN (icode) (mem, val);
6249 /* Otherwise we can implement this operation by emitting a barrier
6250 followed by a store of zero. */
6251 expand_builtin_synchronize ();
6252 emit_move_insn (mem, val);
6255 /* Expand an expression EXP that calls a built-in function,
6256 with result going to TARGET if that's convenient
6257 (and in mode MODE if that's convenient).
6258 SUBTARGET may be used as the target for computing one of EXP's operands.
6259 IGNORE is nonzero if the value is to be ignored. */
6262 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6265 tree fndecl = get_callee_fndecl (exp);
6266 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6267 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6269 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6270 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6272 /* When not optimizing, generate calls to library functions for a certain
6275 && !called_as_built_in (fndecl)
6276 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6277 && fcode != BUILT_IN_ALLOCA
6278 && fcode != BUILT_IN_FREE)
6279 return expand_call (exp, target, ignore);
6281 /* The built-in function expanders test for target == const0_rtx
6282 to determine whether the function's result will be ignored. */
6284 target = const0_rtx;
6286 /* If the result of a pure or const built-in function is ignored, and
6287 none of its arguments are volatile, we can avoid expanding the
6288 built-in call and just evaluate the arguments for side-effects. */
6289 if (target == const0_rtx
6290 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6292 bool volatilep = false;
6294 call_expr_arg_iterator iter;
6296 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6297 if (TREE_THIS_VOLATILE (arg))
6305 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6306 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6313 CASE_FLT_FN (BUILT_IN_FABS):
6314 target = expand_builtin_fabs (exp, target, subtarget);
6319 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6320 target = expand_builtin_copysign (exp, target, subtarget);
6325 /* Just do a normal library call if we were unable to fold
6327 CASE_FLT_FN (BUILT_IN_CABS):
6330 CASE_FLT_FN (BUILT_IN_EXP):
6331 CASE_FLT_FN (BUILT_IN_EXP10):
6332 CASE_FLT_FN (BUILT_IN_POW10):
6333 CASE_FLT_FN (BUILT_IN_EXP2):
6334 CASE_FLT_FN (BUILT_IN_EXPM1):
6335 CASE_FLT_FN (BUILT_IN_LOGB):
6336 CASE_FLT_FN (BUILT_IN_LOG):
6337 CASE_FLT_FN (BUILT_IN_LOG10):
6338 CASE_FLT_FN (BUILT_IN_LOG2):
6339 CASE_FLT_FN (BUILT_IN_LOG1P):
6340 CASE_FLT_FN (BUILT_IN_TAN):
6341 CASE_FLT_FN (BUILT_IN_ASIN):
6342 CASE_FLT_FN (BUILT_IN_ACOS):
6343 CASE_FLT_FN (BUILT_IN_ATAN):
6344 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6345 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6346 because of possible accuracy problems. */
6347 if (! flag_unsafe_math_optimizations)
6349 CASE_FLT_FN (BUILT_IN_SQRT):
6350 CASE_FLT_FN (BUILT_IN_FLOOR):
6351 CASE_FLT_FN (BUILT_IN_CEIL):
6352 CASE_FLT_FN (BUILT_IN_TRUNC):
6353 CASE_FLT_FN (BUILT_IN_ROUND):
6354 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6355 CASE_FLT_FN (BUILT_IN_RINT):
6356 target = expand_builtin_mathfn (exp, target, subtarget);
6361 CASE_FLT_FN (BUILT_IN_ILOGB):
6362 if (! flag_unsafe_math_optimizations)
6364 CASE_FLT_FN (BUILT_IN_ISINF):
6365 CASE_FLT_FN (BUILT_IN_FINITE):
6366 case BUILT_IN_ISFINITE:
6367 case BUILT_IN_ISNORMAL:
6368 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6373 CASE_FLT_FN (BUILT_IN_LCEIL):
6374 CASE_FLT_FN (BUILT_IN_LLCEIL):
6375 CASE_FLT_FN (BUILT_IN_LFLOOR):
6376 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6377 target = expand_builtin_int_roundingfn (exp, target);
6382 CASE_FLT_FN (BUILT_IN_LRINT):
6383 CASE_FLT_FN (BUILT_IN_LLRINT):
6384 CASE_FLT_FN (BUILT_IN_LROUND):
6385 CASE_FLT_FN (BUILT_IN_LLROUND):
6386 target = expand_builtin_int_roundingfn_2 (exp, target);
6391 CASE_FLT_FN (BUILT_IN_POW):
6392 target = expand_builtin_pow (exp, target, subtarget);
6397 CASE_FLT_FN (BUILT_IN_POWI):
6398 target = expand_builtin_powi (exp, target, subtarget);
6403 CASE_FLT_FN (BUILT_IN_ATAN2):
6404 CASE_FLT_FN (BUILT_IN_LDEXP):
6405 CASE_FLT_FN (BUILT_IN_SCALB):
6406 CASE_FLT_FN (BUILT_IN_SCALBN):
6407 CASE_FLT_FN (BUILT_IN_SCALBLN):
6408 if (! flag_unsafe_math_optimizations)
6411 CASE_FLT_FN (BUILT_IN_FMOD):
6412 CASE_FLT_FN (BUILT_IN_REMAINDER):
6413 CASE_FLT_FN (BUILT_IN_DREM):
6414 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6419 CASE_FLT_FN (BUILT_IN_CEXPI):
6420 target = expand_builtin_cexpi (exp, target, subtarget);
6421 gcc_assert (target);
6424 CASE_FLT_FN (BUILT_IN_SIN):
6425 CASE_FLT_FN (BUILT_IN_COS):
6426 if (! flag_unsafe_math_optimizations)
6428 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6433 CASE_FLT_FN (BUILT_IN_SINCOS):
6434 if (! flag_unsafe_math_optimizations)
6436 target = expand_builtin_sincos (exp);
6441 case BUILT_IN_APPLY_ARGS:
6442 return expand_builtin_apply_args ();
6444 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6445 FUNCTION with a copy of the parameters described by
6446 ARGUMENTS, and ARGSIZE. It returns a block of memory
6447 allocated on the stack into which is stored all the registers
6448 that might possibly be used for returning the result of a
6449 function. ARGUMENTS is the value returned by
6450 __builtin_apply_args. ARGSIZE is the number of bytes of
6451 arguments that must be copied. ??? How should this value be
6452 computed? We'll also need a safe worst case value for varargs
6454 case BUILT_IN_APPLY:
6455 if (!validate_arglist (exp, POINTER_TYPE,
6456 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6457 && !validate_arglist (exp, REFERENCE_TYPE,
6458 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6464 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6465 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6466 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6468 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6471 /* __builtin_return (RESULT) causes the function to return the
6472 value described by RESULT. RESULT is address of the block of
6473 memory returned by __builtin_apply. */
6474 case BUILT_IN_RETURN:
6475 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6476 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6479 case BUILT_IN_SAVEREGS:
6480 return expand_builtin_saveregs ();
6482 case BUILT_IN_ARGS_INFO:
6483 return expand_builtin_args_info (exp);
6485 case BUILT_IN_VA_ARG_PACK:
6486 /* All valid uses of __builtin_va_arg_pack () are removed during
6488 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6491 case BUILT_IN_VA_ARG_PACK_LEN:
6492 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6494 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6497 /* Return the address of the first anonymous stack arg. */
6498 case BUILT_IN_NEXT_ARG:
6499 if (fold_builtin_next_arg (exp, false))
6501 return expand_builtin_next_arg ();
6503 case BUILT_IN_CLEAR_CACHE:
6504 target = expand_builtin___clear_cache (exp);
6509 case BUILT_IN_CLASSIFY_TYPE:
6510 return expand_builtin_classify_type (exp);
6512 case BUILT_IN_CONSTANT_P:
6515 case BUILT_IN_FRAME_ADDRESS:
6516 case BUILT_IN_RETURN_ADDRESS:
6517 return expand_builtin_frame_address (fndecl, exp);
6519 /* Returns the address of the area where the structure is returned.
6521 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6522 if (call_expr_nargs (exp) != 0
6523 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6524 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6527 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6529 case BUILT_IN_ALLOCA:
6530 target = expand_builtin_alloca (exp, target);
6535 case BUILT_IN_STACK_SAVE:
6536 return expand_stack_save ();
6538 case BUILT_IN_STACK_RESTORE:
6539 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6542 case BUILT_IN_BSWAP32:
6543 case BUILT_IN_BSWAP64:
6544 target = expand_builtin_bswap (exp, target, subtarget);
6550 CASE_INT_FN (BUILT_IN_FFS):
6551 case BUILT_IN_FFSIMAX:
6552 target = expand_builtin_unop (target_mode, exp, target,
6553 subtarget, ffs_optab);
6558 CASE_INT_FN (BUILT_IN_CLZ):
6559 case BUILT_IN_CLZIMAX:
6560 target = expand_builtin_unop (target_mode, exp, target,
6561 subtarget, clz_optab);
6566 CASE_INT_FN (BUILT_IN_CTZ):
6567 case BUILT_IN_CTZIMAX:
6568 target = expand_builtin_unop (target_mode, exp, target,
6569 subtarget, ctz_optab);
6574 CASE_INT_FN (BUILT_IN_POPCOUNT):
6575 case BUILT_IN_POPCOUNTIMAX:
6576 target = expand_builtin_unop (target_mode, exp, target,
6577 subtarget, popcount_optab);
6582 CASE_INT_FN (BUILT_IN_PARITY):
6583 case BUILT_IN_PARITYIMAX:
6584 target = expand_builtin_unop (target_mode, exp, target,
6585 subtarget, parity_optab);
6590 case BUILT_IN_STRLEN:
6591 target = expand_builtin_strlen (exp, target, target_mode);
6596 case BUILT_IN_STRCPY:
6597 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6602 case BUILT_IN_STRNCPY:
6603 target = expand_builtin_strncpy (exp, target, mode);
6608 case BUILT_IN_STPCPY:
6609 target = expand_builtin_stpcpy (exp, target, mode);
6614 case BUILT_IN_STRCAT:
6615 target = expand_builtin_strcat (fndecl, exp, target, mode);
6620 case BUILT_IN_STRNCAT:
6621 target = expand_builtin_strncat (exp, target, mode);
6626 case BUILT_IN_STRSPN:
6627 target = expand_builtin_strspn (exp, target, mode);
6632 case BUILT_IN_STRCSPN:
6633 target = expand_builtin_strcspn (exp, target, mode);
6638 case BUILT_IN_STRSTR:
6639 target = expand_builtin_strstr (exp, target, mode);
6644 case BUILT_IN_STRPBRK:
6645 target = expand_builtin_strpbrk (exp, target, mode);
6650 case BUILT_IN_INDEX:
6651 case BUILT_IN_STRCHR:
6652 target = expand_builtin_strchr (exp, target, mode);
6657 case BUILT_IN_RINDEX:
6658 case BUILT_IN_STRRCHR:
6659 target = expand_builtin_strrchr (exp, target, mode);
6664 case BUILT_IN_MEMCPY:
6665 target = expand_builtin_memcpy (exp, target, mode);
6670 case BUILT_IN_MEMPCPY:
6671 target = expand_builtin_mempcpy (exp, target, mode);
6676 case BUILT_IN_MEMMOVE:
6677 target = expand_builtin_memmove (exp, target, mode, ignore);
6682 case BUILT_IN_BCOPY:
6683 target = expand_builtin_bcopy (exp, ignore);
6688 case BUILT_IN_MEMSET:
6689 target = expand_builtin_memset (exp, target, mode);
6694 case BUILT_IN_BZERO:
6695 target = expand_builtin_bzero (exp);
6700 case BUILT_IN_STRCMP:
6701 target = expand_builtin_strcmp (exp, target, mode);
6706 case BUILT_IN_STRNCMP:
6707 target = expand_builtin_strncmp (exp, target, mode);
6712 case BUILT_IN_MEMCHR:
6713 target = expand_builtin_memchr (exp, target, mode);
6719 case BUILT_IN_MEMCMP:
6720 target = expand_builtin_memcmp (exp, target, mode);
6725 case BUILT_IN_SETJMP:
6726 /* This should have been lowered to the builtins below. */
6729 case BUILT_IN_SETJMP_SETUP:
6730 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6731 and the receiver label. */
6732 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6734 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6735 VOIDmode, EXPAND_NORMAL);
6736 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6737 rtx label_r = label_rtx (label);
6739 /* This is copied from the handling of non-local gotos. */
6740 expand_builtin_setjmp_setup (buf_addr, label_r);
6741 nonlocal_goto_handler_labels
6742 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6743 nonlocal_goto_handler_labels);
6744 /* ??? Do not let expand_label treat us as such since we would
6745 not want to be both on the list of non-local labels and on
6746 the list of forced labels. */
6747 FORCED_LABEL (label) = 0;
6752 case BUILT_IN_SETJMP_DISPATCHER:
6753 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6754 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6756 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6757 rtx label_r = label_rtx (label);
6759 /* Remove the dispatcher label from the list of non-local labels
6760 since the receiver labels have been added to it above. */
6761 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6766 case BUILT_IN_SETJMP_RECEIVER:
6767 /* __builtin_setjmp_receiver is passed the receiver label. */
6768 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6770 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6771 rtx label_r = label_rtx (label);
6773 expand_builtin_setjmp_receiver (label_r);
6778 /* __builtin_longjmp is passed a pointer to an array of five words.
6779 It's similar to the C library longjmp function but works with
6780 __builtin_setjmp above. */
6781 case BUILT_IN_LONGJMP:
6782 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6784 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6785 VOIDmode, EXPAND_NORMAL);
6786 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6788 if (value != const1_rtx)
6790 error ("%<__builtin_longjmp%> second argument must be 1");
6794 expand_builtin_longjmp (buf_addr, value);
6799 case BUILT_IN_NONLOCAL_GOTO:
6800 target = expand_builtin_nonlocal_goto (exp);
6805 /* This updates the setjmp buffer that is its argument with the value
6806 of the current stack pointer. */
6807 case BUILT_IN_UPDATE_SETJMP_BUF:
6808 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6811 = expand_normal (CALL_EXPR_ARG (exp, 0));
6813 expand_builtin_update_setjmp_buf (buf_addr);
6819 expand_builtin_trap ();
6822 case BUILT_IN_UNREACHABLE:
6823 expand_builtin_unreachable ();
6826 case BUILT_IN_PRINTF:
6827 target = expand_builtin_printf (exp, target, mode, false);
6832 case BUILT_IN_PRINTF_UNLOCKED:
6833 target = expand_builtin_printf (exp, target, mode, true);
6838 case BUILT_IN_FPUTS:
6839 target = expand_builtin_fputs (exp, target, false);
6843 case BUILT_IN_FPUTS_UNLOCKED:
6844 target = expand_builtin_fputs (exp, target, true);
6849 case BUILT_IN_FPRINTF:
6850 target = expand_builtin_fprintf (exp, target, mode, false);
6855 case BUILT_IN_FPRINTF_UNLOCKED:
6856 target = expand_builtin_fprintf (exp, target, mode, true);
6861 case BUILT_IN_SPRINTF:
6862 target = expand_builtin_sprintf (exp, target, mode);
6867 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6868 case BUILT_IN_SIGNBITD32:
6869 case BUILT_IN_SIGNBITD64:
6870 case BUILT_IN_SIGNBITD128:
6871 target = expand_builtin_signbit (exp, target);
6876 /* Various hooks for the DWARF 2 __throw routine. */
6877 case BUILT_IN_UNWIND_INIT:
6878 expand_builtin_unwind_init ();
6880 case BUILT_IN_DWARF_CFA:
6881 return virtual_cfa_rtx;
6882 #ifdef DWARF2_UNWIND_INFO
6883 case BUILT_IN_DWARF_SP_COLUMN:
6884 return expand_builtin_dwarf_sp_column ();
6885 case BUILT_IN_INIT_DWARF_REG_SIZES:
6886 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6889 case BUILT_IN_FROB_RETURN_ADDR:
6890 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6891 case BUILT_IN_EXTRACT_RETURN_ADDR:
6892 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6893 case BUILT_IN_EH_RETURN:
6894 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6895 CALL_EXPR_ARG (exp, 1));
6897 #ifdef EH_RETURN_DATA_REGNO
6898 case BUILT_IN_EH_RETURN_DATA_REGNO:
6899 return expand_builtin_eh_return_data_regno (exp);
6901 case BUILT_IN_EXTEND_POINTER:
6902 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6904 case BUILT_IN_VA_START:
6905 return expand_builtin_va_start (exp);
6906 case BUILT_IN_VA_END:
6907 return expand_builtin_va_end (exp);
6908 case BUILT_IN_VA_COPY:
6909 return expand_builtin_va_copy (exp);
6910 case BUILT_IN_EXPECT:
6911 return expand_builtin_expect (exp, target);
6912 case BUILT_IN_PREFETCH:
6913 expand_builtin_prefetch (exp);
6916 case BUILT_IN_PROFILE_FUNC_ENTER:
6917 return expand_builtin_profile_func (false);
6918 case BUILT_IN_PROFILE_FUNC_EXIT:
6919 return expand_builtin_profile_func (true);
6921 case BUILT_IN_INIT_TRAMPOLINE:
6922 return expand_builtin_init_trampoline (exp);
6923 case BUILT_IN_ADJUST_TRAMPOLINE:
6924 return expand_builtin_adjust_trampoline (exp);
6927 case BUILT_IN_EXECL:
6928 case BUILT_IN_EXECV:
6929 case BUILT_IN_EXECLP:
6930 case BUILT_IN_EXECLE:
6931 case BUILT_IN_EXECVP:
6932 case BUILT_IN_EXECVE:
6933 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6938 case BUILT_IN_FETCH_AND_ADD_1:
6939 case BUILT_IN_FETCH_AND_ADD_2:
6940 case BUILT_IN_FETCH_AND_ADD_4:
6941 case BUILT_IN_FETCH_AND_ADD_8:
6942 case BUILT_IN_FETCH_AND_ADD_16:
6943 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6944 target = expand_builtin_sync_operation (mode, exp, PLUS,
6945 false, target, ignore);
6950 case BUILT_IN_FETCH_AND_SUB_1:
6951 case BUILT_IN_FETCH_AND_SUB_2:
6952 case BUILT_IN_FETCH_AND_SUB_4:
6953 case BUILT_IN_FETCH_AND_SUB_8:
6954 case BUILT_IN_FETCH_AND_SUB_16:
6955 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6956 target = expand_builtin_sync_operation (mode, exp, MINUS,
6957 false, target, ignore);
6962 case BUILT_IN_FETCH_AND_OR_1:
6963 case BUILT_IN_FETCH_AND_OR_2:
6964 case BUILT_IN_FETCH_AND_OR_4:
6965 case BUILT_IN_FETCH_AND_OR_8:
6966 case BUILT_IN_FETCH_AND_OR_16:
6967 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6968 target = expand_builtin_sync_operation (mode, exp, IOR,
6969 false, target, ignore);
6974 case BUILT_IN_FETCH_AND_AND_1:
6975 case BUILT_IN_FETCH_AND_AND_2:
6976 case BUILT_IN_FETCH_AND_AND_4:
6977 case BUILT_IN_FETCH_AND_AND_8:
6978 case BUILT_IN_FETCH_AND_AND_16:
6979 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6980 target = expand_builtin_sync_operation (mode, exp, AND,
6981 false, target, ignore);
6986 case BUILT_IN_FETCH_AND_XOR_1:
6987 case BUILT_IN_FETCH_AND_XOR_2:
6988 case BUILT_IN_FETCH_AND_XOR_4:
6989 case BUILT_IN_FETCH_AND_XOR_8:
6990 case BUILT_IN_FETCH_AND_XOR_16:
6991 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6992 target = expand_builtin_sync_operation (mode, exp, XOR,
6993 false, target, ignore);
6998 case BUILT_IN_FETCH_AND_NAND_1:
6999 case BUILT_IN_FETCH_AND_NAND_2:
7000 case BUILT_IN_FETCH_AND_NAND_4:
7001 case BUILT_IN_FETCH_AND_NAND_8:
7002 case BUILT_IN_FETCH_AND_NAND_16:
7003 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
7004 target = expand_builtin_sync_operation (mode, exp, NOT,
7005 false, target, ignore);
7010 case BUILT_IN_ADD_AND_FETCH_1:
7011 case BUILT_IN_ADD_AND_FETCH_2:
7012 case BUILT_IN_ADD_AND_FETCH_4:
7013 case BUILT_IN_ADD_AND_FETCH_8:
7014 case BUILT_IN_ADD_AND_FETCH_16:
7015 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
7016 target = expand_builtin_sync_operation (mode, exp, PLUS,
7017 true, target, ignore);
7022 case BUILT_IN_SUB_AND_FETCH_1:
7023 case BUILT_IN_SUB_AND_FETCH_2:
7024 case BUILT_IN_SUB_AND_FETCH_4:
7025 case BUILT_IN_SUB_AND_FETCH_8:
7026 case BUILT_IN_SUB_AND_FETCH_16:
7027 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
7028 target = expand_builtin_sync_operation (mode, exp, MINUS,
7029 true, target, ignore);
7034 case BUILT_IN_OR_AND_FETCH_1:
7035 case BUILT_IN_OR_AND_FETCH_2:
7036 case BUILT_IN_OR_AND_FETCH_4:
7037 case BUILT_IN_OR_AND_FETCH_8:
7038 case BUILT_IN_OR_AND_FETCH_16:
7039 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
7040 target = expand_builtin_sync_operation (mode, exp, IOR,
7041 true, target, ignore);
7046 case BUILT_IN_AND_AND_FETCH_1:
7047 case BUILT_IN_AND_AND_FETCH_2:
7048 case BUILT_IN_AND_AND_FETCH_4:
7049 case BUILT_IN_AND_AND_FETCH_8:
7050 case BUILT_IN_AND_AND_FETCH_16:
7051 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
7052 target = expand_builtin_sync_operation (mode, exp, AND,
7053 true, target, ignore);
7058 case BUILT_IN_XOR_AND_FETCH_1:
7059 case BUILT_IN_XOR_AND_FETCH_2:
7060 case BUILT_IN_XOR_AND_FETCH_4:
7061 case BUILT_IN_XOR_AND_FETCH_8:
7062 case BUILT_IN_XOR_AND_FETCH_16:
7063 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
7064 target = expand_builtin_sync_operation (mode, exp, XOR,
7065 true, target, ignore);
7070 case BUILT_IN_NAND_AND_FETCH_1:
7071 case BUILT_IN_NAND_AND_FETCH_2:
7072 case BUILT_IN_NAND_AND_FETCH_4:
7073 case BUILT_IN_NAND_AND_FETCH_8:
7074 case BUILT_IN_NAND_AND_FETCH_16:
7075 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
7076 target = expand_builtin_sync_operation (mode, exp, NOT,
7077 true, target, ignore);
7082 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
7083 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
7084 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
7085 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
7086 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
7087 if (mode == VOIDmode)
7088 mode = TYPE_MODE (boolean_type_node);
7089 if (!target || !register_operand (target, mode))
7090 target = gen_reg_rtx (mode);
7092 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
7093 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7098 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
7099 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
7100 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
7101 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
7102 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
7103 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
7104 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7109 case BUILT_IN_LOCK_TEST_AND_SET_1:
7110 case BUILT_IN_LOCK_TEST_AND_SET_2:
7111 case BUILT_IN_LOCK_TEST_AND_SET_4:
7112 case BUILT_IN_LOCK_TEST_AND_SET_8:
7113 case BUILT_IN_LOCK_TEST_AND_SET_16:
7114 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
7115 target = expand_builtin_lock_test_and_set (mode, exp, target);
7120 case BUILT_IN_LOCK_RELEASE_1:
7121 case BUILT_IN_LOCK_RELEASE_2:
7122 case BUILT_IN_LOCK_RELEASE_4:
7123 case BUILT_IN_LOCK_RELEASE_8:
7124 case BUILT_IN_LOCK_RELEASE_16:
7125 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
7126 expand_builtin_lock_release (mode, exp);
7129 case BUILT_IN_SYNCHRONIZE:
7130 expand_builtin_synchronize ();
7133 case BUILT_IN_OBJECT_SIZE:
7134 return expand_builtin_object_size (exp);
7136 case BUILT_IN_MEMCPY_CHK:
7137 case BUILT_IN_MEMPCPY_CHK:
7138 case BUILT_IN_MEMMOVE_CHK:
7139 case BUILT_IN_MEMSET_CHK:
7140 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7145 case BUILT_IN_STRCPY_CHK:
7146 case BUILT_IN_STPCPY_CHK:
7147 case BUILT_IN_STRNCPY_CHK:
7148 case BUILT_IN_STRCAT_CHK:
7149 case BUILT_IN_STRNCAT_CHK:
7150 case BUILT_IN_SNPRINTF_CHK:
7151 case BUILT_IN_VSNPRINTF_CHK:
7152 maybe_emit_chk_warning (exp, fcode);
7155 case BUILT_IN_SPRINTF_CHK:
7156 case BUILT_IN_VSPRINTF_CHK:
7157 maybe_emit_sprintf_chk_warning (exp, fcode);
7161 maybe_emit_free_warning (exp);
7164 default: /* just do library call, if unknown builtin */
7168 /* The switch statement above can drop through to cause the function
7169 to be called normally. */
7170 return expand_call (exp, target, ignore);
7173 /* Determine whether a tree node represents a call to a built-in
7174 function. If the tree T is a call to a built-in function with
7175 the right number of arguments of the appropriate types, return
7176 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7177 Otherwise the return value is END_BUILTINS. */
7179 enum built_in_function
7180 builtin_mathfn_code (const_tree t)
7182 const_tree fndecl, arg, parmlist;
7183 const_tree argtype, parmtype;
7184 const_call_expr_arg_iterator iter;
7186 if (TREE_CODE (t) != CALL_EXPR
7187 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7188 return END_BUILTINS;
7190 fndecl = get_callee_fndecl (t);
7191 if (fndecl == NULL_TREE
7192 || TREE_CODE (fndecl) != FUNCTION_DECL
7193 || ! DECL_BUILT_IN (fndecl)
7194 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7195 return END_BUILTINS;
7197 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7198 init_const_call_expr_arg_iterator (t, &iter);
7199 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7201 /* If a function doesn't take a variable number of arguments,
7202 the last element in the list will have type `void'. */
7203 parmtype = TREE_VALUE (parmlist);
7204 if (VOID_TYPE_P (parmtype))
7206 if (more_const_call_expr_args_p (&iter))
7207 return END_BUILTINS;
7208 return DECL_FUNCTION_CODE (fndecl);
7211 if (! more_const_call_expr_args_p (&iter))
7212 return END_BUILTINS;
7214 arg = next_const_call_expr_arg (&iter);
7215 argtype = TREE_TYPE (arg);
7217 if (SCALAR_FLOAT_TYPE_P (parmtype))
7219 if (! SCALAR_FLOAT_TYPE_P (argtype))
7220 return END_BUILTINS;
7222 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7224 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7225 return END_BUILTINS;
7227 else if (POINTER_TYPE_P (parmtype))
7229 if (! POINTER_TYPE_P (argtype))
7230 return END_BUILTINS;
7232 else if (INTEGRAL_TYPE_P (parmtype))
7234 if (! INTEGRAL_TYPE_P (argtype))
7235 return END_BUILTINS;
7238 return END_BUILTINS;
7241 /* Variable-length argument list. */
7242 return DECL_FUNCTION_CODE (fndecl);
7245 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7246 evaluate to a constant. */
7249 fold_builtin_constant_p (tree arg)
7251 /* We return 1 for a numeric type that's known to be a constant
7252 value at compile-time or for an aggregate type that's a
7253 literal constant. */
7256 /* If we know this is a constant, emit the constant of one. */
7257 if (CONSTANT_CLASS_P (arg)
7258 || (TREE_CODE (arg) == CONSTRUCTOR
7259 && TREE_CONSTANT (arg)))
7260 return integer_one_node;
7261 if (TREE_CODE (arg) == ADDR_EXPR)
7263 tree op = TREE_OPERAND (arg, 0);
7264 if (TREE_CODE (op) == STRING_CST
7265 || (TREE_CODE (op) == ARRAY_REF
7266 && integer_zerop (TREE_OPERAND (op, 1))
7267 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7268 return integer_one_node;
7271 /* If this expression has side effects, show we don't know it to be a
7272 constant. Likewise if it's a pointer or aggregate type since in
7273 those case we only want literals, since those are only optimized
7274 when generating RTL, not later.
7275 And finally, if we are compiling an initializer, not code, we
7276 need to return a definite result now; there's not going to be any
7277 more optimization done. */
7278 if (TREE_SIDE_EFFECTS (arg)
7279 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7280 || POINTER_TYPE_P (TREE_TYPE (arg))
7282 || folding_initializer)
7283 return integer_zero_node;
7288 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7289 return it as a truthvalue. */
7292 build_builtin_expect_predicate (tree pred, tree expected)
7294 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7296 fn = built_in_decls[BUILT_IN_EXPECT];
7297 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7298 ret_type = TREE_TYPE (TREE_TYPE (fn));
7299 pred_type = TREE_VALUE (arg_types);
7300 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7302 pred = fold_convert (pred_type, pred);
7303 expected = fold_convert (expected_type, expected);
7304 call_expr = build_call_expr (fn, 2, pred, expected);
7306 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7307 build_int_cst (ret_type, 0));
7310 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7311 NULL_TREE if no simplification is possible. */
7314 fold_builtin_expect (tree arg0, tree arg1)
7317 enum tree_code code;
7319 /* If this is a builtin_expect within a builtin_expect keep the
7320 inner one. See through a comparison against a constant. It
7321 might have been added to create a thruthvalue. */
7323 if (COMPARISON_CLASS_P (inner)
7324 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7325 inner = TREE_OPERAND (inner, 0);
7327 if (TREE_CODE (inner) == CALL_EXPR
7328 && (fndecl = get_callee_fndecl (inner))
7329 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7330 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7333 /* Distribute the expected value over short-circuiting operators.
7334 See through the cast from truthvalue_type_node to long. */
7336 while (TREE_CODE (inner) == NOP_EXPR
7337 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7338 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7339 inner = TREE_OPERAND (inner, 0);
7341 code = TREE_CODE (inner);
7342 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7344 tree op0 = TREE_OPERAND (inner, 0);
7345 tree op1 = TREE_OPERAND (inner, 1);
7347 op0 = build_builtin_expect_predicate (op0, arg1);
7348 op1 = build_builtin_expect_predicate (op1, arg1);
7349 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7351 return fold_convert (TREE_TYPE (arg0), inner);
7354 /* If the argument isn't invariant then there's nothing else we can do. */
7355 if (!TREE_CONSTANT (arg0))
7358 /* If we expect that a comparison against the argument will fold to
7359 a constant return the constant. In practice, this means a true
7360 constant or the address of a non-weak symbol. */
7363 if (TREE_CODE (inner) == ADDR_EXPR)
7367 inner = TREE_OPERAND (inner, 0);
7369 while (TREE_CODE (inner) == COMPONENT_REF
7370 || TREE_CODE (inner) == ARRAY_REF);
7371 if ((TREE_CODE (inner) == VAR_DECL
7372 || TREE_CODE (inner) == FUNCTION_DECL)
7373 && DECL_WEAK (inner))
7377 /* Otherwise, ARG0 already has the proper type for the return value. */
7381 /* Fold a call to __builtin_classify_type with argument ARG. */
7384 fold_builtin_classify_type (tree arg)
7387 return build_int_cst (NULL_TREE, no_type_class);
7389 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7392 /* Fold a call to __builtin_strlen with argument ARG. */
7395 fold_builtin_strlen (tree arg)
7397 if (!validate_arg (arg, POINTER_TYPE))
7401 tree len = c_strlen (arg, 0);
7405 /* Convert from the internal "sizetype" type to "size_t". */
7407 len = fold_convert (size_type_node, len);
7415 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7418 fold_builtin_inf (tree type, int warn)
7420 REAL_VALUE_TYPE real;
7422 /* __builtin_inff is intended to be usable to define INFINITY on all
7423 targets. If an infinity is not available, INFINITY expands "to a
7424 positive constant of type float that overflows at translation
7425 time", footnote "In this case, using INFINITY will violate the
7426 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7427 Thus we pedwarn to ensure this constraint violation is
7429 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7430 pedwarn (input_location, 0, "target format does not support infinity");
7433 return build_real (type, real);
7436 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7439 fold_builtin_nan (tree arg, tree type, int quiet)
7441 REAL_VALUE_TYPE real;
7444 if (!validate_arg (arg, POINTER_TYPE))
7446 str = c_getstr (arg);
7450 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7453 return build_real (type, real);
7456 /* Return true if the floating point expression T has an integer value.
7457 We also allow +Inf, -Inf and NaN to be considered integer values. */
7460 integer_valued_real_p (tree t)
7462 switch (TREE_CODE (t))
7469 return integer_valued_real_p (TREE_OPERAND (t, 0));
7474 return integer_valued_real_p (TREE_OPERAND (t, 1));
7481 return integer_valued_real_p (TREE_OPERAND (t, 0))
7482 && integer_valued_real_p (TREE_OPERAND (t, 1));
7485 return integer_valued_real_p (TREE_OPERAND (t, 1))
7486 && integer_valued_real_p (TREE_OPERAND (t, 2));
7489 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7493 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7494 if (TREE_CODE (type) == INTEGER_TYPE)
7496 if (TREE_CODE (type) == REAL_TYPE)
7497 return integer_valued_real_p (TREE_OPERAND (t, 0));
7502 switch (builtin_mathfn_code (t))
7504 CASE_FLT_FN (BUILT_IN_CEIL):
7505 CASE_FLT_FN (BUILT_IN_FLOOR):
7506 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7507 CASE_FLT_FN (BUILT_IN_RINT):
7508 CASE_FLT_FN (BUILT_IN_ROUND):
7509 CASE_FLT_FN (BUILT_IN_TRUNC):
7512 CASE_FLT_FN (BUILT_IN_FMIN):
7513 CASE_FLT_FN (BUILT_IN_FMAX):
7514 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7515 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7528 /* FNDECL is assumed to be a builtin where truncation can be propagated
7529 across (for instance floor((double)f) == (double)floorf (f).
7530 Do the transformation for a call with argument ARG. */
7533 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7535 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7537 if (!validate_arg (arg, REAL_TYPE))
7540 /* Integer rounding functions are idempotent. */
7541 if (fcode == builtin_mathfn_code (arg))
7544 /* If argument is already integer valued, and we don't need to worry
7545 about setting errno, there's no need to perform rounding. */
7546 if (! flag_errno_math && integer_valued_real_p (arg))
7551 tree arg0 = strip_float_extensions (arg);
7552 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7553 tree newtype = TREE_TYPE (arg0);
7556 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7557 && (decl = mathfn_built_in (newtype, fcode)))
7558 return fold_convert (ftype,
7559 build_call_expr (decl, 1,
7560 fold_convert (newtype, arg0)));
7565 /* FNDECL is assumed to be builtin which can narrow the FP type of
7566 the argument, for instance lround((double)f) -> lroundf (f).
7567 Do the transformation for a call with argument ARG. */
7570 fold_fixed_mathfn (tree fndecl, tree arg)
7572 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7574 if (!validate_arg (arg, REAL_TYPE))
7577 /* If argument is already integer valued, and we don't need to worry
7578 about setting errno, there's no need to perform rounding. */
7579 if (! flag_errno_math && integer_valued_real_p (arg))
7580 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7584 tree ftype = TREE_TYPE (arg);
7585 tree arg0 = strip_float_extensions (arg);
7586 tree newtype = TREE_TYPE (arg0);
7589 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7590 && (decl = mathfn_built_in (newtype, fcode)))
7591 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7594 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7595 sizeof (long long) == sizeof (long). */
7596 if (TYPE_PRECISION (long_long_integer_type_node)
7597 == TYPE_PRECISION (long_integer_type_node))
7599 tree newfn = NULL_TREE;
7602 CASE_FLT_FN (BUILT_IN_LLCEIL):
7603 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7606 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7607 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7610 CASE_FLT_FN (BUILT_IN_LLROUND):
7611 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7614 CASE_FLT_FN (BUILT_IN_LLRINT):
7615 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7624 tree newcall = build_call_expr(newfn, 1, arg);
7625 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7632 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7633 return type. Return NULL_TREE if no simplification can be made. */
7636 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7640 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7641 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7644 /* Calculate the result when the argument is a constant. */
7645 if (TREE_CODE (arg) == COMPLEX_CST
7646 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7650 if (TREE_CODE (arg) == COMPLEX_EXPR)
7652 tree real = TREE_OPERAND (arg, 0);
7653 tree imag = TREE_OPERAND (arg, 1);
7655 /* If either part is zero, cabs is fabs of the other. */
7656 if (real_zerop (real))
7657 return fold_build1 (ABS_EXPR, type, imag);
7658 if (real_zerop (imag))
7659 return fold_build1 (ABS_EXPR, type, real);
7661 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7662 if (flag_unsafe_math_optimizations
7663 && operand_equal_p (real, imag, OEP_PURE_SAME))
7665 const REAL_VALUE_TYPE sqrt2_trunc
7666 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7668 return fold_build2 (MULT_EXPR, type,
7669 fold_build1 (ABS_EXPR, type, real),
7670 build_real (type, sqrt2_trunc));
7674 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7675 if (TREE_CODE (arg) == NEGATE_EXPR
7676 || TREE_CODE (arg) == CONJ_EXPR)
7677 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7679 /* Don't do this when optimizing for size. */
7680 if (flag_unsafe_math_optimizations
7681 && optimize && optimize_function_for_speed_p (cfun))
7683 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7685 if (sqrtfn != NULL_TREE)
7687 tree rpart, ipart, result;
7689 arg = builtin_save_expr (arg);
7691 rpart = fold_build1 (REALPART_EXPR, type, arg);
7692 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7694 rpart = builtin_save_expr (rpart);
7695 ipart = builtin_save_expr (ipart);
7697 result = fold_build2 (PLUS_EXPR, type,
7698 fold_build2 (MULT_EXPR, type,
7700 fold_build2 (MULT_EXPR, type,
7703 return build_call_expr (sqrtfn, 1, result);
7710 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7711 Return NULL_TREE if no simplification can be made. */
7714 fold_builtin_sqrt (tree arg, tree type)
7717 enum built_in_function fcode;
7720 if (!validate_arg (arg, REAL_TYPE))
7723 /* Calculate the result when the argument is a constant. */
7724 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7727 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7728 fcode = builtin_mathfn_code (arg);
7729 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7731 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7732 arg = fold_build2 (MULT_EXPR, type,
7733 CALL_EXPR_ARG (arg, 0),
7734 build_real (type, dconsthalf));
7735 return build_call_expr (expfn, 1, arg);
7738 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7739 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7741 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7745 tree arg0 = CALL_EXPR_ARG (arg, 0);
7747 /* The inner root was either sqrt or cbrt. */
7748 /* This was a conditional expression but it triggered a bug
7750 REAL_VALUE_TYPE dconstroot;
7751 if (BUILTIN_SQRT_P (fcode))
7752 dconstroot = dconsthalf;
7754 dconstroot = dconst_third ();
7756 /* Adjust for the outer root. */
7757 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7758 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7759 tree_root = build_real (type, dconstroot);
7760 return build_call_expr (powfn, 2, arg0, tree_root);
7764 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7765 if (flag_unsafe_math_optimizations
7766 && (fcode == BUILT_IN_POW
7767 || fcode == BUILT_IN_POWF
7768 || fcode == BUILT_IN_POWL))
7770 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7771 tree arg0 = CALL_EXPR_ARG (arg, 0);
7772 tree arg1 = CALL_EXPR_ARG (arg, 1);
7774 if (!tree_expr_nonnegative_p (arg0))
7775 arg0 = build1 (ABS_EXPR, type, arg0);
7776 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7777 build_real (type, dconsthalf));
7778 return build_call_expr (powfn, 2, arg0, narg1);
7784 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7785 Return NULL_TREE if no simplification can be made. */
7788 fold_builtin_cbrt (tree arg, tree type)
7790 const enum built_in_function fcode = builtin_mathfn_code (arg);
7793 if (!validate_arg (arg, REAL_TYPE))
7796 /* Calculate the result when the argument is a constant. */
7797 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7800 if (flag_unsafe_math_optimizations)
7802 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7803 if (BUILTIN_EXPONENT_P (fcode))
7805 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7806 const REAL_VALUE_TYPE third_trunc =
7807 real_value_truncate (TYPE_MODE (type), dconst_third ());
7808 arg = fold_build2 (MULT_EXPR, type,
7809 CALL_EXPR_ARG (arg, 0),
7810 build_real (type, third_trunc));
7811 return build_call_expr (expfn, 1, arg);
7814 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7815 if (BUILTIN_SQRT_P (fcode))
7817 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7821 tree arg0 = CALL_EXPR_ARG (arg, 0);
7823 REAL_VALUE_TYPE dconstroot = dconst_third ();
7825 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7826 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7827 tree_root = build_real (type, dconstroot);
7828 return build_call_expr (powfn, 2, arg0, tree_root);
7832 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7833 if (BUILTIN_CBRT_P (fcode))
7835 tree arg0 = CALL_EXPR_ARG (arg, 0);
7836 if (tree_expr_nonnegative_p (arg0))
7838 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7843 REAL_VALUE_TYPE dconstroot;
7845 real_arithmetic (&dconstroot, MULT_EXPR,
7846 dconst_third_ptr (), dconst_third_ptr ());
7847 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7848 tree_root = build_real (type, dconstroot);
7849 return build_call_expr (powfn, 2, arg0, tree_root);
7854 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7855 if (fcode == BUILT_IN_POW
7856 || fcode == BUILT_IN_POWF
7857 || fcode == BUILT_IN_POWL)
7859 tree arg00 = CALL_EXPR_ARG (arg, 0);
7860 tree arg01 = CALL_EXPR_ARG (arg, 1);
7861 if (tree_expr_nonnegative_p (arg00))
7863 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7864 const REAL_VALUE_TYPE dconstroot
7865 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7866 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7867 build_real (type, dconstroot));
7868 return build_call_expr (powfn, 2, arg00, narg01);
7875 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7876 TYPE is the type of the return value. Return NULL_TREE if no
7877 simplification can be made. */
7880 fold_builtin_cos (tree arg, tree type, tree fndecl)
7884 if (!validate_arg (arg, REAL_TYPE))
7887 /* Calculate the result when the argument is a constant. */
7888 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7891 /* Optimize cos(-x) into cos (x). */
7892 if ((narg = fold_strip_sign_ops (arg)))
7893 return build_call_expr (fndecl, 1, narg);
7898 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7899 Return NULL_TREE if no simplification can be made. */
7902 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7904 if (validate_arg (arg, REAL_TYPE))
7908 /* Calculate the result when the argument is a constant. */
7909 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7912 /* Optimize cosh(-x) into cosh (x). */
7913 if ((narg = fold_strip_sign_ops (arg)))
7914 return build_call_expr (fndecl, 1, narg);
7920 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7921 argument ARG. TYPE is the type of the return value. Return
7922 NULL_TREE if no simplification can be made. */
7925 fold_builtin_ccos (tree arg, tree type ATTRIBUTE_UNUSED, tree fndecl,
7926 bool hyper ATTRIBUTE_UNUSED)
7928 if (validate_arg (arg, COMPLEX_TYPE)
7929 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7934 /* Calculate the result when the argument is a constant. */
7935 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7939 /* Optimize fn(-x) into fn(x). */
7940 if ((tmp = fold_strip_sign_ops (arg)))
7941 return build_call_expr (fndecl, 1, tmp);
7947 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7948 Return NULL_TREE if no simplification can be made. */
7951 fold_builtin_tan (tree arg, tree type)
7953 enum built_in_function fcode;
7956 if (!validate_arg (arg, REAL_TYPE))
7959 /* Calculate the result when the argument is a constant. */
7960 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7963 /* Optimize tan(atan(x)) = x. */
7964 fcode = builtin_mathfn_code (arg);
7965 if (flag_unsafe_math_optimizations
7966 && (fcode == BUILT_IN_ATAN
7967 || fcode == BUILT_IN_ATANF
7968 || fcode == BUILT_IN_ATANL))
7969 return CALL_EXPR_ARG (arg, 0);
7974 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7975 NULL_TREE if no simplification can be made. */
7978 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7983 if (!validate_arg (arg0, REAL_TYPE)
7984 || !validate_arg (arg1, POINTER_TYPE)
7985 || !validate_arg (arg2, POINTER_TYPE))
7988 type = TREE_TYPE (arg0);
7990 /* Calculate the result when the argument is a constant. */
7991 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7994 /* Canonicalize sincos to cexpi. */
7995 if (!TARGET_C99_FUNCTIONS)
7997 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8001 call = build_call_expr (fn, 1, arg0);
8002 call = builtin_save_expr (call);
8004 return build2 (COMPOUND_EXPR, void_type_node,
8005 build2 (MODIFY_EXPR, void_type_node,
8006 build_fold_indirect_ref (arg1),
8007 build1 (IMAGPART_EXPR, type, call)),
8008 build2 (MODIFY_EXPR, void_type_node,
8009 build_fold_indirect_ref (arg2),
8010 build1 (REALPART_EXPR, type, call)));
8013 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8014 NULL_TREE if no simplification can be made. */
8017 fold_builtin_cexp (tree arg0, tree type)
8020 tree realp, imagp, ifn;
8025 if (!validate_arg (arg0, COMPLEX_TYPE)
8026 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8030 /* Calculate the result when the argument is a constant. */
8031 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8035 rtype = TREE_TYPE (TREE_TYPE (arg0));
8037 /* In case we can figure out the real part of arg0 and it is constant zero
8039 if (!TARGET_C99_FUNCTIONS)
8041 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8045 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
8046 && real_zerop (realp))
8048 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
8049 return build_call_expr (ifn, 1, narg);
8052 /* In case we can easily decompose real and imaginary parts split cexp
8053 to exp (r) * cexpi (i). */
8054 if (flag_unsafe_math_optimizations
8057 tree rfn, rcall, icall;
8059 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8063 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
8067 icall = build_call_expr (ifn, 1, imagp);
8068 icall = builtin_save_expr (icall);
8069 rcall = build_call_expr (rfn, 1, realp);
8070 rcall = builtin_save_expr (rcall);
8071 return fold_build2 (COMPLEX_EXPR, type,
8072 fold_build2 (MULT_EXPR, rtype,
8074 fold_build1 (REALPART_EXPR, rtype, icall)),
8075 fold_build2 (MULT_EXPR, rtype,
8077 fold_build1 (IMAGPART_EXPR, rtype, icall)));
8083 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8084 Return NULL_TREE if no simplification can be made. */
8087 fold_builtin_trunc (tree fndecl, tree arg)
8089 if (!validate_arg (arg, REAL_TYPE))
8092 /* Optimize trunc of constant value. */
8093 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8095 REAL_VALUE_TYPE r, x;
8096 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8098 x = TREE_REAL_CST (arg);
8099 real_trunc (&r, TYPE_MODE (type), &x);
8100 return build_real (type, r);
8103 return fold_trunc_transparent_mathfn (fndecl, arg);
8106 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8107 Return NULL_TREE if no simplification can be made. */
8110 fold_builtin_floor (tree fndecl, tree arg)
8112 if (!validate_arg (arg, REAL_TYPE))
8115 /* Optimize floor of constant value. */
8116 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8120 x = TREE_REAL_CST (arg);
8121 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8123 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8126 real_floor (&r, TYPE_MODE (type), &x);
8127 return build_real (type, r);
8131 /* Fold floor (x) where x is nonnegative to trunc (x). */
8132 if (tree_expr_nonnegative_p (arg))
8134 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8136 return build_call_expr (truncfn, 1, arg);
8139 return fold_trunc_transparent_mathfn (fndecl, arg);
8142 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8143 Return NULL_TREE if no simplification can be made. */
8146 fold_builtin_ceil (tree fndecl, tree arg)
8148 if (!validate_arg (arg, REAL_TYPE))
8151 /* Optimize ceil of constant value. */
8152 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8156 x = TREE_REAL_CST (arg);
8157 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8159 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8162 real_ceil (&r, TYPE_MODE (type), &x);
8163 return build_real (type, r);
8167 return fold_trunc_transparent_mathfn (fndecl, arg);
8170 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8171 Return NULL_TREE if no simplification can be made. */
8174 fold_builtin_round (tree fndecl, tree arg)
8176 if (!validate_arg (arg, REAL_TYPE))
8179 /* Optimize round of constant value. */
8180 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8184 x = TREE_REAL_CST (arg);
8185 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8187 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8190 real_round (&r, TYPE_MODE (type), &x);
8191 return build_real (type, r);
8195 return fold_trunc_transparent_mathfn (fndecl, arg);
8198 /* Fold function call to builtin lround, lroundf or lroundl (or the
8199 corresponding long long versions) and other rounding functions. ARG
8200 is the argument to the call. Return NULL_TREE if no simplification
8204 fold_builtin_int_roundingfn (tree fndecl, tree arg)
8206 if (!validate_arg (arg, REAL_TYPE))
8209 /* Optimize lround of constant value. */
8210 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8212 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8214 if (real_isfinite (&x))
8216 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8217 tree ftype = TREE_TYPE (arg);
8218 unsigned HOST_WIDE_INT lo2;
8219 HOST_WIDE_INT hi, lo;
8222 switch (DECL_FUNCTION_CODE (fndecl))
8224 CASE_FLT_FN (BUILT_IN_LFLOOR):
8225 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8226 real_floor (&r, TYPE_MODE (ftype), &x);
8229 CASE_FLT_FN (BUILT_IN_LCEIL):
8230 CASE_FLT_FN (BUILT_IN_LLCEIL):
8231 real_ceil (&r, TYPE_MODE (ftype), &x);
8234 CASE_FLT_FN (BUILT_IN_LROUND):
8235 CASE_FLT_FN (BUILT_IN_LLROUND):
8236 real_round (&r, TYPE_MODE (ftype), &x);
8243 REAL_VALUE_TO_INT (&lo, &hi, r);
8244 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8245 return build_int_cst_wide (itype, lo2, hi);
8249 switch (DECL_FUNCTION_CODE (fndecl))
8251 CASE_FLT_FN (BUILT_IN_LFLOOR):
8252 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8253 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8254 if (tree_expr_nonnegative_p (arg))
8255 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8261 return fold_fixed_mathfn (fndecl, arg);
8264 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8265 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8266 the argument to the call. Return NULL_TREE if no simplification can
8270 fold_builtin_bitop (tree fndecl, tree arg)
8272 if (!validate_arg (arg, INTEGER_TYPE))
8275 /* Optimize for constant argument. */
8276 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8278 HOST_WIDE_INT hi, width, result;
8279 unsigned HOST_WIDE_INT lo;
8282 type = TREE_TYPE (arg);
8283 width = TYPE_PRECISION (type);
8284 lo = TREE_INT_CST_LOW (arg);
8286 /* Clear all the bits that are beyond the type's precision. */
8287 if (width > HOST_BITS_PER_WIDE_INT)
8289 hi = TREE_INT_CST_HIGH (arg);
8290 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8291 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8296 if (width < HOST_BITS_PER_WIDE_INT)
8297 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8300 switch (DECL_FUNCTION_CODE (fndecl))
8302 CASE_INT_FN (BUILT_IN_FFS):
8304 result = exact_log2 (lo & -lo) + 1;
8306 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8311 CASE_INT_FN (BUILT_IN_CLZ):
8313 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8315 result = width - floor_log2 (lo) - 1;
8316 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8320 CASE_INT_FN (BUILT_IN_CTZ):
8322 result = exact_log2 (lo & -lo);
8324 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8325 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8329 CASE_INT_FN (BUILT_IN_POPCOUNT):
8332 result++, lo &= lo - 1;
8334 result++, hi &= hi - 1;
8337 CASE_INT_FN (BUILT_IN_PARITY):
8340 result++, lo &= lo - 1;
8342 result++, hi &= hi - 1;
8350 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8356 /* Fold function call to builtin_bswap and the long and long long
8357 variants. Return NULL_TREE if no simplification can be made. */
8359 fold_builtin_bswap (tree fndecl, tree arg)
8361 if (! validate_arg (arg, INTEGER_TYPE))
8364 /* Optimize constant value. */
8365 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8367 HOST_WIDE_INT hi, width, r_hi = 0;
8368 unsigned HOST_WIDE_INT lo, r_lo = 0;
8371 type = TREE_TYPE (arg);
8372 width = TYPE_PRECISION (type);
8373 lo = TREE_INT_CST_LOW (arg);
8374 hi = TREE_INT_CST_HIGH (arg);
8376 switch (DECL_FUNCTION_CODE (fndecl))
8378 case BUILT_IN_BSWAP32:
8379 case BUILT_IN_BSWAP64:
8383 for (s = 0; s < width; s += 8)
8385 int d = width - s - 8;
8386 unsigned HOST_WIDE_INT byte;
8388 if (s < HOST_BITS_PER_WIDE_INT)
8389 byte = (lo >> s) & 0xff;
8391 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8393 if (d < HOST_BITS_PER_WIDE_INT)
8396 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8406 if (width < HOST_BITS_PER_WIDE_INT)
8407 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8409 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8415 /* A subroutine of fold_builtin to fold the various logarithmic
8416 functions. Return NULL_TREE if no simplification can me made.
8417 FUNC is the corresponding MPFR logarithm function. */
8420 fold_builtin_logarithm (tree fndecl, tree arg,
8421 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8423 if (validate_arg (arg, REAL_TYPE))
8425 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8427 const enum built_in_function fcode = builtin_mathfn_code (arg);
8429 /* Calculate the result when the argument is a constant. */
8430 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8433 /* Special case, optimize logN(expN(x)) = x. */
8434 if (flag_unsafe_math_optimizations
8435 && ((func == mpfr_log
8436 && (fcode == BUILT_IN_EXP
8437 || fcode == BUILT_IN_EXPF
8438 || fcode == BUILT_IN_EXPL))
8439 || (func == mpfr_log2
8440 && (fcode == BUILT_IN_EXP2
8441 || fcode == BUILT_IN_EXP2F
8442 || fcode == BUILT_IN_EXP2L))
8443 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8444 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8446 /* Optimize logN(func()) for various exponential functions. We
8447 want to determine the value "x" and the power "exponent" in
8448 order to transform logN(x**exponent) into exponent*logN(x). */
8449 if (flag_unsafe_math_optimizations)
8451 tree exponent = 0, x = 0;
8455 CASE_FLT_FN (BUILT_IN_EXP):
8456 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8457 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8459 exponent = CALL_EXPR_ARG (arg, 0);
8461 CASE_FLT_FN (BUILT_IN_EXP2):
8462 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8463 x = build_real (type, dconst2);
8464 exponent = CALL_EXPR_ARG (arg, 0);
8466 CASE_FLT_FN (BUILT_IN_EXP10):
8467 CASE_FLT_FN (BUILT_IN_POW10):
8468 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8470 REAL_VALUE_TYPE dconst10;
8471 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8472 x = build_real (type, dconst10);
8474 exponent = CALL_EXPR_ARG (arg, 0);
8476 CASE_FLT_FN (BUILT_IN_SQRT):
8477 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8478 x = CALL_EXPR_ARG (arg, 0);
8479 exponent = build_real (type, dconsthalf);
8481 CASE_FLT_FN (BUILT_IN_CBRT):
8482 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8483 x = CALL_EXPR_ARG (arg, 0);
8484 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8487 CASE_FLT_FN (BUILT_IN_POW):
8488 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8489 x = CALL_EXPR_ARG (arg, 0);
8490 exponent = CALL_EXPR_ARG (arg, 1);
8496 /* Now perform the optimization. */
8499 tree logfn = build_call_expr (fndecl, 1, x);
8500 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8508 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8509 NULL_TREE if no simplification can be made. */
8512 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8514 tree res, narg0, narg1;
8516 if (!validate_arg (arg0, REAL_TYPE)
8517 || !validate_arg (arg1, REAL_TYPE))
8520 /* Calculate the result when the argument is a constant. */
8521 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8524 /* If either argument to hypot has a negate or abs, strip that off.
8525 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8526 narg0 = fold_strip_sign_ops (arg0);
8527 narg1 = fold_strip_sign_ops (arg1);
8530 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8531 narg1 ? narg1 : arg1);
8534 /* If either argument is zero, hypot is fabs of the other. */
8535 if (real_zerop (arg0))
8536 return fold_build1 (ABS_EXPR, type, arg1);
8537 else if (real_zerop (arg1))
8538 return fold_build1 (ABS_EXPR, type, arg0);
8540 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8541 if (flag_unsafe_math_optimizations
8542 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8544 const REAL_VALUE_TYPE sqrt2_trunc
8545 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8546 return fold_build2 (MULT_EXPR, type,
8547 fold_build1 (ABS_EXPR, type, arg0),
8548 build_real (type, sqrt2_trunc));
8555 /* Fold a builtin function call to pow, powf, or powl. Return
8556 NULL_TREE if no simplification can be made. */
8558 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8562 if (!validate_arg (arg0, REAL_TYPE)
8563 || !validate_arg (arg1, REAL_TYPE))
8566 /* Calculate the result when the argument is a constant. */
8567 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8570 /* Optimize pow(1.0,y) = 1.0. */
8571 if (real_onep (arg0))
8572 return omit_one_operand (type, build_real (type, dconst1), arg1);
8574 if (TREE_CODE (arg1) == REAL_CST
8575 && !TREE_OVERFLOW (arg1))
8577 REAL_VALUE_TYPE cint;
8581 c = TREE_REAL_CST (arg1);
8583 /* Optimize pow(x,0.0) = 1.0. */
8584 if (REAL_VALUES_EQUAL (c, dconst0))
8585 return omit_one_operand (type, build_real (type, dconst1),
8588 /* Optimize pow(x,1.0) = x. */
8589 if (REAL_VALUES_EQUAL (c, dconst1))
8592 /* Optimize pow(x,-1.0) = 1.0/x. */
8593 if (REAL_VALUES_EQUAL (c, dconstm1))
8594 return fold_build2 (RDIV_EXPR, type,
8595 build_real (type, dconst1), arg0);
8597 /* Optimize pow(x,0.5) = sqrt(x). */
8598 if (flag_unsafe_math_optimizations
8599 && REAL_VALUES_EQUAL (c, dconsthalf))
8601 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8603 if (sqrtfn != NULL_TREE)
8604 return build_call_expr (sqrtfn, 1, arg0);
8607 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8608 if (flag_unsafe_math_optimizations)
8610 const REAL_VALUE_TYPE dconstroot
8611 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8613 if (REAL_VALUES_EQUAL (c, dconstroot))
8615 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8616 if (cbrtfn != NULL_TREE)
8617 return build_call_expr (cbrtfn, 1, arg0);
8621 /* Check for an integer exponent. */
8622 n = real_to_integer (&c);
8623 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8624 if (real_identical (&c, &cint))
8626 /* Attempt to evaluate pow at compile-time, unless this should
8627 raise an exception. */
8628 if (TREE_CODE (arg0) == REAL_CST
8629 && !TREE_OVERFLOW (arg0)
8631 || (!flag_trapping_math && !flag_errno_math)
8632 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8637 x = TREE_REAL_CST (arg0);
8638 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8639 if (flag_unsafe_math_optimizations || !inexact)
8640 return build_real (type, x);
8643 /* Strip sign ops from even integer powers. */
8644 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8646 tree narg0 = fold_strip_sign_ops (arg0);
8648 return build_call_expr (fndecl, 2, narg0, arg1);
8653 if (flag_unsafe_math_optimizations)
8655 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8657 /* Optimize pow(expN(x),y) = expN(x*y). */
8658 if (BUILTIN_EXPONENT_P (fcode))
8660 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8661 tree arg = CALL_EXPR_ARG (arg0, 0);
8662 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8663 return build_call_expr (expfn, 1, arg);
8666 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8667 if (BUILTIN_SQRT_P (fcode))
8669 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8670 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8671 build_real (type, dconsthalf));
8672 return build_call_expr (fndecl, 2, narg0, narg1);
8675 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8676 if (BUILTIN_CBRT_P (fcode))
8678 tree arg = CALL_EXPR_ARG (arg0, 0);
8679 if (tree_expr_nonnegative_p (arg))
8681 const REAL_VALUE_TYPE dconstroot
8682 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8683 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8684 build_real (type, dconstroot));
8685 return build_call_expr (fndecl, 2, arg, narg1);
8689 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8690 if (fcode == BUILT_IN_POW
8691 || fcode == BUILT_IN_POWF
8692 || fcode == BUILT_IN_POWL)
8694 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8695 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8696 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8697 return build_call_expr (fndecl, 2, arg00, narg1);
8704 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8705 Return NULL_TREE if no simplification can be made. */
8707 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8708 tree arg0, tree arg1, tree type)
8710 if (!validate_arg (arg0, REAL_TYPE)
8711 || !validate_arg (arg1, INTEGER_TYPE))
8714 /* Optimize pow(1.0,y) = 1.0. */
8715 if (real_onep (arg0))
8716 return omit_one_operand (type, build_real (type, dconst1), arg1);
8718 if (host_integerp (arg1, 0))
8720 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8722 /* Evaluate powi at compile-time. */
8723 if (TREE_CODE (arg0) == REAL_CST
8724 && !TREE_OVERFLOW (arg0))
8727 x = TREE_REAL_CST (arg0);
8728 real_powi (&x, TYPE_MODE (type), &x, c);
8729 return build_real (type, x);
8732 /* Optimize pow(x,0) = 1.0. */
8734 return omit_one_operand (type, build_real (type, dconst1),
8737 /* Optimize pow(x,1) = x. */
8741 /* Optimize pow(x,-1) = 1.0/x. */
8743 return fold_build2 (RDIV_EXPR, type,
8744 build_real (type, dconst1), arg0);
8750 /* A subroutine of fold_builtin to fold the various exponent
8751 functions. Return NULL_TREE if no simplification can be made.
8752 FUNC is the corresponding MPFR exponent function. */
8755 fold_builtin_exponent (tree fndecl, tree arg,
8756 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8758 if (validate_arg (arg, REAL_TYPE))
8760 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8763 /* Calculate the result when the argument is a constant. */
8764 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8767 /* Optimize expN(logN(x)) = x. */
8768 if (flag_unsafe_math_optimizations)
8770 const enum built_in_function fcode = builtin_mathfn_code (arg);
8772 if ((func == mpfr_exp
8773 && (fcode == BUILT_IN_LOG
8774 || fcode == BUILT_IN_LOGF
8775 || fcode == BUILT_IN_LOGL))
8776 || (func == mpfr_exp2
8777 && (fcode == BUILT_IN_LOG2
8778 || fcode == BUILT_IN_LOG2F
8779 || fcode == BUILT_IN_LOG2L))
8780 || (func == mpfr_exp10
8781 && (fcode == BUILT_IN_LOG10
8782 || fcode == BUILT_IN_LOG10F
8783 || fcode == BUILT_IN_LOG10L)))
8784 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8791 /* Return true if VAR is a VAR_DECL or a component thereof. */
8794 var_decl_component_p (tree var)
8797 while (handled_component_p (inner))
8798 inner = TREE_OPERAND (inner, 0);
8799 return SSA_VAR_P (inner);
8802 /* Fold function call to builtin memset. Return
8803 NULL_TREE if no simplification can be made. */
8806 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8808 tree var, ret, etype;
8809 unsigned HOST_WIDE_INT length, cval;
8811 if (! validate_arg (dest, POINTER_TYPE)
8812 || ! validate_arg (c, INTEGER_TYPE)
8813 || ! validate_arg (len, INTEGER_TYPE))
8816 if (! host_integerp (len, 1))
8819 /* If the LEN parameter is zero, return DEST. */
8820 if (integer_zerop (len))
8821 return omit_one_operand (type, dest, c);
8823 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8828 if (TREE_CODE (var) != ADDR_EXPR)
8831 var = TREE_OPERAND (var, 0);
8832 if (TREE_THIS_VOLATILE (var))
8835 etype = TREE_TYPE (var);
8836 if (TREE_CODE (etype) == ARRAY_TYPE)
8837 etype = TREE_TYPE (etype);
8839 if (!INTEGRAL_TYPE_P (etype)
8840 && !POINTER_TYPE_P (etype))
8843 if (! var_decl_component_p (var))
8846 length = tree_low_cst (len, 1);
8847 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8848 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8852 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8855 if (integer_zerop (c))
8859 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8862 cval = tree_low_cst (c, 1);
8866 cval |= (cval << 31) << 1;
8869 ret = build_int_cst_type (etype, cval);
8870 var = build_fold_indirect_ref (fold_convert (build_pointer_type (etype),
8872 ret = build2 (MODIFY_EXPR, etype, var, ret);
8876 return omit_one_operand (type, dest, ret);
8879 /* Fold function call to builtin memset. Return
8880 NULL_TREE if no simplification can be made. */
8883 fold_builtin_bzero (tree dest, tree size, bool ignore)
8885 if (! validate_arg (dest, POINTER_TYPE)
8886 || ! validate_arg (size, INTEGER_TYPE))
8892 /* New argument list transforming bzero(ptr x, int y) to
8893 memset(ptr x, int 0, size_t y). This is done this way
8894 so that if it isn't expanded inline, we fallback to
8895 calling bzero instead of memset. */
8897 return fold_builtin_memset (dest, integer_zero_node,
8898 fold_convert (sizetype, size),
8899 void_type_node, ignore);
8902 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8903 NULL_TREE if no simplification can be made.
8904 If ENDP is 0, return DEST (like memcpy).
8905 If ENDP is 1, return DEST+LEN (like mempcpy).
8906 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8907 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8911 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8913 tree destvar, srcvar, expr;
8915 if (! validate_arg (dest, POINTER_TYPE)
8916 || ! validate_arg (src, POINTER_TYPE)
8917 || ! validate_arg (len, INTEGER_TYPE))
8920 /* If the LEN parameter is zero, return DEST. */
8921 if (integer_zerop (len))
8922 return omit_one_operand (type, dest, src);
8924 /* If SRC and DEST are the same (and not volatile), return
8925 DEST{,+LEN,+LEN-1}. */
8926 if (operand_equal_p (src, dest, 0))
8930 tree srctype, desttype;
8931 int src_align, dest_align;
8935 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8936 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8938 /* Both DEST and SRC must be pointer types.
8939 ??? This is what old code did. Is the testing for pointer types
8942 If either SRC is readonly or length is 1, we can use memcpy. */
8943 if (!dest_align || !src_align)
8945 if (readonly_data_expr (src)
8946 || (host_integerp (len, 1)
8947 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8948 >= tree_low_cst (len, 1))))
8950 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8953 return build_call_expr (fn, 3, dest, src, len);
8956 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8957 srcvar = build_fold_indirect_ref (src);
8958 destvar = build_fold_indirect_ref (dest);
8960 && !TREE_THIS_VOLATILE (srcvar)
8962 && !TREE_THIS_VOLATILE (destvar))
8964 tree src_base, dest_base, fn;
8965 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8966 HOST_WIDE_INT size = -1;
8967 HOST_WIDE_INT maxsize = -1;
8970 if (handled_component_p (src_base))
8971 src_base = get_ref_base_and_extent (src_base, &src_offset,
8973 dest_base = destvar;
8974 if (handled_component_p (dest_base))
8975 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
8977 if (host_integerp (len, 1))
8979 maxsize = tree_low_cst (len, 1);
8981 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
8984 maxsize *= BITS_PER_UNIT;
8988 if (SSA_VAR_P (src_base)
8989 && SSA_VAR_P (dest_base))
8991 if (operand_equal_p (src_base, dest_base, 0)
8992 && ranges_overlap_p (src_offset, maxsize,
8993 dest_offset, maxsize))
8996 else if (TREE_CODE (src_base) == INDIRECT_REF
8997 && TREE_CODE (dest_base) == INDIRECT_REF)
8999 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
9000 TREE_OPERAND (dest_base, 0), 0)
9001 || ranges_overlap_p (src_offset, maxsize,
9002 dest_offset, maxsize))
9008 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9011 return build_call_expr (fn, 3, dest, src, len);
9016 if (!host_integerp (len, 0))
9019 This logic lose for arguments like (type *)malloc (sizeof (type)),
9020 since we strip the casts of up to VOID return value from malloc.
9021 Perhaps we ought to inherit type from non-VOID argument here? */
9024 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
9025 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
9027 tree tem = TREE_OPERAND (src, 0);
9029 if (tem != TREE_OPERAND (src, 0))
9030 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
9032 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
9034 tree tem = TREE_OPERAND (dest, 0);
9036 if (tem != TREE_OPERAND (dest, 0))
9037 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
9039 srctype = TREE_TYPE (TREE_TYPE (src));
9041 && TREE_CODE (srctype) == ARRAY_TYPE
9042 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
9044 srctype = TREE_TYPE (srctype);
9046 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
9048 desttype = TREE_TYPE (TREE_TYPE (dest));
9050 && TREE_CODE (desttype) == ARRAY_TYPE
9051 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
9053 desttype = TREE_TYPE (desttype);
9055 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
9057 if (!srctype || !desttype
9058 || !TYPE_SIZE_UNIT (srctype)
9059 || !TYPE_SIZE_UNIT (desttype)
9060 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
9061 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
9062 || TYPE_VOLATILE (srctype)
9063 || TYPE_VOLATILE (desttype))
9066 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
9067 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
9068 if (dest_align < (int) TYPE_ALIGN (desttype)
9069 || src_align < (int) TYPE_ALIGN (srctype))
9073 dest = builtin_save_expr (dest);
9076 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
9078 srcvar = build_fold_indirect_ref (src);
9079 if (TREE_THIS_VOLATILE (srcvar))
9081 else if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
9083 /* With memcpy, it is possible to bypass aliasing rules, so without
9084 this check i.e. execute/20060930-2.c would be misoptimized,
9085 because it use conflicting alias set to hold argument for the
9086 memcpy call. This check is probably unnecessary with
9087 -fno-strict-aliasing. Similarly for destvar. See also
9089 else if (!var_decl_component_p (srcvar))
9093 destvar = NULL_TREE;
9094 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
9096 destvar = build_fold_indirect_ref (dest);
9097 if (TREE_THIS_VOLATILE (destvar))
9099 else if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
9100 destvar = NULL_TREE;
9101 else if (!var_decl_component_p (destvar))
9102 destvar = NULL_TREE;
9105 if (srcvar == NULL_TREE && destvar == NULL_TREE)
9108 if (srcvar == NULL_TREE)
9111 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
9114 srctype = build_qualified_type (desttype, 0);
9115 if (src_align < (int) TYPE_ALIGN (srctype))
9117 if (AGGREGATE_TYPE_P (srctype)
9118 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
9121 srctype = build_variant_type_copy (srctype);
9122 TYPE_ALIGN (srctype) = src_align;
9123 TYPE_USER_ALIGN (srctype) = 1;
9124 TYPE_PACKED (srctype) = 1;
9126 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
9127 src = fold_convert (srcptype, src);
9128 srcvar = build_fold_indirect_ref (src);
9130 else if (destvar == NULL_TREE)
9133 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
9136 desttype = build_qualified_type (srctype, 0);
9137 if (dest_align < (int) TYPE_ALIGN (desttype))
9139 if (AGGREGATE_TYPE_P (desttype)
9140 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
9143 desttype = build_variant_type_copy (desttype);
9144 TYPE_ALIGN (desttype) = dest_align;
9145 TYPE_USER_ALIGN (desttype) = 1;
9146 TYPE_PACKED (desttype) = 1;
9148 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
9149 dest = fold_convert (destptype, dest);
9150 destvar = build_fold_indirect_ref (dest);
9153 if (srctype == desttype
9154 || (gimple_in_ssa_p (cfun)
9155 && useless_type_conversion_p (desttype, srctype)))
9157 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
9158 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
9159 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
9160 || POINTER_TYPE_P (TREE_TYPE (destvar))))
9161 expr = fold_convert (TREE_TYPE (destvar), srcvar);
9163 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
9164 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
9170 if (endp == 0 || endp == 3)
9171 return omit_one_operand (type, dest, expr);
9177 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
9180 len = fold_convert (sizetype, len);
9181 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
9182 dest = fold_convert (type, dest);
9184 dest = omit_one_operand (type, dest, expr);
9188 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9189 If LEN is not NULL, it represents the length of the string to be
9190 copied. Return NULL_TREE if no simplification can be made. */
9193 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
9197 if (!validate_arg (dest, POINTER_TYPE)
9198 || !validate_arg (src, POINTER_TYPE))
9201 /* If SRC and DEST are the same (and not volatile), return DEST. */
9202 if (operand_equal_p (src, dest, 0))
9203 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
9205 if (optimize_function_for_size_p (cfun))
9208 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9214 len = c_strlen (src, 1);
9215 if (! len || TREE_SIDE_EFFECTS (len))
9219 len = size_binop (PLUS_EXPR, len, ssize_int (1));
9220 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9221 build_call_expr (fn, 3, dest, src, len));
9224 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9225 If SLEN is not NULL, it represents the length of the source string.
9226 Return NULL_TREE if no simplification can be made. */
9229 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
9233 if (!validate_arg (dest, POINTER_TYPE)
9234 || !validate_arg (src, POINTER_TYPE)
9235 || !validate_arg (len, INTEGER_TYPE))
9238 /* If the LEN parameter is zero, return DEST. */
9239 if (integer_zerop (len))
9240 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9242 /* We can't compare slen with len as constants below if len is not a
9244 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9248 slen = c_strlen (src, 1);
9250 /* Now, we must be passed a constant src ptr parameter. */
9251 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9254 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
9256 /* We do not support simplification of this case, though we do
9257 support it when expanding trees into RTL. */
9258 /* FIXME: generate a call to __builtin_memset. */
9259 if (tree_int_cst_lt (slen, len))
9262 /* OK transform into builtin memcpy. */
9263 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9266 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9267 build_call_expr (fn, 3, dest, src, len));
9270 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9271 arguments to the call, and TYPE is its return type.
9272 Return NULL_TREE if no simplification can be made. */
9275 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
9277 if (!validate_arg (arg1, POINTER_TYPE)
9278 || !validate_arg (arg2, INTEGER_TYPE)
9279 || !validate_arg (len, INTEGER_TYPE))
9285 if (TREE_CODE (arg2) != INTEGER_CST
9286 || !host_integerp (len, 1))
9289 p1 = c_getstr (arg1);
9290 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9296 if (target_char_cast (arg2, &c))
9299 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
9302 return build_int_cst (TREE_TYPE (arg1), 0);
9304 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
9306 return fold_convert (type, tem);
9312 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9313 Return NULL_TREE if no simplification can be made. */
9316 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
9318 const char *p1, *p2;
9320 if (!validate_arg (arg1, POINTER_TYPE)
9321 || !validate_arg (arg2, POINTER_TYPE)
9322 || !validate_arg (len, INTEGER_TYPE))
9325 /* If the LEN parameter is zero, return zero. */
9326 if (integer_zerop (len))
9327 return omit_two_operands (integer_type_node, integer_zero_node,
9330 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9331 if (operand_equal_p (arg1, arg2, 0))
9332 return omit_one_operand (integer_type_node, integer_zero_node, len);
9334 p1 = c_getstr (arg1);
9335 p2 = c_getstr (arg2);
9337 /* If all arguments are constant, and the value of len is not greater
9338 than the lengths of arg1 and arg2, evaluate at compile-time. */
9339 if (host_integerp (len, 1) && p1 && p2
9340 && compare_tree_int (len, strlen (p1) + 1) <= 0
9341 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9343 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9346 return integer_one_node;
9348 return integer_minus_one_node;
9350 return integer_zero_node;
9353 /* If len parameter is one, return an expression corresponding to
9354 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9355 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9357 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9358 tree cst_uchar_ptr_node
9359 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9361 tree ind1 = fold_convert (integer_type_node,
9362 build1 (INDIRECT_REF, cst_uchar_node,
9363 fold_convert (cst_uchar_ptr_node,
9365 tree ind2 = fold_convert (integer_type_node,
9366 build1 (INDIRECT_REF, cst_uchar_node,
9367 fold_convert (cst_uchar_ptr_node,
9369 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9375 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9376 Return NULL_TREE if no simplification can be made. */
9379 fold_builtin_strcmp (tree arg1, tree arg2)
9381 const char *p1, *p2;
9383 if (!validate_arg (arg1, POINTER_TYPE)
9384 || !validate_arg (arg2, POINTER_TYPE))
9387 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9388 if (operand_equal_p (arg1, arg2, 0))
9389 return integer_zero_node;
9391 p1 = c_getstr (arg1);
9392 p2 = c_getstr (arg2);
9396 const int i = strcmp (p1, p2);
9398 return integer_minus_one_node;
9400 return integer_one_node;
9402 return integer_zero_node;
9405 /* If the second arg is "", return *(const unsigned char*)arg1. */
9406 if (p2 && *p2 == '\0')
9408 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9409 tree cst_uchar_ptr_node
9410 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9412 return fold_convert (integer_type_node,
9413 build1 (INDIRECT_REF, cst_uchar_node,
9414 fold_convert (cst_uchar_ptr_node,
9418 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9419 if (p1 && *p1 == '\0')
9421 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9422 tree cst_uchar_ptr_node
9423 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9425 tree temp = fold_convert (integer_type_node,
9426 build1 (INDIRECT_REF, cst_uchar_node,
9427 fold_convert (cst_uchar_ptr_node,
9429 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9435 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9436 Return NULL_TREE if no simplification can be made. */
9439 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9441 const char *p1, *p2;
9443 if (!validate_arg (arg1, POINTER_TYPE)
9444 || !validate_arg (arg2, POINTER_TYPE)
9445 || !validate_arg (len, INTEGER_TYPE))
9448 /* If the LEN parameter is zero, return zero. */
9449 if (integer_zerop (len))
9450 return omit_two_operands (integer_type_node, integer_zero_node,
9453 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9454 if (operand_equal_p (arg1, arg2, 0))
9455 return omit_one_operand (integer_type_node, integer_zero_node, len);
9457 p1 = c_getstr (arg1);
9458 p2 = c_getstr (arg2);
9460 if (host_integerp (len, 1) && p1 && p2)
9462 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9464 return integer_one_node;
9466 return integer_minus_one_node;
9468 return integer_zero_node;
9471 /* If the second arg is "", and the length is greater than zero,
9472 return *(const unsigned char*)arg1. */
9473 if (p2 && *p2 == '\0'
9474 && TREE_CODE (len) == INTEGER_CST
9475 && tree_int_cst_sgn (len) == 1)
9477 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9478 tree cst_uchar_ptr_node
9479 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9481 return fold_convert (integer_type_node,
9482 build1 (INDIRECT_REF, cst_uchar_node,
9483 fold_convert (cst_uchar_ptr_node,
9487 /* If the first arg is "", and the length is greater than zero,
9488 return -*(const unsigned char*)arg2. */
9489 if (p1 && *p1 == '\0'
9490 && TREE_CODE (len) == INTEGER_CST
9491 && tree_int_cst_sgn (len) == 1)
9493 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9494 tree cst_uchar_ptr_node
9495 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9497 tree temp = fold_convert (integer_type_node,
9498 build1 (INDIRECT_REF, cst_uchar_node,
9499 fold_convert (cst_uchar_ptr_node,
9501 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9504 /* If len parameter is one, return an expression corresponding to
9505 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9506 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9508 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9509 tree cst_uchar_ptr_node
9510 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9512 tree ind1 = fold_convert (integer_type_node,
9513 build1 (INDIRECT_REF, cst_uchar_node,
9514 fold_convert (cst_uchar_ptr_node,
9516 tree ind2 = fold_convert (integer_type_node,
9517 build1 (INDIRECT_REF, cst_uchar_node,
9518 fold_convert (cst_uchar_ptr_node,
9520 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9526 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9527 ARG. Return NULL_TREE if no simplification can be made. */
9530 fold_builtin_signbit (tree arg, tree type)
9534 if (!validate_arg (arg, REAL_TYPE))
9537 /* If ARG is a compile-time constant, determine the result. */
9538 if (TREE_CODE (arg) == REAL_CST
9539 && !TREE_OVERFLOW (arg))
9543 c = TREE_REAL_CST (arg);
9544 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9545 return fold_convert (type, temp);
9548 /* If ARG is non-negative, the result is always zero. */
9549 if (tree_expr_nonnegative_p (arg))
9550 return omit_one_operand (type, integer_zero_node, arg);
9552 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9553 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9554 return fold_build2 (LT_EXPR, type, arg,
9555 build_real (TREE_TYPE (arg), dconst0));
9560 /* Fold function call to builtin copysign, copysignf or copysignl with
9561 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9565 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9569 if (!validate_arg (arg1, REAL_TYPE)
9570 || !validate_arg (arg2, REAL_TYPE))
9573 /* copysign(X,X) is X. */
9574 if (operand_equal_p (arg1, arg2, 0))
9575 return fold_convert (type, arg1);
9577 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9578 if (TREE_CODE (arg1) == REAL_CST
9579 && TREE_CODE (arg2) == REAL_CST
9580 && !TREE_OVERFLOW (arg1)
9581 && !TREE_OVERFLOW (arg2))
9583 REAL_VALUE_TYPE c1, c2;
9585 c1 = TREE_REAL_CST (arg1);
9586 c2 = TREE_REAL_CST (arg2);
9587 /* c1.sign := c2.sign. */
9588 real_copysign (&c1, &c2);
9589 return build_real (type, c1);
9592 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9593 Remember to evaluate Y for side-effects. */
9594 if (tree_expr_nonnegative_p (arg2))
9595 return omit_one_operand (type,
9596 fold_build1 (ABS_EXPR, type, arg1),
9599 /* Strip sign changing operations for the first argument. */
9600 tem = fold_strip_sign_ops (arg1);
9602 return build_call_expr (fndecl, 2, tem, arg2);
9607 /* Fold a call to builtin isascii with argument ARG. */
9610 fold_builtin_isascii (tree arg)
9612 if (!validate_arg (arg, INTEGER_TYPE))
9616 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9617 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9618 build_int_cst (NULL_TREE,
9619 ~ (unsigned HOST_WIDE_INT) 0x7f));
9620 return fold_build2 (EQ_EXPR, integer_type_node,
9621 arg, integer_zero_node);
9625 /* Fold a call to builtin toascii with argument ARG. */
9628 fold_builtin_toascii (tree arg)
9630 if (!validate_arg (arg, INTEGER_TYPE))
9633 /* Transform toascii(c) -> (c & 0x7f). */
9634 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9635 build_int_cst (NULL_TREE, 0x7f));
9638 /* Fold a call to builtin isdigit with argument ARG. */
9641 fold_builtin_isdigit (tree arg)
9643 if (!validate_arg (arg, INTEGER_TYPE))
9647 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9648 /* According to the C standard, isdigit is unaffected by locale.
9649 However, it definitely is affected by the target character set. */
9650 unsigned HOST_WIDE_INT target_digit0
9651 = lang_hooks.to_target_charset ('0');
9653 if (target_digit0 == 0)
9656 arg = fold_convert (unsigned_type_node, arg);
9657 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9658 build_int_cst (unsigned_type_node, target_digit0));
9659 return fold_build2 (LE_EXPR, integer_type_node, arg,
9660 build_int_cst (unsigned_type_node, 9));
9664 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9667 fold_builtin_fabs (tree arg, tree type)
9669 if (!validate_arg (arg, REAL_TYPE))
9672 arg = fold_convert (type, arg);
9673 if (TREE_CODE (arg) == REAL_CST)
9674 return fold_abs_const (arg, type);
9675 return fold_build1 (ABS_EXPR, type, arg);
9678 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9681 fold_builtin_abs (tree arg, tree type)
9683 if (!validate_arg (arg, INTEGER_TYPE))
9686 arg = fold_convert (type, arg);
9687 if (TREE_CODE (arg) == INTEGER_CST)
9688 return fold_abs_const (arg, type);
9689 return fold_build1 (ABS_EXPR, type, arg);
9692 /* Fold a call to builtin fmin or fmax. */
9695 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9697 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9699 /* Calculate the result when the argument is a constant. */
9700 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9705 /* If either argument is NaN, return the other one. Avoid the
9706 transformation if we get (and honor) a signalling NaN. Using
9707 omit_one_operand() ensures we create a non-lvalue. */
9708 if (TREE_CODE (arg0) == REAL_CST
9709 && real_isnan (&TREE_REAL_CST (arg0))
9710 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9711 || ! TREE_REAL_CST (arg0).signalling))
9712 return omit_one_operand (type, arg1, arg0);
9713 if (TREE_CODE (arg1) == REAL_CST
9714 && real_isnan (&TREE_REAL_CST (arg1))
9715 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9716 || ! TREE_REAL_CST (arg1).signalling))
9717 return omit_one_operand (type, arg0, arg1);
9719 /* Transform fmin/fmax(x,x) -> x. */
9720 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9721 return omit_one_operand (type, arg0, arg1);
9723 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9724 functions to return the numeric arg if the other one is NaN.
9725 These tree codes don't honor that, so only transform if
9726 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9727 handled, so we don't have to worry about it either. */
9728 if (flag_finite_math_only)
9729 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9730 fold_convert (type, arg0),
9731 fold_convert (type, arg1));
9736 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9739 fold_builtin_carg (tree arg, tree type)
9741 if (validate_arg (arg, COMPLEX_TYPE)
9742 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9744 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9748 tree new_arg = builtin_save_expr (arg);
9749 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9750 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9751 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9758 /* Fold a call to builtin logb/ilogb. */
9761 fold_builtin_logb (tree arg, tree rettype)
9763 if (! validate_arg (arg, REAL_TYPE))
9768 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9770 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9776 /* If arg is Inf or NaN and we're logb, return it. */
9777 if (TREE_CODE (rettype) == REAL_TYPE)
9778 return fold_convert (rettype, arg);
9779 /* Fall through... */
9781 /* Zero may set errno and/or raise an exception for logb, also
9782 for ilogb we don't know FP_ILOGB0. */
9785 /* For normal numbers, proceed iff radix == 2. In GCC,
9786 normalized significands are in the range [0.5, 1.0). We
9787 want the exponent as if they were [1.0, 2.0) so get the
9788 exponent and subtract 1. */
9789 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9790 return fold_convert (rettype, build_int_cst (NULL_TREE,
9791 REAL_EXP (value)-1));
9799 /* Fold a call to builtin significand, if radix == 2. */
9802 fold_builtin_significand (tree arg, tree rettype)
9804 if (! validate_arg (arg, REAL_TYPE))
9809 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9811 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9818 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9819 return fold_convert (rettype, arg);
9821 /* For normal numbers, proceed iff radix == 2. */
9822 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9824 REAL_VALUE_TYPE result = *value;
9825 /* In GCC, normalized significands are in the range [0.5,
9826 1.0). We want them to be [1.0, 2.0) so set the
9828 SET_REAL_EXP (&result, 1);
9829 return build_real (rettype, result);
9838 /* Fold a call to builtin frexp, we can assume the base is 2. */
9841 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9843 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9848 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9851 arg1 = build_fold_indirect_ref (arg1);
9853 /* Proceed if a valid pointer type was passed in. */
9854 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9856 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9862 /* For +-0, return (*exp = 0, +-0). */
9863 exp = integer_zero_node;
9868 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9869 return omit_one_operand (rettype, arg0, arg1);
9872 /* Since the frexp function always expects base 2, and in
9873 GCC normalized significands are already in the range
9874 [0.5, 1.0), we have exactly what frexp wants. */
9875 REAL_VALUE_TYPE frac_rvt = *value;
9876 SET_REAL_EXP (&frac_rvt, 0);
9877 frac = build_real (rettype, frac_rvt);
9878 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9885 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9886 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9887 TREE_SIDE_EFFECTS (arg1) = 1;
9888 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9894 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9895 then we can assume the base is two. If it's false, then we have to
9896 check the mode of the TYPE parameter in certain cases. */
9899 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9901 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9906 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9907 if (real_zerop (arg0) || integer_zerop (arg1)
9908 || (TREE_CODE (arg0) == REAL_CST
9909 && !real_isfinite (&TREE_REAL_CST (arg0))))
9910 return omit_one_operand (type, arg0, arg1);
9912 /* If both arguments are constant, then try to evaluate it. */
9913 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9914 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9915 && host_integerp (arg1, 0))
9917 /* Bound the maximum adjustment to twice the range of the
9918 mode's valid exponents. Use abs to ensure the range is
9919 positive as a sanity check. */
9920 const long max_exp_adj = 2 *
9921 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9922 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9924 /* Get the user-requested adjustment. */
9925 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9927 /* The requested adjustment must be inside this range. This
9928 is a preliminary cap to avoid things like overflow, we
9929 may still fail to compute the result for other reasons. */
9930 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9932 REAL_VALUE_TYPE initial_result;
9934 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9936 /* Ensure we didn't overflow. */
9937 if (! real_isinf (&initial_result))
9939 const REAL_VALUE_TYPE trunc_result
9940 = real_value_truncate (TYPE_MODE (type), initial_result);
9942 /* Only proceed if the target mode can hold the
9944 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9945 return build_real (type, trunc_result);
9954 /* Fold a call to builtin modf. */
9957 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9959 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9964 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9967 arg1 = build_fold_indirect_ref (arg1);
9969 /* Proceed if a valid pointer type was passed in. */
9970 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9972 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9973 REAL_VALUE_TYPE trunc, frac;
9979 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9980 trunc = frac = *value;
9983 /* For +-Inf, return (*arg1 = arg0, +-0). */
9985 frac.sign = value->sign;
9989 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9990 real_trunc (&trunc, VOIDmode, value);
9991 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9992 /* If the original number was negative and already
9993 integral, then the fractional part is -0.0. */
9994 if (value->sign && frac.cl == rvc_zero)
9995 frac.sign = value->sign;
9999 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
10000 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
10001 build_real (rettype, trunc));
10002 TREE_SIDE_EFFECTS (arg1) = 1;
10003 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
10004 build_real (rettype, frac));
10010 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
10011 ARG is the argument for the call. */
10014 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
10016 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10019 if (!validate_arg (arg, REAL_TYPE))
10022 switch (builtin_index)
10024 case BUILT_IN_ISINF:
10025 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10026 return omit_one_operand (type, integer_zero_node, arg);
10028 if (TREE_CODE (arg) == REAL_CST)
10030 r = TREE_REAL_CST (arg);
10031 if (real_isinf (&r))
10032 return real_compare (GT_EXPR, &r, &dconst0)
10033 ? integer_one_node : integer_minus_one_node;
10035 return integer_zero_node;
10040 case BUILT_IN_ISINF_SIGN:
10042 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10043 /* In a boolean context, GCC will fold the inner COND_EXPR to
10044 1. So e.g. "if (isinf_sign(x))" would be folded to just
10045 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10046 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10047 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
10048 tree tmp = NULL_TREE;
10050 arg = builtin_save_expr (arg);
10052 if (signbit_fn && isinf_fn)
10054 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
10055 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
10057 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
10058 signbit_call, integer_zero_node);
10059 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
10060 isinf_call, integer_zero_node);
10062 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
10063 integer_minus_one_node, integer_one_node);
10064 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
10065 integer_zero_node);
10071 case BUILT_IN_ISFINITE:
10072 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10073 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10074 return omit_one_operand (type, integer_one_node, arg);
10076 if (TREE_CODE (arg) == REAL_CST)
10078 r = TREE_REAL_CST (arg);
10079 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10084 case BUILT_IN_ISNAN:
10085 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10086 return omit_one_operand (type, integer_zero_node, arg);
10088 if (TREE_CODE (arg) == REAL_CST)
10090 r = TREE_REAL_CST (arg);
10091 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10094 arg = builtin_save_expr (arg);
10095 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
10098 gcc_unreachable ();
10102 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10103 This builtin will generate code to return the appropriate floating
10104 point classification depending on the value of the floating point
10105 number passed in. The possible return values must be supplied as
10106 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10107 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
10108 one floating point argument which is "type generic". */
10111 fold_builtin_fpclassify (tree exp)
10113 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10114 arg, type, res, tmp;
10115 enum machine_mode mode;
10119 /* Verify the required arguments in the original call. */
10120 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10121 INTEGER_TYPE, INTEGER_TYPE,
10122 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10125 fp_nan = CALL_EXPR_ARG (exp, 0);
10126 fp_infinite = CALL_EXPR_ARG (exp, 1);
10127 fp_normal = CALL_EXPR_ARG (exp, 2);
10128 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10129 fp_zero = CALL_EXPR_ARG (exp, 4);
10130 arg = CALL_EXPR_ARG (exp, 5);
10131 type = TREE_TYPE (arg);
10132 mode = TYPE_MODE (type);
10133 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
10135 /* fpclassify(x) ->
10136 isnan(x) ? FP_NAN :
10137 (fabs(x) == Inf ? FP_INFINITE :
10138 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10139 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10141 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
10142 build_real (type, dconst0));
10143 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
10145 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10146 real_from_string (&r, buf);
10147 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
10148 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
10150 if (HONOR_INFINITIES (mode))
10153 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
10154 build_real (type, r));
10155 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
10158 if (HONOR_NANS (mode))
10160 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
10161 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
10167 /* Fold a call to an unordered comparison function such as
10168 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10169 being called and ARG0 and ARG1 are the arguments for the call.
10170 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10171 the opposite of the desired result. UNORDERED_CODE is used
10172 for modes that can hold NaNs and ORDERED_CODE is used for
10176 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
10177 enum tree_code unordered_code,
10178 enum tree_code ordered_code)
10180 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10181 enum tree_code code;
10183 enum tree_code code0, code1;
10184 tree cmp_type = NULL_TREE;
10186 type0 = TREE_TYPE (arg0);
10187 type1 = TREE_TYPE (arg1);
10189 code0 = TREE_CODE (type0);
10190 code1 = TREE_CODE (type1);
10192 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10193 /* Choose the wider of two real types. */
10194 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10196 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10198 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10201 arg0 = fold_convert (cmp_type, arg0);
10202 arg1 = fold_convert (cmp_type, arg1);
10204 if (unordered_code == UNORDERED_EXPR)
10206 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10207 return omit_two_operands (type, integer_zero_node, arg0, arg1);
10208 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
10211 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10213 return fold_build1 (TRUTH_NOT_EXPR, type,
10214 fold_build2 (code, type, arg0, arg1));
10217 /* Fold a call to built-in function FNDECL with 0 arguments.
10218 IGNORE is true if the result of the function call is ignored. This
10219 function returns NULL_TREE if no simplification was possible. */
10222 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10224 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10225 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10228 CASE_FLT_FN (BUILT_IN_INF):
10229 case BUILT_IN_INFD32:
10230 case BUILT_IN_INFD64:
10231 case BUILT_IN_INFD128:
10232 return fold_builtin_inf (type, true);
10234 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10235 return fold_builtin_inf (type, false);
10237 case BUILT_IN_CLASSIFY_TYPE:
10238 return fold_builtin_classify_type (NULL_TREE);
10246 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10247 IGNORE is true if the result of the function call is ignored. This
10248 function returns NULL_TREE if no simplification was possible. */
10251 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
10253 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10254 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10258 case BUILT_IN_CONSTANT_P:
10260 tree val = fold_builtin_constant_p (arg0);
10262 /* Gimplification will pull the CALL_EXPR for the builtin out of
10263 an if condition. When not optimizing, we'll not CSE it back.
10264 To avoid link error types of regressions, return false now. */
10265 if (!val && !optimize)
10266 val = integer_zero_node;
10271 case BUILT_IN_CLASSIFY_TYPE:
10272 return fold_builtin_classify_type (arg0);
10274 case BUILT_IN_STRLEN:
10275 return fold_builtin_strlen (arg0);
10277 CASE_FLT_FN (BUILT_IN_FABS):
10278 return fold_builtin_fabs (arg0, type);
10281 case BUILT_IN_LABS:
10282 case BUILT_IN_LLABS:
10283 case BUILT_IN_IMAXABS:
10284 return fold_builtin_abs (arg0, type);
10286 CASE_FLT_FN (BUILT_IN_CONJ):
10287 if (validate_arg (arg0, COMPLEX_TYPE)
10288 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10289 return fold_build1 (CONJ_EXPR, type, arg0);
10292 CASE_FLT_FN (BUILT_IN_CREAL):
10293 if (validate_arg (arg0, COMPLEX_TYPE)
10294 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10295 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
10298 CASE_FLT_FN (BUILT_IN_CIMAG):
10299 if (validate_arg (arg0, COMPLEX_TYPE))
10300 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
10303 CASE_FLT_FN (BUILT_IN_CCOS):
10304 return fold_builtin_ccos(arg0, type, fndecl, /*hyper=*/ false);
10306 CASE_FLT_FN (BUILT_IN_CCOSH):
10307 return fold_builtin_ccos(arg0, type, fndecl, /*hyper=*/ true);
10310 CASE_FLT_FN (BUILT_IN_CSIN):
10311 if (validate_arg (arg0, COMPLEX_TYPE)
10312 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10313 return do_mpc_arg1 (arg0, type, mpc_sin);
10316 CASE_FLT_FN (BUILT_IN_CSINH):
10317 if (validate_arg (arg0, COMPLEX_TYPE)
10318 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10319 return do_mpc_arg1 (arg0, type, mpc_sinh);
10322 CASE_FLT_FN (BUILT_IN_CTAN):
10323 if (validate_arg (arg0, COMPLEX_TYPE)
10324 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10325 return do_mpc_arg1 (arg0, type, mpc_tan);
10328 CASE_FLT_FN (BUILT_IN_CTANH):
10329 if (validate_arg (arg0, COMPLEX_TYPE)
10330 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10331 return do_mpc_arg1 (arg0, type, mpc_tanh);
10334 CASE_FLT_FN (BUILT_IN_CLOG):
10335 if (validate_arg (arg0, COMPLEX_TYPE)
10336 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10337 return do_mpc_arg1 (arg0, type, mpc_log);
10340 CASE_FLT_FN (BUILT_IN_CSQRT):
10341 if (validate_arg (arg0, COMPLEX_TYPE)
10342 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10343 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10347 CASE_FLT_FN (BUILT_IN_CABS):
10348 return fold_builtin_cabs (arg0, type, fndecl);
10350 CASE_FLT_FN (BUILT_IN_CARG):
10351 return fold_builtin_carg (arg0, type);
10353 CASE_FLT_FN (BUILT_IN_SQRT):
10354 return fold_builtin_sqrt (arg0, type);
10356 CASE_FLT_FN (BUILT_IN_CBRT):
10357 return fold_builtin_cbrt (arg0, type);
10359 CASE_FLT_FN (BUILT_IN_ASIN):
10360 if (validate_arg (arg0, REAL_TYPE))
10361 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10362 &dconstm1, &dconst1, true);
10365 CASE_FLT_FN (BUILT_IN_ACOS):
10366 if (validate_arg (arg0, REAL_TYPE))
10367 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10368 &dconstm1, &dconst1, true);
10371 CASE_FLT_FN (BUILT_IN_ATAN):
10372 if (validate_arg (arg0, REAL_TYPE))
10373 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10376 CASE_FLT_FN (BUILT_IN_ASINH):
10377 if (validate_arg (arg0, REAL_TYPE))
10378 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10381 CASE_FLT_FN (BUILT_IN_ACOSH):
10382 if (validate_arg (arg0, REAL_TYPE))
10383 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10384 &dconst1, NULL, true);
10387 CASE_FLT_FN (BUILT_IN_ATANH):
10388 if (validate_arg (arg0, REAL_TYPE))
10389 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10390 &dconstm1, &dconst1, false);
10393 CASE_FLT_FN (BUILT_IN_SIN):
10394 if (validate_arg (arg0, REAL_TYPE))
10395 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10398 CASE_FLT_FN (BUILT_IN_COS):
10399 return fold_builtin_cos (arg0, type, fndecl);
10401 CASE_FLT_FN (BUILT_IN_TAN):
10402 return fold_builtin_tan (arg0, type);
10404 CASE_FLT_FN (BUILT_IN_CEXP):
10405 return fold_builtin_cexp (arg0, type);
10407 CASE_FLT_FN (BUILT_IN_CEXPI):
10408 if (validate_arg (arg0, REAL_TYPE))
10409 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10412 CASE_FLT_FN (BUILT_IN_SINH):
10413 if (validate_arg (arg0, REAL_TYPE))
10414 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10417 CASE_FLT_FN (BUILT_IN_COSH):
10418 return fold_builtin_cosh (arg0, type, fndecl);
10420 CASE_FLT_FN (BUILT_IN_TANH):
10421 if (validate_arg (arg0, REAL_TYPE))
10422 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10425 CASE_FLT_FN (BUILT_IN_ERF):
10426 if (validate_arg (arg0, REAL_TYPE))
10427 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10430 CASE_FLT_FN (BUILT_IN_ERFC):
10431 if (validate_arg (arg0, REAL_TYPE))
10432 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10435 CASE_FLT_FN (BUILT_IN_TGAMMA):
10436 if (validate_arg (arg0, REAL_TYPE))
10437 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10440 CASE_FLT_FN (BUILT_IN_EXP):
10441 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10443 CASE_FLT_FN (BUILT_IN_EXP2):
10444 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10446 CASE_FLT_FN (BUILT_IN_EXP10):
10447 CASE_FLT_FN (BUILT_IN_POW10):
10448 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10450 CASE_FLT_FN (BUILT_IN_EXPM1):
10451 if (validate_arg (arg0, REAL_TYPE))
10452 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10455 CASE_FLT_FN (BUILT_IN_LOG):
10456 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10458 CASE_FLT_FN (BUILT_IN_LOG2):
10459 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10461 CASE_FLT_FN (BUILT_IN_LOG10):
10462 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10464 CASE_FLT_FN (BUILT_IN_LOG1P):
10465 if (validate_arg (arg0, REAL_TYPE))
10466 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10467 &dconstm1, NULL, false);
10470 CASE_FLT_FN (BUILT_IN_J0):
10471 if (validate_arg (arg0, REAL_TYPE))
10472 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10476 CASE_FLT_FN (BUILT_IN_J1):
10477 if (validate_arg (arg0, REAL_TYPE))
10478 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10482 CASE_FLT_FN (BUILT_IN_Y0):
10483 if (validate_arg (arg0, REAL_TYPE))
10484 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10485 &dconst0, NULL, false);
10488 CASE_FLT_FN (BUILT_IN_Y1):
10489 if (validate_arg (arg0, REAL_TYPE))
10490 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10491 &dconst0, NULL, false);
10494 CASE_FLT_FN (BUILT_IN_NAN):
10495 case BUILT_IN_NAND32:
10496 case BUILT_IN_NAND64:
10497 case BUILT_IN_NAND128:
10498 return fold_builtin_nan (arg0, type, true);
10500 CASE_FLT_FN (BUILT_IN_NANS):
10501 return fold_builtin_nan (arg0, type, false);
10503 CASE_FLT_FN (BUILT_IN_FLOOR):
10504 return fold_builtin_floor (fndecl, arg0);
10506 CASE_FLT_FN (BUILT_IN_CEIL):
10507 return fold_builtin_ceil (fndecl, arg0);
10509 CASE_FLT_FN (BUILT_IN_TRUNC):
10510 return fold_builtin_trunc (fndecl, arg0);
10512 CASE_FLT_FN (BUILT_IN_ROUND):
10513 return fold_builtin_round (fndecl, arg0);
10515 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10516 CASE_FLT_FN (BUILT_IN_RINT):
10517 return fold_trunc_transparent_mathfn (fndecl, arg0);
10519 CASE_FLT_FN (BUILT_IN_LCEIL):
10520 CASE_FLT_FN (BUILT_IN_LLCEIL):
10521 CASE_FLT_FN (BUILT_IN_LFLOOR):
10522 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10523 CASE_FLT_FN (BUILT_IN_LROUND):
10524 CASE_FLT_FN (BUILT_IN_LLROUND):
10525 return fold_builtin_int_roundingfn (fndecl, arg0);
10527 CASE_FLT_FN (BUILT_IN_LRINT):
10528 CASE_FLT_FN (BUILT_IN_LLRINT):
10529 return fold_fixed_mathfn (fndecl, arg0);
10531 case BUILT_IN_BSWAP32:
10532 case BUILT_IN_BSWAP64:
10533 return fold_builtin_bswap (fndecl, arg0);
10535 CASE_INT_FN (BUILT_IN_FFS):
10536 CASE_INT_FN (BUILT_IN_CLZ):
10537 CASE_INT_FN (BUILT_IN_CTZ):
10538 CASE_INT_FN (BUILT_IN_POPCOUNT):
10539 CASE_INT_FN (BUILT_IN_PARITY):
10540 return fold_builtin_bitop (fndecl, arg0);
10542 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10543 return fold_builtin_signbit (arg0, type);
10545 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10546 return fold_builtin_significand (arg0, type);
10548 CASE_FLT_FN (BUILT_IN_ILOGB):
10549 CASE_FLT_FN (BUILT_IN_LOGB):
10550 return fold_builtin_logb (arg0, type);
10552 case BUILT_IN_ISASCII:
10553 return fold_builtin_isascii (arg0);
10555 case BUILT_IN_TOASCII:
10556 return fold_builtin_toascii (arg0);
10558 case BUILT_IN_ISDIGIT:
10559 return fold_builtin_isdigit (arg0);
10561 CASE_FLT_FN (BUILT_IN_FINITE):
10562 case BUILT_IN_FINITED32:
10563 case BUILT_IN_FINITED64:
10564 case BUILT_IN_FINITED128:
10565 case BUILT_IN_ISFINITE:
10566 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10568 CASE_FLT_FN (BUILT_IN_ISINF):
10569 case BUILT_IN_ISINFD32:
10570 case BUILT_IN_ISINFD64:
10571 case BUILT_IN_ISINFD128:
10572 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10574 case BUILT_IN_ISINF_SIGN:
10575 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10577 CASE_FLT_FN (BUILT_IN_ISNAN):
10578 case BUILT_IN_ISNAND32:
10579 case BUILT_IN_ISNAND64:
10580 case BUILT_IN_ISNAND128:
10581 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10583 case BUILT_IN_PRINTF:
10584 case BUILT_IN_PRINTF_UNLOCKED:
10585 case BUILT_IN_VPRINTF:
10586 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10596 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10597 IGNORE is true if the result of the function call is ignored. This
10598 function returns NULL_TREE if no simplification was possible. */
10601 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10603 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10604 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10608 CASE_FLT_FN (BUILT_IN_JN):
10609 if (validate_arg (arg0, INTEGER_TYPE)
10610 && validate_arg (arg1, REAL_TYPE))
10611 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10614 CASE_FLT_FN (BUILT_IN_YN):
10615 if (validate_arg (arg0, INTEGER_TYPE)
10616 && validate_arg (arg1, REAL_TYPE))
10617 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10621 CASE_FLT_FN (BUILT_IN_DREM):
10622 CASE_FLT_FN (BUILT_IN_REMAINDER):
10623 if (validate_arg (arg0, REAL_TYPE)
10624 && validate_arg(arg1, REAL_TYPE))
10625 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10628 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10629 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10630 if (validate_arg (arg0, REAL_TYPE)
10631 && validate_arg(arg1, POINTER_TYPE))
10632 return do_mpfr_lgamma_r (arg0, arg1, type);
10635 CASE_FLT_FN (BUILT_IN_ATAN2):
10636 if (validate_arg (arg0, REAL_TYPE)
10637 && validate_arg(arg1, REAL_TYPE))
10638 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10641 CASE_FLT_FN (BUILT_IN_FDIM):
10642 if (validate_arg (arg0, REAL_TYPE)
10643 && validate_arg(arg1, REAL_TYPE))
10644 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10647 CASE_FLT_FN (BUILT_IN_HYPOT):
10648 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10650 CASE_FLT_FN (BUILT_IN_LDEXP):
10651 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10652 CASE_FLT_FN (BUILT_IN_SCALBN):
10653 CASE_FLT_FN (BUILT_IN_SCALBLN):
10654 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10656 CASE_FLT_FN (BUILT_IN_FREXP):
10657 return fold_builtin_frexp (arg0, arg1, type);
10659 CASE_FLT_FN (BUILT_IN_MODF):
10660 return fold_builtin_modf (arg0, arg1, type);
10662 case BUILT_IN_BZERO:
10663 return fold_builtin_bzero (arg0, arg1, ignore);
10665 case BUILT_IN_FPUTS:
10666 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10668 case BUILT_IN_FPUTS_UNLOCKED:
10669 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10671 case BUILT_IN_STRSTR:
10672 return fold_builtin_strstr (arg0, arg1, type);
10674 case BUILT_IN_STRCAT:
10675 return fold_builtin_strcat (arg0, arg1);
10677 case BUILT_IN_STRSPN:
10678 return fold_builtin_strspn (arg0, arg1);
10680 case BUILT_IN_STRCSPN:
10681 return fold_builtin_strcspn (arg0, arg1);
10683 case BUILT_IN_STRCHR:
10684 case BUILT_IN_INDEX:
10685 return fold_builtin_strchr (arg0, arg1, type);
10687 case BUILT_IN_STRRCHR:
10688 case BUILT_IN_RINDEX:
10689 return fold_builtin_strrchr (arg0, arg1, type);
10691 case BUILT_IN_STRCPY:
10692 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10694 case BUILT_IN_STPCPY:
10697 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10701 return build_call_expr (fn, 2, arg0, arg1);
10705 case BUILT_IN_STRCMP:
10706 return fold_builtin_strcmp (arg0, arg1);
10708 case BUILT_IN_STRPBRK:
10709 return fold_builtin_strpbrk (arg0, arg1, type);
10711 case BUILT_IN_EXPECT:
10712 return fold_builtin_expect (arg0, arg1);
10714 CASE_FLT_FN (BUILT_IN_POW):
10715 return fold_builtin_pow (fndecl, arg0, arg1, type);
10717 CASE_FLT_FN (BUILT_IN_POWI):
10718 return fold_builtin_powi (fndecl, arg0, arg1, type);
10720 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10721 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10723 CASE_FLT_FN (BUILT_IN_FMIN):
10724 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10726 CASE_FLT_FN (BUILT_IN_FMAX):
10727 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10729 case BUILT_IN_ISGREATER:
10730 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10731 case BUILT_IN_ISGREATEREQUAL:
10732 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10733 case BUILT_IN_ISLESS:
10734 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10735 case BUILT_IN_ISLESSEQUAL:
10736 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10737 case BUILT_IN_ISLESSGREATER:
10738 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10739 case BUILT_IN_ISUNORDERED:
10740 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10743 /* We do the folding for va_start in the expander. */
10744 case BUILT_IN_VA_START:
10747 case BUILT_IN_SPRINTF:
10748 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10750 case BUILT_IN_OBJECT_SIZE:
10751 return fold_builtin_object_size (arg0, arg1);
10753 case BUILT_IN_PRINTF:
10754 case BUILT_IN_PRINTF_UNLOCKED:
10755 case BUILT_IN_VPRINTF:
10756 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10758 case BUILT_IN_PRINTF_CHK:
10759 case BUILT_IN_VPRINTF_CHK:
10760 if (!validate_arg (arg0, INTEGER_TYPE)
10761 || TREE_SIDE_EFFECTS (arg0))
10764 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10767 case BUILT_IN_FPRINTF:
10768 case BUILT_IN_FPRINTF_UNLOCKED:
10769 case BUILT_IN_VFPRINTF:
10770 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10779 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10780 and ARG2. IGNORE is true if the result of the function call is ignored.
10781 This function returns NULL_TREE if no simplification was possible. */
10784 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10786 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10787 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10791 CASE_FLT_FN (BUILT_IN_SINCOS):
10792 return fold_builtin_sincos (arg0, arg1, arg2);
10794 CASE_FLT_FN (BUILT_IN_FMA):
10795 if (validate_arg (arg0, REAL_TYPE)
10796 && validate_arg(arg1, REAL_TYPE)
10797 && validate_arg(arg2, REAL_TYPE))
10798 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10801 CASE_FLT_FN (BUILT_IN_REMQUO):
10802 if (validate_arg (arg0, REAL_TYPE)
10803 && validate_arg(arg1, REAL_TYPE)
10804 && validate_arg(arg2, POINTER_TYPE))
10805 return do_mpfr_remquo (arg0, arg1, arg2);
10808 case BUILT_IN_MEMSET:
10809 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10811 case BUILT_IN_BCOPY:
10812 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10814 case BUILT_IN_MEMCPY:
10815 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10817 case BUILT_IN_MEMPCPY:
10818 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10820 case BUILT_IN_MEMMOVE:
10821 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10823 case BUILT_IN_STRNCAT:
10824 return fold_builtin_strncat (arg0, arg1, arg2);
10826 case BUILT_IN_STRNCPY:
10827 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10829 case BUILT_IN_STRNCMP:
10830 return fold_builtin_strncmp (arg0, arg1, arg2);
10832 case BUILT_IN_MEMCHR:
10833 return fold_builtin_memchr (arg0, arg1, arg2, type);
10835 case BUILT_IN_BCMP:
10836 case BUILT_IN_MEMCMP:
10837 return fold_builtin_memcmp (arg0, arg1, arg2);;
10839 case BUILT_IN_SPRINTF:
10840 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10842 case BUILT_IN_STRCPY_CHK:
10843 case BUILT_IN_STPCPY_CHK:
10844 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10847 case BUILT_IN_STRCAT_CHK:
10848 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10850 case BUILT_IN_PRINTF_CHK:
10851 case BUILT_IN_VPRINTF_CHK:
10852 if (!validate_arg (arg0, INTEGER_TYPE)
10853 || TREE_SIDE_EFFECTS (arg0))
10856 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10859 case BUILT_IN_FPRINTF:
10860 case BUILT_IN_FPRINTF_UNLOCKED:
10861 case BUILT_IN_VFPRINTF:
10862 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10864 case BUILT_IN_FPRINTF_CHK:
10865 case BUILT_IN_VFPRINTF_CHK:
10866 if (!validate_arg (arg1, INTEGER_TYPE)
10867 || TREE_SIDE_EFFECTS (arg1))
10870 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10879 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10880 ARG2, and ARG3. IGNORE is true if the result of the function call is
10881 ignored. This function returns NULL_TREE if no simplification was
10885 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10888 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10892 case BUILT_IN_MEMCPY_CHK:
10893 case BUILT_IN_MEMPCPY_CHK:
10894 case BUILT_IN_MEMMOVE_CHK:
10895 case BUILT_IN_MEMSET_CHK:
10896 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10898 DECL_FUNCTION_CODE (fndecl));
10900 case BUILT_IN_STRNCPY_CHK:
10901 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10903 case BUILT_IN_STRNCAT_CHK:
10904 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10906 case BUILT_IN_FPRINTF_CHK:
10907 case BUILT_IN_VFPRINTF_CHK:
10908 if (!validate_arg (arg1, INTEGER_TYPE)
10909 || TREE_SIDE_EFFECTS (arg1))
10912 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10922 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10923 arguments, where NARGS <= 4. IGNORE is true if the result of the
10924 function call is ignored. This function returns NULL_TREE if no
10925 simplification was possible. Note that this only folds builtins with
10926 fixed argument patterns. Foldings that do varargs-to-varargs
10927 transformations, or that match calls with more than 4 arguments,
10928 need to be handled with fold_builtin_varargs instead. */
10930 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10933 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10935 tree ret = NULL_TREE;
10940 ret = fold_builtin_0 (fndecl, ignore);
10943 ret = fold_builtin_1 (fndecl, args[0], ignore);
10946 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10949 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10952 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10960 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10961 TREE_NO_WARNING (ret) = 1;
10967 /* Builtins with folding operations that operate on "..." arguments
10968 need special handling; we need to store the arguments in a convenient
10969 data structure before attempting any folding. Fortunately there are
10970 only a few builtins that fall into this category. FNDECL is the
10971 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10972 result of the function call is ignored. */
10975 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10977 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10978 tree ret = NULL_TREE;
10982 case BUILT_IN_SPRINTF_CHK:
10983 case BUILT_IN_VSPRINTF_CHK:
10984 ret = fold_builtin_sprintf_chk (exp, fcode);
10987 case BUILT_IN_SNPRINTF_CHK:
10988 case BUILT_IN_VSNPRINTF_CHK:
10989 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10992 case BUILT_IN_FPCLASSIFY:
10993 ret = fold_builtin_fpclassify (exp);
11001 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11002 TREE_NO_WARNING (ret) = 1;
11008 /* Return true if FNDECL shouldn't be folded right now.
11009 If a built-in function has an inline attribute always_inline
11010 wrapper, defer folding it after always_inline functions have
11011 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11012 might not be performed. */
11015 avoid_folding_inline_builtin (tree fndecl)
11017 return (DECL_DECLARED_INLINE_P (fndecl)
11018 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11020 && !cfun->always_inline_functions_inlined
11021 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11024 /* A wrapper function for builtin folding that prevents warnings for
11025 "statement without effect" and the like, caused by removing the
11026 call node earlier than the warning is generated. */
11029 fold_call_expr (tree exp, bool ignore)
11031 tree ret = NULL_TREE;
11032 tree fndecl = get_callee_fndecl (exp);
11034 && TREE_CODE (fndecl) == FUNCTION_DECL
11035 && DECL_BUILT_IN (fndecl)
11036 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11037 yet. Defer folding until we see all the arguments
11038 (after inlining). */
11039 && !CALL_EXPR_VA_ARG_PACK (exp))
11041 int nargs = call_expr_nargs (exp);
11043 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11044 instead last argument is __builtin_va_arg_pack (). Defer folding
11045 even in that case, until arguments are finalized. */
11046 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11048 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11050 && TREE_CODE (fndecl2) == FUNCTION_DECL
11051 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11052 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11056 if (avoid_folding_inline_builtin (fndecl))
11059 /* FIXME: Don't use a list in this interface. */
11060 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11061 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
11064 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11066 tree *args = CALL_EXPR_ARGP (exp);
11067 ret = fold_builtin_n (fndecl, args, nargs, ignore);
11070 ret = fold_builtin_varargs (fndecl, exp, ignore);
11073 /* Propagate location information from original call to
11074 expansion of builtin. Otherwise things like
11075 maybe_emit_chk_warning, that operate on the expansion
11076 of a builtin, will use the wrong location information. */
11077 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
11079 tree realret = ret;
11080 if (TREE_CODE (ret) == NOP_EXPR)
11081 realret = TREE_OPERAND (ret, 0);
11082 if (CAN_HAVE_LOCATION_P (realret)
11083 && !EXPR_HAS_LOCATION (realret))
11084 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
11093 /* Conveniently construct a function call expression. FNDECL names the
11094 function to be called and ARGLIST is a TREE_LIST of arguments. */
11097 build_function_call_expr (tree fndecl, tree arglist)
11099 tree fntype = TREE_TYPE (fndecl);
11100 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11101 int n = list_length (arglist);
11102 tree *argarray = (tree *) alloca (n * sizeof (tree));
11105 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
11106 argarray[i] = TREE_VALUE (arglist);
11107 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
11110 /* Conveniently construct a function call expression. FNDECL names the
11111 function to be called, N is the number of arguments, and the "..."
11112 parameters are the argument expressions. */
11115 build_call_expr (tree fndecl, int n, ...)
11118 tree fntype = TREE_TYPE (fndecl);
11119 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11120 tree *argarray = (tree *) alloca (n * sizeof (tree));
11124 for (i = 0; i < n; i++)
11125 argarray[i] = va_arg (ap, tree);
11127 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
11130 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11131 N arguments are passed in the array ARGARRAY. */
11134 fold_builtin_call_array (tree type,
11139 tree ret = NULL_TREE;
11143 if (TREE_CODE (fn) == ADDR_EXPR)
11145 tree fndecl = TREE_OPERAND (fn, 0);
11146 if (TREE_CODE (fndecl) == FUNCTION_DECL
11147 && DECL_BUILT_IN (fndecl))
11149 /* If last argument is __builtin_va_arg_pack (), arguments to this
11150 function are not finalized yet. Defer folding until they are. */
11151 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11153 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11155 && TREE_CODE (fndecl2) == FUNCTION_DECL
11156 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11157 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11158 return build_call_array (type, fn, n, argarray);
11160 if (avoid_folding_inline_builtin (fndecl))
11161 return build_call_array (type, fn, n, argarray);
11162 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11164 tree arglist = NULL_TREE;
11165 for (i = n - 1; i >= 0; i--)
11166 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
11167 ret = targetm.fold_builtin (fndecl, arglist, false);
11170 return build_call_array (type, fn, n, argarray);
11172 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11174 /* First try the transformations that don't require consing up
11176 ret = fold_builtin_n (fndecl, argarray, n, false);
11181 /* If we got this far, we need to build an exp. */
11182 exp = build_call_array (type, fn, n, argarray);
11183 ret = fold_builtin_varargs (fndecl, exp, false);
11184 return ret ? ret : exp;
11188 return build_call_array (type, fn, n, argarray);
11191 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11192 along with N new arguments specified as the "..." parameters. SKIP
11193 is the number of arguments in EXP to be omitted. This function is used
11194 to do varargs-to-varargs transformations. */
11197 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
11199 int oldnargs = call_expr_nargs (exp);
11200 int nargs = oldnargs - skip + n;
11201 tree fntype = TREE_TYPE (fndecl);
11202 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11210 buffer = XALLOCAVEC (tree, nargs);
11212 for (i = 0; i < n; i++)
11213 buffer[i] = va_arg (ap, tree);
11215 for (j = skip; j < oldnargs; j++, i++)
11216 buffer[i] = CALL_EXPR_ARG (exp, j);
11219 buffer = CALL_EXPR_ARGP (exp) + skip;
11221 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
11224 /* Validate a single argument ARG against a tree code CODE representing
11228 validate_arg (const_tree arg, enum tree_code code)
11232 else if (code == POINTER_TYPE)
11233 return POINTER_TYPE_P (TREE_TYPE (arg));
11234 else if (code == INTEGER_TYPE)
11235 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11236 return code == TREE_CODE (TREE_TYPE (arg));
11239 /* This function validates the types of a function call argument list
11240 against a specified list of tree_codes. If the last specifier is a 0,
11241 that represents an ellipses, otherwise the last specifier must be a
11244 This is the GIMPLE version of validate_arglist. Eventually we want to
11245 completely convert builtins.c to work from GIMPLEs and the tree based
11246 validate_arglist will then be removed. */
11249 validate_gimple_arglist (const_gimple call, ...)
11251 enum tree_code code;
11257 va_start (ap, call);
11262 code = (enum tree_code) va_arg (ap, int);
11266 /* This signifies an ellipses, any further arguments are all ok. */
11270 /* This signifies an endlink, if no arguments remain, return
11271 true, otherwise return false. */
11272 res = (i == gimple_call_num_args (call));
11275 /* If no parameters remain or the parameter's code does not
11276 match the specified code, return false. Otherwise continue
11277 checking any remaining arguments. */
11278 arg = gimple_call_arg (call, i++);
11279 if (!validate_arg (arg, code))
11286 /* We need gotos here since we can only have one VA_CLOSE in a
11294 /* This function validates the types of a function call argument list
11295 against a specified list of tree_codes. If the last specifier is a 0,
11296 that represents an ellipses, otherwise the last specifier must be a
11300 validate_arglist (const_tree callexpr, ...)
11302 enum tree_code code;
11305 const_call_expr_arg_iterator iter;
11308 va_start (ap, callexpr);
11309 init_const_call_expr_arg_iterator (callexpr, &iter);
11313 code = (enum tree_code) va_arg (ap, int);
11317 /* This signifies an ellipses, any further arguments are all ok. */
11321 /* This signifies an endlink, if no arguments remain, return
11322 true, otherwise return false. */
11323 res = !more_const_call_expr_args_p (&iter);
11326 /* If no parameters remain or the parameter's code does not
11327 match the specified code, return false. Otherwise continue
11328 checking any remaining arguments. */
11329 arg = next_const_call_expr_arg (&iter);
11330 if (!validate_arg (arg, code))
11337 /* We need gotos here since we can only have one VA_CLOSE in a
11345 /* Default target-specific builtin expander that does nothing. */
11348 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11349 rtx target ATTRIBUTE_UNUSED,
11350 rtx subtarget ATTRIBUTE_UNUSED,
11351 enum machine_mode mode ATTRIBUTE_UNUSED,
11352 int ignore ATTRIBUTE_UNUSED)
11357 /* Returns true is EXP represents data that would potentially reside
11358 in a readonly section. */
11361 readonly_data_expr (tree exp)
11365 if (TREE_CODE (exp) != ADDR_EXPR)
11368 exp = get_base_address (TREE_OPERAND (exp, 0));
11372 /* Make sure we call decl_readonly_section only for trees it
11373 can handle (since it returns true for everything it doesn't
11375 if (TREE_CODE (exp) == STRING_CST
11376 || TREE_CODE (exp) == CONSTRUCTOR
11377 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11378 return decl_readonly_section (exp, 0);
11383 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11384 to the call, and TYPE is its return type.
11386 Return NULL_TREE if no simplification was possible, otherwise return the
11387 simplified form of the call as a tree.
11389 The simplified form may be a constant or other expression which
11390 computes the same value, but in a more efficient manner (including
11391 calls to other builtin functions).
11393 The call may contain arguments which need to be evaluated, but
11394 which are not useful to determine the result of the call. In
11395 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11396 COMPOUND_EXPR will be an argument which must be evaluated.
11397 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11398 COMPOUND_EXPR in the chain will contain the tree for the simplified
11399 form of the builtin function call. */
11402 fold_builtin_strstr (tree s1, tree s2, tree type)
11404 if (!validate_arg (s1, POINTER_TYPE)
11405 || !validate_arg (s2, POINTER_TYPE))
11410 const char *p1, *p2;
11412 p2 = c_getstr (s2);
11416 p1 = c_getstr (s1);
11419 const char *r = strstr (p1, p2);
11423 return build_int_cst (TREE_TYPE (s1), 0);
11425 /* Return an offset into the constant string argument. */
11426 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11427 s1, size_int (r - p1));
11428 return fold_convert (type, tem);
11431 /* The argument is const char *, and the result is char *, so we need
11432 a type conversion here to avoid a warning. */
11434 return fold_convert (type, s1);
11439 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11443 /* New argument list transforming strstr(s1, s2) to
11444 strchr(s1, s2[0]). */
11445 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11449 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11450 the call, and TYPE is its return type.
11452 Return NULL_TREE if no simplification was possible, otherwise return the
11453 simplified form of the call as a tree.
11455 The simplified form may be a constant or other expression which
11456 computes the same value, but in a more efficient manner (including
11457 calls to other builtin functions).
11459 The call may contain arguments which need to be evaluated, but
11460 which are not useful to determine the result of the call. In
11461 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11462 COMPOUND_EXPR will be an argument which must be evaluated.
11463 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11464 COMPOUND_EXPR in the chain will contain the tree for the simplified
11465 form of the builtin function call. */
11468 fold_builtin_strchr (tree s1, tree s2, tree type)
11470 if (!validate_arg (s1, POINTER_TYPE)
11471 || !validate_arg (s2, INTEGER_TYPE))
11477 if (TREE_CODE (s2) != INTEGER_CST)
11480 p1 = c_getstr (s1);
11487 if (target_char_cast (s2, &c))
11490 r = strchr (p1, c);
11493 return build_int_cst (TREE_TYPE (s1), 0);
11495 /* Return an offset into the constant string argument. */
11496 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11497 s1, size_int (r - p1));
11498 return fold_convert (type, tem);
11504 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11505 the call, and TYPE is its return type.
11507 Return NULL_TREE if no simplification was possible, otherwise return the
11508 simplified form of the call as a tree.
11510 The simplified form may be a constant or other expression which
11511 computes the same value, but in a more efficient manner (including
11512 calls to other builtin functions).
11514 The call may contain arguments which need to be evaluated, but
11515 which are not useful to determine the result of the call. In
11516 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11517 COMPOUND_EXPR will be an argument which must be evaluated.
11518 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11519 COMPOUND_EXPR in the chain will contain the tree for the simplified
11520 form of the builtin function call. */
11523 fold_builtin_strrchr (tree s1, tree s2, tree type)
11525 if (!validate_arg (s1, POINTER_TYPE)
11526 || !validate_arg (s2, INTEGER_TYPE))
11533 if (TREE_CODE (s2) != INTEGER_CST)
11536 p1 = c_getstr (s1);
11543 if (target_char_cast (s2, &c))
11546 r = strrchr (p1, c);
11549 return build_int_cst (TREE_TYPE (s1), 0);
11551 /* Return an offset into the constant string argument. */
11552 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11553 s1, size_int (r - p1));
11554 return fold_convert (type, tem);
11557 if (! integer_zerop (s2))
11560 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11564 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11565 return build_call_expr (fn, 2, s1, s2);
11569 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11570 to the call, and TYPE is its return type.
11572 Return NULL_TREE if no simplification was possible, otherwise return the
11573 simplified form of the call as a tree.
11575 The simplified form may be a constant or other expression which
11576 computes the same value, but in a more efficient manner (including
11577 calls to other builtin functions).
11579 The call may contain arguments which need to be evaluated, but
11580 which are not useful to determine the result of the call. In
11581 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11582 COMPOUND_EXPR will be an argument which must be evaluated.
11583 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11584 COMPOUND_EXPR in the chain will contain the tree for the simplified
11585 form of the builtin function call. */
11588 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11590 if (!validate_arg (s1, POINTER_TYPE)
11591 || !validate_arg (s2, POINTER_TYPE))
11596 const char *p1, *p2;
11598 p2 = c_getstr (s2);
11602 p1 = c_getstr (s1);
11605 const char *r = strpbrk (p1, p2);
11609 return build_int_cst (TREE_TYPE (s1), 0);
11611 /* Return an offset into the constant string argument. */
11612 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11613 s1, size_int (r - p1));
11614 return fold_convert (type, tem);
11618 /* strpbrk(x, "") == NULL.
11619 Evaluate and ignore s1 in case it had side-effects. */
11620 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11623 return NULL_TREE; /* Really call strpbrk. */
11625 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11629 /* New argument list transforming strpbrk(s1, s2) to
11630 strchr(s1, s2[0]). */
11631 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11635 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11638 Return NULL_TREE if no simplification was possible, otherwise return the
11639 simplified form of the call as a tree.
11641 The simplified form may be a constant or other expression which
11642 computes the same value, but in a more efficient manner (including
11643 calls to other builtin functions).
11645 The call may contain arguments which need to be evaluated, but
11646 which are not useful to determine the result of the call. In
11647 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11648 COMPOUND_EXPR will be an argument which must be evaluated.
11649 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11650 COMPOUND_EXPR in the chain will contain the tree for the simplified
11651 form of the builtin function call. */
11654 fold_builtin_strcat (tree dst, tree src)
11656 if (!validate_arg (dst, POINTER_TYPE)
11657 || !validate_arg (src, POINTER_TYPE))
11661 const char *p = c_getstr (src);
11663 /* If the string length is zero, return the dst parameter. */
11664 if (p && *p == '\0')
11671 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11672 arguments to the call.
11674 Return NULL_TREE if no simplification was possible, otherwise return the
11675 simplified form of the call as a tree.
11677 The simplified form may be a constant or other expression which
11678 computes the same value, but in a more efficient manner (including
11679 calls to other builtin functions).
11681 The call may contain arguments which need to be evaluated, but
11682 which are not useful to determine the result of the call. In
11683 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11684 COMPOUND_EXPR will be an argument which must be evaluated.
11685 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11686 COMPOUND_EXPR in the chain will contain the tree for the simplified
11687 form of the builtin function call. */
11690 fold_builtin_strncat (tree dst, tree src, tree len)
11692 if (!validate_arg (dst, POINTER_TYPE)
11693 || !validate_arg (src, POINTER_TYPE)
11694 || !validate_arg (len, INTEGER_TYPE))
11698 const char *p = c_getstr (src);
11700 /* If the requested length is zero, or the src parameter string
11701 length is zero, return the dst parameter. */
11702 if (integer_zerop (len) || (p && *p == '\0'))
11703 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11705 /* If the requested len is greater than or equal to the string
11706 length, call strcat. */
11707 if (TREE_CODE (len) == INTEGER_CST && p
11708 && compare_tree_int (len, strlen (p)) >= 0)
11710 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11712 /* If the replacement _DECL isn't initialized, don't do the
11717 return build_call_expr (fn, 2, dst, src);
11723 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11726 Return NULL_TREE if no simplification was possible, otherwise return the
11727 simplified form of the call as a tree.
11729 The simplified form may be a constant or other expression which
11730 computes the same value, but in a more efficient manner (including
11731 calls to other builtin functions).
11733 The call may contain arguments which need to be evaluated, but
11734 which are not useful to determine the result of the call. In
11735 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11736 COMPOUND_EXPR will be an argument which must be evaluated.
11737 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11738 COMPOUND_EXPR in the chain will contain the tree for the simplified
11739 form of the builtin function call. */
11742 fold_builtin_strspn (tree s1, tree s2)
11744 if (!validate_arg (s1, POINTER_TYPE)
11745 || !validate_arg (s2, POINTER_TYPE))
11749 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11751 /* If both arguments are constants, evaluate at compile-time. */
11754 const size_t r = strspn (p1, p2);
11755 return size_int (r);
11758 /* If either argument is "", return NULL_TREE. */
11759 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11760 /* Evaluate and ignore both arguments in case either one has
11762 return omit_two_operands (size_type_node, size_zero_node,
11768 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11771 Return NULL_TREE if no simplification was possible, otherwise return the
11772 simplified form of the call as a tree.
11774 The simplified form may be a constant or other expression which
11775 computes the same value, but in a more efficient manner (including
11776 calls to other builtin functions).
11778 The call may contain arguments which need to be evaluated, but
11779 which are not useful to determine the result of the call. In
11780 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11781 COMPOUND_EXPR will be an argument which must be evaluated.
11782 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11783 COMPOUND_EXPR in the chain will contain the tree for the simplified
11784 form of the builtin function call. */
11787 fold_builtin_strcspn (tree s1, tree s2)
11789 if (!validate_arg (s1, POINTER_TYPE)
11790 || !validate_arg (s2, POINTER_TYPE))
11794 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11796 /* If both arguments are constants, evaluate at compile-time. */
11799 const size_t r = strcspn (p1, p2);
11800 return size_int (r);
11803 /* If the first argument is "", return NULL_TREE. */
11804 if (p1 && *p1 == '\0')
11806 /* Evaluate and ignore argument s2 in case it has
11808 return omit_one_operand (size_type_node,
11809 size_zero_node, s2);
11812 /* If the second argument is "", return __builtin_strlen(s1). */
11813 if (p2 && *p2 == '\0')
11815 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11817 /* If the replacement _DECL isn't initialized, don't do the
11822 return build_call_expr (fn, 1, s1);
11828 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11829 to the call. IGNORE is true if the value returned
11830 by the builtin will be ignored. UNLOCKED is true is true if this
11831 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11832 the known length of the string. Return NULL_TREE if no simplification
11836 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11838 /* If we're using an unlocked function, assume the other unlocked
11839 functions exist explicitly. */
11840 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11841 : implicit_built_in_decls[BUILT_IN_FPUTC];
11842 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11843 : implicit_built_in_decls[BUILT_IN_FWRITE];
11845 /* If the return value is used, don't do the transformation. */
11849 /* Verify the arguments in the original call. */
11850 if (!validate_arg (arg0, POINTER_TYPE)
11851 || !validate_arg (arg1, POINTER_TYPE))
11855 len = c_strlen (arg0, 0);
11857 /* Get the length of the string passed to fputs. If the length
11858 can't be determined, punt. */
11860 || TREE_CODE (len) != INTEGER_CST)
11863 switch (compare_tree_int (len, 1))
11865 case -1: /* length is 0, delete the call entirely . */
11866 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11868 case 0: /* length is 1, call fputc. */
11870 const char *p = c_getstr (arg0);
11875 return build_call_expr (fn_fputc, 2,
11876 build_int_cst (NULL_TREE, p[0]), arg1);
11882 case 1: /* length is greater than 1, call fwrite. */
11884 /* If optimizing for size keep fputs. */
11885 if (optimize_function_for_size_p (cfun))
11887 /* New argument list transforming fputs(string, stream) to
11888 fwrite(string, 1, len, stream). */
11890 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11895 gcc_unreachable ();
11900 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11901 produced. False otherwise. This is done so that we don't output the error
11902 or warning twice or three times. */
11905 fold_builtin_next_arg (tree exp, bool va_start_p)
11907 tree fntype = TREE_TYPE (current_function_decl);
11908 int nargs = call_expr_nargs (exp);
11911 if (TYPE_ARG_TYPES (fntype) == 0
11912 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11913 == void_type_node))
11915 error ("%<va_start%> used in function with fixed args");
11921 if (va_start_p && (nargs != 2))
11923 error ("wrong number of arguments to function %<va_start%>");
11926 arg = CALL_EXPR_ARG (exp, 1);
11928 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11929 when we checked the arguments and if needed issued a warning. */
11934 /* Evidently an out of date version of <stdarg.h>; can't validate
11935 va_start's second argument, but can still work as intended. */
11936 warning (0, "%<__builtin_next_arg%> called without an argument");
11939 else if (nargs > 1)
11941 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11944 arg = CALL_EXPR_ARG (exp, 0);
11947 if (TREE_CODE (arg) == SSA_NAME)
11948 arg = SSA_NAME_VAR (arg);
11950 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11951 or __builtin_next_arg (0) the first time we see it, after checking
11952 the arguments and if needed issuing a warning. */
11953 if (!integer_zerop (arg))
11955 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11957 /* Strip off all nops for the sake of the comparison. This
11958 is not quite the same as STRIP_NOPS. It does more.
11959 We must also strip off INDIRECT_EXPR for C++ reference
11961 while (CONVERT_EXPR_P (arg)
11962 || TREE_CODE (arg) == INDIRECT_REF)
11963 arg = TREE_OPERAND (arg, 0);
11964 if (arg != last_parm)
11966 /* FIXME: Sometimes with the tree optimizers we can get the
11967 not the last argument even though the user used the last
11968 argument. We just warn and set the arg to be the last
11969 argument so that we will get wrong-code because of
11971 warning (0, "second parameter of %<va_start%> not last named argument");
11974 /* Undefined by C99 7.15.1.4p4 (va_start):
11975 "If the parameter parmN is declared with the register storage
11976 class, with a function or array type, or with a type that is
11977 not compatible with the type that results after application of
11978 the default argument promotions, the behavior is undefined."
11980 else if (DECL_REGISTER (arg))
11981 warning (0, "undefined behaviour when second parameter of "
11982 "%<va_start%> is declared with %<register%> storage");
11984 /* We want to verify the second parameter just once before the tree
11985 optimizers are run and then avoid keeping it in the tree,
11986 as otherwise we could warn even for correct code like:
11987 void foo (int i, ...)
11988 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11990 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11992 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11998 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11999 ORIG may be null if this is a 2-argument call. We don't attempt to
12000 simplify calls with more than 3 arguments.
12002 Return NULL_TREE if no simplification was possible, otherwise return the
12003 simplified form of the call as a tree. If IGNORED is true, it means that
12004 the caller does not use the returned value of the function. */
12007 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
12010 const char *fmt_str = NULL;
12012 /* Verify the required arguments in the original call. We deal with two
12013 types of sprintf() calls: 'sprintf (str, fmt)' and
12014 'sprintf (dest, "%s", orig)'. */
12015 if (!validate_arg (dest, POINTER_TYPE)
12016 || !validate_arg (fmt, POINTER_TYPE))
12018 if (orig && !validate_arg (orig, POINTER_TYPE))
12021 /* Check whether the format is a literal string constant. */
12022 fmt_str = c_getstr (fmt);
12023 if (fmt_str == NULL)
12027 retval = NULL_TREE;
12029 if (!init_target_chars ())
12032 /* If the format doesn't contain % args or %%, use strcpy. */
12033 if (strchr (fmt_str, target_percent) == NULL)
12035 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
12040 /* Don't optimize sprintf (buf, "abc", ptr++). */
12044 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12045 'format' is known to contain no % formats. */
12046 call = build_call_expr (fn, 2, dest, fmt);
12048 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
12051 /* If the format is "%s", use strcpy if the result isn't used. */
12052 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12055 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
12060 /* Don't crash on sprintf (str1, "%s"). */
12064 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12067 retval = c_strlen (orig, 1);
12068 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12071 call = build_call_expr (fn, 2, dest, orig);
12074 if (call && retval)
12076 retval = fold_convert
12077 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
12079 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12085 /* Expand a call EXP to __builtin_object_size. */
12088 expand_builtin_object_size (tree exp)
12091 int object_size_type;
12092 tree fndecl = get_callee_fndecl (exp);
12094 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12096 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12098 expand_builtin_trap ();
12102 ost = CALL_EXPR_ARG (exp, 1);
12105 if (TREE_CODE (ost) != INTEGER_CST
12106 || tree_int_cst_sgn (ost) < 0
12107 || compare_tree_int (ost, 3) > 0)
12109 error ("%Klast argument of %D is not integer constant between 0 and 3",
12111 expand_builtin_trap ();
12115 object_size_type = tree_low_cst (ost, 0);
12117 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12120 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12121 FCODE is the BUILT_IN_* to use.
12122 Return NULL_RTX if we failed; the caller should emit a normal call,
12123 otherwise try to get the result in TARGET, if convenient (and in
12124 mode MODE if that's convenient). */
12127 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12128 enum built_in_function fcode)
12130 tree dest, src, len, size;
12132 if (!validate_arglist (exp,
12134 fcode == BUILT_IN_MEMSET_CHK
12135 ? INTEGER_TYPE : POINTER_TYPE,
12136 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12139 dest = CALL_EXPR_ARG (exp, 0);
12140 src = CALL_EXPR_ARG (exp, 1);
12141 len = CALL_EXPR_ARG (exp, 2);
12142 size = CALL_EXPR_ARG (exp, 3);
12144 if (! host_integerp (size, 1))
12147 if (host_integerp (len, 1) || integer_all_onesp (size))
12151 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12153 warning_at (tree_nonartificial_location (exp),
12154 0, "%Kcall to %D will always overflow destination buffer",
12155 exp, get_callee_fndecl (exp));
12160 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12161 mem{cpy,pcpy,move,set} is available. */
12164 case BUILT_IN_MEMCPY_CHK:
12165 fn = built_in_decls[BUILT_IN_MEMCPY];
12167 case BUILT_IN_MEMPCPY_CHK:
12168 fn = built_in_decls[BUILT_IN_MEMPCPY];
12170 case BUILT_IN_MEMMOVE_CHK:
12171 fn = built_in_decls[BUILT_IN_MEMMOVE];
12173 case BUILT_IN_MEMSET_CHK:
12174 fn = built_in_decls[BUILT_IN_MEMSET];
12183 fn = build_call_expr (fn, 3, dest, src, len);
12184 STRIP_TYPE_NOPS (fn);
12185 while (TREE_CODE (fn) == COMPOUND_EXPR)
12187 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12189 fn = TREE_OPERAND (fn, 1);
12191 if (TREE_CODE (fn) == CALL_EXPR)
12192 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12193 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12195 else if (fcode == BUILT_IN_MEMSET_CHK)
12199 unsigned int dest_align
12200 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
12202 /* If DEST is not a pointer type, call the normal function. */
12203 if (dest_align == 0)
12206 /* If SRC and DEST are the same (and not volatile), do nothing. */
12207 if (operand_equal_p (src, dest, 0))
12211 if (fcode != BUILT_IN_MEMPCPY_CHK)
12213 /* Evaluate and ignore LEN in case it has side-effects. */
12214 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12215 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12218 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12219 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12222 /* __memmove_chk special case. */
12223 if (fcode == BUILT_IN_MEMMOVE_CHK)
12225 unsigned int src_align
12226 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
12228 if (src_align == 0)
12231 /* If src is categorized for a readonly section we can use
12232 normal __memcpy_chk. */
12233 if (readonly_data_expr (src))
12235 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12238 fn = build_call_expr (fn, 4, dest, src, len, size);
12239 STRIP_TYPE_NOPS (fn);
12240 while (TREE_CODE (fn) == COMPOUND_EXPR)
12242 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12244 fn = TREE_OPERAND (fn, 1);
12246 if (TREE_CODE (fn) == CALL_EXPR)
12247 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12248 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12255 /* Emit warning if a buffer overflow is detected at compile time. */
12258 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12262 location_t loc = tree_nonartificial_location (exp);
12266 case BUILT_IN_STRCPY_CHK:
12267 case BUILT_IN_STPCPY_CHK:
12268 /* For __strcat_chk the warning will be emitted only if overflowing
12269 by at least strlen (dest) + 1 bytes. */
12270 case BUILT_IN_STRCAT_CHK:
12271 len = CALL_EXPR_ARG (exp, 1);
12272 size = CALL_EXPR_ARG (exp, 2);
12275 case BUILT_IN_STRNCAT_CHK:
12276 case BUILT_IN_STRNCPY_CHK:
12277 len = CALL_EXPR_ARG (exp, 2);
12278 size = CALL_EXPR_ARG (exp, 3);
12280 case BUILT_IN_SNPRINTF_CHK:
12281 case BUILT_IN_VSNPRINTF_CHK:
12282 len = CALL_EXPR_ARG (exp, 1);
12283 size = CALL_EXPR_ARG (exp, 3);
12286 gcc_unreachable ();
12292 if (! host_integerp (size, 1) || integer_all_onesp (size))
12297 len = c_strlen (len, 1);
12298 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12301 else if (fcode == BUILT_IN_STRNCAT_CHK)
12303 tree src = CALL_EXPR_ARG (exp, 1);
12304 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12306 src = c_strlen (src, 1);
12307 if (! src || ! host_integerp (src, 1))
12309 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12310 exp, get_callee_fndecl (exp));
12313 else if (tree_int_cst_lt (src, size))
12316 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12319 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12320 exp, get_callee_fndecl (exp));
12323 /* Emit warning if a buffer overflow is detected at compile time
12324 in __sprintf_chk/__vsprintf_chk calls. */
12327 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12329 tree dest, size, len, fmt, flag;
12330 const char *fmt_str;
12331 int nargs = call_expr_nargs (exp);
12333 /* Verify the required arguments in the original call. */
12337 dest = CALL_EXPR_ARG (exp, 0);
12338 flag = CALL_EXPR_ARG (exp, 1);
12339 size = CALL_EXPR_ARG (exp, 2);
12340 fmt = CALL_EXPR_ARG (exp, 3);
12342 if (! host_integerp (size, 1) || integer_all_onesp (size))
12345 /* Check whether the format is a literal string constant. */
12346 fmt_str = c_getstr (fmt);
12347 if (fmt_str == NULL)
12350 if (!init_target_chars ())
12353 /* If the format doesn't contain % args or %%, we know its size. */
12354 if (strchr (fmt_str, target_percent) == 0)
12355 len = build_int_cstu (size_type_node, strlen (fmt_str));
12356 /* If the format is "%s" and first ... argument is a string literal,
12358 else if (fcode == BUILT_IN_SPRINTF_CHK
12359 && strcmp (fmt_str, target_percent_s) == 0)
12365 arg = CALL_EXPR_ARG (exp, 4);
12366 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12369 len = c_strlen (arg, 1);
12370 if (!len || ! host_integerp (len, 1))
12376 if (! tree_int_cst_lt (len, size))
12377 warning_at (tree_nonartificial_location (exp),
12378 0, "%Kcall to %D will always overflow destination buffer",
12379 exp, get_callee_fndecl (exp));
12382 /* Emit warning if a free is called with address of a variable. */
12385 maybe_emit_free_warning (tree exp)
12387 tree arg = CALL_EXPR_ARG (exp, 0);
12390 if (TREE_CODE (arg) != ADDR_EXPR)
12393 arg = get_base_address (TREE_OPERAND (arg, 0));
12394 if (arg == NULL || INDIRECT_REF_P (arg))
12397 if (SSA_VAR_P (arg))
12398 warning_at (tree_nonartificial_location (exp),
12399 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12401 warning_at (tree_nonartificial_location (exp),
12402 0, "%Kattempt to free a non-heap object", exp);
12405 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12409 fold_builtin_object_size (tree ptr, tree ost)
12411 tree ret = NULL_TREE;
12412 int object_size_type;
12414 if (!validate_arg (ptr, POINTER_TYPE)
12415 || !validate_arg (ost, INTEGER_TYPE))
12420 if (TREE_CODE (ost) != INTEGER_CST
12421 || tree_int_cst_sgn (ost) < 0
12422 || compare_tree_int (ost, 3) > 0)
12425 object_size_type = tree_low_cst (ost, 0);
12427 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12428 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12429 and (size_t) 0 for types 2 and 3. */
12430 if (TREE_SIDE_EFFECTS (ptr))
12431 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12433 if (TREE_CODE (ptr) == ADDR_EXPR)
12434 ret = build_int_cstu (size_type_node,
12435 compute_builtin_object_size (ptr, object_size_type));
12437 else if (TREE_CODE (ptr) == SSA_NAME)
12439 unsigned HOST_WIDE_INT bytes;
12441 /* If object size is not known yet, delay folding until
12442 later. Maybe subsequent passes will help determining
12444 bytes = compute_builtin_object_size (ptr, object_size_type);
12445 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12447 ret = build_int_cstu (size_type_node, bytes);
12452 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12453 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12454 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12461 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12462 DEST, SRC, LEN, and SIZE are the arguments to the call.
12463 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12464 code of the builtin. If MAXLEN is not NULL, it is maximum length
12465 passed as third argument. */
12468 fold_builtin_memory_chk (tree fndecl,
12469 tree dest, tree src, tree len, tree size,
12470 tree maxlen, bool ignore,
12471 enum built_in_function fcode)
12475 if (!validate_arg (dest, POINTER_TYPE)
12476 || !validate_arg (src,
12477 (fcode == BUILT_IN_MEMSET_CHK
12478 ? INTEGER_TYPE : POINTER_TYPE))
12479 || !validate_arg (len, INTEGER_TYPE)
12480 || !validate_arg (size, INTEGER_TYPE))
12483 /* If SRC and DEST are the same (and not volatile), return DEST
12484 (resp. DEST+LEN for __mempcpy_chk). */
12485 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12487 if (fcode != BUILT_IN_MEMPCPY_CHK)
12488 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12491 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12492 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
12496 if (! host_integerp (size, 1))
12499 if (! integer_all_onesp (size))
12501 if (! host_integerp (len, 1))
12503 /* If LEN is not constant, try MAXLEN too.
12504 For MAXLEN only allow optimizing into non-_ocs function
12505 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12506 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12508 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12510 /* (void) __mempcpy_chk () can be optimized into
12511 (void) __memcpy_chk (). */
12512 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12516 return build_call_expr (fn, 4, dest, src, len, size);
12524 if (tree_int_cst_lt (size, maxlen))
12529 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12530 mem{cpy,pcpy,move,set} is available. */
12533 case BUILT_IN_MEMCPY_CHK:
12534 fn = built_in_decls[BUILT_IN_MEMCPY];
12536 case BUILT_IN_MEMPCPY_CHK:
12537 fn = built_in_decls[BUILT_IN_MEMPCPY];
12539 case BUILT_IN_MEMMOVE_CHK:
12540 fn = built_in_decls[BUILT_IN_MEMMOVE];
12542 case BUILT_IN_MEMSET_CHK:
12543 fn = built_in_decls[BUILT_IN_MEMSET];
12552 return build_call_expr (fn, 3, dest, src, len);
12555 /* Fold a call to the __st[rp]cpy_chk builtin.
12556 DEST, SRC, and SIZE are the arguments to the call.
12557 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12558 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12559 strings passed as second argument. */
12562 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12563 tree maxlen, bool ignore,
12564 enum built_in_function fcode)
12568 if (!validate_arg (dest, POINTER_TYPE)
12569 || !validate_arg (src, POINTER_TYPE)
12570 || !validate_arg (size, INTEGER_TYPE))
12573 /* If SRC and DEST are the same (and not volatile), return DEST. */
12574 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12575 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12577 if (! host_integerp (size, 1))
12580 if (! integer_all_onesp (size))
12582 len = c_strlen (src, 1);
12583 if (! len || ! host_integerp (len, 1))
12585 /* If LEN is not constant, try MAXLEN too.
12586 For MAXLEN only allow optimizing into non-_ocs function
12587 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12588 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12590 if (fcode == BUILT_IN_STPCPY_CHK)
12595 /* If return value of __stpcpy_chk is ignored,
12596 optimize into __strcpy_chk. */
12597 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12601 return build_call_expr (fn, 3, dest, src, size);
12604 if (! len || TREE_SIDE_EFFECTS (len))
12607 /* If c_strlen returned something, but not a constant,
12608 transform __strcpy_chk into __memcpy_chk. */
12609 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12613 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12614 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12615 build_call_expr (fn, 4,
12616 dest, src, len, size));
12622 if (! tree_int_cst_lt (maxlen, size))
12626 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12627 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12628 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12632 return build_call_expr (fn, 2, dest, src);
12635 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12636 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12637 length passed as third argument. */
12640 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12645 if (!validate_arg (dest, POINTER_TYPE)
12646 || !validate_arg (src, POINTER_TYPE)
12647 || !validate_arg (len, INTEGER_TYPE)
12648 || !validate_arg (size, INTEGER_TYPE))
12651 if (! host_integerp (size, 1))
12654 if (! integer_all_onesp (size))
12656 if (! host_integerp (len, 1))
12658 /* If LEN is not constant, try MAXLEN too.
12659 For MAXLEN only allow optimizing into non-_ocs function
12660 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12661 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12667 if (tree_int_cst_lt (size, maxlen))
12671 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12672 fn = built_in_decls[BUILT_IN_STRNCPY];
12676 return build_call_expr (fn, 3, dest, src, len);
12679 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12680 are the arguments to the call. */
12683 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12688 if (!validate_arg (dest, POINTER_TYPE)
12689 || !validate_arg (src, POINTER_TYPE)
12690 || !validate_arg (size, INTEGER_TYPE))
12693 p = c_getstr (src);
12694 /* If the SRC parameter is "", return DEST. */
12695 if (p && *p == '\0')
12696 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12698 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12701 /* If __builtin_strcat_chk is used, assume strcat is available. */
12702 fn = built_in_decls[BUILT_IN_STRCAT];
12706 return build_call_expr (fn, 2, dest, src);
12709 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12713 fold_builtin_strncat_chk (tree fndecl,
12714 tree dest, tree src, tree len, tree size)
12719 if (!validate_arg (dest, POINTER_TYPE)
12720 || !validate_arg (src, POINTER_TYPE)
12721 || !validate_arg (size, INTEGER_TYPE)
12722 || !validate_arg (size, INTEGER_TYPE))
12725 p = c_getstr (src);
12726 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12727 if (p && *p == '\0')
12728 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12729 else if (integer_zerop (len))
12730 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12732 if (! host_integerp (size, 1))
12735 if (! integer_all_onesp (size))
12737 tree src_len = c_strlen (src, 1);
12739 && host_integerp (src_len, 1)
12740 && host_integerp (len, 1)
12741 && ! tree_int_cst_lt (len, src_len))
12743 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12744 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12748 return build_call_expr (fn, 3, dest, src, size);
12753 /* If __builtin_strncat_chk is used, assume strncat is available. */
12754 fn = built_in_decls[BUILT_IN_STRNCAT];
12758 return build_call_expr (fn, 3, dest, src, len);
12761 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12762 a normal call should be emitted rather than expanding the function
12763 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12766 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12768 tree dest, size, len, fn, fmt, flag;
12769 const char *fmt_str;
12770 int nargs = call_expr_nargs (exp);
12772 /* Verify the required arguments in the original call. */
12775 dest = CALL_EXPR_ARG (exp, 0);
12776 if (!validate_arg (dest, POINTER_TYPE))
12778 flag = CALL_EXPR_ARG (exp, 1);
12779 if (!validate_arg (flag, INTEGER_TYPE))
12781 size = CALL_EXPR_ARG (exp, 2);
12782 if (!validate_arg (size, INTEGER_TYPE))
12784 fmt = CALL_EXPR_ARG (exp, 3);
12785 if (!validate_arg (fmt, POINTER_TYPE))
12788 if (! host_integerp (size, 1))
12793 if (!init_target_chars ())
12796 /* Check whether the format is a literal string constant. */
12797 fmt_str = c_getstr (fmt);
12798 if (fmt_str != NULL)
12800 /* If the format doesn't contain % args or %%, we know the size. */
12801 if (strchr (fmt_str, target_percent) == 0)
12803 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12804 len = build_int_cstu (size_type_node, strlen (fmt_str));
12806 /* If the format is "%s" and first ... argument is a string literal,
12807 we know the size too. */
12808 else if (fcode == BUILT_IN_SPRINTF_CHK
12809 && strcmp (fmt_str, target_percent_s) == 0)
12815 arg = CALL_EXPR_ARG (exp, 4);
12816 if (validate_arg (arg, POINTER_TYPE))
12818 len = c_strlen (arg, 1);
12819 if (! len || ! host_integerp (len, 1))
12826 if (! integer_all_onesp (size))
12828 if (! len || ! tree_int_cst_lt (len, size))
12832 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12833 or if format doesn't contain % chars or is "%s". */
12834 if (! integer_zerop (flag))
12836 if (fmt_str == NULL)
12838 if (strchr (fmt_str, target_percent) != NULL
12839 && strcmp (fmt_str, target_percent_s))
12843 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12844 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12845 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12849 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12852 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12853 a normal call should be emitted rather than expanding the function
12854 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12855 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12856 passed as second argument. */
12859 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12860 enum built_in_function fcode)
12862 tree dest, size, len, fn, fmt, flag;
12863 const char *fmt_str;
12865 /* Verify the required arguments in the original call. */
12866 if (call_expr_nargs (exp) < 5)
12868 dest = CALL_EXPR_ARG (exp, 0);
12869 if (!validate_arg (dest, POINTER_TYPE))
12871 len = CALL_EXPR_ARG (exp, 1);
12872 if (!validate_arg (len, INTEGER_TYPE))
12874 flag = CALL_EXPR_ARG (exp, 2);
12875 if (!validate_arg (flag, INTEGER_TYPE))
12877 size = CALL_EXPR_ARG (exp, 3);
12878 if (!validate_arg (size, INTEGER_TYPE))
12880 fmt = CALL_EXPR_ARG (exp, 4);
12881 if (!validate_arg (fmt, POINTER_TYPE))
12884 if (! host_integerp (size, 1))
12887 if (! integer_all_onesp (size))
12889 if (! host_integerp (len, 1))
12891 /* If LEN is not constant, try MAXLEN too.
12892 For MAXLEN only allow optimizing into non-_ocs function
12893 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12894 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12900 if (tree_int_cst_lt (size, maxlen))
12904 if (!init_target_chars ())
12907 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12908 or if format doesn't contain % chars or is "%s". */
12909 if (! integer_zerop (flag))
12911 fmt_str = c_getstr (fmt);
12912 if (fmt_str == NULL)
12914 if (strchr (fmt_str, target_percent) != NULL
12915 && strcmp (fmt_str, target_percent_s))
12919 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12921 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12922 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12926 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12929 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12930 FMT and ARG are the arguments to the call; we don't fold cases with
12931 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12933 Return NULL_TREE if no simplification was possible, otherwise return the
12934 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12935 code of the function to be simplified. */
12938 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12939 enum built_in_function fcode)
12941 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12942 const char *fmt_str = NULL;
12944 /* If the return value is used, don't do the transformation. */
12948 /* Verify the required arguments in the original call. */
12949 if (!validate_arg (fmt, POINTER_TYPE))
12952 /* Check whether the format is a literal string constant. */
12953 fmt_str = c_getstr (fmt);
12954 if (fmt_str == NULL)
12957 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12959 /* If we're using an unlocked function, assume the other
12960 unlocked functions exist explicitly. */
12961 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12962 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12966 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12967 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12970 if (!init_target_chars ())
12973 if (strcmp (fmt_str, target_percent_s) == 0
12974 || strchr (fmt_str, target_percent) == NULL)
12978 if (strcmp (fmt_str, target_percent_s) == 0)
12980 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12983 if (!arg || !validate_arg (arg, POINTER_TYPE))
12986 str = c_getstr (arg);
12992 /* The format specifier doesn't contain any '%' characters. */
12993 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12999 /* If the string was "", printf does nothing. */
13000 if (str[0] == '\0')
13001 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13003 /* If the string has length of 1, call putchar. */
13004 if (str[1] == '\0')
13006 /* Given printf("c"), (where c is any one character,)
13007 convert "c"[0] to an int and pass that to the replacement
13009 newarg = build_int_cst (NULL_TREE, str[0]);
13011 call = build_call_expr (fn_putchar, 1, newarg);
13015 /* If the string was "string\n", call puts("string"). */
13016 size_t len = strlen (str);
13017 if ((unsigned char)str[len - 1] == target_newline)
13019 /* Create a NUL-terminated string that's one char shorter
13020 than the original, stripping off the trailing '\n'. */
13021 char *newstr = XALLOCAVEC (char, len);
13022 memcpy (newstr, str, len - 1);
13023 newstr[len - 1] = 0;
13025 newarg = build_string_literal (len, newstr);
13027 call = build_call_expr (fn_puts, 1, newarg);
13030 /* We'd like to arrange to call fputs(string,stdout) here,
13031 but we need stdout and don't have a way to get it yet. */
13036 /* The other optimizations can be done only on the non-va_list variants. */
13037 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13040 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13041 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13043 if (!arg || !validate_arg (arg, POINTER_TYPE))
13046 call = build_call_expr (fn_puts, 1, arg);
13049 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13050 else if (strcmp (fmt_str, target_percent_c) == 0)
13052 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13055 call = build_call_expr (fn_putchar, 1, arg);
13061 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
13064 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
13065 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13066 more than 3 arguments, and ARG may be null in the 2-argument case.
13068 Return NULL_TREE if no simplification was possible, otherwise return the
13069 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13070 code of the function to be simplified. */
13073 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
13074 enum built_in_function fcode)
13076 tree fn_fputc, fn_fputs, call = NULL_TREE;
13077 const char *fmt_str = NULL;
13079 /* If the return value is used, don't do the transformation. */
13083 /* Verify the required arguments in the original call. */
13084 if (!validate_arg (fp, POINTER_TYPE))
13086 if (!validate_arg (fmt, POINTER_TYPE))
13089 /* Check whether the format is a literal string constant. */
13090 fmt_str = c_getstr (fmt);
13091 if (fmt_str == NULL)
13094 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13096 /* If we're using an unlocked function, assume the other
13097 unlocked functions exist explicitly. */
13098 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
13099 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
13103 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
13104 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
13107 if (!init_target_chars ())
13110 /* If the format doesn't contain % args or %%, use strcpy. */
13111 if (strchr (fmt_str, target_percent) == NULL)
13113 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13117 /* If the format specifier was "", fprintf does nothing. */
13118 if (fmt_str[0] == '\0')
13120 /* If FP has side-effects, just wait until gimplification is
13122 if (TREE_SIDE_EFFECTS (fp))
13125 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13128 /* When "string" doesn't contain %, replace all cases of
13129 fprintf (fp, string) with fputs (string, fp). The fputs
13130 builtin will take care of special cases like length == 1. */
13132 call = build_call_expr (fn_fputs, 2, fmt, fp);
13135 /* The other optimizations can be done only on the non-va_list variants. */
13136 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13139 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13140 else if (strcmp (fmt_str, target_percent_s) == 0)
13142 if (!arg || !validate_arg (arg, POINTER_TYPE))
13145 call = build_call_expr (fn_fputs, 2, arg, fp);
13148 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13149 else if (strcmp (fmt_str, target_percent_c) == 0)
13151 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13154 call = build_call_expr (fn_fputc, 2, arg, fp);
13159 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
13162 /* Initialize format string characters in the target charset. */
13165 init_target_chars (void)
13170 target_newline = lang_hooks.to_target_charset ('\n');
13171 target_percent = lang_hooks.to_target_charset ('%');
13172 target_c = lang_hooks.to_target_charset ('c');
13173 target_s = lang_hooks.to_target_charset ('s');
13174 if (target_newline == 0 || target_percent == 0 || target_c == 0
13178 target_percent_c[0] = target_percent;
13179 target_percent_c[1] = target_c;
13180 target_percent_c[2] = '\0';
13182 target_percent_s[0] = target_percent;
13183 target_percent_s[1] = target_s;
13184 target_percent_s[2] = '\0';
13186 target_percent_s_newline[0] = target_percent;
13187 target_percent_s_newline[1] = target_s;
13188 target_percent_s_newline[2] = target_newline;
13189 target_percent_s_newline[3] = '\0';
13196 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13197 and no overflow/underflow occurred. INEXACT is true if M was not
13198 exactly calculated. TYPE is the tree type for the result. This
13199 function assumes that you cleared the MPFR flags and then
13200 calculated M to see if anything subsequently set a flag prior to
13201 entering this function. Return NULL_TREE if any checks fail. */
13204 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13206 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13207 overflow/underflow occurred. If -frounding-math, proceed iff the
13208 result of calling FUNC was exact. */
13209 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13210 && (!flag_rounding_math || !inexact))
13212 REAL_VALUE_TYPE rr;
13214 real_from_mpfr (&rr, m, type, GMP_RNDN);
13215 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13216 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13217 but the mpft_t is not, then we underflowed in the
13219 if (real_isfinite (&rr)
13220 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13222 REAL_VALUE_TYPE rmode;
13224 real_convert (&rmode, TYPE_MODE (type), &rr);
13225 /* Proceed iff the specified mode can hold the value. */
13226 if (real_identical (&rmode, &rr))
13227 return build_real (type, rmode);
13234 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13235 number and no overflow/underflow occurred. INEXACT is true if M
13236 was not exactly calculated. TYPE is the tree type for the result.
13237 This function assumes that you cleared the MPFR flags and then
13238 calculated M to see if anything subsequently set a flag prior to
13239 entering this function. Return NULL_TREE if any checks fail. */
13242 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact)
13244 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13245 overflow/underflow occurred. If -frounding-math, proceed iff the
13246 result of calling FUNC was exact. */
13247 if (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13248 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13249 && (!flag_rounding_math || !inexact))
13251 REAL_VALUE_TYPE re, im;
13253 real_from_mpfr (&re, mpc_realref (m), type, GMP_RNDN);
13254 real_from_mpfr (&im, mpc_imagref (m), type, GMP_RNDN);
13255 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13256 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13257 but the mpft_t is not, then we underflowed in the
13259 if (real_isfinite (&re) && real_isfinite (&im)
13260 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13261 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0))
13263 REAL_VALUE_TYPE re_mode, im_mode;
13265 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13266 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13267 /* Proceed iff the specified mode can hold the value. */
13268 if (real_identical (&re_mode, &re) && real_identical (&im_mode, &im))
13269 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13270 build_real (TREE_TYPE (type), im_mode));
13275 #endif /* HAVE_mpc */
13277 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13278 FUNC on it and return the resulting value as a tree with type TYPE.
13279 If MIN and/or MAX are not NULL, then the supplied ARG must be
13280 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13281 acceptable values, otherwise they are not. The mpfr precision is
13282 set to the precision of TYPE. We assume that function FUNC returns
13283 zero if the result could be calculated exactly within the requested
13287 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13288 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13291 tree result = NULL_TREE;
13295 /* To proceed, MPFR must exactly represent the target floating point
13296 format, which only happens when the target base equals two. */
13297 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13298 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13300 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13302 if (real_isfinite (ra)
13303 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13304 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13306 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13307 const int prec = fmt->p;
13308 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13312 mpfr_init2 (m, prec);
13313 mpfr_from_real (m, ra, GMP_RNDN);
13314 mpfr_clear_flags ();
13315 inexact = func (m, m, rnd);
13316 result = do_mpfr_ckconv (m, type, inexact);
13324 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13325 FUNC on it and return the resulting value as a tree with type TYPE.
13326 The mpfr precision is set to the precision of TYPE. We assume that
13327 function FUNC returns zero if the result could be calculated
13328 exactly within the requested precision. */
13331 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13332 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13334 tree result = NULL_TREE;
13339 /* To proceed, MPFR must exactly represent the target floating point
13340 format, which only happens when the target base equals two. */
13341 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13342 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13343 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13345 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13346 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13348 if (real_isfinite (ra1) && real_isfinite (ra2))
13350 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13351 const int prec = fmt->p;
13352 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13356 mpfr_inits2 (prec, m1, m2, NULL);
13357 mpfr_from_real (m1, ra1, GMP_RNDN);
13358 mpfr_from_real (m2, ra2, GMP_RNDN);
13359 mpfr_clear_flags ();
13360 inexact = func (m1, m1, m2, rnd);
13361 result = do_mpfr_ckconv (m1, type, inexact);
13362 mpfr_clears (m1, m2, NULL);
13369 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13370 FUNC on it and return the resulting value as a tree with type TYPE.
13371 The mpfr precision is set to the precision of TYPE. We assume that
13372 function FUNC returns zero if the result could be calculated
13373 exactly within the requested precision. */
13376 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13377 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13379 tree result = NULL_TREE;
13385 /* To proceed, MPFR must exactly represent the target floating point
13386 format, which only happens when the target base equals two. */
13387 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13388 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13389 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13390 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13392 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13393 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13394 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13396 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13398 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13399 const int prec = fmt->p;
13400 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13404 mpfr_inits2 (prec, m1, m2, m3, NULL);
13405 mpfr_from_real (m1, ra1, GMP_RNDN);
13406 mpfr_from_real (m2, ra2, GMP_RNDN);
13407 mpfr_from_real (m3, ra3, GMP_RNDN);
13408 mpfr_clear_flags ();
13409 inexact = func (m1, m1, m2, m3, rnd);
13410 result = do_mpfr_ckconv (m1, type, inexact);
13411 mpfr_clears (m1, m2, m3, NULL);
13418 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13419 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13420 If ARG_SINP and ARG_COSP are NULL then the result is returned
13421 as a complex value.
13422 The type is taken from the type of ARG and is used for setting the
13423 precision of the calculation and results. */
13426 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13428 tree const type = TREE_TYPE (arg);
13429 tree result = NULL_TREE;
13433 /* To proceed, MPFR must exactly represent the target floating point
13434 format, which only happens when the target base equals two. */
13435 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13436 && TREE_CODE (arg) == REAL_CST
13437 && !TREE_OVERFLOW (arg))
13439 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13441 if (real_isfinite (ra))
13443 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13444 const int prec = fmt->p;
13445 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13446 tree result_s, result_c;
13450 mpfr_inits2 (prec, m, ms, mc, NULL);
13451 mpfr_from_real (m, ra, GMP_RNDN);
13452 mpfr_clear_flags ();
13453 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13454 result_s = do_mpfr_ckconv (ms, type, inexact);
13455 result_c = do_mpfr_ckconv (mc, type, inexact);
13456 mpfr_clears (m, ms, mc, NULL);
13457 if (result_s && result_c)
13459 /* If we are to return in a complex value do so. */
13460 if (!arg_sinp && !arg_cosp)
13461 return build_complex (build_complex_type (type),
13462 result_c, result_s);
13464 /* Dereference the sin/cos pointer arguments. */
13465 arg_sinp = build_fold_indirect_ref (arg_sinp);
13466 arg_cosp = build_fold_indirect_ref (arg_cosp);
13467 /* Proceed if valid pointer type were passed in. */
13468 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13469 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13471 /* Set the values. */
13472 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13474 TREE_SIDE_EFFECTS (result_s) = 1;
13475 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13477 TREE_SIDE_EFFECTS (result_c) = 1;
13478 /* Combine the assignments into a compound expr. */
13479 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13480 result_s, result_c));
13488 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13489 two-argument mpfr order N Bessel function FUNC on them and return
13490 the resulting value as a tree with type TYPE. The mpfr precision
13491 is set to the precision of TYPE. We assume that function FUNC
13492 returns zero if the result could be calculated exactly within the
13493 requested precision. */
13495 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13496 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13497 const REAL_VALUE_TYPE *min, bool inclusive)
13499 tree result = NULL_TREE;
13504 /* To proceed, MPFR must exactly represent the target floating point
13505 format, which only happens when the target base equals two. */
13506 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13507 && host_integerp (arg1, 0)
13508 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13510 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13511 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13514 && real_isfinite (ra)
13515 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13517 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13518 const int prec = fmt->p;
13519 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13523 mpfr_init2 (m, prec);
13524 mpfr_from_real (m, ra, GMP_RNDN);
13525 mpfr_clear_flags ();
13526 inexact = func (m, n, m, rnd);
13527 result = do_mpfr_ckconv (m, type, inexact);
13535 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13536 the pointer *(ARG_QUO) and return the result. The type is taken
13537 from the type of ARG0 and is used for setting the precision of the
13538 calculation and results. */
13541 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13543 tree const type = TREE_TYPE (arg0);
13544 tree result = NULL_TREE;
13549 /* To proceed, MPFR must exactly represent the target floating point
13550 format, which only happens when the target base equals two. */
13551 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13552 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13553 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13555 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13556 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13558 if (real_isfinite (ra0) && real_isfinite (ra1))
13560 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13561 const int prec = fmt->p;
13562 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13567 mpfr_inits2 (prec, m0, m1, NULL);
13568 mpfr_from_real (m0, ra0, GMP_RNDN);
13569 mpfr_from_real (m1, ra1, GMP_RNDN);
13570 mpfr_clear_flags ();
13571 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13572 /* Remquo is independent of the rounding mode, so pass
13573 inexact=0 to do_mpfr_ckconv(). */
13574 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13575 mpfr_clears (m0, m1, NULL);
13578 /* MPFR calculates quo in the host's long so it may
13579 return more bits in quo than the target int can hold
13580 if sizeof(host long) > sizeof(target int). This can
13581 happen even for native compilers in LP64 mode. In
13582 these cases, modulo the quo value with the largest
13583 number that the target int can hold while leaving one
13584 bit for the sign. */
13585 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13586 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13588 /* Dereference the quo pointer argument. */
13589 arg_quo = build_fold_indirect_ref (arg_quo);
13590 /* Proceed iff a valid pointer type was passed in. */
13591 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13593 /* Set the value. */
13594 tree result_quo = fold_build2 (MODIFY_EXPR,
13595 TREE_TYPE (arg_quo), arg_quo,
13596 build_int_cst (NULL, integer_quo));
13597 TREE_SIDE_EFFECTS (result_quo) = 1;
13598 /* Combine the quo assignment with the rem. */
13599 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13600 result_quo, result_rem));
13608 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13609 resulting value as a tree with type TYPE. The mpfr precision is
13610 set to the precision of TYPE. We assume that this mpfr function
13611 returns zero if the result could be calculated exactly within the
13612 requested precision. In addition, the integer pointer represented
13613 by ARG_SG will be dereferenced and set to the appropriate signgam
13617 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13619 tree result = NULL_TREE;
13623 /* To proceed, MPFR must exactly represent the target floating point
13624 format, which only happens when the target base equals two. Also
13625 verify ARG is a constant and that ARG_SG is an int pointer. */
13626 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13627 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13628 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13629 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13631 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13633 /* In addition to NaN and Inf, the argument cannot be zero or a
13634 negative integer. */
13635 if (real_isfinite (ra)
13636 && ra->cl != rvc_zero
13637 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13639 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13640 const int prec = fmt->p;
13641 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13646 mpfr_init2 (m, prec);
13647 mpfr_from_real (m, ra, GMP_RNDN);
13648 mpfr_clear_flags ();
13649 inexact = mpfr_lgamma (m, &sg, m, rnd);
13650 result_lg = do_mpfr_ckconv (m, type, inexact);
13656 /* Dereference the arg_sg pointer argument. */
13657 arg_sg = build_fold_indirect_ref (arg_sg);
13658 /* Assign the signgam value into *arg_sg. */
13659 result_sg = fold_build2 (MODIFY_EXPR,
13660 TREE_TYPE (arg_sg), arg_sg,
13661 build_int_cst (NULL, sg));
13662 TREE_SIDE_EFFECTS (result_sg) = 1;
13663 /* Combine the signgam assignment with the lgamma result. */
13664 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13665 result_sg, result_lg));
13674 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13675 function FUNC on it and return the resulting value as a tree with
13676 type TYPE. The mpfr precision is set to the precision of TYPE. We
13677 assume that function FUNC returns zero if the result could be
13678 calculated exactly within the requested precision. */
13681 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13683 tree result = NULL_TREE;
13687 /* To proceed, MPFR must exactly represent the target floating point
13688 format, which only happens when the target base equals two. */
13689 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13690 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13691 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13693 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13694 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13696 if (real_isfinite (re) && real_isfinite (im))
13698 const struct real_format *const fmt =
13699 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13700 const int prec = fmt->p;
13701 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13702 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13706 mpc_init2 (m, prec);
13707 mpfr_from_real (mpc_realref(m), re, rnd);
13708 mpfr_from_real (mpc_imagref(m), im, rnd);
13709 mpfr_clear_flags ();
13710 inexact = func (m, m, crnd);
13711 result = do_mpc_ckconv (m, type, inexact);
13718 #endif /* HAVE_mpc */
13721 The functions below provide an alternate interface for folding
13722 builtin function calls presented as GIMPLE_CALL statements rather
13723 than as CALL_EXPRs. The folded result is still expressed as a
13724 tree. There is too much code duplication in the handling of
13725 varargs functions, and a more intrusive re-factoring would permit
13726 better sharing of code between the tree and statement-based
13727 versions of these functions. */
13729 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13730 along with N new arguments specified as the "..." parameters. SKIP
13731 is the number of arguments in STMT to be omitted. This function is used
13732 to do varargs-to-varargs transformations. */
13735 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13737 int oldnargs = gimple_call_num_args (stmt);
13738 int nargs = oldnargs - skip + n;
13739 tree fntype = TREE_TYPE (fndecl);
13740 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13745 buffer = XALLOCAVEC (tree, nargs);
13747 for (i = 0; i < n; i++)
13748 buffer[i] = va_arg (ap, tree);
13750 for (j = skip; j < oldnargs; j++, i++)
13751 buffer[i] = gimple_call_arg (stmt, j);
13753 return fold (build_call_array (TREE_TYPE (fntype), fn, nargs, buffer));
13756 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13757 a normal call should be emitted rather than expanding the function
13758 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13761 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13763 tree dest, size, len, fn, fmt, flag;
13764 const char *fmt_str;
13765 int nargs = gimple_call_num_args (stmt);
13767 /* Verify the required arguments in the original call. */
13770 dest = gimple_call_arg (stmt, 0);
13771 if (!validate_arg (dest, POINTER_TYPE))
13773 flag = gimple_call_arg (stmt, 1);
13774 if (!validate_arg (flag, INTEGER_TYPE))
13776 size = gimple_call_arg (stmt, 2);
13777 if (!validate_arg (size, INTEGER_TYPE))
13779 fmt = gimple_call_arg (stmt, 3);
13780 if (!validate_arg (fmt, POINTER_TYPE))
13783 if (! host_integerp (size, 1))
13788 if (!init_target_chars ())
13791 /* Check whether the format is a literal string constant. */
13792 fmt_str = c_getstr (fmt);
13793 if (fmt_str != NULL)
13795 /* If the format doesn't contain % args or %%, we know the size. */
13796 if (strchr (fmt_str, target_percent) == 0)
13798 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13799 len = build_int_cstu (size_type_node, strlen (fmt_str));
13801 /* If the format is "%s" and first ... argument is a string literal,
13802 we know the size too. */
13803 else if (fcode == BUILT_IN_SPRINTF_CHK
13804 && strcmp (fmt_str, target_percent_s) == 0)
13810 arg = gimple_call_arg (stmt, 4);
13811 if (validate_arg (arg, POINTER_TYPE))
13813 len = c_strlen (arg, 1);
13814 if (! len || ! host_integerp (len, 1))
13821 if (! integer_all_onesp (size))
13823 if (! len || ! tree_int_cst_lt (len, size))
13827 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13828 or if format doesn't contain % chars or is "%s". */
13829 if (! integer_zerop (flag))
13831 if (fmt_str == NULL)
13833 if (strchr (fmt_str, target_percent) != NULL
13834 && strcmp (fmt_str, target_percent_s))
13838 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13839 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13840 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13844 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13847 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13848 a normal call should be emitted rather than expanding the function
13849 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13850 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13851 passed as second argument. */
13854 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13855 enum built_in_function fcode)
13857 tree dest, size, len, fn, fmt, flag;
13858 const char *fmt_str;
13860 /* Verify the required arguments in the original call. */
13861 if (gimple_call_num_args (stmt) < 5)
13863 dest = gimple_call_arg (stmt, 0);
13864 if (!validate_arg (dest, POINTER_TYPE))
13866 len = gimple_call_arg (stmt, 1);
13867 if (!validate_arg (len, INTEGER_TYPE))
13869 flag = gimple_call_arg (stmt, 2);
13870 if (!validate_arg (flag, INTEGER_TYPE))
13872 size = gimple_call_arg (stmt, 3);
13873 if (!validate_arg (size, INTEGER_TYPE))
13875 fmt = gimple_call_arg (stmt, 4);
13876 if (!validate_arg (fmt, POINTER_TYPE))
13879 if (! host_integerp (size, 1))
13882 if (! integer_all_onesp (size))
13884 if (! host_integerp (len, 1))
13886 /* If LEN is not constant, try MAXLEN too.
13887 For MAXLEN only allow optimizing into non-_ocs function
13888 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13889 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13895 if (tree_int_cst_lt (size, maxlen))
13899 if (!init_target_chars ())
13902 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13903 or if format doesn't contain % chars or is "%s". */
13904 if (! integer_zerop (flag))
13906 fmt_str = c_getstr (fmt);
13907 if (fmt_str == NULL)
13909 if (strchr (fmt_str, target_percent) != NULL
13910 && strcmp (fmt_str, target_percent_s))
13914 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13916 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13917 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13921 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13924 /* Builtins with folding operations that operate on "..." arguments
13925 need special handling; we need to store the arguments in a convenient
13926 data structure before attempting any folding. Fortunately there are
13927 only a few builtins that fall into this category. FNDECL is the
13928 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13929 result of the function call is ignored. */
13932 gimple_fold_builtin_varargs (tree fndecl, gimple stmt, bool ignore ATTRIBUTE_UNUSED)
13934 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13935 tree ret = NULL_TREE;
13939 case BUILT_IN_SPRINTF_CHK:
13940 case BUILT_IN_VSPRINTF_CHK:
13941 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13944 case BUILT_IN_SNPRINTF_CHK:
13945 case BUILT_IN_VSNPRINTF_CHK:
13946 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13953 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13954 TREE_NO_WARNING (ret) = 1;
13960 /* A wrapper function for builtin folding that prevents warnings for
13961 "statement without effect" and the like, caused by removing the
13962 call node earlier than the warning is generated. */
13965 fold_call_stmt (gimple stmt, bool ignore)
13967 tree ret = NULL_TREE;
13968 tree fndecl = gimple_call_fndecl (stmt);
13970 && TREE_CODE (fndecl) == FUNCTION_DECL
13971 && DECL_BUILT_IN (fndecl)
13972 && !gimple_call_va_arg_pack_p (stmt))
13974 int nargs = gimple_call_num_args (stmt);
13976 if (avoid_folding_inline_builtin (fndecl))
13978 /* FIXME: Don't use a list in this interface. */
13979 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13981 tree arglist = NULL_TREE;
13983 for (i = nargs - 1; i >= 0; i--)
13984 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13985 return targetm.fold_builtin (fndecl, arglist, ignore);
13989 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13991 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13993 for (i = 0; i < nargs; i++)
13994 args[i] = gimple_call_arg (stmt, i);
13995 ret = fold_builtin_n (fndecl, args, nargs, ignore);
13998 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
14001 /* Propagate location information from original call to
14002 expansion of builtin. Otherwise things like
14003 maybe_emit_chk_warning, that operate on the expansion
14004 of a builtin, will use the wrong location information. */
14005 if (gimple_has_location (stmt))
14007 tree realret = ret;
14008 if (TREE_CODE (ret) == NOP_EXPR)
14009 realret = TREE_OPERAND (ret, 0);
14010 if (CAN_HAVE_LOCATION_P (realret)
14011 && !EXPR_HAS_LOCATION (realret))
14012 SET_EXPR_LOCATION (realret, gimple_location (stmt));