1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
62 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
64 static tree do_mpc_arg2 (tree, tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t));
68 /* Define the names of the builtin function types and codes. */
69 const char *const built_in_class_names[4]
70 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
72 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
73 const char * built_in_names[(int) END_BUILTINS] =
75 #include "builtins.def"
79 /* Setup an array of _DECL trees, make sure each element is
80 initialized to NULL_TREE. */
81 tree built_in_decls[(int) END_BUILTINS];
82 /* Declarations used when constructing the builtin implicitly in the compiler.
83 It may be NULL_TREE when this is invalid (for instance runtime is not
84 required to implement the function call in all cases). */
85 tree implicit_built_in_decls[(int) END_BUILTINS];
87 static const char *c_getstr (tree);
88 static rtx c_readstr (const char *, enum machine_mode);
89 static int target_char_cast (tree, char *);
90 static rtx get_memory_rtx (tree, tree);
91 static int apply_args_size (void);
92 static int apply_result_size (void);
93 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
94 static rtx result_vector (int, rtx);
96 static void expand_builtin_update_setjmp_buf (rtx);
97 static void expand_builtin_prefetch (tree);
98 static rtx expand_builtin_apply_args (void);
99 static rtx expand_builtin_apply_args_1 (void);
100 static rtx expand_builtin_apply (rtx, rtx, rtx);
101 static void expand_builtin_return (rtx);
102 static enum type_class type_to_class (tree);
103 static rtx expand_builtin_classify_type (tree);
104 static void expand_errno_check (tree, rtx);
105 static rtx expand_builtin_mathfn (tree, rtx, rtx);
106 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
107 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
108 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
109 static rtx expand_builtin_sincos (tree);
110 static rtx expand_builtin_cexpi (tree, rtx, rtx);
111 static rtx expand_builtin_int_roundingfn (tree, rtx);
112 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
113 static rtx expand_builtin_args_info (tree);
114 static rtx expand_builtin_next_arg (void);
115 static rtx expand_builtin_va_start (tree);
116 static rtx expand_builtin_va_end (tree);
117 static rtx expand_builtin_va_copy (tree);
118 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
122 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
123 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
124 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
129 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
130 enum machine_mode, int);
131 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
132 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
133 enum machine_mode, int);
134 static rtx expand_builtin_bcopy (tree, int);
135 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
137 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
139 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
140 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
141 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
142 static rtx expand_builtin_bzero (tree);
143 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
144 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
145 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
146 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
147 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
148 static rtx expand_builtin_alloca (tree, rtx);
149 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
150 static rtx expand_builtin_frame_address (tree, tree);
151 static rtx expand_builtin_fputs (tree, rtx, bool);
152 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
153 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
154 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
155 static tree stabilize_va_list_loc (location_t, tree, int);
156 static rtx expand_builtin_expect (tree, rtx);
157 static tree fold_builtin_constant_p (tree);
158 static tree fold_builtin_expect (location_t, tree, tree);
159 static tree fold_builtin_classify_type (tree);
160 static tree fold_builtin_strlen (location_t, tree);
161 static tree fold_builtin_inf (location_t, tree, int);
162 static tree fold_builtin_nan (tree, tree, int);
163 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
164 static bool validate_arg (const_tree, enum tree_code code);
165 static bool integer_valued_real_p (tree);
166 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
167 static bool readonly_data_expr (tree);
168 static rtx expand_builtin_fabs (tree, rtx, rtx);
169 static rtx expand_builtin_signbit (tree, rtx);
170 static tree fold_builtin_sqrt (location_t, tree, tree);
171 static tree fold_builtin_cbrt (location_t, tree, tree);
172 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
173 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
174 static tree fold_builtin_cos (location_t, tree, tree, tree);
175 static tree fold_builtin_cosh (location_t, tree, tree, tree);
176 static tree fold_builtin_tan (tree, tree);
177 static tree fold_builtin_trunc (location_t, tree, tree);
178 static tree fold_builtin_floor (location_t, tree, tree);
179 static tree fold_builtin_ceil (location_t, tree, tree);
180 static tree fold_builtin_round (location_t, tree, tree);
181 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
182 static tree fold_builtin_bitop (tree, tree);
183 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
184 static tree fold_builtin_strchr (location_t, tree, tree, tree);
185 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
186 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
187 static tree fold_builtin_strcmp (location_t, tree, tree);
188 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
189 static tree fold_builtin_signbit (location_t, tree, tree);
190 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
191 static tree fold_builtin_isascii (location_t, tree);
192 static tree fold_builtin_toascii (location_t, tree);
193 static tree fold_builtin_isdigit (location_t, tree);
194 static tree fold_builtin_fabs (location_t, tree, tree);
195 static tree fold_builtin_abs (location_t, tree, tree);
196 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
198 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
199 static tree fold_builtin_0 (location_t, tree, bool);
200 static tree fold_builtin_1 (location_t, tree, tree, bool);
201 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
202 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
203 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
204 static tree fold_builtin_varargs (location_t, tree, tree, bool);
206 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
207 static tree fold_builtin_strstr (location_t, tree, tree, tree);
208 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
209 static tree fold_builtin_strcat (location_t, tree, tree);
210 static tree fold_builtin_strncat (location_t, tree, tree, tree);
211 static tree fold_builtin_strspn (location_t, tree, tree);
212 static tree fold_builtin_strcspn (location_t, tree, tree);
213 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
215 static rtx expand_builtin_object_size (tree);
216 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
217 enum built_in_function);
218 static void maybe_emit_chk_warning (tree, enum built_in_function);
219 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
220 static void maybe_emit_free_warning (tree);
221 static tree fold_builtin_object_size (tree, tree);
222 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
223 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
224 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
225 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
226 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
227 enum built_in_function);
228 static bool init_target_chars (void);
230 static unsigned HOST_WIDE_INT target_newline;
231 static unsigned HOST_WIDE_INT target_percent;
232 static unsigned HOST_WIDE_INT target_c;
233 static unsigned HOST_WIDE_INT target_s;
234 static char target_percent_c[3];
235 static char target_percent_s[3];
236 static char target_percent_s_newline[4];
237 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
238 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
239 static tree do_mpfr_arg2 (tree, tree, tree,
240 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
241 static tree do_mpfr_arg3 (tree, tree, tree, tree,
242 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
243 static tree do_mpfr_sincos (tree, tree, tree);
244 static tree do_mpfr_bessel_n (tree, tree, tree,
245 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
246 const REAL_VALUE_TYPE *, bool);
247 static tree do_mpfr_remquo (tree, tree, tree);
248 static tree do_mpfr_lgamma_r (tree, tree, tree);
251 is_builtin_name (const char *name)
253 if (strncmp (name, "__builtin_", 10) == 0)
255 if (strncmp (name, "__sync_", 7) == 0)
260 /* Return true if NODE should be considered for inline expansion regardless
261 of the optimization level. This means whenever a function is invoked with
262 its "internal" name, which normally contains the prefix "__builtin". */
265 called_as_built_in (tree node)
267 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
268 we want the name used to call the function, not the name it
270 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
271 return is_builtin_name (name);
274 /* Return the alignment in bits of EXP, an object.
275 Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
276 guessed alignment e.g. from type alignment. */
279 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
284 if (handled_component_p (exp))
286 HOST_WIDE_INT bitsize, bitpos;
288 enum machine_mode mode;
289 int unsignedp, volatilep;
291 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
292 &mode, &unsignedp, &volatilep, true);
294 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
299 if (TREE_CODE (offset) == PLUS_EXPR)
301 next_offset = TREE_OPERAND (offset, 0);
302 offset = TREE_OPERAND (offset, 1);
306 if (host_integerp (offset, 1))
308 /* Any overflow in calculating offset_bits won't change
311 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
314 inner = MIN (inner, (offset_bits & -offset_bits));
316 else if (TREE_CODE (offset) == MULT_EXPR
317 && host_integerp (TREE_OPERAND (offset, 1), 1))
319 /* Any overflow in calculating offset_factor won't change
321 unsigned offset_factor
322 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
326 inner = MIN (inner, (offset_factor & -offset_factor));
330 inner = MIN (inner, BITS_PER_UNIT);
333 offset = next_offset;
337 align = MIN (inner, DECL_ALIGN (exp));
338 #ifdef CONSTANT_ALIGNMENT
339 else if (CONSTANT_CLASS_P (exp))
340 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
342 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
343 || TREE_CODE (exp) == INDIRECT_REF)
344 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
346 align = MIN (align, inner);
347 return MIN (align, max_align);
350 /* Returns true iff we can trust that alignment information has been
351 calculated properly. */
354 can_trust_pointer_alignment (void)
356 /* We rely on TER to compute accurate alignment information. */
357 return (optimize && flag_tree_ter);
360 /* Return the alignment in bits of EXP, a pointer valued expression.
361 But don't return more than MAX_ALIGN no matter what.
362 The alignment returned is, by default, the alignment of the thing that
363 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
365 Otherwise, look at the expression to see if we can do better, i.e., if the
366 expression is actually pointing at an object whose alignment is tighter. */
369 get_pointer_alignment (tree exp, unsigned int max_align)
371 unsigned int align, inner;
373 if (!can_trust_pointer_alignment ())
376 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
379 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
380 align = MIN (align, max_align);
384 switch (TREE_CODE (exp))
387 exp = TREE_OPERAND (exp, 0);
388 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
391 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
392 align = MIN (inner, max_align);
395 case POINTER_PLUS_EXPR:
396 /* If sum of pointer + int, restrict our maximum alignment to that
397 imposed by the integer. If not, we can't do any better than
399 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
402 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
403 & (max_align / BITS_PER_UNIT - 1))
407 exp = TREE_OPERAND (exp, 0);
411 /* See what we are pointing at and look at its alignment. */
412 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
420 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
421 way, because it could contain a zero byte in the middle.
422 TREE_STRING_LENGTH is the size of the character array, not the string.
424 ONLY_VALUE should be nonzero if the result is not going to be emitted
425 into the instruction stream and zero if it is going to be expanded.
426 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
427 is returned, otherwise NULL, since
428 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
429 evaluate the side-effects.
431 The value returned is of type `ssizetype'.
433 Unfortunately, string_constant can't access the values of const char
434 arrays with initializers, so neither can we do so here. */
437 c_strlen (tree src, int only_value)
440 HOST_WIDE_INT offset;
445 if (TREE_CODE (src) == COND_EXPR
446 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
450 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
451 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
452 if (tree_int_cst_equal (len1, len2))
456 if (TREE_CODE (src) == COMPOUND_EXPR
457 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
458 return c_strlen (TREE_OPERAND (src, 1), only_value);
460 src = string_constant (src, &offset_node);
464 max = TREE_STRING_LENGTH (src) - 1;
465 ptr = TREE_STRING_POINTER (src);
467 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
469 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
470 compute the offset to the following null if we don't know where to
471 start searching for it. */
474 for (i = 0; i < max; i++)
478 /* We don't know the starting offset, but we do know that the string
479 has no internal zero bytes. We can assume that the offset falls
480 within the bounds of the string; otherwise, the programmer deserves
481 what he gets. Subtract the offset from the length of the string,
482 and return that. This would perhaps not be valid if we were dealing
483 with named arrays in addition to literal string constants. */
485 return size_diffop_loc (input_location, size_int (max), offset_node);
488 /* We have a known offset into the string. Start searching there for
489 a null character if we can represent it as a single HOST_WIDE_INT. */
490 if (offset_node == 0)
492 else if (! host_integerp (offset_node, 0))
495 offset = tree_low_cst (offset_node, 0);
497 /* If the offset is known to be out of bounds, warn, and call strlen at
499 if (offset < 0 || offset > max)
501 /* Suppress multiple warnings for propagated constant strings. */
502 if (! TREE_NO_WARNING (src))
504 warning (0, "offset outside bounds of constant string");
505 TREE_NO_WARNING (src) = 1;
510 /* Use strlen to search for the first zero byte. Since any strings
511 constructed with build_string will have nulls appended, we win even
512 if we get handed something like (char[4])"abcd".
514 Since OFFSET is our starting index into the string, no further
515 calculation is needed. */
516 return ssize_int (strlen (ptr + offset));
519 /* Return a char pointer for a C string if it is a string constant
520 or sum of string constant and integer constant. */
527 src = string_constant (src, &offset_node);
531 if (offset_node == 0)
532 return TREE_STRING_POINTER (src);
533 else if (!host_integerp (offset_node, 1)
534 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
537 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
540 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
541 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
544 c_readstr (const char *str, enum machine_mode mode)
550 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
555 for (i = 0; i < GET_MODE_SIZE (mode); i++)
558 if (WORDS_BIG_ENDIAN)
559 j = GET_MODE_SIZE (mode) - i - 1;
560 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
561 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
562 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
564 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
567 ch = (unsigned char) str[i];
568 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
570 return immed_double_const (c[0], c[1], mode);
573 /* Cast a target constant CST to target CHAR and if that value fits into
574 host char type, return zero and put that value into variable pointed to by
578 target_char_cast (tree cst, char *p)
580 unsigned HOST_WIDE_INT val, hostval;
582 if (!host_integerp (cst, 1)
583 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
586 val = tree_low_cst (cst, 1);
587 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
588 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
591 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
592 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
601 /* Similar to save_expr, but assumes that arbitrary code is not executed
602 in between the multiple evaluations. In particular, we assume that a
603 non-addressable local variable will not be modified. */
606 builtin_save_expr (tree exp)
608 if (TREE_ADDRESSABLE (exp) == 0
609 && (TREE_CODE (exp) == PARM_DECL
610 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
613 return save_expr (exp);
616 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
617 times to get the address of either a higher stack frame, or a return
618 address located within it (depending on FNDECL_CODE). */
621 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
625 #ifdef INITIAL_FRAME_ADDRESS_RTX
626 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
630 /* For a zero count with __builtin_return_address, we don't care what
631 frame address we return, because target-specific definitions will
632 override us. Therefore frame pointer elimination is OK, and using
633 the soft frame pointer is OK.
635 For a nonzero count, or a zero count with __builtin_frame_address,
636 we require a stable offset from the current frame pointer to the
637 previous one, so we must use the hard frame pointer, and
638 we must disable frame pointer elimination. */
639 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
640 tem = frame_pointer_rtx;
643 tem = hard_frame_pointer_rtx;
645 /* Tell reload not to eliminate the frame pointer. */
646 crtl->accesses_prior_frames = 1;
650 /* Some machines need special handling before we can access
651 arbitrary frames. For example, on the SPARC, we must first flush
652 all register windows to the stack. */
653 #ifdef SETUP_FRAME_ADDRESSES
655 SETUP_FRAME_ADDRESSES ();
658 /* On the SPARC, the return address is not in the frame, it is in a
659 register. There is no way to access it off of the current frame
660 pointer, but it can be accessed off the previous frame pointer by
661 reading the value from the register window save area. */
662 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
663 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
667 /* Scan back COUNT frames to the specified frame. */
668 for (i = 0; i < count; i++)
670 /* Assume the dynamic chain pointer is in the word that the
671 frame address points to, unless otherwise specified. */
672 #ifdef DYNAMIC_CHAIN_ADDRESS
673 tem = DYNAMIC_CHAIN_ADDRESS (tem);
675 tem = memory_address (Pmode, tem);
676 tem = gen_frame_mem (Pmode, tem);
677 tem = copy_to_reg (tem);
680 /* For __builtin_frame_address, return what we've got. But, on
681 the SPARC for example, we may have to add a bias. */
682 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
683 #ifdef FRAME_ADDR_RTX
684 return FRAME_ADDR_RTX (tem);
689 /* For __builtin_return_address, get the return address from that frame. */
690 #ifdef RETURN_ADDR_RTX
691 tem = RETURN_ADDR_RTX (count, tem);
693 tem = memory_address (Pmode,
694 plus_constant (tem, GET_MODE_SIZE (Pmode)));
695 tem = gen_frame_mem (Pmode, tem);
700 /* Alias set used for setjmp buffer. */
701 static alias_set_type setjmp_alias_set = -1;
703 /* Construct the leading half of a __builtin_setjmp call. Control will
704 return to RECEIVER_LABEL. This is also called directly by the SJLJ
705 exception handling code. */
708 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
710 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
714 if (setjmp_alias_set == -1)
715 setjmp_alias_set = new_alias_set ();
717 buf_addr = convert_memory_address (Pmode, buf_addr);
719 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
721 /* We store the frame pointer and the address of receiver_label in
722 the buffer and use the rest of it for the stack save area, which
723 is machine-dependent. */
725 mem = gen_rtx_MEM (Pmode, buf_addr);
726 set_mem_alias_set (mem, setjmp_alias_set);
727 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
729 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
730 set_mem_alias_set (mem, setjmp_alias_set);
732 emit_move_insn (validize_mem (mem),
733 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
735 stack_save = gen_rtx_MEM (sa_mode,
736 plus_constant (buf_addr,
737 2 * GET_MODE_SIZE (Pmode)));
738 set_mem_alias_set (stack_save, setjmp_alias_set);
739 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
741 /* If there is further processing to do, do it. */
742 #ifdef HAVE_builtin_setjmp_setup
743 if (HAVE_builtin_setjmp_setup)
744 emit_insn (gen_builtin_setjmp_setup (buf_addr));
747 /* Tell optimize_save_area_alloca that extra work is going to
748 need to go on during alloca. */
749 cfun->calls_setjmp = 1;
751 /* We have a nonlocal label. */
752 cfun->has_nonlocal_label = 1;
755 /* Construct the trailing part of a __builtin_setjmp call. This is
756 also called directly by the SJLJ exception handling code. */
759 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
761 /* Clobber the FP when we get here, so we have to make sure it's
762 marked as used by this function. */
763 emit_use (hard_frame_pointer_rtx);
765 /* Mark the static chain as clobbered here so life information
766 doesn't get messed up for it. */
767 emit_clobber (static_chain_rtx);
769 /* Now put in the code to restore the frame pointer, and argument
770 pointer, if needed. */
771 #ifdef HAVE_nonlocal_goto
772 if (! HAVE_nonlocal_goto)
775 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
776 /* This might change the hard frame pointer in ways that aren't
777 apparent to early optimization passes, so force a clobber. */
778 emit_clobber (hard_frame_pointer_rtx);
781 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
782 if (fixed_regs[ARG_POINTER_REGNUM])
784 #ifdef ELIMINABLE_REGS
786 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
788 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
789 if (elim_regs[i].from == ARG_POINTER_REGNUM
790 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
793 if (i == ARRAY_SIZE (elim_regs))
796 /* Now restore our arg pointer from the address at which it
797 was saved in our stack frame. */
798 emit_move_insn (crtl->args.internal_arg_pointer,
799 copy_to_reg (get_arg_pointer_save_area ()));
804 #ifdef HAVE_builtin_setjmp_receiver
805 if (HAVE_builtin_setjmp_receiver)
806 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
809 #ifdef HAVE_nonlocal_goto_receiver
810 if (HAVE_nonlocal_goto_receiver)
811 emit_insn (gen_nonlocal_goto_receiver ());
816 /* We must not allow the code we just generated to be reordered by
817 scheduling. Specifically, the update of the frame pointer must
818 happen immediately, not later. */
819 emit_insn (gen_blockage ());
822 /* __builtin_longjmp is passed a pointer to an array of five words (not
823 all will be used on all machines). It operates similarly to the C
824 library function of the same name, but is more efficient. Much of
825 the code below is copied from the handling of non-local gotos. */
828 expand_builtin_longjmp (rtx buf_addr, rtx value)
830 rtx fp, lab, stack, insn, last;
831 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
833 /* DRAP is needed for stack realign if longjmp is expanded to current
835 if (SUPPORTS_STACK_ALIGNMENT)
836 crtl->need_drap = true;
838 if (setjmp_alias_set == -1)
839 setjmp_alias_set = new_alias_set ();
841 buf_addr = convert_memory_address (Pmode, buf_addr);
843 buf_addr = force_reg (Pmode, buf_addr);
845 /* We used to store value in static_chain_rtx, but that fails if pointers
846 are smaller than integers. We instead require that the user must pass
847 a second argument of 1, because that is what builtin_setjmp will
848 return. This also makes EH slightly more efficient, since we are no
849 longer copying around a value that we don't care about. */
850 gcc_assert (value == const1_rtx);
852 last = get_last_insn ();
853 #ifdef HAVE_builtin_longjmp
854 if (HAVE_builtin_longjmp)
855 emit_insn (gen_builtin_longjmp (buf_addr));
859 fp = gen_rtx_MEM (Pmode, buf_addr);
860 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
861 GET_MODE_SIZE (Pmode)));
863 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
864 2 * GET_MODE_SIZE (Pmode)));
865 set_mem_alias_set (fp, setjmp_alias_set);
866 set_mem_alias_set (lab, setjmp_alias_set);
867 set_mem_alias_set (stack, setjmp_alias_set);
869 /* Pick up FP, label, and SP from the block and jump. This code is
870 from expand_goto in stmt.c; see there for detailed comments. */
871 #ifdef HAVE_nonlocal_goto
872 if (HAVE_nonlocal_goto)
873 /* We have to pass a value to the nonlocal_goto pattern that will
874 get copied into the static_chain pointer, but it does not matter
875 what that value is, because builtin_setjmp does not use it. */
876 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
880 lab = copy_to_reg (lab);
882 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
883 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
885 emit_move_insn (hard_frame_pointer_rtx, fp);
886 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
888 emit_use (hard_frame_pointer_rtx);
889 emit_use (stack_pointer_rtx);
890 emit_indirect_jump (lab);
894 /* Search backwards and mark the jump insn as a non-local goto.
895 Note that this precludes the use of __builtin_longjmp to a
896 __builtin_setjmp target in the same function. However, we've
897 already cautioned the user that these functions are for
898 internal exception handling use only. */
899 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
901 gcc_assert (insn != last);
905 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
908 else if (CALL_P (insn))
913 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
914 and the address of the save area. */
917 expand_builtin_nonlocal_goto (tree exp)
919 tree t_label, t_save_area;
920 rtx r_label, r_save_area, r_fp, r_sp, insn;
922 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
925 t_label = CALL_EXPR_ARG (exp, 0);
926 t_save_area = CALL_EXPR_ARG (exp, 1);
928 r_label = expand_normal (t_label);
929 r_label = convert_memory_address (Pmode, r_label);
930 r_save_area = expand_normal (t_save_area);
931 r_save_area = convert_memory_address (Pmode, r_save_area);
932 /* Copy the address of the save location to a register just in case it was based
933 on the frame pointer. */
934 r_save_area = copy_to_reg (r_save_area);
935 r_fp = gen_rtx_MEM (Pmode, r_save_area);
936 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
937 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
939 crtl->has_nonlocal_goto = 1;
941 #ifdef HAVE_nonlocal_goto
942 /* ??? We no longer need to pass the static chain value, afaik. */
943 if (HAVE_nonlocal_goto)
944 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
948 r_label = copy_to_reg (r_label);
950 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
951 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
953 /* Restore frame pointer for containing function.
954 This sets the actual hard register used for the frame pointer
955 to the location of the function's incoming static chain info.
956 The non-local goto handler will then adjust it to contain the
957 proper value and reload the argument pointer, if needed. */
958 emit_move_insn (hard_frame_pointer_rtx, r_fp);
959 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
961 /* USE of hard_frame_pointer_rtx added for consistency;
962 not clear if really needed. */
963 emit_use (hard_frame_pointer_rtx);
964 emit_use (stack_pointer_rtx);
966 /* If the architecture is using a GP register, we must
967 conservatively assume that the target function makes use of it.
968 The prologue of functions with nonlocal gotos must therefore
969 initialize the GP register to the appropriate value, and we
970 must then make sure that this value is live at the point
971 of the jump. (Note that this doesn't necessarily apply
972 to targets with a nonlocal_goto pattern; they are free
973 to implement it in their own way. Note also that this is
974 a no-op if the GP register is a global invariant.) */
975 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
976 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
977 emit_use (pic_offset_table_rtx);
979 emit_indirect_jump (r_label);
982 /* Search backwards to the jump insn and mark it as a
984 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
988 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
991 else if (CALL_P (insn))
998 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
999 (not all will be used on all machines) that was passed to __builtin_setjmp.
1000 It updates the stack pointer in that block to correspond to the current
1004 expand_builtin_update_setjmp_buf (rtx buf_addr)
1006 enum machine_mode sa_mode = Pmode;
1010 #ifdef HAVE_save_stack_nonlocal
1011 if (HAVE_save_stack_nonlocal)
1012 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1014 #ifdef STACK_SAVEAREA_MODE
1015 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1019 = gen_rtx_MEM (sa_mode,
1022 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1026 emit_insn (gen_setjmp ());
1029 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1032 /* Expand a call to __builtin_prefetch. For a target that does not support
1033 data prefetch, evaluate the memory address argument in case it has side
1037 expand_builtin_prefetch (tree exp)
1039 tree arg0, arg1, arg2;
1043 if (!validate_arglist (exp, POINTER_TYPE, 0))
1046 arg0 = CALL_EXPR_ARG (exp, 0);
1048 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1049 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1051 nargs = call_expr_nargs (exp);
1053 arg1 = CALL_EXPR_ARG (exp, 1);
1055 arg1 = integer_zero_node;
1057 arg2 = CALL_EXPR_ARG (exp, 2);
1059 arg2 = build_int_cst (NULL_TREE, 3);
1061 /* Argument 0 is an address. */
1062 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1064 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1065 if (TREE_CODE (arg1) != INTEGER_CST)
1067 error ("second argument to %<__builtin_prefetch%> must be a constant");
1068 arg1 = integer_zero_node;
1070 op1 = expand_normal (arg1);
1071 /* Argument 1 must be either zero or one. */
1072 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1074 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1079 /* Argument 2 (locality) must be a compile-time constant int. */
1080 if (TREE_CODE (arg2) != INTEGER_CST)
1082 error ("third argument to %<__builtin_prefetch%> must be a constant");
1083 arg2 = integer_zero_node;
1085 op2 = expand_normal (arg2);
1086 /* Argument 2 must be 0, 1, 2, or 3. */
1087 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1089 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1093 #ifdef HAVE_prefetch
1096 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1098 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1099 || (GET_MODE (op0) != Pmode))
1101 op0 = convert_memory_address (Pmode, op0);
1102 op0 = force_reg (Pmode, op0);
1104 emit_insn (gen_prefetch (op0, op1, op2));
1108 /* Don't do anything with direct references to volatile memory, but
1109 generate code to handle other side effects. */
1110 if (!MEM_P (op0) && side_effects_p (op0))
1114 /* Get a MEM rtx for expression EXP which is the address of an operand
1115 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1116 the maximum length of the block of memory that might be accessed or
1120 get_memory_rtx (tree exp, tree len)
1122 tree orig_exp = exp;
1126 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1127 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1128 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1129 exp = TREE_OPERAND (exp, 0);
1131 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1132 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1134 /* Get an expression we can use to find the attributes to assign to MEM.
1135 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1136 we can. First remove any nops. */
1137 while (CONVERT_EXPR_P (exp)
1138 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1139 exp = TREE_OPERAND (exp, 0);
1142 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1143 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1144 && host_integerp (TREE_OPERAND (exp, 1), 0)
1145 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1146 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1147 else if (TREE_CODE (exp) == ADDR_EXPR)
1148 exp = TREE_OPERAND (exp, 0);
1149 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1150 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1154 /* Honor attributes derived from exp, except for the alias set
1155 (as builtin stringops may alias with anything) and the size
1156 (as stringops may access multiple array elements). */
1159 set_mem_attributes (mem, exp, 0);
1162 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1164 /* Allow the string and memory builtins to overflow from one
1165 field into another, see http://gcc.gnu.org/PR23561.
1166 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1167 memory accessed by the string or memory builtin will fit
1168 within the field. */
1169 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1171 tree mem_expr = MEM_EXPR (mem);
1172 HOST_WIDE_INT offset = -1, length = -1;
1175 while (TREE_CODE (inner) == ARRAY_REF
1176 || CONVERT_EXPR_P (inner)
1177 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1178 || TREE_CODE (inner) == SAVE_EXPR)
1179 inner = TREE_OPERAND (inner, 0);
1181 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1183 if (MEM_OFFSET (mem)
1184 && CONST_INT_P (MEM_OFFSET (mem)))
1185 offset = INTVAL (MEM_OFFSET (mem));
1187 if (offset >= 0 && len && host_integerp (len, 0))
1188 length = tree_low_cst (len, 0);
1190 while (TREE_CODE (inner) == COMPONENT_REF)
1192 tree field = TREE_OPERAND (inner, 1);
1193 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1194 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1196 /* Bitfields are generally not byte-addressable. */
1197 gcc_assert (!DECL_BIT_FIELD (field)
1198 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1199 % BITS_PER_UNIT) == 0
1200 && host_integerp (DECL_SIZE (field), 0)
1201 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1202 % BITS_PER_UNIT) == 0));
1204 /* If we can prove that the memory starting at XEXP (mem, 0) and
1205 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1206 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1207 fields without DECL_SIZE_UNIT like flexible array members. */
1209 && DECL_SIZE_UNIT (field)
1210 && host_integerp (DECL_SIZE_UNIT (field), 0))
1213 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1216 && offset + length <= size)
1221 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1222 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1223 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1231 mem_expr = TREE_OPERAND (mem_expr, 0);
1232 inner = TREE_OPERAND (inner, 0);
1235 if (mem_expr == NULL)
1237 if (mem_expr != MEM_EXPR (mem))
1239 set_mem_expr (mem, mem_expr);
1240 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1243 set_mem_alias_set (mem, 0);
1244 set_mem_size (mem, NULL_RTX);
1250 /* Built-in functions to perform an untyped call and return. */
1252 /* For each register that may be used for calling a function, this
1253 gives a mode used to copy the register's value. VOIDmode indicates
1254 the register is not used for calling a function. If the machine
1255 has register windows, this gives only the outbound registers.
1256 INCOMING_REGNO gives the corresponding inbound register. */
1257 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1259 /* For each register that may be used for returning values, this gives
1260 a mode used to copy the register's value. VOIDmode indicates the
1261 register is not used for returning values. If the machine has
1262 register windows, this gives only the outbound registers.
1263 INCOMING_REGNO gives the corresponding inbound register. */
1264 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1266 /* For each register that may be used for calling a function, this
1267 gives the offset of that register into the block returned by
1268 __builtin_apply_args. 0 indicates that the register is not
1269 used for calling a function. */
1270 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1272 /* Return the size required for the block returned by __builtin_apply_args,
1273 and initialize apply_args_mode. */
1276 apply_args_size (void)
1278 static int size = -1;
1281 enum machine_mode mode;
1283 /* The values computed by this function never change. */
1286 /* The first value is the incoming arg-pointer. */
1287 size = GET_MODE_SIZE (Pmode);
1289 /* The second value is the structure value address unless this is
1290 passed as an "invisible" first argument. */
1291 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1292 size += GET_MODE_SIZE (Pmode);
1294 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1295 if (FUNCTION_ARG_REGNO_P (regno))
1297 mode = reg_raw_mode[regno];
1299 gcc_assert (mode != VOIDmode);
1301 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1302 if (size % align != 0)
1303 size = CEIL (size, align) * align;
1304 apply_args_reg_offset[regno] = size;
1305 size += GET_MODE_SIZE (mode);
1306 apply_args_mode[regno] = mode;
1310 apply_args_mode[regno] = VOIDmode;
1311 apply_args_reg_offset[regno] = 0;
1317 /* Return the size required for the block returned by __builtin_apply,
1318 and initialize apply_result_mode. */
1321 apply_result_size (void)
1323 static int size = -1;
1325 enum machine_mode mode;
1327 /* The values computed by this function never change. */
1332 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1333 if (FUNCTION_VALUE_REGNO_P (regno))
1335 mode = reg_raw_mode[regno];
1337 gcc_assert (mode != VOIDmode);
1339 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1340 if (size % align != 0)
1341 size = CEIL (size, align) * align;
1342 size += GET_MODE_SIZE (mode);
1343 apply_result_mode[regno] = mode;
1346 apply_result_mode[regno] = VOIDmode;
1348 /* Allow targets that use untyped_call and untyped_return to override
1349 the size so that machine-specific information can be stored here. */
1350 #ifdef APPLY_RESULT_SIZE
1351 size = APPLY_RESULT_SIZE;
1357 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1358 /* Create a vector describing the result block RESULT. If SAVEP is true,
1359 the result block is used to save the values; otherwise it is used to
1360 restore the values. */
1363 result_vector (int savep, rtx result)
1365 int regno, size, align, nelts;
1366 enum machine_mode mode;
1368 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1371 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1372 if ((mode = apply_result_mode[regno]) != VOIDmode)
1374 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1375 if (size % align != 0)
1376 size = CEIL (size, align) * align;
1377 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1378 mem = adjust_address (result, mode, size);
1379 savevec[nelts++] = (savep
1380 ? gen_rtx_SET (VOIDmode, mem, reg)
1381 : gen_rtx_SET (VOIDmode, reg, mem));
1382 size += GET_MODE_SIZE (mode);
1384 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1386 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1388 /* Save the state required to perform an untyped call with the same
1389 arguments as were passed to the current function. */
1392 expand_builtin_apply_args_1 (void)
1395 int size, align, regno;
1396 enum machine_mode mode;
1397 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1399 /* Create a block where the arg-pointer, structure value address,
1400 and argument registers can be saved. */
1401 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1403 /* Walk past the arg-pointer and structure value address. */
1404 size = GET_MODE_SIZE (Pmode);
1405 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1406 size += GET_MODE_SIZE (Pmode);
1408 /* Save each register used in calling a function to the block. */
1409 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1410 if ((mode = apply_args_mode[regno]) != VOIDmode)
1412 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1413 if (size % align != 0)
1414 size = CEIL (size, align) * align;
1416 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1418 emit_move_insn (adjust_address (registers, mode, size), tem);
1419 size += GET_MODE_SIZE (mode);
1422 /* Save the arg pointer to the block. */
1423 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1424 #ifdef STACK_GROWS_DOWNWARD
1425 /* We need the pointer as the caller actually passed them to us, not
1426 as we might have pretended they were passed. Make sure it's a valid
1427 operand, as emit_move_insn isn't expected to handle a PLUS. */
1429 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1432 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1434 size = GET_MODE_SIZE (Pmode);
1436 /* Save the structure value address unless this is passed as an
1437 "invisible" first argument. */
1438 if (struct_incoming_value)
1440 emit_move_insn (adjust_address (registers, Pmode, size),
1441 copy_to_reg (struct_incoming_value));
1442 size += GET_MODE_SIZE (Pmode);
1445 /* Return the address of the block. */
1446 return copy_addr_to_reg (XEXP (registers, 0));
1449 /* __builtin_apply_args returns block of memory allocated on
1450 the stack into which is stored the arg pointer, structure
1451 value address, static chain, and all the registers that might
1452 possibly be used in performing a function call. The code is
1453 moved to the start of the function so the incoming values are
1457 expand_builtin_apply_args (void)
1459 /* Don't do __builtin_apply_args more than once in a function.
1460 Save the result of the first call and reuse it. */
1461 if (apply_args_value != 0)
1462 return apply_args_value;
1464 /* When this function is called, it means that registers must be
1465 saved on entry to this function. So we migrate the
1466 call to the first insn of this function. */
1471 temp = expand_builtin_apply_args_1 ();
1475 apply_args_value = temp;
1477 /* Put the insns after the NOTE that starts the function.
1478 If this is inside a start_sequence, make the outer-level insn
1479 chain current, so the code is placed at the start of the
1480 function. If internal_arg_pointer is a non-virtual pseudo,
1481 it needs to be placed after the function that initializes
1483 push_topmost_sequence ();
1484 if (REG_P (crtl->args.internal_arg_pointer)
1485 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1486 emit_insn_before (seq, parm_birth_insn);
1488 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1489 pop_topmost_sequence ();
1494 /* Perform an untyped call and save the state required to perform an
1495 untyped return of whatever value was returned by the given function. */
1498 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1500 int size, align, regno;
1501 enum machine_mode mode;
1502 rtx incoming_args, result, reg, dest, src, call_insn;
1503 rtx old_stack_level = 0;
1504 rtx call_fusage = 0;
1505 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1507 arguments = convert_memory_address (Pmode, arguments);
1509 /* Create a block where the return registers can be saved. */
1510 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1512 /* Fetch the arg pointer from the ARGUMENTS block. */
1513 incoming_args = gen_reg_rtx (Pmode);
1514 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1515 #ifndef STACK_GROWS_DOWNWARD
1516 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1517 incoming_args, 0, OPTAB_LIB_WIDEN);
1520 /* Push a new argument block and copy the arguments. Do not allow
1521 the (potential) memcpy call below to interfere with our stack
1523 do_pending_stack_adjust ();
1526 /* Save the stack with nonlocal if available. */
1527 #ifdef HAVE_save_stack_nonlocal
1528 if (HAVE_save_stack_nonlocal)
1529 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1532 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1534 /* Allocate a block of memory onto the stack and copy the memory
1535 arguments to the outgoing arguments address. */
1536 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1538 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1539 may have already set current_function_calls_alloca to true.
1540 current_function_calls_alloca won't be set if argsize is zero,
1541 so we have to guarantee need_drap is true here. */
1542 if (SUPPORTS_STACK_ALIGNMENT)
1543 crtl->need_drap = true;
1545 dest = virtual_outgoing_args_rtx;
1546 #ifndef STACK_GROWS_DOWNWARD
1547 if (CONST_INT_P (argsize))
1548 dest = plus_constant (dest, -INTVAL (argsize));
1550 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1552 dest = gen_rtx_MEM (BLKmode, dest);
1553 set_mem_align (dest, PARM_BOUNDARY);
1554 src = gen_rtx_MEM (BLKmode, incoming_args);
1555 set_mem_align (src, PARM_BOUNDARY);
1556 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1558 /* Refer to the argument block. */
1560 arguments = gen_rtx_MEM (BLKmode, arguments);
1561 set_mem_align (arguments, PARM_BOUNDARY);
1563 /* Walk past the arg-pointer and structure value address. */
1564 size = GET_MODE_SIZE (Pmode);
1566 size += GET_MODE_SIZE (Pmode);
1568 /* Restore each of the registers previously saved. Make USE insns
1569 for each of these registers for use in making the call. */
1570 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1571 if ((mode = apply_args_mode[regno]) != VOIDmode)
1573 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1574 if (size % align != 0)
1575 size = CEIL (size, align) * align;
1576 reg = gen_rtx_REG (mode, regno);
1577 emit_move_insn (reg, adjust_address (arguments, mode, size));
1578 use_reg (&call_fusage, reg);
1579 size += GET_MODE_SIZE (mode);
1582 /* Restore the structure value address unless this is passed as an
1583 "invisible" first argument. */
1584 size = GET_MODE_SIZE (Pmode);
1587 rtx value = gen_reg_rtx (Pmode);
1588 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1589 emit_move_insn (struct_value, value);
1590 if (REG_P (struct_value))
1591 use_reg (&call_fusage, struct_value);
1592 size += GET_MODE_SIZE (Pmode);
1595 /* All arguments and registers used for the call are set up by now! */
1596 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1598 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1599 and we don't want to load it into a register as an optimization,
1600 because prepare_call_address already did it if it should be done. */
1601 if (GET_CODE (function) != SYMBOL_REF)
1602 function = memory_address (FUNCTION_MODE, function);
1604 /* Generate the actual call instruction and save the return value. */
1605 #ifdef HAVE_untyped_call
1606 if (HAVE_untyped_call)
1607 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1608 result, result_vector (1, result)));
1611 #ifdef HAVE_call_value
1612 if (HAVE_call_value)
1616 /* Locate the unique return register. It is not possible to
1617 express a call that sets more than one return register using
1618 call_value; use untyped_call for that. In fact, untyped_call
1619 only needs to save the return registers in the given block. */
1620 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1621 if ((mode = apply_result_mode[regno]) != VOIDmode)
1623 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1625 valreg = gen_rtx_REG (mode, regno);
1628 emit_call_insn (GEN_CALL_VALUE (valreg,
1629 gen_rtx_MEM (FUNCTION_MODE, function),
1630 const0_rtx, NULL_RTX, const0_rtx));
1632 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1638 /* Find the CALL insn we just emitted, and attach the register usage
1640 call_insn = last_call_insn ();
1641 add_function_usage_to (call_insn, call_fusage);
1643 /* Restore the stack. */
1644 #ifdef HAVE_save_stack_nonlocal
1645 if (HAVE_save_stack_nonlocal)
1646 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1649 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1653 /* Return the address of the result block. */
1654 result = copy_addr_to_reg (XEXP (result, 0));
1655 return convert_memory_address (ptr_mode, result);
1658 /* Perform an untyped return. */
1661 expand_builtin_return (rtx result)
1663 int size, align, regno;
1664 enum machine_mode mode;
1666 rtx call_fusage = 0;
1668 result = convert_memory_address (Pmode, result);
1670 apply_result_size ();
1671 result = gen_rtx_MEM (BLKmode, result);
1673 #ifdef HAVE_untyped_return
1674 if (HAVE_untyped_return)
1676 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1682 /* Restore the return value and note that each value is used. */
1684 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1685 if ((mode = apply_result_mode[regno]) != VOIDmode)
1687 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1688 if (size % align != 0)
1689 size = CEIL (size, align) * align;
1690 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1691 emit_move_insn (reg, adjust_address (result, mode, size));
1693 push_to_sequence (call_fusage);
1695 call_fusage = get_insns ();
1697 size += GET_MODE_SIZE (mode);
1700 /* Put the USE insns before the return. */
1701 emit_insn (call_fusage);
1703 /* Return whatever values was restored by jumping directly to the end
1705 expand_naked_return ();
1708 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1710 static enum type_class
1711 type_to_class (tree type)
1713 switch (TREE_CODE (type))
1715 case VOID_TYPE: return void_type_class;
1716 case INTEGER_TYPE: return integer_type_class;
1717 case ENUMERAL_TYPE: return enumeral_type_class;
1718 case BOOLEAN_TYPE: return boolean_type_class;
1719 case POINTER_TYPE: return pointer_type_class;
1720 case REFERENCE_TYPE: return reference_type_class;
1721 case OFFSET_TYPE: return offset_type_class;
1722 case REAL_TYPE: return real_type_class;
1723 case COMPLEX_TYPE: return complex_type_class;
1724 case FUNCTION_TYPE: return function_type_class;
1725 case METHOD_TYPE: return method_type_class;
1726 case RECORD_TYPE: return record_type_class;
1728 case QUAL_UNION_TYPE: return union_type_class;
1729 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1730 ? string_type_class : array_type_class);
1731 case LANG_TYPE: return lang_type_class;
1732 default: return no_type_class;
1736 /* Expand a call EXP to __builtin_classify_type. */
1739 expand_builtin_classify_type (tree exp)
1741 if (call_expr_nargs (exp))
1742 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1743 return GEN_INT (no_type_class);
1746 /* This helper macro, meant to be used in mathfn_built_in below,
1747 determines which among a set of three builtin math functions is
1748 appropriate for a given type mode. The `F' and `L' cases are
1749 automatically generated from the `double' case. */
1750 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1751 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1752 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1753 fcodel = BUILT_IN_MATHFN##L ; break;
1754 /* Similar to above, but appends _R after any F/L suffix. */
1755 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1756 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1757 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1758 fcodel = BUILT_IN_MATHFN##L_R ; break;
1760 /* Return mathematic function equivalent to FN but operating directly
1761 on TYPE, if available. If IMPLICIT is true find the function in
1762 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1763 can't do the conversion, return zero. */
1766 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1768 tree const *const fn_arr
1769 = implicit ? implicit_built_in_decls : built_in_decls;
1770 enum built_in_function fcode, fcodef, fcodel;
1774 CASE_MATHFN (BUILT_IN_ACOS)
1775 CASE_MATHFN (BUILT_IN_ACOSH)
1776 CASE_MATHFN (BUILT_IN_ASIN)
1777 CASE_MATHFN (BUILT_IN_ASINH)
1778 CASE_MATHFN (BUILT_IN_ATAN)
1779 CASE_MATHFN (BUILT_IN_ATAN2)
1780 CASE_MATHFN (BUILT_IN_ATANH)
1781 CASE_MATHFN (BUILT_IN_CBRT)
1782 CASE_MATHFN (BUILT_IN_CEIL)
1783 CASE_MATHFN (BUILT_IN_CEXPI)
1784 CASE_MATHFN (BUILT_IN_COPYSIGN)
1785 CASE_MATHFN (BUILT_IN_COS)
1786 CASE_MATHFN (BUILT_IN_COSH)
1787 CASE_MATHFN (BUILT_IN_DREM)
1788 CASE_MATHFN (BUILT_IN_ERF)
1789 CASE_MATHFN (BUILT_IN_ERFC)
1790 CASE_MATHFN (BUILT_IN_EXP)
1791 CASE_MATHFN (BUILT_IN_EXP10)
1792 CASE_MATHFN (BUILT_IN_EXP2)
1793 CASE_MATHFN (BUILT_IN_EXPM1)
1794 CASE_MATHFN (BUILT_IN_FABS)
1795 CASE_MATHFN (BUILT_IN_FDIM)
1796 CASE_MATHFN (BUILT_IN_FLOOR)
1797 CASE_MATHFN (BUILT_IN_FMA)
1798 CASE_MATHFN (BUILT_IN_FMAX)
1799 CASE_MATHFN (BUILT_IN_FMIN)
1800 CASE_MATHFN (BUILT_IN_FMOD)
1801 CASE_MATHFN (BUILT_IN_FREXP)
1802 CASE_MATHFN (BUILT_IN_GAMMA)
1803 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1804 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1805 CASE_MATHFN (BUILT_IN_HYPOT)
1806 CASE_MATHFN (BUILT_IN_ILOGB)
1807 CASE_MATHFN (BUILT_IN_INF)
1808 CASE_MATHFN (BUILT_IN_ISINF)
1809 CASE_MATHFN (BUILT_IN_J0)
1810 CASE_MATHFN (BUILT_IN_J1)
1811 CASE_MATHFN (BUILT_IN_JN)
1812 CASE_MATHFN (BUILT_IN_LCEIL)
1813 CASE_MATHFN (BUILT_IN_LDEXP)
1814 CASE_MATHFN (BUILT_IN_LFLOOR)
1815 CASE_MATHFN (BUILT_IN_LGAMMA)
1816 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1817 CASE_MATHFN (BUILT_IN_LLCEIL)
1818 CASE_MATHFN (BUILT_IN_LLFLOOR)
1819 CASE_MATHFN (BUILT_IN_LLRINT)
1820 CASE_MATHFN (BUILT_IN_LLROUND)
1821 CASE_MATHFN (BUILT_IN_LOG)
1822 CASE_MATHFN (BUILT_IN_LOG10)
1823 CASE_MATHFN (BUILT_IN_LOG1P)
1824 CASE_MATHFN (BUILT_IN_LOG2)
1825 CASE_MATHFN (BUILT_IN_LOGB)
1826 CASE_MATHFN (BUILT_IN_LRINT)
1827 CASE_MATHFN (BUILT_IN_LROUND)
1828 CASE_MATHFN (BUILT_IN_MODF)
1829 CASE_MATHFN (BUILT_IN_NAN)
1830 CASE_MATHFN (BUILT_IN_NANS)
1831 CASE_MATHFN (BUILT_IN_NEARBYINT)
1832 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1833 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1834 CASE_MATHFN (BUILT_IN_POW)
1835 CASE_MATHFN (BUILT_IN_POWI)
1836 CASE_MATHFN (BUILT_IN_POW10)
1837 CASE_MATHFN (BUILT_IN_REMAINDER)
1838 CASE_MATHFN (BUILT_IN_REMQUO)
1839 CASE_MATHFN (BUILT_IN_RINT)
1840 CASE_MATHFN (BUILT_IN_ROUND)
1841 CASE_MATHFN (BUILT_IN_SCALB)
1842 CASE_MATHFN (BUILT_IN_SCALBLN)
1843 CASE_MATHFN (BUILT_IN_SCALBN)
1844 CASE_MATHFN (BUILT_IN_SIGNBIT)
1845 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1846 CASE_MATHFN (BUILT_IN_SIN)
1847 CASE_MATHFN (BUILT_IN_SINCOS)
1848 CASE_MATHFN (BUILT_IN_SINH)
1849 CASE_MATHFN (BUILT_IN_SQRT)
1850 CASE_MATHFN (BUILT_IN_TAN)
1851 CASE_MATHFN (BUILT_IN_TANH)
1852 CASE_MATHFN (BUILT_IN_TGAMMA)
1853 CASE_MATHFN (BUILT_IN_TRUNC)
1854 CASE_MATHFN (BUILT_IN_Y0)
1855 CASE_MATHFN (BUILT_IN_Y1)
1856 CASE_MATHFN (BUILT_IN_YN)
1862 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1863 return fn_arr[fcode];
1864 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1865 return fn_arr[fcodef];
1866 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1867 return fn_arr[fcodel];
1872 /* Like mathfn_built_in_1(), but always use the implicit array. */
1875 mathfn_built_in (tree type, enum built_in_function fn)
1877 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1880 /* If errno must be maintained, expand the RTL to check if the result,
1881 TARGET, of a built-in function call, EXP, is NaN, and if so set
1885 expand_errno_check (tree exp, rtx target)
1887 rtx lab = gen_label_rtx ();
1889 /* Test the result; if it is NaN, set errno=EDOM because
1890 the argument was not in the domain. */
1891 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1892 NULL_RTX, NULL_RTX, lab);
1895 /* If this built-in doesn't throw an exception, set errno directly. */
1896 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1898 #ifdef GEN_ERRNO_RTX
1899 rtx errno_rtx = GEN_ERRNO_RTX;
1902 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1904 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1910 /* Make sure the library call isn't expanded as a tail call. */
1911 CALL_EXPR_TAILCALL (exp) = 0;
1913 /* We can't set errno=EDOM directly; let the library call do it.
1914 Pop the arguments right away in case the call gets deleted. */
1916 expand_call (exp, target, 0);
1921 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1922 Return NULL_RTX if a normal call should be emitted rather than expanding
1923 the function in-line. EXP is the expression that is a call to the builtin
1924 function; if convenient, the result should be placed in TARGET.
1925 SUBTARGET may be used as the target for computing one of EXP's operands. */
1928 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1930 optab builtin_optab;
1931 rtx op0, insns, before_call;
1932 tree fndecl = get_callee_fndecl (exp);
1933 enum machine_mode mode;
1934 bool errno_set = false;
1937 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1940 arg = CALL_EXPR_ARG (exp, 0);
1942 switch (DECL_FUNCTION_CODE (fndecl))
1944 CASE_FLT_FN (BUILT_IN_SQRT):
1945 errno_set = ! tree_expr_nonnegative_p (arg);
1946 builtin_optab = sqrt_optab;
1948 CASE_FLT_FN (BUILT_IN_EXP):
1949 errno_set = true; builtin_optab = exp_optab; break;
1950 CASE_FLT_FN (BUILT_IN_EXP10):
1951 CASE_FLT_FN (BUILT_IN_POW10):
1952 errno_set = true; builtin_optab = exp10_optab; break;
1953 CASE_FLT_FN (BUILT_IN_EXP2):
1954 errno_set = true; builtin_optab = exp2_optab; break;
1955 CASE_FLT_FN (BUILT_IN_EXPM1):
1956 errno_set = true; builtin_optab = expm1_optab; break;
1957 CASE_FLT_FN (BUILT_IN_LOGB):
1958 errno_set = true; builtin_optab = logb_optab; break;
1959 CASE_FLT_FN (BUILT_IN_LOG):
1960 errno_set = true; builtin_optab = log_optab; break;
1961 CASE_FLT_FN (BUILT_IN_LOG10):
1962 errno_set = true; builtin_optab = log10_optab; break;
1963 CASE_FLT_FN (BUILT_IN_LOG2):
1964 errno_set = true; builtin_optab = log2_optab; break;
1965 CASE_FLT_FN (BUILT_IN_LOG1P):
1966 errno_set = true; builtin_optab = log1p_optab; break;
1967 CASE_FLT_FN (BUILT_IN_ASIN):
1968 builtin_optab = asin_optab; break;
1969 CASE_FLT_FN (BUILT_IN_ACOS):
1970 builtin_optab = acos_optab; break;
1971 CASE_FLT_FN (BUILT_IN_TAN):
1972 builtin_optab = tan_optab; break;
1973 CASE_FLT_FN (BUILT_IN_ATAN):
1974 builtin_optab = atan_optab; break;
1975 CASE_FLT_FN (BUILT_IN_FLOOR):
1976 builtin_optab = floor_optab; break;
1977 CASE_FLT_FN (BUILT_IN_CEIL):
1978 builtin_optab = ceil_optab; break;
1979 CASE_FLT_FN (BUILT_IN_TRUNC):
1980 builtin_optab = btrunc_optab; break;
1981 CASE_FLT_FN (BUILT_IN_ROUND):
1982 builtin_optab = round_optab; break;
1983 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1984 builtin_optab = nearbyint_optab;
1985 if (flag_trapping_math)
1987 /* Else fallthrough and expand as rint. */
1988 CASE_FLT_FN (BUILT_IN_RINT):
1989 builtin_optab = rint_optab; break;
1990 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1991 builtin_optab = significand_optab; break;
1996 /* Make a suitable register to place result in. */
1997 mode = TYPE_MODE (TREE_TYPE (exp));
1999 if (! flag_errno_math || ! HONOR_NANS (mode))
2002 /* Before working hard, check whether the instruction is available. */
2003 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2005 target = gen_reg_rtx (mode);
2007 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2008 need to expand the argument again. This way, we will not perform
2009 side-effects more the once. */
2010 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2012 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2016 /* Compute into TARGET.
2017 Set TARGET to wherever the result comes back. */
2018 target = expand_unop (mode, builtin_optab, op0, target, 0);
2023 expand_errno_check (exp, target);
2025 /* Output the entire sequence. */
2026 insns = get_insns ();
2032 /* If we were unable to expand via the builtin, stop the sequence
2033 (without outputting the insns) and call to the library function
2034 with the stabilized argument list. */
2038 before_call = get_last_insn ();
2040 return expand_call (exp, target, target == const0_rtx);
2043 /* Expand a call to the builtin binary math functions (pow and atan2).
2044 Return NULL_RTX if a normal call should be emitted rather than expanding the
2045 function in-line. EXP is the expression that is a call to the builtin
2046 function; if convenient, the result should be placed in TARGET.
2047 SUBTARGET may be used as the target for computing one of EXP's
2051 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2053 optab builtin_optab;
2054 rtx op0, op1, insns;
2055 int op1_type = REAL_TYPE;
2056 tree fndecl = get_callee_fndecl (exp);
2058 enum machine_mode mode;
2059 bool errno_set = true;
2061 switch (DECL_FUNCTION_CODE (fndecl))
2063 CASE_FLT_FN (BUILT_IN_SCALBN):
2064 CASE_FLT_FN (BUILT_IN_SCALBLN):
2065 CASE_FLT_FN (BUILT_IN_LDEXP):
2066 op1_type = INTEGER_TYPE;
2071 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2074 arg0 = CALL_EXPR_ARG (exp, 0);
2075 arg1 = CALL_EXPR_ARG (exp, 1);
2077 switch (DECL_FUNCTION_CODE (fndecl))
2079 CASE_FLT_FN (BUILT_IN_POW):
2080 builtin_optab = pow_optab; break;
2081 CASE_FLT_FN (BUILT_IN_ATAN2):
2082 builtin_optab = atan2_optab; break;
2083 CASE_FLT_FN (BUILT_IN_SCALB):
2084 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2086 builtin_optab = scalb_optab; break;
2087 CASE_FLT_FN (BUILT_IN_SCALBN):
2088 CASE_FLT_FN (BUILT_IN_SCALBLN):
2089 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2091 /* Fall through... */
2092 CASE_FLT_FN (BUILT_IN_LDEXP):
2093 builtin_optab = ldexp_optab; break;
2094 CASE_FLT_FN (BUILT_IN_FMOD):
2095 builtin_optab = fmod_optab; break;
2096 CASE_FLT_FN (BUILT_IN_REMAINDER):
2097 CASE_FLT_FN (BUILT_IN_DREM):
2098 builtin_optab = remainder_optab; break;
2103 /* Make a suitable register to place result in. */
2104 mode = TYPE_MODE (TREE_TYPE (exp));
2106 /* Before working hard, check whether the instruction is available. */
2107 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2110 target = gen_reg_rtx (mode);
2112 if (! flag_errno_math || ! HONOR_NANS (mode))
2115 /* Always stabilize the argument list. */
2116 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2117 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2119 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2120 op1 = expand_normal (arg1);
2124 /* Compute into TARGET.
2125 Set TARGET to wherever the result comes back. */
2126 target = expand_binop (mode, builtin_optab, op0, op1,
2127 target, 0, OPTAB_DIRECT);
2129 /* If we were unable to expand via the builtin, stop the sequence
2130 (without outputting the insns) and call to the library function
2131 with the stabilized argument list. */
2135 return expand_call (exp, target, target == const0_rtx);
2139 expand_errno_check (exp, target);
2141 /* Output the entire sequence. */
2142 insns = get_insns ();
2149 /* Expand a call to the builtin sin and cos math functions.
2150 Return NULL_RTX if a normal call should be emitted rather than expanding the
2151 function in-line. EXP is the expression that is a call to the builtin
2152 function; if convenient, the result should be placed in TARGET.
2153 SUBTARGET may be used as the target for computing one of EXP's
2157 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2159 optab builtin_optab;
2161 tree fndecl = get_callee_fndecl (exp);
2162 enum machine_mode mode;
2165 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2168 arg = CALL_EXPR_ARG (exp, 0);
2170 switch (DECL_FUNCTION_CODE (fndecl))
2172 CASE_FLT_FN (BUILT_IN_SIN):
2173 CASE_FLT_FN (BUILT_IN_COS):
2174 builtin_optab = sincos_optab; break;
2179 /* Make a suitable register to place result in. */
2180 mode = TYPE_MODE (TREE_TYPE (exp));
2182 /* Check if sincos insn is available, otherwise fallback
2183 to sin or cos insn. */
2184 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2185 switch (DECL_FUNCTION_CODE (fndecl))
2187 CASE_FLT_FN (BUILT_IN_SIN):
2188 builtin_optab = sin_optab; break;
2189 CASE_FLT_FN (BUILT_IN_COS):
2190 builtin_optab = cos_optab; break;
2195 /* Before working hard, check whether the instruction is available. */
2196 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2198 target = gen_reg_rtx (mode);
2200 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2201 need to expand the argument again. This way, we will not perform
2202 side-effects more the once. */
2203 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2205 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2209 /* Compute into TARGET.
2210 Set TARGET to wherever the result comes back. */
2211 if (builtin_optab == sincos_optab)
2215 switch (DECL_FUNCTION_CODE (fndecl))
2217 CASE_FLT_FN (BUILT_IN_SIN):
2218 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2220 CASE_FLT_FN (BUILT_IN_COS):
2221 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2226 gcc_assert (result);
2230 target = expand_unop (mode, builtin_optab, op0, target, 0);
2235 /* Output the entire sequence. */
2236 insns = get_insns ();
2242 /* If we were unable to expand via the builtin, stop the sequence
2243 (without outputting the insns) and call to the library function
2244 with the stabilized argument list. */
2248 target = expand_call (exp, target, target == const0_rtx);
2253 /* Expand a call to one of the builtin math functions that operate on
2254 floating point argument and output an integer result (ilogb, isinf,
2256 Return 0 if a normal call should be emitted rather than expanding the
2257 function in-line. EXP is the expression that is a call to the builtin
2258 function; if convenient, the result should be placed in TARGET.
2259 SUBTARGET may be used as the target for computing one of EXP's operands. */
2262 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2264 optab builtin_optab = 0;
2265 enum insn_code icode = CODE_FOR_nothing;
2267 tree fndecl = get_callee_fndecl (exp);
2268 enum machine_mode mode;
2269 bool errno_set = false;
2271 location_t loc = EXPR_LOCATION (exp);
2273 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2276 arg = CALL_EXPR_ARG (exp, 0);
2278 switch (DECL_FUNCTION_CODE (fndecl))
2280 CASE_FLT_FN (BUILT_IN_ILOGB):
2281 errno_set = true; builtin_optab = ilogb_optab; break;
2282 CASE_FLT_FN (BUILT_IN_ISINF):
2283 builtin_optab = isinf_optab; break;
2284 case BUILT_IN_ISNORMAL:
2285 case BUILT_IN_ISFINITE:
2286 CASE_FLT_FN (BUILT_IN_FINITE):
2287 /* These builtins have no optabs (yet). */
2293 /* There's no easy way to detect the case we need to set EDOM. */
2294 if (flag_errno_math && errno_set)
2297 /* Optab mode depends on the mode of the input argument. */
2298 mode = TYPE_MODE (TREE_TYPE (arg));
2301 icode = optab_handler (builtin_optab, mode)->insn_code;
2303 /* Before working hard, check whether the instruction is available. */
2304 if (icode != CODE_FOR_nothing)
2306 /* Make a suitable register to place result in. */
2308 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2309 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2311 gcc_assert (insn_data[icode].operand[0].predicate
2312 (target, GET_MODE (target)));
2314 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2315 need to expand the argument again. This way, we will not perform
2316 side-effects more the once. */
2317 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2319 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2321 if (mode != GET_MODE (op0))
2322 op0 = convert_to_mode (mode, op0, 0);
2324 /* Compute into TARGET.
2325 Set TARGET to wherever the result comes back. */
2326 emit_unop_insn (icode, target, op0, UNKNOWN);
2330 /* If there is no optab, try generic code. */
2331 switch (DECL_FUNCTION_CODE (fndecl))
2335 CASE_FLT_FN (BUILT_IN_ISINF):
2337 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2338 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2339 tree const type = TREE_TYPE (arg);
2343 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2344 real_from_string (&r, buf);
2345 result = build_call_expr (isgr_fn, 2,
2346 fold_build1_loc (loc, ABS_EXPR, type, arg),
2347 build_real (type, r));
2348 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2350 CASE_FLT_FN (BUILT_IN_FINITE):
2351 case BUILT_IN_ISFINITE:
2353 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2354 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2355 tree const type = TREE_TYPE (arg);
2359 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2360 real_from_string (&r, buf);
2361 result = build_call_expr (isle_fn, 2,
2362 fold_build1_loc (loc, ABS_EXPR, type, arg),
2363 build_real (type, r));
2364 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2366 case BUILT_IN_ISNORMAL:
2368 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2369 islessequal(fabs(x),DBL_MAX). */
2370 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2371 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2372 tree const type = TREE_TYPE (arg);
2373 REAL_VALUE_TYPE rmax, rmin;
2376 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2377 real_from_string (&rmax, buf);
2378 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2379 real_from_string (&rmin, buf);
2380 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
2381 result = build_call_expr (isle_fn, 2, arg,
2382 build_real (type, rmax));
2383 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2384 build_call_expr (isge_fn, 2, arg,
2385 build_real (type, rmin)));
2386 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2392 target = expand_call (exp, target, target == const0_rtx);
2397 /* Expand a call to the builtin sincos math function.
2398 Return NULL_RTX if a normal call should be emitted rather than expanding the
2399 function in-line. EXP is the expression that is a call to the builtin
2403 expand_builtin_sincos (tree exp)
2405 rtx op0, op1, op2, target1, target2;
2406 enum machine_mode mode;
2407 tree arg, sinp, cosp;
2409 location_t loc = EXPR_LOCATION (exp);
2411 if (!validate_arglist (exp, REAL_TYPE,
2412 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2415 arg = CALL_EXPR_ARG (exp, 0);
2416 sinp = CALL_EXPR_ARG (exp, 1);
2417 cosp = CALL_EXPR_ARG (exp, 2);
2419 /* Make a suitable register to place result in. */
2420 mode = TYPE_MODE (TREE_TYPE (arg));
2422 /* Check if sincos insn is available, otherwise emit the call. */
2423 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2426 target1 = gen_reg_rtx (mode);
2427 target2 = gen_reg_rtx (mode);
2429 op0 = expand_normal (arg);
2430 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2431 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2433 /* Compute into target1 and target2.
2434 Set TARGET to wherever the result comes back. */
2435 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2436 gcc_assert (result);
2438 /* Move target1 and target2 to the memory locations indicated
2440 emit_move_insn (op1, target1);
2441 emit_move_insn (op2, target2);
2446 /* Expand a call to the internal cexpi builtin to the sincos math function.
2447 EXP is the expression that is a call to the builtin function; if convenient,
2448 the result should be placed in TARGET. SUBTARGET may be used as the target
2449 for computing one of EXP's operands. */
2452 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2454 tree fndecl = get_callee_fndecl (exp);
2456 enum machine_mode mode;
2458 location_t loc = EXPR_LOCATION (exp);
2460 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2463 arg = CALL_EXPR_ARG (exp, 0);
2464 type = TREE_TYPE (arg);
2465 mode = TYPE_MODE (TREE_TYPE (arg));
2467 /* Try expanding via a sincos optab, fall back to emitting a libcall
2468 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2469 is only generated from sincos, cexp or if we have either of them. */
2470 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2472 op1 = gen_reg_rtx (mode);
2473 op2 = gen_reg_rtx (mode);
2475 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2477 /* Compute into op1 and op2. */
2478 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2480 else if (TARGET_HAS_SINCOS)
2482 tree call, fn = NULL_TREE;
2486 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2487 fn = built_in_decls[BUILT_IN_SINCOSF];
2488 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2489 fn = built_in_decls[BUILT_IN_SINCOS];
2490 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2491 fn = built_in_decls[BUILT_IN_SINCOSL];
2495 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2496 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2497 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2498 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2499 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2500 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2502 /* Make sure not to fold the sincos call again. */
2503 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2504 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2505 call, 3, arg, top1, top2));
2509 tree call, fn = NULL_TREE, narg;
2510 tree ctype = build_complex_type (type);
2512 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2513 fn = built_in_decls[BUILT_IN_CEXPF];
2514 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2515 fn = built_in_decls[BUILT_IN_CEXP];
2516 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2517 fn = built_in_decls[BUILT_IN_CEXPL];
2521 /* If we don't have a decl for cexp create one. This is the
2522 friendliest fallback if the user calls __builtin_cexpi
2523 without full target C99 function support. */
2524 if (fn == NULL_TREE)
2527 const char *name = NULL;
2529 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2531 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2533 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2536 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2537 fn = build_fn_decl (name, fntype);
2540 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2541 build_real (type, dconst0), arg);
2543 /* Make sure not to fold the cexp call again. */
2544 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2545 return expand_expr (build_call_nary (ctype, call, 1, narg),
2546 target, VOIDmode, EXPAND_NORMAL);
2549 /* Now build the proper return type. */
2550 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2551 make_tree (TREE_TYPE (arg), op2),
2552 make_tree (TREE_TYPE (arg), op1)),
2553 target, VOIDmode, EXPAND_NORMAL);
2556 /* Expand a call to one of the builtin rounding functions gcc defines
2557 as an extension (lfloor and lceil). As these are gcc extensions we
2558 do not need to worry about setting errno to EDOM.
2559 If expanding via optab fails, lower expression to (int)(floor(x)).
2560 EXP is the expression that is a call to the builtin function;
2561 if convenient, the result should be placed in TARGET. */
2564 expand_builtin_int_roundingfn (tree exp, rtx target)
2566 convert_optab builtin_optab;
2567 rtx op0, insns, tmp;
2568 tree fndecl = get_callee_fndecl (exp);
2569 enum built_in_function fallback_fn;
2570 tree fallback_fndecl;
2571 enum machine_mode mode;
2574 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2577 arg = CALL_EXPR_ARG (exp, 0);
2579 switch (DECL_FUNCTION_CODE (fndecl))
2581 CASE_FLT_FN (BUILT_IN_LCEIL):
2582 CASE_FLT_FN (BUILT_IN_LLCEIL):
2583 builtin_optab = lceil_optab;
2584 fallback_fn = BUILT_IN_CEIL;
2587 CASE_FLT_FN (BUILT_IN_LFLOOR):
2588 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2589 builtin_optab = lfloor_optab;
2590 fallback_fn = BUILT_IN_FLOOR;
2597 /* Make a suitable register to place result in. */
2598 mode = TYPE_MODE (TREE_TYPE (exp));
2600 target = gen_reg_rtx (mode);
2602 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2603 need to expand the argument again. This way, we will not perform
2604 side-effects more the once. */
2605 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2607 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2611 /* Compute into TARGET. */
2612 if (expand_sfix_optab (target, op0, builtin_optab))
2614 /* Output the entire sequence. */
2615 insns = get_insns ();
2621 /* If we were unable to expand via the builtin, stop the sequence
2622 (without outputting the insns). */
2625 /* Fall back to floating point rounding optab. */
2626 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2628 /* For non-C99 targets we may end up without a fallback fndecl here
2629 if the user called __builtin_lfloor directly. In this case emit
2630 a call to the floor/ceil variants nevertheless. This should result
2631 in the best user experience for not full C99 targets. */
2632 if (fallback_fndecl == NULL_TREE)
2635 const char *name = NULL;
2637 switch (DECL_FUNCTION_CODE (fndecl))
2639 case BUILT_IN_LCEIL:
2640 case BUILT_IN_LLCEIL:
2643 case BUILT_IN_LCEILF:
2644 case BUILT_IN_LLCEILF:
2647 case BUILT_IN_LCEILL:
2648 case BUILT_IN_LLCEILL:
2651 case BUILT_IN_LFLOOR:
2652 case BUILT_IN_LLFLOOR:
2655 case BUILT_IN_LFLOORF:
2656 case BUILT_IN_LLFLOORF:
2659 case BUILT_IN_LFLOORL:
2660 case BUILT_IN_LLFLOORL:
2667 fntype = build_function_type_list (TREE_TYPE (arg),
2668 TREE_TYPE (arg), NULL_TREE);
2669 fallback_fndecl = build_fn_decl (name, fntype);
2672 exp = build_call_expr (fallback_fndecl, 1, arg);
2674 tmp = expand_normal (exp);
2676 /* Truncate the result of floating point optab to integer
2677 via expand_fix (). */
2678 target = gen_reg_rtx (mode);
2679 expand_fix (target, tmp, 0);
2684 /* Expand a call to one of the builtin math functions doing integer
2686 Return 0 if a normal call should be emitted rather than expanding the
2687 function in-line. EXP is the expression that is a call to the builtin
2688 function; if convenient, the result should be placed in TARGET. */
2691 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2693 convert_optab builtin_optab;
2695 tree fndecl = get_callee_fndecl (exp);
2697 enum machine_mode mode;
2699 /* There's no easy way to detect the case we need to set EDOM. */
2700 if (flag_errno_math)
2703 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2706 arg = CALL_EXPR_ARG (exp, 0);
2708 switch (DECL_FUNCTION_CODE (fndecl))
2710 CASE_FLT_FN (BUILT_IN_LRINT):
2711 CASE_FLT_FN (BUILT_IN_LLRINT):
2712 builtin_optab = lrint_optab; break;
2713 CASE_FLT_FN (BUILT_IN_LROUND):
2714 CASE_FLT_FN (BUILT_IN_LLROUND):
2715 builtin_optab = lround_optab; break;
2720 /* Make a suitable register to place result in. */
2721 mode = TYPE_MODE (TREE_TYPE (exp));
2723 target = gen_reg_rtx (mode);
2725 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2726 need to expand the argument again. This way, we will not perform
2727 side-effects more the once. */
2728 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2730 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2734 if (expand_sfix_optab (target, op0, builtin_optab))
2736 /* Output the entire sequence. */
2737 insns = get_insns ();
2743 /* If we were unable to expand via the builtin, stop the sequence
2744 (without outputting the insns) and call to the library function
2745 with the stabilized argument list. */
2748 target = expand_call (exp, target, target == const0_rtx);
2753 /* To evaluate powi(x,n), the floating point value x raised to the
2754 constant integer exponent n, we use a hybrid algorithm that
2755 combines the "window method" with look-up tables. For an
2756 introduction to exponentiation algorithms and "addition chains",
2757 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2758 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2759 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2760 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2762 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2763 multiplications to inline before calling the system library's pow
2764 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2765 so this default never requires calling pow, powf or powl. */
2767 #ifndef POWI_MAX_MULTS
2768 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2771 /* The size of the "optimal power tree" lookup table. All
2772 exponents less than this value are simply looked up in the
2773 powi_table below. This threshold is also used to size the
2774 cache of pseudo registers that hold intermediate results. */
2775 #define POWI_TABLE_SIZE 256
2777 /* The size, in bits of the window, used in the "window method"
2778 exponentiation algorithm. This is equivalent to a radix of
2779 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2780 #define POWI_WINDOW_SIZE 3
2782 /* The following table is an efficient representation of an
2783 "optimal power tree". For each value, i, the corresponding
2784 value, j, in the table states than an optimal evaluation
2785 sequence for calculating pow(x,i) can be found by evaluating
2786 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2787 100 integers is given in Knuth's "Seminumerical algorithms". */
2789 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2791 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2792 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2793 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2794 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2795 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2796 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2797 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2798 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2799 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2800 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2801 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2802 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2803 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2804 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2805 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2806 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2807 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2808 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2809 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2810 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2811 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2812 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2813 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2814 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2815 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2816 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2817 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2818 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2819 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2820 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2821 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2822 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2826 /* Return the number of multiplications required to calculate
2827 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2828 subroutine of powi_cost. CACHE is an array indicating
2829 which exponents have already been calculated. */
2832 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2834 /* If we've already calculated this exponent, then this evaluation
2835 doesn't require any additional multiplications. */
2840 return powi_lookup_cost (n - powi_table[n], cache)
2841 + powi_lookup_cost (powi_table[n], cache) + 1;
2844 /* Return the number of multiplications required to calculate
2845 powi(x,n) for an arbitrary x, given the exponent N. This
2846 function needs to be kept in sync with expand_powi below. */
2849 powi_cost (HOST_WIDE_INT n)
2851 bool cache[POWI_TABLE_SIZE];
2852 unsigned HOST_WIDE_INT digit;
2853 unsigned HOST_WIDE_INT val;
2859 /* Ignore the reciprocal when calculating the cost. */
2860 val = (n < 0) ? -n : n;
2862 /* Initialize the exponent cache. */
2863 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2868 while (val >= POWI_TABLE_SIZE)
2872 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2873 result += powi_lookup_cost (digit, cache)
2874 + POWI_WINDOW_SIZE + 1;
2875 val >>= POWI_WINDOW_SIZE;
2884 return result + powi_lookup_cost (val, cache);
2887 /* Recursive subroutine of expand_powi. This function takes the array,
2888 CACHE, of already calculated exponents and an exponent N and returns
2889 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2892 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2894 unsigned HOST_WIDE_INT digit;
2898 if (n < POWI_TABLE_SIZE)
2903 target = gen_reg_rtx (mode);
2906 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2907 op1 = expand_powi_1 (mode, powi_table[n], cache);
2911 target = gen_reg_rtx (mode);
2912 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2913 op0 = expand_powi_1 (mode, n - digit, cache);
2914 op1 = expand_powi_1 (mode, digit, cache);
2918 target = gen_reg_rtx (mode);
2919 op0 = expand_powi_1 (mode, n >> 1, cache);
2923 result = expand_mult (mode, op0, op1, target, 0);
2924 if (result != target)
2925 emit_move_insn (target, result);
2929 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2930 floating point operand in mode MODE, and N is the exponent. This
2931 function needs to be kept in sync with powi_cost above. */
2934 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2936 unsigned HOST_WIDE_INT val;
2937 rtx cache[POWI_TABLE_SIZE];
2941 return CONST1_RTX (mode);
2943 val = (n < 0) ? -n : n;
2945 memset (cache, 0, sizeof (cache));
2948 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2950 /* If the original exponent was negative, reciprocate the result. */
2952 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2953 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2958 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2959 a normal call should be emitted rather than expanding the function
2960 in-line. EXP is the expression that is a call to the builtin
2961 function; if convenient, the result should be placed in TARGET. */
2964 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2968 tree type = TREE_TYPE (exp);
2969 REAL_VALUE_TYPE cint, c, c2;
2972 enum machine_mode mode = TYPE_MODE (type);
2974 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2977 arg0 = CALL_EXPR_ARG (exp, 0);
2978 arg1 = CALL_EXPR_ARG (exp, 1);
2980 if (TREE_CODE (arg1) != REAL_CST
2981 || TREE_OVERFLOW (arg1))
2982 return expand_builtin_mathfn_2 (exp, target, subtarget);
2984 /* Handle constant exponents. */
2986 /* For integer valued exponents we can expand to an optimal multiplication
2987 sequence using expand_powi. */
2988 c = TREE_REAL_CST (arg1);
2989 n = real_to_integer (&c);
2990 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2991 if (real_identical (&c, &cint)
2992 && ((n >= -1 && n <= 2)
2993 || (flag_unsafe_math_optimizations
2994 && optimize_insn_for_speed_p ()
2995 && powi_cost (n) <= POWI_MAX_MULTS)))
2997 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3000 op = force_reg (mode, op);
3001 op = expand_powi (op, mode, n);
3006 narg0 = builtin_save_expr (arg0);
3008 /* If the exponent is not integer valued, check if it is half of an integer.
3009 In this case we can expand to sqrt (x) * x**(n/2). */
3010 fn = mathfn_built_in (type, BUILT_IN_SQRT);
3011 if (fn != NULL_TREE)
3013 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
3014 n = real_to_integer (&c2);
3015 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3016 if (real_identical (&c2, &cint)
3017 && ((flag_unsafe_math_optimizations
3018 && optimize_insn_for_speed_p ()
3019 && powi_cost (n/2) <= POWI_MAX_MULTS)
3022 tree call_expr = build_call_expr (fn, 1, narg0);
3023 /* Use expand_expr in case the newly built call expression
3024 was folded to a non-call. */
3025 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3028 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3029 op2 = force_reg (mode, op2);
3030 op2 = expand_powi (op2, mode, abs (n / 2));
3031 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3032 0, OPTAB_LIB_WIDEN);
3033 /* If the original exponent was negative, reciprocate the
3036 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3037 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3043 /* Try if the exponent is a third of an integer. In this case
3044 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3045 different from pow (x, 1./3.) due to rounding and behavior
3046 with negative x we need to constrain this transformation to
3047 unsafe math and positive x or finite math. */
3048 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3050 && flag_unsafe_math_optimizations
3051 && (tree_expr_nonnegative_p (arg0)
3052 || !HONOR_NANS (mode)))
3054 REAL_VALUE_TYPE dconst3;
3055 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3056 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3057 real_round (&c2, mode, &c2);
3058 n = real_to_integer (&c2);
3059 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3060 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3061 real_convert (&c2, mode, &c2);
3062 if (real_identical (&c2, &c)
3063 && ((optimize_insn_for_speed_p ()
3064 && powi_cost (n/3) <= POWI_MAX_MULTS)
3067 tree call_expr = build_call_expr (fn, 1,narg0);
3068 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3069 if (abs (n) % 3 == 2)
3070 op = expand_simple_binop (mode, MULT, op, op, op,
3071 0, OPTAB_LIB_WIDEN);
3074 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3075 op2 = force_reg (mode, op2);
3076 op2 = expand_powi (op2, mode, abs (n / 3));
3077 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3078 0, OPTAB_LIB_WIDEN);
3079 /* If the original exponent was negative, reciprocate the
3082 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3083 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3089 /* Fall back to optab expansion. */
3090 return expand_builtin_mathfn_2 (exp, target, subtarget);
3093 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3094 a normal call should be emitted rather than expanding the function
3095 in-line. EXP is the expression that is a call to the builtin
3096 function; if convenient, the result should be placed in TARGET. */
3099 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3103 enum machine_mode mode;
3104 enum machine_mode mode2;
3106 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3109 arg0 = CALL_EXPR_ARG (exp, 0);
3110 arg1 = CALL_EXPR_ARG (exp, 1);
3111 mode = TYPE_MODE (TREE_TYPE (exp));
3113 /* Handle constant power. */
3115 if (TREE_CODE (arg1) == INTEGER_CST
3116 && !TREE_OVERFLOW (arg1))
3118 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3120 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3121 Otherwise, check the number of multiplications required. */
3122 if ((TREE_INT_CST_HIGH (arg1) == 0
3123 || TREE_INT_CST_HIGH (arg1) == -1)
3124 && ((n >= -1 && n <= 2)
3125 || (optimize_insn_for_speed_p ()
3126 && powi_cost (n) <= POWI_MAX_MULTS)))
3128 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3129 op0 = force_reg (mode, op0);
3130 return expand_powi (op0, mode, n);
3134 /* Emit a libcall to libgcc. */
3136 /* Mode of the 2nd argument must match that of an int. */
3137 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3139 if (target == NULL_RTX)
3140 target = gen_reg_rtx (mode);
3142 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3143 if (GET_MODE (op0) != mode)
3144 op0 = convert_to_mode (mode, op0, 0);
3145 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3146 if (GET_MODE (op1) != mode2)
3147 op1 = convert_to_mode (mode2, op1, 0);
3149 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3150 target, LCT_CONST, mode, 2,
3151 op0, mode, op1, mode2);
3156 /* Expand expression EXP which is a call to the strlen builtin. Return
3157 NULL_RTX if we failed the caller should emit a normal call, otherwise
3158 try to get the result in TARGET, if convenient. */
3161 expand_builtin_strlen (tree exp, rtx target,
3162 enum machine_mode target_mode)
3164 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3170 tree src = CALL_EXPR_ARG (exp, 0);
3171 rtx result, src_reg, char_rtx, before_strlen;
3172 enum machine_mode insn_mode = target_mode, char_mode;
3173 enum insn_code icode = CODE_FOR_nothing;
3176 /* If the length can be computed at compile-time, return it. */
3177 len = c_strlen (src, 0);
3179 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3181 /* If the length can be computed at compile-time and is constant
3182 integer, but there are side-effects in src, evaluate
3183 src for side-effects, then return len.
3184 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3185 can be optimized into: i++; x = 3; */
3186 len = c_strlen (src, 1);
3187 if (len && TREE_CODE (len) == INTEGER_CST)
3189 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3190 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3193 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3195 /* If SRC is not a pointer type, don't do this operation inline. */
3199 /* Bail out if we can't compute strlen in the right mode. */
3200 while (insn_mode != VOIDmode)
3202 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3203 if (icode != CODE_FOR_nothing)
3206 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3208 if (insn_mode == VOIDmode)
3211 /* Make a place to write the result of the instruction. */
3215 && GET_MODE (result) == insn_mode
3216 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3217 result = gen_reg_rtx (insn_mode);
3219 /* Make a place to hold the source address. We will not expand
3220 the actual source until we are sure that the expansion will
3221 not fail -- there are trees that cannot be expanded twice. */
3222 src_reg = gen_reg_rtx (Pmode);
3224 /* Mark the beginning of the strlen sequence so we can emit the
3225 source operand later. */
3226 before_strlen = get_last_insn ();
3228 char_rtx = const0_rtx;
3229 char_mode = insn_data[(int) icode].operand[2].mode;
3230 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3232 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3234 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3235 char_rtx, GEN_INT (align));
3240 /* Now that we are assured of success, expand the source. */
3242 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3244 emit_move_insn (src_reg, pat);
3249 emit_insn_after (pat, before_strlen);
3251 emit_insn_before (pat, get_insns ());
3253 /* Return the value in the proper mode for this function. */
3254 if (GET_MODE (result) == target_mode)
3256 else if (target != 0)
3257 convert_move (target, result, 0);
3259 target = convert_to_mode (target_mode, result, 0);
3265 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3266 caller should emit a normal call, otherwise try to get the result
3267 in TARGET, if convenient (and in mode MODE if that's convenient). */
3270 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3272 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3274 tree type = TREE_TYPE (exp);
3275 tree result = fold_builtin_strstr (EXPR_LOCATION (exp),
3276 CALL_EXPR_ARG (exp, 0),
3277 CALL_EXPR_ARG (exp, 1), type);
3279 return expand_expr (result, target, mode, EXPAND_NORMAL);
3284 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3285 caller should emit a normal call, otherwise try to get the result
3286 in TARGET, if convenient (and in mode MODE if that's convenient). */
3289 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3291 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3293 tree type = TREE_TYPE (exp);
3294 tree result = fold_builtin_strchr (EXPR_LOCATION (exp),
3295 CALL_EXPR_ARG (exp, 0),
3296 CALL_EXPR_ARG (exp, 1), type);
3298 return expand_expr (result, target, mode, EXPAND_NORMAL);
3300 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3305 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3306 caller should emit a normal call, otherwise try to get the result
3307 in TARGET, if convenient (and in mode MODE if that's convenient). */
3310 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3312 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3314 tree type = TREE_TYPE (exp);
3315 tree result = fold_builtin_strrchr (EXPR_LOCATION (exp),
3316 CALL_EXPR_ARG (exp, 0),
3317 CALL_EXPR_ARG (exp, 1), type);
3319 return expand_expr (result, target, mode, EXPAND_NORMAL);
3324 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3325 caller should emit a normal call, otherwise try to get the result
3326 in TARGET, if convenient (and in mode MODE if that's convenient). */
3329 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3331 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3333 tree type = TREE_TYPE (exp);
3334 tree result = fold_builtin_strpbrk (EXPR_LOCATION (exp),
3335 CALL_EXPR_ARG (exp, 0),
3336 CALL_EXPR_ARG (exp, 1), type);
3338 return expand_expr (result, target, mode, EXPAND_NORMAL);
3343 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3344 bytes from constant string DATA + OFFSET and return it as target
3348 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3349 enum machine_mode mode)
3351 const char *str = (const char *) data;
3353 gcc_assert (offset >= 0
3354 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3355 <= strlen (str) + 1));
3357 return c_readstr (str + offset, mode);
3360 /* Expand a call EXP to the memcpy builtin.
3361 Return NULL_RTX if we failed, the caller should emit a normal call,
3362 otherwise try to get the result in TARGET, if convenient (and in
3363 mode MODE if that's convenient). */
3366 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3368 tree fndecl = get_callee_fndecl (exp);
3370 if (!validate_arglist (exp,
3371 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3375 tree dest = CALL_EXPR_ARG (exp, 0);
3376 tree src = CALL_EXPR_ARG (exp, 1);
3377 tree len = CALL_EXPR_ARG (exp, 2);
3378 const char *src_str;
3379 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3380 unsigned int dest_align
3381 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3382 rtx dest_mem, src_mem, dest_addr, len_rtx;
3383 tree result = fold_builtin_memory_op (EXPR_LOCATION (exp),
3385 TREE_TYPE (TREE_TYPE (fndecl)),
3387 HOST_WIDE_INT expected_size = -1;
3388 unsigned int expected_align = 0;
3389 tree_ann_common_t ann;
3393 while (TREE_CODE (result) == COMPOUND_EXPR)
3395 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3397 result = TREE_OPERAND (result, 1);
3399 return expand_expr (result, target, mode, EXPAND_NORMAL);
3402 /* If DEST is not a pointer type, call the normal function. */
3403 if (dest_align == 0)
3406 /* If either SRC is not a pointer type, don't do this
3407 operation in-line. */
3411 ann = tree_common_ann (exp);
3413 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3415 if (expected_align < dest_align)
3416 expected_align = dest_align;
3417 dest_mem = get_memory_rtx (dest, len);
3418 set_mem_align (dest_mem, dest_align);
3419 len_rtx = expand_normal (len);
3420 src_str = c_getstr (src);
3422 /* If SRC is a string constant and block move would be done
3423 by pieces, we can avoid loading the string from memory
3424 and only stored the computed constants. */
3426 && CONST_INT_P (len_rtx)
3427 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3428 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3429 CONST_CAST (char *, src_str),
3432 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3433 builtin_memcpy_read_str,
3434 CONST_CAST (char *, src_str),
3435 dest_align, false, 0);
3436 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3437 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3441 src_mem = get_memory_rtx (src, len);
3442 set_mem_align (src_mem, src_align);
3444 /* Copy word part most expediently. */
3445 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3446 CALL_EXPR_TAILCALL (exp)
3447 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3448 expected_align, expected_size);
3452 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3453 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3459 /* Expand a call EXP to the mempcpy builtin.
3460 Return NULL_RTX if we failed; the caller should emit a normal call,
3461 otherwise try to get the result in TARGET, if convenient (and in
3462 mode MODE if that's convenient). If ENDP is 0 return the
3463 destination pointer, if ENDP is 1 return the end pointer ala
3464 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3468 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3470 if (!validate_arglist (exp,
3471 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3475 tree dest = CALL_EXPR_ARG (exp, 0);
3476 tree src = CALL_EXPR_ARG (exp, 1);
3477 tree len = CALL_EXPR_ARG (exp, 2);
3478 return expand_builtin_mempcpy_args (dest, src, len,
3480 target, mode, /*endp=*/ 1);
3484 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3485 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3486 so that this can also be called without constructing an actual CALL_EXPR.
3487 TYPE is the return type of the call. The other arguments and return value
3488 are the same as for expand_builtin_mempcpy. */
3491 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3492 rtx target, enum machine_mode mode, int endp)
3494 /* If return value is ignored, transform mempcpy into memcpy. */
3495 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3497 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3498 tree result = build_call_expr (fn, 3, dest, src, len);
3500 while (TREE_CODE (result) == COMPOUND_EXPR)
3502 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3504 result = TREE_OPERAND (result, 1);
3506 return expand_expr (result, target, mode, EXPAND_NORMAL);
3510 const char *src_str;
3511 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3512 unsigned int dest_align
3513 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3514 rtx dest_mem, src_mem, len_rtx;
3515 tree result = fold_builtin_memory_op (UNKNOWN_LOCATION,
3516 dest, src, len, type, false, endp);
3520 while (TREE_CODE (result) == COMPOUND_EXPR)
3522 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3524 result = TREE_OPERAND (result, 1);
3526 return expand_expr (result, target, mode, EXPAND_NORMAL);
3529 /* If either SRC or DEST is not a pointer type, don't do this
3530 operation in-line. */
3531 if (dest_align == 0 || src_align == 0)
3534 /* If LEN is not constant, call the normal function. */
3535 if (! host_integerp (len, 1))
3538 len_rtx = expand_normal (len);
3539 src_str = c_getstr (src);
3541 /* If SRC is a string constant and block move would be done
3542 by pieces, we can avoid loading the string from memory
3543 and only stored the computed constants. */
3545 && CONST_INT_P (len_rtx)
3546 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3547 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3548 CONST_CAST (char *, src_str),
3551 dest_mem = get_memory_rtx (dest, len);
3552 set_mem_align (dest_mem, dest_align);
3553 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3554 builtin_memcpy_read_str,
3555 CONST_CAST (char *, src_str),
3556 dest_align, false, endp);
3557 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3558 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3562 if (CONST_INT_P (len_rtx)
3563 && can_move_by_pieces (INTVAL (len_rtx),
3564 MIN (dest_align, src_align)))
3566 dest_mem = get_memory_rtx (dest, len);
3567 set_mem_align (dest_mem, dest_align);
3568 src_mem = get_memory_rtx (src, len);
3569 set_mem_align (src_mem, src_align);
3570 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3571 MIN (dest_align, src_align), endp);
3572 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3573 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3581 /* Expand expression EXP, which is a call to the memmove builtin. Return
3582 NULL_RTX if we failed; the caller should emit a normal call. */
3585 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3587 if (!validate_arglist (exp,
3588 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3592 tree dest = CALL_EXPR_ARG (exp, 0);
3593 tree src = CALL_EXPR_ARG (exp, 1);
3594 tree len = CALL_EXPR_ARG (exp, 2);
3595 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3596 target, mode, ignore);
3600 /* Helper function to do the actual work for expand_builtin_memmove. The
3601 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3602 so that this can also be called without constructing an actual CALL_EXPR.
3603 TYPE is the return type of the call. The other arguments and return value
3604 are the same as for expand_builtin_memmove. */
3607 expand_builtin_memmove_args (tree dest, tree src, tree len,
3608 tree type, rtx target, enum machine_mode mode,
3611 tree result = fold_builtin_memory_op (UNKNOWN_LOCATION,
3612 dest, src, len, type, ignore, /*endp=*/3);
3616 STRIP_TYPE_NOPS (result);
3617 while (TREE_CODE (result) == COMPOUND_EXPR)
3619 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3621 result = TREE_OPERAND (result, 1);
3623 return expand_expr (result, target, mode, EXPAND_NORMAL);
3626 /* Otherwise, call the normal function. */
3630 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3631 NULL_RTX if we failed the caller should emit a normal call. */
3634 expand_builtin_bcopy (tree exp, int ignore)
3636 tree type = TREE_TYPE (exp);
3637 tree src, dest, size;
3638 location_t loc = EXPR_LOCATION (exp);
3640 if (!validate_arglist (exp,
3641 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3644 src = CALL_EXPR_ARG (exp, 0);
3645 dest = CALL_EXPR_ARG (exp, 1);
3646 size = CALL_EXPR_ARG (exp, 2);
3648 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3649 This is done this way so that if it isn't expanded inline, we fall
3650 back to calling bcopy instead of memmove. */
3651 return expand_builtin_memmove_args (dest, src,
3652 fold_convert_loc (loc, sizetype, size),
3653 type, const0_rtx, VOIDmode,
3658 # define HAVE_movstr 0
3659 # define CODE_FOR_movstr CODE_FOR_nothing
3662 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3663 we failed, the caller should emit a normal call, otherwise try to
3664 get the result in TARGET, if convenient. If ENDP is 0 return the
3665 destination pointer, if ENDP is 1 return the end pointer ala
3666 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3670 expand_movstr (tree dest, tree src, rtx target, int endp)
3676 const struct insn_data * data;
3681 dest_mem = get_memory_rtx (dest, NULL);
3682 src_mem = get_memory_rtx (src, NULL);
3685 target = force_reg (Pmode, XEXP (dest_mem, 0));
3686 dest_mem = replace_equiv_address (dest_mem, target);
3687 end = gen_reg_rtx (Pmode);
3691 if (target == 0 || target == const0_rtx)
3693 end = gen_reg_rtx (Pmode);
3701 data = insn_data + CODE_FOR_movstr;
3703 if (data->operand[0].mode != VOIDmode)
3704 end = gen_lowpart (data->operand[0].mode, end);
3706 insn = data->genfun (end, dest_mem, src_mem);
3712 /* movstr is supposed to set end to the address of the NUL
3713 terminator. If the caller requested a mempcpy-like return value,
3715 if (endp == 1 && target != const0_rtx)
3717 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3718 emit_move_insn (target, force_operand (tem, NULL_RTX));
3724 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3725 NULL_RTX if we failed the caller should emit a normal call, otherwise
3726 try to get the result in TARGET, if convenient (and in mode MODE if that's
3730 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3732 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3734 tree dest = CALL_EXPR_ARG (exp, 0);
3735 tree src = CALL_EXPR_ARG (exp, 1);
3736 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3741 /* Helper function to do the actual work for expand_builtin_strcpy. The
3742 arguments to the builtin_strcpy call DEST and SRC are broken out
3743 so that this can also be called without constructing an actual CALL_EXPR.
3744 The other arguments and return value are the same as for
3745 expand_builtin_strcpy. */
3748 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3749 rtx target, enum machine_mode mode)
3751 tree result = fold_builtin_strcpy (UNKNOWN_LOCATION,
3752 fndecl, dest, src, 0);
3754 return expand_expr (result, target, mode, EXPAND_NORMAL);
3755 return expand_movstr (dest, src, target, /*endp=*/0);
3759 /* Expand a call EXP to the stpcpy builtin.
3760 Return NULL_RTX if we failed the caller should emit a normal call,
3761 otherwise try to get the result in TARGET, if convenient (and in
3762 mode MODE if that's convenient). */
3765 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3768 location_t loc = EXPR_LOCATION (exp);
3770 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3773 dst = CALL_EXPR_ARG (exp, 0);
3774 src = CALL_EXPR_ARG (exp, 1);
3776 /* If return value is ignored, transform stpcpy into strcpy. */
3777 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3779 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3780 tree result = build_call_expr (fn, 2, dst, src);
3782 STRIP_NOPS (result);
3783 while (TREE_CODE (result) == COMPOUND_EXPR)
3785 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3787 result = TREE_OPERAND (result, 1);
3789 return expand_expr (result, target, mode, EXPAND_NORMAL);
3796 /* Ensure we get an actual string whose length can be evaluated at
3797 compile-time, not an expression containing a string. This is
3798 because the latter will potentially produce pessimized code
3799 when used to produce the return value. */
3800 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3801 return expand_movstr (dst, src, target, /*endp=*/2);
3803 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3804 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3805 target, mode, /*endp=*/2);
3810 if (TREE_CODE (len) == INTEGER_CST)
3812 rtx len_rtx = expand_normal (len);
3814 if (CONST_INT_P (len_rtx))
3816 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3817 dst, src, target, mode);
3823 if (mode != VOIDmode)
3824 target = gen_reg_rtx (mode);
3826 target = gen_reg_rtx (GET_MODE (ret));
3828 if (GET_MODE (target) != GET_MODE (ret))
3829 ret = gen_lowpart (GET_MODE (target), ret);
3831 ret = plus_constant (ret, INTVAL (len_rtx));
3832 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3840 return expand_movstr (dst, src, target, /*endp=*/2);
3844 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3845 bytes from constant string DATA + OFFSET and return it as target
3849 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3850 enum machine_mode mode)
3852 const char *str = (const char *) data;
3854 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3857 return c_readstr (str + offset, mode);
3860 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3861 NULL_RTX if we failed the caller should emit a normal call. */
3864 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3866 tree fndecl = get_callee_fndecl (exp);
3867 location_t loc = EXPR_LOCATION (exp);
3869 if (validate_arglist (exp,
3870 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3872 tree dest = CALL_EXPR_ARG (exp, 0);
3873 tree src = CALL_EXPR_ARG (exp, 1);
3874 tree len = CALL_EXPR_ARG (exp, 2);
3875 tree slen = c_strlen (src, 1);
3876 tree result = fold_builtin_strncpy (EXPR_LOCATION (exp),
3877 fndecl, dest, src, len, slen);
3881 while (TREE_CODE (result) == COMPOUND_EXPR)
3883 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3885 result = TREE_OPERAND (result, 1);
3887 return expand_expr (result, target, mode, EXPAND_NORMAL);
3890 /* We must be passed a constant len and src parameter. */
3891 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3894 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3896 /* We're required to pad with trailing zeros if the requested
3897 len is greater than strlen(s2)+1. In that case try to
3898 use store_by_pieces, if it fails, punt. */
3899 if (tree_int_cst_lt (slen, len))
3901 unsigned int dest_align
3902 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3903 const char *p = c_getstr (src);
3906 if (!p || dest_align == 0 || !host_integerp (len, 1)
3907 || !can_store_by_pieces (tree_low_cst (len, 1),
3908 builtin_strncpy_read_str,
3909 CONST_CAST (char *, p),
3913 dest_mem = get_memory_rtx (dest, len);
3914 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3915 builtin_strncpy_read_str,
3916 CONST_CAST (char *, p), dest_align, false, 0);
3917 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3918 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3925 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3926 bytes from constant string DATA + OFFSET and return it as target
3930 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3931 enum machine_mode mode)
3933 const char *c = (const char *) data;
3934 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3936 memset (p, *c, GET_MODE_SIZE (mode));
3938 return c_readstr (p, mode);
3941 /* Callback routine for store_by_pieces. Return the RTL of a register
3942 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3943 char value given in the RTL register data. For example, if mode is
3944 4 bytes wide, return the RTL for 0x01010101*data. */
3947 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3948 enum machine_mode mode)
3954 size = GET_MODE_SIZE (mode);
3958 p = XALLOCAVEC (char, size);
3959 memset (p, 1, size);
3960 coeff = c_readstr (p, mode);
3962 target = convert_to_mode (mode, (rtx) data, 1);
3963 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3964 return force_reg (mode, target);
3967 /* Expand expression EXP, which is a call to the memset builtin. Return
3968 NULL_RTX if we failed the caller should emit a normal call, otherwise
3969 try to get the result in TARGET, if convenient (and in mode MODE if that's
3973 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3975 if (!validate_arglist (exp,
3976 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3980 tree dest = CALL_EXPR_ARG (exp, 0);
3981 tree val = CALL_EXPR_ARG (exp, 1);
3982 tree len = CALL_EXPR_ARG (exp, 2);
3983 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3987 /* Helper function to do the actual work for expand_builtin_memset. The
3988 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3989 so that this can also be called without constructing an actual CALL_EXPR.
3990 The other arguments and return value are the same as for
3991 expand_builtin_memset. */
3994 expand_builtin_memset_args (tree dest, tree val, tree len,
3995 rtx target, enum machine_mode mode, tree orig_exp)
3998 enum built_in_function fcode;
4000 unsigned int dest_align;
4001 rtx dest_mem, dest_addr, len_rtx;
4002 HOST_WIDE_INT expected_size = -1;
4003 unsigned int expected_align = 0;
4004 tree_ann_common_t ann;
4006 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
4008 /* If DEST is not a pointer type, don't do this operation in-line. */
4009 if (dest_align == 0)
4012 ann = tree_common_ann (orig_exp);
4014 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
4016 if (expected_align < dest_align)
4017 expected_align = dest_align;
4019 /* If the LEN parameter is zero, return DEST. */
4020 if (integer_zerop (len))
4022 /* Evaluate and ignore VAL in case it has side-effects. */
4023 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4024 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4027 /* Stabilize the arguments in case we fail. */
4028 dest = builtin_save_expr (dest);
4029 val = builtin_save_expr (val);
4030 len = builtin_save_expr (len);
4032 len_rtx = expand_normal (len);
4033 dest_mem = get_memory_rtx (dest, len);
4035 if (TREE_CODE (val) != INTEGER_CST)
4039 val_rtx = expand_normal (val);
4040 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
4043 /* Assume that we can memset by pieces if we can store
4044 * the coefficients by pieces (in the required modes).
4045 * We can't pass builtin_memset_gen_str as that emits RTL. */
4047 if (host_integerp (len, 1)
4048 && can_store_by_pieces (tree_low_cst (len, 1),
4049 builtin_memset_read_str, &c, dest_align,
4052 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
4054 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4055 builtin_memset_gen_str, val_rtx, dest_align,
4058 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4059 dest_align, expected_align,
4063 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4064 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4068 if (target_char_cast (val, &c))
4073 if (host_integerp (len, 1)
4074 && can_store_by_pieces (tree_low_cst (len, 1),
4075 builtin_memset_read_str, &c, dest_align,
4077 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4078 builtin_memset_read_str, &c, dest_align, true, 0);
4079 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
4080 dest_align, expected_align,
4084 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4085 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4089 set_mem_align (dest_mem, dest_align);
4090 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4091 CALL_EXPR_TAILCALL (orig_exp)
4092 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4093 expected_align, expected_size);
4097 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4098 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4104 fndecl = get_callee_fndecl (orig_exp);
4105 fcode = DECL_FUNCTION_CODE (fndecl);
4106 if (fcode == BUILT_IN_MEMSET)
4107 fn = build_call_expr (fndecl, 3, dest, val, len);
4108 else if (fcode == BUILT_IN_BZERO)
4109 fn = build_call_expr (fndecl, 2, dest, len);
4112 if (TREE_CODE (fn) == CALL_EXPR)
4113 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4114 return expand_call (fn, target, target == const0_rtx);
4117 /* Expand expression EXP, which is a call to the bzero builtin. Return
4118 NULL_RTX if we failed the caller should emit a normal call. */
4121 expand_builtin_bzero (tree exp)
4124 location_t loc = EXPR_LOCATION (exp);
4126 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4129 dest = CALL_EXPR_ARG (exp, 0);
4130 size = CALL_EXPR_ARG (exp, 1);
4132 /* New argument list transforming bzero(ptr x, int y) to
4133 memset(ptr x, int 0, size_t y). This is done this way
4134 so that if it isn't expanded inline, we fallback to
4135 calling bzero instead of memset. */
4137 return expand_builtin_memset_args (dest, integer_zero_node,
4138 fold_convert_loc (loc, sizetype, size),
4139 const0_rtx, VOIDmode, exp);
4142 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4143 caller should emit a normal call, otherwise try to get the result
4144 in TARGET, if convenient (and in mode MODE if that's convenient). */
4147 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4149 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4150 INTEGER_TYPE, VOID_TYPE))
4152 tree type = TREE_TYPE (exp);
4153 tree result = fold_builtin_memchr (EXPR_LOCATION (exp),
4154 CALL_EXPR_ARG (exp, 0),
4155 CALL_EXPR_ARG (exp, 1),
4156 CALL_EXPR_ARG (exp, 2), type);
4158 return expand_expr (result, target, mode, EXPAND_NORMAL);
4163 /* Expand expression EXP, which is a call to the memcmp built-in function.
4164 Return NULL_RTX if we failed and the
4165 caller should emit a normal call, otherwise try to get the result in
4166 TARGET, if convenient (and in mode MODE, if that's convenient). */
4169 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4171 location_t loc = EXPR_LOCATION (exp);
4173 if (!validate_arglist (exp,
4174 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4178 tree result = fold_builtin_memcmp (loc,
4179 CALL_EXPR_ARG (exp, 0),
4180 CALL_EXPR_ARG (exp, 1),
4181 CALL_EXPR_ARG (exp, 2));
4183 return expand_expr (result, target, mode, EXPAND_NORMAL);
4186 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4188 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4191 tree arg1 = CALL_EXPR_ARG (exp, 0);
4192 tree arg2 = CALL_EXPR_ARG (exp, 1);
4193 tree len = CALL_EXPR_ARG (exp, 2);
4196 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4198 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4199 enum machine_mode insn_mode;
4201 #ifdef HAVE_cmpmemsi
4203 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4206 #ifdef HAVE_cmpstrnsi
4208 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4213 /* If we don't have POINTER_TYPE, call the function. */
4214 if (arg1_align == 0 || arg2_align == 0)
4217 /* Make a place to write the result of the instruction. */
4220 && REG_P (result) && GET_MODE (result) == insn_mode
4221 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4222 result = gen_reg_rtx (insn_mode);
4224 arg1_rtx = get_memory_rtx (arg1, len);
4225 arg2_rtx = get_memory_rtx (arg2, len);
4226 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4228 /* Set MEM_SIZE as appropriate. */
4229 if (CONST_INT_P (arg3_rtx))
4231 set_mem_size (arg1_rtx, arg3_rtx);
4232 set_mem_size (arg2_rtx, arg3_rtx);
4235 #ifdef HAVE_cmpmemsi
4237 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4238 GEN_INT (MIN (arg1_align, arg2_align)));
4241 #ifdef HAVE_cmpstrnsi
4243 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4244 GEN_INT (MIN (arg1_align, arg2_align)));
4252 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4253 TYPE_MODE (integer_type_node), 3,
4254 XEXP (arg1_rtx, 0), Pmode,
4255 XEXP (arg2_rtx, 0), Pmode,
4256 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4257 TYPE_UNSIGNED (sizetype)),
4258 TYPE_MODE (sizetype));
4260 /* Return the value in the proper mode for this function. */
4261 mode = TYPE_MODE (TREE_TYPE (exp));
4262 if (GET_MODE (result) == mode)
4264 else if (target != 0)
4266 convert_move (target, result, 0);
4270 return convert_to_mode (mode, result, 0);
4277 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4278 if we failed the caller should emit a normal call, otherwise try to get
4279 the result in TARGET, if convenient. */
4282 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4284 location_t loc = EXPR_LOCATION (exp);
4286 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4290 tree result = fold_builtin_strcmp (loc,
4291 CALL_EXPR_ARG (exp, 0),
4292 CALL_EXPR_ARG (exp, 1));
4294 return expand_expr (result, target, mode, EXPAND_NORMAL);
4297 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4298 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4299 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4301 rtx arg1_rtx, arg2_rtx;
4302 rtx result, insn = NULL_RTX;
4304 tree arg1 = CALL_EXPR_ARG (exp, 0);
4305 tree arg2 = CALL_EXPR_ARG (exp, 1);
4308 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4310 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4312 /* If we don't have POINTER_TYPE, call the function. */
4313 if (arg1_align == 0 || arg2_align == 0)
4316 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4317 arg1 = builtin_save_expr (arg1);
4318 arg2 = builtin_save_expr (arg2);
4320 arg1_rtx = get_memory_rtx (arg1, NULL);
4321 arg2_rtx = get_memory_rtx (arg2, NULL);
4323 #ifdef HAVE_cmpstrsi
4324 /* Try to call cmpstrsi. */
4327 enum machine_mode insn_mode
4328 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4330 /* Make a place to write the result of the instruction. */
4333 && REG_P (result) && GET_MODE (result) == insn_mode
4334 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4335 result = gen_reg_rtx (insn_mode);
4337 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4338 GEN_INT (MIN (arg1_align, arg2_align)));
4341 #ifdef HAVE_cmpstrnsi
4342 /* Try to determine at least one length and call cmpstrnsi. */
4343 if (!insn && HAVE_cmpstrnsi)
4348 enum machine_mode insn_mode
4349 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4350 tree len1 = c_strlen (arg1, 1);
4351 tree len2 = c_strlen (arg2, 1);
4354 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4356 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4358 /* If we don't have a constant length for the first, use the length
4359 of the second, if we know it. We don't require a constant for
4360 this case; some cost analysis could be done if both are available
4361 but neither is constant. For now, assume they're equally cheap,
4362 unless one has side effects. If both strings have constant lengths,
4369 else if (TREE_SIDE_EFFECTS (len1))
4371 else if (TREE_SIDE_EFFECTS (len2))
4373 else if (TREE_CODE (len1) != INTEGER_CST)
4375 else if (TREE_CODE (len2) != INTEGER_CST)
4377 else if (tree_int_cst_lt (len1, len2))
4382 /* If both arguments have side effects, we cannot optimize. */
4383 if (!len || TREE_SIDE_EFFECTS (len))
4386 arg3_rtx = expand_normal (len);
4388 /* Make a place to write the result of the instruction. */
4391 && REG_P (result) && GET_MODE (result) == insn_mode
4392 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4393 result = gen_reg_rtx (insn_mode);
4395 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4396 GEN_INT (MIN (arg1_align, arg2_align)));
4404 /* Return the value in the proper mode for this function. */
4405 mode = TYPE_MODE (TREE_TYPE (exp));
4406 if (GET_MODE (result) == mode)
4409 return convert_to_mode (mode, result, 0);
4410 convert_move (target, result, 0);
4414 /* Expand the library call ourselves using a stabilized argument
4415 list to avoid re-evaluating the function's arguments twice. */
4416 #ifdef HAVE_cmpstrnsi
4419 fndecl = get_callee_fndecl (exp);
4420 fn = build_call_expr (fndecl, 2, arg1, arg2);
4421 if (TREE_CODE (fn) == CALL_EXPR)
4422 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4423 return expand_call (fn, target, target == const0_rtx);
4429 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4430 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4431 the result in TARGET, if convenient. */
4434 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4436 location_t loc = EXPR_LOCATION (exp);
4438 if (!validate_arglist (exp,
4439 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4443 tree result = fold_builtin_strncmp (loc,
4444 CALL_EXPR_ARG (exp, 0),
4445 CALL_EXPR_ARG (exp, 1),
4446 CALL_EXPR_ARG (exp, 2));
4448 return expand_expr (result, target, mode, EXPAND_NORMAL);
4451 /* If c_strlen can determine an expression for one of the string
4452 lengths, and it doesn't have side effects, then emit cmpstrnsi
4453 using length MIN(strlen(string)+1, arg3). */
4454 #ifdef HAVE_cmpstrnsi
4457 tree len, len1, len2;
4458 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4461 tree arg1 = CALL_EXPR_ARG (exp, 0);
4462 tree arg2 = CALL_EXPR_ARG (exp, 1);
4463 tree arg3 = CALL_EXPR_ARG (exp, 2);
4466 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4468 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4469 enum machine_mode insn_mode
4470 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4472 len1 = c_strlen (arg1, 1);
4473 len2 = c_strlen (arg2, 1);
4476 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4478 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4480 /* If we don't have a constant length for the first, use the length
4481 of the second, if we know it. We don't require a constant for
4482 this case; some cost analysis could be done if both are available
4483 but neither is constant. For now, assume they're equally cheap,
4484 unless one has side effects. If both strings have constant lengths,
4491 else if (TREE_SIDE_EFFECTS (len1))
4493 else if (TREE_SIDE_EFFECTS (len2))
4495 else if (TREE_CODE (len1) != INTEGER_CST)
4497 else if (TREE_CODE (len2) != INTEGER_CST)
4499 else if (tree_int_cst_lt (len1, len2))
4504 /* If both arguments have side effects, we cannot optimize. */
4505 if (!len || TREE_SIDE_EFFECTS (len))
4508 /* The actual new length parameter is MIN(len,arg3). */
4509 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4510 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4512 /* If we don't have POINTER_TYPE, call the function. */
4513 if (arg1_align == 0 || arg2_align == 0)
4516 /* Make a place to write the result of the instruction. */
4519 && REG_P (result) && GET_MODE (result) == insn_mode
4520 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4521 result = gen_reg_rtx (insn_mode);
4523 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4524 arg1 = builtin_save_expr (arg1);
4525 arg2 = builtin_save_expr (arg2);
4526 len = builtin_save_expr (len);
4528 arg1_rtx = get_memory_rtx (arg1, len);
4529 arg2_rtx = get_memory_rtx (arg2, len);
4530 arg3_rtx = expand_normal (len);
4531 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4532 GEN_INT (MIN (arg1_align, arg2_align)));
4537 /* Return the value in the proper mode for this function. */
4538 mode = TYPE_MODE (TREE_TYPE (exp));
4539 if (GET_MODE (result) == mode)
4542 return convert_to_mode (mode, result, 0);
4543 convert_move (target, result, 0);
4547 /* Expand the library call ourselves using a stabilized argument
4548 list to avoid re-evaluating the function's arguments twice. */
4549 fndecl = get_callee_fndecl (exp);
4550 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4551 if (TREE_CODE (fn) == CALL_EXPR)
4552 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4553 return expand_call (fn, target, target == const0_rtx);
4559 /* Expand expression EXP, which is a call to the strcat builtin.
4560 Return NULL_RTX if we failed the caller should emit a normal call,
4561 otherwise try to get the result in TARGET, if convenient. */
4564 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4566 location_t loc = EXPR_LOCATION (exp);
4568 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4572 tree dst = CALL_EXPR_ARG (exp, 0);
4573 tree src = CALL_EXPR_ARG (exp, 1);
4574 const char *p = c_getstr (src);
4576 /* If the string length is zero, return the dst parameter. */
4577 if (p && *p == '\0')
4578 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4580 if (optimize_insn_for_speed_p ())
4582 /* See if we can store by pieces into (dst + strlen(dst)). */
4583 tree newsrc, newdst,
4584 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4587 /* Stabilize the argument list. */
4588 newsrc = builtin_save_expr (src);
4589 dst = builtin_save_expr (dst);
4593 /* Create strlen (dst). */
4594 newdst = build_call_expr (strlen_fn, 1, dst);
4595 /* Create (dst p+ strlen (dst)). */
4597 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
4598 TREE_TYPE (dst), dst, newdst);
4599 newdst = builtin_save_expr (newdst);
4601 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4603 end_sequence (); /* Stop sequence. */
4607 /* Output the entire sequence. */
4608 insns = get_insns ();
4612 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4619 /* Expand expression EXP, which is a call to the strncat builtin.
4620 Return NULL_RTX if we failed the caller should emit a normal call,
4621 otherwise try to get the result in TARGET, if convenient. */
4624 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4626 if (validate_arglist (exp,
4627 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4629 tree result = fold_builtin_strncat (EXPR_LOCATION (exp),
4630 CALL_EXPR_ARG (exp, 0),
4631 CALL_EXPR_ARG (exp, 1),
4632 CALL_EXPR_ARG (exp, 2));
4634 return expand_expr (result, target, mode, EXPAND_NORMAL);
4639 /* Expand expression EXP, which is a call to the strspn builtin.
4640 Return NULL_RTX if we failed the caller should emit a normal call,
4641 otherwise try to get the result in TARGET, if convenient. */
4644 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4646 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4648 tree result = fold_builtin_strspn (EXPR_LOCATION (exp),
4649 CALL_EXPR_ARG (exp, 0),
4650 CALL_EXPR_ARG (exp, 1));
4652 return expand_expr (result, target, mode, EXPAND_NORMAL);
4657 /* Expand expression EXP, which is a call to the strcspn builtin.
4658 Return NULL_RTX if we failed the caller should emit a normal call,
4659 otherwise try to get the result in TARGET, if convenient. */
4662 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4664 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4666 tree result = fold_builtin_strcspn (EXPR_LOCATION (exp),
4667 CALL_EXPR_ARG (exp, 0),
4668 CALL_EXPR_ARG (exp, 1));
4670 return expand_expr (result, target, mode, EXPAND_NORMAL);
4675 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4676 if that's convenient. */
4679 expand_builtin_saveregs (void)
4683 /* Don't do __builtin_saveregs more than once in a function.
4684 Save the result of the first call and reuse it. */
4685 if (saveregs_value != 0)
4686 return saveregs_value;
4688 /* When this function is called, it means that registers must be
4689 saved on entry to this function. So we migrate the call to the
4690 first insn of this function. */
4694 /* Do whatever the machine needs done in this case. */
4695 val = targetm.calls.expand_builtin_saveregs ();
4700 saveregs_value = val;
4702 /* Put the insns after the NOTE that starts the function. If this
4703 is inside a start_sequence, make the outer-level insn chain current, so
4704 the code is placed at the start of the function. */
4705 push_topmost_sequence ();
4706 emit_insn_after (seq, entry_of_function ());
4707 pop_topmost_sequence ();
4712 /* __builtin_args_info (N) returns word N of the arg space info
4713 for the current function. The number and meanings of words
4714 is controlled by the definition of CUMULATIVE_ARGS. */
4717 expand_builtin_args_info (tree exp)
4719 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4720 int *word_ptr = (int *) &crtl->args.info;
4722 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4724 if (call_expr_nargs (exp) != 0)
4726 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4727 error ("argument of %<__builtin_args_info%> must be constant");
4730 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4732 if (wordnum < 0 || wordnum >= nwords)
4733 error ("argument of %<__builtin_args_info%> out of range");
4735 return GEN_INT (word_ptr[wordnum]);
4739 error ("missing argument in %<__builtin_args_info%>");
4744 /* Expand a call to __builtin_next_arg. */
4747 expand_builtin_next_arg (void)
4749 /* Checking arguments is already done in fold_builtin_next_arg
4750 that must be called before this function. */
4751 return expand_binop (ptr_mode, add_optab,
4752 crtl->args.internal_arg_pointer,
4753 crtl->args.arg_offset_rtx,
4754 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4757 /* Make it easier for the backends by protecting the valist argument
4758 from multiple evaluations. */
4761 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4763 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4765 gcc_assert (vatype != NULL_TREE);
4767 if (TREE_CODE (vatype) == ARRAY_TYPE)
4769 if (TREE_SIDE_EFFECTS (valist))
4770 valist = save_expr (valist);
4772 /* For this case, the backends will be expecting a pointer to
4773 vatype, but it's possible we've actually been given an array
4774 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4776 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4778 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4779 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4788 if (! TREE_SIDE_EFFECTS (valist))
4791 pt = build_pointer_type (vatype);
4792 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4793 TREE_SIDE_EFFECTS (valist) = 1;
4796 if (TREE_SIDE_EFFECTS (valist))
4797 valist = save_expr (valist);
4798 valist = build_fold_indirect_ref_loc (loc, valist);
4804 /* The "standard" definition of va_list is void*. */
4807 std_build_builtin_va_list (void)
4809 return ptr_type_node;
4812 /* The "standard" abi va_list is va_list_type_node. */
4815 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4817 return va_list_type_node;
4820 /* The "standard" type of va_list is va_list_type_node. */
4823 std_canonical_va_list_type (tree type)
4827 if (INDIRECT_REF_P (type))
4828 type = TREE_TYPE (type);
4829 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4830 type = TREE_TYPE (type);
4831 wtype = va_list_type_node;
4833 /* Treat structure va_list types. */
4834 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4835 htype = TREE_TYPE (htype);
4836 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4838 /* If va_list is an array type, the argument may have decayed
4839 to a pointer type, e.g. by being passed to another function.
4840 In that case, unwrap both types so that we can compare the
4841 underlying records. */
4842 if (TREE_CODE (htype) == ARRAY_TYPE
4843 || POINTER_TYPE_P (htype))
4845 wtype = TREE_TYPE (wtype);
4846 htype = TREE_TYPE (htype);
4849 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4850 return va_list_type_node;
4855 /* The "standard" implementation of va_start: just assign `nextarg' to
4859 std_expand_builtin_va_start (tree valist, rtx nextarg)
4861 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4862 convert_move (va_r, nextarg, 0);
4865 /* Expand EXP, a call to __builtin_va_start. */
4868 expand_builtin_va_start (tree exp)
4872 location_t loc = EXPR_LOCATION (exp);
4874 if (call_expr_nargs (exp) < 2)
4876 error_at (loc, "too few arguments to function %<va_start%>");
4880 if (fold_builtin_next_arg (exp, true))
4883 nextarg = expand_builtin_next_arg ();
4884 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4886 if (targetm.expand_builtin_va_start)
4887 targetm.expand_builtin_va_start (valist, nextarg);
4889 std_expand_builtin_va_start (valist, nextarg);
4894 /* The "standard" implementation of va_arg: read the value from the
4895 current (padded) address and increment by the (padded) size. */
4898 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4901 tree addr, t, type_size, rounded_size, valist_tmp;
4902 unsigned HOST_WIDE_INT align, boundary;
4905 #ifdef ARGS_GROW_DOWNWARD
4906 /* All of the alignment and movement below is for args-grow-up machines.
4907 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4908 implement their own specialized gimplify_va_arg_expr routines. */
4912 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4914 type = build_pointer_type (type);
4916 align = PARM_BOUNDARY / BITS_PER_UNIT;
4917 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4919 /* When we align parameter on stack for caller, if the parameter
4920 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4921 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4922 here with caller. */
4923 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4924 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4926 boundary /= BITS_PER_UNIT;
4928 /* Hoist the valist value into a temporary for the moment. */
4929 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4931 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4932 requires greater alignment, we must perform dynamic alignment. */
4933 if (boundary > align
4934 && !integer_zerop (TYPE_SIZE (type)))
4936 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4937 fold_build2 (POINTER_PLUS_EXPR,
4939 valist_tmp, size_int (boundary - 1)));
4940 gimplify_and_add (t, pre_p);
4942 t = fold_convert (sizetype, valist_tmp);
4943 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4944 fold_convert (TREE_TYPE (valist),
4945 fold_build2 (BIT_AND_EXPR, sizetype, t,
4946 size_int (-boundary))));
4947 gimplify_and_add (t, pre_p);
4952 /* If the actual alignment is less than the alignment of the type,
4953 adjust the type accordingly so that we don't assume strict alignment
4954 when dereferencing the pointer. */
4955 boundary *= BITS_PER_UNIT;
4956 if (boundary < TYPE_ALIGN (type))
4958 type = build_variant_type_copy (type);
4959 TYPE_ALIGN (type) = boundary;
4962 /* Compute the rounded size of the type. */
4963 type_size = size_in_bytes (type);
4964 rounded_size = round_up (type_size, align);
4966 /* Reduce rounded_size so it's sharable with the postqueue. */
4967 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4971 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4973 /* Small args are padded downward. */
4974 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4975 rounded_size, size_int (align));
4976 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4977 size_binop (MINUS_EXPR, rounded_size, type_size));
4978 addr = fold_build2 (POINTER_PLUS_EXPR,
4979 TREE_TYPE (addr), addr, t);
4982 /* Compute new value for AP. */
4983 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4984 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4985 gimplify_and_add (t, pre_p);
4987 addr = fold_convert (build_pointer_type (type), addr);
4990 addr = build_va_arg_indirect_ref (addr);
4992 return build_va_arg_indirect_ref (addr);
4995 /* Build an indirect-ref expression over the given TREE, which represents a
4996 piece of a va_arg() expansion. */
4998 build_va_arg_indirect_ref (tree addr)
5000 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
5002 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
5008 /* Return a dummy expression of type TYPE in order to keep going after an
5012 dummy_object (tree type)
5014 tree t = build_int_cst (build_pointer_type (type), 0);
5015 return build1 (INDIRECT_REF, type, t);
5018 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
5019 builtin function, but a very special sort of operator. */
5021 enum gimplify_status
5022 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5024 tree promoted_type, have_va_type;
5025 tree valist = TREE_OPERAND (*expr_p, 0);
5026 tree type = TREE_TYPE (*expr_p);
5028 location_t loc = EXPR_LOCATION (*expr_p);
5030 /* Verify that valist is of the proper type. */
5031 have_va_type = TREE_TYPE (valist);
5032 if (have_va_type == error_mark_node)
5034 have_va_type = targetm.canonical_va_list_type (have_va_type);
5036 if (have_va_type == NULL_TREE)
5038 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
5042 /* Generate a diagnostic for requesting data of a type that cannot
5043 be passed through `...' due to type promotion at the call site. */
5044 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
5047 static bool gave_help;
5050 /* Unfortunately, this is merely undefined, rather than a constraint
5051 violation, so we cannot make this an error. If this call is never
5052 executed, the program is still strictly conforming. */
5053 warned = warning_at (loc, 0,
5054 "%qT is promoted to %qT when passed through %<...%>",
5055 type, promoted_type);
5056 if (!gave_help && warned)
5059 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
5060 promoted_type, type);
5063 /* We can, however, treat "undefined" any way we please.
5064 Call abort to encourage the user to fix the program. */
5066 inform (loc, "if this code is reached, the program will abort");
5067 /* Before the abort, allow the evaluation of the va_list
5068 expression to exit or longjmp. */
5069 gimplify_and_add (valist, pre_p);
5070 t = build_call_expr_loc (loc,
5071 implicit_built_in_decls[BUILT_IN_TRAP], 0);
5072 gimplify_and_add (t, pre_p);
5074 /* This is dead code, but go ahead and finish so that the
5075 mode of the result comes out right. */
5076 *expr_p = dummy_object (type);
5081 /* Make it easier for the backends by protecting the valist argument
5082 from multiple evaluations. */
5083 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
5085 /* For this case, the backends will be expecting a pointer to
5086 TREE_TYPE (abi), but it's possible we've
5087 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
5089 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5091 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
5092 valist = fold_convert_loc (loc, p1,
5093 build_fold_addr_expr_loc (loc, valist));
5096 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
5099 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
5101 if (!targetm.gimplify_va_arg_expr)
5102 /* FIXME: Once most targets are converted we should merely
5103 assert this is non-null. */
5106 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
5111 /* Expand EXP, a call to __builtin_va_end. */
5114 expand_builtin_va_end (tree exp)
5116 tree valist = CALL_EXPR_ARG (exp, 0);
5118 /* Evaluate for side effects, if needed. I hate macros that don't
5120 if (TREE_SIDE_EFFECTS (valist))
5121 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5126 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5127 builtin rather than just as an assignment in stdarg.h because of the
5128 nastiness of array-type va_list types. */
5131 expand_builtin_va_copy (tree exp)
5134 location_t loc = EXPR_LOCATION (exp);
5136 dst = CALL_EXPR_ARG (exp, 0);
5137 src = CALL_EXPR_ARG (exp, 1);
5139 dst = stabilize_va_list_loc (loc, dst, 1);
5140 src = stabilize_va_list_loc (loc, src, 0);
5142 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5144 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5146 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5147 TREE_SIDE_EFFECTS (t) = 1;
5148 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5152 rtx dstb, srcb, size;
5154 /* Evaluate to pointers. */
5155 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5156 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5157 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5158 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5160 dstb = convert_memory_address (Pmode, dstb);
5161 srcb = convert_memory_address (Pmode, srcb);
5163 /* "Dereference" to BLKmode memories. */
5164 dstb = gen_rtx_MEM (BLKmode, dstb);
5165 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5166 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5167 srcb = gen_rtx_MEM (BLKmode, srcb);
5168 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5169 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5172 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5178 /* Expand a call to one of the builtin functions __builtin_frame_address or
5179 __builtin_return_address. */
5182 expand_builtin_frame_address (tree fndecl, tree exp)
5184 /* The argument must be a nonnegative integer constant.
5185 It counts the number of frames to scan up the stack.
5186 The value is the return address saved in that frame. */
5187 if (call_expr_nargs (exp) == 0)
5188 /* Warning about missing arg was already issued. */
5190 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5192 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5193 error ("invalid argument to %<__builtin_frame_address%>");
5195 error ("invalid argument to %<__builtin_return_address%>");
5201 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5202 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5204 /* Some ports cannot access arbitrary stack frames. */
5207 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5208 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5210 warning (0, "unsupported argument to %<__builtin_return_address%>");
5214 /* For __builtin_frame_address, return what we've got. */
5215 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5219 && ! CONSTANT_P (tem))
5220 tem = copy_to_mode_reg (Pmode, tem);
5225 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5226 we failed and the caller should emit a normal call, otherwise try to get
5227 the result in TARGET, if convenient. */
5230 expand_builtin_alloca (tree exp, rtx target)
5235 /* Emit normal call if marked not-inlineable. */
5236 if (CALL_CANNOT_INLINE_P (exp))
5239 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5242 /* Compute the argument. */
5243 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5245 /* Allocate the desired space. */
5246 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5247 result = convert_memory_address (ptr_mode, result);
5252 /* Expand a call to a bswap builtin with argument ARG0. MODE
5253 is the mode to expand with. */
5256 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5258 enum machine_mode mode;
5262 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5265 arg = CALL_EXPR_ARG (exp, 0);
5266 mode = TYPE_MODE (TREE_TYPE (arg));
5267 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5269 target = expand_unop (mode, bswap_optab, op0, target, 1);
5271 gcc_assert (target);
5273 return convert_to_mode (mode, target, 0);
5276 /* Expand a call to a unary builtin in EXP.
5277 Return NULL_RTX if a normal call should be emitted rather than expanding the
5278 function in-line. If convenient, the result should be placed in TARGET.
5279 SUBTARGET may be used as the target for computing one of EXP's operands. */
5282 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5283 rtx subtarget, optab op_optab)
5287 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5290 /* Compute the argument. */
5291 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5292 VOIDmode, EXPAND_NORMAL);
5293 /* Compute op, into TARGET if possible.
5294 Set TARGET to wherever the result comes back. */
5295 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5296 op_optab, op0, target, 1);
5297 gcc_assert (target);
5299 return convert_to_mode (target_mode, target, 0);
5302 /* If the string passed to fputs is a constant and is one character
5303 long, we attempt to transform this call into __builtin_fputc(). */
5306 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5308 /* Verify the arguments in the original call. */
5309 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5311 tree result = fold_builtin_fputs (EXPR_LOCATION (exp),
5312 CALL_EXPR_ARG (exp, 0),
5313 CALL_EXPR_ARG (exp, 1),
5314 (target == const0_rtx),
5315 unlocked, NULL_TREE);
5317 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5322 /* Expand a call to __builtin_expect. We just return our argument
5323 as the builtin_expect semantic should've been already executed by
5324 tree branch prediction pass. */
5327 expand_builtin_expect (tree exp, rtx target)
5331 if (call_expr_nargs (exp) < 2)
5333 arg = CALL_EXPR_ARG (exp, 0);
5334 c = CALL_EXPR_ARG (exp, 1);
5336 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5337 /* When guessing was done, the hints should be already stripped away. */
5338 gcc_assert (!flag_guess_branch_prob
5339 || optimize == 0 || errorcount || sorrycount);
5344 expand_builtin_trap (void)
5348 emit_insn (gen_trap ());
5351 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5355 /* Expand a call to __builtin_unreachable. We do nothing except emit
5356 a barrier saying that control flow will not pass here.
5358 It is the responsibility of the program being compiled to ensure
5359 that control flow does never reach __builtin_unreachable. */
5361 expand_builtin_unreachable (void)
5366 /* Expand EXP, a call to fabs, fabsf or fabsl.
5367 Return NULL_RTX if a normal call should be emitted rather than expanding
5368 the function inline. If convenient, the result should be placed
5369 in TARGET. SUBTARGET may be used as the target for computing
5373 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5375 enum machine_mode mode;
5379 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5382 arg = CALL_EXPR_ARG (exp, 0);
5383 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5384 mode = TYPE_MODE (TREE_TYPE (arg));
5385 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5386 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5389 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5390 Return NULL is a normal call should be emitted rather than expanding the
5391 function inline. If convenient, the result should be placed in TARGET.
5392 SUBTARGET may be used as the target for computing the operand. */
5395 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5400 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5403 arg = CALL_EXPR_ARG (exp, 0);
5404 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5406 arg = CALL_EXPR_ARG (exp, 1);
5407 op1 = expand_normal (arg);
5409 return expand_copysign (op0, op1, target);
5412 /* Create a new constant string literal and return a char* pointer to it.
5413 The STRING_CST value is the LEN characters at STR. */
5415 build_string_literal (int len, const char *str)
5417 tree t, elem, index, type;
5419 t = build_string (len, str);
5420 elem = build_type_variant (char_type_node, 1, 0);
5421 index = build_index_type (size_int (len - 1));
5422 type = build_array_type (elem, index);
5423 TREE_TYPE (t) = type;
5424 TREE_CONSTANT (t) = 1;
5425 TREE_READONLY (t) = 1;
5426 TREE_STATIC (t) = 1;
5428 type = build_pointer_type (elem);
5429 t = build1 (ADDR_EXPR, type,
5430 build4 (ARRAY_REF, elem,
5431 t, integer_zero_node, NULL_TREE, NULL_TREE));
5435 /* Expand EXP, a call to printf or printf_unlocked.
5436 Return NULL_RTX if a normal call should be emitted rather than transforming
5437 the function inline. If convenient, the result should be placed in
5438 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5441 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5444 /* If we're using an unlocked function, assume the other unlocked
5445 functions exist explicitly. */
5446 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5447 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5448 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5449 : implicit_built_in_decls[BUILT_IN_PUTS];
5450 const char *fmt_str;
5453 int nargs = call_expr_nargs (exp);
5455 /* If the return value is used, don't do the transformation. */
5456 if (target != const0_rtx)
5459 /* Verify the required arguments in the original call. */
5462 fmt = CALL_EXPR_ARG (exp, 0);
5463 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5466 /* Check whether the format is a literal string constant. */
5467 fmt_str = c_getstr (fmt);
5468 if (fmt_str == NULL)
5471 if (!init_target_chars ())
5474 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5475 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5478 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5481 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5483 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5484 else if (strcmp (fmt_str, target_percent_c) == 0)
5487 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5490 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5494 /* We can't handle anything else with % args or %% ... yet. */
5495 if (strchr (fmt_str, target_percent))
5501 /* If the format specifier was "", printf does nothing. */
5502 if (fmt_str[0] == '\0')
5504 /* If the format specifier has length of 1, call putchar. */
5505 if (fmt_str[1] == '\0')
5507 /* Given printf("c"), (where c is any one character,)
5508 convert "c"[0] to an int and pass that to the replacement
5510 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5512 fn = build_call_expr (fn_putchar, 1, arg);
5516 /* If the format specifier was "string\n", call puts("string"). */
5517 size_t len = strlen (fmt_str);
5518 if ((unsigned char)fmt_str[len - 1] == target_newline)
5520 /* Create a NUL-terminated string that's one char shorter
5521 than the original, stripping off the trailing '\n'. */
5522 char *newstr = XALLOCAVEC (char, len);
5523 memcpy (newstr, fmt_str, len - 1);
5524 newstr[len - 1] = 0;
5525 arg = build_string_literal (len, newstr);
5527 fn = build_call_expr (fn_puts, 1, arg);
5530 /* We'd like to arrange to call fputs(string,stdout) here,
5531 but we need stdout and don't have a way to get it yet. */
5538 if (TREE_CODE (fn) == CALL_EXPR)
5539 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5540 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5543 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5544 Return NULL_RTX if a normal call should be emitted rather than transforming
5545 the function inline. If convenient, the result should be placed in
5546 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5549 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5552 /* If we're using an unlocked function, assume the other unlocked
5553 functions exist explicitly. */
5554 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5555 : implicit_built_in_decls[BUILT_IN_FPUTC];
5556 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5557 : implicit_built_in_decls[BUILT_IN_FPUTS];
5558 const char *fmt_str;
5561 int nargs = call_expr_nargs (exp);
5563 /* If the return value is used, don't do the transformation. */
5564 if (target != const0_rtx)
5567 /* Verify the required arguments in the original call. */
5570 fp = CALL_EXPR_ARG (exp, 0);
5571 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5573 fmt = CALL_EXPR_ARG (exp, 1);
5574 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5577 /* Check whether the format is a literal string constant. */
5578 fmt_str = c_getstr (fmt);
5579 if (fmt_str == NULL)
5582 if (!init_target_chars ())
5585 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5586 if (strcmp (fmt_str, target_percent_s) == 0)
5589 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5591 arg = CALL_EXPR_ARG (exp, 2);
5593 fn = build_call_expr (fn_fputs, 2, arg, fp);
5595 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5596 else if (strcmp (fmt_str, target_percent_c) == 0)
5599 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5601 arg = CALL_EXPR_ARG (exp, 2);
5603 fn = build_call_expr (fn_fputc, 2, arg, fp);
5607 /* We can't handle anything else with % args or %% ... yet. */
5608 if (strchr (fmt_str, target_percent))
5614 /* If the format specifier was "", fprintf does nothing. */
5615 if (fmt_str[0] == '\0')
5617 /* Evaluate and ignore FILE* argument for side-effects. */
5618 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5622 /* When "string" doesn't contain %, replace all cases of
5623 fprintf(stream,string) with fputs(string,stream). The fputs
5624 builtin will take care of special cases like length == 1. */
5626 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5631 if (TREE_CODE (fn) == CALL_EXPR)
5632 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5633 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5636 /* Expand a call EXP to sprintf. Return NULL_RTX if
5637 a normal call should be emitted rather than expanding the function
5638 inline. If convenient, the result should be placed in TARGET with
5642 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5645 const char *fmt_str;
5646 int nargs = call_expr_nargs (exp);
5648 /* Verify the required arguments in the original call. */
5651 dest = CALL_EXPR_ARG (exp, 0);
5652 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5654 fmt = CALL_EXPR_ARG (exp, 0);
5655 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5658 /* Check whether the format is a literal string constant. */
5659 fmt_str = c_getstr (fmt);
5660 if (fmt_str == NULL)
5663 if (!init_target_chars ())
5666 /* If the format doesn't contain % args or %%, use strcpy. */
5667 if (strchr (fmt_str, target_percent) == 0)
5669 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5672 if ((nargs > 2) || ! fn)
5674 expand_expr (build_call_expr (fn, 2, dest, fmt),
5675 const0_rtx, VOIDmode, EXPAND_NORMAL);
5676 if (target == const0_rtx)
5678 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5679 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5681 /* If the format is "%s", use strcpy if the result isn't used. */
5682 else if (strcmp (fmt_str, target_percent_s) == 0)
5685 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5691 arg = CALL_EXPR_ARG (exp, 2);
5692 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5695 if (target != const0_rtx)
5697 len = c_strlen (arg, 1);
5698 if (! len || TREE_CODE (len) != INTEGER_CST)
5704 expand_expr (build_call_expr (fn, 2, dest, arg),
5705 const0_rtx, VOIDmode, EXPAND_NORMAL);
5707 if (target == const0_rtx)
5709 return expand_expr (len, target, mode, EXPAND_NORMAL);
5715 /* Expand a call to either the entry or exit function profiler. */
5718 expand_builtin_profile_func (bool exitp)
5720 rtx this_rtx, which;
5722 this_rtx = DECL_RTL (current_function_decl);
5723 gcc_assert (MEM_P (this_rtx));
5724 this_rtx = XEXP (this_rtx, 0);
5727 which = profile_function_exit_libfunc;
5729 which = profile_function_entry_libfunc;
5731 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5732 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5739 /* Expand a call to __builtin___clear_cache. */
5742 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5744 #ifndef HAVE_clear_cache
5745 #ifdef CLEAR_INSN_CACHE
5746 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5747 does something. Just do the default expansion to a call to
5751 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5752 does nothing. There is no need to call it. Do nothing. */
5754 #endif /* CLEAR_INSN_CACHE */
5756 /* We have a "clear_cache" insn, and it will handle everything. */
5758 rtx begin_rtx, end_rtx;
5759 enum insn_code icode;
5761 /* We must not expand to a library call. If we did, any
5762 fallback library function in libgcc that might contain a call to
5763 __builtin___clear_cache() would recurse infinitely. */
5764 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5766 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5770 if (HAVE_clear_cache)
5772 icode = CODE_FOR_clear_cache;
5774 begin = CALL_EXPR_ARG (exp, 0);
5775 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5776 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5777 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5778 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5780 end = CALL_EXPR_ARG (exp, 1);
5781 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5782 end_rtx = convert_memory_address (Pmode, end_rtx);
5783 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5784 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5786 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5789 #endif /* HAVE_clear_cache */
5792 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5795 round_trampoline_addr (rtx tramp)
5797 rtx temp, addend, mask;
5799 /* If we don't need too much alignment, we'll have been guaranteed
5800 proper alignment by get_trampoline_type. */
5801 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5804 /* Round address up to desired boundary. */
5805 temp = gen_reg_rtx (Pmode);
5806 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5807 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5809 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5810 temp, 0, OPTAB_LIB_WIDEN);
5811 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5812 temp, 0, OPTAB_LIB_WIDEN);
5818 expand_builtin_init_trampoline (tree exp)
5820 tree t_tramp, t_func, t_chain;
5821 rtx r_tramp, r_func, r_chain;
5822 #ifdef TRAMPOLINE_TEMPLATE
5826 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5827 POINTER_TYPE, VOID_TYPE))
5830 t_tramp = CALL_EXPR_ARG (exp, 0);
5831 t_func = CALL_EXPR_ARG (exp, 1);
5832 t_chain = CALL_EXPR_ARG (exp, 2);
5834 r_tramp = expand_normal (t_tramp);
5835 r_func = expand_normal (t_func);
5836 r_chain = expand_normal (t_chain);
5838 /* Generate insns to initialize the trampoline. */
5839 r_tramp = round_trampoline_addr (r_tramp);
5840 #ifdef TRAMPOLINE_TEMPLATE
5841 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5842 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5843 emit_block_move (blktramp, assemble_trampoline_template (),
5844 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5846 trampolines_created = 1;
5847 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5853 expand_builtin_adjust_trampoline (tree exp)
5857 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5860 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5861 tramp = round_trampoline_addr (tramp);
5862 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5863 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5869 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5870 function. The function first checks whether the back end provides
5871 an insn to implement signbit for the respective mode. If not, it
5872 checks whether the floating point format of the value is such that
5873 the sign bit can be extracted. If that is not the case, the
5874 function returns NULL_RTX to indicate that a normal call should be
5875 emitted rather than expanding the function in-line. EXP is the
5876 expression that is a call to the builtin function; if convenient,
5877 the result should be placed in TARGET. */
5879 expand_builtin_signbit (tree exp, rtx target)
5881 const struct real_format *fmt;
5882 enum machine_mode fmode, imode, rmode;
5883 HOST_WIDE_INT hi, lo;
5886 enum insn_code icode;
5888 location_t loc = EXPR_LOCATION (exp);
5890 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5893 arg = CALL_EXPR_ARG (exp, 0);
5894 fmode = TYPE_MODE (TREE_TYPE (arg));
5895 rmode = TYPE_MODE (TREE_TYPE (exp));
5896 fmt = REAL_MODE_FORMAT (fmode);
5898 arg = builtin_save_expr (arg);
5900 /* Expand the argument yielding a RTX expression. */
5901 temp = expand_normal (arg);
5903 /* Check if the back end provides an insn that handles signbit for the
5905 icode = signbit_optab->handlers [(int) fmode].insn_code;
5906 if (icode != CODE_FOR_nothing)
5908 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5909 emit_unop_insn (icode, target, temp, UNKNOWN);
5913 /* For floating point formats without a sign bit, implement signbit
5915 bitpos = fmt->signbit_ro;
5918 /* But we can't do this if the format supports signed zero. */
5919 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5922 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5923 build_real (TREE_TYPE (arg), dconst0));
5924 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5927 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5929 imode = int_mode_for_mode (fmode);
5930 if (imode == BLKmode)
5932 temp = gen_lowpart (imode, temp);
5937 /* Handle targets with different FP word orders. */
5938 if (FLOAT_WORDS_BIG_ENDIAN)
5939 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5941 word = bitpos / BITS_PER_WORD;
5942 temp = operand_subword_force (temp, word, fmode);
5943 bitpos = bitpos % BITS_PER_WORD;
5946 /* Force the intermediate word_mode (or narrower) result into a
5947 register. This avoids attempting to create paradoxical SUBREGs
5948 of floating point modes below. */
5949 temp = force_reg (imode, temp);
5951 /* If the bitpos is within the "result mode" lowpart, the operation
5952 can be implement with a single bitwise AND. Otherwise, we need
5953 a right shift and an AND. */
5955 if (bitpos < GET_MODE_BITSIZE (rmode))
5957 if (bitpos < HOST_BITS_PER_WIDE_INT)
5960 lo = (HOST_WIDE_INT) 1 << bitpos;
5964 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5968 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5969 temp = gen_lowpart (rmode, temp);
5970 temp = expand_binop (rmode, and_optab, temp,
5971 immed_double_const (lo, hi, rmode),
5972 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5976 /* Perform a logical right shift to place the signbit in the least
5977 significant bit, then truncate the result to the desired mode
5978 and mask just this bit. */
5979 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5980 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5981 temp = gen_lowpart (rmode, temp);
5982 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5983 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5989 /* Expand fork or exec calls. TARGET is the desired target of the
5990 call. EXP is the call. FN is the
5991 identificator of the actual function. IGNORE is nonzero if the
5992 value is to be ignored. */
5995 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
6000 /* If we are not profiling, just call the function. */
6001 if (!profile_arc_flag)
6004 /* Otherwise call the wrapper. This should be equivalent for the rest of
6005 compiler, so the code does not diverge, and the wrapper may run the
6006 code necessary for keeping the profiling sane. */
6008 switch (DECL_FUNCTION_CODE (fn))
6011 id = get_identifier ("__gcov_fork");
6014 case BUILT_IN_EXECL:
6015 id = get_identifier ("__gcov_execl");
6018 case BUILT_IN_EXECV:
6019 id = get_identifier ("__gcov_execv");
6022 case BUILT_IN_EXECLP:
6023 id = get_identifier ("__gcov_execlp");
6026 case BUILT_IN_EXECLE:
6027 id = get_identifier ("__gcov_execle");
6030 case BUILT_IN_EXECVP:
6031 id = get_identifier ("__gcov_execvp");
6034 case BUILT_IN_EXECVE:
6035 id = get_identifier ("__gcov_execve");
6042 decl = build_decl (DECL_SOURCE_LOCATION (fn),
6043 FUNCTION_DECL, id, TREE_TYPE (fn));
6044 DECL_EXTERNAL (decl) = 1;
6045 TREE_PUBLIC (decl) = 1;
6046 DECL_ARTIFICIAL (decl) = 1;
6047 TREE_NOTHROW (decl) = 1;
6048 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6049 DECL_VISIBILITY_SPECIFIED (decl) = 1;
6050 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
6051 return expand_call (call, target, ignore);
6056 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6057 the pointer in these functions is void*, the tree optimizers may remove
6058 casts. The mode computed in expand_builtin isn't reliable either, due
6059 to __sync_bool_compare_and_swap.
6061 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6062 group of builtins. This gives us log2 of the mode size. */
6064 static inline enum machine_mode
6065 get_builtin_sync_mode (int fcode_diff)
6067 /* The size is not negotiable, so ask not to get BLKmode in return
6068 if the target indicates that a smaller size would be better. */
6069 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
6072 /* Expand the memory expression LOC and return the appropriate memory operand
6073 for the builtin_sync operations. */
6076 get_builtin_sync_mem (tree loc, enum machine_mode mode)
6080 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
6082 /* Note that we explicitly do not want any alias information for this
6083 memory, so that we kill all other live memories. Otherwise we don't
6084 satisfy the full barrier semantics of the intrinsic. */
6085 mem = validize_mem (gen_rtx_MEM (mode, addr));
6087 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
6088 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6089 MEM_VOLATILE_P (mem) = 1;
6094 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6095 EXP is the CALL_EXPR. CODE is the rtx code
6096 that corresponds to the arithmetic or logical operation from the name;
6097 an exception here is that NOT actually means NAND. TARGET is an optional
6098 place for us to store the results; AFTER is true if this is the
6099 fetch_and_xxx form. IGNORE is true if we don't actually care about
6100 the result of the operation at all. */
6103 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
6104 enum rtx_code code, bool after,
6105 rtx target, bool ignore)
6108 enum machine_mode old_mode;
6109 location_t loc = EXPR_LOCATION (exp);
6111 if (code == NOT && warn_sync_nand)
6113 tree fndecl = get_callee_fndecl (exp);
6114 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6116 static bool warned_f_a_n, warned_n_a_f;
6120 case BUILT_IN_FETCH_AND_NAND_1:
6121 case BUILT_IN_FETCH_AND_NAND_2:
6122 case BUILT_IN_FETCH_AND_NAND_4:
6123 case BUILT_IN_FETCH_AND_NAND_8:
6124 case BUILT_IN_FETCH_AND_NAND_16:
6129 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
6130 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6131 warned_f_a_n = true;
6134 case BUILT_IN_NAND_AND_FETCH_1:
6135 case BUILT_IN_NAND_AND_FETCH_2:
6136 case BUILT_IN_NAND_AND_FETCH_4:
6137 case BUILT_IN_NAND_AND_FETCH_8:
6138 case BUILT_IN_NAND_AND_FETCH_16:
6143 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
6144 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6145 warned_n_a_f = true;
6153 /* Expand the operands. */
6154 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6156 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6157 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6158 of CONST_INTs, where we know the old_mode only from the call argument. */
6159 old_mode = GET_MODE (val);
6160 if (old_mode == VOIDmode)
6161 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6162 val = convert_modes (mode, old_mode, val, 1);
6165 return expand_sync_operation (mem, val, code);
6167 return expand_sync_fetch_operation (mem, val, code, after, target);
6170 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6171 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6172 true if this is the boolean form. TARGET is a place for us to store the
6173 results; this is NOT optional if IS_BOOL is true. */
6176 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
6177 bool is_bool, rtx target)
6179 rtx old_val, new_val, mem;
6180 enum machine_mode old_mode;
6182 /* Expand the operands. */
6183 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6186 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
6187 mode, EXPAND_NORMAL);
6188 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6189 of CONST_INTs, where we know the old_mode only from the call argument. */
6190 old_mode = GET_MODE (old_val);
6191 if (old_mode == VOIDmode)
6192 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6193 old_val = convert_modes (mode, old_mode, old_val, 1);
6195 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
6196 mode, EXPAND_NORMAL);
6197 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6198 of CONST_INTs, where we know the old_mode only from the call argument. */
6199 old_mode = GET_MODE (new_val);
6200 if (old_mode == VOIDmode)
6201 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6202 new_val = convert_modes (mode, old_mode, new_val, 1);
6205 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6207 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6210 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6211 general form is actually an atomic exchange, and some targets only
6212 support a reduced form with the second argument being a constant 1.
6213 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6217 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6221 enum machine_mode old_mode;
6223 /* Expand the operands. */
6224 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6225 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6226 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6227 of CONST_INTs, where we know the old_mode only from the call argument. */
6228 old_mode = GET_MODE (val);
6229 if (old_mode == VOIDmode)
6230 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6231 val = convert_modes (mode, old_mode, val, 1);
6233 return expand_sync_lock_test_and_set (mem, val, target);
6236 /* Expand the __sync_synchronize intrinsic. */
6239 expand_builtin_synchronize (void)
6243 #ifdef HAVE_memory_barrier
6244 if (HAVE_memory_barrier)
6246 emit_insn (gen_memory_barrier ());
6251 if (synchronize_libfunc != NULL_RTX)
6253 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6257 /* If no explicit memory barrier instruction is available, create an
6258 empty asm stmt with a memory clobber. */
6259 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6260 tree_cons (NULL, build_string (6, "memory"), NULL));
6261 ASM_VOLATILE_P (x) = 1;
6262 expand_asm_expr (x);
6265 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6268 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6270 enum insn_code icode;
6272 rtx val = const0_rtx;
6274 /* Expand the operands. */
6275 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6277 /* If there is an explicit operation in the md file, use it. */
6278 icode = sync_lock_release[mode];
6279 if (icode != CODE_FOR_nothing)
6281 if (!insn_data[icode].operand[1].predicate (val, mode))
6282 val = force_reg (mode, val);
6284 insn = GEN_FCN (icode) (mem, val);
6292 /* Otherwise we can implement this operation by emitting a barrier
6293 followed by a store of zero. */
6294 expand_builtin_synchronize ();
6295 emit_move_insn (mem, val);
6298 /* Expand an expression EXP that calls a built-in function,
6299 with result going to TARGET if that's convenient
6300 (and in mode MODE if that's convenient).
6301 SUBTARGET may be used as the target for computing one of EXP's operands.
6302 IGNORE is nonzero if the value is to be ignored. */
6305 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6308 tree fndecl = get_callee_fndecl (exp);
6309 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6310 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6312 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6313 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6315 /* When not optimizing, generate calls to library functions for a certain
6318 && !called_as_built_in (fndecl)
6319 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6320 && fcode != BUILT_IN_ALLOCA
6321 && fcode != BUILT_IN_FREE)
6322 return expand_call (exp, target, ignore);
6324 /* The built-in function expanders test for target == const0_rtx
6325 to determine whether the function's result will be ignored. */
6327 target = const0_rtx;
6329 /* If the result of a pure or const built-in function is ignored, and
6330 none of its arguments are volatile, we can avoid expanding the
6331 built-in call and just evaluate the arguments for side-effects. */
6332 if (target == const0_rtx
6333 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6335 bool volatilep = false;
6337 call_expr_arg_iterator iter;
6339 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6340 if (TREE_THIS_VOLATILE (arg))
6348 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6349 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6356 CASE_FLT_FN (BUILT_IN_FABS):
6357 target = expand_builtin_fabs (exp, target, subtarget);
6362 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6363 target = expand_builtin_copysign (exp, target, subtarget);
6368 /* Just do a normal library call if we were unable to fold
6370 CASE_FLT_FN (BUILT_IN_CABS):
6373 CASE_FLT_FN (BUILT_IN_EXP):
6374 CASE_FLT_FN (BUILT_IN_EXP10):
6375 CASE_FLT_FN (BUILT_IN_POW10):
6376 CASE_FLT_FN (BUILT_IN_EXP2):
6377 CASE_FLT_FN (BUILT_IN_EXPM1):
6378 CASE_FLT_FN (BUILT_IN_LOGB):
6379 CASE_FLT_FN (BUILT_IN_LOG):
6380 CASE_FLT_FN (BUILT_IN_LOG10):
6381 CASE_FLT_FN (BUILT_IN_LOG2):
6382 CASE_FLT_FN (BUILT_IN_LOG1P):
6383 CASE_FLT_FN (BUILT_IN_TAN):
6384 CASE_FLT_FN (BUILT_IN_ASIN):
6385 CASE_FLT_FN (BUILT_IN_ACOS):
6386 CASE_FLT_FN (BUILT_IN_ATAN):
6387 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6388 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6389 because of possible accuracy problems. */
6390 if (! flag_unsafe_math_optimizations)
6392 CASE_FLT_FN (BUILT_IN_SQRT):
6393 CASE_FLT_FN (BUILT_IN_FLOOR):
6394 CASE_FLT_FN (BUILT_IN_CEIL):
6395 CASE_FLT_FN (BUILT_IN_TRUNC):
6396 CASE_FLT_FN (BUILT_IN_ROUND):
6397 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6398 CASE_FLT_FN (BUILT_IN_RINT):
6399 target = expand_builtin_mathfn (exp, target, subtarget);
6404 CASE_FLT_FN (BUILT_IN_ILOGB):
6405 if (! flag_unsafe_math_optimizations)
6407 CASE_FLT_FN (BUILT_IN_ISINF):
6408 CASE_FLT_FN (BUILT_IN_FINITE):
6409 case BUILT_IN_ISFINITE:
6410 case BUILT_IN_ISNORMAL:
6411 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6416 CASE_FLT_FN (BUILT_IN_LCEIL):
6417 CASE_FLT_FN (BUILT_IN_LLCEIL):
6418 CASE_FLT_FN (BUILT_IN_LFLOOR):
6419 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6420 target = expand_builtin_int_roundingfn (exp, target);
6425 CASE_FLT_FN (BUILT_IN_LRINT):
6426 CASE_FLT_FN (BUILT_IN_LLRINT):
6427 CASE_FLT_FN (BUILT_IN_LROUND):
6428 CASE_FLT_FN (BUILT_IN_LLROUND):
6429 target = expand_builtin_int_roundingfn_2 (exp, target);
6434 CASE_FLT_FN (BUILT_IN_POW):
6435 target = expand_builtin_pow (exp, target, subtarget);
6440 CASE_FLT_FN (BUILT_IN_POWI):
6441 target = expand_builtin_powi (exp, target, subtarget);
6446 CASE_FLT_FN (BUILT_IN_ATAN2):
6447 CASE_FLT_FN (BUILT_IN_LDEXP):
6448 CASE_FLT_FN (BUILT_IN_SCALB):
6449 CASE_FLT_FN (BUILT_IN_SCALBN):
6450 CASE_FLT_FN (BUILT_IN_SCALBLN):
6451 if (! flag_unsafe_math_optimizations)
6454 CASE_FLT_FN (BUILT_IN_FMOD):
6455 CASE_FLT_FN (BUILT_IN_REMAINDER):
6456 CASE_FLT_FN (BUILT_IN_DREM):
6457 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6462 CASE_FLT_FN (BUILT_IN_CEXPI):
6463 target = expand_builtin_cexpi (exp, target, subtarget);
6464 gcc_assert (target);
6467 CASE_FLT_FN (BUILT_IN_SIN):
6468 CASE_FLT_FN (BUILT_IN_COS):
6469 if (! flag_unsafe_math_optimizations)
6471 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6476 CASE_FLT_FN (BUILT_IN_SINCOS):
6477 if (! flag_unsafe_math_optimizations)
6479 target = expand_builtin_sincos (exp);
6484 case BUILT_IN_APPLY_ARGS:
6485 return expand_builtin_apply_args ();
6487 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6488 FUNCTION with a copy of the parameters described by
6489 ARGUMENTS, and ARGSIZE. It returns a block of memory
6490 allocated on the stack into which is stored all the registers
6491 that might possibly be used for returning the result of a
6492 function. ARGUMENTS is the value returned by
6493 __builtin_apply_args. ARGSIZE is the number of bytes of
6494 arguments that must be copied. ??? How should this value be
6495 computed? We'll also need a safe worst case value for varargs
6497 case BUILT_IN_APPLY:
6498 if (!validate_arglist (exp, POINTER_TYPE,
6499 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6500 && !validate_arglist (exp, REFERENCE_TYPE,
6501 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6507 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6508 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6509 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6511 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6514 /* __builtin_return (RESULT) causes the function to return the
6515 value described by RESULT. RESULT is address of the block of
6516 memory returned by __builtin_apply. */
6517 case BUILT_IN_RETURN:
6518 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6519 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6522 case BUILT_IN_SAVEREGS:
6523 return expand_builtin_saveregs ();
6525 case BUILT_IN_ARGS_INFO:
6526 return expand_builtin_args_info (exp);
6528 case BUILT_IN_VA_ARG_PACK:
6529 /* All valid uses of __builtin_va_arg_pack () are removed during
6531 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6534 case BUILT_IN_VA_ARG_PACK_LEN:
6535 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6537 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6540 /* Return the address of the first anonymous stack arg. */
6541 case BUILT_IN_NEXT_ARG:
6542 if (fold_builtin_next_arg (exp, false))
6544 return expand_builtin_next_arg ();
6546 case BUILT_IN_CLEAR_CACHE:
6547 target = expand_builtin___clear_cache (exp);
6552 case BUILT_IN_CLASSIFY_TYPE:
6553 return expand_builtin_classify_type (exp);
6555 case BUILT_IN_CONSTANT_P:
6558 case BUILT_IN_FRAME_ADDRESS:
6559 case BUILT_IN_RETURN_ADDRESS:
6560 return expand_builtin_frame_address (fndecl, exp);
6562 /* Returns the address of the area where the structure is returned.
6564 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6565 if (call_expr_nargs (exp) != 0
6566 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6567 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6570 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6572 case BUILT_IN_ALLOCA:
6573 target = expand_builtin_alloca (exp, target);
6578 case BUILT_IN_STACK_SAVE:
6579 return expand_stack_save ();
6581 case BUILT_IN_STACK_RESTORE:
6582 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6585 case BUILT_IN_BSWAP32:
6586 case BUILT_IN_BSWAP64:
6587 target = expand_builtin_bswap (exp, target, subtarget);
6593 CASE_INT_FN (BUILT_IN_FFS):
6594 case BUILT_IN_FFSIMAX:
6595 target = expand_builtin_unop (target_mode, exp, target,
6596 subtarget, ffs_optab);
6601 CASE_INT_FN (BUILT_IN_CLZ):
6602 case BUILT_IN_CLZIMAX:
6603 target = expand_builtin_unop (target_mode, exp, target,
6604 subtarget, clz_optab);
6609 CASE_INT_FN (BUILT_IN_CTZ):
6610 case BUILT_IN_CTZIMAX:
6611 target = expand_builtin_unop (target_mode, exp, target,
6612 subtarget, ctz_optab);
6617 CASE_INT_FN (BUILT_IN_POPCOUNT):
6618 case BUILT_IN_POPCOUNTIMAX:
6619 target = expand_builtin_unop (target_mode, exp, target,
6620 subtarget, popcount_optab);
6625 CASE_INT_FN (BUILT_IN_PARITY):
6626 case BUILT_IN_PARITYIMAX:
6627 target = expand_builtin_unop (target_mode, exp, target,
6628 subtarget, parity_optab);
6633 case BUILT_IN_STRLEN:
6634 target = expand_builtin_strlen (exp, target, target_mode);
6639 case BUILT_IN_STRCPY:
6640 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6645 case BUILT_IN_STRNCPY:
6646 target = expand_builtin_strncpy (exp, target, mode);
6651 case BUILT_IN_STPCPY:
6652 target = expand_builtin_stpcpy (exp, target, mode);
6657 case BUILT_IN_STRCAT:
6658 target = expand_builtin_strcat (fndecl, exp, target, mode);
6663 case BUILT_IN_STRNCAT:
6664 target = expand_builtin_strncat (exp, target, mode);
6669 case BUILT_IN_STRSPN:
6670 target = expand_builtin_strspn (exp, target, mode);
6675 case BUILT_IN_STRCSPN:
6676 target = expand_builtin_strcspn (exp, target, mode);
6681 case BUILT_IN_STRSTR:
6682 target = expand_builtin_strstr (exp, target, mode);
6687 case BUILT_IN_STRPBRK:
6688 target = expand_builtin_strpbrk (exp, target, mode);
6693 case BUILT_IN_INDEX:
6694 case BUILT_IN_STRCHR:
6695 target = expand_builtin_strchr (exp, target, mode);
6700 case BUILT_IN_RINDEX:
6701 case BUILT_IN_STRRCHR:
6702 target = expand_builtin_strrchr (exp, target, mode);
6707 case BUILT_IN_MEMCPY:
6708 target = expand_builtin_memcpy (exp, target, mode);
6713 case BUILT_IN_MEMPCPY:
6714 target = expand_builtin_mempcpy (exp, target, mode);
6719 case BUILT_IN_MEMMOVE:
6720 target = expand_builtin_memmove (exp, target, mode, ignore);
6725 case BUILT_IN_BCOPY:
6726 target = expand_builtin_bcopy (exp, ignore);
6731 case BUILT_IN_MEMSET:
6732 target = expand_builtin_memset (exp, target, mode);
6737 case BUILT_IN_BZERO:
6738 target = expand_builtin_bzero (exp);
6743 case BUILT_IN_STRCMP:
6744 target = expand_builtin_strcmp (exp, target, mode);
6749 case BUILT_IN_STRNCMP:
6750 target = expand_builtin_strncmp (exp, target, mode);
6755 case BUILT_IN_MEMCHR:
6756 target = expand_builtin_memchr (exp, target, mode);
6762 case BUILT_IN_MEMCMP:
6763 target = expand_builtin_memcmp (exp, target, mode);
6768 case BUILT_IN_SETJMP:
6769 /* This should have been lowered to the builtins below. */
6772 case BUILT_IN_SETJMP_SETUP:
6773 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6774 and the receiver label. */
6775 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6777 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6778 VOIDmode, EXPAND_NORMAL);
6779 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6780 rtx label_r = label_rtx (label);
6782 /* This is copied from the handling of non-local gotos. */
6783 expand_builtin_setjmp_setup (buf_addr, label_r);
6784 nonlocal_goto_handler_labels
6785 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6786 nonlocal_goto_handler_labels);
6787 /* ??? Do not let expand_label treat us as such since we would
6788 not want to be both on the list of non-local labels and on
6789 the list of forced labels. */
6790 FORCED_LABEL (label) = 0;
6795 case BUILT_IN_SETJMP_DISPATCHER:
6796 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6797 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6799 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6800 rtx label_r = label_rtx (label);
6802 /* Remove the dispatcher label from the list of non-local labels
6803 since the receiver labels have been added to it above. */
6804 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6809 case BUILT_IN_SETJMP_RECEIVER:
6810 /* __builtin_setjmp_receiver is passed the receiver label. */
6811 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6813 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6814 rtx label_r = label_rtx (label);
6816 expand_builtin_setjmp_receiver (label_r);
6821 /* __builtin_longjmp is passed a pointer to an array of five words.
6822 It's similar to the C library longjmp function but works with
6823 __builtin_setjmp above. */
6824 case BUILT_IN_LONGJMP:
6825 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6827 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6828 VOIDmode, EXPAND_NORMAL);
6829 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6831 if (value != const1_rtx)
6833 error ("%<__builtin_longjmp%> second argument must be 1");
6837 expand_builtin_longjmp (buf_addr, value);
6842 case BUILT_IN_NONLOCAL_GOTO:
6843 target = expand_builtin_nonlocal_goto (exp);
6848 /* This updates the setjmp buffer that is its argument with the value
6849 of the current stack pointer. */
6850 case BUILT_IN_UPDATE_SETJMP_BUF:
6851 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6854 = expand_normal (CALL_EXPR_ARG (exp, 0));
6856 expand_builtin_update_setjmp_buf (buf_addr);
6862 expand_builtin_trap ();
6865 case BUILT_IN_UNREACHABLE:
6866 expand_builtin_unreachable ();
6869 case BUILT_IN_PRINTF:
6870 target = expand_builtin_printf (exp, target, mode, false);
6875 case BUILT_IN_PRINTF_UNLOCKED:
6876 target = expand_builtin_printf (exp, target, mode, true);
6881 case BUILT_IN_FPUTS:
6882 target = expand_builtin_fputs (exp, target, false);
6886 case BUILT_IN_FPUTS_UNLOCKED:
6887 target = expand_builtin_fputs (exp, target, true);
6892 case BUILT_IN_FPRINTF:
6893 target = expand_builtin_fprintf (exp, target, mode, false);
6898 case BUILT_IN_FPRINTF_UNLOCKED:
6899 target = expand_builtin_fprintf (exp, target, mode, true);
6904 case BUILT_IN_SPRINTF:
6905 target = expand_builtin_sprintf (exp, target, mode);
6910 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6911 case BUILT_IN_SIGNBITD32:
6912 case BUILT_IN_SIGNBITD64:
6913 case BUILT_IN_SIGNBITD128:
6914 target = expand_builtin_signbit (exp, target);
6919 /* Various hooks for the DWARF 2 __throw routine. */
6920 case BUILT_IN_UNWIND_INIT:
6921 expand_builtin_unwind_init ();
6923 case BUILT_IN_DWARF_CFA:
6924 return virtual_cfa_rtx;
6925 #ifdef DWARF2_UNWIND_INFO
6926 case BUILT_IN_DWARF_SP_COLUMN:
6927 return expand_builtin_dwarf_sp_column ();
6928 case BUILT_IN_INIT_DWARF_REG_SIZES:
6929 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6932 case BUILT_IN_FROB_RETURN_ADDR:
6933 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6934 case BUILT_IN_EXTRACT_RETURN_ADDR:
6935 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6936 case BUILT_IN_EH_RETURN:
6937 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6938 CALL_EXPR_ARG (exp, 1));
6940 #ifdef EH_RETURN_DATA_REGNO
6941 case BUILT_IN_EH_RETURN_DATA_REGNO:
6942 return expand_builtin_eh_return_data_regno (exp);
6944 case BUILT_IN_EXTEND_POINTER:
6945 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6947 case BUILT_IN_VA_START:
6948 return expand_builtin_va_start (exp);
6949 case BUILT_IN_VA_END:
6950 return expand_builtin_va_end (exp);
6951 case BUILT_IN_VA_COPY:
6952 return expand_builtin_va_copy (exp);
6953 case BUILT_IN_EXPECT:
6954 return expand_builtin_expect (exp, target);
6955 case BUILT_IN_PREFETCH:
6956 expand_builtin_prefetch (exp);
6959 case BUILT_IN_PROFILE_FUNC_ENTER:
6960 return expand_builtin_profile_func (false);
6961 case BUILT_IN_PROFILE_FUNC_EXIT:
6962 return expand_builtin_profile_func (true);
6964 case BUILT_IN_INIT_TRAMPOLINE:
6965 return expand_builtin_init_trampoline (exp);
6966 case BUILT_IN_ADJUST_TRAMPOLINE:
6967 return expand_builtin_adjust_trampoline (exp);
6970 case BUILT_IN_EXECL:
6971 case BUILT_IN_EXECV:
6972 case BUILT_IN_EXECLP:
6973 case BUILT_IN_EXECLE:
6974 case BUILT_IN_EXECVP:
6975 case BUILT_IN_EXECVE:
6976 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6981 case BUILT_IN_FETCH_AND_ADD_1:
6982 case BUILT_IN_FETCH_AND_ADD_2:
6983 case BUILT_IN_FETCH_AND_ADD_4:
6984 case BUILT_IN_FETCH_AND_ADD_8:
6985 case BUILT_IN_FETCH_AND_ADD_16:
6986 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6987 target = expand_builtin_sync_operation (mode, exp, PLUS,
6988 false, target, ignore);
6993 case BUILT_IN_FETCH_AND_SUB_1:
6994 case BUILT_IN_FETCH_AND_SUB_2:
6995 case BUILT_IN_FETCH_AND_SUB_4:
6996 case BUILT_IN_FETCH_AND_SUB_8:
6997 case BUILT_IN_FETCH_AND_SUB_16:
6998 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6999 target = expand_builtin_sync_operation (mode, exp, MINUS,
7000 false, target, ignore);
7005 case BUILT_IN_FETCH_AND_OR_1:
7006 case BUILT_IN_FETCH_AND_OR_2:
7007 case BUILT_IN_FETCH_AND_OR_4:
7008 case BUILT_IN_FETCH_AND_OR_8:
7009 case BUILT_IN_FETCH_AND_OR_16:
7010 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
7011 target = expand_builtin_sync_operation (mode, exp, IOR,
7012 false, target, ignore);
7017 case BUILT_IN_FETCH_AND_AND_1:
7018 case BUILT_IN_FETCH_AND_AND_2:
7019 case BUILT_IN_FETCH_AND_AND_4:
7020 case BUILT_IN_FETCH_AND_AND_8:
7021 case BUILT_IN_FETCH_AND_AND_16:
7022 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
7023 target = expand_builtin_sync_operation (mode, exp, AND,
7024 false, target, ignore);
7029 case BUILT_IN_FETCH_AND_XOR_1:
7030 case BUILT_IN_FETCH_AND_XOR_2:
7031 case BUILT_IN_FETCH_AND_XOR_4:
7032 case BUILT_IN_FETCH_AND_XOR_8:
7033 case BUILT_IN_FETCH_AND_XOR_16:
7034 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
7035 target = expand_builtin_sync_operation (mode, exp, XOR,
7036 false, target, ignore);
7041 case BUILT_IN_FETCH_AND_NAND_1:
7042 case BUILT_IN_FETCH_AND_NAND_2:
7043 case BUILT_IN_FETCH_AND_NAND_4:
7044 case BUILT_IN_FETCH_AND_NAND_8:
7045 case BUILT_IN_FETCH_AND_NAND_16:
7046 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
7047 target = expand_builtin_sync_operation (mode, exp, NOT,
7048 false, target, ignore);
7053 case BUILT_IN_ADD_AND_FETCH_1:
7054 case BUILT_IN_ADD_AND_FETCH_2:
7055 case BUILT_IN_ADD_AND_FETCH_4:
7056 case BUILT_IN_ADD_AND_FETCH_8:
7057 case BUILT_IN_ADD_AND_FETCH_16:
7058 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
7059 target = expand_builtin_sync_operation (mode, exp, PLUS,
7060 true, target, ignore);
7065 case BUILT_IN_SUB_AND_FETCH_1:
7066 case BUILT_IN_SUB_AND_FETCH_2:
7067 case BUILT_IN_SUB_AND_FETCH_4:
7068 case BUILT_IN_SUB_AND_FETCH_8:
7069 case BUILT_IN_SUB_AND_FETCH_16:
7070 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
7071 target = expand_builtin_sync_operation (mode, exp, MINUS,
7072 true, target, ignore);
7077 case BUILT_IN_OR_AND_FETCH_1:
7078 case BUILT_IN_OR_AND_FETCH_2:
7079 case BUILT_IN_OR_AND_FETCH_4:
7080 case BUILT_IN_OR_AND_FETCH_8:
7081 case BUILT_IN_OR_AND_FETCH_16:
7082 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
7083 target = expand_builtin_sync_operation (mode, exp, IOR,
7084 true, target, ignore);
7089 case BUILT_IN_AND_AND_FETCH_1:
7090 case BUILT_IN_AND_AND_FETCH_2:
7091 case BUILT_IN_AND_AND_FETCH_4:
7092 case BUILT_IN_AND_AND_FETCH_8:
7093 case BUILT_IN_AND_AND_FETCH_16:
7094 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
7095 target = expand_builtin_sync_operation (mode, exp, AND,
7096 true, target, ignore);
7101 case BUILT_IN_XOR_AND_FETCH_1:
7102 case BUILT_IN_XOR_AND_FETCH_2:
7103 case BUILT_IN_XOR_AND_FETCH_4:
7104 case BUILT_IN_XOR_AND_FETCH_8:
7105 case BUILT_IN_XOR_AND_FETCH_16:
7106 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
7107 target = expand_builtin_sync_operation (mode, exp, XOR,
7108 true, target, ignore);
7113 case BUILT_IN_NAND_AND_FETCH_1:
7114 case BUILT_IN_NAND_AND_FETCH_2:
7115 case BUILT_IN_NAND_AND_FETCH_4:
7116 case BUILT_IN_NAND_AND_FETCH_8:
7117 case BUILT_IN_NAND_AND_FETCH_16:
7118 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
7119 target = expand_builtin_sync_operation (mode, exp, NOT,
7120 true, target, ignore);
7125 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
7126 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
7127 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
7128 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
7129 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
7130 if (mode == VOIDmode)
7131 mode = TYPE_MODE (boolean_type_node);
7132 if (!target || !register_operand (target, mode))
7133 target = gen_reg_rtx (mode);
7135 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
7136 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7141 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
7142 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
7143 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
7144 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
7145 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
7146 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
7147 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7152 case BUILT_IN_LOCK_TEST_AND_SET_1:
7153 case BUILT_IN_LOCK_TEST_AND_SET_2:
7154 case BUILT_IN_LOCK_TEST_AND_SET_4:
7155 case BUILT_IN_LOCK_TEST_AND_SET_8:
7156 case BUILT_IN_LOCK_TEST_AND_SET_16:
7157 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
7158 target = expand_builtin_lock_test_and_set (mode, exp, target);
7163 case BUILT_IN_LOCK_RELEASE_1:
7164 case BUILT_IN_LOCK_RELEASE_2:
7165 case BUILT_IN_LOCK_RELEASE_4:
7166 case BUILT_IN_LOCK_RELEASE_8:
7167 case BUILT_IN_LOCK_RELEASE_16:
7168 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
7169 expand_builtin_lock_release (mode, exp);
7172 case BUILT_IN_SYNCHRONIZE:
7173 expand_builtin_synchronize ();
7176 case BUILT_IN_OBJECT_SIZE:
7177 return expand_builtin_object_size (exp);
7179 case BUILT_IN_MEMCPY_CHK:
7180 case BUILT_IN_MEMPCPY_CHK:
7181 case BUILT_IN_MEMMOVE_CHK:
7182 case BUILT_IN_MEMSET_CHK:
7183 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7188 case BUILT_IN_STRCPY_CHK:
7189 case BUILT_IN_STPCPY_CHK:
7190 case BUILT_IN_STRNCPY_CHK:
7191 case BUILT_IN_STRCAT_CHK:
7192 case BUILT_IN_STRNCAT_CHK:
7193 case BUILT_IN_SNPRINTF_CHK:
7194 case BUILT_IN_VSNPRINTF_CHK:
7195 maybe_emit_chk_warning (exp, fcode);
7198 case BUILT_IN_SPRINTF_CHK:
7199 case BUILT_IN_VSPRINTF_CHK:
7200 maybe_emit_sprintf_chk_warning (exp, fcode);
7204 maybe_emit_free_warning (exp);
7207 default: /* just do library call, if unknown builtin */
7211 /* The switch statement above can drop through to cause the function
7212 to be called normally. */
7213 return expand_call (exp, target, ignore);
7216 /* Determine whether a tree node represents a call to a built-in
7217 function. If the tree T is a call to a built-in function with
7218 the right number of arguments of the appropriate types, return
7219 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7220 Otherwise the return value is END_BUILTINS. */
7222 enum built_in_function
7223 builtin_mathfn_code (const_tree t)
7225 const_tree fndecl, arg, parmlist;
7226 const_tree argtype, parmtype;
7227 const_call_expr_arg_iterator iter;
7229 if (TREE_CODE (t) != CALL_EXPR
7230 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7231 return END_BUILTINS;
7233 fndecl = get_callee_fndecl (t);
7234 if (fndecl == NULL_TREE
7235 || TREE_CODE (fndecl) != FUNCTION_DECL
7236 || ! DECL_BUILT_IN (fndecl)
7237 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7238 return END_BUILTINS;
7240 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7241 init_const_call_expr_arg_iterator (t, &iter);
7242 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7244 /* If a function doesn't take a variable number of arguments,
7245 the last element in the list will have type `void'. */
7246 parmtype = TREE_VALUE (parmlist);
7247 if (VOID_TYPE_P (parmtype))
7249 if (more_const_call_expr_args_p (&iter))
7250 return END_BUILTINS;
7251 return DECL_FUNCTION_CODE (fndecl);
7254 if (! more_const_call_expr_args_p (&iter))
7255 return END_BUILTINS;
7257 arg = next_const_call_expr_arg (&iter);
7258 argtype = TREE_TYPE (arg);
7260 if (SCALAR_FLOAT_TYPE_P (parmtype))
7262 if (! SCALAR_FLOAT_TYPE_P (argtype))
7263 return END_BUILTINS;
7265 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7267 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7268 return END_BUILTINS;
7270 else if (POINTER_TYPE_P (parmtype))
7272 if (! POINTER_TYPE_P (argtype))
7273 return END_BUILTINS;
7275 else if (INTEGRAL_TYPE_P (parmtype))
7277 if (! INTEGRAL_TYPE_P (argtype))
7278 return END_BUILTINS;
7281 return END_BUILTINS;
7284 /* Variable-length argument list. */
7285 return DECL_FUNCTION_CODE (fndecl);
7288 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7289 evaluate to a constant. */
7292 fold_builtin_constant_p (tree arg)
7294 /* We return 1 for a numeric type that's known to be a constant
7295 value at compile-time or for an aggregate type that's a
7296 literal constant. */
7299 /* If we know this is a constant, emit the constant of one. */
7300 if (CONSTANT_CLASS_P (arg)
7301 || (TREE_CODE (arg) == CONSTRUCTOR
7302 && TREE_CONSTANT (arg)))
7303 return integer_one_node;
7304 if (TREE_CODE (arg) == ADDR_EXPR)
7306 tree op = TREE_OPERAND (arg, 0);
7307 if (TREE_CODE (op) == STRING_CST
7308 || (TREE_CODE (op) == ARRAY_REF
7309 && integer_zerop (TREE_OPERAND (op, 1))
7310 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7311 return integer_one_node;
7314 /* If this expression has side effects, show we don't know it to be a
7315 constant. Likewise if it's a pointer or aggregate type since in
7316 those case we only want literals, since those are only optimized
7317 when generating RTL, not later.
7318 And finally, if we are compiling an initializer, not code, we
7319 need to return a definite result now; there's not going to be any
7320 more optimization done. */
7321 if (TREE_SIDE_EFFECTS (arg)
7322 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7323 || POINTER_TYPE_P (TREE_TYPE (arg))
7325 || folding_initializer)
7326 return integer_zero_node;
7331 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7332 return it as a truthvalue. */
7335 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
7337 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7339 fn = built_in_decls[BUILT_IN_EXPECT];
7340 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7341 ret_type = TREE_TYPE (TREE_TYPE (fn));
7342 pred_type = TREE_VALUE (arg_types);
7343 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7345 pred = fold_convert_loc (loc, pred_type, pred);
7346 expected = fold_convert_loc (loc, expected_type, expected);
7347 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
7349 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7350 build_int_cst (ret_type, 0));
7353 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7354 NULL_TREE if no simplification is possible. */
7357 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
7360 enum tree_code code;
7362 /* If this is a builtin_expect within a builtin_expect keep the
7363 inner one. See through a comparison against a constant. It
7364 might have been added to create a thruthvalue. */
7366 if (COMPARISON_CLASS_P (inner)
7367 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7368 inner = TREE_OPERAND (inner, 0);
7370 if (TREE_CODE (inner) == CALL_EXPR
7371 && (fndecl = get_callee_fndecl (inner))
7372 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7373 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7376 /* Distribute the expected value over short-circuiting operators.
7377 See through the cast from truthvalue_type_node to long. */
7379 while (TREE_CODE (inner) == NOP_EXPR
7380 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7381 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7382 inner = TREE_OPERAND (inner, 0);
7384 code = TREE_CODE (inner);
7385 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7387 tree op0 = TREE_OPERAND (inner, 0);
7388 tree op1 = TREE_OPERAND (inner, 1);
7390 op0 = build_builtin_expect_predicate (loc, op0, arg1);
7391 op1 = build_builtin_expect_predicate (loc, op1, arg1);
7392 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7394 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7397 /* If the argument isn't invariant then there's nothing else we can do. */
7398 if (!TREE_CONSTANT (arg0))
7401 /* If we expect that a comparison against the argument will fold to
7402 a constant return the constant. In practice, this means a true
7403 constant or the address of a non-weak symbol. */
7406 if (TREE_CODE (inner) == ADDR_EXPR)
7410 inner = TREE_OPERAND (inner, 0);
7412 while (TREE_CODE (inner) == COMPONENT_REF
7413 || TREE_CODE (inner) == ARRAY_REF);
7414 if ((TREE_CODE (inner) == VAR_DECL
7415 || TREE_CODE (inner) == FUNCTION_DECL)
7416 && DECL_WEAK (inner))
7420 /* Otherwise, ARG0 already has the proper type for the return value. */
7424 /* Fold a call to __builtin_classify_type with argument ARG. */
7427 fold_builtin_classify_type (tree arg)
7430 return build_int_cst (NULL_TREE, no_type_class);
7432 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7435 /* Fold a call to __builtin_strlen with argument ARG. */
7438 fold_builtin_strlen (location_t loc, tree arg)
7440 if (!validate_arg (arg, POINTER_TYPE))
7444 tree len = c_strlen (arg, 0);
7448 /* Convert from the internal "sizetype" type to "size_t". */
7450 len = fold_convert_loc (loc, size_type_node, len);
7458 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7461 fold_builtin_inf (location_t loc, tree type, int warn)
7463 REAL_VALUE_TYPE real;
7465 /* __builtin_inff is intended to be usable to define INFINITY on all
7466 targets. If an infinity is not available, INFINITY expands "to a
7467 positive constant of type float that overflows at translation
7468 time", footnote "In this case, using INFINITY will violate the
7469 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7470 Thus we pedwarn to ensure this constraint violation is
7472 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7473 pedwarn (loc, 0, "target format does not support infinity");
7476 return build_real (type, real);
7479 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7482 fold_builtin_nan (tree arg, tree type, int quiet)
7484 REAL_VALUE_TYPE real;
7487 if (!validate_arg (arg, POINTER_TYPE))
7489 str = c_getstr (arg);
7493 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7496 return build_real (type, real);
7499 /* Return true if the floating point expression T has an integer value.
7500 We also allow +Inf, -Inf and NaN to be considered integer values. */
7503 integer_valued_real_p (tree t)
7505 switch (TREE_CODE (t))
7512 return integer_valued_real_p (TREE_OPERAND (t, 0));
7517 return integer_valued_real_p (TREE_OPERAND (t, 1));
7524 return integer_valued_real_p (TREE_OPERAND (t, 0))
7525 && integer_valued_real_p (TREE_OPERAND (t, 1));
7528 return integer_valued_real_p (TREE_OPERAND (t, 1))
7529 && integer_valued_real_p (TREE_OPERAND (t, 2));
7532 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7536 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7537 if (TREE_CODE (type) == INTEGER_TYPE)
7539 if (TREE_CODE (type) == REAL_TYPE)
7540 return integer_valued_real_p (TREE_OPERAND (t, 0));
7545 switch (builtin_mathfn_code (t))
7547 CASE_FLT_FN (BUILT_IN_CEIL):
7548 CASE_FLT_FN (BUILT_IN_FLOOR):
7549 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7550 CASE_FLT_FN (BUILT_IN_RINT):
7551 CASE_FLT_FN (BUILT_IN_ROUND):
7552 CASE_FLT_FN (BUILT_IN_TRUNC):
7555 CASE_FLT_FN (BUILT_IN_FMIN):
7556 CASE_FLT_FN (BUILT_IN_FMAX):
7557 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7558 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7571 /* FNDECL is assumed to be a builtin where truncation can be propagated
7572 across (for instance floor((double)f) == (double)floorf (f).
7573 Do the transformation for a call with argument ARG. */
7576 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7578 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7580 if (!validate_arg (arg, REAL_TYPE))
7583 /* Integer rounding functions are idempotent. */
7584 if (fcode == builtin_mathfn_code (arg))
7587 /* If argument is already integer valued, and we don't need to worry
7588 about setting errno, there's no need to perform rounding. */
7589 if (! flag_errno_math && integer_valued_real_p (arg))
7594 tree arg0 = strip_float_extensions (arg);
7595 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7596 tree newtype = TREE_TYPE (arg0);
7599 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7600 && (decl = mathfn_built_in (newtype, fcode)))
7601 return fold_convert_loc (loc, ftype,
7602 build_call_expr_loc (loc, decl, 1,
7603 fold_convert_loc (loc,
7610 /* FNDECL is assumed to be builtin which can narrow the FP type of
7611 the argument, for instance lround((double)f) -> lroundf (f).
7612 Do the transformation for a call with argument ARG. */
7615 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7617 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7619 if (!validate_arg (arg, REAL_TYPE))
7622 /* If argument is already integer valued, and we don't need to worry
7623 about setting errno, there's no need to perform rounding. */
7624 if (! flag_errno_math && integer_valued_real_p (arg))
7625 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7626 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7630 tree ftype = TREE_TYPE (arg);
7631 tree arg0 = strip_float_extensions (arg);
7632 tree newtype = TREE_TYPE (arg0);
7635 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7636 && (decl = mathfn_built_in (newtype, fcode)))
7637 return build_call_expr_loc (loc, decl, 1,
7638 fold_convert_loc (loc, newtype, arg0));
7641 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7642 sizeof (long long) == sizeof (long). */
7643 if (TYPE_PRECISION (long_long_integer_type_node)
7644 == TYPE_PRECISION (long_integer_type_node))
7646 tree newfn = NULL_TREE;
7649 CASE_FLT_FN (BUILT_IN_LLCEIL):
7650 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7653 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7654 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7657 CASE_FLT_FN (BUILT_IN_LLROUND):
7658 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7661 CASE_FLT_FN (BUILT_IN_LLRINT):
7662 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7671 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7672 return fold_convert_loc (loc,
7673 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7680 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7681 return type. Return NULL_TREE if no simplification can be made. */
7684 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7688 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7689 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7692 /* Calculate the result when the argument is a constant. */
7693 if (TREE_CODE (arg) == COMPLEX_CST
7694 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7698 if (TREE_CODE (arg) == COMPLEX_EXPR)
7700 tree real = TREE_OPERAND (arg, 0);
7701 tree imag = TREE_OPERAND (arg, 1);
7703 /* If either part is zero, cabs is fabs of the other. */
7704 if (real_zerop (real))
7705 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7706 if (real_zerop (imag))
7707 return fold_build1_loc (loc, ABS_EXPR, type, real);
7709 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7710 if (flag_unsafe_math_optimizations
7711 && operand_equal_p (real, imag, OEP_PURE_SAME))
7713 const REAL_VALUE_TYPE sqrt2_trunc
7714 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7716 return fold_build2_loc (loc, MULT_EXPR, type,
7717 fold_build1_loc (loc, ABS_EXPR, type, real),
7718 build_real (type, sqrt2_trunc));
7722 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7723 if (TREE_CODE (arg) == NEGATE_EXPR
7724 || TREE_CODE (arg) == CONJ_EXPR)
7725 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7727 /* Don't do this when optimizing for size. */
7728 if (flag_unsafe_math_optimizations
7729 && optimize && optimize_function_for_speed_p (cfun))
7731 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7733 if (sqrtfn != NULL_TREE)
7735 tree rpart, ipart, result;
7737 arg = builtin_save_expr (arg);
7739 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7740 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7742 rpart = builtin_save_expr (rpart);
7743 ipart = builtin_save_expr (ipart);
7745 result = fold_build2_loc (loc, PLUS_EXPR, type,
7746 fold_build2_loc (loc, MULT_EXPR, type,
7748 fold_build2_loc (loc, MULT_EXPR, type,
7751 return build_call_expr_loc (loc, sqrtfn, 1, result);
7758 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7759 Return NULL_TREE if no simplification can be made. */
7762 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7765 enum built_in_function fcode;
7768 if (!validate_arg (arg, REAL_TYPE))
7771 /* Calculate the result when the argument is a constant. */
7772 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7775 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7776 fcode = builtin_mathfn_code (arg);
7777 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7779 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7780 arg = fold_build2_loc (loc, MULT_EXPR, type,
7781 CALL_EXPR_ARG (arg, 0),
7782 build_real (type, dconsthalf));
7783 return build_call_expr_loc (loc, expfn, 1, arg);
7786 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7787 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7789 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7793 tree arg0 = CALL_EXPR_ARG (arg, 0);
7795 /* The inner root was either sqrt or cbrt. */
7796 /* This was a conditional expression but it triggered a bug
7798 REAL_VALUE_TYPE dconstroot;
7799 if (BUILTIN_SQRT_P (fcode))
7800 dconstroot = dconsthalf;
7802 dconstroot = dconst_third ();
7804 /* Adjust for the outer root. */
7805 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7806 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7807 tree_root = build_real (type, dconstroot);
7808 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7812 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7813 if (flag_unsafe_math_optimizations
7814 && (fcode == BUILT_IN_POW
7815 || fcode == BUILT_IN_POWF
7816 || fcode == BUILT_IN_POWL))
7818 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7819 tree arg0 = CALL_EXPR_ARG (arg, 0);
7820 tree arg1 = CALL_EXPR_ARG (arg, 1);
7822 if (!tree_expr_nonnegative_p (arg0))
7823 arg0 = build1 (ABS_EXPR, type, arg0);
7824 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7825 build_real (type, dconsthalf));
7826 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7832 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7833 Return NULL_TREE if no simplification can be made. */
7836 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7838 const enum built_in_function fcode = builtin_mathfn_code (arg);
7841 if (!validate_arg (arg, REAL_TYPE))
7844 /* Calculate the result when the argument is a constant. */
7845 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7848 if (flag_unsafe_math_optimizations)
7850 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7851 if (BUILTIN_EXPONENT_P (fcode))
7853 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7854 const REAL_VALUE_TYPE third_trunc =
7855 real_value_truncate (TYPE_MODE (type), dconst_third ());
7856 arg = fold_build2_loc (loc, MULT_EXPR, type,
7857 CALL_EXPR_ARG (arg, 0),
7858 build_real (type, third_trunc));
7859 return build_call_expr_loc (loc, expfn, 1, arg);
7862 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7863 if (BUILTIN_SQRT_P (fcode))
7865 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7869 tree arg0 = CALL_EXPR_ARG (arg, 0);
7871 REAL_VALUE_TYPE dconstroot = dconst_third ();
7873 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7874 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7875 tree_root = build_real (type, dconstroot);
7876 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7880 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7881 if (BUILTIN_CBRT_P (fcode))
7883 tree arg0 = CALL_EXPR_ARG (arg, 0);
7884 if (tree_expr_nonnegative_p (arg0))
7886 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7891 REAL_VALUE_TYPE dconstroot;
7893 real_arithmetic (&dconstroot, MULT_EXPR,
7894 dconst_third_ptr (), dconst_third_ptr ());
7895 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7896 tree_root = build_real (type, dconstroot);
7897 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7902 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7903 if (fcode == BUILT_IN_POW
7904 || fcode == BUILT_IN_POWF
7905 || fcode == BUILT_IN_POWL)
7907 tree arg00 = CALL_EXPR_ARG (arg, 0);
7908 tree arg01 = CALL_EXPR_ARG (arg, 1);
7909 if (tree_expr_nonnegative_p (arg00))
7911 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7912 const REAL_VALUE_TYPE dconstroot
7913 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7914 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7915 build_real (type, dconstroot));
7916 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7923 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7924 TYPE is the type of the return value. Return NULL_TREE if no
7925 simplification can be made. */
7928 fold_builtin_cos (location_t loc,
7929 tree arg, tree type, tree fndecl)
7933 if (!validate_arg (arg, REAL_TYPE))
7936 /* Calculate the result when the argument is a constant. */
7937 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7940 /* Optimize cos(-x) into cos (x). */
7941 if ((narg = fold_strip_sign_ops (arg)))
7942 return build_call_expr_loc (loc, fndecl, 1, narg);
7947 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7948 Return NULL_TREE if no simplification can be made. */
7951 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7953 if (validate_arg (arg, REAL_TYPE))
7957 /* Calculate the result when the argument is a constant. */
7958 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7961 /* Optimize cosh(-x) into cosh (x). */
7962 if ((narg = fold_strip_sign_ops (arg)))
7963 return build_call_expr_loc (loc, fndecl, 1, narg);
7969 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7970 argument ARG. TYPE is the type of the return value. Return
7971 NULL_TREE if no simplification can be made. */
7974 fold_builtin_ccos (location_t loc,
7975 tree arg, tree type ATTRIBUTE_UNUSED, tree fndecl,
7976 bool hyper ATTRIBUTE_UNUSED)
7978 if (validate_arg (arg, COMPLEX_TYPE)
7979 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7984 /* Calculate the result when the argument is a constant. */
7985 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7989 /* Optimize fn(-x) into fn(x). */
7990 if ((tmp = fold_strip_sign_ops (arg)))
7991 return build_call_expr_loc (loc, fndecl, 1, tmp);
7997 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7998 Return NULL_TREE if no simplification can be made. */
8001 fold_builtin_tan (tree arg, tree type)
8003 enum built_in_function fcode;
8006 if (!validate_arg (arg, REAL_TYPE))
8009 /* Calculate the result when the argument is a constant. */
8010 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
8013 /* Optimize tan(atan(x)) = x. */
8014 fcode = builtin_mathfn_code (arg);
8015 if (flag_unsafe_math_optimizations
8016 && (fcode == BUILT_IN_ATAN
8017 || fcode == BUILT_IN_ATANF
8018 || fcode == BUILT_IN_ATANL))
8019 return CALL_EXPR_ARG (arg, 0);
8024 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8025 NULL_TREE if no simplification can be made. */
8028 fold_builtin_sincos (location_t loc,
8029 tree arg0, tree arg1, tree arg2)
8034 if (!validate_arg (arg0, REAL_TYPE)
8035 || !validate_arg (arg1, POINTER_TYPE)
8036 || !validate_arg (arg2, POINTER_TYPE))
8039 type = TREE_TYPE (arg0);
8041 /* Calculate the result when the argument is a constant. */
8042 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8045 /* Canonicalize sincos to cexpi. */
8046 if (!TARGET_C99_FUNCTIONS)
8048 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8052 call = build_call_expr_loc (loc, fn, 1, arg0);
8053 call = builtin_save_expr (call);
8055 return build2 (COMPOUND_EXPR, void_type_node,
8056 build2 (MODIFY_EXPR, void_type_node,
8057 build_fold_indirect_ref_loc (loc, arg1),
8058 build1 (IMAGPART_EXPR, type, call)),
8059 build2 (MODIFY_EXPR, void_type_node,
8060 build_fold_indirect_ref_loc (loc, arg2),
8061 build1 (REALPART_EXPR, type, call)));
8064 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
8065 NULL_TREE if no simplification can be made. */
8068 fold_builtin_cexp (location_t loc, tree arg0, tree type)
8071 tree realp, imagp, ifn;
8076 if (!validate_arg (arg0, COMPLEX_TYPE)
8077 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8081 /* Calculate the result when the argument is a constant. */
8082 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8086 rtype = TREE_TYPE (TREE_TYPE (arg0));
8088 /* In case we can figure out the real part of arg0 and it is constant zero
8090 if (!TARGET_C99_FUNCTIONS)
8092 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8096 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
8097 && real_zerop (realp))
8099 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8100 return build_call_expr_loc (loc, ifn, 1, narg);
8103 /* In case we can easily decompose real and imaginary parts split cexp
8104 to exp (r) * cexpi (i). */
8105 if (flag_unsafe_math_optimizations
8108 tree rfn, rcall, icall;
8110 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8114 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
8118 icall = build_call_expr_loc (loc, ifn, 1, imagp);
8119 icall = builtin_save_expr (icall);
8120 rcall = build_call_expr_loc (loc, rfn, 1, realp);
8121 rcall = builtin_save_expr (rcall);
8122 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8123 fold_build2_loc (loc, MULT_EXPR, rtype,
8125 fold_build1_loc (loc, REALPART_EXPR,
8127 fold_build2_loc (loc, MULT_EXPR, rtype,
8129 fold_build1_loc (loc, IMAGPART_EXPR,
8136 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8137 Return NULL_TREE if no simplification can be made. */
8140 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
8142 if (!validate_arg (arg, REAL_TYPE))
8145 /* Optimize trunc of constant value. */
8146 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8148 REAL_VALUE_TYPE r, x;
8149 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8151 x = TREE_REAL_CST (arg);
8152 real_trunc (&r, TYPE_MODE (type), &x);
8153 return build_real (type, r);
8156 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8159 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8160 Return NULL_TREE if no simplification can be made. */
8163 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
8165 if (!validate_arg (arg, REAL_TYPE))
8168 /* Optimize floor of constant value. */
8169 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8173 x = TREE_REAL_CST (arg);
8174 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8176 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8179 real_floor (&r, TYPE_MODE (type), &x);
8180 return build_real (type, r);
8184 /* Fold floor (x) where x is nonnegative to trunc (x). */
8185 if (tree_expr_nonnegative_p (arg))
8187 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8189 return build_call_expr_loc (loc, truncfn, 1, arg);
8192 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8195 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8196 Return NULL_TREE if no simplification can be made. */
8199 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8201 if (!validate_arg (arg, REAL_TYPE))
8204 /* Optimize ceil of constant value. */
8205 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8209 x = TREE_REAL_CST (arg);
8210 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8212 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8215 real_ceil (&r, TYPE_MODE (type), &x);
8216 return build_real (type, r);
8220 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8223 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8224 Return NULL_TREE if no simplification can be made. */
8227 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8229 if (!validate_arg (arg, REAL_TYPE))
8232 /* Optimize round of constant value. */
8233 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8237 x = TREE_REAL_CST (arg);
8238 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8240 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8243 real_round (&r, TYPE_MODE (type), &x);
8244 return build_real (type, r);
8248 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8251 /* Fold function call to builtin lround, lroundf or lroundl (or the
8252 corresponding long long versions) and other rounding functions. ARG
8253 is the argument to the call. Return NULL_TREE if no simplification
8257 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8259 if (!validate_arg (arg, REAL_TYPE))
8262 /* Optimize lround of constant value. */
8263 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8265 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8267 if (real_isfinite (&x))
8269 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8270 tree ftype = TREE_TYPE (arg);
8271 unsigned HOST_WIDE_INT lo2;
8272 HOST_WIDE_INT hi, lo;
8275 switch (DECL_FUNCTION_CODE (fndecl))
8277 CASE_FLT_FN (BUILT_IN_LFLOOR):
8278 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8279 real_floor (&r, TYPE_MODE (ftype), &x);
8282 CASE_FLT_FN (BUILT_IN_LCEIL):
8283 CASE_FLT_FN (BUILT_IN_LLCEIL):
8284 real_ceil (&r, TYPE_MODE (ftype), &x);
8287 CASE_FLT_FN (BUILT_IN_LROUND):
8288 CASE_FLT_FN (BUILT_IN_LLROUND):
8289 real_round (&r, TYPE_MODE (ftype), &x);
8296 REAL_VALUE_TO_INT (&lo, &hi, r);
8297 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8298 return build_int_cst_wide (itype, lo2, hi);
8302 switch (DECL_FUNCTION_CODE (fndecl))
8304 CASE_FLT_FN (BUILT_IN_LFLOOR):
8305 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8306 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8307 if (tree_expr_nonnegative_p (arg))
8308 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8309 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8314 return fold_fixed_mathfn (loc, fndecl, arg);
8317 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8318 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8319 the argument to the call. Return NULL_TREE if no simplification can
8323 fold_builtin_bitop (tree fndecl, tree arg)
8325 if (!validate_arg (arg, INTEGER_TYPE))
8328 /* Optimize for constant argument. */
8329 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8331 HOST_WIDE_INT hi, width, result;
8332 unsigned HOST_WIDE_INT lo;
8335 type = TREE_TYPE (arg);
8336 width = TYPE_PRECISION (type);
8337 lo = TREE_INT_CST_LOW (arg);
8339 /* Clear all the bits that are beyond the type's precision. */
8340 if (width > HOST_BITS_PER_WIDE_INT)
8342 hi = TREE_INT_CST_HIGH (arg);
8343 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8344 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8349 if (width < HOST_BITS_PER_WIDE_INT)
8350 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8353 switch (DECL_FUNCTION_CODE (fndecl))
8355 CASE_INT_FN (BUILT_IN_FFS):
8357 result = exact_log2 (lo & -lo) + 1;
8359 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8364 CASE_INT_FN (BUILT_IN_CLZ):
8366 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8368 result = width - floor_log2 (lo) - 1;
8369 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8373 CASE_INT_FN (BUILT_IN_CTZ):
8375 result = exact_log2 (lo & -lo);
8377 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8378 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8382 CASE_INT_FN (BUILT_IN_POPCOUNT):
8385 result++, lo &= lo - 1;
8387 result++, hi &= hi - 1;
8390 CASE_INT_FN (BUILT_IN_PARITY):
8393 result++, lo &= lo - 1;
8395 result++, hi &= hi - 1;
8403 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8409 /* Fold function call to builtin_bswap and the long and long long
8410 variants. Return NULL_TREE if no simplification can be made. */
8412 fold_builtin_bswap (tree fndecl, tree arg)
8414 if (! validate_arg (arg, INTEGER_TYPE))
8417 /* Optimize constant value. */
8418 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8420 HOST_WIDE_INT hi, width, r_hi = 0;
8421 unsigned HOST_WIDE_INT lo, r_lo = 0;
8424 type = TREE_TYPE (arg);
8425 width = TYPE_PRECISION (type);
8426 lo = TREE_INT_CST_LOW (arg);
8427 hi = TREE_INT_CST_HIGH (arg);
8429 switch (DECL_FUNCTION_CODE (fndecl))
8431 case BUILT_IN_BSWAP32:
8432 case BUILT_IN_BSWAP64:
8436 for (s = 0; s < width; s += 8)
8438 int d = width - s - 8;
8439 unsigned HOST_WIDE_INT byte;
8441 if (s < HOST_BITS_PER_WIDE_INT)
8442 byte = (lo >> s) & 0xff;
8444 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8446 if (d < HOST_BITS_PER_WIDE_INT)
8449 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8459 if (width < HOST_BITS_PER_WIDE_INT)
8460 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8462 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8468 /* A subroutine of fold_builtin to fold the various logarithmic
8469 functions. Return NULL_TREE if no simplification can me made.
8470 FUNC is the corresponding MPFR logarithm function. */
8473 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8474 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8476 if (validate_arg (arg, REAL_TYPE))
8478 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8480 const enum built_in_function fcode = builtin_mathfn_code (arg);
8482 /* Calculate the result when the argument is a constant. */
8483 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8486 /* Special case, optimize logN(expN(x)) = x. */
8487 if (flag_unsafe_math_optimizations
8488 && ((func == mpfr_log
8489 && (fcode == BUILT_IN_EXP
8490 || fcode == BUILT_IN_EXPF
8491 || fcode == BUILT_IN_EXPL))
8492 || (func == mpfr_log2
8493 && (fcode == BUILT_IN_EXP2
8494 || fcode == BUILT_IN_EXP2F
8495 || fcode == BUILT_IN_EXP2L))
8496 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8497 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8499 /* Optimize logN(func()) for various exponential functions. We
8500 want to determine the value "x" and the power "exponent" in
8501 order to transform logN(x**exponent) into exponent*logN(x). */
8502 if (flag_unsafe_math_optimizations)
8504 tree exponent = 0, x = 0;
8508 CASE_FLT_FN (BUILT_IN_EXP):
8509 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8510 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8512 exponent = CALL_EXPR_ARG (arg, 0);
8514 CASE_FLT_FN (BUILT_IN_EXP2):
8515 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8516 x = build_real (type, dconst2);
8517 exponent = CALL_EXPR_ARG (arg, 0);
8519 CASE_FLT_FN (BUILT_IN_EXP10):
8520 CASE_FLT_FN (BUILT_IN_POW10):
8521 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8523 REAL_VALUE_TYPE dconst10;
8524 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8525 x = build_real (type, dconst10);
8527 exponent = CALL_EXPR_ARG (arg, 0);
8529 CASE_FLT_FN (BUILT_IN_SQRT):
8530 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8531 x = CALL_EXPR_ARG (arg, 0);
8532 exponent = build_real (type, dconsthalf);
8534 CASE_FLT_FN (BUILT_IN_CBRT):
8535 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8536 x = CALL_EXPR_ARG (arg, 0);
8537 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8540 CASE_FLT_FN (BUILT_IN_POW):
8541 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8542 x = CALL_EXPR_ARG (arg, 0);
8543 exponent = CALL_EXPR_ARG (arg, 1);
8549 /* Now perform the optimization. */
8552 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8553 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8561 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8562 NULL_TREE if no simplification can be made. */
8565 fold_builtin_hypot (location_t loc, tree fndecl,
8566 tree arg0, tree arg1, tree type)
8568 tree res, narg0, narg1;
8570 if (!validate_arg (arg0, REAL_TYPE)
8571 || !validate_arg (arg1, REAL_TYPE))
8574 /* Calculate the result when the argument is a constant. */
8575 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8578 /* If either argument to hypot has a negate or abs, strip that off.
8579 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8580 narg0 = fold_strip_sign_ops (arg0);
8581 narg1 = fold_strip_sign_ops (arg1);
8584 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8585 narg1 ? narg1 : arg1);
8588 /* If either argument is zero, hypot is fabs of the other. */
8589 if (real_zerop (arg0))
8590 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8591 else if (real_zerop (arg1))
8592 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8594 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8595 if (flag_unsafe_math_optimizations
8596 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8598 const REAL_VALUE_TYPE sqrt2_trunc
8599 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8600 return fold_build2_loc (loc, MULT_EXPR, type,
8601 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8602 build_real (type, sqrt2_trunc));
8609 /* Fold a builtin function call to pow, powf, or powl. Return
8610 NULL_TREE if no simplification can be made. */
8612 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8616 if (!validate_arg (arg0, REAL_TYPE)
8617 || !validate_arg (arg1, REAL_TYPE))
8620 /* Calculate the result when the argument is a constant. */
8621 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8624 /* Optimize pow(1.0,y) = 1.0. */
8625 if (real_onep (arg0))
8626 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8628 if (TREE_CODE (arg1) == REAL_CST
8629 && !TREE_OVERFLOW (arg1))
8631 REAL_VALUE_TYPE cint;
8635 c = TREE_REAL_CST (arg1);
8637 /* Optimize pow(x,0.0) = 1.0. */
8638 if (REAL_VALUES_EQUAL (c, dconst0))
8639 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8642 /* Optimize pow(x,1.0) = x. */
8643 if (REAL_VALUES_EQUAL (c, dconst1))
8646 /* Optimize pow(x,-1.0) = 1.0/x. */
8647 if (REAL_VALUES_EQUAL (c, dconstm1))
8648 return fold_build2_loc (loc, RDIV_EXPR, type,
8649 build_real (type, dconst1), arg0);
8651 /* Optimize pow(x,0.5) = sqrt(x). */
8652 if (flag_unsafe_math_optimizations
8653 && REAL_VALUES_EQUAL (c, dconsthalf))
8655 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8657 if (sqrtfn != NULL_TREE)
8658 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8661 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8662 if (flag_unsafe_math_optimizations)
8664 const REAL_VALUE_TYPE dconstroot
8665 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8667 if (REAL_VALUES_EQUAL (c, dconstroot))
8669 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8670 if (cbrtfn != NULL_TREE)
8671 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8675 /* Check for an integer exponent. */
8676 n = real_to_integer (&c);
8677 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8678 if (real_identical (&c, &cint))
8680 /* Attempt to evaluate pow at compile-time, unless this should
8681 raise an exception. */
8682 if (TREE_CODE (arg0) == REAL_CST
8683 && !TREE_OVERFLOW (arg0)
8685 || (!flag_trapping_math && !flag_errno_math)
8686 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8691 x = TREE_REAL_CST (arg0);
8692 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8693 if (flag_unsafe_math_optimizations || !inexact)
8694 return build_real (type, x);
8697 /* Strip sign ops from even integer powers. */
8698 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8700 tree narg0 = fold_strip_sign_ops (arg0);
8702 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8707 if (flag_unsafe_math_optimizations)
8709 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8711 /* Optimize pow(expN(x),y) = expN(x*y). */
8712 if (BUILTIN_EXPONENT_P (fcode))
8714 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8715 tree arg = CALL_EXPR_ARG (arg0, 0);
8716 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8717 return build_call_expr_loc (loc, expfn, 1, arg);
8720 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8721 if (BUILTIN_SQRT_P (fcode))
8723 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8724 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8725 build_real (type, dconsthalf));
8726 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8729 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8730 if (BUILTIN_CBRT_P (fcode))
8732 tree arg = CALL_EXPR_ARG (arg0, 0);
8733 if (tree_expr_nonnegative_p (arg))
8735 const REAL_VALUE_TYPE dconstroot
8736 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8737 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8738 build_real (type, dconstroot));
8739 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8743 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8744 if (fcode == BUILT_IN_POW
8745 || fcode == BUILT_IN_POWF
8746 || fcode == BUILT_IN_POWL)
8748 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8749 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8750 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8751 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8758 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8759 Return NULL_TREE if no simplification can be made. */
8761 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8762 tree arg0, tree arg1, tree type)
8764 if (!validate_arg (arg0, REAL_TYPE)
8765 || !validate_arg (arg1, INTEGER_TYPE))
8768 /* Optimize pow(1.0,y) = 1.0. */
8769 if (real_onep (arg0))
8770 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8772 if (host_integerp (arg1, 0))
8774 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8776 /* Evaluate powi at compile-time. */
8777 if (TREE_CODE (arg0) == REAL_CST
8778 && !TREE_OVERFLOW (arg0))
8781 x = TREE_REAL_CST (arg0);
8782 real_powi (&x, TYPE_MODE (type), &x, c);
8783 return build_real (type, x);
8786 /* Optimize pow(x,0) = 1.0. */
8788 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8791 /* Optimize pow(x,1) = x. */
8795 /* Optimize pow(x,-1) = 1.0/x. */
8797 return fold_build2_loc (loc, RDIV_EXPR, type,
8798 build_real (type, dconst1), arg0);
8804 /* A subroutine of fold_builtin to fold the various exponent
8805 functions. Return NULL_TREE if no simplification can be made.
8806 FUNC is the corresponding MPFR exponent function. */
8809 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8810 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8812 if (validate_arg (arg, REAL_TYPE))
8814 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8817 /* Calculate the result when the argument is a constant. */
8818 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8821 /* Optimize expN(logN(x)) = x. */
8822 if (flag_unsafe_math_optimizations)
8824 const enum built_in_function fcode = builtin_mathfn_code (arg);
8826 if ((func == mpfr_exp
8827 && (fcode == BUILT_IN_LOG
8828 || fcode == BUILT_IN_LOGF
8829 || fcode == BUILT_IN_LOGL))
8830 || (func == mpfr_exp2
8831 && (fcode == BUILT_IN_LOG2
8832 || fcode == BUILT_IN_LOG2F
8833 || fcode == BUILT_IN_LOG2L))
8834 || (func == mpfr_exp10
8835 && (fcode == BUILT_IN_LOG10
8836 || fcode == BUILT_IN_LOG10F
8837 || fcode == BUILT_IN_LOG10L)))
8838 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8845 /* Return true if VAR is a VAR_DECL or a component thereof. */
8848 var_decl_component_p (tree var)
8851 while (handled_component_p (inner))
8852 inner = TREE_OPERAND (inner, 0);
8853 return SSA_VAR_P (inner);
8856 /* Fold function call to builtin memset. Return
8857 NULL_TREE if no simplification can be made. */
8860 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8861 tree type, bool ignore)
8863 tree var, ret, etype;
8864 unsigned HOST_WIDE_INT length, cval;
8866 if (! validate_arg (dest, POINTER_TYPE)
8867 || ! validate_arg (c, INTEGER_TYPE)
8868 || ! validate_arg (len, INTEGER_TYPE))
8871 if (! host_integerp (len, 1))
8874 /* If the LEN parameter is zero, return DEST. */
8875 if (integer_zerop (len))
8876 return omit_one_operand_loc (loc, type, dest, c);
8878 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8883 if (TREE_CODE (var) != ADDR_EXPR)
8886 var = TREE_OPERAND (var, 0);
8887 if (TREE_THIS_VOLATILE (var))
8890 etype = TREE_TYPE (var);
8891 if (TREE_CODE (etype) == ARRAY_TYPE)
8892 etype = TREE_TYPE (etype);
8894 if (!INTEGRAL_TYPE_P (etype)
8895 && !POINTER_TYPE_P (etype))
8898 if (! var_decl_component_p (var))
8901 length = tree_low_cst (len, 1);
8902 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8903 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8907 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8910 if (integer_zerop (c))
8914 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8917 cval = tree_low_cst (c, 1);
8921 cval |= (cval << 31) << 1;
8924 ret = build_int_cst_type (etype, cval);
8925 var = build_fold_indirect_ref_loc (loc,
8926 fold_convert_loc (loc,
8927 build_pointer_type (etype),
8929 ret = build2 (MODIFY_EXPR, etype, var, ret);
8933 return omit_one_operand_loc (loc, type, dest, ret);
8936 /* Fold function call to builtin memset. Return
8937 NULL_TREE if no simplification can be made. */
8940 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8942 if (! validate_arg (dest, POINTER_TYPE)
8943 || ! validate_arg (size, INTEGER_TYPE))
8949 /* New argument list transforming bzero(ptr x, int y) to
8950 memset(ptr x, int 0, size_t y). This is done this way
8951 so that if it isn't expanded inline, we fallback to
8952 calling bzero instead of memset. */
8954 return fold_builtin_memset (loc, dest, integer_zero_node,
8955 fold_convert_loc (loc, sizetype, size),
8956 void_type_node, ignore);
8959 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8960 NULL_TREE if no simplification can be made.
8961 If ENDP is 0, return DEST (like memcpy).
8962 If ENDP is 1, return DEST+LEN (like mempcpy).
8963 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8964 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8968 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8969 tree len, tree type, bool ignore, int endp)
8971 tree destvar, srcvar, expr;
8973 if (! validate_arg (dest, POINTER_TYPE)
8974 || ! validate_arg (src, POINTER_TYPE)
8975 || ! validate_arg (len, INTEGER_TYPE))
8978 /* If the LEN parameter is zero, return DEST. */
8979 if (integer_zerop (len))
8980 return omit_one_operand_loc (loc, type, dest, src);
8982 /* If SRC and DEST are the same (and not volatile), return
8983 DEST{,+LEN,+LEN-1}. */
8984 if (operand_equal_p (src, dest, 0))
8988 tree srctype, desttype;
8989 int src_align, dest_align;
8993 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8994 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8996 /* Both DEST and SRC must be pointer types.
8997 ??? This is what old code did. Is the testing for pointer types
9000 If either SRC is readonly or length is 1, we can use memcpy. */
9001 if (!dest_align || !src_align)
9003 if (readonly_data_expr (src)
9004 || (host_integerp (len, 1)
9005 && (MIN (src_align, dest_align) / BITS_PER_UNIT
9006 >= tree_low_cst (len, 1))))
9008 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9011 return build_call_expr_loc (loc, fn, 3, dest, src, len);
9014 /* If *src and *dest can't overlap, optimize into memcpy as well. */
9015 srcvar = build_fold_indirect_ref_loc (loc, src);
9016 destvar = build_fold_indirect_ref_loc (loc, dest);
9018 && !TREE_THIS_VOLATILE (srcvar)
9020 && !TREE_THIS_VOLATILE (destvar))
9022 tree src_base, dest_base, fn;
9023 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
9024 HOST_WIDE_INT size = -1;
9025 HOST_WIDE_INT maxsize = -1;
9028 if (handled_component_p (src_base))
9029 src_base = get_ref_base_and_extent (src_base, &src_offset,
9031 dest_base = destvar;
9032 if (handled_component_p (dest_base))
9033 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
9035 if (host_integerp (len, 1))
9037 maxsize = tree_low_cst (len, 1);
9039 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
9042 maxsize *= BITS_PER_UNIT;
9046 if (SSA_VAR_P (src_base)
9047 && SSA_VAR_P (dest_base))
9049 if (operand_equal_p (src_base, dest_base, 0)
9050 && ranges_overlap_p (src_offset, maxsize,
9051 dest_offset, maxsize))
9054 else if (TREE_CODE (src_base) == INDIRECT_REF
9055 && TREE_CODE (dest_base) == INDIRECT_REF)
9057 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
9058 TREE_OPERAND (dest_base, 0), 0)
9059 || ranges_overlap_p (src_offset, maxsize,
9060 dest_offset, maxsize))
9066 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9069 return build_call_expr_loc (loc, fn, 3, dest, src, len);
9074 if (!host_integerp (len, 0))
9077 This logic lose for arguments like (type *)malloc (sizeof (type)),
9078 since we strip the casts of up to VOID return value from malloc.
9079 Perhaps we ought to inherit type from non-VOID argument here? */
9082 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
9083 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
9085 tree tem = TREE_OPERAND (src, 0);
9087 if (tem != TREE_OPERAND (src, 0))
9088 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
9090 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
9092 tree tem = TREE_OPERAND (dest, 0);
9094 if (tem != TREE_OPERAND (dest, 0))
9095 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
9097 srctype = TREE_TYPE (TREE_TYPE (src));
9099 && TREE_CODE (srctype) == ARRAY_TYPE
9100 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
9102 srctype = TREE_TYPE (srctype);
9104 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
9106 desttype = TREE_TYPE (TREE_TYPE (dest));
9108 && TREE_CODE (desttype) == ARRAY_TYPE
9109 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
9111 desttype = TREE_TYPE (desttype);
9113 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
9115 if (!srctype || !desttype
9116 || !TYPE_SIZE_UNIT (srctype)
9117 || !TYPE_SIZE_UNIT (desttype)
9118 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
9119 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
9120 || TYPE_VOLATILE (srctype)
9121 || TYPE_VOLATILE (desttype))
9124 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
9125 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
9126 if (dest_align < (int) TYPE_ALIGN (desttype)
9127 || src_align < (int) TYPE_ALIGN (srctype))
9131 dest = builtin_save_expr (dest);
9134 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
9136 srcvar = build_fold_indirect_ref_loc (loc, src);
9137 if (TREE_THIS_VOLATILE (srcvar))
9139 else if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
9141 /* With memcpy, it is possible to bypass aliasing rules, so without
9142 this check i.e. execute/20060930-2.c would be misoptimized,
9143 because it use conflicting alias set to hold argument for the
9144 memcpy call. This check is probably unnecessary with
9145 -fno-strict-aliasing. Similarly for destvar. See also
9147 else if (!var_decl_component_p (srcvar))
9151 destvar = NULL_TREE;
9152 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
9154 destvar = build_fold_indirect_ref_loc (loc, dest);
9155 if (TREE_THIS_VOLATILE (destvar))
9157 else if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
9158 destvar = NULL_TREE;
9159 else if (!var_decl_component_p (destvar))
9160 destvar = NULL_TREE;
9163 if (srcvar == NULL_TREE && destvar == NULL_TREE)
9166 if (srcvar == NULL_TREE)
9169 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
9172 srctype = build_qualified_type (desttype, 0);
9173 if (src_align < (int) TYPE_ALIGN (srctype))
9175 if (AGGREGATE_TYPE_P (srctype)
9176 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
9179 srctype = build_variant_type_copy (srctype);
9180 TYPE_ALIGN (srctype) = src_align;
9181 TYPE_USER_ALIGN (srctype) = 1;
9182 TYPE_PACKED (srctype) = 1;
9184 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
9185 src = fold_convert_loc (loc, srcptype, src);
9186 srcvar = build_fold_indirect_ref_loc (loc, src);
9188 else if (destvar == NULL_TREE)
9191 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
9194 desttype = build_qualified_type (srctype, 0);
9195 if (dest_align < (int) TYPE_ALIGN (desttype))
9197 if (AGGREGATE_TYPE_P (desttype)
9198 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
9201 desttype = build_variant_type_copy (desttype);
9202 TYPE_ALIGN (desttype) = dest_align;
9203 TYPE_USER_ALIGN (desttype) = 1;
9204 TYPE_PACKED (desttype) = 1;
9206 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
9207 dest = fold_convert_loc (loc, destptype, dest);
9208 destvar = build_fold_indirect_ref_loc (loc, dest);
9211 if (srctype == desttype
9212 || (gimple_in_ssa_p (cfun)
9213 && useless_type_conversion_p (desttype, srctype)))
9215 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
9216 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
9217 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
9218 || POINTER_TYPE_P (TREE_TYPE (destvar))))
9219 expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
9221 expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
9222 TREE_TYPE (destvar), srcvar);
9223 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
9229 if (endp == 0 || endp == 3)
9230 return omit_one_operand_loc (loc, type, dest, expr);
9236 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
9239 len = fold_convert_loc (loc, sizetype, len);
9240 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
9241 dest = fold_convert_loc (loc, type, dest);
9243 dest = omit_one_operand_loc (loc, type, dest, expr);
9247 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9248 If LEN is not NULL, it represents the length of the string to be
9249 copied. Return NULL_TREE if no simplification can be made. */
9252 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
9256 if (!validate_arg (dest, POINTER_TYPE)
9257 || !validate_arg (src, POINTER_TYPE))
9260 /* If SRC and DEST are the same (and not volatile), return DEST. */
9261 if (operand_equal_p (src, dest, 0))
9262 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
9264 if (optimize_function_for_size_p (cfun))
9267 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9273 len = c_strlen (src, 1);
9274 if (! len || TREE_SIDE_EFFECTS (len))
9278 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
9279 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9280 build_call_expr_loc (loc, fn, 3, dest, src, len));
9283 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9284 If SLEN is not NULL, it represents the length of the source string.
9285 Return NULL_TREE if no simplification can be made. */
9288 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9289 tree src, tree len, tree slen)
9293 if (!validate_arg (dest, POINTER_TYPE)
9294 || !validate_arg (src, POINTER_TYPE)
9295 || !validate_arg (len, INTEGER_TYPE))
9298 /* If the LEN parameter is zero, return DEST. */
9299 if (integer_zerop (len))
9300 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9302 /* We can't compare slen with len as constants below if len is not a
9304 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9308 slen = c_strlen (src, 1);
9310 /* Now, we must be passed a constant src ptr parameter. */
9311 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9314 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9316 /* We do not support simplification of this case, though we do
9317 support it when expanding trees into RTL. */
9318 /* FIXME: generate a call to __builtin_memset. */
9319 if (tree_int_cst_lt (slen, len))
9322 /* OK transform into builtin memcpy. */
9323 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9326 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9327 build_call_expr_loc (loc, fn, 3, dest, src, len));
9330 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9331 arguments to the call, and TYPE is its return type.
9332 Return NULL_TREE if no simplification can be made. */
9335 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9337 if (!validate_arg (arg1, POINTER_TYPE)
9338 || !validate_arg (arg2, INTEGER_TYPE)
9339 || !validate_arg (len, INTEGER_TYPE))
9345 if (TREE_CODE (arg2) != INTEGER_CST
9346 || !host_integerp (len, 1))
9349 p1 = c_getstr (arg1);
9350 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9356 if (target_char_cast (arg2, &c))
9359 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
9362 return build_int_cst (TREE_TYPE (arg1), 0);
9364 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
9366 return fold_convert_loc (loc, type, tem);
9372 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9373 Return NULL_TREE if no simplification can be made. */
9376 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9378 const char *p1, *p2;
9380 if (!validate_arg (arg1, POINTER_TYPE)
9381 || !validate_arg (arg2, POINTER_TYPE)
9382 || !validate_arg (len, INTEGER_TYPE))
9385 /* If the LEN parameter is zero, return zero. */
9386 if (integer_zerop (len))
9387 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9390 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9391 if (operand_equal_p (arg1, arg2, 0))
9392 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9394 p1 = c_getstr (arg1);
9395 p2 = c_getstr (arg2);
9397 /* If all arguments are constant, and the value of len is not greater
9398 than the lengths of arg1 and arg2, evaluate at compile-time. */
9399 if (host_integerp (len, 1) && p1 && p2
9400 && compare_tree_int (len, strlen (p1) + 1) <= 0
9401 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9403 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9406 return integer_one_node;
9408 return integer_minus_one_node;
9410 return integer_zero_node;
9413 /* If len parameter is one, return an expression corresponding to
9414 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9415 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9417 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9418 tree cst_uchar_ptr_node
9419 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9422 = fold_convert_loc (loc, integer_type_node,
9423 build1 (INDIRECT_REF, cst_uchar_node,
9424 fold_convert_loc (loc,
9428 = fold_convert_loc (loc, integer_type_node,
9429 build1 (INDIRECT_REF, cst_uchar_node,
9430 fold_convert_loc (loc,
9433 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9439 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9440 Return NULL_TREE if no simplification can be made. */
9443 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9445 const char *p1, *p2;
9447 if (!validate_arg (arg1, POINTER_TYPE)
9448 || !validate_arg (arg2, POINTER_TYPE))
9451 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9452 if (operand_equal_p (arg1, arg2, 0))
9453 return integer_zero_node;
9455 p1 = c_getstr (arg1);
9456 p2 = c_getstr (arg2);
9460 const int i = strcmp (p1, p2);
9462 return integer_minus_one_node;
9464 return integer_one_node;
9466 return integer_zero_node;
9469 /* If the second arg is "", return *(const unsigned char*)arg1. */
9470 if (p2 && *p2 == '\0')
9472 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9473 tree cst_uchar_ptr_node
9474 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9476 return fold_convert_loc (loc, integer_type_node,
9477 build1 (INDIRECT_REF, cst_uchar_node,
9478 fold_convert_loc (loc,
9483 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9484 if (p1 && *p1 == '\0')
9486 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9487 tree cst_uchar_ptr_node
9488 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9491 = fold_convert_loc (loc, integer_type_node,
9492 build1 (INDIRECT_REF, cst_uchar_node,
9493 fold_convert_loc (loc,
9496 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9502 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9503 Return NULL_TREE if no simplification can be made. */
9506 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9508 const char *p1, *p2;
9510 if (!validate_arg (arg1, POINTER_TYPE)
9511 || !validate_arg (arg2, POINTER_TYPE)
9512 || !validate_arg (len, INTEGER_TYPE))
9515 /* If the LEN parameter is zero, return zero. */
9516 if (integer_zerop (len))
9517 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9520 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9521 if (operand_equal_p (arg1, arg2, 0))
9522 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9524 p1 = c_getstr (arg1);
9525 p2 = c_getstr (arg2);
9527 if (host_integerp (len, 1) && p1 && p2)
9529 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9531 return integer_one_node;
9533 return integer_minus_one_node;
9535 return integer_zero_node;
9538 /* If the second arg is "", and the length is greater than zero,
9539 return *(const unsigned char*)arg1. */
9540 if (p2 && *p2 == '\0'
9541 && TREE_CODE (len) == INTEGER_CST
9542 && tree_int_cst_sgn (len) == 1)
9544 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9545 tree cst_uchar_ptr_node
9546 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9548 return fold_convert_loc (loc, integer_type_node,
9549 build1 (INDIRECT_REF, cst_uchar_node,
9550 fold_convert_loc (loc,
9555 /* If the first arg is "", and the length is greater than zero,
9556 return -*(const unsigned char*)arg2. */
9557 if (p1 && *p1 == '\0'
9558 && TREE_CODE (len) == INTEGER_CST
9559 && tree_int_cst_sgn (len) == 1)
9561 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9562 tree cst_uchar_ptr_node
9563 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9565 tree temp = fold_convert_loc (loc, integer_type_node,
9566 build1 (INDIRECT_REF, cst_uchar_node,
9567 fold_convert_loc (loc,
9570 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9573 /* If len parameter is one, return an expression corresponding to
9574 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9575 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9577 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9578 tree cst_uchar_ptr_node
9579 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9581 tree ind1 = fold_convert_loc (loc, integer_type_node,
9582 build1 (INDIRECT_REF, cst_uchar_node,
9583 fold_convert_loc (loc,
9586 tree ind2 = fold_convert_loc (loc, integer_type_node,
9587 build1 (INDIRECT_REF, cst_uchar_node,
9588 fold_convert_loc (loc,
9591 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9597 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9598 ARG. Return NULL_TREE if no simplification can be made. */
9601 fold_builtin_signbit (location_t loc, tree arg, tree type)
9605 if (!validate_arg (arg, REAL_TYPE))
9608 /* If ARG is a compile-time constant, determine the result. */
9609 if (TREE_CODE (arg) == REAL_CST
9610 && !TREE_OVERFLOW (arg))
9614 c = TREE_REAL_CST (arg);
9615 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9616 return fold_convert_loc (loc, type, temp);
9619 /* If ARG is non-negative, the result is always zero. */
9620 if (tree_expr_nonnegative_p (arg))
9621 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9623 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9624 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9625 return fold_build2_loc (loc, LT_EXPR, type, arg,
9626 build_real (TREE_TYPE (arg), dconst0));
9631 /* Fold function call to builtin copysign, copysignf or copysignl with
9632 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9636 fold_builtin_copysign (location_t loc, tree fndecl,
9637 tree arg1, tree arg2, tree type)
9641 if (!validate_arg (arg1, REAL_TYPE)
9642 || !validate_arg (arg2, REAL_TYPE))
9645 /* copysign(X,X) is X. */
9646 if (operand_equal_p (arg1, arg2, 0))
9647 return fold_convert_loc (loc, type, arg1);
9649 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9650 if (TREE_CODE (arg1) == REAL_CST
9651 && TREE_CODE (arg2) == REAL_CST
9652 && !TREE_OVERFLOW (arg1)
9653 && !TREE_OVERFLOW (arg2))
9655 REAL_VALUE_TYPE c1, c2;
9657 c1 = TREE_REAL_CST (arg1);
9658 c2 = TREE_REAL_CST (arg2);
9659 /* c1.sign := c2.sign. */
9660 real_copysign (&c1, &c2);
9661 return build_real (type, c1);
9664 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9665 Remember to evaluate Y for side-effects. */
9666 if (tree_expr_nonnegative_p (arg2))
9667 return omit_one_operand_loc (loc, type,
9668 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9671 /* Strip sign changing operations for the first argument. */
9672 tem = fold_strip_sign_ops (arg1);
9674 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9679 /* Fold a call to builtin isascii with argument ARG. */
9682 fold_builtin_isascii (location_t loc, tree arg)
9684 if (!validate_arg (arg, INTEGER_TYPE))
9688 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9689 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9690 build_int_cst (NULL_TREE,
9691 ~ (unsigned HOST_WIDE_INT) 0x7f));
9692 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9693 arg, integer_zero_node);
9697 /* Fold a call to builtin toascii with argument ARG. */
9700 fold_builtin_toascii (location_t loc, tree arg)
9702 if (!validate_arg (arg, INTEGER_TYPE))
9705 /* Transform toascii(c) -> (c & 0x7f). */
9706 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9707 build_int_cst (NULL_TREE, 0x7f));
9710 /* Fold a call to builtin isdigit with argument ARG. */
9713 fold_builtin_isdigit (location_t loc, tree arg)
9715 if (!validate_arg (arg, INTEGER_TYPE))
9719 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9720 /* According to the C standard, isdigit is unaffected by locale.
9721 However, it definitely is affected by the target character set. */
9722 unsigned HOST_WIDE_INT target_digit0
9723 = lang_hooks.to_target_charset ('0');
9725 if (target_digit0 == 0)
9728 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9729 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9730 build_int_cst (unsigned_type_node, target_digit0));
9731 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9732 build_int_cst (unsigned_type_node, 9));
9736 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9739 fold_builtin_fabs (location_t loc, tree arg, tree type)
9741 if (!validate_arg (arg, REAL_TYPE))
9744 arg = fold_convert_loc (loc, type, arg);
9745 if (TREE_CODE (arg) == REAL_CST)
9746 return fold_abs_const (arg, type);
9747 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9750 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9753 fold_builtin_abs (location_t loc, tree arg, tree type)
9755 if (!validate_arg (arg, INTEGER_TYPE))
9758 arg = fold_convert_loc (loc, type, arg);
9759 if (TREE_CODE (arg) == INTEGER_CST)
9760 return fold_abs_const (arg, type);
9761 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9764 /* Fold a call to builtin fmin or fmax. */
9767 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9768 tree type, bool max)
9770 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9772 /* Calculate the result when the argument is a constant. */
9773 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9778 /* If either argument is NaN, return the other one. Avoid the
9779 transformation if we get (and honor) a signalling NaN. Using
9780 omit_one_operand() ensures we create a non-lvalue. */
9781 if (TREE_CODE (arg0) == REAL_CST
9782 && real_isnan (&TREE_REAL_CST (arg0))
9783 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9784 || ! TREE_REAL_CST (arg0).signalling))
9785 return omit_one_operand_loc (loc, type, arg1, arg0);
9786 if (TREE_CODE (arg1) == REAL_CST
9787 && real_isnan (&TREE_REAL_CST (arg1))
9788 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9789 || ! TREE_REAL_CST (arg1).signalling))
9790 return omit_one_operand_loc (loc, type, arg0, arg1);
9792 /* Transform fmin/fmax(x,x) -> x. */
9793 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9794 return omit_one_operand_loc (loc, type, arg0, arg1);
9796 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9797 functions to return the numeric arg if the other one is NaN.
9798 These tree codes don't honor that, so only transform if
9799 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9800 handled, so we don't have to worry about it either. */
9801 if (flag_finite_math_only)
9802 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9803 fold_convert_loc (loc, type, arg0),
9804 fold_convert_loc (loc, type, arg1));
9809 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9812 fold_builtin_carg (location_t loc, tree arg, tree type)
9814 if (validate_arg (arg, COMPLEX_TYPE)
9815 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9817 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9821 tree new_arg = builtin_save_expr (arg);
9822 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9823 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9824 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9831 /* Fold a call to builtin logb/ilogb. */
9834 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9836 if (! validate_arg (arg, REAL_TYPE))
9841 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9843 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9849 /* If arg is Inf or NaN and we're logb, return it. */
9850 if (TREE_CODE (rettype) == REAL_TYPE)
9851 return fold_convert_loc (loc, rettype, arg);
9852 /* Fall through... */
9854 /* Zero may set errno and/or raise an exception for logb, also
9855 for ilogb we don't know FP_ILOGB0. */
9858 /* For normal numbers, proceed iff radix == 2. In GCC,
9859 normalized significands are in the range [0.5, 1.0). We
9860 want the exponent as if they were [1.0, 2.0) so get the
9861 exponent and subtract 1. */
9862 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9863 return fold_convert_loc (loc, rettype,
9864 build_int_cst (NULL_TREE,
9865 REAL_EXP (value)-1));
9873 /* Fold a call to builtin significand, if radix == 2. */
9876 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9878 if (! validate_arg (arg, REAL_TYPE))
9883 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9885 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9892 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9893 return fold_convert_loc (loc, rettype, arg);
9895 /* For normal numbers, proceed iff radix == 2. */
9896 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9898 REAL_VALUE_TYPE result = *value;
9899 /* In GCC, normalized significands are in the range [0.5,
9900 1.0). We want them to be [1.0, 2.0) so set the
9902 SET_REAL_EXP (&result, 1);
9903 return build_real (rettype, result);
9912 /* Fold a call to builtin frexp, we can assume the base is 2. */
9915 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9917 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9922 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9925 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9927 /* Proceed if a valid pointer type was passed in. */
9928 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9930 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9936 /* For +-0, return (*exp = 0, +-0). */
9937 exp = integer_zero_node;
9942 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9943 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9946 /* Since the frexp function always expects base 2, and in
9947 GCC normalized significands are already in the range
9948 [0.5, 1.0), we have exactly what frexp wants. */
9949 REAL_VALUE_TYPE frac_rvt = *value;
9950 SET_REAL_EXP (&frac_rvt, 0);
9951 frac = build_real (rettype, frac_rvt);
9952 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9959 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9960 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9961 TREE_SIDE_EFFECTS (arg1) = 1;
9962 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9968 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9969 then we can assume the base is two. If it's false, then we have to
9970 check the mode of the TYPE parameter in certain cases. */
9973 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9974 tree type, bool ldexp)
9976 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9981 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9982 if (real_zerop (arg0) || integer_zerop (arg1)
9983 || (TREE_CODE (arg0) == REAL_CST
9984 && !real_isfinite (&TREE_REAL_CST (arg0))))
9985 return omit_one_operand_loc (loc, type, arg0, arg1);
9987 /* If both arguments are constant, then try to evaluate it. */
9988 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9989 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9990 && host_integerp (arg1, 0))
9992 /* Bound the maximum adjustment to twice the range of the
9993 mode's valid exponents. Use abs to ensure the range is
9994 positive as a sanity check. */
9995 const long max_exp_adj = 2 *
9996 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9997 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9999 /* Get the user-requested adjustment. */
10000 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
10002 /* The requested adjustment must be inside this range. This
10003 is a preliminary cap to avoid things like overflow, we
10004 may still fail to compute the result for other reasons. */
10005 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
10007 REAL_VALUE_TYPE initial_result;
10009 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
10011 /* Ensure we didn't overflow. */
10012 if (! real_isinf (&initial_result))
10014 const REAL_VALUE_TYPE trunc_result
10015 = real_value_truncate (TYPE_MODE (type), initial_result);
10017 /* Only proceed if the target mode can hold the
10018 resulting value. */
10019 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
10020 return build_real (type, trunc_result);
10029 /* Fold a call to builtin modf. */
10032 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
10034 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
10039 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
10042 arg1 = build_fold_indirect_ref_loc (loc, arg1);
10044 /* Proceed if a valid pointer type was passed in. */
10045 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
10047 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
10048 REAL_VALUE_TYPE trunc, frac;
10054 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
10055 trunc = frac = *value;
10058 /* For +-Inf, return (*arg1 = arg0, +-0). */
10060 frac.sign = value->sign;
10064 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
10065 real_trunc (&trunc, VOIDmode, value);
10066 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
10067 /* If the original number was negative and already
10068 integral, then the fractional part is -0.0. */
10069 if (value->sign && frac.cl == rvc_zero)
10070 frac.sign = value->sign;
10074 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
10075 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
10076 build_real (rettype, trunc));
10077 TREE_SIDE_EFFECTS (arg1) = 1;
10078 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
10079 build_real (rettype, frac));
10085 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
10086 ARG is the argument for the call. */
10089 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
10091 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10094 if (!validate_arg (arg, REAL_TYPE))
10097 switch (builtin_index)
10099 case BUILT_IN_ISINF:
10100 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10101 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10103 if (TREE_CODE (arg) == REAL_CST)
10105 r = TREE_REAL_CST (arg);
10106 if (real_isinf (&r))
10107 return real_compare (GT_EXPR, &r, &dconst0)
10108 ? integer_one_node : integer_minus_one_node;
10110 return integer_zero_node;
10115 case BUILT_IN_ISINF_SIGN:
10117 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10118 /* In a boolean context, GCC will fold the inner COND_EXPR to
10119 1. So e.g. "if (isinf_sign(x))" would be folded to just
10120 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10121 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10122 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
10123 tree tmp = NULL_TREE;
10125 arg = builtin_save_expr (arg);
10127 if (signbit_fn && isinf_fn)
10129 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10130 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10132 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10133 signbit_call, integer_zero_node);
10134 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10135 isinf_call, integer_zero_node);
10137 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10138 integer_minus_one_node, integer_one_node);
10139 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10141 integer_zero_node);
10147 case BUILT_IN_ISFINITE:
10148 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10149 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10150 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10152 if (TREE_CODE (arg) == REAL_CST)
10154 r = TREE_REAL_CST (arg);
10155 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10160 case BUILT_IN_ISNAN:
10161 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10162 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10164 if (TREE_CODE (arg) == REAL_CST)
10166 r = TREE_REAL_CST (arg);
10167 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10170 arg = builtin_save_expr (arg);
10171 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10174 gcc_unreachable ();
10178 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10179 This builtin will generate code to return the appropriate floating
10180 point classification depending on the value of the floating point
10181 number passed in. The possible return values must be supplied as
10182 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10183 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
10184 one floating point argument which is "type generic". */
10187 fold_builtin_fpclassify (location_t loc, tree exp)
10189 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10190 arg, type, res, tmp;
10191 enum machine_mode mode;
10195 /* Verify the required arguments in the original call. */
10196 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10197 INTEGER_TYPE, INTEGER_TYPE,
10198 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10201 fp_nan = CALL_EXPR_ARG (exp, 0);
10202 fp_infinite = CALL_EXPR_ARG (exp, 1);
10203 fp_normal = CALL_EXPR_ARG (exp, 2);
10204 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10205 fp_zero = CALL_EXPR_ARG (exp, 4);
10206 arg = CALL_EXPR_ARG (exp, 5);
10207 type = TREE_TYPE (arg);
10208 mode = TYPE_MODE (type);
10209 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10211 /* fpclassify(x) ->
10212 isnan(x) ? FP_NAN :
10213 (fabs(x) == Inf ? FP_INFINITE :
10214 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10215 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10217 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10218 build_real (type, dconst0));
10219 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10220 tmp, fp_zero, fp_subnormal);
10222 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10223 real_from_string (&r, buf);
10224 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10225 arg, build_real (type, r));
10226 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10228 if (HONOR_INFINITIES (mode))
10231 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10232 build_real (type, r));
10233 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10237 if (HONOR_NANS (mode))
10239 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10240 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10246 /* Fold a call to an unordered comparison function such as
10247 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10248 being called and ARG0 and ARG1 are the arguments for the call.
10249 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10250 the opposite of the desired result. UNORDERED_CODE is used
10251 for modes that can hold NaNs and ORDERED_CODE is used for
10255 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10256 enum tree_code unordered_code,
10257 enum tree_code ordered_code)
10259 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10260 enum tree_code code;
10262 enum tree_code code0, code1;
10263 tree cmp_type = NULL_TREE;
10265 type0 = TREE_TYPE (arg0);
10266 type1 = TREE_TYPE (arg1);
10268 code0 = TREE_CODE (type0);
10269 code1 = TREE_CODE (type1);
10271 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10272 /* Choose the wider of two real types. */
10273 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10275 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10277 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10280 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10281 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10283 if (unordered_code == UNORDERED_EXPR)
10285 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10286 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10287 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10290 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10292 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10293 fold_build2_loc (loc, code, type, arg0, arg1));
10296 /* Fold a call to built-in function FNDECL with 0 arguments.
10297 IGNORE is true if the result of the function call is ignored. This
10298 function returns NULL_TREE if no simplification was possible. */
10301 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10303 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10304 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10307 CASE_FLT_FN (BUILT_IN_INF):
10308 case BUILT_IN_INFD32:
10309 case BUILT_IN_INFD64:
10310 case BUILT_IN_INFD128:
10311 return fold_builtin_inf (loc, type, true);
10313 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10314 return fold_builtin_inf (loc, type, false);
10316 case BUILT_IN_CLASSIFY_TYPE:
10317 return fold_builtin_classify_type (NULL_TREE);
10325 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10326 IGNORE is true if the result of the function call is ignored. This
10327 function returns NULL_TREE if no simplification was possible. */
10330 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10332 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10333 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10337 case BUILT_IN_CONSTANT_P:
10339 tree val = fold_builtin_constant_p (arg0);
10341 /* Gimplification will pull the CALL_EXPR for the builtin out of
10342 an if condition. When not optimizing, we'll not CSE it back.
10343 To avoid link error types of regressions, return false now. */
10344 if (!val && !optimize)
10345 val = integer_zero_node;
10350 case BUILT_IN_CLASSIFY_TYPE:
10351 return fold_builtin_classify_type (arg0);
10353 case BUILT_IN_STRLEN:
10354 return fold_builtin_strlen (loc, arg0);
10356 CASE_FLT_FN (BUILT_IN_FABS):
10357 return fold_builtin_fabs (loc, arg0, type);
10360 case BUILT_IN_LABS:
10361 case BUILT_IN_LLABS:
10362 case BUILT_IN_IMAXABS:
10363 return fold_builtin_abs (loc, arg0, type);
10365 CASE_FLT_FN (BUILT_IN_CONJ):
10366 if (validate_arg (arg0, COMPLEX_TYPE)
10367 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10368 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10371 CASE_FLT_FN (BUILT_IN_CREAL):
10372 if (validate_arg (arg0, COMPLEX_TYPE)
10373 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10374 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
10377 CASE_FLT_FN (BUILT_IN_CIMAG):
10378 if (validate_arg (arg0, COMPLEX_TYPE))
10379 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10382 CASE_FLT_FN (BUILT_IN_CCOS):
10383 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
10385 CASE_FLT_FN (BUILT_IN_CCOSH):
10386 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
10389 CASE_FLT_FN (BUILT_IN_CSIN):
10390 if (validate_arg (arg0, COMPLEX_TYPE)
10391 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10392 return do_mpc_arg1 (arg0, type, mpc_sin);
10395 CASE_FLT_FN (BUILT_IN_CSINH):
10396 if (validate_arg (arg0, COMPLEX_TYPE)
10397 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10398 return do_mpc_arg1 (arg0, type, mpc_sinh);
10401 CASE_FLT_FN (BUILT_IN_CTAN):
10402 if (validate_arg (arg0, COMPLEX_TYPE)
10403 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10404 return do_mpc_arg1 (arg0, type, mpc_tan);
10407 CASE_FLT_FN (BUILT_IN_CTANH):
10408 if (validate_arg (arg0, COMPLEX_TYPE)
10409 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10410 return do_mpc_arg1 (arg0, type, mpc_tanh);
10413 CASE_FLT_FN (BUILT_IN_CLOG):
10414 if (validate_arg (arg0, COMPLEX_TYPE)
10415 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10416 return do_mpc_arg1 (arg0, type, mpc_log);
10419 CASE_FLT_FN (BUILT_IN_CSQRT):
10420 if (validate_arg (arg0, COMPLEX_TYPE)
10421 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10422 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10426 CASE_FLT_FN (BUILT_IN_CABS):
10427 return fold_builtin_cabs (loc, arg0, type, fndecl);
10429 CASE_FLT_FN (BUILT_IN_CARG):
10430 return fold_builtin_carg (loc, arg0, type);
10432 CASE_FLT_FN (BUILT_IN_SQRT):
10433 return fold_builtin_sqrt (loc, arg0, type);
10435 CASE_FLT_FN (BUILT_IN_CBRT):
10436 return fold_builtin_cbrt (loc, arg0, type);
10438 CASE_FLT_FN (BUILT_IN_ASIN):
10439 if (validate_arg (arg0, REAL_TYPE))
10440 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10441 &dconstm1, &dconst1, true);
10444 CASE_FLT_FN (BUILT_IN_ACOS):
10445 if (validate_arg (arg0, REAL_TYPE))
10446 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10447 &dconstm1, &dconst1, true);
10450 CASE_FLT_FN (BUILT_IN_ATAN):
10451 if (validate_arg (arg0, REAL_TYPE))
10452 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10455 CASE_FLT_FN (BUILT_IN_ASINH):
10456 if (validate_arg (arg0, REAL_TYPE))
10457 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10460 CASE_FLT_FN (BUILT_IN_ACOSH):
10461 if (validate_arg (arg0, REAL_TYPE))
10462 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10463 &dconst1, NULL, true);
10466 CASE_FLT_FN (BUILT_IN_ATANH):
10467 if (validate_arg (arg0, REAL_TYPE))
10468 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10469 &dconstm1, &dconst1, false);
10472 CASE_FLT_FN (BUILT_IN_SIN):
10473 if (validate_arg (arg0, REAL_TYPE))
10474 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10477 CASE_FLT_FN (BUILT_IN_COS):
10478 return fold_builtin_cos (loc, arg0, type, fndecl);
10480 CASE_FLT_FN (BUILT_IN_TAN):
10481 return fold_builtin_tan (arg0, type);
10483 CASE_FLT_FN (BUILT_IN_CEXP):
10484 return fold_builtin_cexp (loc, arg0, type);
10486 CASE_FLT_FN (BUILT_IN_CEXPI):
10487 if (validate_arg (arg0, REAL_TYPE))
10488 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10491 CASE_FLT_FN (BUILT_IN_SINH):
10492 if (validate_arg (arg0, REAL_TYPE))
10493 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10496 CASE_FLT_FN (BUILT_IN_COSH):
10497 return fold_builtin_cosh (loc, arg0, type, fndecl);
10499 CASE_FLT_FN (BUILT_IN_TANH):
10500 if (validate_arg (arg0, REAL_TYPE))
10501 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10504 CASE_FLT_FN (BUILT_IN_ERF):
10505 if (validate_arg (arg0, REAL_TYPE))
10506 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10509 CASE_FLT_FN (BUILT_IN_ERFC):
10510 if (validate_arg (arg0, REAL_TYPE))
10511 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10514 CASE_FLT_FN (BUILT_IN_TGAMMA):
10515 if (validate_arg (arg0, REAL_TYPE))
10516 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10519 CASE_FLT_FN (BUILT_IN_EXP):
10520 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10522 CASE_FLT_FN (BUILT_IN_EXP2):
10523 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10525 CASE_FLT_FN (BUILT_IN_EXP10):
10526 CASE_FLT_FN (BUILT_IN_POW10):
10527 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10529 CASE_FLT_FN (BUILT_IN_EXPM1):
10530 if (validate_arg (arg0, REAL_TYPE))
10531 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10534 CASE_FLT_FN (BUILT_IN_LOG):
10535 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10537 CASE_FLT_FN (BUILT_IN_LOG2):
10538 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10540 CASE_FLT_FN (BUILT_IN_LOG10):
10541 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10543 CASE_FLT_FN (BUILT_IN_LOG1P):
10544 if (validate_arg (arg0, REAL_TYPE))
10545 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10546 &dconstm1, NULL, false);
10549 CASE_FLT_FN (BUILT_IN_J0):
10550 if (validate_arg (arg0, REAL_TYPE))
10551 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10555 CASE_FLT_FN (BUILT_IN_J1):
10556 if (validate_arg (arg0, REAL_TYPE))
10557 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10561 CASE_FLT_FN (BUILT_IN_Y0):
10562 if (validate_arg (arg0, REAL_TYPE))
10563 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10564 &dconst0, NULL, false);
10567 CASE_FLT_FN (BUILT_IN_Y1):
10568 if (validate_arg (arg0, REAL_TYPE))
10569 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10570 &dconst0, NULL, false);
10573 CASE_FLT_FN (BUILT_IN_NAN):
10574 case BUILT_IN_NAND32:
10575 case BUILT_IN_NAND64:
10576 case BUILT_IN_NAND128:
10577 return fold_builtin_nan (arg0, type, true);
10579 CASE_FLT_FN (BUILT_IN_NANS):
10580 return fold_builtin_nan (arg0, type, false);
10582 CASE_FLT_FN (BUILT_IN_FLOOR):
10583 return fold_builtin_floor (loc, fndecl, arg0);
10585 CASE_FLT_FN (BUILT_IN_CEIL):
10586 return fold_builtin_ceil (loc, fndecl, arg0);
10588 CASE_FLT_FN (BUILT_IN_TRUNC):
10589 return fold_builtin_trunc (loc, fndecl, arg0);
10591 CASE_FLT_FN (BUILT_IN_ROUND):
10592 return fold_builtin_round (loc, fndecl, arg0);
10594 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10595 CASE_FLT_FN (BUILT_IN_RINT):
10596 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10598 CASE_FLT_FN (BUILT_IN_LCEIL):
10599 CASE_FLT_FN (BUILT_IN_LLCEIL):
10600 CASE_FLT_FN (BUILT_IN_LFLOOR):
10601 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10602 CASE_FLT_FN (BUILT_IN_LROUND):
10603 CASE_FLT_FN (BUILT_IN_LLROUND):
10604 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10606 CASE_FLT_FN (BUILT_IN_LRINT):
10607 CASE_FLT_FN (BUILT_IN_LLRINT):
10608 return fold_fixed_mathfn (loc, fndecl, arg0);
10610 case BUILT_IN_BSWAP32:
10611 case BUILT_IN_BSWAP64:
10612 return fold_builtin_bswap (fndecl, arg0);
10614 CASE_INT_FN (BUILT_IN_FFS):
10615 CASE_INT_FN (BUILT_IN_CLZ):
10616 CASE_INT_FN (BUILT_IN_CTZ):
10617 CASE_INT_FN (BUILT_IN_POPCOUNT):
10618 CASE_INT_FN (BUILT_IN_PARITY):
10619 return fold_builtin_bitop (fndecl, arg0);
10621 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10622 return fold_builtin_signbit (loc, arg0, type);
10624 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10625 return fold_builtin_significand (loc, arg0, type);
10627 CASE_FLT_FN (BUILT_IN_ILOGB):
10628 CASE_FLT_FN (BUILT_IN_LOGB):
10629 return fold_builtin_logb (loc, arg0, type);
10631 case BUILT_IN_ISASCII:
10632 return fold_builtin_isascii (loc, arg0);
10634 case BUILT_IN_TOASCII:
10635 return fold_builtin_toascii (loc, arg0);
10637 case BUILT_IN_ISDIGIT:
10638 return fold_builtin_isdigit (loc, arg0);
10640 CASE_FLT_FN (BUILT_IN_FINITE):
10641 case BUILT_IN_FINITED32:
10642 case BUILT_IN_FINITED64:
10643 case BUILT_IN_FINITED128:
10644 case BUILT_IN_ISFINITE:
10645 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10647 CASE_FLT_FN (BUILT_IN_ISINF):
10648 case BUILT_IN_ISINFD32:
10649 case BUILT_IN_ISINFD64:
10650 case BUILT_IN_ISINFD128:
10651 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10653 case BUILT_IN_ISINF_SIGN:
10654 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10656 CASE_FLT_FN (BUILT_IN_ISNAN):
10657 case BUILT_IN_ISNAND32:
10658 case BUILT_IN_ISNAND64:
10659 case BUILT_IN_ISNAND128:
10660 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10662 case BUILT_IN_PRINTF:
10663 case BUILT_IN_PRINTF_UNLOCKED:
10664 case BUILT_IN_VPRINTF:
10665 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10675 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10676 IGNORE is true if the result of the function call is ignored. This
10677 function returns NULL_TREE if no simplification was possible. */
10680 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10682 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10683 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10687 CASE_FLT_FN (BUILT_IN_JN):
10688 if (validate_arg (arg0, INTEGER_TYPE)
10689 && validate_arg (arg1, REAL_TYPE))
10690 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10693 CASE_FLT_FN (BUILT_IN_YN):
10694 if (validate_arg (arg0, INTEGER_TYPE)
10695 && validate_arg (arg1, REAL_TYPE))
10696 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10700 CASE_FLT_FN (BUILT_IN_DREM):
10701 CASE_FLT_FN (BUILT_IN_REMAINDER):
10702 if (validate_arg (arg0, REAL_TYPE)
10703 && validate_arg(arg1, REAL_TYPE))
10704 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10707 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10708 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10709 if (validate_arg (arg0, REAL_TYPE)
10710 && validate_arg(arg1, POINTER_TYPE))
10711 return do_mpfr_lgamma_r (arg0, arg1, type);
10714 CASE_FLT_FN (BUILT_IN_ATAN2):
10715 if (validate_arg (arg0, REAL_TYPE)
10716 && validate_arg(arg1, REAL_TYPE))
10717 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10720 CASE_FLT_FN (BUILT_IN_FDIM):
10721 if (validate_arg (arg0, REAL_TYPE)
10722 && validate_arg(arg1, REAL_TYPE))
10723 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10726 CASE_FLT_FN (BUILT_IN_HYPOT):
10727 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10729 #ifdef HAVE_mpc_pow
10730 CASE_FLT_FN (BUILT_IN_CPOW):
10731 if (validate_arg (arg0, COMPLEX_TYPE)
10732 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10733 && validate_arg (arg1, COMPLEX_TYPE)
10734 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10735 return do_mpc_arg2 (arg0, arg1, type, mpc_pow);
10739 CASE_FLT_FN (BUILT_IN_LDEXP):
10740 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10741 CASE_FLT_FN (BUILT_IN_SCALBN):
10742 CASE_FLT_FN (BUILT_IN_SCALBLN):
10743 return fold_builtin_load_exponent (loc, arg0, arg1,
10744 type, /*ldexp=*/false);
10746 CASE_FLT_FN (BUILT_IN_FREXP):
10747 return fold_builtin_frexp (loc, arg0, arg1, type);
10749 CASE_FLT_FN (BUILT_IN_MODF):
10750 return fold_builtin_modf (loc, arg0, arg1, type);
10752 case BUILT_IN_BZERO:
10753 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10755 case BUILT_IN_FPUTS:
10756 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10758 case BUILT_IN_FPUTS_UNLOCKED:
10759 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10761 case BUILT_IN_STRSTR:
10762 return fold_builtin_strstr (loc, arg0, arg1, type);
10764 case BUILT_IN_STRCAT:
10765 return fold_builtin_strcat (loc, arg0, arg1);
10767 case BUILT_IN_STRSPN:
10768 return fold_builtin_strspn (loc, arg0, arg1);
10770 case BUILT_IN_STRCSPN:
10771 return fold_builtin_strcspn (loc, arg0, arg1);
10773 case BUILT_IN_STRCHR:
10774 case BUILT_IN_INDEX:
10775 return fold_builtin_strchr (loc, arg0, arg1, type);
10777 case BUILT_IN_STRRCHR:
10778 case BUILT_IN_RINDEX:
10779 return fold_builtin_strrchr (loc, arg0, arg1, type);
10781 case BUILT_IN_STRCPY:
10782 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10784 case BUILT_IN_STPCPY:
10787 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10791 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10795 case BUILT_IN_STRCMP:
10796 return fold_builtin_strcmp (loc, arg0, arg1);
10798 case BUILT_IN_STRPBRK:
10799 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10801 case BUILT_IN_EXPECT:
10802 return fold_builtin_expect (loc, arg0, arg1);
10804 CASE_FLT_FN (BUILT_IN_POW):
10805 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10807 CASE_FLT_FN (BUILT_IN_POWI):
10808 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10810 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10811 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10813 CASE_FLT_FN (BUILT_IN_FMIN):
10814 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10816 CASE_FLT_FN (BUILT_IN_FMAX):
10817 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10819 case BUILT_IN_ISGREATER:
10820 return fold_builtin_unordered_cmp (loc, fndecl,
10821 arg0, arg1, UNLE_EXPR, LE_EXPR);
10822 case BUILT_IN_ISGREATEREQUAL:
10823 return fold_builtin_unordered_cmp (loc, fndecl,
10824 arg0, arg1, UNLT_EXPR, LT_EXPR);
10825 case BUILT_IN_ISLESS:
10826 return fold_builtin_unordered_cmp (loc, fndecl,
10827 arg0, arg1, UNGE_EXPR, GE_EXPR);
10828 case BUILT_IN_ISLESSEQUAL:
10829 return fold_builtin_unordered_cmp (loc, fndecl,
10830 arg0, arg1, UNGT_EXPR, GT_EXPR);
10831 case BUILT_IN_ISLESSGREATER:
10832 return fold_builtin_unordered_cmp (loc, fndecl,
10833 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10834 case BUILT_IN_ISUNORDERED:
10835 return fold_builtin_unordered_cmp (loc, fndecl,
10836 arg0, arg1, UNORDERED_EXPR,
10839 /* We do the folding for va_start in the expander. */
10840 case BUILT_IN_VA_START:
10843 case BUILT_IN_SPRINTF:
10844 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10846 case BUILT_IN_OBJECT_SIZE:
10847 return fold_builtin_object_size (arg0, arg1);
10849 case BUILT_IN_PRINTF:
10850 case BUILT_IN_PRINTF_UNLOCKED:
10851 case BUILT_IN_VPRINTF:
10852 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10854 case BUILT_IN_PRINTF_CHK:
10855 case BUILT_IN_VPRINTF_CHK:
10856 if (!validate_arg (arg0, INTEGER_TYPE)
10857 || TREE_SIDE_EFFECTS (arg0))
10860 return fold_builtin_printf (loc, fndecl,
10861 arg1, NULL_TREE, ignore, fcode);
10864 case BUILT_IN_FPRINTF:
10865 case BUILT_IN_FPRINTF_UNLOCKED:
10866 case BUILT_IN_VFPRINTF:
10867 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10876 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10877 and ARG2. IGNORE is true if the result of the function call is ignored.
10878 This function returns NULL_TREE if no simplification was possible. */
10881 fold_builtin_3 (location_t loc, tree fndecl,
10882 tree arg0, tree arg1, tree arg2, bool ignore)
10884 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10885 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10889 CASE_FLT_FN (BUILT_IN_SINCOS):
10890 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10892 CASE_FLT_FN (BUILT_IN_FMA):
10893 if (validate_arg (arg0, REAL_TYPE)
10894 && validate_arg(arg1, REAL_TYPE)
10895 && validate_arg(arg2, REAL_TYPE))
10896 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10899 CASE_FLT_FN (BUILT_IN_REMQUO):
10900 if (validate_arg (arg0, REAL_TYPE)
10901 && validate_arg(arg1, REAL_TYPE)
10902 && validate_arg(arg2, POINTER_TYPE))
10903 return do_mpfr_remquo (arg0, arg1, arg2);
10906 case BUILT_IN_MEMSET:
10907 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10909 case BUILT_IN_BCOPY:
10910 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10911 void_type_node, true, /*endp=*/3);
10913 case BUILT_IN_MEMCPY:
10914 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10915 type, ignore, /*endp=*/0);
10917 case BUILT_IN_MEMPCPY:
10918 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10919 type, ignore, /*endp=*/1);
10921 case BUILT_IN_MEMMOVE:
10922 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10923 type, ignore, /*endp=*/3);
10925 case BUILT_IN_STRNCAT:
10926 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10928 case BUILT_IN_STRNCPY:
10929 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10931 case BUILT_IN_STRNCMP:
10932 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10934 case BUILT_IN_MEMCHR:
10935 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10937 case BUILT_IN_BCMP:
10938 case BUILT_IN_MEMCMP:
10939 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10941 case BUILT_IN_SPRINTF:
10942 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10944 case BUILT_IN_STRCPY_CHK:
10945 case BUILT_IN_STPCPY_CHK:
10946 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10949 case BUILT_IN_STRCAT_CHK:
10950 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10952 case BUILT_IN_PRINTF_CHK:
10953 case BUILT_IN_VPRINTF_CHK:
10954 if (!validate_arg (arg0, INTEGER_TYPE)
10955 || TREE_SIDE_EFFECTS (arg0))
10958 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10961 case BUILT_IN_FPRINTF:
10962 case BUILT_IN_FPRINTF_UNLOCKED:
10963 case BUILT_IN_VFPRINTF:
10964 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10967 case BUILT_IN_FPRINTF_CHK:
10968 case BUILT_IN_VFPRINTF_CHK:
10969 if (!validate_arg (arg1, INTEGER_TYPE)
10970 || TREE_SIDE_EFFECTS (arg1))
10973 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10982 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10983 ARG2, and ARG3. IGNORE is true if the result of the function call is
10984 ignored. This function returns NULL_TREE if no simplification was
10988 fold_builtin_4 (location_t loc, tree fndecl,
10989 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10991 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10995 case BUILT_IN_MEMCPY_CHK:
10996 case BUILT_IN_MEMPCPY_CHK:
10997 case BUILT_IN_MEMMOVE_CHK:
10998 case BUILT_IN_MEMSET_CHK:
10999 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
11001 DECL_FUNCTION_CODE (fndecl));
11003 case BUILT_IN_STRNCPY_CHK:
11004 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
11006 case BUILT_IN_STRNCAT_CHK:
11007 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
11009 case BUILT_IN_FPRINTF_CHK:
11010 case BUILT_IN_VFPRINTF_CHK:
11011 if (!validate_arg (arg1, INTEGER_TYPE)
11012 || TREE_SIDE_EFFECTS (arg1))
11015 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
11025 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11026 arguments, where NARGS <= 4. IGNORE is true if the result of the
11027 function call is ignored. This function returns NULL_TREE if no
11028 simplification was possible. Note that this only folds builtins with
11029 fixed argument patterns. Foldings that do varargs-to-varargs
11030 transformations, or that match calls with more than 4 arguments,
11031 need to be handled with fold_builtin_varargs instead. */
11033 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11036 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11038 tree ret = NULL_TREE;
11043 ret = fold_builtin_0 (loc, fndecl, ignore);
11046 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11049 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11052 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11055 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11063 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11064 SET_EXPR_LOCATION (ret, loc);
11065 TREE_NO_WARNING (ret) = 1;
11071 /* Builtins with folding operations that operate on "..." arguments
11072 need special handling; we need to store the arguments in a convenient
11073 data structure before attempting any folding. Fortunately there are
11074 only a few builtins that fall into this category. FNDECL is the
11075 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11076 result of the function call is ignored. */
11079 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11080 bool ignore ATTRIBUTE_UNUSED)
11082 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11083 tree ret = NULL_TREE;
11087 case BUILT_IN_SPRINTF_CHK:
11088 case BUILT_IN_VSPRINTF_CHK:
11089 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
11092 case BUILT_IN_SNPRINTF_CHK:
11093 case BUILT_IN_VSNPRINTF_CHK:
11094 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
11097 case BUILT_IN_FPCLASSIFY:
11098 ret = fold_builtin_fpclassify (loc, exp);
11106 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11107 SET_EXPR_LOCATION (ret, loc);
11108 TREE_NO_WARNING (ret) = 1;
11114 /* Return true if FNDECL shouldn't be folded right now.
11115 If a built-in function has an inline attribute always_inline
11116 wrapper, defer folding it after always_inline functions have
11117 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11118 might not be performed. */
11121 avoid_folding_inline_builtin (tree fndecl)
11123 return (DECL_DECLARED_INLINE_P (fndecl)
11124 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11126 && !cfun->always_inline_functions_inlined
11127 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11130 /* A wrapper function for builtin folding that prevents warnings for
11131 "statement without effect" and the like, caused by removing the
11132 call node earlier than the warning is generated. */
11135 fold_call_expr (location_t loc, tree exp, bool ignore)
11137 tree ret = NULL_TREE;
11138 tree fndecl = get_callee_fndecl (exp);
11140 && TREE_CODE (fndecl) == FUNCTION_DECL
11141 && DECL_BUILT_IN (fndecl)
11142 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11143 yet. Defer folding until we see all the arguments
11144 (after inlining). */
11145 && !CALL_EXPR_VA_ARG_PACK (exp))
11147 int nargs = call_expr_nargs (exp);
11149 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11150 instead last argument is __builtin_va_arg_pack (). Defer folding
11151 even in that case, until arguments are finalized. */
11152 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11154 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11156 && TREE_CODE (fndecl2) == FUNCTION_DECL
11157 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11158 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11162 if (avoid_folding_inline_builtin (fndecl))
11165 /* FIXME: Don't use a list in this interface. */
11166 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11167 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
11170 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11172 tree *args = CALL_EXPR_ARGP (exp);
11173 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11176 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11184 /* Conveniently construct a function call expression. FNDECL names the
11185 function to be called and ARGLIST is a TREE_LIST of arguments. */
11188 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
11190 tree fntype = TREE_TYPE (fndecl);
11191 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11192 int n = list_length (arglist);
11193 tree *argarray = (tree *) alloca (n * sizeof (tree));
11196 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
11197 argarray[i] = TREE_VALUE (arglist);
11198 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11201 /* Conveniently construct a function call expression. FNDECL names the
11202 function to be called, N is the number of arguments, and the "..."
11203 parameters are the argument expressions. */
11206 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11209 tree fntype = TREE_TYPE (fndecl);
11210 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11211 tree *argarray = (tree *) alloca (n * sizeof (tree));
11215 for (i = 0; i < n; i++)
11216 argarray[i] = va_arg (ap, tree);
11218 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11221 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11222 N arguments are passed in the array ARGARRAY. */
11225 fold_builtin_call_array (location_t loc, tree type,
11230 tree ret = NULL_TREE;
11234 if (TREE_CODE (fn) == ADDR_EXPR)
11236 tree fndecl = TREE_OPERAND (fn, 0);
11237 if (TREE_CODE (fndecl) == FUNCTION_DECL
11238 && DECL_BUILT_IN (fndecl))
11240 /* If last argument is __builtin_va_arg_pack (), arguments to this
11241 function are not finalized yet. Defer folding until they are. */
11242 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11244 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11246 && TREE_CODE (fndecl2) == FUNCTION_DECL
11247 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11248 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11249 return build_call_array_loc (loc, type, fn, n, argarray);
11251 if (avoid_folding_inline_builtin (fndecl))
11252 return build_call_array_loc (loc, type, fn, n, argarray);
11253 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11255 tree arglist = NULL_TREE;
11256 for (i = n - 1; i >= 0; i--)
11257 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
11258 ret = targetm.fold_builtin (fndecl, arglist, false);
11261 return build_call_array_loc (loc, type, fn, n, argarray);
11263 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11265 /* First try the transformations that don't require consing up
11267 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11272 /* If we got this far, we need to build an exp. */
11273 exp = build_call_array_loc (loc, type, fn, n, argarray);
11274 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11275 return ret ? ret : exp;
11279 return build_call_array_loc (loc, type, fn, n, argarray);
11282 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11283 along with N new arguments specified as the "..." parameters. SKIP
11284 is the number of arguments in EXP to be omitted. This function is used
11285 to do varargs-to-varargs transformations. */
11288 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11290 int oldnargs = call_expr_nargs (exp);
11291 int nargs = oldnargs - skip + n;
11292 tree fntype = TREE_TYPE (fndecl);
11293 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11301 buffer = XALLOCAVEC (tree, nargs);
11303 for (i = 0; i < n; i++)
11304 buffer[i] = va_arg (ap, tree);
11306 for (j = skip; j < oldnargs; j++, i++)
11307 buffer[i] = CALL_EXPR_ARG (exp, j);
11310 buffer = CALL_EXPR_ARGP (exp) + skip;
11312 return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
11315 /* Validate a single argument ARG against a tree code CODE representing
11319 validate_arg (const_tree arg, enum tree_code code)
11323 else if (code == POINTER_TYPE)
11324 return POINTER_TYPE_P (TREE_TYPE (arg));
11325 else if (code == INTEGER_TYPE)
11326 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11327 return code == TREE_CODE (TREE_TYPE (arg));
11330 /* This function validates the types of a function call argument list
11331 against a specified list of tree_codes. If the last specifier is a 0,
11332 that represents an ellipses, otherwise the last specifier must be a
11335 This is the GIMPLE version of validate_arglist. Eventually we want to
11336 completely convert builtins.c to work from GIMPLEs and the tree based
11337 validate_arglist will then be removed. */
11340 validate_gimple_arglist (const_gimple call, ...)
11342 enum tree_code code;
11348 va_start (ap, call);
11353 code = (enum tree_code) va_arg (ap, int);
11357 /* This signifies an ellipses, any further arguments are all ok. */
11361 /* This signifies an endlink, if no arguments remain, return
11362 true, otherwise return false. */
11363 res = (i == gimple_call_num_args (call));
11366 /* If no parameters remain or the parameter's code does not
11367 match the specified code, return false. Otherwise continue
11368 checking any remaining arguments. */
11369 arg = gimple_call_arg (call, i++);
11370 if (!validate_arg (arg, code))
11377 /* We need gotos here since we can only have one VA_CLOSE in a
11385 /* This function validates the types of a function call argument list
11386 against a specified list of tree_codes. If the last specifier is a 0,
11387 that represents an ellipses, otherwise the last specifier must be a
11391 validate_arglist (const_tree callexpr, ...)
11393 enum tree_code code;
11396 const_call_expr_arg_iterator iter;
11399 va_start (ap, callexpr);
11400 init_const_call_expr_arg_iterator (callexpr, &iter);
11404 code = (enum tree_code) va_arg (ap, int);
11408 /* This signifies an ellipses, any further arguments are all ok. */
11412 /* This signifies an endlink, if no arguments remain, return
11413 true, otherwise return false. */
11414 res = !more_const_call_expr_args_p (&iter);
11417 /* If no parameters remain or the parameter's code does not
11418 match the specified code, return false. Otherwise continue
11419 checking any remaining arguments. */
11420 arg = next_const_call_expr_arg (&iter);
11421 if (!validate_arg (arg, code))
11428 /* We need gotos here since we can only have one VA_CLOSE in a
11436 /* Default target-specific builtin expander that does nothing. */
11439 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11440 rtx target ATTRIBUTE_UNUSED,
11441 rtx subtarget ATTRIBUTE_UNUSED,
11442 enum machine_mode mode ATTRIBUTE_UNUSED,
11443 int ignore ATTRIBUTE_UNUSED)
11448 /* Returns true is EXP represents data that would potentially reside
11449 in a readonly section. */
11452 readonly_data_expr (tree exp)
11456 if (TREE_CODE (exp) != ADDR_EXPR)
11459 exp = get_base_address (TREE_OPERAND (exp, 0));
11463 /* Make sure we call decl_readonly_section only for trees it
11464 can handle (since it returns true for everything it doesn't
11466 if (TREE_CODE (exp) == STRING_CST
11467 || TREE_CODE (exp) == CONSTRUCTOR
11468 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11469 return decl_readonly_section (exp, 0);
11474 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11475 to the call, and TYPE is its return type.
11477 Return NULL_TREE if no simplification was possible, otherwise return the
11478 simplified form of the call as a tree.
11480 The simplified form may be a constant or other expression which
11481 computes the same value, but in a more efficient manner (including
11482 calls to other builtin functions).
11484 The call may contain arguments which need to be evaluated, but
11485 which are not useful to determine the result of the call. In
11486 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11487 COMPOUND_EXPR will be an argument which must be evaluated.
11488 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11489 COMPOUND_EXPR in the chain will contain the tree for the simplified
11490 form of the builtin function call. */
11493 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11495 if (!validate_arg (s1, POINTER_TYPE)
11496 || !validate_arg (s2, POINTER_TYPE))
11501 const char *p1, *p2;
11503 p2 = c_getstr (s2);
11507 p1 = c_getstr (s1);
11510 const char *r = strstr (p1, p2);
11514 return build_int_cst (TREE_TYPE (s1), 0);
11516 /* Return an offset into the constant string argument. */
11517 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11518 s1, size_int (r - p1));
11519 return fold_convert_loc (loc, type, tem);
11522 /* The argument is const char *, and the result is char *, so we need
11523 a type conversion here to avoid a warning. */
11525 return fold_convert_loc (loc, type, s1);
11530 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11534 /* New argument list transforming strstr(s1, s2) to
11535 strchr(s1, s2[0]). */
11536 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11540 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11541 the call, and TYPE is its return type.
11543 Return NULL_TREE if no simplification was possible, otherwise return the
11544 simplified form of the call as a tree.
11546 The simplified form may be a constant or other expression which
11547 computes the same value, but in a more efficient manner (including
11548 calls to other builtin functions).
11550 The call may contain arguments which need to be evaluated, but
11551 which are not useful to determine the result of the call. In
11552 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11553 COMPOUND_EXPR will be an argument which must be evaluated.
11554 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11555 COMPOUND_EXPR in the chain will contain the tree for the simplified
11556 form of the builtin function call. */
11559 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11561 if (!validate_arg (s1, POINTER_TYPE)
11562 || !validate_arg (s2, INTEGER_TYPE))
11568 if (TREE_CODE (s2) != INTEGER_CST)
11571 p1 = c_getstr (s1);
11578 if (target_char_cast (s2, &c))
11581 r = strchr (p1, c);
11584 return build_int_cst (TREE_TYPE (s1), 0);
11586 /* Return an offset into the constant string argument. */
11587 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11588 s1, size_int (r - p1));
11589 return fold_convert_loc (loc, type, tem);
11595 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11596 the call, and TYPE is its return type.
11598 Return NULL_TREE if no simplification was possible, otherwise return the
11599 simplified form of the call as a tree.
11601 The simplified form may be a constant or other expression which
11602 computes the same value, but in a more efficient manner (including
11603 calls to other builtin functions).
11605 The call may contain arguments which need to be evaluated, but
11606 which are not useful to determine the result of the call. In
11607 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11608 COMPOUND_EXPR will be an argument which must be evaluated.
11609 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11610 COMPOUND_EXPR in the chain will contain the tree for the simplified
11611 form of the builtin function call. */
11614 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11616 if (!validate_arg (s1, POINTER_TYPE)
11617 || !validate_arg (s2, INTEGER_TYPE))
11624 if (TREE_CODE (s2) != INTEGER_CST)
11627 p1 = c_getstr (s1);
11634 if (target_char_cast (s2, &c))
11637 r = strrchr (p1, c);
11640 return build_int_cst (TREE_TYPE (s1), 0);
11642 /* Return an offset into the constant string argument. */
11643 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11644 s1, size_int (r - p1));
11645 return fold_convert_loc (loc, type, tem);
11648 if (! integer_zerop (s2))
11651 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11655 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11656 return build_call_expr_loc (loc, fn, 2, s1, s2);
11660 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11661 to the call, and TYPE is its return type.
11663 Return NULL_TREE if no simplification was possible, otherwise return the
11664 simplified form of the call as a tree.
11666 The simplified form may be a constant or other expression which
11667 computes the same value, but in a more efficient manner (including
11668 calls to other builtin functions).
11670 The call may contain arguments which need to be evaluated, but
11671 which are not useful to determine the result of the call. In
11672 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11673 COMPOUND_EXPR will be an argument which must be evaluated.
11674 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11675 COMPOUND_EXPR in the chain will contain the tree for the simplified
11676 form of the builtin function call. */
11679 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11681 if (!validate_arg (s1, POINTER_TYPE)
11682 || !validate_arg (s2, POINTER_TYPE))
11687 const char *p1, *p2;
11689 p2 = c_getstr (s2);
11693 p1 = c_getstr (s1);
11696 const char *r = strpbrk (p1, p2);
11700 return build_int_cst (TREE_TYPE (s1), 0);
11702 /* Return an offset into the constant string argument. */
11703 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11704 s1, size_int (r - p1));
11705 return fold_convert_loc (loc, type, tem);
11709 /* strpbrk(x, "") == NULL.
11710 Evaluate and ignore s1 in case it had side-effects. */
11711 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11714 return NULL_TREE; /* Really call strpbrk. */
11716 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11720 /* New argument list transforming strpbrk(s1, s2) to
11721 strchr(s1, s2[0]). */
11722 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11726 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11729 Return NULL_TREE if no simplification was possible, otherwise return the
11730 simplified form of the call as a tree.
11732 The simplified form may be a constant or other expression which
11733 computes the same value, but in a more efficient manner (including
11734 calls to other builtin functions).
11736 The call may contain arguments which need to be evaluated, but
11737 which are not useful to determine the result of the call. In
11738 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11739 COMPOUND_EXPR will be an argument which must be evaluated.
11740 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11741 COMPOUND_EXPR in the chain will contain the tree for the simplified
11742 form of the builtin function call. */
11745 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11747 if (!validate_arg (dst, POINTER_TYPE)
11748 || !validate_arg (src, POINTER_TYPE))
11752 const char *p = c_getstr (src);
11754 /* If the string length is zero, return the dst parameter. */
11755 if (p && *p == '\0')
11762 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11763 arguments to the call.
11765 Return NULL_TREE if no simplification was possible, otherwise return the
11766 simplified form of the call as a tree.
11768 The simplified form may be a constant or other expression which
11769 computes the same value, but in a more efficient manner (including
11770 calls to other builtin functions).
11772 The call may contain arguments which need to be evaluated, but
11773 which are not useful to determine the result of the call. In
11774 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11775 COMPOUND_EXPR will be an argument which must be evaluated.
11776 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11777 COMPOUND_EXPR in the chain will contain the tree for the simplified
11778 form of the builtin function call. */
11781 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11783 if (!validate_arg (dst, POINTER_TYPE)
11784 || !validate_arg (src, POINTER_TYPE)
11785 || !validate_arg (len, INTEGER_TYPE))
11789 const char *p = c_getstr (src);
11791 /* If the requested length is zero, or the src parameter string
11792 length is zero, return the dst parameter. */
11793 if (integer_zerop (len) || (p && *p == '\0'))
11794 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11796 /* If the requested len is greater than or equal to the string
11797 length, call strcat. */
11798 if (TREE_CODE (len) == INTEGER_CST && p
11799 && compare_tree_int (len, strlen (p)) >= 0)
11801 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11803 /* If the replacement _DECL isn't initialized, don't do the
11808 return build_call_expr_loc (loc, fn, 2, dst, src);
11814 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11817 Return NULL_TREE if no simplification was possible, otherwise return the
11818 simplified form of the call as a tree.
11820 The simplified form may be a constant or other expression which
11821 computes the same value, but in a more efficient manner (including
11822 calls to other builtin functions).
11824 The call may contain arguments which need to be evaluated, but
11825 which are not useful to determine the result of the call. In
11826 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11827 COMPOUND_EXPR will be an argument which must be evaluated.
11828 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11829 COMPOUND_EXPR in the chain will contain the tree for the simplified
11830 form of the builtin function call. */
11833 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11835 if (!validate_arg (s1, POINTER_TYPE)
11836 || !validate_arg (s2, POINTER_TYPE))
11840 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11842 /* If both arguments are constants, evaluate at compile-time. */
11845 const size_t r = strspn (p1, p2);
11846 return size_int (r);
11849 /* If either argument is "", return NULL_TREE. */
11850 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11851 /* Evaluate and ignore both arguments in case either one has
11853 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11859 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11862 Return NULL_TREE if no simplification was possible, otherwise return the
11863 simplified form of the call as a tree.
11865 The simplified form may be a constant or other expression which
11866 computes the same value, but in a more efficient manner (including
11867 calls to other builtin functions).
11869 The call may contain arguments which need to be evaluated, but
11870 which are not useful to determine the result of the call. In
11871 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11872 COMPOUND_EXPR will be an argument which must be evaluated.
11873 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11874 COMPOUND_EXPR in the chain will contain the tree for the simplified
11875 form of the builtin function call. */
11878 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11880 if (!validate_arg (s1, POINTER_TYPE)
11881 || !validate_arg (s2, POINTER_TYPE))
11885 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11887 /* If both arguments are constants, evaluate at compile-time. */
11890 const size_t r = strcspn (p1, p2);
11891 return size_int (r);
11894 /* If the first argument is "", return NULL_TREE. */
11895 if (p1 && *p1 == '\0')
11897 /* Evaluate and ignore argument s2 in case it has
11899 return omit_one_operand_loc (loc, size_type_node,
11900 size_zero_node, s2);
11903 /* If the second argument is "", return __builtin_strlen(s1). */
11904 if (p2 && *p2 == '\0')
11906 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11908 /* If the replacement _DECL isn't initialized, don't do the
11913 return build_call_expr_loc (loc, fn, 1, s1);
11919 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11920 to the call. IGNORE is true if the value returned
11921 by the builtin will be ignored. UNLOCKED is true is true if this
11922 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11923 the known length of the string. Return NULL_TREE if no simplification
11927 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11928 bool ignore, bool unlocked, tree len)
11930 /* If we're using an unlocked function, assume the other unlocked
11931 functions exist explicitly. */
11932 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11933 : implicit_built_in_decls[BUILT_IN_FPUTC];
11934 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11935 : implicit_built_in_decls[BUILT_IN_FWRITE];
11937 /* If the return value is used, don't do the transformation. */
11941 /* Verify the arguments in the original call. */
11942 if (!validate_arg (arg0, POINTER_TYPE)
11943 || !validate_arg (arg1, POINTER_TYPE))
11947 len = c_strlen (arg0, 0);
11949 /* Get the length of the string passed to fputs. If the length
11950 can't be determined, punt. */
11952 || TREE_CODE (len) != INTEGER_CST)
11955 switch (compare_tree_int (len, 1))
11957 case -1: /* length is 0, delete the call entirely . */
11958 return omit_one_operand_loc (loc, integer_type_node,
11959 integer_zero_node, arg1);;
11961 case 0: /* length is 1, call fputc. */
11963 const char *p = c_getstr (arg0);
11968 return build_call_expr_loc (loc, fn_fputc, 2,
11969 build_int_cst (NULL_TREE, p[0]), arg1);
11975 case 1: /* length is greater than 1, call fwrite. */
11977 /* If optimizing for size keep fputs. */
11978 if (optimize_function_for_size_p (cfun))
11980 /* New argument list transforming fputs(string, stream) to
11981 fwrite(string, 1, len, stream). */
11983 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11984 size_one_node, len, arg1);
11989 gcc_unreachable ();
11994 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11995 produced. False otherwise. This is done so that we don't output the error
11996 or warning twice or three times. */
11999 fold_builtin_next_arg (tree exp, bool va_start_p)
12001 tree fntype = TREE_TYPE (current_function_decl);
12002 int nargs = call_expr_nargs (exp);
12005 if (TYPE_ARG_TYPES (fntype) == 0
12006 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
12007 == void_type_node))
12009 error ("%<va_start%> used in function with fixed args");
12015 if (va_start_p && (nargs != 2))
12017 error ("wrong number of arguments to function %<va_start%>");
12020 arg = CALL_EXPR_ARG (exp, 1);
12022 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12023 when we checked the arguments and if needed issued a warning. */
12028 /* Evidently an out of date version of <stdarg.h>; can't validate
12029 va_start's second argument, but can still work as intended. */
12030 warning (0, "%<__builtin_next_arg%> called without an argument");
12033 else if (nargs > 1)
12035 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12038 arg = CALL_EXPR_ARG (exp, 0);
12041 if (TREE_CODE (arg) == SSA_NAME)
12042 arg = SSA_NAME_VAR (arg);
12044 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12045 or __builtin_next_arg (0) the first time we see it, after checking
12046 the arguments and if needed issuing a warning. */
12047 if (!integer_zerop (arg))
12049 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12051 /* Strip off all nops for the sake of the comparison. This
12052 is not quite the same as STRIP_NOPS. It does more.
12053 We must also strip off INDIRECT_EXPR for C++ reference
12055 while (CONVERT_EXPR_P (arg)
12056 || TREE_CODE (arg) == INDIRECT_REF)
12057 arg = TREE_OPERAND (arg, 0);
12058 if (arg != last_parm)
12060 /* FIXME: Sometimes with the tree optimizers we can get the
12061 not the last argument even though the user used the last
12062 argument. We just warn and set the arg to be the last
12063 argument so that we will get wrong-code because of
12065 warning (0, "second parameter of %<va_start%> not last named argument");
12068 /* Undefined by C99 7.15.1.4p4 (va_start):
12069 "If the parameter parmN is declared with the register storage
12070 class, with a function or array type, or with a type that is
12071 not compatible with the type that results after application of
12072 the default argument promotions, the behavior is undefined."
12074 else if (DECL_REGISTER (arg))
12075 warning (0, "undefined behaviour when second parameter of "
12076 "%<va_start%> is declared with %<register%> storage");
12078 /* We want to verify the second parameter just once before the tree
12079 optimizers are run and then avoid keeping it in the tree,
12080 as otherwise we could warn even for correct code like:
12081 void foo (int i, ...)
12082 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12084 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12086 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12092 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12093 ORIG may be null if this is a 2-argument call. We don't attempt to
12094 simplify calls with more than 3 arguments.
12096 Return NULL_TREE if no simplification was possible, otherwise return the
12097 simplified form of the call as a tree. If IGNORED is true, it means that
12098 the caller does not use the returned value of the function. */
12101 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12102 tree orig, int ignored)
12105 const char *fmt_str = NULL;
12107 /* Verify the required arguments in the original call. We deal with two
12108 types of sprintf() calls: 'sprintf (str, fmt)' and
12109 'sprintf (dest, "%s", orig)'. */
12110 if (!validate_arg (dest, POINTER_TYPE)
12111 || !validate_arg (fmt, POINTER_TYPE))
12113 if (orig && !validate_arg (orig, POINTER_TYPE))
12116 /* Check whether the format is a literal string constant. */
12117 fmt_str = c_getstr (fmt);
12118 if (fmt_str == NULL)
12122 retval = NULL_TREE;
12124 if (!init_target_chars ())
12127 /* If the format doesn't contain % args or %%, use strcpy. */
12128 if (strchr (fmt_str, target_percent) == NULL)
12130 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
12135 /* Don't optimize sprintf (buf, "abc", ptr++). */
12139 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12140 'format' is known to contain no % formats. */
12141 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12143 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
12146 /* If the format is "%s", use strcpy if the result isn't used. */
12147 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12150 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
12155 /* Don't crash on sprintf (str1, "%s"). */
12159 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12162 retval = c_strlen (orig, 1);
12163 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12166 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12169 if (call && retval)
12171 retval = fold_convert_loc
12172 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
12174 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12180 /* Expand a call EXP to __builtin_object_size. */
12183 expand_builtin_object_size (tree exp)
12186 int object_size_type;
12187 tree fndecl = get_callee_fndecl (exp);
12189 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12191 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12193 expand_builtin_trap ();
12197 ost = CALL_EXPR_ARG (exp, 1);
12200 if (TREE_CODE (ost) != INTEGER_CST
12201 || tree_int_cst_sgn (ost) < 0
12202 || compare_tree_int (ost, 3) > 0)
12204 error ("%Klast argument of %D is not integer constant between 0 and 3",
12206 expand_builtin_trap ();
12210 object_size_type = tree_low_cst (ost, 0);
12212 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12215 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12216 FCODE is the BUILT_IN_* to use.
12217 Return NULL_RTX if we failed; the caller should emit a normal call,
12218 otherwise try to get the result in TARGET, if convenient (and in
12219 mode MODE if that's convenient). */
12222 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12223 enum built_in_function fcode)
12225 tree dest, src, len, size;
12227 if (!validate_arglist (exp,
12229 fcode == BUILT_IN_MEMSET_CHK
12230 ? INTEGER_TYPE : POINTER_TYPE,
12231 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12234 dest = CALL_EXPR_ARG (exp, 0);
12235 src = CALL_EXPR_ARG (exp, 1);
12236 len = CALL_EXPR_ARG (exp, 2);
12237 size = CALL_EXPR_ARG (exp, 3);
12239 if (! host_integerp (size, 1))
12242 if (host_integerp (len, 1) || integer_all_onesp (size))
12246 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12248 warning_at (tree_nonartificial_location (exp),
12249 0, "%Kcall to %D will always overflow destination buffer",
12250 exp, get_callee_fndecl (exp));
12255 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12256 mem{cpy,pcpy,move,set} is available. */
12259 case BUILT_IN_MEMCPY_CHK:
12260 fn = built_in_decls[BUILT_IN_MEMCPY];
12262 case BUILT_IN_MEMPCPY_CHK:
12263 fn = built_in_decls[BUILT_IN_MEMPCPY];
12265 case BUILT_IN_MEMMOVE_CHK:
12266 fn = built_in_decls[BUILT_IN_MEMMOVE];
12268 case BUILT_IN_MEMSET_CHK:
12269 fn = built_in_decls[BUILT_IN_MEMSET];
12278 fn = build_call_expr (fn, 3, dest, src, len);
12279 STRIP_TYPE_NOPS (fn);
12280 while (TREE_CODE (fn) == COMPOUND_EXPR)
12282 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12284 fn = TREE_OPERAND (fn, 1);
12286 if (TREE_CODE (fn) == CALL_EXPR)
12287 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12288 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12290 else if (fcode == BUILT_IN_MEMSET_CHK)
12294 unsigned int dest_align
12295 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
12297 /* If DEST is not a pointer type, call the normal function. */
12298 if (dest_align == 0)
12301 /* If SRC and DEST are the same (and not volatile), do nothing. */
12302 if (operand_equal_p (src, dest, 0))
12306 if (fcode != BUILT_IN_MEMPCPY_CHK)
12308 /* Evaluate and ignore LEN in case it has side-effects. */
12309 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12310 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12313 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12314 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12317 /* __memmove_chk special case. */
12318 if (fcode == BUILT_IN_MEMMOVE_CHK)
12320 unsigned int src_align
12321 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
12323 if (src_align == 0)
12326 /* If src is categorized for a readonly section we can use
12327 normal __memcpy_chk. */
12328 if (readonly_data_expr (src))
12330 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12333 fn = build_call_expr (fn, 4, dest, src, len, size);
12334 STRIP_TYPE_NOPS (fn);
12335 while (TREE_CODE (fn) == COMPOUND_EXPR)
12337 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12339 fn = TREE_OPERAND (fn, 1);
12341 if (TREE_CODE (fn) == CALL_EXPR)
12342 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12343 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12350 /* Emit warning if a buffer overflow is detected at compile time. */
12353 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12357 location_t loc = tree_nonartificial_location (exp);
12361 case BUILT_IN_STRCPY_CHK:
12362 case BUILT_IN_STPCPY_CHK:
12363 /* For __strcat_chk the warning will be emitted only if overflowing
12364 by at least strlen (dest) + 1 bytes. */
12365 case BUILT_IN_STRCAT_CHK:
12366 len = CALL_EXPR_ARG (exp, 1);
12367 size = CALL_EXPR_ARG (exp, 2);
12370 case BUILT_IN_STRNCAT_CHK:
12371 case BUILT_IN_STRNCPY_CHK:
12372 len = CALL_EXPR_ARG (exp, 2);
12373 size = CALL_EXPR_ARG (exp, 3);
12375 case BUILT_IN_SNPRINTF_CHK:
12376 case BUILT_IN_VSNPRINTF_CHK:
12377 len = CALL_EXPR_ARG (exp, 1);
12378 size = CALL_EXPR_ARG (exp, 3);
12381 gcc_unreachable ();
12387 if (! host_integerp (size, 1) || integer_all_onesp (size))
12392 len = c_strlen (len, 1);
12393 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12396 else if (fcode == BUILT_IN_STRNCAT_CHK)
12398 tree src = CALL_EXPR_ARG (exp, 1);
12399 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12401 src = c_strlen (src, 1);
12402 if (! src || ! host_integerp (src, 1))
12404 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12405 exp, get_callee_fndecl (exp));
12408 else if (tree_int_cst_lt (src, size))
12411 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12414 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12415 exp, get_callee_fndecl (exp));
12418 /* Emit warning if a buffer overflow is detected at compile time
12419 in __sprintf_chk/__vsprintf_chk calls. */
12422 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12424 tree dest, size, len, fmt, flag;
12425 const char *fmt_str;
12426 int nargs = call_expr_nargs (exp);
12428 /* Verify the required arguments in the original call. */
12432 dest = CALL_EXPR_ARG (exp, 0);
12433 flag = CALL_EXPR_ARG (exp, 1);
12434 size = CALL_EXPR_ARG (exp, 2);
12435 fmt = CALL_EXPR_ARG (exp, 3);
12437 if (! host_integerp (size, 1) || integer_all_onesp (size))
12440 /* Check whether the format is a literal string constant. */
12441 fmt_str = c_getstr (fmt);
12442 if (fmt_str == NULL)
12445 if (!init_target_chars ())
12448 /* If the format doesn't contain % args or %%, we know its size. */
12449 if (strchr (fmt_str, target_percent) == 0)
12450 len = build_int_cstu (size_type_node, strlen (fmt_str));
12451 /* If the format is "%s" and first ... argument is a string literal,
12453 else if (fcode == BUILT_IN_SPRINTF_CHK
12454 && strcmp (fmt_str, target_percent_s) == 0)
12460 arg = CALL_EXPR_ARG (exp, 4);
12461 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12464 len = c_strlen (arg, 1);
12465 if (!len || ! host_integerp (len, 1))
12471 if (! tree_int_cst_lt (len, size))
12472 warning_at (tree_nonartificial_location (exp),
12473 0, "%Kcall to %D will always overflow destination buffer",
12474 exp, get_callee_fndecl (exp));
12477 /* Emit warning if a free is called with address of a variable. */
12480 maybe_emit_free_warning (tree exp)
12482 tree arg = CALL_EXPR_ARG (exp, 0);
12485 if (TREE_CODE (arg) != ADDR_EXPR)
12488 arg = get_base_address (TREE_OPERAND (arg, 0));
12489 if (arg == NULL || INDIRECT_REF_P (arg))
12492 if (SSA_VAR_P (arg))
12493 warning_at (tree_nonartificial_location (exp),
12494 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12496 warning_at (tree_nonartificial_location (exp),
12497 0, "%Kattempt to free a non-heap object", exp);
12500 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12504 fold_builtin_object_size (tree ptr, tree ost)
12506 tree ret = NULL_TREE;
12507 int object_size_type;
12509 if (!validate_arg (ptr, POINTER_TYPE)
12510 || !validate_arg (ost, INTEGER_TYPE))
12515 if (TREE_CODE (ost) != INTEGER_CST
12516 || tree_int_cst_sgn (ost) < 0
12517 || compare_tree_int (ost, 3) > 0)
12520 object_size_type = tree_low_cst (ost, 0);
12522 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12523 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12524 and (size_t) 0 for types 2 and 3. */
12525 if (TREE_SIDE_EFFECTS (ptr))
12526 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12528 if (TREE_CODE (ptr) == ADDR_EXPR)
12529 ret = build_int_cstu (size_type_node,
12530 compute_builtin_object_size (ptr, object_size_type));
12532 else if (TREE_CODE (ptr) == SSA_NAME)
12534 unsigned HOST_WIDE_INT bytes;
12536 /* If object size is not known yet, delay folding until
12537 later. Maybe subsequent passes will help determining
12539 bytes = compute_builtin_object_size (ptr, object_size_type);
12540 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12542 ret = build_int_cstu (size_type_node, bytes);
12547 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12548 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12549 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12556 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12557 DEST, SRC, LEN, and SIZE are the arguments to the call.
12558 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12559 code of the builtin. If MAXLEN is not NULL, it is maximum length
12560 passed as third argument. */
12563 fold_builtin_memory_chk (location_t loc, tree fndecl,
12564 tree dest, tree src, tree len, tree size,
12565 tree maxlen, bool ignore,
12566 enum built_in_function fcode)
12570 if (!validate_arg (dest, POINTER_TYPE)
12571 || !validate_arg (src,
12572 (fcode == BUILT_IN_MEMSET_CHK
12573 ? INTEGER_TYPE : POINTER_TYPE))
12574 || !validate_arg (len, INTEGER_TYPE)
12575 || !validate_arg (size, INTEGER_TYPE))
12578 /* If SRC and DEST are the same (and not volatile), return DEST
12579 (resp. DEST+LEN for __mempcpy_chk). */
12580 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12582 if (fcode != BUILT_IN_MEMPCPY_CHK)
12583 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12587 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
12589 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12593 if (! host_integerp (size, 1))
12596 if (! integer_all_onesp (size))
12598 if (! host_integerp (len, 1))
12600 /* If LEN is not constant, try MAXLEN too.
12601 For MAXLEN only allow optimizing into non-_ocs function
12602 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12603 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12605 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12607 /* (void) __mempcpy_chk () can be optimized into
12608 (void) __memcpy_chk (). */
12609 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12613 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12621 if (tree_int_cst_lt (size, maxlen))
12626 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12627 mem{cpy,pcpy,move,set} is available. */
12630 case BUILT_IN_MEMCPY_CHK:
12631 fn = built_in_decls[BUILT_IN_MEMCPY];
12633 case BUILT_IN_MEMPCPY_CHK:
12634 fn = built_in_decls[BUILT_IN_MEMPCPY];
12636 case BUILT_IN_MEMMOVE_CHK:
12637 fn = built_in_decls[BUILT_IN_MEMMOVE];
12639 case BUILT_IN_MEMSET_CHK:
12640 fn = built_in_decls[BUILT_IN_MEMSET];
12649 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12652 /* Fold a call to the __st[rp]cpy_chk builtin.
12653 DEST, SRC, and SIZE are the arguments to the call.
12654 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12655 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12656 strings passed as second argument. */
12659 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12660 tree src, tree size,
12661 tree maxlen, bool ignore,
12662 enum built_in_function fcode)
12666 if (!validate_arg (dest, POINTER_TYPE)
12667 || !validate_arg (src, POINTER_TYPE)
12668 || !validate_arg (size, INTEGER_TYPE))
12671 /* If SRC and DEST are the same (and not volatile), return DEST. */
12672 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12673 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12675 if (! host_integerp (size, 1))
12678 if (! integer_all_onesp (size))
12680 len = c_strlen (src, 1);
12681 if (! len || ! host_integerp (len, 1))
12683 /* If LEN is not constant, try MAXLEN too.
12684 For MAXLEN only allow optimizing into non-_ocs function
12685 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12686 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12688 if (fcode == BUILT_IN_STPCPY_CHK)
12693 /* If return value of __stpcpy_chk is ignored,
12694 optimize into __strcpy_chk. */
12695 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12699 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12702 if (! len || TREE_SIDE_EFFECTS (len))
12705 /* If c_strlen returned something, but not a constant,
12706 transform __strcpy_chk into __memcpy_chk. */
12707 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12711 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12712 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12713 build_call_expr_loc (loc, fn, 4,
12714 dest, src, len, size));
12720 if (! tree_int_cst_lt (maxlen, size))
12724 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12725 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12726 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12730 return build_call_expr_loc (loc, fn, 2, dest, src);
12733 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12734 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12735 length passed as third argument. */
12738 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12739 tree len, tree size, tree maxlen)
12743 if (!validate_arg (dest, POINTER_TYPE)
12744 || !validate_arg (src, POINTER_TYPE)
12745 || !validate_arg (len, INTEGER_TYPE)
12746 || !validate_arg (size, INTEGER_TYPE))
12749 if (! host_integerp (size, 1))
12752 if (! integer_all_onesp (size))
12754 if (! host_integerp (len, 1))
12756 /* If LEN is not constant, try MAXLEN too.
12757 For MAXLEN only allow optimizing into non-_ocs function
12758 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12759 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12765 if (tree_int_cst_lt (size, maxlen))
12769 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12770 fn = built_in_decls[BUILT_IN_STRNCPY];
12774 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12777 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12778 are the arguments to the call. */
12781 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12782 tree src, tree size)
12787 if (!validate_arg (dest, POINTER_TYPE)
12788 || !validate_arg (src, POINTER_TYPE)
12789 || !validate_arg (size, INTEGER_TYPE))
12792 p = c_getstr (src);
12793 /* If the SRC parameter is "", return DEST. */
12794 if (p && *p == '\0')
12795 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12797 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12800 /* If __builtin_strcat_chk is used, assume strcat is available. */
12801 fn = built_in_decls[BUILT_IN_STRCAT];
12805 return build_call_expr_loc (loc, fn, 2, dest, src);
12808 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12812 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12813 tree dest, tree src, tree len, tree size)
12818 if (!validate_arg (dest, POINTER_TYPE)
12819 || !validate_arg (src, POINTER_TYPE)
12820 || !validate_arg (size, INTEGER_TYPE)
12821 || !validate_arg (size, INTEGER_TYPE))
12824 p = c_getstr (src);
12825 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12826 if (p && *p == '\0')
12827 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12828 else if (integer_zerop (len))
12829 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12831 if (! host_integerp (size, 1))
12834 if (! integer_all_onesp (size))
12836 tree src_len = c_strlen (src, 1);
12838 && host_integerp (src_len, 1)
12839 && host_integerp (len, 1)
12840 && ! tree_int_cst_lt (len, src_len))
12842 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12843 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12847 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12852 /* If __builtin_strncat_chk is used, assume strncat is available. */
12853 fn = built_in_decls[BUILT_IN_STRNCAT];
12857 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12860 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12861 a normal call should be emitted rather than expanding the function
12862 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12865 fold_builtin_sprintf_chk (location_t loc, tree exp,
12866 enum built_in_function fcode)
12868 tree dest, size, len, fn, fmt, flag;
12869 const char *fmt_str;
12870 int nargs = call_expr_nargs (exp);
12872 /* Verify the required arguments in the original call. */
12875 dest = CALL_EXPR_ARG (exp, 0);
12876 if (!validate_arg (dest, POINTER_TYPE))
12878 flag = CALL_EXPR_ARG (exp, 1);
12879 if (!validate_arg (flag, INTEGER_TYPE))
12881 size = CALL_EXPR_ARG (exp, 2);
12882 if (!validate_arg (size, INTEGER_TYPE))
12884 fmt = CALL_EXPR_ARG (exp, 3);
12885 if (!validate_arg (fmt, POINTER_TYPE))
12888 if (! host_integerp (size, 1))
12893 if (!init_target_chars ())
12896 /* Check whether the format is a literal string constant. */
12897 fmt_str = c_getstr (fmt);
12898 if (fmt_str != NULL)
12900 /* If the format doesn't contain % args or %%, we know the size. */
12901 if (strchr (fmt_str, target_percent) == 0)
12903 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12904 len = build_int_cstu (size_type_node, strlen (fmt_str));
12906 /* If the format is "%s" and first ... argument is a string literal,
12907 we know the size too. */
12908 else if (fcode == BUILT_IN_SPRINTF_CHK
12909 && strcmp (fmt_str, target_percent_s) == 0)
12915 arg = CALL_EXPR_ARG (exp, 4);
12916 if (validate_arg (arg, POINTER_TYPE))
12918 len = c_strlen (arg, 1);
12919 if (! len || ! host_integerp (len, 1))
12926 if (! integer_all_onesp (size))
12928 if (! len || ! tree_int_cst_lt (len, size))
12932 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12933 or if format doesn't contain % chars or is "%s". */
12934 if (! integer_zerop (flag))
12936 if (fmt_str == NULL)
12938 if (strchr (fmt_str, target_percent) != NULL
12939 && strcmp (fmt_str, target_percent_s))
12943 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12944 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12945 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12949 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
12952 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12953 a normal call should be emitted rather than expanding the function
12954 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12955 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12956 passed as second argument. */
12959 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12960 enum built_in_function fcode)
12962 tree dest, size, len, fn, fmt, flag;
12963 const char *fmt_str;
12965 /* Verify the required arguments in the original call. */
12966 if (call_expr_nargs (exp) < 5)
12968 dest = CALL_EXPR_ARG (exp, 0);
12969 if (!validate_arg (dest, POINTER_TYPE))
12971 len = CALL_EXPR_ARG (exp, 1);
12972 if (!validate_arg (len, INTEGER_TYPE))
12974 flag = CALL_EXPR_ARG (exp, 2);
12975 if (!validate_arg (flag, INTEGER_TYPE))
12977 size = CALL_EXPR_ARG (exp, 3);
12978 if (!validate_arg (size, INTEGER_TYPE))
12980 fmt = CALL_EXPR_ARG (exp, 4);
12981 if (!validate_arg (fmt, POINTER_TYPE))
12984 if (! host_integerp (size, 1))
12987 if (! integer_all_onesp (size))
12989 if (! host_integerp (len, 1))
12991 /* If LEN is not constant, try MAXLEN too.
12992 For MAXLEN only allow optimizing into non-_ocs function
12993 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12994 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13000 if (tree_int_cst_lt (size, maxlen))
13004 if (!init_target_chars ())
13007 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13008 or if format doesn't contain % chars or is "%s". */
13009 if (! integer_zerop (flag))
13011 fmt_str = c_getstr (fmt);
13012 if (fmt_str == NULL)
13014 if (strchr (fmt_str, target_percent) != NULL
13015 && strcmp (fmt_str, target_percent_s))
13019 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13021 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13022 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13026 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
13029 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13030 FMT and ARG are the arguments to the call; we don't fold cases with
13031 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13033 Return NULL_TREE if no simplification was possible, otherwise return the
13034 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13035 code of the function to be simplified. */
13038 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13039 tree arg, bool ignore,
13040 enum built_in_function fcode)
13042 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13043 const char *fmt_str = NULL;
13045 /* If the return value is used, don't do the transformation. */
13049 /* Verify the required arguments in the original call. */
13050 if (!validate_arg (fmt, POINTER_TYPE))
13053 /* Check whether the format is a literal string constant. */
13054 fmt_str = c_getstr (fmt);
13055 if (fmt_str == NULL)
13058 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13060 /* If we're using an unlocked function, assume the other
13061 unlocked functions exist explicitly. */
13062 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
13063 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
13067 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
13068 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
13071 if (!init_target_chars ())
13074 if (strcmp (fmt_str, target_percent_s) == 0
13075 || strchr (fmt_str, target_percent) == NULL)
13079 if (strcmp (fmt_str, target_percent_s) == 0)
13081 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13084 if (!arg || !validate_arg (arg, POINTER_TYPE))
13087 str = c_getstr (arg);
13093 /* The format specifier doesn't contain any '%' characters. */
13094 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13100 /* If the string was "", printf does nothing. */
13101 if (str[0] == '\0')
13102 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13104 /* If the string has length of 1, call putchar. */
13105 if (str[1] == '\0')
13107 /* Given printf("c"), (where c is any one character,)
13108 convert "c"[0] to an int and pass that to the replacement
13110 newarg = build_int_cst (NULL_TREE, str[0]);
13112 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13116 /* If the string was "string\n", call puts("string"). */
13117 size_t len = strlen (str);
13118 if ((unsigned char)str[len - 1] == target_newline)
13120 /* Create a NUL-terminated string that's one char shorter
13121 than the original, stripping off the trailing '\n'. */
13122 char *newstr = XALLOCAVEC (char, len);
13123 memcpy (newstr, str, len - 1);
13124 newstr[len - 1] = 0;
13126 newarg = build_string_literal (len, newstr);
13128 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13131 /* We'd like to arrange to call fputs(string,stdout) here,
13132 but we need stdout and don't have a way to get it yet. */
13137 /* The other optimizations can be done only on the non-va_list variants. */
13138 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13141 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13142 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13144 if (!arg || !validate_arg (arg, POINTER_TYPE))
13147 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13150 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13151 else if (strcmp (fmt_str, target_percent_c) == 0)
13153 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13156 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13162 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13165 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
13166 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13167 more than 3 arguments, and ARG may be null in the 2-argument case.
13169 Return NULL_TREE if no simplification was possible, otherwise return the
13170 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13171 code of the function to be simplified. */
13174 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13175 tree fmt, tree arg, bool ignore,
13176 enum built_in_function fcode)
13178 tree fn_fputc, fn_fputs, call = NULL_TREE;
13179 const char *fmt_str = NULL;
13181 /* If the return value is used, don't do the transformation. */
13185 /* Verify the required arguments in the original call. */
13186 if (!validate_arg (fp, POINTER_TYPE))
13188 if (!validate_arg (fmt, POINTER_TYPE))
13191 /* Check whether the format is a literal string constant. */
13192 fmt_str = c_getstr (fmt);
13193 if (fmt_str == NULL)
13196 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13198 /* If we're using an unlocked function, assume the other
13199 unlocked functions exist explicitly. */
13200 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
13201 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
13205 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
13206 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
13209 if (!init_target_chars ())
13212 /* If the format doesn't contain % args or %%, use strcpy. */
13213 if (strchr (fmt_str, target_percent) == NULL)
13215 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13219 /* If the format specifier was "", fprintf does nothing. */
13220 if (fmt_str[0] == '\0')
13222 /* If FP has side-effects, just wait until gimplification is
13224 if (TREE_SIDE_EFFECTS (fp))
13227 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13230 /* When "string" doesn't contain %, replace all cases of
13231 fprintf (fp, string) with fputs (string, fp). The fputs
13232 builtin will take care of special cases like length == 1. */
13234 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13237 /* The other optimizations can be done only on the non-va_list variants. */
13238 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13241 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13242 else if (strcmp (fmt_str, target_percent_s) == 0)
13244 if (!arg || !validate_arg (arg, POINTER_TYPE))
13247 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13250 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13251 else if (strcmp (fmt_str, target_percent_c) == 0)
13253 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13256 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13261 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13264 /* Initialize format string characters in the target charset. */
13267 init_target_chars (void)
13272 target_newline = lang_hooks.to_target_charset ('\n');
13273 target_percent = lang_hooks.to_target_charset ('%');
13274 target_c = lang_hooks.to_target_charset ('c');
13275 target_s = lang_hooks.to_target_charset ('s');
13276 if (target_newline == 0 || target_percent == 0 || target_c == 0
13280 target_percent_c[0] = target_percent;
13281 target_percent_c[1] = target_c;
13282 target_percent_c[2] = '\0';
13284 target_percent_s[0] = target_percent;
13285 target_percent_s[1] = target_s;
13286 target_percent_s[2] = '\0';
13288 target_percent_s_newline[0] = target_percent;
13289 target_percent_s_newline[1] = target_s;
13290 target_percent_s_newline[2] = target_newline;
13291 target_percent_s_newline[3] = '\0';
13298 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13299 and no overflow/underflow occurred. INEXACT is true if M was not
13300 exactly calculated. TYPE is the tree type for the result. This
13301 function assumes that you cleared the MPFR flags and then
13302 calculated M to see if anything subsequently set a flag prior to
13303 entering this function. Return NULL_TREE if any checks fail. */
13306 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13308 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13309 overflow/underflow occurred. If -frounding-math, proceed iff the
13310 result of calling FUNC was exact. */
13311 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13312 && (!flag_rounding_math || !inexact))
13314 REAL_VALUE_TYPE rr;
13316 real_from_mpfr (&rr, m, type, GMP_RNDN);
13317 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13318 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13319 but the mpft_t is not, then we underflowed in the
13321 if (real_isfinite (&rr)
13322 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13324 REAL_VALUE_TYPE rmode;
13326 real_convert (&rmode, TYPE_MODE (type), &rr);
13327 /* Proceed iff the specified mode can hold the value. */
13328 if (real_identical (&rmode, &rr))
13329 return build_real (type, rmode);
13336 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13337 number and no overflow/underflow occurred. INEXACT is true if M
13338 was not exactly calculated. TYPE is the tree type for the result.
13339 This function assumes that you cleared the MPFR flags and then
13340 calculated M to see if anything subsequently set a flag prior to
13341 entering this function. Return NULL_TREE if any checks fail. */
13344 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact)
13346 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13347 overflow/underflow occurred. If -frounding-math, proceed iff the
13348 result of calling FUNC was exact. */
13349 if (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13350 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13351 && (!flag_rounding_math || !inexact))
13353 REAL_VALUE_TYPE re, im;
13355 real_from_mpfr (&re, mpc_realref (m), type, GMP_RNDN);
13356 real_from_mpfr (&im, mpc_imagref (m), type, GMP_RNDN);
13357 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13358 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13359 but the mpft_t is not, then we underflowed in the
13361 if (real_isfinite (&re) && real_isfinite (&im)
13362 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13363 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0))
13365 REAL_VALUE_TYPE re_mode, im_mode;
13367 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13368 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13369 /* Proceed iff the specified mode can hold the value. */
13370 if (real_identical (&re_mode, &re) && real_identical (&im_mode, &im))
13371 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13372 build_real (TREE_TYPE (type), im_mode));
13377 #endif /* HAVE_mpc */
13379 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13380 FUNC on it and return the resulting value as a tree with type TYPE.
13381 If MIN and/or MAX are not NULL, then the supplied ARG must be
13382 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13383 acceptable values, otherwise they are not. The mpfr precision is
13384 set to the precision of TYPE. We assume that function FUNC returns
13385 zero if the result could be calculated exactly within the requested
13389 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13390 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13393 tree result = NULL_TREE;
13397 /* To proceed, MPFR must exactly represent the target floating point
13398 format, which only happens when the target base equals two. */
13399 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13400 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13402 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13404 if (real_isfinite (ra)
13405 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13406 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13408 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13409 const int prec = fmt->p;
13410 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13414 mpfr_init2 (m, prec);
13415 mpfr_from_real (m, ra, GMP_RNDN);
13416 mpfr_clear_flags ();
13417 inexact = func (m, m, rnd);
13418 result = do_mpfr_ckconv (m, type, inexact);
13426 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13427 FUNC on it and return the resulting value as a tree with type TYPE.
13428 The mpfr precision is set to the precision of TYPE. We assume that
13429 function FUNC returns zero if the result could be calculated
13430 exactly within the requested precision. */
13433 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13434 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13436 tree result = NULL_TREE;
13441 /* To proceed, MPFR must exactly represent the target floating point
13442 format, which only happens when the target base equals two. */
13443 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13444 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13445 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13447 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13448 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13450 if (real_isfinite (ra1) && real_isfinite (ra2))
13452 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13453 const int prec = fmt->p;
13454 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13458 mpfr_inits2 (prec, m1, m2, NULL);
13459 mpfr_from_real (m1, ra1, GMP_RNDN);
13460 mpfr_from_real (m2, ra2, GMP_RNDN);
13461 mpfr_clear_flags ();
13462 inexact = func (m1, m1, m2, rnd);
13463 result = do_mpfr_ckconv (m1, type, inexact);
13464 mpfr_clears (m1, m2, NULL);
13471 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13472 FUNC on it and return the resulting value as a tree with type TYPE.
13473 The mpfr precision is set to the precision of TYPE. We assume that
13474 function FUNC returns zero if the result could be calculated
13475 exactly within the requested precision. */
13478 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13479 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13481 tree result = NULL_TREE;
13487 /* To proceed, MPFR must exactly represent the target floating point
13488 format, which only happens when the target base equals two. */
13489 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13490 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13491 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13492 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13494 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13495 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13496 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13498 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13500 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13501 const int prec = fmt->p;
13502 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13506 mpfr_inits2 (prec, m1, m2, m3, NULL);
13507 mpfr_from_real (m1, ra1, GMP_RNDN);
13508 mpfr_from_real (m2, ra2, GMP_RNDN);
13509 mpfr_from_real (m3, ra3, GMP_RNDN);
13510 mpfr_clear_flags ();
13511 inexact = func (m1, m1, m2, m3, rnd);
13512 result = do_mpfr_ckconv (m1, type, inexact);
13513 mpfr_clears (m1, m2, m3, NULL);
13520 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13521 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13522 If ARG_SINP and ARG_COSP are NULL then the result is returned
13523 as a complex value.
13524 The type is taken from the type of ARG and is used for setting the
13525 precision of the calculation and results. */
13528 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13530 tree const type = TREE_TYPE (arg);
13531 tree result = NULL_TREE;
13535 /* To proceed, MPFR must exactly represent the target floating point
13536 format, which only happens when the target base equals two. */
13537 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13538 && TREE_CODE (arg) == REAL_CST
13539 && !TREE_OVERFLOW (arg))
13541 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13543 if (real_isfinite (ra))
13545 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13546 const int prec = fmt->p;
13547 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13548 tree result_s, result_c;
13552 mpfr_inits2 (prec, m, ms, mc, NULL);
13553 mpfr_from_real (m, ra, GMP_RNDN);
13554 mpfr_clear_flags ();
13555 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13556 result_s = do_mpfr_ckconv (ms, type, inexact);
13557 result_c = do_mpfr_ckconv (mc, type, inexact);
13558 mpfr_clears (m, ms, mc, NULL);
13559 if (result_s && result_c)
13561 /* If we are to return in a complex value do so. */
13562 if (!arg_sinp && !arg_cosp)
13563 return build_complex (build_complex_type (type),
13564 result_c, result_s);
13566 /* Dereference the sin/cos pointer arguments. */
13567 arg_sinp = build_fold_indirect_ref (arg_sinp);
13568 arg_cosp = build_fold_indirect_ref (arg_cosp);
13569 /* Proceed if valid pointer type were passed in. */
13570 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13571 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13573 /* Set the values. */
13574 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13576 TREE_SIDE_EFFECTS (result_s) = 1;
13577 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13579 TREE_SIDE_EFFECTS (result_c) = 1;
13580 /* Combine the assignments into a compound expr. */
13581 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13582 result_s, result_c));
13590 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13591 two-argument mpfr order N Bessel function FUNC on them and return
13592 the resulting value as a tree with type TYPE. The mpfr precision
13593 is set to the precision of TYPE. We assume that function FUNC
13594 returns zero if the result could be calculated exactly within the
13595 requested precision. */
13597 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13598 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13599 const REAL_VALUE_TYPE *min, bool inclusive)
13601 tree result = NULL_TREE;
13606 /* To proceed, MPFR must exactly represent the target floating point
13607 format, which only happens when the target base equals two. */
13608 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13609 && host_integerp (arg1, 0)
13610 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13612 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13613 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13616 && real_isfinite (ra)
13617 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13619 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13620 const int prec = fmt->p;
13621 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13625 mpfr_init2 (m, prec);
13626 mpfr_from_real (m, ra, GMP_RNDN);
13627 mpfr_clear_flags ();
13628 inexact = func (m, n, m, rnd);
13629 result = do_mpfr_ckconv (m, type, inexact);
13637 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13638 the pointer *(ARG_QUO) and return the result. The type is taken
13639 from the type of ARG0 and is used for setting the precision of the
13640 calculation and results. */
13643 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13645 tree const type = TREE_TYPE (arg0);
13646 tree result = NULL_TREE;
13651 /* To proceed, MPFR must exactly represent the target floating point
13652 format, which only happens when the target base equals two. */
13653 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13654 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13655 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13657 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13658 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13660 if (real_isfinite (ra0) && real_isfinite (ra1))
13662 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13663 const int prec = fmt->p;
13664 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13669 mpfr_inits2 (prec, m0, m1, NULL);
13670 mpfr_from_real (m0, ra0, GMP_RNDN);
13671 mpfr_from_real (m1, ra1, GMP_RNDN);
13672 mpfr_clear_flags ();
13673 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13674 /* Remquo is independent of the rounding mode, so pass
13675 inexact=0 to do_mpfr_ckconv(). */
13676 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13677 mpfr_clears (m0, m1, NULL);
13680 /* MPFR calculates quo in the host's long so it may
13681 return more bits in quo than the target int can hold
13682 if sizeof(host long) > sizeof(target int). This can
13683 happen even for native compilers in LP64 mode. In
13684 these cases, modulo the quo value with the largest
13685 number that the target int can hold while leaving one
13686 bit for the sign. */
13687 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13688 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13690 /* Dereference the quo pointer argument. */
13691 arg_quo = build_fold_indirect_ref (arg_quo);
13692 /* Proceed iff a valid pointer type was passed in. */
13693 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13695 /* Set the value. */
13696 tree result_quo = fold_build2 (MODIFY_EXPR,
13697 TREE_TYPE (arg_quo), arg_quo,
13698 build_int_cst (NULL, integer_quo));
13699 TREE_SIDE_EFFECTS (result_quo) = 1;
13700 /* Combine the quo assignment with the rem. */
13701 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13702 result_quo, result_rem));
13710 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13711 resulting value as a tree with type TYPE. The mpfr precision is
13712 set to the precision of TYPE. We assume that this mpfr function
13713 returns zero if the result could be calculated exactly within the
13714 requested precision. In addition, the integer pointer represented
13715 by ARG_SG will be dereferenced and set to the appropriate signgam
13719 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13721 tree result = NULL_TREE;
13725 /* To proceed, MPFR must exactly represent the target floating point
13726 format, which only happens when the target base equals two. Also
13727 verify ARG is a constant and that ARG_SG is an int pointer. */
13728 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13729 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13730 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13731 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13733 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13735 /* In addition to NaN and Inf, the argument cannot be zero or a
13736 negative integer. */
13737 if (real_isfinite (ra)
13738 && ra->cl != rvc_zero
13739 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13741 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13742 const int prec = fmt->p;
13743 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13748 mpfr_init2 (m, prec);
13749 mpfr_from_real (m, ra, GMP_RNDN);
13750 mpfr_clear_flags ();
13751 inexact = mpfr_lgamma (m, &sg, m, rnd);
13752 result_lg = do_mpfr_ckconv (m, type, inexact);
13758 /* Dereference the arg_sg pointer argument. */
13759 arg_sg = build_fold_indirect_ref (arg_sg);
13760 /* Assign the signgam value into *arg_sg. */
13761 result_sg = fold_build2 (MODIFY_EXPR,
13762 TREE_TYPE (arg_sg), arg_sg,
13763 build_int_cst (NULL, sg));
13764 TREE_SIDE_EFFECTS (result_sg) = 1;
13765 /* Combine the signgam assignment with the lgamma result. */
13766 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13767 result_sg, result_lg));
13776 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13777 function FUNC on it and return the resulting value as a tree with
13778 type TYPE. The mpfr precision is set to the precision of TYPE. We
13779 assume that function FUNC returns zero if the result could be
13780 calculated exactly within the requested precision. */
13783 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13785 tree result = NULL_TREE;
13789 /* To proceed, MPFR must exactly represent the target floating point
13790 format, which only happens when the target base equals two. */
13791 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13792 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13793 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13795 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13796 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13798 if (real_isfinite (re) && real_isfinite (im))
13800 const struct real_format *const fmt =
13801 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13802 const int prec = fmt->p;
13803 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13804 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13808 mpc_init2 (m, prec);
13809 mpfr_from_real (mpc_realref(m), re, rnd);
13810 mpfr_from_real (mpc_imagref(m), im, rnd);
13811 mpfr_clear_flags ();
13812 inexact = func (m, m, crnd);
13813 result = do_mpc_ckconv (m, type, inexact);
13821 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13822 mpc function FUNC on it and return the resulting value as a tree
13823 with type TYPE. The mpfr precision is set to the precision of
13824 TYPE. We assume that function FUNC returns zero if the result
13825 could be calculated exactly within the requested precision. */
13827 #ifdef HAVE_mpc_pow
13829 do_mpc_arg2 (tree arg0, tree arg1, tree type,
13830 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13832 tree result = NULL_TREE;
13837 /* To proceed, MPFR must exactly represent the target floating point
13838 format, which only happens when the target base equals two. */
13839 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13840 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13841 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13842 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13843 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13845 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13846 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13847 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13848 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13850 if (real_isfinite (re0) && real_isfinite (im0)
13851 && real_isfinite (re1) && real_isfinite (im1))
13853 const struct real_format *const fmt =
13854 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13855 const int prec = fmt->p;
13856 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13857 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13861 mpc_init2 (m0, prec);
13862 mpc_init2 (m1, prec);
13863 mpfr_from_real (mpc_realref(m0), re0, rnd);
13864 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13865 mpfr_from_real (mpc_realref(m1), re1, rnd);
13866 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13867 mpfr_clear_flags ();
13868 inexact = func (m0, m0, m1, crnd);
13869 result = do_mpc_ckconv (m0, type, inexact);
13878 #endif /* HAVE_mpc */
13881 The functions below provide an alternate interface for folding
13882 builtin function calls presented as GIMPLE_CALL statements rather
13883 than as CALL_EXPRs. The folded result is still expressed as a
13884 tree. There is too much code duplication in the handling of
13885 varargs functions, and a more intrusive re-factoring would permit
13886 better sharing of code between the tree and statement-based
13887 versions of these functions. */
13889 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13890 along with N new arguments specified as the "..." parameters. SKIP
13891 is the number of arguments in STMT to be omitted. This function is used
13892 to do varargs-to-varargs transformations. */
13895 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13897 int oldnargs = gimple_call_num_args (stmt);
13898 int nargs = oldnargs - skip + n;
13899 tree fntype = TREE_TYPE (fndecl);
13900 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13904 location_t loc = gimple_location (stmt);
13906 buffer = XALLOCAVEC (tree, nargs);
13908 for (i = 0; i < n; i++)
13909 buffer[i] = va_arg (ap, tree);
13911 for (j = skip; j < oldnargs; j++, i++)
13912 buffer[i] = gimple_call_arg (stmt, j);
13914 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13917 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13918 a normal call should be emitted rather than expanding the function
13919 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13922 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13924 tree dest, size, len, fn, fmt, flag;
13925 const char *fmt_str;
13926 int nargs = gimple_call_num_args (stmt);
13928 /* Verify the required arguments in the original call. */
13931 dest = gimple_call_arg (stmt, 0);
13932 if (!validate_arg (dest, POINTER_TYPE))
13934 flag = gimple_call_arg (stmt, 1);
13935 if (!validate_arg (flag, INTEGER_TYPE))
13937 size = gimple_call_arg (stmt, 2);
13938 if (!validate_arg (size, INTEGER_TYPE))
13940 fmt = gimple_call_arg (stmt, 3);
13941 if (!validate_arg (fmt, POINTER_TYPE))
13944 if (! host_integerp (size, 1))
13949 if (!init_target_chars ())
13952 /* Check whether the format is a literal string constant. */
13953 fmt_str = c_getstr (fmt);
13954 if (fmt_str != NULL)
13956 /* If the format doesn't contain % args or %%, we know the size. */
13957 if (strchr (fmt_str, target_percent) == 0)
13959 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13960 len = build_int_cstu (size_type_node, strlen (fmt_str));
13962 /* If the format is "%s" and first ... argument is a string literal,
13963 we know the size too. */
13964 else if (fcode == BUILT_IN_SPRINTF_CHK
13965 && strcmp (fmt_str, target_percent_s) == 0)
13971 arg = gimple_call_arg (stmt, 4);
13972 if (validate_arg (arg, POINTER_TYPE))
13974 len = c_strlen (arg, 1);
13975 if (! len || ! host_integerp (len, 1))
13982 if (! integer_all_onesp (size))
13984 if (! len || ! tree_int_cst_lt (len, size))
13988 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13989 or if format doesn't contain % chars or is "%s". */
13990 if (! integer_zerop (flag))
13992 if (fmt_str == NULL)
13994 if (strchr (fmt_str, target_percent) != NULL
13995 && strcmp (fmt_str, target_percent_s))
13999 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
14000 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
14001 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
14005 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
14008 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
14009 a normal call should be emitted rather than expanding the function
14010 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14011 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
14012 passed as second argument. */
14015 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14016 enum built_in_function fcode)
14018 tree dest, size, len, fn, fmt, flag;
14019 const char *fmt_str;
14021 /* Verify the required arguments in the original call. */
14022 if (gimple_call_num_args (stmt) < 5)
14024 dest = gimple_call_arg (stmt, 0);
14025 if (!validate_arg (dest, POINTER_TYPE))
14027 len = gimple_call_arg (stmt, 1);
14028 if (!validate_arg (len, INTEGER_TYPE))
14030 flag = gimple_call_arg (stmt, 2);
14031 if (!validate_arg (flag, INTEGER_TYPE))
14033 size = gimple_call_arg (stmt, 3);
14034 if (!validate_arg (size, INTEGER_TYPE))
14036 fmt = gimple_call_arg (stmt, 4);
14037 if (!validate_arg (fmt, POINTER_TYPE))
14040 if (! host_integerp (size, 1))
14043 if (! integer_all_onesp (size))
14045 if (! host_integerp (len, 1))
14047 /* If LEN is not constant, try MAXLEN too.
14048 For MAXLEN only allow optimizing into non-_ocs function
14049 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
14050 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
14056 if (tree_int_cst_lt (size, maxlen))
14060 if (!init_target_chars ())
14063 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
14064 or if format doesn't contain % chars or is "%s". */
14065 if (! integer_zerop (flag))
14067 fmt_str = c_getstr (fmt);
14068 if (fmt_str == NULL)
14070 if (strchr (fmt_str, target_percent) != NULL
14071 && strcmp (fmt_str, target_percent_s))
14075 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
14077 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
14078 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
14082 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
14085 /* Builtins with folding operations that operate on "..." arguments
14086 need special handling; we need to store the arguments in a convenient
14087 data structure before attempting any folding. Fortunately there are
14088 only a few builtins that fall into this category. FNDECL is the
14089 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14090 result of the function call is ignored. */
14093 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14094 bool ignore ATTRIBUTE_UNUSED)
14096 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14097 tree ret = NULL_TREE;
14101 case BUILT_IN_SPRINTF_CHK:
14102 case BUILT_IN_VSPRINTF_CHK:
14103 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14106 case BUILT_IN_SNPRINTF_CHK:
14107 case BUILT_IN_VSNPRINTF_CHK:
14108 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14115 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14116 TREE_NO_WARNING (ret) = 1;
14122 /* A wrapper function for builtin folding that prevents warnings for
14123 "statement without effect" and the like, caused by removing the
14124 call node earlier than the warning is generated. */
14127 fold_call_stmt (gimple stmt, bool ignore)
14129 tree ret = NULL_TREE;
14130 tree fndecl = gimple_call_fndecl (stmt);
14131 location_t loc = gimple_location (stmt);
14133 && TREE_CODE (fndecl) == FUNCTION_DECL
14134 && DECL_BUILT_IN (fndecl)
14135 && !gimple_call_va_arg_pack_p (stmt))
14137 int nargs = gimple_call_num_args (stmt);
14139 if (avoid_folding_inline_builtin (fndecl))
14141 /* FIXME: Don't use a list in this interface. */
14142 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
14144 tree arglist = NULL_TREE;
14146 for (i = nargs - 1; i >= 0; i--)
14147 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
14148 return targetm.fold_builtin (fndecl, arglist, ignore);
14152 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
14154 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
14156 for (i = 0; i < nargs; i++)
14157 args[i] = gimple_call_arg (stmt, i);
14158 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
14161 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
14164 /* Propagate location information from original call to
14165 expansion of builtin. Otherwise things like
14166 maybe_emit_chk_warning, that operate on the expansion
14167 of a builtin, will use the wrong location information. */
14168 if (gimple_has_location (stmt))
14170 tree realret = ret;
14171 if (TREE_CODE (ret) == NOP_EXPR)
14172 realret = TREE_OPERAND (ret, 0);
14173 if (CAN_HAVE_LOCATION_P (realret)
14174 && !EXPR_HAS_LOCATION (realret))
14175 SET_EXPR_LOCATION (realret, loc);