1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
62 /* Define the names of the builtin function types and codes. */
63 const char *const built_in_class_names[4]
64 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
66 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
67 const char * built_in_names[(int) END_BUILTINS] =
69 #include "builtins.def"
73 /* Setup an array of _DECL trees, make sure each element is
74 initialized to NULL_TREE. */
75 tree built_in_decls[(int) END_BUILTINS];
76 /* Declarations used when constructing the builtin implicitly in the compiler.
77 It may be NULL_TREE when this is invalid (for instance runtime is not
78 required to implement the function call in all cases). */
79 tree implicit_built_in_decls[(int) END_BUILTINS];
81 static const char *c_getstr (tree);
82 static rtx c_readstr (const char *, enum machine_mode);
83 static int target_char_cast (tree, char *);
84 static rtx get_memory_rtx (tree, tree);
85 static int apply_args_size (void);
86 static int apply_result_size (void);
87 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
88 static rtx result_vector (int, rtx);
90 static void expand_builtin_update_setjmp_buf (rtx);
91 static void expand_builtin_prefetch (tree);
92 static rtx expand_builtin_apply_args (void);
93 static rtx expand_builtin_apply_args_1 (void);
94 static rtx expand_builtin_apply (rtx, rtx, rtx);
95 static void expand_builtin_return (rtx);
96 static enum type_class type_to_class (tree);
97 static rtx expand_builtin_classify_type (tree);
98 static void expand_errno_check (tree, rtx);
99 static rtx expand_builtin_mathfn (tree, rtx, rtx);
100 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
102 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_sincos (tree);
104 static rtx expand_builtin_cexpi (tree, rtx, rtx);
105 static rtx expand_builtin_int_roundingfn (tree, rtx);
106 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
107 static rtx expand_builtin_args_info (tree);
108 static rtx expand_builtin_next_arg (void);
109 static rtx expand_builtin_va_start (tree);
110 static rtx expand_builtin_va_end (tree);
111 static rtx expand_builtin_va_copy (tree);
112 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
118 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
124 enum machine_mode, int);
125 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
126 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
127 enum machine_mode, int);
128 static rtx expand_builtin_bcopy (tree, int);
129 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
130 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
131 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
133 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
134 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
136 static rtx expand_builtin_bzero (tree);
137 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
140 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
141 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
142 static rtx expand_builtin_alloca (tree, rtx);
143 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
144 static rtx expand_builtin_frame_address (tree, tree);
145 static rtx expand_builtin_fputs (tree, rtx, bool);
146 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
147 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
148 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
149 static tree stabilize_va_list (tree, int);
150 static rtx expand_builtin_expect (tree, rtx);
151 static tree fold_builtin_constant_p (tree);
152 static tree fold_builtin_expect (tree, tree);
153 static tree fold_builtin_classify_type (tree);
154 static tree fold_builtin_strlen (tree);
155 static tree fold_builtin_inf (tree, int);
156 static tree fold_builtin_nan (tree, tree, int);
157 static tree rewrite_call_expr (tree, int, tree, int, ...);
158 static bool validate_arg (const_tree, enum tree_code code);
159 static bool integer_valued_real_p (tree);
160 static tree fold_trunc_transparent_mathfn (tree, tree);
161 static bool readonly_data_expr (tree);
162 static rtx expand_builtin_fabs (tree, rtx, rtx);
163 static rtx expand_builtin_signbit (tree, rtx);
164 static tree fold_builtin_sqrt (tree, tree);
165 static tree fold_builtin_cbrt (tree, tree);
166 static tree fold_builtin_pow (tree, tree, tree, tree);
167 static tree fold_builtin_powi (tree, tree, tree, tree);
168 static tree fold_builtin_cos (tree, tree, tree);
169 static tree fold_builtin_cosh (tree, tree, tree);
170 static tree fold_builtin_tan (tree, tree);
171 static tree fold_builtin_trunc (tree, tree);
172 static tree fold_builtin_floor (tree, tree);
173 static tree fold_builtin_ceil (tree, tree);
174 static tree fold_builtin_round (tree, tree);
175 static tree fold_builtin_int_roundingfn (tree, tree);
176 static tree fold_builtin_bitop (tree, tree);
177 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
178 static tree fold_builtin_strchr (tree, tree, tree);
179 static tree fold_builtin_memchr (tree, tree, tree, tree);
180 static tree fold_builtin_memcmp (tree, tree, tree);
181 static tree fold_builtin_strcmp (tree, tree);
182 static tree fold_builtin_strncmp (tree, tree, tree);
183 static tree fold_builtin_signbit (tree, tree);
184 static tree fold_builtin_copysign (tree, tree, tree, tree);
185 static tree fold_builtin_isascii (tree);
186 static tree fold_builtin_toascii (tree);
187 static tree fold_builtin_isdigit (tree);
188 static tree fold_builtin_fabs (tree, tree);
189 static tree fold_builtin_abs (tree, tree);
190 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
192 static tree fold_builtin_n (tree, tree *, int, bool);
193 static tree fold_builtin_0 (tree, bool);
194 static tree fold_builtin_1 (tree, tree, bool);
195 static tree fold_builtin_2 (tree, tree, tree, bool);
196 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
197 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
198 static tree fold_builtin_varargs (tree, tree, bool);
200 static tree fold_builtin_strpbrk (tree, tree, tree);
201 static tree fold_builtin_strstr (tree, tree, tree);
202 static tree fold_builtin_strrchr (tree, tree, tree);
203 static tree fold_builtin_strcat (tree, tree);
204 static tree fold_builtin_strncat (tree, tree, tree);
205 static tree fold_builtin_strspn (tree, tree);
206 static tree fold_builtin_strcspn (tree, tree);
207 static tree fold_builtin_sprintf (tree, tree, tree, int);
209 static rtx expand_builtin_object_size (tree);
210 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
211 enum built_in_function);
212 static void maybe_emit_chk_warning (tree, enum built_in_function);
213 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
214 static void maybe_emit_free_warning (tree);
215 static tree fold_builtin_object_size (tree, tree);
216 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
217 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
218 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
219 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
220 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
221 enum built_in_function);
222 static bool init_target_chars (void);
224 static unsigned HOST_WIDE_INT target_newline;
225 static unsigned HOST_WIDE_INT target_percent;
226 static unsigned HOST_WIDE_INT target_c;
227 static unsigned HOST_WIDE_INT target_s;
228 static char target_percent_c[3];
229 static char target_percent_s[3];
230 static char target_percent_s_newline[4];
231 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
232 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
233 static tree do_mpfr_arg2 (tree, tree, tree,
234 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
235 static tree do_mpfr_arg3 (tree, tree, tree, tree,
236 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
237 static tree do_mpfr_sincos (tree, tree, tree);
238 static tree do_mpfr_bessel_n (tree, tree, tree,
239 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
240 const REAL_VALUE_TYPE *, bool);
241 static tree do_mpfr_remquo (tree, tree, tree);
242 static tree do_mpfr_lgamma_r (tree, tree, tree);
245 is_builtin_name (const char *name)
247 if (strncmp (name, "__builtin_", 10) == 0)
249 if (strncmp (name, "__sync_", 7) == 0)
254 /* Return true if NODE should be considered for inline expansion regardless
255 of the optimization level. This means whenever a function is invoked with
256 its "internal" name, which normally contains the prefix "__builtin". */
259 called_as_built_in (tree node)
261 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
262 we want the name used to call the function, not the name it
264 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
265 return is_builtin_name (name);
268 /* Return the alignment in bits of EXP, an object.
269 Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
270 guessed alignment e.g. from type alignment. */
273 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
278 if (handled_component_p (exp))
280 HOST_WIDE_INT bitsize, bitpos;
282 enum machine_mode mode;
283 int unsignedp, volatilep;
285 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
286 &mode, &unsignedp, &volatilep, true);
288 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
293 if (TREE_CODE (offset) == PLUS_EXPR)
295 next_offset = TREE_OPERAND (offset, 0);
296 offset = TREE_OPERAND (offset, 1);
300 if (host_integerp (offset, 1))
302 /* Any overflow in calculating offset_bits won't change
305 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
308 inner = MIN (inner, (offset_bits & -offset_bits));
310 else if (TREE_CODE (offset) == MULT_EXPR
311 && host_integerp (TREE_OPERAND (offset, 1), 1))
313 /* Any overflow in calculating offset_factor won't change
315 unsigned offset_factor
316 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
320 inner = MIN (inner, (offset_factor & -offset_factor));
324 inner = MIN (inner, BITS_PER_UNIT);
327 offset = next_offset;
331 align = MIN (inner, DECL_ALIGN (exp));
332 #ifdef CONSTANT_ALIGNMENT
333 else if (CONSTANT_CLASS_P (exp))
334 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
336 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
337 || TREE_CODE (exp) == INDIRECT_REF)
338 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
340 align = MIN (align, inner);
341 return MIN (align, max_align);
344 /* Return the alignment in bits of EXP, a pointer valued expression.
345 But don't return more than MAX_ALIGN no matter what.
346 The alignment returned is, by default, the alignment of the thing that
347 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
349 Otherwise, look at the expression to see if we can do better, i.e., if the
350 expression is actually pointing at an object whose alignment is tighter. */
353 get_pointer_alignment (tree exp, unsigned int max_align)
355 unsigned int align, inner;
357 /* We rely on TER to compute accurate alignment information. */
358 if (!(optimize && flag_tree_ter))
361 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
364 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
365 align = MIN (align, max_align);
369 switch (TREE_CODE (exp))
372 exp = TREE_OPERAND (exp, 0);
373 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
376 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
377 align = MIN (inner, max_align);
380 case POINTER_PLUS_EXPR:
381 /* If sum of pointer + int, restrict our maximum alignment to that
382 imposed by the integer. If not, we can't do any better than
384 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
387 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
388 & (max_align / BITS_PER_UNIT - 1))
392 exp = TREE_OPERAND (exp, 0);
396 /* See what we are pointing at and look at its alignment. */
397 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
405 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
406 way, because it could contain a zero byte in the middle.
407 TREE_STRING_LENGTH is the size of the character array, not the string.
409 ONLY_VALUE should be nonzero if the result is not going to be emitted
410 into the instruction stream and zero if it is going to be expanded.
411 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
412 is returned, otherwise NULL, since
413 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
414 evaluate the side-effects.
416 The value returned is of type `ssizetype'.
418 Unfortunately, string_constant can't access the values of const char
419 arrays with initializers, so neither can we do so here. */
422 c_strlen (tree src, int only_value)
425 HOST_WIDE_INT offset;
430 if (TREE_CODE (src) == COND_EXPR
431 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
435 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
436 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
437 if (tree_int_cst_equal (len1, len2))
441 if (TREE_CODE (src) == COMPOUND_EXPR
442 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
443 return c_strlen (TREE_OPERAND (src, 1), only_value);
445 src = string_constant (src, &offset_node);
449 max = TREE_STRING_LENGTH (src) - 1;
450 ptr = TREE_STRING_POINTER (src);
452 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
454 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
455 compute the offset to the following null if we don't know where to
456 start searching for it. */
459 for (i = 0; i < max; i++)
463 /* We don't know the starting offset, but we do know that the string
464 has no internal zero bytes. We can assume that the offset falls
465 within the bounds of the string; otherwise, the programmer deserves
466 what he gets. Subtract the offset from the length of the string,
467 and return that. This would perhaps not be valid if we were dealing
468 with named arrays in addition to literal string constants. */
470 return size_diffop (size_int (max), offset_node);
473 /* We have a known offset into the string. Start searching there for
474 a null character if we can represent it as a single HOST_WIDE_INT. */
475 if (offset_node == 0)
477 else if (! host_integerp (offset_node, 0))
480 offset = tree_low_cst (offset_node, 0);
482 /* If the offset is known to be out of bounds, warn, and call strlen at
484 if (offset < 0 || offset > max)
486 /* Suppress multiple warnings for propagated constant strings. */
487 if (! TREE_NO_WARNING (src))
489 warning (0, "offset outside bounds of constant string");
490 TREE_NO_WARNING (src) = 1;
495 /* Use strlen to search for the first zero byte. Since any strings
496 constructed with build_string will have nulls appended, we win even
497 if we get handed something like (char[4])"abcd".
499 Since OFFSET is our starting index into the string, no further
500 calculation is needed. */
501 return ssize_int (strlen (ptr + offset));
504 /* Return a char pointer for a C string if it is a string constant
505 or sum of string constant and integer constant. */
512 src = string_constant (src, &offset_node);
516 if (offset_node == 0)
517 return TREE_STRING_POINTER (src);
518 else if (!host_integerp (offset_node, 1)
519 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
522 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
525 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
526 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
529 c_readstr (const char *str, enum machine_mode mode)
535 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
540 for (i = 0; i < GET_MODE_SIZE (mode); i++)
543 if (WORDS_BIG_ENDIAN)
544 j = GET_MODE_SIZE (mode) - i - 1;
545 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
546 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
547 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
549 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
552 ch = (unsigned char) str[i];
553 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
555 return immed_double_const (c[0], c[1], mode);
558 /* Cast a target constant CST to target CHAR and if that value fits into
559 host char type, return zero and put that value into variable pointed to by
563 target_char_cast (tree cst, char *p)
565 unsigned HOST_WIDE_INT val, hostval;
567 if (!host_integerp (cst, 1)
568 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
571 val = tree_low_cst (cst, 1);
572 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
573 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
576 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
577 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
586 /* Similar to save_expr, but assumes that arbitrary code is not executed
587 in between the multiple evaluations. In particular, we assume that a
588 non-addressable local variable will not be modified. */
591 builtin_save_expr (tree exp)
593 if (TREE_ADDRESSABLE (exp) == 0
594 && (TREE_CODE (exp) == PARM_DECL
595 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
598 return save_expr (exp);
601 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
602 times to get the address of either a higher stack frame, or a return
603 address located within it (depending on FNDECL_CODE). */
606 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
610 #ifdef INITIAL_FRAME_ADDRESS_RTX
611 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
615 /* For a zero count with __builtin_return_address, we don't care what
616 frame address we return, because target-specific definitions will
617 override us. Therefore frame pointer elimination is OK, and using
618 the soft frame pointer is OK.
620 For a nonzero count, or a zero count with __builtin_frame_address,
621 we require a stable offset from the current frame pointer to the
622 previous one, so we must use the hard frame pointer, and
623 we must disable frame pointer elimination. */
624 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
625 tem = frame_pointer_rtx;
628 tem = hard_frame_pointer_rtx;
630 /* Tell reload not to eliminate the frame pointer. */
631 crtl->accesses_prior_frames = 1;
635 /* Some machines need special handling before we can access
636 arbitrary frames. For example, on the SPARC, we must first flush
637 all register windows to the stack. */
638 #ifdef SETUP_FRAME_ADDRESSES
640 SETUP_FRAME_ADDRESSES ();
643 /* On the SPARC, the return address is not in the frame, it is in a
644 register. There is no way to access it off of the current frame
645 pointer, but it can be accessed off the previous frame pointer by
646 reading the value from the register window save area. */
647 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
648 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
652 /* Scan back COUNT frames to the specified frame. */
653 for (i = 0; i < count; i++)
655 /* Assume the dynamic chain pointer is in the word that the
656 frame address points to, unless otherwise specified. */
657 #ifdef DYNAMIC_CHAIN_ADDRESS
658 tem = DYNAMIC_CHAIN_ADDRESS (tem);
660 tem = memory_address (Pmode, tem);
661 tem = gen_frame_mem (Pmode, tem);
662 tem = copy_to_reg (tem);
665 /* For __builtin_frame_address, return what we've got. But, on
666 the SPARC for example, we may have to add a bias. */
667 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
668 #ifdef FRAME_ADDR_RTX
669 return FRAME_ADDR_RTX (tem);
674 /* For __builtin_return_address, get the return address from that frame. */
675 #ifdef RETURN_ADDR_RTX
676 tem = RETURN_ADDR_RTX (count, tem);
678 tem = memory_address (Pmode,
679 plus_constant (tem, GET_MODE_SIZE (Pmode)));
680 tem = gen_frame_mem (Pmode, tem);
685 /* Alias set used for setjmp buffer. */
686 static alias_set_type setjmp_alias_set = -1;
688 /* Construct the leading half of a __builtin_setjmp call. Control will
689 return to RECEIVER_LABEL. This is also called directly by the SJLJ
690 exception handling code. */
693 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
695 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
699 if (setjmp_alias_set == -1)
700 setjmp_alias_set = new_alias_set ();
702 buf_addr = convert_memory_address (Pmode, buf_addr);
704 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
706 /* We store the frame pointer and the address of receiver_label in
707 the buffer and use the rest of it for the stack save area, which
708 is machine-dependent. */
710 mem = gen_rtx_MEM (Pmode, buf_addr);
711 set_mem_alias_set (mem, setjmp_alias_set);
712 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
714 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
715 set_mem_alias_set (mem, setjmp_alias_set);
717 emit_move_insn (validize_mem (mem),
718 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
720 stack_save = gen_rtx_MEM (sa_mode,
721 plus_constant (buf_addr,
722 2 * GET_MODE_SIZE (Pmode)));
723 set_mem_alias_set (stack_save, setjmp_alias_set);
724 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
726 /* If there is further processing to do, do it. */
727 #ifdef HAVE_builtin_setjmp_setup
728 if (HAVE_builtin_setjmp_setup)
729 emit_insn (gen_builtin_setjmp_setup (buf_addr));
732 /* Tell optimize_save_area_alloca that extra work is going to
733 need to go on during alloca. */
734 cfun->calls_setjmp = 1;
736 /* We have a nonlocal label. */
737 cfun->has_nonlocal_label = 1;
740 /* Construct the trailing part of a __builtin_setjmp call. This is
741 also called directly by the SJLJ exception handling code. */
744 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
746 /* Clobber the FP when we get here, so we have to make sure it's
747 marked as used by this function. */
748 emit_use (hard_frame_pointer_rtx);
750 /* Mark the static chain as clobbered here so life information
751 doesn't get messed up for it. */
752 emit_clobber (static_chain_rtx);
754 /* Now put in the code to restore the frame pointer, and argument
755 pointer, if needed. */
756 #ifdef HAVE_nonlocal_goto
757 if (! HAVE_nonlocal_goto)
760 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
761 /* This might change the hard frame pointer in ways that aren't
762 apparent to early optimization passes, so force a clobber. */
763 emit_clobber (hard_frame_pointer_rtx);
766 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
767 if (fixed_regs[ARG_POINTER_REGNUM])
769 #ifdef ELIMINABLE_REGS
771 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
773 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
774 if (elim_regs[i].from == ARG_POINTER_REGNUM
775 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
778 if (i == ARRAY_SIZE (elim_regs))
781 /* Now restore our arg pointer from the address at which it
782 was saved in our stack frame. */
783 emit_move_insn (crtl->args.internal_arg_pointer,
784 copy_to_reg (get_arg_pointer_save_area ()));
789 #ifdef HAVE_builtin_setjmp_receiver
790 if (HAVE_builtin_setjmp_receiver)
791 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
794 #ifdef HAVE_nonlocal_goto_receiver
795 if (HAVE_nonlocal_goto_receiver)
796 emit_insn (gen_nonlocal_goto_receiver ());
801 /* We must not allow the code we just generated to be reordered by
802 scheduling. Specifically, the update of the frame pointer must
803 happen immediately, not later. */
804 emit_insn (gen_blockage ());
807 /* __builtin_longjmp is passed a pointer to an array of five words (not
808 all will be used on all machines). It operates similarly to the C
809 library function of the same name, but is more efficient. Much of
810 the code below is copied from the handling of non-local gotos. */
813 expand_builtin_longjmp (rtx buf_addr, rtx value)
815 rtx fp, lab, stack, insn, last;
816 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
818 /* DRAP is needed for stack realign if longjmp is expanded to current
820 if (SUPPORTS_STACK_ALIGNMENT)
821 crtl->need_drap = true;
823 if (setjmp_alias_set == -1)
824 setjmp_alias_set = new_alias_set ();
826 buf_addr = convert_memory_address (Pmode, buf_addr);
828 buf_addr = force_reg (Pmode, buf_addr);
830 /* We used to store value in static_chain_rtx, but that fails if pointers
831 are smaller than integers. We instead require that the user must pass
832 a second argument of 1, because that is what builtin_setjmp will
833 return. This also makes EH slightly more efficient, since we are no
834 longer copying around a value that we don't care about. */
835 gcc_assert (value == const1_rtx);
837 last = get_last_insn ();
838 #ifdef HAVE_builtin_longjmp
839 if (HAVE_builtin_longjmp)
840 emit_insn (gen_builtin_longjmp (buf_addr));
844 fp = gen_rtx_MEM (Pmode, buf_addr);
845 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
846 GET_MODE_SIZE (Pmode)));
848 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
849 2 * GET_MODE_SIZE (Pmode)));
850 set_mem_alias_set (fp, setjmp_alias_set);
851 set_mem_alias_set (lab, setjmp_alias_set);
852 set_mem_alias_set (stack, setjmp_alias_set);
854 /* Pick up FP, label, and SP from the block and jump. This code is
855 from expand_goto in stmt.c; see there for detailed comments. */
856 #ifdef HAVE_nonlocal_goto
857 if (HAVE_nonlocal_goto)
858 /* We have to pass a value to the nonlocal_goto pattern that will
859 get copied into the static_chain pointer, but it does not matter
860 what that value is, because builtin_setjmp does not use it. */
861 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
865 lab = copy_to_reg (lab);
867 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
868 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
870 emit_move_insn (hard_frame_pointer_rtx, fp);
871 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
873 emit_use (hard_frame_pointer_rtx);
874 emit_use (stack_pointer_rtx);
875 emit_indirect_jump (lab);
879 /* Search backwards and mark the jump insn as a non-local goto.
880 Note that this precludes the use of __builtin_longjmp to a
881 __builtin_setjmp target in the same function. However, we've
882 already cautioned the user that these functions are for
883 internal exception handling use only. */
884 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
886 gcc_assert (insn != last);
890 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
893 else if (CALL_P (insn))
898 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
899 and the address of the save area. */
902 expand_builtin_nonlocal_goto (tree exp)
904 tree t_label, t_save_area;
905 rtx r_label, r_save_area, r_fp, r_sp, insn;
907 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
910 t_label = CALL_EXPR_ARG (exp, 0);
911 t_save_area = CALL_EXPR_ARG (exp, 1);
913 r_label = expand_normal (t_label);
914 r_label = convert_memory_address (Pmode, r_label);
915 r_save_area = expand_normal (t_save_area);
916 r_save_area = convert_memory_address (Pmode, r_save_area);
917 /* Copy the address of the save location to a register just in case it was based
918 on the frame pointer. */
919 r_save_area = copy_to_reg (r_save_area);
920 r_fp = gen_rtx_MEM (Pmode, r_save_area);
921 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
922 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
924 crtl->has_nonlocal_goto = 1;
926 #ifdef HAVE_nonlocal_goto
927 /* ??? We no longer need to pass the static chain value, afaik. */
928 if (HAVE_nonlocal_goto)
929 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
933 r_label = copy_to_reg (r_label);
935 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
936 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
938 /* Restore frame pointer for containing function.
939 This sets the actual hard register used for the frame pointer
940 to the location of the function's incoming static chain info.
941 The non-local goto handler will then adjust it to contain the
942 proper value and reload the argument pointer, if needed. */
943 emit_move_insn (hard_frame_pointer_rtx, r_fp);
944 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
946 /* USE of hard_frame_pointer_rtx added for consistency;
947 not clear if really needed. */
948 emit_use (hard_frame_pointer_rtx);
949 emit_use (stack_pointer_rtx);
951 /* If the architecture is using a GP register, we must
952 conservatively assume that the target function makes use of it.
953 The prologue of functions with nonlocal gotos must therefore
954 initialize the GP register to the appropriate value, and we
955 must then make sure that this value is live at the point
956 of the jump. (Note that this doesn't necessarily apply
957 to targets with a nonlocal_goto pattern; they are free
958 to implement it in their own way. Note also that this is
959 a no-op if the GP register is a global invariant.) */
960 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
961 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
962 emit_use (pic_offset_table_rtx);
964 emit_indirect_jump (r_label);
967 /* Search backwards to the jump insn and mark it as a
969 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
973 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
976 else if (CALL_P (insn))
983 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
984 (not all will be used on all machines) that was passed to __builtin_setjmp.
985 It updates the stack pointer in that block to correspond to the current
989 expand_builtin_update_setjmp_buf (rtx buf_addr)
991 enum machine_mode sa_mode = Pmode;
995 #ifdef HAVE_save_stack_nonlocal
996 if (HAVE_save_stack_nonlocal)
997 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
999 #ifdef STACK_SAVEAREA_MODE
1000 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1004 = gen_rtx_MEM (sa_mode,
1007 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1011 emit_insn (gen_setjmp ());
1014 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1017 /* Expand a call to __builtin_prefetch. For a target that does not support
1018 data prefetch, evaluate the memory address argument in case it has side
1022 expand_builtin_prefetch (tree exp)
1024 tree arg0, arg1, arg2;
1028 if (!validate_arglist (exp, POINTER_TYPE, 0))
1031 arg0 = CALL_EXPR_ARG (exp, 0);
1033 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1034 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1036 nargs = call_expr_nargs (exp);
1038 arg1 = CALL_EXPR_ARG (exp, 1);
1040 arg1 = integer_zero_node;
1042 arg2 = CALL_EXPR_ARG (exp, 2);
1044 arg2 = build_int_cst (NULL_TREE, 3);
1046 /* Argument 0 is an address. */
1047 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1049 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1050 if (TREE_CODE (arg1) != INTEGER_CST)
1052 error ("second argument to %<__builtin_prefetch%> must be a constant");
1053 arg1 = integer_zero_node;
1055 op1 = expand_normal (arg1);
1056 /* Argument 1 must be either zero or one. */
1057 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1059 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1064 /* Argument 2 (locality) must be a compile-time constant int. */
1065 if (TREE_CODE (arg2) != INTEGER_CST)
1067 error ("third argument to %<__builtin_prefetch%> must be a constant");
1068 arg2 = integer_zero_node;
1070 op2 = expand_normal (arg2);
1071 /* Argument 2 must be 0, 1, 2, or 3. */
1072 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1074 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1078 #ifdef HAVE_prefetch
1081 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1083 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1084 || (GET_MODE (op0) != Pmode))
1086 op0 = convert_memory_address (Pmode, op0);
1087 op0 = force_reg (Pmode, op0);
1089 emit_insn (gen_prefetch (op0, op1, op2));
1093 /* Don't do anything with direct references to volatile memory, but
1094 generate code to handle other side effects. */
1095 if (!MEM_P (op0) && side_effects_p (op0))
1099 /* Get a MEM rtx for expression EXP which is the address of an operand
1100 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1101 the maximum length of the block of memory that might be accessed or
1105 get_memory_rtx (tree exp, tree len)
1107 tree orig_exp = exp;
1111 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1112 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1113 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1114 exp = TREE_OPERAND (exp, 0);
1116 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1117 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1119 /* Get an expression we can use to find the attributes to assign to MEM.
1120 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1121 we can. First remove any nops. */
1122 while (CONVERT_EXPR_P (exp)
1123 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1124 exp = TREE_OPERAND (exp, 0);
1127 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1128 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1129 && host_integerp (TREE_OPERAND (exp, 1), 0)
1130 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1131 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1132 else if (TREE_CODE (exp) == ADDR_EXPR)
1133 exp = TREE_OPERAND (exp, 0);
1134 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1135 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1139 /* Honor attributes derived from exp, except for the alias set
1140 (as builtin stringops may alias with anything) and the size
1141 (as stringops may access multiple array elements). */
1144 set_mem_attributes (mem, exp, 0);
1147 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1149 /* Allow the string and memory builtins to overflow from one
1150 field into another, see http://gcc.gnu.org/PR23561.
1151 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1152 memory accessed by the string or memory builtin will fit
1153 within the field. */
1154 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1156 tree mem_expr = MEM_EXPR (mem);
1157 HOST_WIDE_INT offset = -1, length = -1;
1160 while (TREE_CODE (inner) == ARRAY_REF
1161 || CONVERT_EXPR_P (inner)
1162 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1163 || TREE_CODE (inner) == SAVE_EXPR)
1164 inner = TREE_OPERAND (inner, 0);
1166 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1168 if (MEM_OFFSET (mem)
1169 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1170 offset = INTVAL (MEM_OFFSET (mem));
1172 if (offset >= 0 && len && host_integerp (len, 0))
1173 length = tree_low_cst (len, 0);
1175 while (TREE_CODE (inner) == COMPONENT_REF)
1177 tree field = TREE_OPERAND (inner, 1);
1178 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1179 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1181 /* Bitfields are generally not byte-addressable. */
1182 gcc_assert (!DECL_BIT_FIELD (field)
1183 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1184 % BITS_PER_UNIT) == 0
1185 && host_integerp (DECL_SIZE (field), 0)
1186 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1187 % BITS_PER_UNIT) == 0));
1189 /* If we can prove that the memory starting at XEXP (mem, 0) and
1190 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1191 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1192 fields without DECL_SIZE_UNIT like flexible array members. */
1194 && DECL_SIZE_UNIT (field)
1195 && host_integerp (DECL_SIZE_UNIT (field), 0))
1198 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1201 && offset + length <= size)
1206 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1207 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1208 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1216 mem_expr = TREE_OPERAND (mem_expr, 0);
1217 inner = TREE_OPERAND (inner, 0);
1220 if (mem_expr == NULL)
1222 if (mem_expr != MEM_EXPR (mem))
1224 set_mem_expr (mem, mem_expr);
1225 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1228 set_mem_alias_set (mem, 0);
1229 set_mem_size (mem, NULL_RTX);
1235 /* Built-in functions to perform an untyped call and return. */
1237 /* For each register that may be used for calling a function, this
1238 gives a mode used to copy the register's value. VOIDmode indicates
1239 the register is not used for calling a function. If the machine
1240 has register windows, this gives only the outbound registers.
1241 INCOMING_REGNO gives the corresponding inbound register. */
1242 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1244 /* For each register that may be used for returning values, this gives
1245 a mode used to copy the register's value. VOIDmode indicates the
1246 register is not used for returning values. If the machine has
1247 register windows, this gives only the outbound registers.
1248 INCOMING_REGNO gives the corresponding inbound register. */
1249 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1251 /* For each register that may be used for calling a function, this
1252 gives the offset of that register into the block returned by
1253 __builtin_apply_args. 0 indicates that the register is not
1254 used for calling a function. */
1255 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1257 /* Return the size required for the block returned by __builtin_apply_args,
1258 and initialize apply_args_mode. */
1261 apply_args_size (void)
1263 static int size = -1;
1266 enum machine_mode mode;
1268 /* The values computed by this function never change. */
1271 /* The first value is the incoming arg-pointer. */
1272 size = GET_MODE_SIZE (Pmode);
1274 /* The second value is the structure value address unless this is
1275 passed as an "invisible" first argument. */
1276 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1277 size += GET_MODE_SIZE (Pmode);
1279 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1280 if (FUNCTION_ARG_REGNO_P (regno))
1282 mode = reg_raw_mode[regno];
1284 gcc_assert (mode != VOIDmode);
1286 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1287 if (size % align != 0)
1288 size = CEIL (size, align) * align;
1289 apply_args_reg_offset[regno] = size;
1290 size += GET_MODE_SIZE (mode);
1291 apply_args_mode[regno] = mode;
1295 apply_args_mode[regno] = VOIDmode;
1296 apply_args_reg_offset[regno] = 0;
1302 /* Return the size required for the block returned by __builtin_apply,
1303 and initialize apply_result_mode. */
1306 apply_result_size (void)
1308 static int size = -1;
1310 enum machine_mode mode;
1312 /* The values computed by this function never change. */
1317 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1318 if (FUNCTION_VALUE_REGNO_P (regno))
1320 mode = reg_raw_mode[regno];
1322 gcc_assert (mode != VOIDmode);
1324 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1325 if (size % align != 0)
1326 size = CEIL (size, align) * align;
1327 size += GET_MODE_SIZE (mode);
1328 apply_result_mode[regno] = mode;
1331 apply_result_mode[regno] = VOIDmode;
1333 /* Allow targets that use untyped_call and untyped_return to override
1334 the size so that machine-specific information can be stored here. */
1335 #ifdef APPLY_RESULT_SIZE
1336 size = APPLY_RESULT_SIZE;
1342 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1343 /* Create a vector describing the result block RESULT. If SAVEP is true,
1344 the result block is used to save the values; otherwise it is used to
1345 restore the values. */
1348 result_vector (int savep, rtx result)
1350 int regno, size, align, nelts;
1351 enum machine_mode mode;
1353 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1356 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1357 if ((mode = apply_result_mode[regno]) != VOIDmode)
1359 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1360 if (size % align != 0)
1361 size = CEIL (size, align) * align;
1362 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1363 mem = adjust_address (result, mode, size);
1364 savevec[nelts++] = (savep
1365 ? gen_rtx_SET (VOIDmode, mem, reg)
1366 : gen_rtx_SET (VOIDmode, reg, mem));
1367 size += GET_MODE_SIZE (mode);
1369 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1371 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1373 /* Save the state required to perform an untyped call with the same
1374 arguments as were passed to the current function. */
1377 expand_builtin_apply_args_1 (void)
1380 int size, align, regno;
1381 enum machine_mode mode;
1382 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1384 /* Create a block where the arg-pointer, structure value address,
1385 and argument registers can be saved. */
1386 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1388 /* Walk past the arg-pointer and structure value address. */
1389 size = GET_MODE_SIZE (Pmode);
1390 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1391 size += GET_MODE_SIZE (Pmode);
1393 /* Save each register used in calling a function to the block. */
1394 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1395 if ((mode = apply_args_mode[regno]) != VOIDmode)
1397 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1398 if (size % align != 0)
1399 size = CEIL (size, align) * align;
1401 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1403 emit_move_insn (adjust_address (registers, mode, size), tem);
1404 size += GET_MODE_SIZE (mode);
1407 /* Save the arg pointer to the block. */
1408 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1409 #ifdef STACK_GROWS_DOWNWARD
1410 /* We need the pointer as the caller actually passed them to us, not
1411 as we might have pretended they were passed. Make sure it's a valid
1412 operand, as emit_move_insn isn't expected to handle a PLUS. */
1414 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1417 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1419 size = GET_MODE_SIZE (Pmode);
1421 /* Save the structure value address unless this is passed as an
1422 "invisible" first argument. */
1423 if (struct_incoming_value)
1425 emit_move_insn (adjust_address (registers, Pmode, size),
1426 copy_to_reg (struct_incoming_value));
1427 size += GET_MODE_SIZE (Pmode);
1430 /* Return the address of the block. */
1431 return copy_addr_to_reg (XEXP (registers, 0));
1434 /* __builtin_apply_args returns block of memory allocated on
1435 the stack into which is stored the arg pointer, structure
1436 value address, static chain, and all the registers that might
1437 possibly be used in performing a function call. The code is
1438 moved to the start of the function so the incoming values are
1442 expand_builtin_apply_args (void)
1444 /* Don't do __builtin_apply_args more than once in a function.
1445 Save the result of the first call and reuse it. */
1446 if (apply_args_value != 0)
1447 return apply_args_value;
1449 /* When this function is called, it means that registers must be
1450 saved on entry to this function. So we migrate the
1451 call to the first insn of this function. */
1456 temp = expand_builtin_apply_args_1 ();
1460 apply_args_value = temp;
1462 /* Put the insns after the NOTE that starts the function.
1463 If this is inside a start_sequence, make the outer-level insn
1464 chain current, so the code is placed at the start of the
1465 function. If internal_arg_pointer is a non-virtual pseudo,
1466 it needs to be placed after the function that initializes
1468 push_topmost_sequence ();
1469 if (REG_P (crtl->args.internal_arg_pointer)
1470 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1471 emit_insn_before (seq, parm_birth_insn);
1473 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1474 pop_topmost_sequence ();
1479 /* Perform an untyped call and save the state required to perform an
1480 untyped return of whatever value was returned by the given function. */
1483 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1485 int size, align, regno;
1486 enum machine_mode mode;
1487 rtx incoming_args, result, reg, dest, src, call_insn;
1488 rtx old_stack_level = 0;
1489 rtx call_fusage = 0;
1490 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1492 arguments = convert_memory_address (Pmode, arguments);
1494 /* Create a block where the return registers can be saved. */
1495 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1497 /* Fetch the arg pointer from the ARGUMENTS block. */
1498 incoming_args = gen_reg_rtx (Pmode);
1499 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1500 #ifndef STACK_GROWS_DOWNWARD
1501 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1502 incoming_args, 0, OPTAB_LIB_WIDEN);
1505 /* Push a new argument block and copy the arguments. Do not allow
1506 the (potential) memcpy call below to interfere with our stack
1508 do_pending_stack_adjust ();
1511 /* Save the stack with nonlocal if available. */
1512 #ifdef HAVE_save_stack_nonlocal
1513 if (HAVE_save_stack_nonlocal)
1514 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1517 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1519 /* Allocate a block of memory onto the stack and copy the memory
1520 arguments to the outgoing arguments address. */
1521 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1523 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1524 may have already set current_function_calls_alloca to true.
1525 current_function_calls_alloca won't be set if argsize is zero,
1526 so we have to guarantee need_drap is true here. */
1527 if (SUPPORTS_STACK_ALIGNMENT)
1528 crtl->need_drap = true;
1530 dest = virtual_outgoing_args_rtx;
1531 #ifndef STACK_GROWS_DOWNWARD
1532 if (GET_CODE (argsize) == CONST_INT)
1533 dest = plus_constant (dest, -INTVAL (argsize));
1535 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1537 dest = gen_rtx_MEM (BLKmode, dest);
1538 set_mem_align (dest, PARM_BOUNDARY);
1539 src = gen_rtx_MEM (BLKmode, incoming_args);
1540 set_mem_align (src, PARM_BOUNDARY);
1541 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1543 /* Refer to the argument block. */
1545 arguments = gen_rtx_MEM (BLKmode, arguments);
1546 set_mem_align (arguments, PARM_BOUNDARY);
1548 /* Walk past the arg-pointer and structure value address. */
1549 size = GET_MODE_SIZE (Pmode);
1551 size += GET_MODE_SIZE (Pmode);
1553 /* Restore each of the registers previously saved. Make USE insns
1554 for each of these registers for use in making the call. */
1555 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1556 if ((mode = apply_args_mode[regno]) != VOIDmode)
1558 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1559 if (size % align != 0)
1560 size = CEIL (size, align) * align;
1561 reg = gen_rtx_REG (mode, regno);
1562 emit_move_insn (reg, adjust_address (arguments, mode, size));
1563 use_reg (&call_fusage, reg);
1564 size += GET_MODE_SIZE (mode);
1567 /* Restore the structure value address unless this is passed as an
1568 "invisible" first argument. */
1569 size = GET_MODE_SIZE (Pmode);
1572 rtx value = gen_reg_rtx (Pmode);
1573 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1574 emit_move_insn (struct_value, value);
1575 if (REG_P (struct_value))
1576 use_reg (&call_fusage, struct_value);
1577 size += GET_MODE_SIZE (Pmode);
1580 /* All arguments and registers used for the call are set up by now! */
1581 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1583 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1584 and we don't want to load it into a register as an optimization,
1585 because prepare_call_address already did it if it should be done. */
1586 if (GET_CODE (function) != SYMBOL_REF)
1587 function = memory_address (FUNCTION_MODE, function);
1589 /* Generate the actual call instruction and save the return value. */
1590 #ifdef HAVE_untyped_call
1591 if (HAVE_untyped_call)
1592 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1593 result, result_vector (1, result)));
1596 #ifdef HAVE_call_value
1597 if (HAVE_call_value)
1601 /* Locate the unique return register. It is not possible to
1602 express a call that sets more than one return register using
1603 call_value; use untyped_call for that. In fact, untyped_call
1604 only needs to save the return registers in the given block. */
1605 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1606 if ((mode = apply_result_mode[regno]) != VOIDmode)
1608 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1610 valreg = gen_rtx_REG (mode, regno);
1613 emit_call_insn (GEN_CALL_VALUE (valreg,
1614 gen_rtx_MEM (FUNCTION_MODE, function),
1615 const0_rtx, NULL_RTX, const0_rtx));
1617 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1623 /* Find the CALL insn we just emitted, and attach the register usage
1625 call_insn = last_call_insn ();
1626 add_function_usage_to (call_insn, call_fusage);
1628 /* Restore the stack. */
1629 #ifdef HAVE_save_stack_nonlocal
1630 if (HAVE_save_stack_nonlocal)
1631 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1634 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1638 /* Return the address of the result block. */
1639 result = copy_addr_to_reg (XEXP (result, 0));
1640 return convert_memory_address (ptr_mode, result);
1643 /* Perform an untyped return. */
1646 expand_builtin_return (rtx result)
1648 int size, align, regno;
1649 enum machine_mode mode;
1651 rtx call_fusage = 0;
1653 result = convert_memory_address (Pmode, result);
1655 apply_result_size ();
1656 result = gen_rtx_MEM (BLKmode, result);
1658 #ifdef HAVE_untyped_return
1659 if (HAVE_untyped_return)
1661 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1667 /* Restore the return value and note that each value is used. */
1669 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1670 if ((mode = apply_result_mode[regno]) != VOIDmode)
1672 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1673 if (size % align != 0)
1674 size = CEIL (size, align) * align;
1675 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1676 emit_move_insn (reg, adjust_address (result, mode, size));
1678 push_to_sequence (call_fusage);
1680 call_fusage = get_insns ();
1682 size += GET_MODE_SIZE (mode);
1685 /* Put the USE insns before the return. */
1686 emit_insn (call_fusage);
1688 /* Return whatever values was restored by jumping directly to the end
1690 expand_naked_return ();
1693 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1695 static enum type_class
1696 type_to_class (tree type)
1698 switch (TREE_CODE (type))
1700 case VOID_TYPE: return void_type_class;
1701 case INTEGER_TYPE: return integer_type_class;
1702 case ENUMERAL_TYPE: return enumeral_type_class;
1703 case BOOLEAN_TYPE: return boolean_type_class;
1704 case POINTER_TYPE: return pointer_type_class;
1705 case REFERENCE_TYPE: return reference_type_class;
1706 case OFFSET_TYPE: return offset_type_class;
1707 case REAL_TYPE: return real_type_class;
1708 case COMPLEX_TYPE: return complex_type_class;
1709 case FUNCTION_TYPE: return function_type_class;
1710 case METHOD_TYPE: return method_type_class;
1711 case RECORD_TYPE: return record_type_class;
1713 case QUAL_UNION_TYPE: return union_type_class;
1714 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1715 ? string_type_class : array_type_class);
1716 case LANG_TYPE: return lang_type_class;
1717 default: return no_type_class;
1721 /* Expand a call EXP to __builtin_classify_type. */
1724 expand_builtin_classify_type (tree exp)
1726 if (call_expr_nargs (exp))
1727 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1728 return GEN_INT (no_type_class);
1731 /* This helper macro, meant to be used in mathfn_built_in below,
1732 determines which among a set of three builtin math functions is
1733 appropriate for a given type mode. The `F' and `L' cases are
1734 automatically generated from the `double' case. */
1735 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1736 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1737 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1738 fcodel = BUILT_IN_MATHFN##L ; break;
1739 /* Similar to above, but appends _R after any F/L suffix. */
1740 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1741 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1742 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1743 fcodel = BUILT_IN_MATHFN##L_R ; break;
1745 /* Return mathematic function equivalent to FN but operating directly
1746 on TYPE, if available. If IMPLICIT is true find the function in
1747 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1748 can't do the conversion, return zero. */
1751 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1753 tree const *const fn_arr
1754 = implicit ? implicit_built_in_decls : built_in_decls;
1755 enum built_in_function fcode, fcodef, fcodel;
1759 CASE_MATHFN (BUILT_IN_ACOS)
1760 CASE_MATHFN (BUILT_IN_ACOSH)
1761 CASE_MATHFN (BUILT_IN_ASIN)
1762 CASE_MATHFN (BUILT_IN_ASINH)
1763 CASE_MATHFN (BUILT_IN_ATAN)
1764 CASE_MATHFN (BUILT_IN_ATAN2)
1765 CASE_MATHFN (BUILT_IN_ATANH)
1766 CASE_MATHFN (BUILT_IN_CBRT)
1767 CASE_MATHFN (BUILT_IN_CEIL)
1768 CASE_MATHFN (BUILT_IN_CEXPI)
1769 CASE_MATHFN (BUILT_IN_COPYSIGN)
1770 CASE_MATHFN (BUILT_IN_COS)
1771 CASE_MATHFN (BUILT_IN_COSH)
1772 CASE_MATHFN (BUILT_IN_DREM)
1773 CASE_MATHFN (BUILT_IN_ERF)
1774 CASE_MATHFN (BUILT_IN_ERFC)
1775 CASE_MATHFN (BUILT_IN_EXP)
1776 CASE_MATHFN (BUILT_IN_EXP10)
1777 CASE_MATHFN (BUILT_IN_EXP2)
1778 CASE_MATHFN (BUILT_IN_EXPM1)
1779 CASE_MATHFN (BUILT_IN_FABS)
1780 CASE_MATHFN (BUILT_IN_FDIM)
1781 CASE_MATHFN (BUILT_IN_FLOOR)
1782 CASE_MATHFN (BUILT_IN_FMA)
1783 CASE_MATHFN (BUILT_IN_FMAX)
1784 CASE_MATHFN (BUILT_IN_FMIN)
1785 CASE_MATHFN (BUILT_IN_FMOD)
1786 CASE_MATHFN (BUILT_IN_FREXP)
1787 CASE_MATHFN (BUILT_IN_GAMMA)
1788 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1789 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1790 CASE_MATHFN (BUILT_IN_HYPOT)
1791 CASE_MATHFN (BUILT_IN_ILOGB)
1792 CASE_MATHFN (BUILT_IN_INF)
1793 CASE_MATHFN (BUILT_IN_ISINF)
1794 CASE_MATHFN (BUILT_IN_J0)
1795 CASE_MATHFN (BUILT_IN_J1)
1796 CASE_MATHFN (BUILT_IN_JN)
1797 CASE_MATHFN (BUILT_IN_LCEIL)
1798 CASE_MATHFN (BUILT_IN_LDEXP)
1799 CASE_MATHFN (BUILT_IN_LFLOOR)
1800 CASE_MATHFN (BUILT_IN_LGAMMA)
1801 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1802 CASE_MATHFN (BUILT_IN_LLCEIL)
1803 CASE_MATHFN (BUILT_IN_LLFLOOR)
1804 CASE_MATHFN (BUILT_IN_LLRINT)
1805 CASE_MATHFN (BUILT_IN_LLROUND)
1806 CASE_MATHFN (BUILT_IN_LOG)
1807 CASE_MATHFN (BUILT_IN_LOG10)
1808 CASE_MATHFN (BUILT_IN_LOG1P)
1809 CASE_MATHFN (BUILT_IN_LOG2)
1810 CASE_MATHFN (BUILT_IN_LOGB)
1811 CASE_MATHFN (BUILT_IN_LRINT)
1812 CASE_MATHFN (BUILT_IN_LROUND)
1813 CASE_MATHFN (BUILT_IN_MODF)
1814 CASE_MATHFN (BUILT_IN_NAN)
1815 CASE_MATHFN (BUILT_IN_NANS)
1816 CASE_MATHFN (BUILT_IN_NEARBYINT)
1817 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1818 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1819 CASE_MATHFN (BUILT_IN_POW)
1820 CASE_MATHFN (BUILT_IN_POWI)
1821 CASE_MATHFN (BUILT_IN_POW10)
1822 CASE_MATHFN (BUILT_IN_REMAINDER)
1823 CASE_MATHFN (BUILT_IN_REMQUO)
1824 CASE_MATHFN (BUILT_IN_RINT)
1825 CASE_MATHFN (BUILT_IN_ROUND)
1826 CASE_MATHFN (BUILT_IN_SCALB)
1827 CASE_MATHFN (BUILT_IN_SCALBLN)
1828 CASE_MATHFN (BUILT_IN_SCALBN)
1829 CASE_MATHFN (BUILT_IN_SIGNBIT)
1830 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1831 CASE_MATHFN (BUILT_IN_SIN)
1832 CASE_MATHFN (BUILT_IN_SINCOS)
1833 CASE_MATHFN (BUILT_IN_SINH)
1834 CASE_MATHFN (BUILT_IN_SQRT)
1835 CASE_MATHFN (BUILT_IN_TAN)
1836 CASE_MATHFN (BUILT_IN_TANH)
1837 CASE_MATHFN (BUILT_IN_TGAMMA)
1838 CASE_MATHFN (BUILT_IN_TRUNC)
1839 CASE_MATHFN (BUILT_IN_Y0)
1840 CASE_MATHFN (BUILT_IN_Y1)
1841 CASE_MATHFN (BUILT_IN_YN)
1847 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1848 return fn_arr[fcode];
1849 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1850 return fn_arr[fcodef];
1851 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1852 return fn_arr[fcodel];
1857 /* Like mathfn_built_in_1(), but always use the implicit array. */
1860 mathfn_built_in (tree type, enum built_in_function fn)
1862 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1865 /* If errno must be maintained, expand the RTL to check if the result,
1866 TARGET, of a built-in function call, EXP, is NaN, and if so set
1870 expand_errno_check (tree exp, rtx target)
1872 rtx lab = gen_label_rtx ();
1874 /* Test the result; if it is NaN, set errno=EDOM because
1875 the argument was not in the domain. */
1876 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1880 /* If this built-in doesn't throw an exception, set errno directly. */
1881 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1883 #ifdef GEN_ERRNO_RTX
1884 rtx errno_rtx = GEN_ERRNO_RTX;
1887 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1889 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1895 /* Make sure the library call isn't expanded as a tail call. */
1896 CALL_EXPR_TAILCALL (exp) = 0;
1898 /* We can't set errno=EDOM directly; let the library call do it.
1899 Pop the arguments right away in case the call gets deleted. */
1901 expand_call (exp, target, 0);
1906 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1907 Return NULL_RTX if a normal call should be emitted rather than expanding
1908 the function in-line. EXP is the expression that is a call to the builtin
1909 function; if convenient, the result should be placed in TARGET.
1910 SUBTARGET may be used as the target for computing one of EXP's operands. */
1913 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1915 optab builtin_optab;
1916 rtx op0, insns, before_call;
1917 tree fndecl = get_callee_fndecl (exp);
1918 enum machine_mode mode;
1919 bool errno_set = false;
1922 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1925 arg = CALL_EXPR_ARG (exp, 0);
1927 switch (DECL_FUNCTION_CODE (fndecl))
1929 CASE_FLT_FN (BUILT_IN_SQRT):
1930 errno_set = ! tree_expr_nonnegative_p (arg);
1931 builtin_optab = sqrt_optab;
1933 CASE_FLT_FN (BUILT_IN_EXP):
1934 errno_set = true; builtin_optab = exp_optab; break;
1935 CASE_FLT_FN (BUILT_IN_EXP10):
1936 CASE_FLT_FN (BUILT_IN_POW10):
1937 errno_set = true; builtin_optab = exp10_optab; break;
1938 CASE_FLT_FN (BUILT_IN_EXP2):
1939 errno_set = true; builtin_optab = exp2_optab; break;
1940 CASE_FLT_FN (BUILT_IN_EXPM1):
1941 errno_set = true; builtin_optab = expm1_optab; break;
1942 CASE_FLT_FN (BUILT_IN_LOGB):
1943 errno_set = true; builtin_optab = logb_optab; break;
1944 CASE_FLT_FN (BUILT_IN_LOG):
1945 errno_set = true; builtin_optab = log_optab; break;
1946 CASE_FLT_FN (BUILT_IN_LOG10):
1947 errno_set = true; builtin_optab = log10_optab; break;
1948 CASE_FLT_FN (BUILT_IN_LOG2):
1949 errno_set = true; builtin_optab = log2_optab; break;
1950 CASE_FLT_FN (BUILT_IN_LOG1P):
1951 errno_set = true; builtin_optab = log1p_optab; break;
1952 CASE_FLT_FN (BUILT_IN_ASIN):
1953 builtin_optab = asin_optab; break;
1954 CASE_FLT_FN (BUILT_IN_ACOS):
1955 builtin_optab = acos_optab; break;
1956 CASE_FLT_FN (BUILT_IN_TAN):
1957 builtin_optab = tan_optab; break;
1958 CASE_FLT_FN (BUILT_IN_ATAN):
1959 builtin_optab = atan_optab; break;
1960 CASE_FLT_FN (BUILT_IN_FLOOR):
1961 builtin_optab = floor_optab; break;
1962 CASE_FLT_FN (BUILT_IN_CEIL):
1963 builtin_optab = ceil_optab; break;
1964 CASE_FLT_FN (BUILT_IN_TRUNC):
1965 builtin_optab = btrunc_optab; break;
1966 CASE_FLT_FN (BUILT_IN_ROUND):
1967 builtin_optab = round_optab; break;
1968 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1969 builtin_optab = nearbyint_optab;
1970 if (flag_trapping_math)
1972 /* Else fallthrough and expand as rint. */
1973 CASE_FLT_FN (BUILT_IN_RINT):
1974 builtin_optab = rint_optab; break;
1979 /* Make a suitable register to place result in. */
1980 mode = TYPE_MODE (TREE_TYPE (exp));
1982 if (! flag_errno_math || ! HONOR_NANS (mode))
1985 /* Before working hard, check whether the instruction is available. */
1986 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1988 target = gen_reg_rtx (mode);
1990 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1991 need to expand the argument again. This way, we will not perform
1992 side-effects more the once. */
1993 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1995 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1999 /* Compute into TARGET.
2000 Set TARGET to wherever the result comes back. */
2001 target = expand_unop (mode, builtin_optab, op0, target, 0);
2006 expand_errno_check (exp, target);
2008 /* Output the entire sequence. */
2009 insns = get_insns ();
2015 /* If we were unable to expand via the builtin, stop the sequence
2016 (without outputting the insns) and call to the library function
2017 with the stabilized argument list. */
2021 before_call = get_last_insn ();
2023 return expand_call (exp, target, target == const0_rtx);
2026 /* Expand a call to the builtin binary math functions (pow and atan2).
2027 Return NULL_RTX if a normal call should be emitted rather than expanding the
2028 function in-line. EXP is the expression that is a call to the builtin
2029 function; if convenient, the result should be placed in TARGET.
2030 SUBTARGET may be used as the target for computing one of EXP's
2034 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2036 optab builtin_optab;
2037 rtx op0, op1, insns;
2038 int op1_type = REAL_TYPE;
2039 tree fndecl = get_callee_fndecl (exp);
2041 enum machine_mode mode;
2042 bool errno_set = true;
2044 switch (DECL_FUNCTION_CODE (fndecl))
2046 CASE_FLT_FN (BUILT_IN_SCALBN):
2047 CASE_FLT_FN (BUILT_IN_SCALBLN):
2048 CASE_FLT_FN (BUILT_IN_LDEXP):
2049 op1_type = INTEGER_TYPE;
2054 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2057 arg0 = CALL_EXPR_ARG (exp, 0);
2058 arg1 = CALL_EXPR_ARG (exp, 1);
2060 switch (DECL_FUNCTION_CODE (fndecl))
2062 CASE_FLT_FN (BUILT_IN_POW):
2063 builtin_optab = pow_optab; break;
2064 CASE_FLT_FN (BUILT_IN_ATAN2):
2065 builtin_optab = atan2_optab; break;
2066 CASE_FLT_FN (BUILT_IN_SCALB):
2067 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2069 builtin_optab = scalb_optab; break;
2070 CASE_FLT_FN (BUILT_IN_SCALBN):
2071 CASE_FLT_FN (BUILT_IN_SCALBLN):
2072 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2074 /* Fall through... */
2075 CASE_FLT_FN (BUILT_IN_LDEXP):
2076 builtin_optab = ldexp_optab; break;
2077 CASE_FLT_FN (BUILT_IN_FMOD):
2078 builtin_optab = fmod_optab; break;
2079 CASE_FLT_FN (BUILT_IN_REMAINDER):
2080 CASE_FLT_FN (BUILT_IN_DREM):
2081 builtin_optab = remainder_optab; break;
2086 /* Make a suitable register to place result in. */
2087 mode = TYPE_MODE (TREE_TYPE (exp));
2089 /* Before working hard, check whether the instruction is available. */
2090 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2093 target = gen_reg_rtx (mode);
2095 if (! flag_errno_math || ! HONOR_NANS (mode))
2098 /* Always stabilize the argument list. */
2099 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2100 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2102 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2103 op1 = expand_normal (arg1);
2107 /* Compute into TARGET.
2108 Set TARGET to wherever the result comes back. */
2109 target = expand_binop (mode, builtin_optab, op0, op1,
2110 target, 0, OPTAB_DIRECT);
2112 /* If we were unable to expand via the builtin, stop the sequence
2113 (without outputting the insns) and call to the library function
2114 with the stabilized argument list. */
2118 return expand_call (exp, target, target == const0_rtx);
2122 expand_errno_check (exp, target);
2124 /* Output the entire sequence. */
2125 insns = get_insns ();
2132 /* Expand a call to the builtin sin and cos math functions.
2133 Return NULL_RTX if a normal call should be emitted rather than expanding the
2134 function in-line. EXP is the expression that is a call to the builtin
2135 function; if convenient, the result should be placed in TARGET.
2136 SUBTARGET may be used as the target for computing one of EXP's
2140 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2142 optab builtin_optab;
2144 tree fndecl = get_callee_fndecl (exp);
2145 enum machine_mode mode;
2148 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2151 arg = CALL_EXPR_ARG (exp, 0);
2153 switch (DECL_FUNCTION_CODE (fndecl))
2155 CASE_FLT_FN (BUILT_IN_SIN):
2156 CASE_FLT_FN (BUILT_IN_COS):
2157 builtin_optab = sincos_optab; break;
2162 /* Make a suitable register to place result in. */
2163 mode = TYPE_MODE (TREE_TYPE (exp));
2165 /* Check if sincos insn is available, otherwise fallback
2166 to sin or cos insn. */
2167 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2168 switch (DECL_FUNCTION_CODE (fndecl))
2170 CASE_FLT_FN (BUILT_IN_SIN):
2171 builtin_optab = sin_optab; break;
2172 CASE_FLT_FN (BUILT_IN_COS):
2173 builtin_optab = cos_optab; break;
2178 /* Before working hard, check whether the instruction is available. */
2179 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2181 target = gen_reg_rtx (mode);
2183 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2184 need to expand the argument again. This way, we will not perform
2185 side-effects more the once. */
2186 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2188 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2192 /* Compute into TARGET.
2193 Set TARGET to wherever the result comes back. */
2194 if (builtin_optab == sincos_optab)
2198 switch (DECL_FUNCTION_CODE (fndecl))
2200 CASE_FLT_FN (BUILT_IN_SIN):
2201 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2203 CASE_FLT_FN (BUILT_IN_COS):
2204 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2209 gcc_assert (result);
2213 target = expand_unop (mode, builtin_optab, op0, target, 0);
2218 /* Output the entire sequence. */
2219 insns = get_insns ();
2225 /* If we were unable to expand via the builtin, stop the sequence
2226 (without outputting the insns) and call to the library function
2227 with the stabilized argument list. */
2231 target = expand_call (exp, target, target == const0_rtx);
2236 /* Expand a call to one of the builtin math functions that operate on
2237 floating point argument and output an integer result (ilogb, isinf,
2239 Return 0 if a normal call should be emitted rather than expanding the
2240 function in-line. EXP is the expression that is a call to the builtin
2241 function; if convenient, the result should be placed in TARGET.
2242 SUBTARGET may be used as the target for computing one of EXP's operands. */
2245 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2247 optab builtin_optab = 0;
2248 enum insn_code icode = CODE_FOR_nothing;
2250 tree fndecl = get_callee_fndecl (exp);
2251 enum machine_mode mode;
2252 bool errno_set = false;
2255 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2258 arg = CALL_EXPR_ARG (exp, 0);
2260 switch (DECL_FUNCTION_CODE (fndecl))
2262 CASE_FLT_FN (BUILT_IN_ILOGB):
2263 errno_set = true; builtin_optab = ilogb_optab; break;
2264 CASE_FLT_FN (BUILT_IN_ISINF):
2265 builtin_optab = isinf_optab; break;
2266 case BUILT_IN_ISNORMAL:
2267 case BUILT_IN_ISFINITE:
2268 CASE_FLT_FN (BUILT_IN_FINITE):
2269 /* These builtins have no optabs (yet). */
2275 /* There's no easy way to detect the case we need to set EDOM. */
2276 if (flag_errno_math && errno_set)
2279 /* Optab mode depends on the mode of the input argument. */
2280 mode = TYPE_MODE (TREE_TYPE (arg));
2283 icode = optab_handler (builtin_optab, mode)->insn_code;
2285 /* Before working hard, check whether the instruction is available. */
2286 if (icode != CODE_FOR_nothing)
2288 /* Make a suitable register to place result in. */
2290 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2291 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2293 gcc_assert (insn_data[icode].operand[0].predicate
2294 (target, GET_MODE (target)));
2296 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2297 need to expand the argument again. This way, we will not perform
2298 side-effects more the once. */
2299 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2301 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2303 if (mode != GET_MODE (op0))
2304 op0 = convert_to_mode (mode, op0, 0);
2306 /* Compute into TARGET.
2307 Set TARGET to wherever the result comes back. */
2308 emit_unop_insn (icode, target, op0, UNKNOWN);
2312 /* If there is no optab, try generic code. */
2313 switch (DECL_FUNCTION_CODE (fndecl))
2317 CASE_FLT_FN (BUILT_IN_ISINF):
2319 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2320 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2321 tree const type = TREE_TYPE (arg);
2325 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2326 real_from_string (&r, buf);
2327 result = build_call_expr (isgr_fn, 2,
2328 fold_build1 (ABS_EXPR, type, arg),
2329 build_real (type, r));
2330 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2332 CASE_FLT_FN (BUILT_IN_FINITE):
2333 case BUILT_IN_ISFINITE:
2335 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2336 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2337 tree const type = TREE_TYPE (arg);
2341 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2342 real_from_string (&r, buf);
2343 result = build_call_expr (isle_fn, 2,
2344 fold_build1 (ABS_EXPR, type, arg),
2345 build_real (type, r));
2346 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2348 case BUILT_IN_ISNORMAL:
2350 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2351 islessequal(fabs(x),DBL_MAX). */
2352 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2353 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2354 tree const type = TREE_TYPE (arg);
2355 REAL_VALUE_TYPE rmax, rmin;
2358 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2359 real_from_string (&rmax, buf);
2360 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2361 real_from_string (&rmin, buf);
2362 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2363 result = build_call_expr (isle_fn, 2, arg,
2364 build_real (type, rmax));
2365 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2366 build_call_expr (isge_fn, 2, arg,
2367 build_real (type, rmin)));
2368 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2374 target = expand_call (exp, target, target == const0_rtx);
2379 /* Expand a call to the builtin sincos math function.
2380 Return NULL_RTX if a normal call should be emitted rather than expanding the
2381 function in-line. EXP is the expression that is a call to the builtin
2385 expand_builtin_sincos (tree exp)
2387 rtx op0, op1, op2, target1, target2;
2388 enum machine_mode mode;
2389 tree arg, sinp, cosp;
2392 if (!validate_arglist (exp, REAL_TYPE,
2393 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2396 arg = CALL_EXPR_ARG (exp, 0);
2397 sinp = CALL_EXPR_ARG (exp, 1);
2398 cosp = CALL_EXPR_ARG (exp, 2);
2400 /* Make a suitable register to place result in. */
2401 mode = TYPE_MODE (TREE_TYPE (arg));
2403 /* Check if sincos insn is available, otherwise emit the call. */
2404 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2407 target1 = gen_reg_rtx (mode);
2408 target2 = gen_reg_rtx (mode);
2410 op0 = expand_normal (arg);
2411 op1 = expand_normal (build_fold_indirect_ref (sinp));
2412 op2 = expand_normal (build_fold_indirect_ref (cosp));
2414 /* Compute into target1 and target2.
2415 Set TARGET to wherever the result comes back. */
2416 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2417 gcc_assert (result);
2419 /* Move target1 and target2 to the memory locations indicated
2421 emit_move_insn (op1, target1);
2422 emit_move_insn (op2, target2);
2427 /* Expand a call to the internal cexpi builtin to the sincos math function.
2428 EXP is the expression that is a call to the builtin function; if convenient,
2429 the result should be placed in TARGET. SUBTARGET may be used as the target
2430 for computing one of EXP's operands. */
2433 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2435 tree fndecl = get_callee_fndecl (exp);
2437 enum machine_mode mode;
2440 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2443 arg = CALL_EXPR_ARG (exp, 0);
2444 type = TREE_TYPE (arg);
2445 mode = TYPE_MODE (TREE_TYPE (arg));
2447 /* Try expanding via a sincos optab, fall back to emitting a libcall
2448 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2449 is only generated from sincos, cexp or if we have either of them. */
2450 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2452 op1 = gen_reg_rtx (mode);
2453 op2 = gen_reg_rtx (mode);
2455 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2457 /* Compute into op1 and op2. */
2458 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2460 else if (TARGET_HAS_SINCOS)
2462 tree call, fn = NULL_TREE;
2466 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2467 fn = built_in_decls[BUILT_IN_SINCOSF];
2468 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2469 fn = built_in_decls[BUILT_IN_SINCOS];
2470 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2471 fn = built_in_decls[BUILT_IN_SINCOSL];
2475 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2476 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2477 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2478 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2479 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2480 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2482 /* Make sure not to fold the sincos call again. */
2483 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2484 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2485 call, 3, arg, top1, top2));
2489 tree call, fn = NULL_TREE, narg;
2490 tree ctype = build_complex_type (type);
2492 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2493 fn = built_in_decls[BUILT_IN_CEXPF];
2494 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2495 fn = built_in_decls[BUILT_IN_CEXP];
2496 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2497 fn = built_in_decls[BUILT_IN_CEXPL];
2501 /* If we don't have a decl for cexp create one. This is the
2502 friendliest fallback if the user calls __builtin_cexpi
2503 without full target C99 function support. */
2504 if (fn == NULL_TREE)
2507 const char *name = NULL;
2509 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2511 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2513 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2516 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2517 fn = build_fn_decl (name, fntype);
2520 narg = fold_build2 (COMPLEX_EXPR, ctype,
2521 build_real (type, dconst0), arg);
2523 /* Make sure not to fold the cexp call again. */
2524 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2525 return expand_expr (build_call_nary (ctype, call, 1, narg),
2526 target, VOIDmode, EXPAND_NORMAL);
2529 /* Now build the proper return type. */
2530 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2531 make_tree (TREE_TYPE (arg), op2),
2532 make_tree (TREE_TYPE (arg), op1)),
2533 target, VOIDmode, EXPAND_NORMAL);
2536 /* Expand a call to one of the builtin rounding functions gcc defines
2537 as an extension (lfloor and lceil). As these are gcc extensions we
2538 do not need to worry about setting errno to EDOM.
2539 If expanding via optab fails, lower expression to (int)(floor(x)).
2540 EXP is the expression that is a call to the builtin function;
2541 if convenient, the result should be placed in TARGET. */
2544 expand_builtin_int_roundingfn (tree exp, rtx target)
2546 convert_optab builtin_optab;
2547 rtx op0, insns, tmp;
2548 tree fndecl = get_callee_fndecl (exp);
2549 enum built_in_function fallback_fn;
2550 tree fallback_fndecl;
2551 enum machine_mode mode;
2554 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2557 arg = CALL_EXPR_ARG (exp, 0);
2559 switch (DECL_FUNCTION_CODE (fndecl))
2561 CASE_FLT_FN (BUILT_IN_LCEIL):
2562 CASE_FLT_FN (BUILT_IN_LLCEIL):
2563 builtin_optab = lceil_optab;
2564 fallback_fn = BUILT_IN_CEIL;
2567 CASE_FLT_FN (BUILT_IN_LFLOOR):
2568 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2569 builtin_optab = lfloor_optab;
2570 fallback_fn = BUILT_IN_FLOOR;
2577 /* Make a suitable register to place result in. */
2578 mode = TYPE_MODE (TREE_TYPE (exp));
2580 target = gen_reg_rtx (mode);
2582 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2583 need to expand the argument again. This way, we will not perform
2584 side-effects more the once. */
2585 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2587 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2591 /* Compute into TARGET. */
2592 if (expand_sfix_optab (target, op0, builtin_optab))
2594 /* Output the entire sequence. */
2595 insns = get_insns ();
2601 /* If we were unable to expand via the builtin, stop the sequence
2602 (without outputting the insns). */
2605 /* Fall back to floating point rounding optab. */
2606 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2608 /* For non-C99 targets we may end up without a fallback fndecl here
2609 if the user called __builtin_lfloor directly. In this case emit
2610 a call to the floor/ceil variants nevertheless. This should result
2611 in the best user experience for not full C99 targets. */
2612 if (fallback_fndecl == NULL_TREE)
2615 const char *name = NULL;
2617 switch (DECL_FUNCTION_CODE (fndecl))
2619 case BUILT_IN_LCEIL:
2620 case BUILT_IN_LLCEIL:
2623 case BUILT_IN_LCEILF:
2624 case BUILT_IN_LLCEILF:
2627 case BUILT_IN_LCEILL:
2628 case BUILT_IN_LLCEILL:
2631 case BUILT_IN_LFLOOR:
2632 case BUILT_IN_LLFLOOR:
2635 case BUILT_IN_LFLOORF:
2636 case BUILT_IN_LLFLOORF:
2639 case BUILT_IN_LFLOORL:
2640 case BUILT_IN_LLFLOORL:
2647 fntype = build_function_type_list (TREE_TYPE (arg),
2648 TREE_TYPE (arg), NULL_TREE);
2649 fallback_fndecl = build_fn_decl (name, fntype);
2652 exp = build_call_expr (fallback_fndecl, 1, arg);
2654 tmp = expand_normal (exp);
2656 /* Truncate the result of floating point optab to integer
2657 via expand_fix (). */
2658 target = gen_reg_rtx (mode);
2659 expand_fix (target, tmp, 0);
2664 /* Expand a call to one of the builtin math functions doing integer
2666 Return 0 if a normal call should be emitted rather than expanding the
2667 function in-line. EXP is the expression that is a call to the builtin
2668 function; if convenient, the result should be placed in TARGET. */
2671 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2673 convert_optab builtin_optab;
2675 tree fndecl = get_callee_fndecl (exp);
2677 enum machine_mode mode;
2679 /* There's no easy way to detect the case we need to set EDOM. */
2680 if (flag_errno_math)
2683 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2686 arg = CALL_EXPR_ARG (exp, 0);
2688 switch (DECL_FUNCTION_CODE (fndecl))
2690 CASE_FLT_FN (BUILT_IN_LRINT):
2691 CASE_FLT_FN (BUILT_IN_LLRINT):
2692 builtin_optab = lrint_optab; break;
2693 CASE_FLT_FN (BUILT_IN_LROUND):
2694 CASE_FLT_FN (BUILT_IN_LLROUND):
2695 builtin_optab = lround_optab; break;
2700 /* Make a suitable register to place result in. */
2701 mode = TYPE_MODE (TREE_TYPE (exp));
2703 target = gen_reg_rtx (mode);
2705 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2706 need to expand the argument again. This way, we will not perform
2707 side-effects more the once. */
2708 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2710 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2714 if (expand_sfix_optab (target, op0, builtin_optab))
2716 /* Output the entire sequence. */
2717 insns = get_insns ();
2723 /* If we were unable to expand via the builtin, stop the sequence
2724 (without outputting the insns) and call to the library function
2725 with the stabilized argument list. */
2728 target = expand_call (exp, target, target == const0_rtx);
2733 /* To evaluate powi(x,n), the floating point value x raised to the
2734 constant integer exponent n, we use a hybrid algorithm that
2735 combines the "window method" with look-up tables. For an
2736 introduction to exponentiation algorithms and "addition chains",
2737 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2738 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2739 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2740 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2742 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2743 multiplications to inline before calling the system library's pow
2744 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2745 so this default never requires calling pow, powf or powl. */
2747 #ifndef POWI_MAX_MULTS
2748 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2751 /* The size of the "optimal power tree" lookup table. All
2752 exponents less than this value are simply looked up in the
2753 powi_table below. This threshold is also used to size the
2754 cache of pseudo registers that hold intermediate results. */
2755 #define POWI_TABLE_SIZE 256
2757 /* The size, in bits of the window, used in the "window method"
2758 exponentiation algorithm. This is equivalent to a radix of
2759 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2760 #define POWI_WINDOW_SIZE 3
2762 /* The following table is an efficient representation of an
2763 "optimal power tree". For each value, i, the corresponding
2764 value, j, in the table states than an optimal evaluation
2765 sequence for calculating pow(x,i) can be found by evaluating
2766 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2767 100 integers is given in Knuth's "Seminumerical algorithms". */
2769 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2771 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2772 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2773 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2774 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2775 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2776 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2777 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2778 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2779 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2780 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2781 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2782 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2783 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2784 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2785 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2786 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2787 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2788 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2789 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2790 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2791 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2792 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2793 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2794 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2795 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2796 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2797 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2798 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2799 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2800 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2801 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2802 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2806 /* Return the number of multiplications required to calculate
2807 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2808 subroutine of powi_cost. CACHE is an array indicating
2809 which exponents have already been calculated. */
2812 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2814 /* If we've already calculated this exponent, then this evaluation
2815 doesn't require any additional multiplications. */
2820 return powi_lookup_cost (n - powi_table[n], cache)
2821 + powi_lookup_cost (powi_table[n], cache) + 1;
2824 /* Return the number of multiplications required to calculate
2825 powi(x,n) for an arbitrary x, given the exponent N. This
2826 function needs to be kept in sync with expand_powi below. */
2829 powi_cost (HOST_WIDE_INT n)
2831 bool cache[POWI_TABLE_SIZE];
2832 unsigned HOST_WIDE_INT digit;
2833 unsigned HOST_WIDE_INT val;
2839 /* Ignore the reciprocal when calculating the cost. */
2840 val = (n < 0) ? -n : n;
2842 /* Initialize the exponent cache. */
2843 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2848 while (val >= POWI_TABLE_SIZE)
2852 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2853 result += powi_lookup_cost (digit, cache)
2854 + POWI_WINDOW_SIZE + 1;
2855 val >>= POWI_WINDOW_SIZE;
2864 return result + powi_lookup_cost (val, cache);
2867 /* Recursive subroutine of expand_powi. This function takes the array,
2868 CACHE, of already calculated exponents and an exponent N and returns
2869 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2872 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2874 unsigned HOST_WIDE_INT digit;
2878 if (n < POWI_TABLE_SIZE)
2883 target = gen_reg_rtx (mode);
2886 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2887 op1 = expand_powi_1 (mode, powi_table[n], cache);
2891 target = gen_reg_rtx (mode);
2892 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2893 op0 = expand_powi_1 (mode, n - digit, cache);
2894 op1 = expand_powi_1 (mode, digit, cache);
2898 target = gen_reg_rtx (mode);
2899 op0 = expand_powi_1 (mode, n >> 1, cache);
2903 result = expand_mult (mode, op0, op1, target, 0);
2904 if (result != target)
2905 emit_move_insn (target, result);
2909 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2910 floating point operand in mode MODE, and N is the exponent. This
2911 function needs to be kept in sync with powi_cost above. */
2914 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2916 unsigned HOST_WIDE_INT val;
2917 rtx cache[POWI_TABLE_SIZE];
2921 return CONST1_RTX (mode);
2923 val = (n < 0) ? -n : n;
2925 memset (cache, 0, sizeof (cache));
2928 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2930 /* If the original exponent was negative, reciprocate the result. */
2932 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2933 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2938 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2939 a normal call should be emitted rather than expanding the function
2940 in-line. EXP is the expression that is a call to the builtin
2941 function; if convenient, the result should be placed in TARGET. */
2944 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2948 tree type = TREE_TYPE (exp);
2949 REAL_VALUE_TYPE cint, c, c2;
2952 enum machine_mode mode = TYPE_MODE (type);
2954 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2957 arg0 = CALL_EXPR_ARG (exp, 0);
2958 arg1 = CALL_EXPR_ARG (exp, 1);
2960 if (TREE_CODE (arg1) != REAL_CST
2961 || TREE_OVERFLOW (arg1))
2962 return expand_builtin_mathfn_2 (exp, target, subtarget);
2964 /* Handle constant exponents. */
2966 /* For integer valued exponents we can expand to an optimal multiplication
2967 sequence using expand_powi. */
2968 c = TREE_REAL_CST (arg1);
2969 n = real_to_integer (&c);
2970 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2971 if (real_identical (&c, &cint)
2972 && ((n >= -1 && n <= 2)
2973 || (flag_unsafe_math_optimizations
2974 && optimize_insn_for_speed_p ()
2975 && powi_cost (n) <= POWI_MAX_MULTS)))
2977 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2980 op = force_reg (mode, op);
2981 op = expand_powi (op, mode, n);
2986 narg0 = builtin_save_expr (arg0);
2988 /* If the exponent is not integer valued, check if it is half of an integer.
2989 In this case we can expand to sqrt (x) * x**(n/2). */
2990 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2991 if (fn != NULL_TREE)
2993 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2994 n = real_to_integer (&c2);
2995 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2996 if (real_identical (&c2, &cint)
2997 && ((flag_unsafe_math_optimizations
2998 && optimize_insn_for_speed_p ()
2999 && powi_cost (n/2) <= POWI_MAX_MULTS)
3002 tree call_expr = build_call_expr (fn, 1, narg0);
3003 /* Use expand_expr in case the newly built call expression
3004 was folded to a non-call. */
3005 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3008 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3009 op2 = force_reg (mode, op2);
3010 op2 = expand_powi (op2, mode, abs (n / 2));
3011 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3012 0, OPTAB_LIB_WIDEN);
3013 /* If the original exponent was negative, reciprocate the
3016 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3017 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3023 /* Try if the exponent is a third of an integer. In this case
3024 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3025 different from pow (x, 1./3.) due to rounding and behavior
3026 with negative x we need to constrain this transformation to
3027 unsafe math and positive x or finite math. */
3028 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3030 && flag_unsafe_math_optimizations
3031 && (tree_expr_nonnegative_p (arg0)
3032 || !HONOR_NANS (mode)))
3034 REAL_VALUE_TYPE dconst3;
3035 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3036 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3037 real_round (&c2, mode, &c2);
3038 n = real_to_integer (&c2);
3039 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3040 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3041 real_convert (&c2, mode, &c2);
3042 if (real_identical (&c2, &c)
3043 && ((optimize_insn_for_speed_p ()
3044 && powi_cost (n/3) <= POWI_MAX_MULTS)
3047 tree call_expr = build_call_expr (fn, 1,narg0);
3048 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3049 if (abs (n) % 3 == 2)
3050 op = expand_simple_binop (mode, MULT, op, op, op,
3051 0, OPTAB_LIB_WIDEN);
3054 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3055 op2 = force_reg (mode, op2);
3056 op2 = expand_powi (op2, mode, abs (n / 3));
3057 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3058 0, OPTAB_LIB_WIDEN);
3059 /* If the original exponent was negative, reciprocate the
3062 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3063 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3069 /* Fall back to optab expansion. */
3070 return expand_builtin_mathfn_2 (exp, target, subtarget);
3073 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3074 a normal call should be emitted rather than expanding the function
3075 in-line. EXP is the expression that is a call to the builtin
3076 function; if convenient, the result should be placed in TARGET. */
3079 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3083 enum machine_mode mode;
3084 enum machine_mode mode2;
3086 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3089 arg0 = CALL_EXPR_ARG (exp, 0);
3090 arg1 = CALL_EXPR_ARG (exp, 1);
3091 mode = TYPE_MODE (TREE_TYPE (exp));
3093 /* Handle constant power. */
3095 if (TREE_CODE (arg1) == INTEGER_CST
3096 && !TREE_OVERFLOW (arg1))
3098 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3100 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3101 Otherwise, check the number of multiplications required. */
3102 if ((TREE_INT_CST_HIGH (arg1) == 0
3103 || TREE_INT_CST_HIGH (arg1) == -1)
3104 && ((n >= -1 && n <= 2)
3105 || (optimize_insn_for_speed_p ()
3106 && powi_cost (n) <= POWI_MAX_MULTS)))
3108 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3109 op0 = force_reg (mode, op0);
3110 return expand_powi (op0, mode, n);
3114 /* Emit a libcall to libgcc. */
3116 /* Mode of the 2nd argument must match that of an int. */
3117 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3119 if (target == NULL_RTX)
3120 target = gen_reg_rtx (mode);
3122 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3123 if (GET_MODE (op0) != mode)
3124 op0 = convert_to_mode (mode, op0, 0);
3125 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3126 if (GET_MODE (op1) != mode2)
3127 op1 = convert_to_mode (mode2, op1, 0);
3129 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3130 target, LCT_CONST, mode, 2,
3131 op0, mode, op1, mode2);
3136 /* Expand expression EXP which is a call to the strlen builtin. Return
3137 NULL_RTX if we failed the caller should emit a normal call, otherwise
3138 try to get the result in TARGET, if convenient. */
3141 expand_builtin_strlen (tree exp, rtx target,
3142 enum machine_mode target_mode)
3144 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3150 tree src = CALL_EXPR_ARG (exp, 0);
3151 rtx result, src_reg, char_rtx, before_strlen;
3152 enum machine_mode insn_mode = target_mode, char_mode;
3153 enum insn_code icode = CODE_FOR_nothing;
3156 /* If the length can be computed at compile-time, return it. */
3157 len = c_strlen (src, 0);
3159 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3161 /* If the length can be computed at compile-time and is constant
3162 integer, but there are side-effects in src, evaluate
3163 src for side-effects, then return len.
3164 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3165 can be optimized into: i++; x = 3; */
3166 len = c_strlen (src, 1);
3167 if (len && TREE_CODE (len) == INTEGER_CST)
3169 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3170 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3173 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3175 /* If SRC is not a pointer type, don't do this operation inline. */
3179 /* Bail out if we can't compute strlen in the right mode. */
3180 while (insn_mode != VOIDmode)
3182 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3183 if (icode != CODE_FOR_nothing)
3186 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3188 if (insn_mode == VOIDmode)
3191 /* Make a place to write the result of the instruction. */
3195 && GET_MODE (result) == insn_mode
3196 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3197 result = gen_reg_rtx (insn_mode);
3199 /* Make a place to hold the source address. We will not expand
3200 the actual source until we are sure that the expansion will
3201 not fail -- there are trees that cannot be expanded twice. */
3202 src_reg = gen_reg_rtx (Pmode);
3204 /* Mark the beginning of the strlen sequence so we can emit the
3205 source operand later. */
3206 before_strlen = get_last_insn ();
3208 char_rtx = const0_rtx;
3209 char_mode = insn_data[(int) icode].operand[2].mode;
3210 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3212 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3214 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3215 char_rtx, GEN_INT (align));
3220 /* Now that we are assured of success, expand the source. */
3222 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3224 emit_move_insn (src_reg, pat);
3229 emit_insn_after (pat, before_strlen);
3231 emit_insn_before (pat, get_insns ());
3233 /* Return the value in the proper mode for this function. */
3234 if (GET_MODE (result) == target_mode)
3236 else if (target != 0)
3237 convert_move (target, result, 0);
3239 target = convert_to_mode (target_mode, result, 0);
3245 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3246 caller should emit a normal call, otherwise try to get the result
3247 in TARGET, if convenient (and in mode MODE if that's convenient). */
3250 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3252 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3254 tree type = TREE_TYPE (exp);
3255 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3256 CALL_EXPR_ARG (exp, 1), type);
3258 return expand_expr (result, target, mode, EXPAND_NORMAL);
3263 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3264 caller should emit a normal call, otherwise try to get the result
3265 in TARGET, if convenient (and in mode MODE if that's convenient). */
3268 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3270 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3272 tree type = TREE_TYPE (exp);
3273 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3274 CALL_EXPR_ARG (exp, 1), type);
3276 return expand_expr (result, target, mode, EXPAND_NORMAL);
3278 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3283 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3284 caller should emit a normal call, otherwise try to get the result
3285 in TARGET, if convenient (and in mode MODE if that's convenient). */
3288 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3290 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3292 tree type = TREE_TYPE (exp);
3293 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3294 CALL_EXPR_ARG (exp, 1), type);
3296 return expand_expr (result, target, mode, EXPAND_NORMAL);
3301 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3302 caller should emit a normal call, otherwise try to get the result
3303 in TARGET, if convenient (and in mode MODE if that's convenient). */
3306 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3308 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3310 tree type = TREE_TYPE (exp);
3311 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3312 CALL_EXPR_ARG (exp, 1), type);
3314 return expand_expr (result, target, mode, EXPAND_NORMAL);
3319 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3320 bytes from constant string DATA + OFFSET and return it as target
3324 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3325 enum machine_mode mode)
3327 const char *str = (const char *) data;
3329 gcc_assert (offset >= 0
3330 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3331 <= strlen (str) + 1));
3333 return c_readstr (str + offset, mode);
3336 /* Expand a call EXP to the memcpy builtin.
3337 Return NULL_RTX if we failed, the caller should emit a normal call,
3338 otherwise try to get the result in TARGET, if convenient (and in
3339 mode MODE if that's convenient). */
3342 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3344 tree fndecl = get_callee_fndecl (exp);
3346 if (!validate_arglist (exp,
3347 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3351 tree dest = CALL_EXPR_ARG (exp, 0);
3352 tree src = CALL_EXPR_ARG (exp, 1);
3353 tree len = CALL_EXPR_ARG (exp, 2);
3354 const char *src_str;
3355 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3356 unsigned int dest_align
3357 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3358 rtx dest_mem, src_mem, dest_addr, len_rtx;
3359 tree result = fold_builtin_memory_op (dest, src, len,
3360 TREE_TYPE (TREE_TYPE (fndecl)),
3362 HOST_WIDE_INT expected_size = -1;
3363 unsigned int expected_align = 0;
3364 tree_ann_common_t ann;
3368 while (TREE_CODE (result) == COMPOUND_EXPR)
3370 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3372 result = TREE_OPERAND (result, 1);
3374 return expand_expr (result, target, mode, EXPAND_NORMAL);
3377 /* If DEST is not a pointer type, call the normal function. */
3378 if (dest_align == 0)
3381 /* If either SRC is not a pointer type, don't do this
3382 operation in-line. */
3386 ann = tree_common_ann (exp);
3388 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3390 if (expected_align < dest_align)
3391 expected_align = dest_align;
3392 dest_mem = get_memory_rtx (dest, len);
3393 set_mem_align (dest_mem, dest_align);
3394 len_rtx = expand_normal (len);
3395 src_str = c_getstr (src);
3397 /* If SRC is a string constant and block move would be done
3398 by pieces, we can avoid loading the string from memory
3399 and only stored the computed constants. */
3401 && GET_CODE (len_rtx) == CONST_INT
3402 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3403 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3404 CONST_CAST (char *, src_str),
3407 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3408 builtin_memcpy_read_str,
3409 CONST_CAST (char *, src_str),
3410 dest_align, false, 0);
3411 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3412 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3416 src_mem = get_memory_rtx (src, len);
3417 set_mem_align (src_mem, src_align);
3419 /* Copy word part most expediently. */
3420 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3421 CALL_EXPR_TAILCALL (exp)
3422 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3423 expected_align, expected_size);
3427 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3428 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3434 /* Expand a call EXP to the mempcpy builtin.
3435 Return NULL_RTX if we failed; the caller should emit a normal call,
3436 otherwise try to get the result in TARGET, if convenient (and in
3437 mode MODE if that's convenient). If ENDP is 0 return the
3438 destination pointer, if ENDP is 1 return the end pointer ala
3439 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3443 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3445 if (!validate_arglist (exp,
3446 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3450 tree dest = CALL_EXPR_ARG (exp, 0);
3451 tree src = CALL_EXPR_ARG (exp, 1);
3452 tree len = CALL_EXPR_ARG (exp, 2);
3453 return expand_builtin_mempcpy_args (dest, src, len,
3455 target, mode, /*endp=*/ 1);
3459 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3460 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3461 so that this can also be called without constructing an actual CALL_EXPR.
3462 TYPE is the return type of the call. The other arguments and return value
3463 are the same as for expand_builtin_mempcpy. */
3466 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3467 rtx target, enum machine_mode mode, int endp)
3469 /* If return value is ignored, transform mempcpy into memcpy. */
3470 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3472 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3473 tree result = build_call_expr (fn, 3, dest, src, len);
3475 while (TREE_CODE (result) == COMPOUND_EXPR)
3477 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3479 result = TREE_OPERAND (result, 1);
3481 return expand_expr (result, target, mode, EXPAND_NORMAL);
3485 const char *src_str;
3486 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3487 unsigned int dest_align
3488 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3489 rtx dest_mem, src_mem, len_rtx;
3490 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3494 while (TREE_CODE (result) == COMPOUND_EXPR)
3496 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3498 result = TREE_OPERAND (result, 1);
3500 return expand_expr (result, target, mode, EXPAND_NORMAL);
3503 /* If either SRC or DEST is not a pointer type, don't do this
3504 operation in-line. */
3505 if (dest_align == 0 || src_align == 0)
3508 /* If LEN is not constant, call the normal function. */
3509 if (! host_integerp (len, 1))
3512 len_rtx = expand_normal (len);
3513 src_str = c_getstr (src);
3515 /* If SRC is a string constant and block move would be done
3516 by pieces, we can avoid loading the string from memory
3517 and only stored the computed constants. */
3519 && GET_CODE (len_rtx) == CONST_INT
3520 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3521 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3522 CONST_CAST (char *, src_str),
3525 dest_mem = get_memory_rtx (dest, len);
3526 set_mem_align (dest_mem, dest_align);
3527 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3528 builtin_memcpy_read_str,
3529 CONST_CAST (char *, src_str),
3530 dest_align, false, endp);
3531 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3532 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3536 if (GET_CODE (len_rtx) == CONST_INT
3537 && can_move_by_pieces (INTVAL (len_rtx),
3538 MIN (dest_align, src_align)))
3540 dest_mem = get_memory_rtx (dest, len);
3541 set_mem_align (dest_mem, dest_align);
3542 src_mem = get_memory_rtx (src, len);
3543 set_mem_align (src_mem, src_align);
3544 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3545 MIN (dest_align, src_align), endp);
3546 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3547 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3555 /* Expand expression EXP, which is a call to the memmove builtin. Return
3556 NULL_RTX if we failed; the caller should emit a normal call. */
3559 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3561 if (!validate_arglist (exp,
3562 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3566 tree dest = CALL_EXPR_ARG (exp, 0);
3567 tree src = CALL_EXPR_ARG (exp, 1);
3568 tree len = CALL_EXPR_ARG (exp, 2);
3569 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3570 target, mode, ignore);
3574 /* Helper function to do the actual work for expand_builtin_memmove. The
3575 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3576 so that this can also be called without constructing an actual CALL_EXPR.
3577 TYPE is the return type of the call. The other arguments and return value
3578 are the same as for expand_builtin_memmove. */
3581 expand_builtin_memmove_args (tree dest, tree src, tree len,
3582 tree type, rtx target, enum machine_mode mode,
3585 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3589 STRIP_TYPE_NOPS (result);
3590 while (TREE_CODE (result) == COMPOUND_EXPR)
3592 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3594 result = TREE_OPERAND (result, 1);
3596 return expand_expr (result, target, mode, EXPAND_NORMAL);
3599 /* Otherwise, call the normal function. */
3603 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3604 NULL_RTX if we failed the caller should emit a normal call. */
3607 expand_builtin_bcopy (tree exp, int ignore)
3609 tree type = TREE_TYPE (exp);
3610 tree src, dest, size;
3612 if (!validate_arglist (exp,
3613 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3616 src = CALL_EXPR_ARG (exp, 0);
3617 dest = CALL_EXPR_ARG (exp, 1);
3618 size = CALL_EXPR_ARG (exp, 2);
3620 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3621 This is done this way so that if it isn't expanded inline, we fall
3622 back to calling bcopy instead of memmove. */
3623 return expand_builtin_memmove_args (dest, src,
3624 fold_convert (sizetype, size),
3625 type, const0_rtx, VOIDmode,
3630 # define HAVE_movstr 0
3631 # define CODE_FOR_movstr CODE_FOR_nothing
3634 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3635 we failed, the caller should emit a normal call, otherwise try to
3636 get the result in TARGET, if convenient. If ENDP is 0 return the
3637 destination pointer, if ENDP is 1 return the end pointer ala
3638 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3642 expand_movstr (tree dest, tree src, rtx target, int endp)
3648 const struct insn_data * data;
3653 dest_mem = get_memory_rtx (dest, NULL);
3654 src_mem = get_memory_rtx (src, NULL);
3657 target = force_reg (Pmode, XEXP (dest_mem, 0));
3658 dest_mem = replace_equiv_address (dest_mem, target);
3659 end = gen_reg_rtx (Pmode);
3663 if (target == 0 || target == const0_rtx)
3665 end = gen_reg_rtx (Pmode);
3673 data = insn_data + CODE_FOR_movstr;
3675 if (data->operand[0].mode != VOIDmode)
3676 end = gen_lowpart (data->operand[0].mode, end);
3678 insn = data->genfun (end, dest_mem, src_mem);
3684 /* movstr is supposed to set end to the address of the NUL
3685 terminator. If the caller requested a mempcpy-like return value,
3687 if (endp == 1 && target != const0_rtx)
3689 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3690 emit_move_insn (target, force_operand (tem, NULL_RTX));
3696 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3697 NULL_RTX if we failed the caller should emit a normal call, otherwise
3698 try to get the result in TARGET, if convenient (and in mode MODE if that's
3702 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3704 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3706 tree dest = CALL_EXPR_ARG (exp, 0);
3707 tree src = CALL_EXPR_ARG (exp, 1);
3708 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3713 /* Helper function to do the actual work for expand_builtin_strcpy. The
3714 arguments to the builtin_strcpy call DEST and SRC are broken out
3715 so that this can also be called without constructing an actual CALL_EXPR.
3716 The other arguments and return value are the same as for
3717 expand_builtin_strcpy. */
3720 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3721 rtx target, enum machine_mode mode)
3723 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3725 return expand_expr (result, target, mode, EXPAND_NORMAL);
3726 return expand_movstr (dest, src, target, /*endp=*/0);
3730 /* Expand a call EXP to the stpcpy builtin.
3731 Return NULL_RTX if we failed the caller should emit a normal call,
3732 otherwise try to get the result in TARGET, if convenient (and in
3733 mode MODE if that's convenient). */
3736 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3740 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3743 dst = CALL_EXPR_ARG (exp, 0);
3744 src = CALL_EXPR_ARG (exp, 1);
3746 /* If return value is ignored, transform stpcpy into strcpy. */
3747 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3749 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3750 tree result = build_call_expr (fn, 2, dst, src);
3752 STRIP_NOPS (result);
3753 while (TREE_CODE (result) == COMPOUND_EXPR)
3755 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3757 result = TREE_OPERAND (result, 1);
3759 return expand_expr (result, target, mode, EXPAND_NORMAL);
3766 /* Ensure we get an actual string whose length can be evaluated at
3767 compile-time, not an expression containing a string. This is
3768 because the latter will potentially produce pessimized code
3769 when used to produce the return value. */
3770 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3771 return expand_movstr (dst, src, target, /*endp=*/2);
3773 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3774 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3775 target, mode, /*endp=*/2);
3780 if (TREE_CODE (len) == INTEGER_CST)
3782 rtx len_rtx = expand_normal (len);
3784 if (GET_CODE (len_rtx) == CONST_INT)
3786 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3787 dst, src, target, mode);
3793 if (mode != VOIDmode)
3794 target = gen_reg_rtx (mode);
3796 target = gen_reg_rtx (GET_MODE (ret));
3798 if (GET_MODE (target) != GET_MODE (ret))
3799 ret = gen_lowpart (GET_MODE (target), ret);
3801 ret = plus_constant (ret, INTVAL (len_rtx));
3802 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3810 return expand_movstr (dst, src, target, /*endp=*/2);
3814 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3815 bytes from constant string DATA + OFFSET and return it as target
3819 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3820 enum machine_mode mode)
3822 const char *str = (const char *) data;
3824 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3827 return c_readstr (str + offset, mode);
3830 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3831 NULL_RTX if we failed the caller should emit a normal call. */
3834 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3836 tree fndecl = get_callee_fndecl (exp);
3838 if (validate_arglist (exp,
3839 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3841 tree dest = CALL_EXPR_ARG (exp, 0);
3842 tree src = CALL_EXPR_ARG (exp, 1);
3843 tree len = CALL_EXPR_ARG (exp, 2);
3844 tree slen = c_strlen (src, 1);
3845 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3849 while (TREE_CODE (result) == COMPOUND_EXPR)
3851 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3853 result = TREE_OPERAND (result, 1);
3855 return expand_expr (result, target, mode, EXPAND_NORMAL);
3858 /* We must be passed a constant len and src parameter. */
3859 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3862 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3864 /* We're required to pad with trailing zeros if the requested
3865 len is greater than strlen(s2)+1. In that case try to
3866 use store_by_pieces, if it fails, punt. */
3867 if (tree_int_cst_lt (slen, len))
3869 unsigned int dest_align
3870 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3871 const char *p = c_getstr (src);
3874 if (!p || dest_align == 0 || !host_integerp (len, 1)
3875 || !can_store_by_pieces (tree_low_cst (len, 1),
3876 builtin_strncpy_read_str,
3877 CONST_CAST (char *, p),
3881 dest_mem = get_memory_rtx (dest, len);
3882 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3883 builtin_strncpy_read_str,
3884 CONST_CAST (char *, p), dest_align, false, 0);
3885 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3886 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3893 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3894 bytes from constant string DATA + OFFSET and return it as target
3898 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3899 enum machine_mode mode)
3901 const char *c = (const char *) data;
3902 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3904 memset (p, *c, GET_MODE_SIZE (mode));
3906 return c_readstr (p, mode);
3909 /* Callback routine for store_by_pieces. Return the RTL of a register
3910 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3911 char value given in the RTL register data. For example, if mode is
3912 4 bytes wide, return the RTL for 0x01010101*data. */
3915 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3916 enum machine_mode mode)
3922 size = GET_MODE_SIZE (mode);
3926 p = XALLOCAVEC (char, size);
3927 memset (p, 1, size);
3928 coeff = c_readstr (p, mode);
3930 target = convert_to_mode (mode, (rtx) data, 1);
3931 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3932 return force_reg (mode, target);
3935 /* Expand expression EXP, which is a call to the memset builtin. Return
3936 NULL_RTX if we failed the caller should emit a normal call, otherwise
3937 try to get the result in TARGET, if convenient (and in mode MODE if that's
3941 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3943 if (!validate_arglist (exp,
3944 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3948 tree dest = CALL_EXPR_ARG (exp, 0);
3949 tree val = CALL_EXPR_ARG (exp, 1);
3950 tree len = CALL_EXPR_ARG (exp, 2);
3951 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3955 /* Helper function to do the actual work for expand_builtin_memset. The
3956 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3957 so that this can also be called without constructing an actual CALL_EXPR.
3958 The other arguments and return value are the same as for
3959 expand_builtin_memset. */
3962 expand_builtin_memset_args (tree dest, tree val, tree len,
3963 rtx target, enum machine_mode mode, tree orig_exp)
3966 enum built_in_function fcode;
3968 unsigned int dest_align;
3969 rtx dest_mem, dest_addr, len_rtx;
3970 HOST_WIDE_INT expected_size = -1;
3971 unsigned int expected_align = 0;
3972 tree_ann_common_t ann;
3974 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3976 /* If DEST is not a pointer type, don't do this operation in-line. */
3977 if (dest_align == 0)
3980 ann = tree_common_ann (orig_exp);
3982 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3984 if (expected_align < dest_align)
3985 expected_align = dest_align;
3987 /* If the LEN parameter is zero, return DEST. */
3988 if (integer_zerop (len))
3990 /* Evaluate and ignore VAL in case it has side-effects. */
3991 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3992 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3995 /* Stabilize the arguments in case we fail. */
3996 dest = builtin_save_expr (dest);
3997 val = builtin_save_expr (val);
3998 len = builtin_save_expr (len);
4000 len_rtx = expand_normal (len);
4001 dest_mem = get_memory_rtx (dest, len);
4003 if (TREE_CODE (val) != INTEGER_CST)
4007 val_rtx = expand_normal (val);
4008 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
4011 /* Assume that we can memset by pieces if we can store
4012 * the coefficients by pieces (in the required modes).
4013 * We can't pass builtin_memset_gen_str as that emits RTL. */
4015 if (host_integerp (len, 1)
4016 && can_store_by_pieces (tree_low_cst (len, 1),
4017 builtin_memset_read_str, &c, dest_align,
4020 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
4022 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4023 builtin_memset_gen_str, val_rtx, dest_align,
4026 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4027 dest_align, expected_align,
4031 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4032 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4036 if (target_char_cast (val, &c))
4041 if (host_integerp (len, 1)
4042 && can_store_by_pieces (tree_low_cst (len, 1),
4043 builtin_memset_read_str, &c, dest_align,
4045 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4046 builtin_memset_read_str, &c, dest_align, true, 0);
4047 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
4048 dest_align, expected_align,
4052 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4053 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4057 set_mem_align (dest_mem, dest_align);
4058 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4059 CALL_EXPR_TAILCALL (orig_exp)
4060 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4061 expected_align, expected_size);
4065 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4066 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4072 fndecl = get_callee_fndecl (orig_exp);
4073 fcode = DECL_FUNCTION_CODE (fndecl);
4074 if (fcode == BUILT_IN_MEMSET)
4075 fn = build_call_expr (fndecl, 3, dest, val, len);
4076 else if (fcode == BUILT_IN_BZERO)
4077 fn = build_call_expr (fndecl, 2, dest, len);
4080 if (TREE_CODE (fn) == CALL_EXPR)
4081 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4082 return expand_call (fn, target, target == const0_rtx);
4085 /* Expand expression EXP, which is a call to the bzero builtin. Return
4086 NULL_RTX if we failed the caller should emit a normal call. */
4089 expand_builtin_bzero (tree exp)
4093 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4096 dest = CALL_EXPR_ARG (exp, 0);
4097 size = CALL_EXPR_ARG (exp, 1);
4099 /* New argument list transforming bzero(ptr x, int y) to
4100 memset(ptr x, int 0, size_t y). This is done this way
4101 so that if it isn't expanded inline, we fallback to
4102 calling bzero instead of memset. */
4104 return expand_builtin_memset_args (dest, integer_zero_node,
4105 fold_convert (sizetype, size),
4106 const0_rtx, VOIDmode, exp);
4109 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4110 caller should emit a normal call, otherwise try to get the result
4111 in TARGET, if convenient (and in mode MODE if that's convenient). */
4114 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4116 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4117 INTEGER_TYPE, VOID_TYPE))
4119 tree type = TREE_TYPE (exp);
4120 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4121 CALL_EXPR_ARG (exp, 1),
4122 CALL_EXPR_ARG (exp, 2), type);
4124 return expand_expr (result, target, mode, EXPAND_NORMAL);
4129 /* Expand expression EXP, which is a call to the memcmp built-in function.
4130 Return NULL_RTX if we failed and the
4131 caller should emit a normal call, otherwise try to get the result in
4132 TARGET, if convenient (and in mode MODE, if that's convenient). */
4135 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4137 if (!validate_arglist (exp,
4138 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4142 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4143 CALL_EXPR_ARG (exp, 1),
4144 CALL_EXPR_ARG (exp, 2));
4146 return expand_expr (result, target, mode, EXPAND_NORMAL);
4149 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4151 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4154 tree arg1 = CALL_EXPR_ARG (exp, 0);
4155 tree arg2 = CALL_EXPR_ARG (exp, 1);
4156 tree len = CALL_EXPR_ARG (exp, 2);
4159 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4161 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4162 enum machine_mode insn_mode;
4164 #ifdef HAVE_cmpmemsi
4166 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4169 #ifdef HAVE_cmpstrnsi
4171 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4176 /* If we don't have POINTER_TYPE, call the function. */
4177 if (arg1_align == 0 || arg2_align == 0)
4180 /* Make a place to write the result of the instruction. */
4183 && REG_P (result) && GET_MODE (result) == insn_mode
4184 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4185 result = gen_reg_rtx (insn_mode);
4187 arg1_rtx = get_memory_rtx (arg1, len);
4188 arg2_rtx = get_memory_rtx (arg2, len);
4189 arg3_rtx = expand_normal (len);
4191 /* Set MEM_SIZE as appropriate. */
4192 if (GET_CODE (arg3_rtx) == CONST_INT)
4194 set_mem_size (arg1_rtx, arg3_rtx);
4195 set_mem_size (arg2_rtx, arg3_rtx);
4198 #ifdef HAVE_cmpmemsi
4200 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4201 GEN_INT (MIN (arg1_align, arg2_align)));
4204 #ifdef HAVE_cmpstrnsi
4206 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4207 GEN_INT (MIN (arg1_align, arg2_align)));
4215 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4216 TYPE_MODE (integer_type_node), 3,
4217 XEXP (arg1_rtx, 0), Pmode,
4218 XEXP (arg2_rtx, 0), Pmode,
4219 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4220 TYPE_UNSIGNED (sizetype)),
4221 TYPE_MODE (sizetype));
4223 /* Return the value in the proper mode for this function. */
4224 mode = TYPE_MODE (TREE_TYPE (exp));
4225 if (GET_MODE (result) == mode)
4227 else if (target != 0)
4229 convert_move (target, result, 0);
4233 return convert_to_mode (mode, result, 0);
4240 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4241 if we failed the caller should emit a normal call, otherwise try to get
4242 the result in TARGET, if convenient. */
4245 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4247 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4251 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4252 CALL_EXPR_ARG (exp, 1));
4254 return expand_expr (result, target, mode, EXPAND_NORMAL);
4257 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4258 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4259 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4261 rtx arg1_rtx, arg2_rtx;
4262 rtx result, insn = NULL_RTX;
4264 tree arg1 = CALL_EXPR_ARG (exp, 0);
4265 tree arg2 = CALL_EXPR_ARG (exp, 1);
4268 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4270 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4272 /* If we don't have POINTER_TYPE, call the function. */
4273 if (arg1_align == 0 || arg2_align == 0)
4276 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4277 arg1 = builtin_save_expr (arg1);
4278 arg2 = builtin_save_expr (arg2);
4280 arg1_rtx = get_memory_rtx (arg1, NULL);
4281 arg2_rtx = get_memory_rtx (arg2, NULL);
4283 #ifdef HAVE_cmpstrsi
4284 /* Try to call cmpstrsi. */
4287 enum machine_mode insn_mode
4288 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4290 /* Make a place to write the result of the instruction. */
4293 && REG_P (result) && GET_MODE (result) == insn_mode
4294 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4295 result = gen_reg_rtx (insn_mode);
4297 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4298 GEN_INT (MIN (arg1_align, arg2_align)));
4301 #ifdef HAVE_cmpstrnsi
4302 /* Try to determine at least one length and call cmpstrnsi. */
4303 if (!insn && HAVE_cmpstrnsi)
4308 enum machine_mode insn_mode
4309 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4310 tree len1 = c_strlen (arg1, 1);
4311 tree len2 = c_strlen (arg2, 1);
4314 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4316 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4318 /* If we don't have a constant length for the first, use the length
4319 of the second, if we know it. We don't require a constant for
4320 this case; some cost analysis could be done if both are available
4321 but neither is constant. For now, assume they're equally cheap,
4322 unless one has side effects. If both strings have constant lengths,
4329 else if (TREE_SIDE_EFFECTS (len1))
4331 else if (TREE_SIDE_EFFECTS (len2))
4333 else if (TREE_CODE (len1) != INTEGER_CST)
4335 else if (TREE_CODE (len2) != INTEGER_CST)
4337 else if (tree_int_cst_lt (len1, len2))
4342 /* If both arguments have side effects, we cannot optimize. */
4343 if (!len || TREE_SIDE_EFFECTS (len))
4346 arg3_rtx = expand_normal (len);
4348 /* Make a place to write the result of the instruction. */
4351 && REG_P (result) && GET_MODE (result) == insn_mode
4352 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4353 result = gen_reg_rtx (insn_mode);
4355 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4356 GEN_INT (MIN (arg1_align, arg2_align)));
4364 /* Return the value in the proper mode for this function. */
4365 mode = TYPE_MODE (TREE_TYPE (exp));
4366 if (GET_MODE (result) == mode)
4369 return convert_to_mode (mode, result, 0);
4370 convert_move (target, result, 0);
4374 /* Expand the library call ourselves using a stabilized argument
4375 list to avoid re-evaluating the function's arguments twice. */
4376 #ifdef HAVE_cmpstrnsi
4379 fndecl = get_callee_fndecl (exp);
4380 fn = build_call_expr (fndecl, 2, arg1, arg2);
4381 if (TREE_CODE (fn) == CALL_EXPR)
4382 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4383 return expand_call (fn, target, target == const0_rtx);
4389 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4390 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4391 the result in TARGET, if convenient. */
4394 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4396 if (!validate_arglist (exp,
4397 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4401 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4402 CALL_EXPR_ARG (exp, 1),
4403 CALL_EXPR_ARG (exp, 2));
4405 return expand_expr (result, target, mode, EXPAND_NORMAL);
4408 /* If c_strlen can determine an expression for one of the string
4409 lengths, and it doesn't have side effects, then emit cmpstrnsi
4410 using length MIN(strlen(string)+1, arg3). */
4411 #ifdef HAVE_cmpstrnsi
4414 tree len, len1, len2;
4415 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4418 tree arg1 = CALL_EXPR_ARG (exp, 0);
4419 tree arg2 = CALL_EXPR_ARG (exp, 1);
4420 tree arg3 = CALL_EXPR_ARG (exp, 2);
4423 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4425 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4426 enum machine_mode insn_mode
4427 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4429 len1 = c_strlen (arg1, 1);
4430 len2 = c_strlen (arg2, 1);
4433 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4435 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4437 /* If we don't have a constant length for the first, use the length
4438 of the second, if we know it. We don't require a constant for
4439 this case; some cost analysis could be done if both are available
4440 but neither is constant. For now, assume they're equally cheap,
4441 unless one has side effects. If both strings have constant lengths,
4448 else if (TREE_SIDE_EFFECTS (len1))
4450 else if (TREE_SIDE_EFFECTS (len2))
4452 else if (TREE_CODE (len1) != INTEGER_CST)
4454 else if (TREE_CODE (len2) != INTEGER_CST)
4456 else if (tree_int_cst_lt (len1, len2))
4461 /* If both arguments have side effects, we cannot optimize. */
4462 if (!len || TREE_SIDE_EFFECTS (len))
4465 /* The actual new length parameter is MIN(len,arg3). */
4466 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4467 fold_convert (TREE_TYPE (len), arg3));
4469 /* If we don't have POINTER_TYPE, call the function. */
4470 if (arg1_align == 0 || arg2_align == 0)
4473 /* Make a place to write the result of the instruction. */
4476 && REG_P (result) && GET_MODE (result) == insn_mode
4477 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4478 result = gen_reg_rtx (insn_mode);
4480 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4481 arg1 = builtin_save_expr (arg1);
4482 arg2 = builtin_save_expr (arg2);
4483 len = builtin_save_expr (len);
4485 arg1_rtx = get_memory_rtx (arg1, len);
4486 arg2_rtx = get_memory_rtx (arg2, len);
4487 arg3_rtx = expand_normal (len);
4488 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4489 GEN_INT (MIN (arg1_align, arg2_align)));
4494 /* Return the value in the proper mode for this function. */
4495 mode = TYPE_MODE (TREE_TYPE (exp));
4496 if (GET_MODE (result) == mode)
4499 return convert_to_mode (mode, result, 0);
4500 convert_move (target, result, 0);
4504 /* Expand the library call ourselves using a stabilized argument
4505 list to avoid re-evaluating the function's arguments twice. */
4506 fndecl = get_callee_fndecl (exp);
4507 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4508 if (TREE_CODE (fn) == CALL_EXPR)
4509 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4510 return expand_call (fn, target, target == const0_rtx);
4516 /* Expand expression EXP, which is a call to the strcat builtin.
4517 Return NULL_RTX if we failed the caller should emit a normal call,
4518 otherwise try to get the result in TARGET, if convenient. */
4521 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4523 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4527 tree dst = CALL_EXPR_ARG (exp, 0);
4528 tree src = CALL_EXPR_ARG (exp, 1);
4529 const char *p = c_getstr (src);
4531 /* If the string length is zero, return the dst parameter. */
4532 if (p && *p == '\0')
4533 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4535 if (optimize_insn_for_speed_p ())
4537 /* See if we can store by pieces into (dst + strlen(dst)). */
4538 tree newsrc, newdst,
4539 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4542 /* Stabilize the argument list. */
4543 newsrc = builtin_save_expr (src);
4544 dst = builtin_save_expr (dst);
4548 /* Create strlen (dst). */
4549 newdst = build_call_expr (strlen_fn, 1, dst);
4550 /* Create (dst p+ strlen (dst)). */
4552 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4553 newdst = builtin_save_expr (newdst);
4555 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4557 end_sequence (); /* Stop sequence. */
4561 /* Output the entire sequence. */
4562 insns = get_insns ();
4566 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4573 /* Expand expression EXP, which is a call to the strncat builtin.
4574 Return NULL_RTX if we failed the caller should emit a normal call,
4575 otherwise try to get the result in TARGET, if convenient. */
4578 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4580 if (validate_arglist (exp,
4581 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4583 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4584 CALL_EXPR_ARG (exp, 1),
4585 CALL_EXPR_ARG (exp, 2));
4587 return expand_expr (result, target, mode, EXPAND_NORMAL);
4592 /* Expand expression EXP, which is a call to the strspn builtin.
4593 Return NULL_RTX if we failed the caller should emit a normal call,
4594 otherwise try to get the result in TARGET, if convenient. */
4597 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4599 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4601 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4602 CALL_EXPR_ARG (exp, 1));
4604 return expand_expr (result, target, mode, EXPAND_NORMAL);
4609 /* Expand expression EXP, which is a call to the strcspn builtin.
4610 Return NULL_RTX if we failed the caller should emit a normal call,
4611 otherwise try to get the result in TARGET, if convenient. */
4614 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4616 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4618 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4619 CALL_EXPR_ARG (exp, 1));
4621 return expand_expr (result, target, mode, EXPAND_NORMAL);
4626 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4627 if that's convenient. */
4630 expand_builtin_saveregs (void)
4634 /* Don't do __builtin_saveregs more than once in a function.
4635 Save the result of the first call and reuse it. */
4636 if (saveregs_value != 0)
4637 return saveregs_value;
4639 /* When this function is called, it means that registers must be
4640 saved on entry to this function. So we migrate the call to the
4641 first insn of this function. */
4645 /* Do whatever the machine needs done in this case. */
4646 val = targetm.calls.expand_builtin_saveregs ();
4651 saveregs_value = val;
4653 /* Put the insns after the NOTE that starts the function. If this
4654 is inside a start_sequence, make the outer-level insn chain current, so
4655 the code is placed at the start of the function. */
4656 push_topmost_sequence ();
4657 emit_insn_after (seq, entry_of_function ());
4658 pop_topmost_sequence ();
4663 /* __builtin_args_info (N) returns word N of the arg space info
4664 for the current function. The number and meanings of words
4665 is controlled by the definition of CUMULATIVE_ARGS. */
4668 expand_builtin_args_info (tree exp)
4670 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4671 int *word_ptr = (int *) &crtl->args.info;
4673 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4675 if (call_expr_nargs (exp) != 0)
4677 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4678 error ("argument of %<__builtin_args_info%> must be constant");
4681 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4683 if (wordnum < 0 || wordnum >= nwords)
4684 error ("argument of %<__builtin_args_info%> out of range");
4686 return GEN_INT (word_ptr[wordnum]);
4690 error ("missing argument in %<__builtin_args_info%>");
4695 /* Expand a call to __builtin_next_arg. */
4698 expand_builtin_next_arg (void)
4700 /* Checking arguments is already done in fold_builtin_next_arg
4701 that must be called before this function. */
4702 return expand_binop (ptr_mode, add_optab,
4703 crtl->args.internal_arg_pointer,
4704 crtl->args.arg_offset_rtx,
4705 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4708 /* Make it easier for the backends by protecting the valist argument
4709 from multiple evaluations. */
4712 stabilize_va_list (tree valist, int needs_lvalue)
4714 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4716 gcc_assert (vatype != NULL_TREE);
4718 if (TREE_CODE (vatype) == ARRAY_TYPE)
4720 if (TREE_SIDE_EFFECTS (valist))
4721 valist = save_expr (valist);
4723 /* For this case, the backends will be expecting a pointer to
4724 vatype, but it's possible we've actually been given an array
4725 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4727 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4729 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4730 valist = build_fold_addr_expr_with_type (valist, p1);
4739 if (! TREE_SIDE_EFFECTS (valist))
4742 pt = build_pointer_type (vatype);
4743 valist = fold_build1 (ADDR_EXPR, pt, valist);
4744 TREE_SIDE_EFFECTS (valist) = 1;
4747 if (TREE_SIDE_EFFECTS (valist))
4748 valist = save_expr (valist);
4749 valist = build_fold_indirect_ref (valist);
4755 /* The "standard" definition of va_list is void*. */
4758 std_build_builtin_va_list (void)
4760 return ptr_type_node;
4763 /* The "standard" abi va_list is va_list_type_node. */
4766 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4768 return va_list_type_node;
4771 /* The "standard" type of va_list is va_list_type_node. */
4774 std_canonical_va_list_type (tree type)
4778 if (INDIRECT_REF_P (type))
4779 type = TREE_TYPE (type);
4780 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4781 type = TREE_TYPE (type);
4782 wtype = va_list_type_node;
4784 /* Treat structure va_list types. */
4785 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4786 htype = TREE_TYPE (htype);
4787 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4789 /* If va_list is an array type, the argument may have decayed
4790 to a pointer type, e.g. by being passed to another function.
4791 In that case, unwrap both types so that we can compare the
4792 underlying records. */
4793 if (TREE_CODE (htype) == ARRAY_TYPE
4794 || POINTER_TYPE_P (htype))
4796 wtype = TREE_TYPE (wtype);
4797 htype = TREE_TYPE (htype);
4800 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4801 return va_list_type_node;
4806 /* The "standard" implementation of va_start: just assign `nextarg' to
4810 std_expand_builtin_va_start (tree valist, rtx nextarg)
4812 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4813 convert_move (va_r, nextarg, 0);
4816 /* Expand EXP, a call to __builtin_va_start. */
4819 expand_builtin_va_start (tree exp)
4824 if (call_expr_nargs (exp) < 2)
4826 error ("too few arguments to function %<va_start%>");
4830 if (fold_builtin_next_arg (exp, true))
4833 nextarg = expand_builtin_next_arg ();
4834 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4836 if (targetm.expand_builtin_va_start)
4837 targetm.expand_builtin_va_start (valist, nextarg);
4839 std_expand_builtin_va_start (valist, nextarg);
4844 /* The "standard" implementation of va_arg: read the value from the
4845 current (padded) address and increment by the (padded) size. */
4848 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4851 tree addr, t, type_size, rounded_size, valist_tmp;
4852 unsigned HOST_WIDE_INT align, boundary;
4855 #ifdef ARGS_GROW_DOWNWARD
4856 /* All of the alignment and movement below is for args-grow-up machines.
4857 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4858 implement their own specialized gimplify_va_arg_expr routines. */
4862 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4864 type = build_pointer_type (type);
4866 align = PARM_BOUNDARY / BITS_PER_UNIT;
4867 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4869 /* When we align parameter on stack for caller, if the parameter
4870 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4871 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4872 here with caller. */
4873 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4874 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4876 boundary /= BITS_PER_UNIT;
4878 /* Hoist the valist value into a temporary for the moment. */
4879 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4881 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4882 requires greater alignment, we must perform dynamic alignment. */
4883 if (boundary > align
4884 && !integer_zerop (TYPE_SIZE (type)))
4886 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4887 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4888 valist_tmp, size_int (boundary - 1)));
4889 gimplify_and_add (t, pre_p);
4891 t = fold_convert (sizetype, valist_tmp);
4892 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4893 fold_convert (TREE_TYPE (valist),
4894 fold_build2 (BIT_AND_EXPR, sizetype, t,
4895 size_int (-boundary))));
4896 gimplify_and_add (t, pre_p);
4901 /* If the actual alignment is less than the alignment of the type,
4902 adjust the type accordingly so that we don't assume strict alignment
4903 when dereferencing the pointer. */
4904 boundary *= BITS_PER_UNIT;
4905 if (boundary < TYPE_ALIGN (type))
4907 type = build_variant_type_copy (type);
4908 TYPE_ALIGN (type) = boundary;
4911 /* Compute the rounded size of the type. */
4912 type_size = size_in_bytes (type);
4913 rounded_size = round_up (type_size, align);
4915 /* Reduce rounded_size so it's sharable with the postqueue. */
4916 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4920 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4922 /* Small args are padded downward. */
4923 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4924 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4925 size_binop (MINUS_EXPR, rounded_size, type_size));
4926 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4929 /* Compute new value for AP. */
4930 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4931 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4932 gimplify_and_add (t, pre_p);
4934 addr = fold_convert (build_pointer_type (type), addr);
4937 addr = build_va_arg_indirect_ref (addr);
4939 return build_va_arg_indirect_ref (addr);
4942 /* Build an indirect-ref expression over the given TREE, which represents a
4943 piece of a va_arg() expansion. */
4945 build_va_arg_indirect_ref (tree addr)
4947 addr = build_fold_indirect_ref (addr);
4949 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4955 /* Return a dummy expression of type TYPE in order to keep going after an
4959 dummy_object (tree type)
4961 tree t = build_int_cst (build_pointer_type (type), 0);
4962 return build1 (INDIRECT_REF, type, t);
4965 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4966 builtin function, but a very special sort of operator. */
4968 enum gimplify_status
4969 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4971 tree promoted_type, have_va_type;
4972 tree valist = TREE_OPERAND (*expr_p, 0);
4973 tree type = TREE_TYPE (*expr_p);
4976 /* Verify that valist is of the proper type. */
4977 have_va_type = TREE_TYPE (valist);
4978 if (have_va_type == error_mark_node)
4980 have_va_type = targetm.canonical_va_list_type (have_va_type);
4982 if (have_va_type == NULL_TREE)
4984 error ("first argument to %<va_arg%> not of type %<va_list%>");
4988 /* Generate a diagnostic for requesting data of a type that cannot
4989 be passed through `...' due to type promotion at the call site. */
4990 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4993 static bool gave_help;
4996 /* Unfortunately, this is merely undefined, rather than a constraint
4997 violation, so we cannot make this an error. If this call is never
4998 executed, the program is still strictly conforming. */
4999 warned = warning (0, "%qT is promoted to %qT when passed through %<...%>",
5000 type, promoted_type);
5001 if (!gave_help && warned)
5004 inform (input_location, "(so you should pass %qT not %qT to %<va_arg%>)",
5005 promoted_type, type);
5008 /* We can, however, treat "undefined" any way we please.
5009 Call abort to encourage the user to fix the program. */
5011 inform (input_location, "if this code is reached, the program will abort");
5012 /* Before the abort, allow the evaluation of the va_list
5013 expression to exit or longjmp. */
5014 gimplify_and_add (valist, pre_p);
5015 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
5016 gimplify_and_add (t, pre_p);
5018 /* This is dead code, but go ahead and finish so that the
5019 mode of the result comes out right. */
5020 *expr_p = dummy_object (type);
5025 /* Make it easier for the backends by protecting the valist argument
5026 from multiple evaluations. */
5027 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
5029 /* For this case, the backends will be expecting a pointer to
5030 TREE_TYPE (abi), but it's possible we've
5031 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
5033 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5035 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
5036 valist = build_fold_addr_expr_with_type (valist, p1);
5039 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
5042 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
5044 if (!targetm.gimplify_va_arg_expr)
5045 /* FIXME: Once most targets are converted we should merely
5046 assert this is non-null. */
5049 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
5054 /* Expand EXP, a call to __builtin_va_end. */
5057 expand_builtin_va_end (tree exp)
5059 tree valist = CALL_EXPR_ARG (exp, 0);
5061 /* Evaluate for side effects, if needed. I hate macros that don't
5063 if (TREE_SIDE_EFFECTS (valist))
5064 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5069 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5070 builtin rather than just as an assignment in stdarg.h because of the
5071 nastiness of array-type va_list types. */
5074 expand_builtin_va_copy (tree exp)
5078 dst = CALL_EXPR_ARG (exp, 0);
5079 src = CALL_EXPR_ARG (exp, 1);
5081 dst = stabilize_va_list (dst, 1);
5082 src = stabilize_va_list (src, 0);
5084 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5086 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5088 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5089 TREE_SIDE_EFFECTS (t) = 1;
5090 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5094 rtx dstb, srcb, size;
5096 /* Evaluate to pointers. */
5097 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5098 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5099 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5100 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5102 dstb = convert_memory_address (Pmode, dstb);
5103 srcb = convert_memory_address (Pmode, srcb);
5105 /* "Dereference" to BLKmode memories. */
5106 dstb = gen_rtx_MEM (BLKmode, dstb);
5107 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5108 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5109 srcb = gen_rtx_MEM (BLKmode, srcb);
5110 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5111 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5114 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5120 /* Expand a call to one of the builtin functions __builtin_frame_address or
5121 __builtin_return_address. */
5124 expand_builtin_frame_address (tree fndecl, tree exp)
5126 /* The argument must be a nonnegative integer constant.
5127 It counts the number of frames to scan up the stack.
5128 The value is the return address saved in that frame. */
5129 if (call_expr_nargs (exp) == 0)
5130 /* Warning about missing arg was already issued. */
5132 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5134 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5135 error ("invalid argument to %<__builtin_frame_address%>");
5137 error ("invalid argument to %<__builtin_return_address%>");
5143 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5144 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5146 /* Some ports cannot access arbitrary stack frames. */
5149 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5150 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5152 warning (0, "unsupported argument to %<__builtin_return_address%>");
5156 /* For __builtin_frame_address, return what we've got. */
5157 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5161 && ! CONSTANT_P (tem))
5162 tem = copy_to_mode_reg (Pmode, tem);
5167 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5168 we failed and the caller should emit a normal call, otherwise try to get
5169 the result in TARGET, if convenient. */
5172 expand_builtin_alloca (tree exp, rtx target)
5177 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5178 should always expand to function calls. These can be intercepted
5183 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5186 /* Compute the argument. */
5187 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5189 /* Allocate the desired space. */
5190 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5191 result = convert_memory_address (ptr_mode, result);
5196 /* Expand a call to a bswap builtin with argument ARG0. MODE
5197 is the mode to expand with. */
5200 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5202 enum machine_mode mode;
5206 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5209 arg = CALL_EXPR_ARG (exp, 0);
5210 mode = TYPE_MODE (TREE_TYPE (arg));
5211 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5213 target = expand_unop (mode, bswap_optab, op0, target, 1);
5215 gcc_assert (target);
5217 return convert_to_mode (mode, target, 0);
5220 /* Expand a call to a unary builtin in EXP.
5221 Return NULL_RTX if a normal call should be emitted rather than expanding the
5222 function in-line. If convenient, the result should be placed in TARGET.
5223 SUBTARGET may be used as the target for computing one of EXP's operands. */
5226 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5227 rtx subtarget, optab op_optab)
5231 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5234 /* Compute the argument. */
5235 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5236 VOIDmode, EXPAND_NORMAL);
5237 /* Compute op, into TARGET if possible.
5238 Set TARGET to wherever the result comes back. */
5239 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5240 op_optab, op0, target, 1);
5241 gcc_assert (target);
5243 return convert_to_mode (target_mode, target, 0);
5246 /* If the string passed to fputs is a constant and is one character
5247 long, we attempt to transform this call into __builtin_fputc(). */
5250 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5252 /* Verify the arguments in the original call. */
5253 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5255 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5256 CALL_EXPR_ARG (exp, 1),
5257 (target == const0_rtx),
5258 unlocked, NULL_TREE);
5260 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5265 /* Expand a call to __builtin_expect. We just return our argument
5266 as the builtin_expect semantic should've been already executed by
5267 tree branch prediction pass. */
5270 expand_builtin_expect (tree exp, rtx target)
5274 if (call_expr_nargs (exp) < 2)
5276 arg = CALL_EXPR_ARG (exp, 0);
5277 c = CALL_EXPR_ARG (exp, 1);
5279 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5280 /* When guessing was done, the hints should be already stripped away. */
5281 gcc_assert (!flag_guess_branch_prob
5282 || optimize == 0 || errorcount || sorrycount);
5287 expand_builtin_trap (void)
5291 emit_insn (gen_trap ());
5294 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5298 /* Expand EXP, a call to fabs, fabsf or fabsl.
5299 Return NULL_RTX if a normal call should be emitted rather than expanding
5300 the function inline. If convenient, the result should be placed
5301 in TARGET. SUBTARGET may be used as the target for computing
5305 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5307 enum machine_mode mode;
5311 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5314 arg = CALL_EXPR_ARG (exp, 0);
5315 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5316 mode = TYPE_MODE (TREE_TYPE (arg));
5317 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5318 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5321 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5322 Return NULL is a normal call should be emitted rather than expanding the
5323 function inline. If convenient, the result should be placed in TARGET.
5324 SUBTARGET may be used as the target for computing the operand. */
5327 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5332 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5335 arg = CALL_EXPR_ARG (exp, 0);
5336 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5338 arg = CALL_EXPR_ARG (exp, 1);
5339 op1 = expand_normal (arg);
5341 return expand_copysign (op0, op1, target);
5344 /* Create a new constant string literal and return a char* pointer to it.
5345 The STRING_CST value is the LEN characters at STR. */
5347 build_string_literal (int len, const char *str)
5349 tree t, elem, index, type;
5351 t = build_string (len, str);
5352 elem = build_type_variant (char_type_node, 1, 0);
5353 index = build_index_type (size_int (len - 1));
5354 type = build_array_type (elem, index);
5355 TREE_TYPE (t) = type;
5356 TREE_CONSTANT (t) = 1;
5357 TREE_READONLY (t) = 1;
5358 TREE_STATIC (t) = 1;
5360 type = build_pointer_type (elem);
5361 t = build1 (ADDR_EXPR, type,
5362 build4 (ARRAY_REF, elem,
5363 t, integer_zero_node, NULL_TREE, NULL_TREE));
5367 /* Expand EXP, a call to printf or printf_unlocked.
5368 Return NULL_RTX if a normal call should be emitted rather than transforming
5369 the function inline. If convenient, the result should be placed in
5370 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5373 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5376 /* If we're using an unlocked function, assume the other unlocked
5377 functions exist explicitly. */
5378 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5379 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5380 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5381 : implicit_built_in_decls[BUILT_IN_PUTS];
5382 const char *fmt_str;
5385 int nargs = call_expr_nargs (exp);
5387 /* If the return value is used, don't do the transformation. */
5388 if (target != const0_rtx)
5391 /* Verify the required arguments in the original call. */
5394 fmt = CALL_EXPR_ARG (exp, 0);
5395 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5398 /* Check whether the format is a literal string constant. */
5399 fmt_str = c_getstr (fmt);
5400 if (fmt_str == NULL)
5403 if (!init_target_chars ())
5406 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5407 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5410 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5413 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5415 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5416 else if (strcmp (fmt_str, target_percent_c) == 0)
5419 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5422 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5426 /* We can't handle anything else with % args or %% ... yet. */
5427 if (strchr (fmt_str, target_percent))
5433 /* If the format specifier was "", printf does nothing. */
5434 if (fmt_str[0] == '\0')
5436 /* If the format specifier has length of 1, call putchar. */
5437 if (fmt_str[1] == '\0')
5439 /* Given printf("c"), (where c is any one character,)
5440 convert "c"[0] to an int and pass that to the replacement
5442 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5444 fn = build_call_expr (fn_putchar, 1, arg);
5448 /* If the format specifier was "string\n", call puts("string"). */
5449 size_t len = strlen (fmt_str);
5450 if ((unsigned char)fmt_str[len - 1] == target_newline)
5452 /* Create a NUL-terminated string that's one char shorter
5453 than the original, stripping off the trailing '\n'. */
5454 char *newstr = XALLOCAVEC (char, len);
5455 memcpy (newstr, fmt_str, len - 1);
5456 newstr[len - 1] = 0;
5457 arg = build_string_literal (len, newstr);
5459 fn = build_call_expr (fn_puts, 1, arg);
5462 /* We'd like to arrange to call fputs(string,stdout) here,
5463 but we need stdout and don't have a way to get it yet. */
5470 if (TREE_CODE (fn) == CALL_EXPR)
5471 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5472 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5475 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5476 Return NULL_RTX if a normal call should be emitted rather than transforming
5477 the function inline. If convenient, the result should be placed in
5478 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5481 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5484 /* If we're using an unlocked function, assume the other unlocked
5485 functions exist explicitly. */
5486 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5487 : implicit_built_in_decls[BUILT_IN_FPUTC];
5488 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5489 : implicit_built_in_decls[BUILT_IN_FPUTS];
5490 const char *fmt_str;
5493 int nargs = call_expr_nargs (exp);
5495 /* If the return value is used, don't do the transformation. */
5496 if (target != const0_rtx)
5499 /* Verify the required arguments in the original call. */
5502 fp = CALL_EXPR_ARG (exp, 0);
5503 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5505 fmt = CALL_EXPR_ARG (exp, 1);
5506 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5509 /* Check whether the format is a literal string constant. */
5510 fmt_str = c_getstr (fmt);
5511 if (fmt_str == NULL)
5514 if (!init_target_chars ())
5517 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5518 if (strcmp (fmt_str, target_percent_s) == 0)
5521 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5523 arg = CALL_EXPR_ARG (exp, 2);
5525 fn = build_call_expr (fn_fputs, 2, arg, fp);
5527 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5528 else if (strcmp (fmt_str, target_percent_c) == 0)
5531 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5533 arg = CALL_EXPR_ARG (exp, 2);
5535 fn = build_call_expr (fn_fputc, 2, arg, fp);
5539 /* We can't handle anything else with % args or %% ... yet. */
5540 if (strchr (fmt_str, target_percent))
5546 /* If the format specifier was "", fprintf does nothing. */
5547 if (fmt_str[0] == '\0')
5549 /* Evaluate and ignore FILE* argument for side-effects. */
5550 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5554 /* When "string" doesn't contain %, replace all cases of
5555 fprintf(stream,string) with fputs(string,stream). The fputs
5556 builtin will take care of special cases like length == 1. */
5558 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5563 if (TREE_CODE (fn) == CALL_EXPR)
5564 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5565 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5568 /* Expand a call EXP to sprintf. Return NULL_RTX if
5569 a normal call should be emitted rather than expanding the function
5570 inline. If convenient, the result should be placed in TARGET with
5574 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5577 const char *fmt_str;
5578 int nargs = call_expr_nargs (exp);
5580 /* Verify the required arguments in the original call. */
5583 dest = CALL_EXPR_ARG (exp, 0);
5584 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5586 fmt = CALL_EXPR_ARG (exp, 0);
5587 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5590 /* Check whether the format is a literal string constant. */
5591 fmt_str = c_getstr (fmt);
5592 if (fmt_str == NULL)
5595 if (!init_target_chars ())
5598 /* If the format doesn't contain % args or %%, use strcpy. */
5599 if (strchr (fmt_str, target_percent) == 0)
5601 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5604 if ((nargs > 2) || ! fn)
5606 expand_expr (build_call_expr (fn, 2, dest, fmt),
5607 const0_rtx, VOIDmode, EXPAND_NORMAL);
5608 if (target == const0_rtx)
5610 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5611 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5613 /* If the format is "%s", use strcpy if the result isn't used. */
5614 else if (strcmp (fmt_str, target_percent_s) == 0)
5617 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5623 arg = CALL_EXPR_ARG (exp, 2);
5624 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5627 if (target != const0_rtx)
5629 len = c_strlen (arg, 1);
5630 if (! len || TREE_CODE (len) != INTEGER_CST)
5636 expand_expr (build_call_expr (fn, 2, dest, arg),
5637 const0_rtx, VOIDmode, EXPAND_NORMAL);
5639 if (target == const0_rtx)
5641 return expand_expr (len, target, mode, EXPAND_NORMAL);
5647 /* Expand a call to either the entry or exit function profiler. */
5650 expand_builtin_profile_func (bool exitp)
5652 rtx this_rtx, which;
5654 this_rtx = DECL_RTL (current_function_decl);
5655 gcc_assert (MEM_P (this_rtx));
5656 this_rtx = XEXP (this_rtx, 0);
5659 which = profile_function_exit_libfunc;
5661 which = profile_function_entry_libfunc;
5663 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5664 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5671 /* Expand a call to __builtin___clear_cache. */
5674 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5676 #ifndef HAVE_clear_cache
5677 #ifdef CLEAR_INSN_CACHE
5678 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5679 does something. Just do the default expansion to a call to
5683 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5684 does nothing. There is no need to call it. Do nothing. */
5686 #endif /* CLEAR_INSN_CACHE */
5688 /* We have a "clear_cache" insn, and it will handle everything. */
5690 rtx begin_rtx, end_rtx;
5691 enum insn_code icode;
5693 /* We must not expand to a library call. If we did, any
5694 fallback library function in libgcc that might contain a call to
5695 __builtin___clear_cache() would recurse infinitely. */
5696 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5698 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5702 if (HAVE_clear_cache)
5704 icode = CODE_FOR_clear_cache;
5706 begin = CALL_EXPR_ARG (exp, 0);
5707 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5708 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5709 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5710 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5712 end = CALL_EXPR_ARG (exp, 1);
5713 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5714 end_rtx = convert_memory_address (Pmode, end_rtx);
5715 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5716 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5718 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5721 #endif /* HAVE_clear_cache */
5724 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5727 round_trampoline_addr (rtx tramp)
5729 rtx temp, addend, mask;
5731 /* If we don't need too much alignment, we'll have been guaranteed
5732 proper alignment by get_trampoline_type. */
5733 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5736 /* Round address up to desired boundary. */
5737 temp = gen_reg_rtx (Pmode);
5738 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5739 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5741 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5742 temp, 0, OPTAB_LIB_WIDEN);
5743 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5744 temp, 0, OPTAB_LIB_WIDEN);
5750 expand_builtin_init_trampoline (tree exp)
5752 tree t_tramp, t_func, t_chain;
5753 rtx r_tramp, r_func, r_chain;
5754 #ifdef TRAMPOLINE_TEMPLATE
5758 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5759 POINTER_TYPE, VOID_TYPE))
5762 t_tramp = CALL_EXPR_ARG (exp, 0);
5763 t_func = CALL_EXPR_ARG (exp, 1);
5764 t_chain = CALL_EXPR_ARG (exp, 2);
5766 r_tramp = expand_normal (t_tramp);
5767 r_func = expand_normal (t_func);
5768 r_chain = expand_normal (t_chain);
5770 /* Generate insns to initialize the trampoline. */
5771 r_tramp = round_trampoline_addr (r_tramp);
5772 #ifdef TRAMPOLINE_TEMPLATE
5773 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5774 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5775 emit_block_move (blktramp, assemble_trampoline_template (),
5776 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5778 trampolines_created = 1;
5779 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5785 expand_builtin_adjust_trampoline (tree exp)
5789 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5792 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5793 tramp = round_trampoline_addr (tramp);
5794 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5795 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5801 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5802 function. The function first checks whether the back end provides
5803 an insn to implement signbit for the respective mode. If not, it
5804 checks whether the floating point format of the value is such that
5805 the sign bit can be extracted. If that is not the case, the
5806 function returns NULL_RTX to indicate that a normal call should be
5807 emitted rather than expanding the function in-line. EXP is the
5808 expression that is a call to the builtin function; if convenient,
5809 the result should be placed in TARGET. */
5811 expand_builtin_signbit (tree exp, rtx target)
5813 const struct real_format *fmt;
5814 enum machine_mode fmode, imode, rmode;
5815 HOST_WIDE_INT hi, lo;
5818 enum insn_code icode;
5821 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5824 arg = CALL_EXPR_ARG (exp, 0);
5825 fmode = TYPE_MODE (TREE_TYPE (arg));
5826 rmode = TYPE_MODE (TREE_TYPE (exp));
5827 fmt = REAL_MODE_FORMAT (fmode);
5829 arg = builtin_save_expr (arg);
5831 /* Expand the argument yielding a RTX expression. */
5832 temp = expand_normal (arg);
5834 /* Check if the back end provides an insn that handles signbit for the
5836 icode = signbit_optab->handlers [(int) fmode].insn_code;
5837 if (icode != CODE_FOR_nothing)
5839 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5840 emit_unop_insn (icode, target, temp, UNKNOWN);
5844 /* For floating point formats without a sign bit, implement signbit
5846 bitpos = fmt->signbit_ro;
5849 /* But we can't do this if the format supports signed zero. */
5850 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5853 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5854 build_real (TREE_TYPE (arg), dconst0));
5855 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5858 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5860 imode = int_mode_for_mode (fmode);
5861 if (imode == BLKmode)
5863 temp = gen_lowpart (imode, temp);
5868 /* Handle targets with different FP word orders. */
5869 if (FLOAT_WORDS_BIG_ENDIAN)
5870 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5872 word = bitpos / BITS_PER_WORD;
5873 temp = operand_subword_force (temp, word, fmode);
5874 bitpos = bitpos % BITS_PER_WORD;
5877 /* Force the intermediate word_mode (or narrower) result into a
5878 register. This avoids attempting to create paradoxical SUBREGs
5879 of floating point modes below. */
5880 temp = force_reg (imode, temp);
5882 /* If the bitpos is within the "result mode" lowpart, the operation
5883 can be implement with a single bitwise AND. Otherwise, we need
5884 a right shift and an AND. */
5886 if (bitpos < GET_MODE_BITSIZE (rmode))
5888 if (bitpos < HOST_BITS_PER_WIDE_INT)
5891 lo = (HOST_WIDE_INT) 1 << bitpos;
5895 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5899 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5900 temp = gen_lowpart (rmode, temp);
5901 temp = expand_binop (rmode, and_optab, temp,
5902 immed_double_const (lo, hi, rmode),
5903 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5907 /* Perform a logical right shift to place the signbit in the least
5908 significant bit, then truncate the result to the desired mode
5909 and mask just this bit. */
5910 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5911 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5912 temp = gen_lowpart (rmode, temp);
5913 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5914 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5920 /* Expand fork or exec calls. TARGET is the desired target of the
5921 call. EXP is the call. FN is the
5922 identificator of the actual function. IGNORE is nonzero if the
5923 value is to be ignored. */
5926 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5931 /* If we are not profiling, just call the function. */
5932 if (!profile_arc_flag)
5935 /* Otherwise call the wrapper. This should be equivalent for the rest of
5936 compiler, so the code does not diverge, and the wrapper may run the
5937 code necessary for keeping the profiling sane. */
5939 switch (DECL_FUNCTION_CODE (fn))
5942 id = get_identifier ("__gcov_fork");
5945 case BUILT_IN_EXECL:
5946 id = get_identifier ("__gcov_execl");
5949 case BUILT_IN_EXECV:
5950 id = get_identifier ("__gcov_execv");
5953 case BUILT_IN_EXECLP:
5954 id = get_identifier ("__gcov_execlp");
5957 case BUILT_IN_EXECLE:
5958 id = get_identifier ("__gcov_execle");
5961 case BUILT_IN_EXECVP:
5962 id = get_identifier ("__gcov_execvp");
5965 case BUILT_IN_EXECVE:
5966 id = get_identifier ("__gcov_execve");
5973 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5974 DECL_EXTERNAL (decl) = 1;
5975 TREE_PUBLIC (decl) = 1;
5976 DECL_ARTIFICIAL (decl) = 1;
5977 TREE_NOTHROW (decl) = 1;
5978 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5979 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5980 call = rewrite_call_expr (exp, 0, decl, 0);
5981 return expand_call (call, target, ignore);
5986 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5987 the pointer in these functions is void*, the tree optimizers may remove
5988 casts. The mode computed in expand_builtin isn't reliable either, due
5989 to __sync_bool_compare_and_swap.
5991 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5992 group of builtins. This gives us log2 of the mode size. */
5994 static inline enum machine_mode
5995 get_builtin_sync_mode (int fcode_diff)
5997 /* The size is not negotiable, so ask not to get BLKmode in return
5998 if the target indicates that a smaller size would be better. */
5999 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
6002 /* Expand the memory expression LOC and return the appropriate memory operand
6003 for the builtin_sync operations. */
6006 get_builtin_sync_mem (tree loc, enum machine_mode mode)
6010 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
6012 /* Note that we explicitly do not want any alias information for this
6013 memory, so that we kill all other live memories. Otherwise we don't
6014 satisfy the full barrier semantics of the intrinsic. */
6015 mem = validize_mem (gen_rtx_MEM (mode, addr));
6017 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
6018 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6019 MEM_VOLATILE_P (mem) = 1;
6024 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6025 EXP is the CALL_EXPR. CODE is the rtx code
6026 that corresponds to the arithmetic or logical operation from the name;
6027 an exception here is that NOT actually means NAND. TARGET is an optional
6028 place for us to store the results; AFTER is true if this is the
6029 fetch_and_xxx form. IGNORE is true if we don't actually care about
6030 the result of the operation at all. */
6033 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
6034 enum rtx_code code, bool after,
6035 rtx target, bool ignore)
6038 enum machine_mode old_mode;
6040 if (code == NOT && warn_sync_nand)
6042 tree fndecl = get_callee_fndecl (exp);
6043 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6045 static bool warned_f_a_n, warned_n_a_f;
6049 case BUILT_IN_FETCH_AND_NAND_1:
6050 case BUILT_IN_FETCH_AND_NAND_2:
6051 case BUILT_IN_FETCH_AND_NAND_4:
6052 case BUILT_IN_FETCH_AND_NAND_8:
6053 case BUILT_IN_FETCH_AND_NAND_16:
6058 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
6059 inform (input_location,
6060 "%qD changed semantics in GCC 4.4", fndecl);
6061 warned_f_a_n = true;
6064 case BUILT_IN_NAND_AND_FETCH_1:
6065 case BUILT_IN_NAND_AND_FETCH_2:
6066 case BUILT_IN_NAND_AND_FETCH_4:
6067 case BUILT_IN_NAND_AND_FETCH_8:
6068 case BUILT_IN_NAND_AND_FETCH_16:
6073 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
6074 inform (input_location,
6075 "%qD changed semantics in GCC 4.4", fndecl);
6076 warned_n_a_f = true;
6084 /* Expand the operands. */
6085 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6087 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6088 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6089 of CONST_INTs, where we know the old_mode only from the call argument. */
6090 old_mode = GET_MODE (val);
6091 if (old_mode == VOIDmode)
6092 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6093 val = convert_modes (mode, old_mode, val, 1);
6096 return expand_sync_operation (mem, val, code);
6098 return expand_sync_fetch_operation (mem, val, code, after, target);
6101 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6102 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6103 true if this is the boolean form. TARGET is a place for us to store the
6104 results; this is NOT optional if IS_BOOL is true. */
6107 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
6108 bool is_bool, rtx target)
6110 rtx old_val, new_val, mem;
6111 enum machine_mode old_mode;
6113 /* Expand the operands. */
6114 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6117 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
6118 mode, EXPAND_NORMAL);
6119 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6120 of CONST_INTs, where we know the old_mode only from the call argument. */
6121 old_mode = GET_MODE (old_val);
6122 if (old_mode == VOIDmode)
6123 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6124 old_val = convert_modes (mode, old_mode, old_val, 1);
6126 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
6127 mode, EXPAND_NORMAL);
6128 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6129 of CONST_INTs, where we know the old_mode only from the call argument. */
6130 old_mode = GET_MODE (new_val);
6131 if (old_mode == VOIDmode)
6132 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6133 new_val = convert_modes (mode, old_mode, new_val, 1);
6136 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6138 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6141 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6142 general form is actually an atomic exchange, and some targets only
6143 support a reduced form with the second argument being a constant 1.
6144 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6148 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6152 enum machine_mode old_mode;
6154 /* Expand the operands. */
6155 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6156 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6157 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6158 of CONST_INTs, where we know the old_mode only from the call argument. */
6159 old_mode = GET_MODE (val);
6160 if (old_mode == VOIDmode)
6161 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6162 val = convert_modes (mode, old_mode, val, 1);
6164 return expand_sync_lock_test_and_set (mem, val, target);
6167 /* Expand the __sync_synchronize intrinsic. */
6170 expand_builtin_synchronize (void)
6174 #ifdef HAVE_memory_barrier
6175 if (HAVE_memory_barrier)
6177 emit_insn (gen_memory_barrier ());
6182 if (synchronize_libfunc != NULL_RTX)
6184 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6188 /* If no explicit memory barrier instruction is available, create an
6189 empty asm stmt with a memory clobber. */
6190 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6191 tree_cons (NULL, build_string (6, "memory"), NULL));
6192 ASM_VOLATILE_P (x) = 1;
6193 expand_asm_expr (x);
6196 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6199 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6201 enum insn_code icode;
6203 rtx val = const0_rtx;
6205 /* Expand the operands. */
6206 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6208 /* If there is an explicit operation in the md file, use it. */
6209 icode = sync_lock_release[mode];
6210 if (icode != CODE_FOR_nothing)
6212 if (!insn_data[icode].operand[1].predicate (val, mode))
6213 val = force_reg (mode, val);
6215 insn = GEN_FCN (icode) (mem, val);
6223 /* Otherwise we can implement this operation by emitting a barrier
6224 followed by a store of zero. */
6225 expand_builtin_synchronize ();
6226 emit_move_insn (mem, val);
6229 /* Expand an expression EXP that calls a built-in function,
6230 with result going to TARGET if that's convenient
6231 (and in mode MODE if that's convenient).
6232 SUBTARGET may be used as the target for computing one of EXP's operands.
6233 IGNORE is nonzero if the value is to be ignored. */
6236 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6239 tree fndecl = get_callee_fndecl (exp);
6240 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6241 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6243 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6244 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6246 /* When not optimizing, generate calls to library functions for a certain
6249 && !called_as_built_in (fndecl)
6250 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6251 && fcode != BUILT_IN_ALLOCA
6252 && fcode != BUILT_IN_FREE)
6253 return expand_call (exp, target, ignore);
6255 /* The built-in function expanders test for target == const0_rtx
6256 to determine whether the function's result will be ignored. */
6258 target = const0_rtx;
6260 /* If the result of a pure or const built-in function is ignored, and
6261 none of its arguments are volatile, we can avoid expanding the
6262 built-in call and just evaluate the arguments for side-effects. */
6263 if (target == const0_rtx
6264 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6266 bool volatilep = false;
6268 call_expr_arg_iterator iter;
6270 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6271 if (TREE_THIS_VOLATILE (arg))
6279 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6280 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6287 CASE_FLT_FN (BUILT_IN_FABS):
6288 target = expand_builtin_fabs (exp, target, subtarget);
6293 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6294 target = expand_builtin_copysign (exp, target, subtarget);
6299 /* Just do a normal library call if we were unable to fold
6301 CASE_FLT_FN (BUILT_IN_CABS):
6304 CASE_FLT_FN (BUILT_IN_EXP):
6305 CASE_FLT_FN (BUILT_IN_EXP10):
6306 CASE_FLT_FN (BUILT_IN_POW10):
6307 CASE_FLT_FN (BUILT_IN_EXP2):
6308 CASE_FLT_FN (BUILT_IN_EXPM1):
6309 CASE_FLT_FN (BUILT_IN_LOGB):
6310 CASE_FLT_FN (BUILT_IN_LOG):
6311 CASE_FLT_FN (BUILT_IN_LOG10):
6312 CASE_FLT_FN (BUILT_IN_LOG2):
6313 CASE_FLT_FN (BUILT_IN_LOG1P):
6314 CASE_FLT_FN (BUILT_IN_TAN):
6315 CASE_FLT_FN (BUILT_IN_ASIN):
6316 CASE_FLT_FN (BUILT_IN_ACOS):
6317 CASE_FLT_FN (BUILT_IN_ATAN):
6318 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6319 because of possible accuracy problems. */
6320 if (! flag_unsafe_math_optimizations)
6322 CASE_FLT_FN (BUILT_IN_SQRT):
6323 CASE_FLT_FN (BUILT_IN_FLOOR):
6324 CASE_FLT_FN (BUILT_IN_CEIL):
6325 CASE_FLT_FN (BUILT_IN_TRUNC):
6326 CASE_FLT_FN (BUILT_IN_ROUND):
6327 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6328 CASE_FLT_FN (BUILT_IN_RINT):
6329 target = expand_builtin_mathfn (exp, target, subtarget);
6334 CASE_FLT_FN (BUILT_IN_ILOGB):
6335 if (! flag_unsafe_math_optimizations)
6337 CASE_FLT_FN (BUILT_IN_ISINF):
6338 CASE_FLT_FN (BUILT_IN_FINITE):
6339 case BUILT_IN_ISFINITE:
6340 case BUILT_IN_ISNORMAL:
6341 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6346 CASE_FLT_FN (BUILT_IN_LCEIL):
6347 CASE_FLT_FN (BUILT_IN_LLCEIL):
6348 CASE_FLT_FN (BUILT_IN_LFLOOR):
6349 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6350 target = expand_builtin_int_roundingfn (exp, target);
6355 CASE_FLT_FN (BUILT_IN_LRINT):
6356 CASE_FLT_FN (BUILT_IN_LLRINT):
6357 CASE_FLT_FN (BUILT_IN_LROUND):
6358 CASE_FLT_FN (BUILT_IN_LLROUND):
6359 target = expand_builtin_int_roundingfn_2 (exp, target);
6364 CASE_FLT_FN (BUILT_IN_POW):
6365 target = expand_builtin_pow (exp, target, subtarget);
6370 CASE_FLT_FN (BUILT_IN_POWI):
6371 target = expand_builtin_powi (exp, target, subtarget);
6376 CASE_FLT_FN (BUILT_IN_ATAN2):
6377 CASE_FLT_FN (BUILT_IN_LDEXP):
6378 CASE_FLT_FN (BUILT_IN_SCALB):
6379 CASE_FLT_FN (BUILT_IN_SCALBN):
6380 CASE_FLT_FN (BUILT_IN_SCALBLN):
6381 if (! flag_unsafe_math_optimizations)
6384 CASE_FLT_FN (BUILT_IN_FMOD):
6385 CASE_FLT_FN (BUILT_IN_REMAINDER):
6386 CASE_FLT_FN (BUILT_IN_DREM):
6387 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6392 CASE_FLT_FN (BUILT_IN_CEXPI):
6393 target = expand_builtin_cexpi (exp, target, subtarget);
6394 gcc_assert (target);
6397 CASE_FLT_FN (BUILT_IN_SIN):
6398 CASE_FLT_FN (BUILT_IN_COS):
6399 if (! flag_unsafe_math_optimizations)
6401 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6406 CASE_FLT_FN (BUILT_IN_SINCOS):
6407 if (! flag_unsafe_math_optimizations)
6409 target = expand_builtin_sincos (exp);
6414 case BUILT_IN_APPLY_ARGS:
6415 return expand_builtin_apply_args ();
6417 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6418 FUNCTION with a copy of the parameters described by
6419 ARGUMENTS, and ARGSIZE. It returns a block of memory
6420 allocated on the stack into which is stored all the registers
6421 that might possibly be used for returning the result of a
6422 function. ARGUMENTS is the value returned by
6423 __builtin_apply_args. ARGSIZE is the number of bytes of
6424 arguments that must be copied. ??? How should this value be
6425 computed? We'll also need a safe worst case value for varargs
6427 case BUILT_IN_APPLY:
6428 if (!validate_arglist (exp, POINTER_TYPE,
6429 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6430 && !validate_arglist (exp, REFERENCE_TYPE,
6431 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6437 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6438 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6439 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6441 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6444 /* __builtin_return (RESULT) causes the function to return the
6445 value described by RESULT. RESULT is address of the block of
6446 memory returned by __builtin_apply. */
6447 case BUILT_IN_RETURN:
6448 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6449 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6452 case BUILT_IN_SAVEREGS:
6453 return expand_builtin_saveregs ();
6455 case BUILT_IN_ARGS_INFO:
6456 return expand_builtin_args_info (exp);
6458 case BUILT_IN_VA_ARG_PACK:
6459 /* All valid uses of __builtin_va_arg_pack () are removed during
6461 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6464 case BUILT_IN_VA_ARG_PACK_LEN:
6465 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6467 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6470 /* Return the address of the first anonymous stack arg. */
6471 case BUILT_IN_NEXT_ARG:
6472 if (fold_builtin_next_arg (exp, false))
6474 return expand_builtin_next_arg ();
6476 case BUILT_IN_CLEAR_CACHE:
6477 target = expand_builtin___clear_cache (exp);
6482 case BUILT_IN_CLASSIFY_TYPE:
6483 return expand_builtin_classify_type (exp);
6485 case BUILT_IN_CONSTANT_P:
6488 case BUILT_IN_FRAME_ADDRESS:
6489 case BUILT_IN_RETURN_ADDRESS:
6490 return expand_builtin_frame_address (fndecl, exp);
6492 /* Returns the address of the area where the structure is returned.
6494 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6495 if (call_expr_nargs (exp) != 0
6496 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6497 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6500 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6502 case BUILT_IN_ALLOCA:
6503 target = expand_builtin_alloca (exp, target);
6508 case BUILT_IN_STACK_SAVE:
6509 return expand_stack_save ();
6511 case BUILT_IN_STACK_RESTORE:
6512 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6515 case BUILT_IN_BSWAP32:
6516 case BUILT_IN_BSWAP64:
6517 target = expand_builtin_bswap (exp, target, subtarget);
6523 CASE_INT_FN (BUILT_IN_FFS):
6524 case BUILT_IN_FFSIMAX:
6525 target = expand_builtin_unop (target_mode, exp, target,
6526 subtarget, ffs_optab);
6531 CASE_INT_FN (BUILT_IN_CLZ):
6532 case BUILT_IN_CLZIMAX:
6533 target = expand_builtin_unop (target_mode, exp, target,
6534 subtarget, clz_optab);
6539 CASE_INT_FN (BUILT_IN_CTZ):
6540 case BUILT_IN_CTZIMAX:
6541 target = expand_builtin_unop (target_mode, exp, target,
6542 subtarget, ctz_optab);
6547 CASE_INT_FN (BUILT_IN_POPCOUNT):
6548 case BUILT_IN_POPCOUNTIMAX:
6549 target = expand_builtin_unop (target_mode, exp, target,
6550 subtarget, popcount_optab);
6555 CASE_INT_FN (BUILT_IN_PARITY):
6556 case BUILT_IN_PARITYIMAX:
6557 target = expand_builtin_unop (target_mode, exp, target,
6558 subtarget, parity_optab);
6563 case BUILT_IN_STRLEN:
6564 target = expand_builtin_strlen (exp, target, target_mode);
6569 case BUILT_IN_STRCPY:
6570 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6575 case BUILT_IN_STRNCPY:
6576 target = expand_builtin_strncpy (exp, target, mode);
6581 case BUILT_IN_STPCPY:
6582 target = expand_builtin_stpcpy (exp, target, mode);
6587 case BUILT_IN_STRCAT:
6588 target = expand_builtin_strcat (fndecl, exp, target, mode);
6593 case BUILT_IN_STRNCAT:
6594 target = expand_builtin_strncat (exp, target, mode);
6599 case BUILT_IN_STRSPN:
6600 target = expand_builtin_strspn (exp, target, mode);
6605 case BUILT_IN_STRCSPN:
6606 target = expand_builtin_strcspn (exp, target, mode);
6611 case BUILT_IN_STRSTR:
6612 target = expand_builtin_strstr (exp, target, mode);
6617 case BUILT_IN_STRPBRK:
6618 target = expand_builtin_strpbrk (exp, target, mode);
6623 case BUILT_IN_INDEX:
6624 case BUILT_IN_STRCHR:
6625 target = expand_builtin_strchr (exp, target, mode);
6630 case BUILT_IN_RINDEX:
6631 case BUILT_IN_STRRCHR:
6632 target = expand_builtin_strrchr (exp, target, mode);
6637 case BUILT_IN_MEMCPY:
6638 target = expand_builtin_memcpy (exp, target, mode);
6643 case BUILT_IN_MEMPCPY:
6644 target = expand_builtin_mempcpy (exp, target, mode);
6649 case BUILT_IN_MEMMOVE:
6650 target = expand_builtin_memmove (exp, target, mode, ignore);
6655 case BUILT_IN_BCOPY:
6656 target = expand_builtin_bcopy (exp, ignore);
6661 case BUILT_IN_MEMSET:
6662 target = expand_builtin_memset (exp, target, mode);
6667 case BUILT_IN_BZERO:
6668 target = expand_builtin_bzero (exp);
6673 case BUILT_IN_STRCMP:
6674 target = expand_builtin_strcmp (exp, target, mode);
6679 case BUILT_IN_STRNCMP:
6680 target = expand_builtin_strncmp (exp, target, mode);
6685 case BUILT_IN_MEMCHR:
6686 target = expand_builtin_memchr (exp, target, mode);
6692 case BUILT_IN_MEMCMP:
6693 target = expand_builtin_memcmp (exp, target, mode);
6698 case BUILT_IN_SETJMP:
6699 /* This should have been lowered to the builtins below. */
6702 case BUILT_IN_SETJMP_SETUP:
6703 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6704 and the receiver label. */
6705 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6707 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6708 VOIDmode, EXPAND_NORMAL);
6709 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6710 rtx label_r = label_rtx (label);
6712 /* This is copied from the handling of non-local gotos. */
6713 expand_builtin_setjmp_setup (buf_addr, label_r);
6714 nonlocal_goto_handler_labels
6715 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6716 nonlocal_goto_handler_labels);
6717 /* ??? Do not let expand_label treat us as such since we would
6718 not want to be both on the list of non-local labels and on
6719 the list of forced labels. */
6720 FORCED_LABEL (label) = 0;
6725 case BUILT_IN_SETJMP_DISPATCHER:
6726 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6727 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6729 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6730 rtx label_r = label_rtx (label);
6732 /* Remove the dispatcher label from the list of non-local labels
6733 since the receiver labels have been added to it above. */
6734 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6739 case BUILT_IN_SETJMP_RECEIVER:
6740 /* __builtin_setjmp_receiver is passed the receiver label. */
6741 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6743 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6744 rtx label_r = label_rtx (label);
6746 expand_builtin_setjmp_receiver (label_r);
6751 /* __builtin_longjmp is passed a pointer to an array of five words.
6752 It's similar to the C library longjmp function but works with
6753 __builtin_setjmp above. */
6754 case BUILT_IN_LONGJMP:
6755 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6757 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6758 VOIDmode, EXPAND_NORMAL);
6759 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6761 if (value != const1_rtx)
6763 error ("%<__builtin_longjmp%> second argument must be 1");
6767 expand_builtin_longjmp (buf_addr, value);
6772 case BUILT_IN_NONLOCAL_GOTO:
6773 target = expand_builtin_nonlocal_goto (exp);
6778 /* This updates the setjmp buffer that is its argument with the value
6779 of the current stack pointer. */
6780 case BUILT_IN_UPDATE_SETJMP_BUF:
6781 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6784 = expand_normal (CALL_EXPR_ARG (exp, 0));
6786 expand_builtin_update_setjmp_buf (buf_addr);
6792 expand_builtin_trap ();
6795 case BUILT_IN_PRINTF:
6796 target = expand_builtin_printf (exp, target, mode, false);
6801 case BUILT_IN_PRINTF_UNLOCKED:
6802 target = expand_builtin_printf (exp, target, mode, true);
6807 case BUILT_IN_FPUTS:
6808 target = expand_builtin_fputs (exp, target, false);
6812 case BUILT_IN_FPUTS_UNLOCKED:
6813 target = expand_builtin_fputs (exp, target, true);
6818 case BUILT_IN_FPRINTF:
6819 target = expand_builtin_fprintf (exp, target, mode, false);
6824 case BUILT_IN_FPRINTF_UNLOCKED:
6825 target = expand_builtin_fprintf (exp, target, mode, true);
6830 case BUILT_IN_SPRINTF:
6831 target = expand_builtin_sprintf (exp, target, mode);
6836 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6837 case BUILT_IN_SIGNBITD32:
6838 case BUILT_IN_SIGNBITD64:
6839 case BUILT_IN_SIGNBITD128:
6840 target = expand_builtin_signbit (exp, target);
6845 /* Various hooks for the DWARF 2 __throw routine. */
6846 case BUILT_IN_UNWIND_INIT:
6847 expand_builtin_unwind_init ();
6849 case BUILT_IN_DWARF_CFA:
6850 return virtual_cfa_rtx;
6851 #ifdef DWARF2_UNWIND_INFO
6852 case BUILT_IN_DWARF_SP_COLUMN:
6853 return expand_builtin_dwarf_sp_column ();
6854 case BUILT_IN_INIT_DWARF_REG_SIZES:
6855 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6858 case BUILT_IN_FROB_RETURN_ADDR:
6859 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6860 case BUILT_IN_EXTRACT_RETURN_ADDR:
6861 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6862 case BUILT_IN_EH_RETURN:
6863 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6864 CALL_EXPR_ARG (exp, 1));
6866 #ifdef EH_RETURN_DATA_REGNO
6867 case BUILT_IN_EH_RETURN_DATA_REGNO:
6868 return expand_builtin_eh_return_data_regno (exp);
6870 case BUILT_IN_EXTEND_POINTER:
6871 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6873 case BUILT_IN_VA_START:
6874 return expand_builtin_va_start (exp);
6875 case BUILT_IN_VA_END:
6876 return expand_builtin_va_end (exp);
6877 case BUILT_IN_VA_COPY:
6878 return expand_builtin_va_copy (exp);
6879 case BUILT_IN_EXPECT:
6880 return expand_builtin_expect (exp, target);
6881 case BUILT_IN_PREFETCH:
6882 expand_builtin_prefetch (exp);
6885 case BUILT_IN_PROFILE_FUNC_ENTER:
6886 return expand_builtin_profile_func (false);
6887 case BUILT_IN_PROFILE_FUNC_EXIT:
6888 return expand_builtin_profile_func (true);
6890 case BUILT_IN_INIT_TRAMPOLINE:
6891 return expand_builtin_init_trampoline (exp);
6892 case BUILT_IN_ADJUST_TRAMPOLINE:
6893 return expand_builtin_adjust_trampoline (exp);
6896 case BUILT_IN_EXECL:
6897 case BUILT_IN_EXECV:
6898 case BUILT_IN_EXECLP:
6899 case BUILT_IN_EXECLE:
6900 case BUILT_IN_EXECVP:
6901 case BUILT_IN_EXECVE:
6902 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6907 case BUILT_IN_FETCH_AND_ADD_1:
6908 case BUILT_IN_FETCH_AND_ADD_2:
6909 case BUILT_IN_FETCH_AND_ADD_4:
6910 case BUILT_IN_FETCH_AND_ADD_8:
6911 case BUILT_IN_FETCH_AND_ADD_16:
6912 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6913 target = expand_builtin_sync_operation (mode, exp, PLUS,
6914 false, target, ignore);
6919 case BUILT_IN_FETCH_AND_SUB_1:
6920 case BUILT_IN_FETCH_AND_SUB_2:
6921 case BUILT_IN_FETCH_AND_SUB_4:
6922 case BUILT_IN_FETCH_AND_SUB_8:
6923 case BUILT_IN_FETCH_AND_SUB_16:
6924 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6925 target = expand_builtin_sync_operation (mode, exp, MINUS,
6926 false, target, ignore);
6931 case BUILT_IN_FETCH_AND_OR_1:
6932 case BUILT_IN_FETCH_AND_OR_2:
6933 case BUILT_IN_FETCH_AND_OR_4:
6934 case BUILT_IN_FETCH_AND_OR_8:
6935 case BUILT_IN_FETCH_AND_OR_16:
6936 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6937 target = expand_builtin_sync_operation (mode, exp, IOR,
6938 false, target, ignore);
6943 case BUILT_IN_FETCH_AND_AND_1:
6944 case BUILT_IN_FETCH_AND_AND_2:
6945 case BUILT_IN_FETCH_AND_AND_4:
6946 case BUILT_IN_FETCH_AND_AND_8:
6947 case BUILT_IN_FETCH_AND_AND_16:
6948 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6949 target = expand_builtin_sync_operation (mode, exp, AND,
6950 false, target, ignore);
6955 case BUILT_IN_FETCH_AND_XOR_1:
6956 case BUILT_IN_FETCH_AND_XOR_2:
6957 case BUILT_IN_FETCH_AND_XOR_4:
6958 case BUILT_IN_FETCH_AND_XOR_8:
6959 case BUILT_IN_FETCH_AND_XOR_16:
6960 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6961 target = expand_builtin_sync_operation (mode, exp, XOR,
6962 false, target, ignore);
6967 case BUILT_IN_FETCH_AND_NAND_1:
6968 case BUILT_IN_FETCH_AND_NAND_2:
6969 case BUILT_IN_FETCH_AND_NAND_4:
6970 case BUILT_IN_FETCH_AND_NAND_8:
6971 case BUILT_IN_FETCH_AND_NAND_16:
6972 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6973 target = expand_builtin_sync_operation (mode, exp, NOT,
6974 false, target, ignore);
6979 case BUILT_IN_ADD_AND_FETCH_1:
6980 case BUILT_IN_ADD_AND_FETCH_2:
6981 case BUILT_IN_ADD_AND_FETCH_4:
6982 case BUILT_IN_ADD_AND_FETCH_8:
6983 case BUILT_IN_ADD_AND_FETCH_16:
6984 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6985 target = expand_builtin_sync_operation (mode, exp, PLUS,
6986 true, target, ignore);
6991 case BUILT_IN_SUB_AND_FETCH_1:
6992 case BUILT_IN_SUB_AND_FETCH_2:
6993 case BUILT_IN_SUB_AND_FETCH_4:
6994 case BUILT_IN_SUB_AND_FETCH_8:
6995 case BUILT_IN_SUB_AND_FETCH_16:
6996 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6997 target = expand_builtin_sync_operation (mode, exp, MINUS,
6998 true, target, ignore);
7003 case BUILT_IN_OR_AND_FETCH_1:
7004 case BUILT_IN_OR_AND_FETCH_2:
7005 case BUILT_IN_OR_AND_FETCH_4:
7006 case BUILT_IN_OR_AND_FETCH_8:
7007 case BUILT_IN_OR_AND_FETCH_16:
7008 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
7009 target = expand_builtin_sync_operation (mode, exp, IOR,
7010 true, target, ignore);
7015 case BUILT_IN_AND_AND_FETCH_1:
7016 case BUILT_IN_AND_AND_FETCH_2:
7017 case BUILT_IN_AND_AND_FETCH_4:
7018 case BUILT_IN_AND_AND_FETCH_8:
7019 case BUILT_IN_AND_AND_FETCH_16:
7020 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
7021 target = expand_builtin_sync_operation (mode, exp, AND,
7022 true, target, ignore);
7027 case BUILT_IN_XOR_AND_FETCH_1:
7028 case BUILT_IN_XOR_AND_FETCH_2:
7029 case BUILT_IN_XOR_AND_FETCH_4:
7030 case BUILT_IN_XOR_AND_FETCH_8:
7031 case BUILT_IN_XOR_AND_FETCH_16:
7032 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
7033 target = expand_builtin_sync_operation (mode, exp, XOR,
7034 true, target, ignore);
7039 case BUILT_IN_NAND_AND_FETCH_1:
7040 case BUILT_IN_NAND_AND_FETCH_2:
7041 case BUILT_IN_NAND_AND_FETCH_4:
7042 case BUILT_IN_NAND_AND_FETCH_8:
7043 case BUILT_IN_NAND_AND_FETCH_16:
7044 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
7045 target = expand_builtin_sync_operation (mode, exp, NOT,
7046 true, target, ignore);
7051 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
7052 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
7053 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
7054 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
7055 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
7056 if (mode == VOIDmode)
7057 mode = TYPE_MODE (boolean_type_node);
7058 if (!target || !register_operand (target, mode))
7059 target = gen_reg_rtx (mode);
7061 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
7062 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7067 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
7068 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
7069 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
7070 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
7071 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
7072 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
7073 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7078 case BUILT_IN_LOCK_TEST_AND_SET_1:
7079 case BUILT_IN_LOCK_TEST_AND_SET_2:
7080 case BUILT_IN_LOCK_TEST_AND_SET_4:
7081 case BUILT_IN_LOCK_TEST_AND_SET_8:
7082 case BUILT_IN_LOCK_TEST_AND_SET_16:
7083 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
7084 target = expand_builtin_lock_test_and_set (mode, exp, target);
7089 case BUILT_IN_LOCK_RELEASE_1:
7090 case BUILT_IN_LOCK_RELEASE_2:
7091 case BUILT_IN_LOCK_RELEASE_4:
7092 case BUILT_IN_LOCK_RELEASE_8:
7093 case BUILT_IN_LOCK_RELEASE_16:
7094 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
7095 expand_builtin_lock_release (mode, exp);
7098 case BUILT_IN_SYNCHRONIZE:
7099 expand_builtin_synchronize ();
7102 case BUILT_IN_OBJECT_SIZE:
7103 return expand_builtin_object_size (exp);
7105 case BUILT_IN_MEMCPY_CHK:
7106 case BUILT_IN_MEMPCPY_CHK:
7107 case BUILT_IN_MEMMOVE_CHK:
7108 case BUILT_IN_MEMSET_CHK:
7109 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7114 case BUILT_IN_STRCPY_CHK:
7115 case BUILT_IN_STPCPY_CHK:
7116 case BUILT_IN_STRNCPY_CHK:
7117 case BUILT_IN_STRCAT_CHK:
7118 case BUILT_IN_STRNCAT_CHK:
7119 case BUILT_IN_SNPRINTF_CHK:
7120 case BUILT_IN_VSNPRINTF_CHK:
7121 maybe_emit_chk_warning (exp, fcode);
7124 case BUILT_IN_SPRINTF_CHK:
7125 case BUILT_IN_VSPRINTF_CHK:
7126 maybe_emit_sprintf_chk_warning (exp, fcode);
7130 maybe_emit_free_warning (exp);
7133 default: /* just do library call, if unknown builtin */
7137 /* The switch statement above can drop through to cause the function
7138 to be called normally. */
7139 return expand_call (exp, target, ignore);
7142 /* Determine whether a tree node represents a call to a built-in
7143 function. If the tree T is a call to a built-in function with
7144 the right number of arguments of the appropriate types, return
7145 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7146 Otherwise the return value is END_BUILTINS. */
7148 enum built_in_function
7149 builtin_mathfn_code (const_tree t)
7151 const_tree fndecl, arg, parmlist;
7152 const_tree argtype, parmtype;
7153 const_call_expr_arg_iterator iter;
7155 if (TREE_CODE (t) != CALL_EXPR
7156 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7157 return END_BUILTINS;
7159 fndecl = get_callee_fndecl (t);
7160 if (fndecl == NULL_TREE
7161 || TREE_CODE (fndecl) != FUNCTION_DECL
7162 || ! DECL_BUILT_IN (fndecl)
7163 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7164 return END_BUILTINS;
7166 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7167 init_const_call_expr_arg_iterator (t, &iter);
7168 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7170 /* If a function doesn't take a variable number of arguments,
7171 the last element in the list will have type `void'. */
7172 parmtype = TREE_VALUE (parmlist);
7173 if (VOID_TYPE_P (parmtype))
7175 if (more_const_call_expr_args_p (&iter))
7176 return END_BUILTINS;
7177 return DECL_FUNCTION_CODE (fndecl);
7180 if (! more_const_call_expr_args_p (&iter))
7181 return END_BUILTINS;
7183 arg = next_const_call_expr_arg (&iter);
7184 argtype = TREE_TYPE (arg);
7186 if (SCALAR_FLOAT_TYPE_P (parmtype))
7188 if (! SCALAR_FLOAT_TYPE_P (argtype))
7189 return END_BUILTINS;
7191 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7193 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7194 return END_BUILTINS;
7196 else if (POINTER_TYPE_P (parmtype))
7198 if (! POINTER_TYPE_P (argtype))
7199 return END_BUILTINS;
7201 else if (INTEGRAL_TYPE_P (parmtype))
7203 if (! INTEGRAL_TYPE_P (argtype))
7204 return END_BUILTINS;
7207 return END_BUILTINS;
7210 /* Variable-length argument list. */
7211 return DECL_FUNCTION_CODE (fndecl);
7214 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7215 evaluate to a constant. */
7218 fold_builtin_constant_p (tree arg)
7220 /* We return 1 for a numeric type that's known to be a constant
7221 value at compile-time or for an aggregate type that's a
7222 literal constant. */
7225 /* If we know this is a constant, emit the constant of one. */
7226 if (CONSTANT_CLASS_P (arg)
7227 || (TREE_CODE (arg) == CONSTRUCTOR
7228 && TREE_CONSTANT (arg)))
7229 return integer_one_node;
7230 if (TREE_CODE (arg) == ADDR_EXPR)
7232 tree op = TREE_OPERAND (arg, 0);
7233 if (TREE_CODE (op) == STRING_CST
7234 || (TREE_CODE (op) == ARRAY_REF
7235 && integer_zerop (TREE_OPERAND (op, 1))
7236 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7237 return integer_one_node;
7240 /* If this expression has side effects, show we don't know it to be a
7241 constant. Likewise if it's a pointer or aggregate type since in
7242 those case we only want literals, since those are only optimized
7243 when generating RTL, not later.
7244 And finally, if we are compiling an initializer, not code, we
7245 need to return a definite result now; there's not going to be any
7246 more optimization done. */
7247 if (TREE_SIDE_EFFECTS (arg)
7248 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7249 || POINTER_TYPE_P (TREE_TYPE (arg))
7251 || folding_initializer)
7252 return integer_zero_node;
7257 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7258 return it as a truthvalue. */
7261 build_builtin_expect_predicate (tree pred, tree expected)
7263 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7265 fn = built_in_decls[BUILT_IN_EXPECT];
7266 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7267 ret_type = TREE_TYPE (TREE_TYPE (fn));
7268 pred_type = TREE_VALUE (arg_types);
7269 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7271 pred = fold_convert (pred_type, pred);
7272 expected = fold_convert (expected_type, expected);
7273 call_expr = build_call_expr (fn, 2, pred, expected);
7275 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7276 build_int_cst (ret_type, 0));
7279 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7280 NULL_TREE if no simplification is possible. */
7283 fold_builtin_expect (tree arg0, tree arg1)
7286 enum tree_code code;
7288 /* If this is a builtin_expect within a builtin_expect keep the
7289 inner one. See through a comparison against a constant. It
7290 might have been added to create a thruthvalue. */
7292 if (COMPARISON_CLASS_P (inner)
7293 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7294 inner = TREE_OPERAND (inner, 0);
7296 if (TREE_CODE (inner) == CALL_EXPR
7297 && (fndecl = get_callee_fndecl (inner))
7298 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7299 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7302 /* Distribute the expected value over short-circuiting operators.
7303 See through the cast from truthvalue_type_node to long. */
7305 while (TREE_CODE (inner) == NOP_EXPR
7306 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7307 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7308 inner = TREE_OPERAND (inner, 0);
7310 code = TREE_CODE (inner);
7311 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7313 tree op0 = TREE_OPERAND (inner, 0);
7314 tree op1 = TREE_OPERAND (inner, 1);
7316 op0 = build_builtin_expect_predicate (op0, arg1);
7317 op1 = build_builtin_expect_predicate (op1, arg1);
7318 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7320 return fold_convert (TREE_TYPE (arg0), inner);
7323 /* If the argument isn't invariant then there's nothing else we can do. */
7324 if (!TREE_CONSTANT (arg0))
7327 /* If we expect that a comparison against the argument will fold to
7328 a constant return the constant. In practice, this means a true
7329 constant or the address of a non-weak symbol. */
7332 if (TREE_CODE (inner) == ADDR_EXPR)
7336 inner = TREE_OPERAND (inner, 0);
7338 while (TREE_CODE (inner) == COMPONENT_REF
7339 || TREE_CODE (inner) == ARRAY_REF);
7340 if ((TREE_CODE (inner) == VAR_DECL
7341 || TREE_CODE (inner) == FUNCTION_DECL)
7342 && DECL_WEAK (inner))
7346 /* Otherwise, ARG0 already has the proper type for the return value. */
7350 /* Fold a call to __builtin_classify_type with argument ARG. */
7353 fold_builtin_classify_type (tree arg)
7356 return build_int_cst (NULL_TREE, no_type_class);
7358 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7361 /* Fold a call to __builtin_strlen with argument ARG. */
7364 fold_builtin_strlen (tree arg)
7366 if (!validate_arg (arg, POINTER_TYPE))
7370 tree len = c_strlen (arg, 0);
7374 /* Convert from the internal "sizetype" type to "size_t". */
7376 len = fold_convert (size_type_node, len);
7384 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7387 fold_builtin_inf (tree type, int warn)
7389 REAL_VALUE_TYPE real;
7391 /* __builtin_inff is intended to be usable to define INFINITY on all
7392 targets. If an infinity is not available, INFINITY expands "to a
7393 positive constant of type float that overflows at translation
7394 time", footnote "In this case, using INFINITY will violate the
7395 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7396 Thus we pedwarn to ensure this constraint violation is
7398 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7399 pedwarn (input_location, 0, "target format does not support infinity");
7402 return build_real (type, real);
7405 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7408 fold_builtin_nan (tree arg, tree type, int quiet)
7410 REAL_VALUE_TYPE real;
7413 if (!validate_arg (arg, POINTER_TYPE))
7415 str = c_getstr (arg);
7419 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7422 return build_real (type, real);
7425 /* Return true if the floating point expression T has an integer value.
7426 We also allow +Inf, -Inf and NaN to be considered integer values. */
7429 integer_valued_real_p (tree t)
7431 switch (TREE_CODE (t))
7438 return integer_valued_real_p (TREE_OPERAND (t, 0));
7443 return integer_valued_real_p (TREE_OPERAND (t, 1));
7450 return integer_valued_real_p (TREE_OPERAND (t, 0))
7451 && integer_valued_real_p (TREE_OPERAND (t, 1));
7454 return integer_valued_real_p (TREE_OPERAND (t, 1))
7455 && integer_valued_real_p (TREE_OPERAND (t, 2));
7458 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7462 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7463 if (TREE_CODE (type) == INTEGER_TYPE)
7465 if (TREE_CODE (type) == REAL_TYPE)
7466 return integer_valued_real_p (TREE_OPERAND (t, 0));
7471 switch (builtin_mathfn_code (t))
7473 CASE_FLT_FN (BUILT_IN_CEIL):
7474 CASE_FLT_FN (BUILT_IN_FLOOR):
7475 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7476 CASE_FLT_FN (BUILT_IN_RINT):
7477 CASE_FLT_FN (BUILT_IN_ROUND):
7478 CASE_FLT_FN (BUILT_IN_TRUNC):
7481 CASE_FLT_FN (BUILT_IN_FMIN):
7482 CASE_FLT_FN (BUILT_IN_FMAX):
7483 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7484 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7497 /* FNDECL is assumed to be a builtin where truncation can be propagated
7498 across (for instance floor((double)f) == (double)floorf (f).
7499 Do the transformation for a call with argument ARG. */
7502 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7504 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7506 if (!validate_arg (arg, REAL_TYPE))
7509 /* Integer rounding functions are idempotent. */
7510 if (fcode == builtin_mathfn_code (arg))
7513 /* If argument is already integer valued, and we don't need to worry
7514 about setting errno, there's no need to perform rounding. */
7515 if (! flag_errno_math && integer_valued_real_p (arg))
7520 tree arg0 = strip_float_extensions (arg);
7521 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7522 tree newtype = TREE_TYPE (arg0);
7525 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7526 && (decl = mathfn_built_in (newtype, fcode)))
7527 return fold_convert (ftype,
7528 build_call_expr (decl, 1,
7529 fold_convert (newtype, arg0)));
7534 /* FNDECL is assumed to be builtin which can narrow the FP type of
7535 the argument, for instance lround((double)f) -> lroundf (f).
7536 Do the transformation for a call with argument ARG. */
7539 fold_fixed_mathfn (tree fndecl, tree arg)
7541 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7543 if (!validate_arg (arg, REAL_TYPE))
7546 /* If argument is already integer valued, and we don't need to worry
7547 about setting errno, there's no need to perform rounding. */
7548 if (! flag_errno_math && integer_valued_real_p (arg))
7549 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7553 tree ftype = TREE_TYPE (arg);
7554 tree arg0 = strip_float_extensions (arg);
7555 tree newtype = TREE_TYPE (arg0);
7558 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7559 && (decl = mathfn_built_in (newtype, fcode)))
7560 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7563 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7564 sizeof (long long) == sizeof (long). */
7565 if (TYPE_PRECISION (long_long_integer_type_node)
7566 == TYPE_PRECISION (long_integer_type_node))
7568 tree newfn = NULL_TREE;
7571 CASE_FLT_FN (BUILT_IN_LLCEIL):
7572 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7575 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7576 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7579 CASE_FLT_FN (BUILT_IN_LLROUND):
7580 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7583 CASE_FLT_FN (BUILT_IN_LLRINT):
7584 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7593 tree newcall = build_call_expr(newfn, 1, arg);
7594 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7601 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7602 return type. Return NULL_TREE if no simplification can be made. */
7605 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7609 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7610 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7613 /* Calculate the result when the argument is a constant. */
7614 if (TREE_CODE (arg) == COMPLEX_CST
7615 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7619 if (TREE_CODE (arg) == COMPLEX_EXPR)
7621 tree real = TREE_OPERAND (arg, 0);
7622 tree imag = TREE_OPERAND (arg, 1);
7624 /* If either part is zero, cabs is fabs of the other. */
7625 if (real_zerop (real))
7626 return fold_build1 (ABS_EXPR, type, imag);
7627 if (real_zerop (imag))
7628 return fold_build1 (ABS_EXPR, type, real);
7630 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7631 if (flag_unsafe_math_optimizations
7632 && operand_equal_p (real, imag, OEP_PURE_SAME))
7634 const REAL_VALUE_TYPE sqrt2_trunc
7635 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7637 return fold_build2 (MULT_EXPR, type,
7638 fold_build1 (ABS_EXPR, type, real),
7639 build_real (type, sqrt2_trunc));
7643 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7644 if (TREE_CODE (arg) == NEGATE_EXPR
7645 || TREE_CODE (arg) == CONJ_EXPR)
7646 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7648 /* Don't do this when optimizing for size. */
7649 if (flag_unsafe_math_optimizations
7650 && optimize && optimize_function_for_speed_p (cfun))
7652 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7654 if (sqrtfn != NULL_TREE)
7656 tree rpart, ipart, result;
7658 arg = builtin_save_expr (arg);
7660 rpart = fold_build1 (REALPART_EXPR, type, arg);
7661 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7663 rpart = builtin_save_expr (rpart);
7664 ipart = builtin_save_expr (ipart);
7666 result = fold_build2 (PLUS_EXPR, type,
7667 fold_build2 (MULT_EXPR, type,
7669 fold_build2 (MULT_EXPR, type,
7672 return build_call_expr (sqrtfn, 1, result);
7679 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7680 Return NULL_TREE if no simplification can be made. */
7683 fold_builtin_sqrt (tree arg, tree type)
7686 enum built_in_function fcode;
7689 if (!validate_arg (arg, REAL_TYPE))
7692 /* Calculate the result when the argument is a constant. */
7693 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7696 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7697 fcode = builtin_mathfn_code (arg);
7698 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7700 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7701 arg = fold_build2 (MULT_EXPR, type,
7702 CALL_EXPR_ARG (arg, 0),
7703 build_real (type, dconsthalf));
7704 return build_call_expr (expfn, 1, arg);
7707 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7708 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7710 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7714 tree arg0 = CALL_EXPR_ARG (arg, 0);
7716 /* The inner root was either sqrt or cbrt. */
7717 /* This was a conditional expression but it triggered a bug
7719 REAL_VALUE_TYPE dconstroot;
7720 if (BUILTIN_SQRT_P (fcode))
7721 dconstroot = dconsthalf;
7723 dconstroot = dconst_third ();
7725 /* Adjust for the outer root. */
7726 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7727 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7728 tree_root = build_real (type, dconstroot);
7729 return build_call_expr (powfn, 2, arg0, tree_root);
7733 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7734 if (flag_unsafe_math_optimizations
7735 && (fcode == BUILT_IN_POW
7736 || fcode == BUILT_IN_POWF
7737 || fcode == BUILT_IN_POWL))
7739 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7740 tree arg0 = CALL_EXPR_ARG (arg, 0);
7741 tree arg1 = CALL_EXPR_ARG (arg, 1);
7743 if (!tree_expr_nonnegative_p (arg0))
7744 arg0 = build1 (ABS_EXPR, type, arg0);
7745 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7746 build_real (type, dconsthalf));
7747 return build_call_expr (powfn, 2, arg0, narg1);
7753 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7754 Return NULL_TREE if no simplification can be made. */
7757 fold_builtin_cbrt (tree arg, tree type)
7759 const enum built_in_function fcode = builtin_mathfn_code (arg);
7762 if (!validate_arg (arg, REAL_TYPE))
7765 /* Calculate the result when the argument is a constant. */
7766 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7769 if (flag_unsafe_math_optimizations)
7771 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7772 if (BUILTIN_EXPONENT_P (fcode))
7774 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7775 const REAL_VALUE_TYPE third_trunc =
7776 real_value_truncate (TYPE_MODE (type), dconst_third ());
7777 arg = fold_build2 (MULT_EXPR, type,
7778 CALL_EXPR_ARG (arg, 0),
7779 build_real (type, third_trunc));
7780 return build_call_expr (expfn, 1, arg);
7783 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7784 if (BUILTIN_SQRT_P (fcode))
7786 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7790 tree arg0 = CALL_EXPR_ARG (arg, 0);
7792 REAL_VALUE_TYPE dconstroot = dconst_third ();
7794 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7795 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7796 tree_root = build_real (type, dconstroot);
7797 return build_call_expr (powfn, 2, arg0, tree_root);
7801 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7802 if (BUILTIN_CBRT_P (fcode))
7804 tree arg0 = CALL_EXPR_ARG (arg, 0);
7805 if (tree_expr_nonnegative_p (arg0))
7807 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7812 REAL_VALUE_TYPE dconstroot;
7814 real_arithmetic (&dconstroot, MULT_EXPR,
7815 dconst_third_ptr (), dconst_third_ptr ());
7816 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7817 tree_root = build_real (type, dconstroot);
7818 return build_call_expr (powfn, 2, arg0, tree_root);
7823 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7824 if (fcode == BUILT_IN_POW
7825 || fcode == BUILT_IN_POWF
7826 || fcode == BUILT_IN_POWL)
7828 tree arg00 = CALL_EXPR_ARG (arg, 0);
7829 tree arg01 = CALL_EXPR_ARG (arg, 1);
7830 if (tree_expr_nonnegative_p (arg00))
7832 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7833 const REAL_VALUE_TYPE dconstroot
7834 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7835 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7836 build_real (type, dconstroot));
7837 return build_call_expr (powfn, 2, arg00, narg01);
7844 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7845 TYPE is the type of the return value. Return NULL_TREE if no
7846 simplification can be made. */
7849 fold_builtin_cos (tree arg, tree type, tree fndecl)
7853 if (!validate_arg (arg, REAL_TYPE))
7856 /* Calculate the result when the argument is a constant. */
7857 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7860 /* Optimize cos(-x) into cos (x). */
7861 if ((narg = fold_strip_sign_ops (arg)))
7862 return build_call_expr (fndecl, 1, narg);
7867 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7868 Return NULL_TREE if no simplification can be made. */
7871 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7873 if (validate_arg (arg, REAL_TYPE))
7877 /* Calculate the result when the argument is a constant. */
7878 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7881 /* Optimize cosh(-x) into cosh (x). */
7882 if ((narg = fold_strip_sign_ops (arg)))
7883 return build_call_expr (fndecl, 1, narg);
7889 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7890 Return NULL_TREE if no simplification can be made. */
7893 fold_builtin_tan (tree arg, tree type)
7895 enum built_in_function fcode;
7898 if (!validate_arg (arg, REAL_TYPE))
7901 /* Calculate the result when the argument is a constant. */
7902 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7905 /* Optimize tan(atan(x)) = x. */
7906 fcode = builtin_mathfn_code (arg);
7907 if (flag_unsafe_math_optimizations
7908 && (fcode == BUILT_IN_ATAN
7909 || fcode == BUILT_IN_ATANF
7910 || fcode == BUILT_IN_ATANL))
7911 return CALL_EXPR_ARG (arg, 0);
7916 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7917 NULL_TREE if no simplification can be made. */
7920 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7925 if (!validate_arg (arg0, REAL_TYPE)
7926 || !validate_arg (arg1, POINTER_TYPE)
7927 || !validate_arg (arg2, POINTER_TYPE))
7930 type = TREE_TYPE (arg0);
7932 /* Calculate the result when the argument is a constant. */
7933 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7936 /* Canonicalize sincos to cexpi. */
7937 if (!TARGET_C99_FUNCTIONS)
7939 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7943 call = build_call_expr (fn, 1, arg0);
7944 call = builtin_save_expr (call);
7946 return build2 (COMPOUND_EXPR, void_type_node,
7947 build2 (MODIFY_EXPR, void_type_node,
7948 build_fold_indirect_ref (arg1),
7949 build1 (IMAGPART_EXPR, type, call)),
7950 build2 (MODIFY_EXPR, void_type_node,
7951 build_fold_indirect_ref (arg2),
7952 build1 (REALPART_EXPR, type, call)));
7955 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7956 NULL_TREE if no simplification can be made. */
7959 fold_builtin_cexp (tree arg0, tree type)
7962 tree realp, imagp, ifn;
7964 if (!validate_arg (arg0, COMPLEX_TYPE))
7967 rtype = TREE_TYPE (TREE_TYPE (arg0));
7969 /* In case we can figure out the real part of arg0 and it is constant zero
7971 if (!TARGET_C99_FUNCTIONS)
7973 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7977 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7978 && real_zerop (realp))
7980 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7981 return build_call_expr (ifn, 1, narg);
7984 /* In case we can easily decompose real and imaginary parts split cexp
7985 to exp (r) * cexpi (i). */
7986 if (flag_unsafe_math_optimizations
7989 tree rfn, rcall, icall;
7991 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7995 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7999 icall = build_call_expr (ifn, 1, imagp);
8000 icall = builtin_save_expr (icall);
8001 rcall = build_call_expr (rfn, 1, realp);
8002 rcall = builtin_save_expr (rcall);
8003 return fold_build2 (COMPLEX_EXPR, type,
8004 fold_build2 (MULT_EXPR, rtype,
8006 fold_build1 (REALPART_EXPR, rtype, icall)),
8007 fold_build2 (MULT_EXPR, rtype,
8009 fold_build1 (IMAGPART_EXPR, rtype, icall)));
8015 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8016 Return NULL_TREE if no simplification can be made. */
8019 fold_builtin_trunc (tree fndecl, tree arg)
8021 if (!validate_arg (arg, REAL_TYPE))
8024 /* Optimize trunc of constant value. */
8025 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8027 REAL_VALUE_TYPE r, x;
8028 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8030 x = TREE_REAL_CST (arg);
8031 real_trunc (&r, TYPE_MODE (type), &x);
8032 return build_real (type, r);
8035 return fold_trunc_transparent_mathfn (fndecl, arg);
8038 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
8039 Return NULL_TREE if no simplification can be made. */
8042 fold_builtin_floor (tree fndecl, tree arg)
8044 if (!validate_arg (arg, REAL_TYPE))
8047 /* Optimize floor of constant value. */
8048 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8052 x = TREE_REAL_CST (arg);
8053 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8055 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8058 real_floor (&r, TYPE_MODE (type), &x);
8059 return build_real (type, r);
8063 /* Fold floor (x) where x is nonnegative to trunc (x). */
8064 if (tree_expr_nonnegative_p (arg))
8066 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8068 return build_call_expr (truncfn, 1, arg);
8071 return fold_trunc_transparent_mathfn (fndecl, arg);
8074 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8075 Return NULL_TREE if no simplification can be made. */
8078 fold_builtin_ceil (tree fndecl, tree arg)
8080 if (!validate_arg (arg, REAL_TYPE))
8083 /* Optimize ceil of constant value. */
8084 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8088 x = TREE_REAL_CST (arg);
8089 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8091 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8094 real_ceil (&r, TYPE_MODE (type), &x);
8095 return build_real (type, r);
8099 return fold_trunc_transparent_mathfn (fndecl, arg);
8102 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8103 Return NULL_TREE if no simplification can be made. */
8106 fold_builtin_round (tree fndecl, tree arg)
8108 if (!validate_arg (arg, REAL_TYPE))
8111 /* Optimize round of constant value. */
8112 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8116 x = TREE_REAL_CST (arg);
8117 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8119 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8122 real_round (&r, TYPE_MODE (type), &x);
8123 return build_real (type, r);
8127 return fold_trunc_transparent_mathfn (fndecl, arg);
8130 /* Fold function call to builtin lround, lroundf or lroundl (or the
8131 corresponding long long versions) and other rounding functions. ARG
8132 is the argument to the call. Return NULL_TREE if no simplification
8136 fold_builtin_int_roundingfn (tree fndecl, tree arg)
8138 if (!validate_arg (arg, REAL_TYPE))
8141 /* Optimize lround of constant value. */
8142 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8144 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8146 if (real_isfinite (&x))
8148 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8149 tree ftype = TREE_TYPE (arg);
8150 unsigned HOST_WIDE_INT lo2;
8151 HOST_WIDE_INT hi, lo;
8154 switch (DECL_FUNCTION_CODE (fndecl))
8156 CASE_FLT_FN (BUILT_IN_LFLOOR):
8157 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8158 real_floor (&r, TYPE_MODE (ftype), &x);
8161 CASE_FLT_FN (BUILT_IN_LCEIL):
8162 CASE_FLT_FN (BUILT_IN_LLCEIL):
8163 real_ceil (&r, TYPE_MODE (ftype), &x);
8166 CASE_FLT_FN (BUILT_IN_LROUND):
8167 CASE_FLT_FN (BUILT_IN_LLROUND):
8168 real_round (&r, TYPE_MODE (ftype), &x);
8175 REAL_VALUE_TO_INT (&lo, &hi, r);
8176 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8177 return build_int_cst_wide (itype, lo2, hi);
8181 switch (DECL_FUNCTION_CODE (fndecl))
8183 CASE_FLT_FN (BUILT_IN_LFLOOR):
8184 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8185 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8186 if (tree_expr_nonnegative_p (arg))
8187 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8193 return fold_fixed_mathfn (fndecl, arg);
8196 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8197 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8198 the argument to the call. Return NULL_TREE if no simplification can
8202 fold_builtin_bitop (tree fndecl, tree arg)
8204 if (!validate_arg (arg, INTEGER_TYPE))
8207 /* Optimize for constant argument. */
8208 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8210 HOST_WIDE_INT hi, width, result;
8211 unsigned HOST_WIDE_INT lo;
8214 type = TREE_TYPE (arg);
8215 width = TYPE_PRECISION (type);
8216 lo = TREE_INT_CST_LOW (arg);
8218 /* Clear all the bits that are beyond the type's precision. */
8219 if (width > HOST_BITS_PER_WIDE_INT)
8221 hi = TREE_INT_CST_HIGH (arg);
8222 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8223 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8228 if (width < HOST_BITS_PER_WIDE_INT)
8229 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8232 switch (DECL_FUNCTION_CODE (fndecl))
8234 CASE_INT_FN (BUILT_IN_FFS):
8236 result = exact_log2 (lo & -lo) + 1;
8238 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8243 CASE_INT_FN (BUILT_IN_CLZ):
8245 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8247 result = width - floor_log2 (lo) - 1;
8248 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8252 CASE_INT_FN (BUILT_IN_CTZ):
8254 result = exact_log2 (lo & -lo);
8256 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8257 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8261 CASE_INT_FN (BUILT_IN_POPCOUNT):
8264 result++, lo &= lo - 1;
8266 result++, hi &= hi - 1;
8269 CASE_INT_FN (BUILT_IN_PARITY):
8272 result++, lo &= lo - 1;
8274 result++, hi &= hi - 1;
8282 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8288 /* Fold function call to builtin_bswap and the long and long long
8289 variants. Return NULL_TREE if no simplification can be made. */
8291 fold_builtin_bswap (tree fndecl, tree arg)
8293 if (! validate_arg (arg, INTEGER_TYPE))
8296 /* Optimize constant value. */
8297 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8299 HOST_WIDE_INT hi, width, r_hi = 0;
8300 unsigned HOST_WIDE_INT lo, r_lo = 0;
8303 type = TREE_TYPE (arg);
8304 width = TYPE_PRECISION (type);
8305 lo = TREE_INT_CST_LOW (arg);
8306 hi = TREE_INT_CST_HIGH (arg);
8308 switch (DECL_FUNCTION_CODE (fndecl))
8310 case BUILT_IN_BSWAP32:
8311 case BUILT_IN_BSWAP64:
8315 for (s = 0; s < width; s += 8)
8317 int d = width - s - 8;
8318 unsigned HOST_WIDE_INT byte;
8320 if (s < HOST_BITS_PER_WIDE_INT)
8321 byte = (lo >> s) & 0xff;
8323 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8325 if (d < HOST_BITS_PER_WIDE_INT)
8328 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8338 if (width < HOST_BITS_PER_WIDE_INT)
8339 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8341 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8347 /* A subroutine of fold_builtin to fold the various logarithmic
8348 functions. Return NULL_TREE if no simplification can me made.
8349 FUNC is the corresponding MPFR logarithm function. */
8352 fold_builtin_logarithm (tree fndecl, tree arg,
8353 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8355 if (validate_arg (arg, REAL_TYPE))
8357 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8359 const enum built_in_function fcode = builtin_mathfn_code (arg);
8361 /* Calculate the result when the argument is a constant. */
8362 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8365 /* Special case, optimize logN(expN(x)) = x. */
8366 if (flag_unsafe_math_optimizations
8367 && ((func == mpfr_log
8368 && (fcode == BUILT_IN_EXP
8369 || fcode == BUILT_IN_EXPF
8370 || fcode == BUILT_IN_EXPL))
8371 || (func == mpfr_log2
8372 && (fcode == BUILT_IN_EXP2
8373 || fcode == BUILT_IN_EXP2F
8374 || fcode == BUILT_IN_EXP2L))
8375 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8376 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8378 /* Optimize logN(func()) for various exponential functions. We
8379 want to determine the value "x" and the power "exponent" in
8380 order to transform logN(x**exponent) into exponent*logN(x). */
8381 if (flag_unsafe_math_optimizations)
8383 tree exponent = 0, x = 0;
8387 CASE_FLT_FN (BUILT_IN_EXP):
8388 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8389 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8391 exponent = CALL_EXPR_ARG (arg, 0);
8393 CASE_FLT_FN (BUILT_IN_EXP2):
8394 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8395 x = build_real (type, dconst2);
8396 exponent = CALL_EXPR_ARG (arg, 0);
8398 CASE_FLT_FN (BUILT_IN_EXP10):
8399 CASE_FLT_FN (BUILT_IN_POW10):
8400 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8402 REAL_VALUE_TYPE dconst10;
8403 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8404 x = build_real (type, dconst10);
8406 exponent = CALL_EXPR_ARG (arg, 0);
8408 CASE_FLT_FN (BUILT_IN_SQRT):
8409 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8410 x = CALL_EXPR_ARG (arg, 0);
8411 exponent = build_real (type, dconsthalf);
8413 CASE_FLT_FN (BUILT_IN_CBRT):
8414 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8415 x = CALL_EXPR_ARG (arg, 0);
8416 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8419 CASE_FLT_FN (BUILT_IN_POW):
8420 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8421 x = CALL_EXPR_ARG (arg, 0);
8422 exponent = CALL_EXPR_ARG (arg, 1);
8428 /* Now perform the optimization. */
8431 tree logfn = build_call_expr (fndecl, 1, x);
8432 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8440 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8441 NULL_TREE if no simplification can be made. */
8444 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8446 tree res, narg0, narg1;
8448 if (!validate_arg (arg0, REAL_TYPE)
8449 || !validate_arg (arg1, REAL_TYPE))
8452 /* Calculate the result when the argument is a constant. */
8453 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8456 /* If either argument to hypot has a negate or abs, strip that off.
8457 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8458 narg0 = fold_strip_sign_ops (arg0);
8459 narg1 = fold_strip_sign_ops (arg1);
8462 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8463 narg1 ? narg1 : arg1);
8466 /* If either argument is zero, hypot is fabs of the other. */
8467 if (real_zerop (arg0))
8468 return fold_build1 (ABS_EXPR, type, arg1);
8469 else if (real_zerop (arg1))
8470 return fold_build1 (ABS_EXPR, type, arg0);
8472 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8473 if (flag_unsafe_math_optimizations
8474 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8476 const REAL_VALUE_TYPE sqrt2_trunc
8477 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8478 return fold_build2 (MULT_EXPR, type,
8479 fold_build1 (ABS_EXPR, type, arg0),
8480 build_real (type, sqrt2_trunc));
8487 /* Fold a builtin function call to pow, powf, or powl. Return
8488 NULL_TREE if no simplification can be made. */
8490 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8494 if (!validate_arg (arg0, REAL_TYPE)
8495 || !validate_arg (arg1, REAL_TYPE))
8498 /* Calculate the result when the argument is a constant. */
8499 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8502 /* Optimize pow(1.0,y) = 1.0. */
8503 if (real_onep (arg0))
8504 return omit_one_operand (type, build_real (type, dconst1), arg1);
8506 if (TREE_CODE (arg1) == REAL_CST
8507 && !TREE_OVERFLOW (arg1))
8509 REAL_VALUE_TYPE cint;
8513 c = TREE_REAL_CST (arg1);
8515 /* Optimize pow(x,0.0) = 1.0. */
8516 if (REAL_VALUES_EQUAL (c, dconst0))
8517 return omit_one_operand (type, build_real (type, dconst1),
8520 /* Optimize pow(x,1.0) = x. */
8521 if (REAL_VALUES_EQUAL (c, dconst1))
8524 /* Optimize pow(x,-1.0) = 1.0/x. */
8525 if (REAL_VALUES_EQUAL (c, dconstm1))
8526 return fold_build2 (RDIV_EXPR, type,
8527 build_real (type, dconst1), arg0);
8529 /* Optimize pow(x,0.5) = sqrt(x). */
8530 if (flag_unsafe_math_optimizations
8531 && REAL_VALUES_EQUAL (c, dconsthalf))
8533 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8535 if (sqrtfn != NULL_TREE)
8536 return build_call_expr (sqrtfn, 1, arg0);
8539 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8540 if (flag_unsafe_math_optimizations)
8542 const REAL_VALUE_TYPE dconstroot
8543 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8545 if (REAL_VALUES_EQUAL (c, dconstroot))
8547 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8548 if (cbrtfn != NULL_TREE)
8549 return build_call_expr (cbrtfn, 1, arg0);
8553 /* Check for an integer exponent. */
8554 n = real_to_integer (&c);
8555 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8556 if (real_identical (&c, &cint))
8558 /* Attempt to evaluate pow at compile-time, unless this should
8559 raise an exception. */
8560 if (TREE_CODE (arg0) == REAL_CST
8561 && !TREE_OVERFLOW (arg0)
8563 || (!flag_trapping_math && !flag_errno_math)
8564 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8569 x = TREE_REAL_CST (arg0);
8570 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8571 if (flag_unsafe_math_optimizations || !inexact)
8572 return build_real (type, x);
8575 /* Strip sign ops from even integer powers. */
8576 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8578 tree narg0 = fold_strip_sign_ops (arg0);
8580 return build_call_expr (fndecl, 2, narg0, arg1);
8585 if (flag_unsafe_math_optimizations)
8587 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8589 /* Optimize pow(expN(x),y) = expN(x*y). */
8590 if (BUILTIN_EXPONENT_P (fcode))
8592 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8593 tree arg = CALL_EXPR_ARG (arg0, 0);
8594 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8595 return build_call_expr (expfn, 1, arg);
8598 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8599 if (BUILTIN_SQRT_P (fcode))
8601 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8602 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8603 build_real (type, dconsthalf));
8604 return build_call_expr (fndecl, 2, narg0, narg1);
8607 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8608 if (BUILTIN_CBRT_P (fcode))
8610 tree arg = CALL_EXPR_ARG (arg0, 0);
8611 if (tree_expr_nonnegative_p (arg))
8613 const REAL_VALUE_TYPE dconstroot
8614 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8615 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8616 build_real (type, dconstroot));
8617 return build_call_expr (fndecl, 2, arg, narg1);
8621 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8622 if (fcode == BUILT_IN_POW
8623 || fcode == BUILT_IN_POWF
8624 || fcode == BUILT_IN_POWL)
8626 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8627 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8628 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8629 return build_call_expr (fndecl, 2, arg00, narg1);
8636 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8637 Return NULL_TREE if no simplification can be made. */
8639 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8640 tree arg0, tree arg1, tree type)
8642 if (!validate_arg (arg0, REAL_TYPE)
8643 || !validate_arg (arg1, INTEGER_TYPE))
8646 /* Optimize pow(1.0,y) = 1.0. */
8647 if (real_onep (arg0))
8648 return omit_one_operand (type, build_real (type, dconst1), arg1);
8650 if (host_integerp (arg1, 0))
8652 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8654 /* Evaluate powi at compile-time. */
8655 if (TREE_CODE (arg0) == REAL_CST
8656 && !TREE_OVERFLOW (arg0))
8659 x = TREE_REAL_CST (arg0);
8660 real_powi (&x, TYPE_MODE (type), &x, c);
8661 return build_real (type, x);
8664 /* Optimize pow(x,0) = 1.0. */
8666 return omit_one_operand (type, build_real (type, dconst1),
8669 /* Optimize pow(x,1) = x. */
8673 /* Optimize pow(x,-1) = 1.0/x. */
8675 return fold_build2 (RDIV_EXPR, type,
8676 build_real (type, dconst1), arg0);
8682 /* A subroutine of fold_builtin to fold the various exponent
8683 functions. Return NULL_TREE if no simplification can be made.
8684 FUNC is the corresponding MPFR exponent function. */
8687 fold_builtin_exponent (tree fndecl, tree arg,
8688 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8690 if (validate_arg (arg, REAL_TYPE))
8692 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8695 /* Calculate the result when the argument is a constant. */
8696 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8699 /* Optimize expN(logN(x)) = x. */
8700 if (flag_unsafe_math_optimizations)
8702 const enum built_in_function fcode = builtin_mathfn_code (arg);
8704 if ((func == mpfr_exp
8705 && (fcode == BUILT_IN_LOG
8706 || fcode == BUILT_IN_LOGF
8707 || fcode == BUILT_IN_LOGL))
8708 || (func == mpfr_exp2
8709 && (fcode == BUILT_IN_LOG2
8710 || fcode == BUILT_IN_LOG2F
8711 || fcode == BUILT_IN_LOG2L))
8712 || (func == mpfr_exp10
8713 && (fcode == BUILT_IN_LOG10
8714 || fcode == BUILT_IN_LOG10F
8715 || fcode == BUILT_IN_LOG10L)))
8716 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8723 /* Return true if VAR is a VAR_DECL or a component thereof. */
8726 var_decl_component_p (tree var)
8729 while (handled_component_p (inner))
8730 inner = TREE_OPERAND (inner, 0);
8731 return SSA_VAR_P (inner);
8734 /* Fold function call to builtin memset. Return
8735 NULL_TREE if no simplification can be made. */
8738 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8741 unsigned HOST_WIDE_INT length, cval;
8743 if (! validate_arg (dest, POINTER_TYPE)
8744 || ! validate_arg (c, INTEGER_TYPE)
8745 || ! validate_arg (len, INTEGER_TYPE))
8748 if (! host_integerp (len, 1))
8751 /* If the LEN parameter is zero, return DEST. */
8752 if (integer_zerop (len))
8753 return omit_one_operand (type, dest, c);
8755 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8760 if (TREE_CODE (var) != ADDR_EXPR)
8763 var = TREE_OPERAND (var, 0);
8764 if (TREE_THIS_VOLATILE (var))
8767 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8768 && !POINTER_TYPE_P (TREE_TYPE (var)))
8771 if (! var_decl_component_p (var))
8774 length = tree_low_cst (len, 1);
8775 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8776 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8780 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8783 if (integer_zerop (c))
8787 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8790 cval = tree_low_cst (c, 1);
8794 cval |= (cval << 31) << 1;
8797 ret = build_int_cst_type (TREE_TYPE (var), cval);
8798 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8802 return omit_one_operand (type, dest, ret);
8805 /* Fold function call to builtin memset. Return
8806 NULL_TREE if no simplification can be made. */
8809 fold_builtin_bzero (tree dest, tree size, bool ignore)
8811 if (! validate_arg (dest, POINTER_TYPE)
8812 || ! validate_arg (size, INTEGER_TYPE))
8818 /* New argument list transforming bzero(ptr x, int y) to
8819 memset(ptr x, int 0, size_t y). This is done this way
8820 so that if it isn't expanded inline, we fallback to
8821 calling bzero instead of memset. */
8823 return fold_builtin_memset (dest, integer_zero_node,
8824 fold_convert (sizetype, size),
8825 void_type_node, ignore);
8828 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8829 NULL_TREE if no simplification can be made.
8830 If ENDP is 0, return DEST (like memcpy).
8831 If ENDP is 1, return DEST+LEN (like mempcpy).
8832 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8833 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8837 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8839 tree destvar, srcvar, expr;
8841 if (! validate_arg (dest, POINTER_TYPE)
8842 || ! validate_arg (src, POINTER_TYPE)
8843 || ! validate_arg (len, INTEGER_TYPE))
8846 /* If the LEN parameter is zero, return DEST. */
8847 if (integer_zerop (len))
8848 return omit_one_operand (type, dest, src);
8850 /* If SRC and DEST are the same (and not volatile), return
8851 DEST{,+LEN,+LEN-1}. */
8852 if (operand_equal_p (src, dest, 0))
8856 tree srctype, desttype;
8857 int src_align, dest_align;
8861 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8862 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8864 /* Both DEST and SRC must be pointer types.
8865 ??? This is what old code did. Is the testing for pointer types
8868 If either SRC is readonly or length is 1, we can use memcpy. */
8869 if (!dest_align || !src_align)
8871 if (readonly_data_expr (src)
8872 || (host_integerp (len, 1)
8873 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8874 >= tree_low_cst (len, 1))))
8876 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8879 return build_call_expr (fn, 3, dest, src, len);
8882 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8883 srcvar = build_fold_indirect_ref (src);
8884 destvar = build_fold_indirect_ref (dest);
8886 && !TREE_THIS_VOLATILE (srcvar)
8888 && !TREE_THIS_VOLATILE (destvar))
8890 tree src_base, dest_base, fn;
8891 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8892 HOST_WIDE_INT size = -1;
8893 HOST_WIDE_INT maxsize = -1;
8896 if (handled_component_p (src_base))
8897 src_base = get_ref_base_and_extent (src_base, &src_offset,
8899 dest_base = destvar;
8900 if (handled_component_p (dest_base))
8901 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
8903 if (host_integerp (len, 1))
8905 maxsize = tree_low_cst (len, 1);
8907 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
8910 maxsize *= BITS_PER_UNIT;
8914 if (SSA_VAR_P (src_base)
8915 && SSA_VAR_P (dest_base))
8917 if (operand_equal_p (src_base, dest_base, 0)
8918 && ranges_overlap_p (src_offset, maxsize,
8919 dest_offset, maxsize))
8922 else if (TREE_CODE (src_base) == INDIRECT_REF
8923 && TREE_CODE (dest_base) == INDIRECT_REF)
8925 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8926 TREE_OPERAND (dest_base, 0), 0)
8927 || ranges_overlap_p (src_offset, maxsize,
8928 dest_offset, maxsize))
8934 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8937 return build_call_expr (fn, 3, dest, src, len);
8942 if (!host_integerp (len, 0))
8945 This logic lose for arguments like (type *)malloc (sizeof (type)),
8946 since we strip the casts of up to VOID return value from malloc.
8947 Perhaps we ought to inherit type from non-VOID argument here? */
8950 srctype = TREE_TYPE (TREE_TYPE (src));
8951 desttype = TREE_TYPE (TREE_TYPE (dest));
8952 if (!srctype || !desttype
8953 || !TYPE_SIZE_UNIT (srctype)
8954 || !TYPE_SIZE_UNIT (desttype)
8955 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8956 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8957 || TYPE_VOLATILE (srctype)
8958 || TYPE_VOLATILE (desttype))
8961 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8962 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8963 if (dest_align < (int) TYPE_ALIGN (desttype)
8964 || src_align < (int) TYPE_ALIGN (srctype))
8968 dest = builtin_save_expr (dest);
8971 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8973 srcvar = build_fold_indirect_ref (src);
8974 if (TREE_THIS_VOLATILE (srcvar))
8976 else if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8978 /* With memcpy, it is possible to bypass aliasing rules, so without
8979 this check i.e. execute/20060930-2.c would be misoptimized,
8980 because it use conflicting alias set to hold argument for the
8981 memcpy call. This check is probably unnecessary with
8982 -fno-strict-aliasing. Similarly for destvar. See also
8984 else if (!var_decl_component_p (srcvar))
8988 destvar = NULL_TREE;
8989 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8991 destvar = build_fold_indirect_ref (dest);
8992 if (TREE_THIS_VOLATILE (destvar))
8994 else if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8995 destvar = NULL_TREE;
8996 else if (!var_decl_component_p (destvar))
8997 destvar = NULL_TREE;
9000 if (srcvar == NULL_TREE && destvar == NULL_TREE)
9003 if (srcvar == NULL_TREE)
9006 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
9009 srctype = build_qualified_type (desttype, 0);
9010 if (src_align < (int) TYPE_ALIGN (srctype))
9012 if (AGGREGATE_TYPE_P (srctype)
9013 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
9016 srctype = build_variant_type_copy (srctype);
9017 TYPE_ALIGN (srctype) = src_align;
9018 TYPE_USER_ALIGN (srctype) = 1;
9019 TYPE_PACKED (srctype) = 1;
9021 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
9022 src = fold_convert (srcptype, src);
9023 srcvar = build_fold_indirect_ref (src);
9025 else if (destvar == NULL_TREE)
9028 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
9031 desttype = build_qualified_type (srctype, 0);
9032 if (dest_align < (int) TYPE_ALIGN (desttype))
9034 if (AGGREGATE_TYPE_P (desttype)
9035 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
9038 desttype = build_variant_type_copy (desttype);
9039 TYPE_ALIGN (desttype) = dest_align;
9040 TYPE_USER_ALIGN (desttype) = 1;
9041 TYPE_PACKED (desttype) = 1;
9043 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
9044 dest = fold_convert (destptype, dest);
9045 destvar = build_fold_indirect_ref (dest);
9048 if (srctype == desttype
9049 || (gimple_in_ssa_p (cfun)
9050 && useless_type_conversion_p (desttype, srctype)))
9052 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
9053 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
9054 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
9055 || POINTER_TYPE_P (TREE_TYPE (destvar))))
9056 expr = fold_convert (TREE_TYPE (destvar), srcvar);
9058 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
9059 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
9065 if (endp == 0 || endp == 3)
9066 return omit_one_operand (type, dest, expr);
9072 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
9075 len = fold_convert (sizetype, len);
9076 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
9077 dest = fold_convert (type, dest);
9079 dest = omit_one_operand (type, dest, expr);
9083 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9084 If LEN is not NULL, it represents the length of the string to be
9085 copied. Return NULL_TREE if no simplification can be made. */
9088 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
9092 if (!validate_arg (dest, POINTER_TYPE)
9093 || !validate_arg (src, POINTER_TYPE))
9096 /* If SRC and DEST are the same (and not volatile), return DEST. */
9097 if (operand_equal_p (src, dest, 0))
9098 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
9100 if (optimize_function_for_size_p (cfun))
9103 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9109 len = c_strlen (src, 1);
9110 if (! len || TREE_SIDE_EFFECTS (len))
9114 len = size_binop (PLUS_EXPR, len, ssize_int (1));
9115 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9116 build_call_expr (fn, 3, dest, src, len));
9119 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9120 If SLEN is not NULL, it represents the length of the source string.
9121 Return NULL_TREE if no simplification can be made. */
9124 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
9128 if (!validate_arg (dest, POINTER_TYPE)
9129 || !validate_arg (src, POINTER_TYPE)
9130 || !validate_arg (len, INTEGER_TYPE))
9133 /* If the LEN parameter is zero, return DEST. */
9134 if (integer_zerop (len))
9135 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9137 /* We can't compare slen with len as constants below if len is not a
9139 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9143 slen = c_strlen (src, 1);
9145 /* Now, we must be passed a constant src ptr parameter. */
9146 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9149 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
9151 /* We do not support simplification of this case, though we do
9152 support it when expanding trees into RTL. */
9153 /* FIXME: generate a call to __builtin_memset. */
9154 if (tree_int_cst_lt (slen, len))
9157 /* OK transform into builtin memcpy. */
9158 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
9161 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
9162 build_call_expr (fn, 3, dest, src, len));
9165 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9166 arguments to the call, and TYPE is its return type.
9167 Return NULL_TREE if no simplification can be made. */
9170 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
9172 if (!validate_arg (arg1, POINTER_TYPE)
9173 || !validate_arg (arg2, INTEGER_TYPE)
9174 || !validate_arg (len, INTEGER_TYPE))
9180 if (TREE_CODE (arg2) != INTEGER_CST
9181 || !host_integerp (len, 1))
9184 p1 = c_getstr (arg1);
9185 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9191 if (target_char_cast (arg2, &c))
9194 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
9197 return build_int_cst (TREE_TYPE (arg1), 0);
9199 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
9201 return fold_convert (type, tem);
9207 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9208 Return NULL_TREE if no simplification can be made. */
9211 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
9213 const char *p1, *p2;
9215 if (!validate_arg (arg1, POINTER_TYPE)
9216 || !validate_arg (arg2, POINTER_TYPE)
9217 || !validate_arg (len, INTEGER_TYPE))
9220 /* If the LEN parameter is zero, return zero. */
9221 if (integer_zerop (len))
9222 return omit_two_operands (integer_type_node, integer_zero_node,
9225 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9226 if (operand_equal_p (arg1, arg2, 0))
9227 return omit_one_operand (integer_type_node, integer_zero_node, len);
9229 p1 = c_getstr (arg1);
9230 p2 = c_getstr (arg2);
9232 /* If all arguments are constant, and the value of len is not greater
9233 than the lengths of arg1 and arg2, evaluate at compile-time. */
9234 if (host_integerp (len, 1) && p1 && p2
9235 && compare_tree_int (len, strlen (p1) + 1) <= 0
9236 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9238 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9241 return integer_one_node;
9243 return integer_minus_one_node;
9245 return integer_zero_node;
9248 /* If len parameter is one, return an expression corresponding to
9249 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9250 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9252 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9253 tree cst_uchar_ptr_node
9254 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9256 tree ind1 = fold_convert (integer_type_node,
9257 build1 (INDIRECT_REF, cst_uchar_node,
9258 fold_convert (cst_uchar_ptr_node,
9260 tree ind2 = fold_convert (integer_type_node,
9261 build1 (INDIRECT_REF, cst_uchar_node,
9262 fold_convert (cst_uchar_ptr_node,
9264 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9270 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9271 Return NULL_TREE if no simplification can be made. */
9274 fold_builtin_strcmp (tree arg1, tree arg2)
9276 const char *p1, *p2;
9278 if (!validate_arg (arg1, POINTER_TYPE)
9279 || !validate_arg (arg2, POINTER_TYPE))
9282 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9283 if (operand_equal_p (arg1, arg2, 0))
9284 return integer_zero_node;
9286 p1 = c_getstr (arg1);
9287 p2 = c_getstr (arg2);
9291 const int i = strcmp (p1, p2);
9293 return integer_minus_one_node;
9295 return integer_one_node;
9297 return integer_zero_node;
9300 /* If the second arg is "", return *(const unsigned char*)arg1. */
9301 if (p2 && *p2 == '\0')
9303 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9304 tree cst_uchar_ptr_node
9305 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9307 return fold_convert (integer_type_node,
9308 build1 (INDIRECT_REF, cst_uchar_node,
9309 fold_convert (cst_uchar_ptr_node,
9313 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9314 if (p1 && *p1 == '\0')
9316 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9317 tree cst_uchar_ptr_node
9318 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9320 tree temp = fold_convert (integer_type_node,
9321 build1 (INDIRECT_REF, cst_uchar_node,
9322 fold_convert (cst_uchar_ptr_node,
9324 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9330 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9331 Return NULL_TREE if no simplification can be made. */
9334 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9336 const char *p1, *p2;
9338 if (!validate_arg (arg1, POINTER_TYPE)
9339 || !validate_arg (arg2, POINTER_TYPE)
9340 || !validate_arg (len, INTEGER_TYPE))
9343 /* If the LEN parameter is zero, return zero. */
9344 if (integer_zerop (len))
9345 return omit_two_operands (integer_type_node, integer_zero_node,
9348 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9349 if (operand_equal_p (arg1, arg2, 0))
9350 return omit_one_operand (integer_type_node, integer_zero_node, len);
9352 p1 = c_getstr (arg1);
9353 p2 = c_getstr (arg2);
9355 if (host_integerp (len, 1) && p1 && p2)
9357 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9359 return integer_one_node;
9361 return integer_minus_one_node;
9363 return integer_zero_node;
9366 /* If the second arg is "", and the length is greater than zero,
9367 return *(const unsigned char*)arg1. */
9368 if (p2 && *p2 == '\0'
9369 && TREE_CODE (len) == INTEGER_CST
9370 && tree_int_cst_sgn (len) == 1)
9372 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9373 tree cst_uchar_ptr_node
9374 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9376 return fold_convert (integer_type_node,
9377 build1 (INDIRECT_REF, cst_uchar_node,
9378 fold_convert (cst_uchar_ptr_node,
9382 /* If the first arg is "", and the length is greater than zero,
9383 return -*(const unsigned char*)arg2. */
9384 if (p1 && *p1 == '\0'
9385 && TREE_CODE (len) == INTEGER_CST
9386 && tree_int_cst_sgn (len) == 1)
9388 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9389 tree cst_uchar_ptr_node
9390 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9392 tree temp = fold_convert (integer_type_node,
9393 build1 (INDIRECT_REF, cst_uchar_node,
9394 fold_convert (cst_uchar_ptr_node,
9396 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9399 /* If len parameter is one, return an expression corresponding to
9400 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9401 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9403 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9404 tree cst_uchar_ptr_node
9405 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9407 tree ind1 = fold_convert (integer_type_node,
9408 build1 (INDIRECT_REF, cst_uchar_node,
9409 fold_convert (cst_uchar_ptr_node,
9411 tree ind2 = fold_convert (integer_type_node,
9412 build1 (INDIRECT_REF, cst_uchar_node,
9413 fold_convert (cst_uchar_ptr_node,
9415 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9421 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9422 ARG. Return NULL_TREE if no simplification can be made. */
9425 fold_builtin_signbit (tree arg, tree type)
9429 if (!validate_arg (arg, REAL_TYPE))
9432 /* If ARG is a compile-time constant, determine the result. */
9433 if (TREE_CODE (arg) == REAL_CST
9434 && !TREE_OVERFLOW (arg))
9438 c = TREE_REAL_CST (arg);
9439 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9440 return fold_convert (type, temp);
9443 /* If ARG is non-negative, the result is always zero. */
9444 if (tree_expr_nonnegative_p (arg))
9445 return omit_one_operand (type, integer_zero_node, arg);
9447 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9448 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9449 return fold_build2 (LT_EXPR, type, arg,
9450 build_real (TREE_TYPE (arg), dconst0));
9455 /* Fold function call to builtin copysign, copysignf or copysignl with
9456 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9460 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9464 if (!validate_arg (arg1, REAL_TYPE)
9465 || !validate_arg (arg2, REAL_TYPE))
9468 /* copysign(X,X) is X. */
9469 if (operand_equal_p (arg1, arg2, 0))
9470 return fold_convert (type, arg1);
9472 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9473 if (TREE_CODE (arg1) == REAL_CST
9474 && TREE_CODE (arg2) == REAL_CST
9475 && !TREE_OVERFLOW (arg1)
9476 && !TREE_OVERFLOW (arg2))
9478 REAL_VALUE_TYPE c1, c2;
9480 c1 = TREE_REAL_CST (arg1);
9481 c2 = TREE_REAL_CST (arg2);
9482 /* c1.sign := c2.sign. */
9483 real_copysign (&c1, &c2);
9484 return build_real (type, c1);
9487 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9488 Remember to evaluate Y for side-effects. */
9489 if (tree_expr_nonnegative_p (arg2))
9490 return omit_one_operand (type,
9491 fold_build1 (ABS_EXPR, type, arg1),
9494 /* Strip sign changing operations for the first argument. */
9495 tem = fold_strip_sign_ops (arg1);
9497 return build_call_expr (fndecl, 2, tem, arg2);
9502 /* Fold a call to builtin isascii with argument ARG. */
9505 fold_builtin_isascii (tree arg)
9507 if (!validate_arg (arg, INTEGER_TYPE))
9511 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9512 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9513 build_int_cst (NULL_TREE,
9514 ~ (unsigned HOST_WIDE_INT) 0x7f));
9515 return fold_build2 (EQ_EXPR, integer_type_node,
9516 arg, integer_zero_node);
9520 /* Fold a call to builtin toascii with argument ARG. */
9523 fold_builtin_toascii (tree arg)
9525 if (!validate_arg (arg, INTEGER_TYPE))
9528 /* Transform toascii(c) -> (c & 0x7f). */
9529 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9530 build_int_cst (NULL_TREE, 0x7f));
9533 /* Fold a call to builtin isdigit with argument ARG. */
9536 fold_builtin_isdigit (tree arg)
9538 if (!validate_arg (arg, INTEGER_TYPE))
9542 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9543 /* According to the C standard, isdigit is unaffected by locale.
9544 However, it definitely is affected by the target character set. */
9545 unsigned HOST_WIDE_INT target_digit0
9546 = lang_hooks.to_target_charset ('0');
9548 if (target_digit0 == 0)
9551 arg = fold_convert (unsigned_type_node, arg);
9552 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9553 build_int_cst (unsigned_type_node, target_digit0));
9554 return fold_build2 (LE_EXPR, integer_type_node, arg,
9555 build_int_cst (unsigned_type_node, 9));
9559 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9562 fold_builtin_fabs (tree arg, tree type)
9564 if (!validate_arg (arg, REAL_TYPE))
9567 arg = fold_convert (type, arg);
9568 if (TREE_CODE (arg) == REAL_CST)
9569 return fold_abs_const (arg, type);
9570 return fold_build1 (ABS_EXPR, type, arg);
9573 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9576 fold_builtin_abs (tree arg, tree type)
9578 if (!validate_arg (arg, INTEGER_TYPE))
9581 arg = fold_convert (type, arg);
9582 if (TREE_CODE (arg) == INTEGER_CST)
9583 return fold_abs_const (arg, type);
9584 return fold_build1 (ABS_EXPR, type, arg);
9587 /* Fold a call to builtin fmin or fmax. */
9590 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9592 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9594 /* Calculate the result when the argument is a constant. */
9595 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9600 /* If either argument is NaN, return the other one. Avoid the
9601 transformation if we get (and honor) a signalling NaN. Using
9602 omit_one_operand() ensures we create a non-lvalue. */
9603 if (TREE_CODE (arg0) == REAL_CST
9604 && real_isnan (&TREE_REAL_CST (arg0))
9605 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9606 || ! TREE_REAL_CST (arg0).signalling))
9607 return omit_one_operand (type, arg1, arg0);
9608 if (TREE_CODE (arg1) == REAL_CST
9609 && real_isnan (&TREE_REAL_CST (arg1))
9610 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9611 || ! TREE_REAL_CST (arg1).signalling))
9612 return omit_one_operand (type, arg0, arg1);
9614 /* Transform fmin/fmax(x,x) -> x. */
9615 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9616 return omit_one_operand (type, arg0, arg1);
9618 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9619 functions to return the numeric arg if the other one is NaN.
9620 These tree codes don't honor that, so only transform if
9621 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9622 handled, so we don't have to worry about it either. */
9623 if (flag_finite_math_only)
9624 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9625 fold_convert (type, arg0),
9626 fold_convert (type, arg1));
9631 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9634 fold_builtin_carg (tree arg, tree type)
9636 if (validate_arg (arg, COMPLEX_TYPE))
9638 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9642 tree new_arg = builtin_save_expr (arg);
9643 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9644 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9645 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9652 /* Fold a call to builtin logb/ilogb. */
9655 fold_builtin_logb (tree arg, tree rettype)
9657 if (! validate_arg (arg, REAL_TYPE))
9662 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9664 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9670 /* If arg is Inf or NaN and we're logb, return it. */
9671 if (TREE_CODE (rettype) == REAL_TYPE)
9672 return fold_convert (rettype, arg);
9673 /* Fall through... */
9675 /* Zero may set errno and/or raise an exception for logb, also
9676 for ilogb we don't know FP_ILOGB0. */
9679 /* For normal numbers, proceed iff radix == 2. In GCC,
9680 normalized significands are in the range [0.5, 1.0). We
9681 want the exponent as if they were [1.0, 2.0) so get the
9682 exponent and subtract 1. */
9683 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9684 return fold_convert (rettype, build_int_cst (NULL_TREE,
9685 REAL_EXP (value)-1));
9693 /* Fold a call to builtin significand, if radix == 2. */
9696 fold_builtin_significand (tree arg, tree rettype)
9698 if (! validate_arg (arg, REAL_TYPE))
9703 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9705 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9712 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9713 return fold_convert (rettype, arg);
9715 /* For normal numbers, proceed iff radix == 2. */
9716 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9718 REAL_VALUE_TYPE result = *value;
9719 /* In GCC, normalized significands are in the range [0.5,
9720 1.0). We want them to be [1.0, 2.0) so set the
9722 SET_REAL_EXP (&result, 1);
9723 return build_real (rettype, result);
9732 /* Fold a call to builtin frexp, we can assume the base is 2. */
9735 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9737 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9742 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9745 arg1 = build_fold_indirect_ref (arg1);
9747 /* Proceed if a valid pointer type was passed in. */
9748 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9750 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9756 /* For +-0, return (*exp = 0, +-0). */
9757 exp = integer_zero_node;
9762 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9763 return omit_one_operand (rettype, arg0, arg1);
9766 /* Since the frexp function always expects base 2, and in
9767 GCC normalized significands are already in the range
9768 [0.5, 1.0), we have exactly what frexp wants. */
9769 REAL_VALUE_TYPE frac_rvt = *value;
9770 SET_REAL_EXP (&frac_rvt, 0);
9771 frac = build_real (rettype, frac_rvt);
9772 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9779 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9780 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9781 TREE_SIDE_EFFECTS (arg1) = 1;
9782 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9788 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9789 then we can assume the base is two. If it's false, then we have to
9790 check the mode of the TYPE parameter in certain cases. */
9793 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9795 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9800 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9801 if (real_zerop (arg0) || integer_zerop (arg1)
9802 || (TREE_CODE (arg0) == REAL_CST
9803 && !real_isfinite (&TREE_REAL_CST (arg0))))
9804 return omit_one_operand (type, arg0, arg1);
9806 /* If both arguments are constant, then try to evaluate it. */
9807 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9808 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9809 && host_integerp (arg1, 0))
9811 /* Bound the maximum adjustment to twice the range of the
9812 mode's valid exponents. Use abs to ensure the range is
9813 positive as a sanity check. */
9814 const long max_exp_adj = 2 *
9815 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9816 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9818 /* Get the user-requested adjustment. */
9819 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9821 /* The requested adjustment must be inside this range. This
9822 is a preliminary cap to avoid things like overflow, we
9823 may still fail to compute the result for other reasons. */
9824 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9826 REAL_VALUE_TYPE initial_result;
9828 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9830 /* Ensure we didn't overflow. */
9831 if (! real_isinf (&initial_result))
9833 const REAL_VALUE_TYPE trunc_result
9834 = real_value_truncate (TYPE_MODE (type), initial_result);
9836 /* Only proceed if the target mode can hold the
9838 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9839 return build_real (type, trunc_result);
9848 /* Fold a call to builtin modf. */
9851 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9853 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9858 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9861 arg1 = build_fold_indirect_ref (arg1);
9863 /* Proceed if a valid pointer type was passed in. */
9864 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9866 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9867 REAL_VALUE_TYPE trunc, frac;
9873 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9874 trunc = frac = *value;
9877 /* For +-Inf, return (*arg1 = arg0, +-0). */
9879 frac.sign = value->sign;
9883 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9884 real_trunc (&trunc, VOIDmode, value);
9885 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9886 /* If the original number was negative and already
9887 integral, then the fractional part is -0.0. */
9888 if (value->sign && frac.cl == rvc_zero)
9889 frac.sign = value->sign;
9893 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9894 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9895 build_real (rettype, trunc));
9896 TREE_SIDE_EFFECTS (arg1) = 1;
9897 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9898 build_real (rettype, frac));
9904 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9905 ARG is the argument for the call. */
9908 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9910 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9913 if (!validate_arg (arg, REAL_TYPE))
9916 switch (builtin_index)
9918 case BUILT_IN_ISINF:
9919 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9920 return omit_one_operand (type, integer_zero_node, arg);
9922 if (TREE_CODE (arg) == REAL_CST)
9924 r = TREE_REAL_CST (arg);
9925 if (real_isinf (&r))
9926 return real_compare (GT_EXPR, &r, &dconst0)
9927 ? integer_one_node : integer_minus_one_node;
9929 return integer_zero_node;
9934 case BUILT_IN_ISINF_SIGN:
9936 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9937 /* In a boolean context, GCC will fold the inner COND_EXPR to
9938 1. So e.g. "if (isinf_sign(x))" would be folded to just
9939 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9940 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9941 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9942 tree tmp = NULL_TREE;
9944 arg = builtin_save_expr (arg);
9946 if (signbit_fn && isinf_fn)
9948 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
9949 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
9951 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
9952 signbit_call, integer_zero_node);
9953 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
9954 isinf_call, integer_zero_node);
9956 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
9957 integer_minus_one_node, integer_one_node);
9958 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
9965 case BUILT_IN_ISFINITE:
9966 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9967 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9968 return omit_one_operand (type, integer_one_node, arg);
9970 if (TREE_CODE (arg) == REAL_CST)
9972 r = TREE_REAL_CST (arg);
9973 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9978 case BUILT_IN_ISNAN:
9979 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9980 return omit_one_operand (type, integer_zero_node, arg);
9982 if (TREE_CODE (arg) == REAL_CST)
9984 r = TREE_REAL_CST (arg);
9985 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9988 arg = builtin_save_expr (arg);
9989 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9996 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9997 This builtin will generate code to return the appropriate floating
9998 point classification depending on the value of the floating point
9999 number passed in. The possible return values must be supplied as
10000 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10001 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
10002 one floating point argument which is "type generic". */
10005 fold_builtin_fpclassify (tree exp)
10007 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10008 arg, type, res, tmp;
10009 enum machine_mode mode;
10013 /* Verify the required arguments in the original call. */
10014 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10015 INTEGER_TYPE, INTEGER_TYPE,
10016 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10019 fp_nan = CALL_EXPR_ARG (exp, 0);
10020 fp_infinite = CALL_EXPR_ARG (exp, 1);
10021 fp_normal = CALL_EXPR_ARG (exp, 2);
10022 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10023 fp_zero = CALL_EXPR_ARG (exp, 4);
10024 arg = CALL_EXPR_ARG (exp, 5);
10025 type = TREE_TYPE (arg);
10026 mode = TYPE_MODE (type);
10027 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
10029 /* fpclassify(x) ->
10030 isnan(x) ? FP_NAN :
10031 (fabs(x) == Inf ? FP_INFINITE :
10032 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10033 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10035 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
10036 build_real (type, dconst0));
10037 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
10039 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10040 real_from_string (&r, buf);
10041 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
10042 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
10044 if (HONOR_INFINITIES (mode))
10047 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
10048 build_real (type, r));
10049 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
10052 if (HONOR_NANS (mode))
10054 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
10055 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
10061 /* Fold a call to an unordered comparison function such as
10062 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10063 being called and ARG0 and ARG1 are the arguments for the call.
10064 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10065 the opposite of the desired result. UNORDERED_CODE is used
10066 for modes that can hold NaNs and ORDERED_CODE is used for
10070 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
10071 enum tree_code unordered_code,
10072 enum tree_code ordered_code)
10074 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10075 enum tree_code code;
10077 enum tree_code code0, code1;
10078 tree cmp_type = NULL_TREE;
10080 type0 = TREE_TYPE (arg0);
10081 type1 = TREE_TYPE (arg1);
10083 code0 = TREE_CODE (type0);
10084 code1 = TREE_CODE (type1);
10086 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10087 /* Choose the wider of two real types. */
10088 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10090 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10092 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10095 arg0 = fold_convert (cmp_type, arg0);
10096 arg1 = fold_convert (cmp_type, arg1);
10098 if (unordered_code == UNORDERED_EXPR)
10100 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10101 return omit_two_operands (type, integer_zero_node, arg0, arg1);
10102 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
10105 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10107 return fold_build1 (TRUTH_NOT_EXPR, type,
10108 fold_build2 (code, type, arg0, arg1));
10111 /* Fold a call to built-in function FNDECL with 0 arguments.
10112 IGNORE is true if the result of the function call is ignored. This
10113 function returns NULL_TREE if no simplification was possible. */
10116 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10118 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10119 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10122 CASE_FLT_FN (BUILT_IN_INF):
10123 case BUILT_IN_INFD32:
10124 case BUILT_IN_INFD64:
10125 case BUILT_IN_INFD128:
10126 return fold_builtin_inf (type, true);
10128 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10129 return fold_builtin_inf (type, false);
10131 case BUILT_IN_CLASSIFY_TYPE:
10132 return fold_builtin_classify_type (NULL_TREE);
10140 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10141 IGNORE is true if the result of the function call is ignored. This
10142 function returns NULL_TREE if no simplification was possible. */
10145 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
10147 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10148 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10152 case BUILT_IN_CONSTANT_P:
10154 tree val = fold_builtin_constant_p (arg0);
10156 /* Gimplification will pull the CALL_EXPR for the builtin out of
10157 an if condition. When not optimizing, we'll not CSE it back.
10158 To avoid link error types of regressions, return false now. */
10159 if (!val && !optimize)
10160 val = integer_zero_node;
10165 case BUILT_IN_CLASSIFY_TYPE:
10166 return fold_builtin_classify_type (arg0);
10168 case BUILT_IN_STRLEN:
10169 return fold_builtin_strlen (arg0);
10171 CASE_FLT_FN (BUILT_IN_FABS):
10172 return fold_builtin_fabs (arg0, type);
10175 case BUILT_IN_LABS:
10176 case BUILT_IN_LLABS:
10177 case BUILT_IN_IMAXABS:
10178 return fold_builtin_abs (arg0, type);
10180 CASE_FLT_FN (BUILT_IN_CONJ):
10181 if (validate_arg (arg0, COMPLEX_TYPE))
10182 return fold_build1 (CONJ_EXPR, type, arg0);
10185 CASE_FLT_FN (BUILT_IN_CREAL):
10186 if (validate_arg (arg0, COMPLEX_TYPE))
10187 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
10190 CASE_FLT_FN (BUILT_IN_CIMAG):
10191 if (validate_arg (arg0, COMPLEX_TYPE))
10192 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
10195 CASE_FLT_FN (BUILT_IN_CCOS):
10196 CASE_FLT_FN (BUILT_IN_CCOSH):
10197 /* These functions are "even", i.e. f(x) == f(-x). */
10198 if (validate_arg (arg0, COMPLEX_TYPE))
10200 tree narg = fold_strip_sign_ops (arg0);
10202 return build_call_expr (fndecl, 1, narg);
10206 CASE_FLT_FN (BUILT_IN_CABS):
10207 return fold_builtin_cabs (arg0, type, fndecl);
10209 CASE_FLT_FN (BUILT_IN_CARG):
10210 return fold_builtin_carg (arg0, type);
10212 CASE_FLT_FN (BUILT_IN_SQRT):
10213 return fold_builtin_sqrt (arg0, type);
10215 CASE_FLT_FN (BUILT_IN_CBRT):
10216 return fold_builtin_cbrt (arg0, type);
10218 CASE_FLT_FN (BUILT_IN_ASIN):
10219 if (validate_arg (arg0, REAL_TYPE))
10220 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10221 &dconstm1, &dconst1, true);
10224 CASE_FLT_FN (BUILT_IN_ACOS):
10225 if (validate_arg (arg0, REAL_TYPE))
10226 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10227 &dconstm1, &dconst1, true);
10230 CASE_FLT_FN (BUILT_IN_ATAN):
10231 if (validate_arg (arg0, REAL_TYPE))
10232 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10235 CASE_FLT_FN (BUILT_IN_ASINH):
10236 if (validate_arg (arg0, REAL_TYPE))
10237 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10240 CASE_FLT_FN (BUILT_IN_ACOSH):
10241 if (validate_arg (arg0, REAL_TYPE))
10242 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10243 &dconst1, NULL, true);
10246 CASE_FLT_FN (BUILT_IN_ATANH):
10247 if (validate_arg (arg0, REAL_TYPE))
10248 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10249 &dconstm1, &dconst1, false);
10252 CASE_FLT_FN (BUILT_IN_SIN):
10253 if (validate_arg (arg0, REAL_TYPE))
10254 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10257 CASE_FLT_FN (BUILT_IN_COS):
10258 return fold_builtin_cos (arg0, type, fndecl);
10261 CASE_FLT_FN (BUILT_IN_TAN):
10262 return fold_builtin_tan (arg0, type);
10264 CASE_FLT_FN (BUILT_IN_CEXP):
10265 return fold_builtin_cexp (arg0, type);
10267 CASE_FLT_FN (BUILT_IN_CEXPI):
10268 if (validate_arg (arg0, REAL_TYPE))
10269 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10272 CASE_FLT_FN (BUILT_IN_SINH):
10273 if (validate_arg (arg0, REAL_TYPE))
10274 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10277 CASE_FLT_FN (BUILT_IN_COSH):
10278 return fold_builtin_cosh (arg0, type, fndecl);
10280 CASE_FLT_FN (BUILT_IN_TANH):
10281 if (validate_arg (arg0, REAL_TYPE))
10282 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10285 CASE_FLT_FN (BUILT_IN_ERF):
10286 if (validate_arg (arg0, REAL_TYPE))
10287 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10290 CASE_FLT_FN (BUILT_IN_ERFC):
10291 if (validate_arg (arg0, REAL_TYPE))
10292 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10295 CASE_FLT_FN (BUILT_IN_TGAMMA):
10296 if (validate_arg (arg0, REAL_TYPE))
10297 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10300 CASE_FLT_FN (BUILT_IN_EXP):
10301 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10303 CASE_FLT_FN (BUILT_IN_EXP2):
10304 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10306 CASE_FLT_FN (BUILT_IN_EXP10):
10307 CASE_FLT_FN (BUILT_IN_POW10):
10308 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10310 CASE_FLT_FN (BUILT_IN_EXPM1):
10311 if (validate_arg (arg0, REAL_TYPE))
10312 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10315 CASE_FLT_FN (BUILT_IN_LOG):
10316 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10318 CASE_FLT_FN (BUILT_IN_LOG2):
10319 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10321 CASE_FLT_FN (BUILT_IN_LOG10):
10322 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10324 CASE_FLT_FN (BUILT_IN_LOG1P):
10325 if (validate_arg (arg0, REAL_TYPE))
10326 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10327 &dconstm1, NULL, false);
10330 CASE_FLT_FN (BUILT_IN_J0):
10331 if (validate_arg (arg0, REAL_TYPE))
10332 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10336 CASE_FLT_FN (BUILT_IN_J1):
10337 if (validate_arg (arg0, REAL_TYPE))
10338 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10342 CASE_FLT_FN (BUILT_IN_Y0):
10343 if (validate_arg (arg0, REAL_TYPE))
10344 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10345 &dconst0, NULL, false);
10348 CASE_FLT_FN (BUILT_IN_Y1):
10349 if (validate_arg (arg0, REAL_TYPE))
10350 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10351 &dconst0, NULL, false);
10354 CASE_FLT_FN (BUILT_IN_NAN):
10355 case BUILT_IN_NAND32:
10356 case BUILT_IN_NAND64:
10357 case BUILT_IN_NAND128:
10358 return fold_builtin_nan (arg0, type, true);
10360 CASE_FLT_FN (BUILT_IN_NANS):
10361 return fold_builtin_nan (arg0, type, false);
10363 CASE_FLT_FN (BUILT_IN_FLOOR):
10364 return fold_builtin_floor (fndecl, arg0);
10366 CASE_FLT_FN (BUILT_IN_CEIL):
10367 return fold_builtin_ceil (fndecl, arg0);
10369 CASE_FLT_FN (BUILT_IN_TRUNC):
10370 return fold_builtin_trunc (fndecl, arg0);
10372 CASE_FLT_FN (BUILT_IN_ROUND):
10373 return fold_builtin_round (fndecl, arg0);
10375 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10376 CASE_FLT_FN (BUILT_IN_RINT):
10377 return fold_trunc_transparent_mathfn (fndecl, arg0);
10379 CASE_FLT_FN (BUILT_IN_LCEIL):
10380 CASE_FLT_FN (BUILT_IN_LLCEIL):
10381 CASE_FLT_FN (BUILT_IN_LFLOOR):
10382 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10383 CASE_FLT_FN (BUILT_IN_LROUND):
10384 CASE_FLT_FN (BUILT_IN_LLROUND):
10385 return fold_builtin_int_roundingfn (fndecl, arg0);
10387 CASE_FLT_FN (BUILT_IN_LRINT):
10388 CASE_FLT_FN (BUILT_IN_LLRINT):
10389 return fold_fixed_mathfn (fndecl, arg0);
10391 case BUILT_IN_BSWAP32:
10392 case BUILT_IN_BSWAP64:
10393 return fold_builtin_bswap (fndecl, arg0);
10395 CASE_INT_FN (BUILT_IN_FFS):
10396 CASE_INT_FN (BUILT_IN_CLZ):
10397 CASE_INT_FN (BUILT_IN_CTZ):
10398 CASE_INT_FN (BUILT_IN_POPCOUNT):
10399 CASE_INT_FN (BUILT_IN_PARITY):
10400 return fold_builtin_bitop (fndecl, arg0);
10402 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10403 return fold_builtin_signbit (arg0, type);
10405 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10406 return fold_builtin_significand (arg0, type);
10408 CASE_FLT_FN (BUILT_IN_ILOGB):
10409 CASE_FLT_FN (BUILT_IN_LOGB):
10410 return fold_builtin_logb (arg0, type);
10412 case BUILT_IN_ISASCII:
10413 return fold_builtin_isascii (arg0);
10415 case BUILT_IN_TOASCII:
10416 return fold_builtin_toascii (arg0);
10418 case BUILT_IN_ISDIGIT:
10419 return fold_builtin_isdigit (arg0);
10421 CASE_FLT_FN (BUILT_IN_FINITE):
10422 case BUILT_IN_FINITED32:
10423 case BUILT_IN_FINITED64:
10424 case BUILT_IN_FINITED128:
10425 case BUILT_IN_ISFINITE:
10426 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10428 CASE_FLT_FN (BUILT_IN_ISINF):
10429 case BUILT_IN_ISINFD32:
10430 case BUILT_IN_ISINFD64:
10431 case BUILT_IN_ISINFD128:
10432 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10434 case BUILT_IN_ISINF_SIGN:
10435 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10437 CASE_FLT_FN (BUILT_IN_ISNAN):
10438 case BUILT_IN_ISNAND32:
10439 case BUILT_IN_ISNAND64:
10440 case BUILT_IN_ISNAND128:
10441 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10443 case BUILT_IN_PRINTF:
10444 case BUILT_IN_PRINTF_UNLOCKED:
10445 case BUILT_IN_VPRINTF:
10446 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10456 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10457 IGNORE is true if the result of the function call is ignored. This
10458 function returns NULL_TREE if no simplification was possible. */
10461 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10463 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10464 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10468 CASE_FLT_FN (BUILT_IN_JN):
10469 if (validate_arg (arg0, INTEGER_TYPE)
10470 && validate_arg (arg1, REAL_TYPE))
10471 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10474 CASE_FLT_FN (BUILT_IN_YN):
10475 if (validate_arg (arg0, INTEGER_TYPE)
10476 && validate_arg (arg1, REAL_TYPE))
10477 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10481 CASE_FLT_FN (BUILT_IN_DREM):
10482 CASE_FLT_FN (BUILT_IN_REMAINDER):
10483 if (validate_arg (arg0, REAL_TYPE)
10484 && validate_arg(arg1, REAL_TYPE))
10485 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10488 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10489 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10490 if (validate_arg (arg0, REAL_TYPE)
10491 && validate_arg(arg1, POINTER_TYPE))
10492 return do_mpfr_lgamma_r (arg0, arg1, type);
10495 CASE_FLT_FN (BUILT_IN_ATAN2):
10496 if (validate_arg (arg0, REAL_TYPE)
10497 && validate_arg(arg1, REAL_TYPE))
10498 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10501 CASE_FLT_FN (BUILT_IN_FDIM):
10502 if (validate_arg (arg0, REAL_TYPE)
10503 && validate_arg(arg1, REAL_TYPE))
10504 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10507 CASE_FLT_FN (BUILT_IN_HYPOT):
10508 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10510 CASE_FLT_FN (BUILT_IN_LDEXP):
10511 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10512 CASE_FLT_FN (BUILT_IN_SCALBN):
10513 CASE_FLT_FN (BUILT_IN_SCALBLN):
10514 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10516 CASE_FLT_FN (BUILT_IN_FREXP):
10517 return fold_builtin_frexp (arg0, arg1, type);
10519 CASE_FLT_FN (BUILT_IN_MODF):
10520 return fold_builtin_modf (arg0, arg1, type);
10522 case BUILT_IN_BZERO:
10523 return fold_builtin_bzero (arg0, arg1, ignore);
10525 case BUILT_IN_FPUTS:
10526 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10528 case BUILT_IN_FPUTS_UNLOCKED:
10529 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10531 case BUILT_IN_STRSTR:
10532 return fold_builtin_strstr (arg0, arg1, type);
10534 case BUILT_IN_STRCAT:
10535 return fold_builtin_strcat (arg0, arg1);
10537 case BUILT_IN_STRSPN:
10538 return fold_builtin_strspn (arg0, arg1);
10540 case BUILT_IN_STRCSPN:
10541 return fold_builtin_strcspn (arg0, arg1);
10543 case BUILT_IN_STRCHR:
10544 case BUILT_IN_INDEX:
10545 return fold_builtin_strchr (arg0, arg1, type);
10547 case BUILT_IN_STRRCHR:
10548 case BUILT_IN_RINDEX:
10549 return fold_builtin_strrchr (arg0, arg1, type);
10551 case BUILT_IN_STRCPY:
10552 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10554 case BUILT_IN_STPCPY:
10557 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10561 return build_call_expr (fn, 2, arg0, arg1);
10565 case BUILT_IN_STRCMP:
10566 return fold_builtin_strcmp (arg0, arg1);
10568 case BUILT_IN_STRPBRK:
10569 return fold_builtin_strpbrk (arg0, arg1, type);
10571 case BUILT_IN_EXPECT:
10572 return fold_builtin_expect (arg0, arg1);
10574 CASE_FLT_FN (BUILT_IN_POW):
10575 return fold_builtin_pow (fndecl, arg0, arg1, type);
10577 CASE_FLT_FN (BUILT_IN_POWI):
10578 return fold_builtin_powi (fndecl, arg0, arg1, type);
10580 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10581 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10583 CASE_FLT_FN (BUILT_IN_FMIN):
10584 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10586 CASE_FLT_FN (BUILT_IN_FMAX):
10587 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10589 case BUILT_IN_ISGREATER:
10590 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10591 case BUILT_IN_ISGREATEREQUAL:
10592 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10593 case BUILT_IN_ISLESS:
10594 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10595 case BUILT_IN_ISLESSEQUAL:
10596 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10597 case BUILT_IN_ISLESSGREATER:
10598 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10599 case BUILT_IN_ISUNORDERED:
10600 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10603 /* We do the folding for va_start in the expander. */
10604 case BUILT_IN_VA_START:
10607 case BUILT_IN_SPRINTF:
10608 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10610 case BUILT_IN_OBJECT_SIZE:
10611 return fold_builtin_object_size (arg0, arg1);
10613 case BUILT_IN_PRINTF:
10614 case BUILT_IN_PRINTF_UNLOCKED:
10615 case BUILT_IN_VPRINTF:
10616 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10618 case BUILT_IN_PRINTF_CHK:
10619 case BUILT_IN_VPRINTF_CHK:
10620 if (!validate_arg (arg0, INTEGER_TYPE)
10621 || TREE_SIDE_EFFECTS (arg0))
10624 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10627 case BUILT_IN_FPRINTF:
10628 case BUILT_IN_FPRINTF_UNLOCKED:
10629 case BUILT_IN_VFPRINTF:
10630 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10639 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10640 and ARG2. IGNORE is true if the result of the function call is ignored.
10641 This function returns NULL_TREE if no simplification was possible. */
10644 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10646 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10647 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10651 CASE_FLT_FN (BUILT_IN_SINCOS):
10652 return fold_builtin_sincos (arg0, arg1, arg2);
10654 CASE_FLT_FN (BUILT_IN_FMA):
10655 if (validate_arg (arg0, REAL_TYPE)
10656 && validate_arg(arg1, REAL_TYPE)
10657 && validate_arg(arg2, REAL_TYPE))
10658 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10661 CASE_FLT_FN (BUILT_IN_REMQUO):
10662 if (validate_arg (arg0, REAL_TYPE)
10663 && validate_arg(arg1, REAL_TYPE)
10664 && validate_arg(arg2, POINTER_TYPE))
10665 return do_mpfr_remquo (arg0, arg1, arg2);
10668 case BUILT_IN_MEMSET:
10669 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10671 case BUILT_IN_BCOPY:
10672 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10674 case BUILT_IN_MEMCPY:
10675 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10677 case BUILT_IN_MEMPCPY:
10678 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10680 case BUILT_IN_MEMMOVE:
10681 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10683 case BUILT_IN_STRNCAT:
10684 return fold_builtin_strncat (arg0, arg1, arg2);
10686 case BUILT_IN_STRNCPY:
10687 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10689 case BUILT_IN_STRNCMP:
10690 return fold_builtin_strncmp (arg0, arg1, arg2);
10692 case BUILT_IN_MEMCHR:
10693 return fold_builtin_memchr (arg0, arg1, arg2, type);
10695 case BUILT_IN_BCMP:
10696 case BUILT_IN_MEMCMP:
10697 return fold_builtin_memcmp (arg0, arg1, arg2);;
10699 case BUILT_IN_SPRINTF:
10700 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10702 case BUILT_IN_STRCPY_CHK:
10703 case BUILT_IN_STPCPY_CHK:
10704 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10707 case BUILT_IN_STRCAT_CHK:
10708 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10710 case BUILT_IN_PRINTF_CHK:
10711 case BUILT_IN_VPRINTF_CHK:
10712 if (!validate_arg (arg0, INTEGER_TYPE)
10713 || TREE_SIDE_EFFECTS (arg0))
10716 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10719 case BUILT_IN_FPRINTF:
10720 case BUILT_IN_FPRINTF_UNLOCKED:
10721 case BUILT_IN_VFPRINTF:
10722 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10724 case BUILT_IN_FPRINTF_CHK:
10725 case BUILT_IN_VFPRINTF_CHK:
10726 if (!validate_arg (arg1, INTEGER_TYPE)
10727 || TREE_SIDE_EFFECTS (arg1))
10730 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10739 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10740 ARG2, and ARG3. IGNORE is true if the result of the function call is
10741 ignored. This function returns NULL_TREE if no simplification was
10745 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10748 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10752 case BUILT_IN_MEMCPY_CHK:
10753 case BUILT_IN_MEMPCPY_CHK:
10754 case BUILT_IN_MEMMOVE_CHK:
10755 case BUILT_IN_MEMSET_CHK:
10756 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10758 DECL_FUNCTION_CODE (fndecl));
10760 case BUILT_IN_STRNCPY_CHK:
10761 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10763 case BUILT_IN_STRNCAT_CHK:
10764 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10766 case BUILT_IN_FPRINTF_CHK:
10767 case BUILT_IN_VFPRINTF_CHK:
10768 if (!validate_arg (arg1, INTEGER_TYPE)
10769 || TREE_SIDE_EFFECTS (arg1))
10772 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10782 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10783 arguments, where NARGS <= 4. IGNORE is true if the result of the
10784 function call is ignored. This function returns NULL_TREE if no
10785 simplification was possible. Note that this only folds builtins with
10786 fixed argument patterns. Foldings that do varargs-to-varargs
10787 transformations, or that match calls with more than 4 arguments,
10788 need to be handled with fold_builtin_varargs instead. */
10790 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10793 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10795 tree ret = NULL_TREE;
10800 ret = fold_builtin_0 (fndecl, ignore);
10803 ret = fold_builtin_1 (fndecl, args[0], ignore);
10806 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10809 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10812 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10820 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10821 TREE_NO_WARNING (ret) = 1;
10827 /* Builtins with folding operations that operate on "..." arguments
10828 need special handling; we need to store the arguments in a convenient
10829 data structure before attempting any folding. Fortunately there are
10830 only a few builtins that fall into this category. FNDECL is the
10831 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10832 result of the function call is ignored. */
10835 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10837 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10838 tree ret = NULL_TREE;
10842 case BUILT_IN_SPRINTF_CHK:
10843 case BUILT_IN_VSPRINTF_CHK:
10844 ret = fold_builtin_sprintf_chk (exp, fcode);
10847 case BUILT_IN_SNPRINTF_CHK:
10848 case BUILT_IN_VSNPRINTF_CHK:
10849 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10852 case BUILT_IN_FPCLASSIFY:
10853 ret = fold_builtin_fpclassify (exp);
10861 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10862 TREE_NO_WARNING (ret) = 1;
10868 /* Return true if FNDECL shouldn't be folded right now.
10869 If a built-in function has an inline attribute always_inline
10870 wrapper, defer folding it after always_inline functions have
10871 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10872 might not be performed. */
10875 avoid_folding_inline_builtin (tree fndecl)
10877 return (DECL_DECLARED_INLINE_P (fndecl)
10878 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10880 && !cfun->always_inline_functions_inlined
10881 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10884 /* A wrapper function for builtin folding that prevents warnings for
10885 "statement without effect" and the like, caused by removing the
10886 call node earlier than the warning is generated. */
10889 fold_call_expr (tree exp, bool ignore)
10891 tree ret = NULL_TREE;
10892 tree fndecl = get_callee_fndecl (exp);
10894 && TREE_CODE (fndecl) == FUNCTION_DECL
10895 && DECL_BUILT_IN (fndecl)
10896 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10897 yet. Defer folding until we see all the arguments
10898 (after inlining). */
10899 && !CALL_EXPR_VA_ARG_PACK (exp))
10901 int nargs = call_expr_nargs (exp);
10903 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10904 instead last argument is __builtin_va_arg_pack (). Defer folding
10905 even in that case, until arguments are finalized. */
10906 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10908 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10910 && TREE_CODE (fndecl2) == FUNCTION_DECL
10911 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10912 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10916 if (avoid_folding_inline_builtin (fndecl))
10919 /* FIXME: Don't use a list in this interface. */
10920 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10921 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10924 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10926 tree *args = CALL_EXPR_ARGP (exp);
10927 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10930 ret = fold_builtin_varargs (fndecl, exp, ignore);
10933 /* Propagate location information from original call to
10934 expansion of builtin. Otherwise things like
10935 maybe_emit_chk_warning, that operate on the expansion
10936 of a builtin, will use the wrong location information. */
10937 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10939 tree realret = ret;
10940 if (TREE_CODE (ret) == NOP_EXPR)
10941 realret = TREE_OPERAND (ret, 0);
10942 if (CAN_HAVE_LOCATION_P (realret)
10943 && !EXPR_HAS_LOCATION (realret))
10944 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10953 /* Conveniently construct a function call expression. FNDECL names the
10954 function to be called and ARGLIST is a TREE_LIST of arguments. */
10957 build_function_call_expr (tree fndecl, tree arglist)
10959 tree fntype = TREE_TYPE (fndecl);
10960 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10961 int n = list_length (arglist);
10962 tree *argarray = (tree *) alloca (n * sizeof (tree));
10965 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10966 argarray[i] = TREE_VALUE (arglist);
10967 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10970 /* Conveniently construct a function call expression. FNDECL names the
10971 function to be called, N is the number of arguments, and the "..."
10972 parameters are the argument expressions. */
10975 build_call_expr (tree fndecl, int n, ...)
10978 tree fntype = TREE_TYPE (fndecl);
10979 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10980 tree *argarray = (tree *) alloca (n * sizeof (tree));
10984 for (i = 0; i < n; i++)
10985 argarray[i] = va_arg (ap, tree);
10987 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10990 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10991 N arguments are passed in the array ARGARRAY. */
10994 fold_builtin_call_array (tree type,
10999 tree ret = NULL_TREE;
11003 if (TREE_CODE (fn) == ADDR_EXPR)
11005 tree fndecl = TREE_OPERAND (fn, 0);
11006 if (TREE_CODE (fndecl) == FUNCTION_DECL
11007 && DECL_BUILT_IN (fndecl))
11009 /* If last argument is __builtin_va_arg_pack (), arguments to this
11010 function are not finalized yet. Defer folding until they are. */
11011 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11013 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11015 && TREE_CODE (fndecl2) == FUNCTION_DECL
11016 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11017 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11018 return build_call_array (type, fn, n, argarray);
11020 if (avoid_folding_inline_builtin (fndecl))
11021 return build_call_array (type, fn, n, argarray);
11022 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11024 tree arglist = NULL_TREE;
11025 for (i = n - 1; i >= 0; i--)
11026 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
11027 ret = targetm.fold_builtin (fndecl, arglist, false);
11030 return build_call_array (type, fn, n, argarray);
11032 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11034 /* First try the transformations that don't require consing up
11036 ret = fold_builtin_n (fndecl, argarray, n, false);
11041 /* If we got this far, we need to build an exp. */
11042 exp = build_call_array (type, fn, n, argarray);
11043 ret = fold_builtin_varargs (fndecl, exp, false);
11044 return ret ? ret : exp;
11048 return build_call_array (type, fn, n, argarray);
11051 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11052 along with N new arguments specified as the "..." parameters. SKIP
11053 is the number of arguments in EXP to be omitted. This function is used
11054 to do varargs-to-varargs transformations. */
11057 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
11059 int oldnargs = call_expr_nargs (exp);
11060 int nargs = oldnargs - skip + n;
11061 tree fntype = TREE_TYPE (fndecl);
11062 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11070 buffer = XALLOCAVEC (tree, nargs);
11072 for (i = 0; i < n; i++)
11073 buffer[i] = va_arg (ap, tree);
11075 for (j = skip; j < oldnargs; j++, i++)
11076 buffer[i] = CALL_EXPR_ARG (exp, j);
11079 buffer = CALL_EXPR_ARGP (exp) + skip;
11081 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
11084 /* Validate a single argument ARG against a tree code CODE representing
11088 validate_arg (const_tree arg, enum tree_code code)
11092 else if (code == POINTER_TYPE)
11093 return POINTER_TYPE_P (TREE_TYPE (arg));
11094 else if (code == INTEGER_TYPE)
11095 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11096 return code == TREE_CODE (TREE_TYPE (arg));
11099 /* This function validates the types of a function call argument list
11100 against a specified list of tree_codes. If the last specifier is a 0,
11101 that represents an ellipses, otherwise the last specifier must be a
11104 This is the GIMPLE version of validate_arglist. Eventually we want to
11105 completely convert builtins.c to work from GIMPLEs and the tree based
11106 validate_arglist will then be removed. */
11109 validate_gimple_arglist (const_gimple call, ...)
11111 enum tree_code code;
11117 va_start (ap, call);
11122 code = va_arg (ap, enum tree_code);
11126 /* This signifies an ellipses, any further arguments are all ok. */
11130 /* This signifies an endlink, if no arguments remain, return
11131 true, otherwise return false. */
11132 res = (i == gimple_call_num_args (call));
11135 /* If no parameters remain or the parameter's code does not
11136 match the specified code, return false. Otherwise continue
11137 checking any remaining arguments. */
11138 arg = gimple_call_arg (call, i++);
11139 if (!validate_arg (arg, code))
11146 /* We need gotos here since we can only have one VA_CLOSE in a
11154 /* This function validates the types of a function call argument list
11155 against a specified list of tree_codes. If the last specifier is a 0,
11156 that represents an ellipses, otherwise the last specifier must be a
11160 validate_arglist (const_tree callexpr, ...)
11162 enum tree_code code;
11165 const_call_expr_arg_iterator iter;
11168 va_start (ap, callexpr);
11169 init_const_call_expr_arg_iterator (callexpr, &iter);
11173 code = va_arg (ap, enum tree_code);
11177 /* This signifies an ellipses, any further arguments are all ok. */
11181 /* This signifies an endlink, if no arguments remain, return
11182 true, otherwise return false. */
11183 res = !more_const_call_expr_args_p (&iter);
11186 /* If no parameters remain or the parameter's code does not
11187 match the specified code, return false. Otherwise continue
11188 checking any remaining arguments. */
11189 arg = next_const_call_expr_arg (&iter);
11190 if (!validate_arg (arg, code))
11197 /* We need gotos here since we can only have one VA_CLOSE in a
11205 /* Default target-specific builtin expander that does nothing. */
11208 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11209 rtx target ATTRIBUTE_UNUSED,
11210 rtx subtarget ATTRIBUTE_UNUSED,
11211 enum machine_mode mode ATTRIBUTE_UNUSED,
11212 int ignore ATTRIBUTE_UNUSED)
11217 /* Returns true is EXP represents data that would potentially reside
11218 in a readonly section. */
11221 readonly_data_expr (tree exp)
11225 if (TREE_CODE (exp) != ADDR_EXPR)
11228 exp = get_base_address (TREE_OPERAND (exp, 0));
11232 /* Make sure we call decl_readonly_section only for trees it
11233 can handle (since it returns true for everything it doesn't
11235 if (TREE_CODE (exp) == STRING_CST
11236 || TREE_CODE (exp) == CONSTRUCTOR
11237 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11238 return decl_readonly_section (exp, 0);
11243 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11244 to the call, and TYPE is its return type.
11246 Return NULL_TREE if no simplification was possible, otherwise return the
11247 simplified form of the call as a tree.
11249 The simplified form may be a constant or other expression which
11250 computes the same value, but in a more efficient manner (including
11251 calls to other builtin functions).
11253 The call may contain arguments which need to be evaluated, but
11254 which are not useful to determine the result of the call. In
11255 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11256 COMPOUND_EXPR will be an argument which must be evaluated.
11257 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11258 COMPOUND_EXPR in the chain will contain the tree for the simplified
11259 form of the builtin function call. */
11262 fold_builtin_strstr (tree s1, tree s2, tree type)
11264 if (!validate_arg (s1, POINTER_TYPE)
11265 || !validate_arg (s2, POINTER_TYPE))
11270 const char *p1, *p2;
11272 p2 = c_getstr (s2);
11276 p1 = c_getstr (s1);
11279 const char *r = strstr (p1, p2);
11283 return build_int_cst (TREE_TYPE (s1), 0);
11285 /* Return an offset into the constant string argument. */
11286 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11287 s1, size_int (r - p1));
11288 return fold_convert (type, tem);
11291 /* The argument is const char *, and the result is char *, so we need
11292 a type conversion here to avoid a warning. */
11294 return fold_convert (type, s1);
11299 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11303 /* New argument list transforming strstr(s1, s2) to
11304 strchr(s1, s2[0]). */
11305 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11309 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11310 the call, and TYPE is its return type.
11312 Return NULL_TREE if no simplification was possible, otherwise return the
11313 simplified form of the call as a tree.
11315 The simplified form may be a constant or other expression which
11316 computes the same value, but in a more efficient manner (including
11317 calls to other builtin functions).
11319 The call may contain arguments which need to be evaluated, but
11320 which are not useful to determine the result of the call. In
11321 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11322 COMPOUND_EXPR will be an argument which must be evaluated.
11323 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11324 COMPOUND_EXPR in the chain will contain the tree for the simplified
11325 form of the builtin function call. */
11328 fold_builtin_strchr (tree s1, tree s2, tree type)
11330 if (!validate_arg (s1, POINTER_TYPE)
11331 || !validate_arg (s2, INTEGER_TYPE))
11337 if (TREE_CODE (s2) != INTEGER_CST)
11340 p1 = c_getstr (s1);
11347 if (target_char_cast (s2, &c))
11350 r = strchr (p1, c);
11353 return build_int_cst (TREE_TYPE (s1), 0);
11355 /* Return an offset into the constant string argument. */
11356 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11357 s1, size_int (r - p1));
11358 return fold_convert (type, tem);
11364 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11365 the call, and TYPE is its return type.
11367 Return NULL_TREE if no simplification was possible, otherwise return the
11368 simplified form of the call as a tree.
11370 The simplified form may be a constant or other expression which
11371 computes the same value, but in a more efficient manner (including
11372 calls to other builtin functions).
11374 The call may contain arguments which need to be evaluated, but
11375 which are not useful to determine the result of the call. In
11376 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11377 COMPOUND_EXPR will be an argument which must be evaluated.
11378 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11379 COMPOUND_EXPR in the chain will contain the tree for the simplified
11380 form of the builtin function call. */
11383 fold_builtin_strrchr (tree s1, tree s2, tree type)
11385 if (!validate_arg (s1, POINTER_TYPE)
11386 || !validate_arg (s2, INTEGER_TYPE))
11393 if (TREE_CODE (s2) != INTEGER_CST)
11396 p1 = c_getstr (s1);
11403 if (target_char_cast (s2, &c))
11406 r = strrchr (p1, c);
11409 return build_int_cst (TREE_TYPE (s1), 0);
11411 /* Return an offset into the constant string argument. */
11412 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11413 s1, size_int (r - p1));
11414 return fold_convert (type, tem);
11417 if (! integer_zerop (s2))
11420 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11424 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11425 return build_call_expr (fn, 2, s1, s2);
11429 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11430 to the call, and TYPE is its return type.
11432 Return NULL_TREE if no simplification was possible, otherwise return the
11433 simplified form of the call as a tree.
11435 The simplified form may be a constant or other expression which
11436 computes the same value, but in a more efficient manner (including
11437 calls to other builtin functions).
11439 The call may contain arguments which need to be evaluated, but
11440 which are not useful to determine the result of the call. In
11441 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11442 COMPOUND_EXPR will be an argument which must be evaluated.
11443 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11444 COMPOUND_EXPR in the chain will contain the tree for the simplified
11445 form of the builtin function call. */
11448 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11450 if (!validate_arg (s1, POINTER_TYPE)
11451 || !validate_arg (s2, POINTER_TYPE))
11456 const char *p1, *p2;
11458 p2 = c_getstr (s2);
11462 p1 = c_getstr (s1);
11465 const char *r = strpbrk (p1, p2);
11469 return build_int_cst (TREE_TYPE (s1), 0);
11471 /* Return an offset into the constant string argument. */
11472 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11473 s1, size_int (r - p1));
11474 return fold_convert (type, tem);
11478 /* strpbrk(x, "") == NULL.
11479 Evaluate and ignore s1 in case it had side-effects. */
11480 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11483 return NULL_TREE; /* Really call strpbrk. */
11485 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11489 /* New argument list transforming strpbrk(s1, s2) to
11490 strchr(s1, s2[0]). */
11491 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11495 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11498 Return NULL_TREE if no simplification was possible, otherwise return the
11499 simplified form of the call as a tree.
11501 The simplified form may be a constant or other expression which
11502 computes the same value, but in a more efficient manner (including
11503 calls to other builtin functions).
11505 The call may contain arguments which need to be evaluated, but
11506 which are not useful to determine the result of the call. In
11507 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11508 COMPOUND_EXPR will be an argument which must be evaluated.
11509 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11510 COMPOUND_EXPR in the chain will contain the tree for the simplified
11511 form of the builtin function call. */
11514 fold_builtin_strcat (tree dst, tree src)
11516 if (!validate_arg (dst, POINTER_TYPE)
11517 || !validate_arg (src, POINTER_TYPE))
11521 const char *p = c_getstr (src);
11523 /* If the string length is zero, return the dst parameter. */
11524 if (p && *p == '\0')
11531 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11532 arguments to the call.
11534 Return NULL_TREE if no simplification was possible, otherwise return the
11535 simplified form of the call as a tree.
11537 The simplified form may be a constant or other expression which
11538 computes the same value, but in a more efficient manner (including
11539 calls to other builtin functions).
11541 The call may contain arguments which need to be evaluated, but
11542 which are not useful to determine the result of the call. In
11543 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11544 COMPOUND_EXPR will be an argument which must be evaluated.
11545 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11546 COMPOUND_EXPR in the chain will contain the tree for the simplified
11547 form of the builtin function call. */
11550 fold_builtin_strncat (tree dst, tree src, tree len)
11552 if (!validate_arg (dst, POINTER_TYPE)
11553 || !validate_arg (src, POINTER_TYPE)
11554 || !validate_arg (len, INTEGER_TYPE))
11558 const char *p = c_getstr (src);
11560 /* If the requested length is zero, or the src parameter string
11561 length is zero, return the dst parameter. */
11562 if (integer_zerop (len) || (p && *p == '\0'))
11563 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11565 /* If the requested len is greater than or equal to the string
11566 length, call strcat. */
11567 if (TREE_CODE (len) == INTEGER_CST && p
11568 && compare_tree_int (len, strlen (p)) >= 0)
11570 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11572 /* If the replacement _DECL isn't initialized, don't do the
11577 return build_call_expr (fn, 2, dst, src);
11583 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11586 Return NULL_TREE if no simplification was possible, otherwise return the
11587 simplified form of the call as a tree.
11589 The simplified form may be a constant or other expression which
11590 computes the same value, but in a more efficient manner (including
11591 calls to other builtin functions).
11593 The call may contain arguments which need to be evaluated, but
11594 which are not useful to determine the result of the call. In
11595 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11596 COMPOUND_EXPR will be an argument which must be evaluated.
11597 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11598 COMPOUND_EXPR in the chain will contain the tree for the simplified
11599 form of the builtin function call. */
11602 fold_builtin_strspn (tree s1, tree s2)
11604 if (!validate_arg (s1, POINTER_TYPE)
11605 || !validate_arg (s2, POINTER_TYPE))
11609 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11611 /* If both arguments are constants, evaluate at compile-time. */
11614 const size_t r = strspn (p1, p2);
11615 return size_int (r);
11618 /* If either argument is "", return NULL_TREE. */
11619 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11620 /* Evaluate and ignore both arguments in case either one has
11622 return omit_two_operands (size_type_node, size_zero_node,
11628 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11631 Return NULL_TREE if no simplification was possible, otherwise return the
11632 simplified form of the call as a tree.
11634 The simplified form may be a constant or other expression which
11635 computes the same value, but in a more efficient manner (including
11636 calls to other builtin functions).
11638 The call may contain arguments which need to be evaluated, but
11639 which are not useful to determine the result of the call. In
11640 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11641 COMPOUND_EXPR will be an argument which must be evaluated.
11642 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11643 COMPOUND_EXPR in the chain will contain the tree for the simplified
11644 form of the builtin function call. */
11647 fold_builtin_strcspn (tree s1, tree s2)
11649 if (!validate_arg (s1, POINTER_TYPE)
11650 || !validate_arg (s2, POINTER_TYPE))
11654 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11656 /* If both arguments are constants, evaluate at compile-time. */
11659 const size_t r = strcspn (p1, p2);
11660 return size_int (r);
11663 /* If the first argument is "", return NULL_TREE. */
11664 if (p1 && *p1 == '\0')
11666 /* Evaluate and ignore argument s2 in case it has
11668 return omit_one_operand (size_type_node,
11669 size_zero_node, s2);
11672 /* If the second argument is "", return __builtin_strlen(s1). */
11673 if (p2 && *p2 == '\0')
11675 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11677 /* If the replacement _DECL isn't initialized, don't do the
11682 return build_call_expr (fn, 1, s1);
11688 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11689 to the call. IGNORE is true if the value returned
11690 by the builtin will be ignored. UNLOCKED is true is true if this
11691 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11692 the known length of the string. Return NULL_TREE if no simplification
11696 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11698 /* If we're using an unlocked function, assume the other unlocked
11699 functions exist explicitly. */
11700 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11701 : implicit_built_in_decls[BUILT_IN_FPUTC];
11702 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11703 : implicit_built_in_decls[BUILT_IN_FWRITE];
11705 /* If the return value is used, don't do the transformation. */
11709 /* Verify the arguments in the original call. */
11710 if (!validate_arg (arg0, POINTER_TYPE)
11711 || !validate_arg (arg1, POINTER_TYPE))
11715 len = c_strlen (arg0, 0);
11717 /* Get the length of the string passed to fputs. If the length
11718 can't be determined, punt. */
11720 || TREE_CODE (len) != INTEGER_CST)
11723 switch (compare_tree_int (len, 1))
11725 case -1: /* length is 0, delete the call entirely . */
11726 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11728 case 0: /* length is 1, call fputc. */
11730 const char *p = c_getstr (arg0);
11735 return build_call_expr (fn_fputc, 2,
11736 build_int_cst (NULL_TREE, p[0]), arg1);
11742 case 1: /* length is greater than 1, call fwrite. */
11744 /* If optimizing for size keep fputs. */
11745 if (optimize_function_for_size_p (cfun))
11747 /* New argument list transforming fputs(string, stream) to
11748 fwrite(string, 1, len, stream). */
11750 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11755 gcc_unreachable ();
11760 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11761 produced. False otherwise. This is done so that we don't output the error
11762 or warning twice or three times. */
11765 fold_builtin_next_arg (tree exp, bool va_start_p)
11767 tree fntype = TREE_TYPE (current_function_decl);
11768 int nargs = call_expr_nargs (exp);
11771 if (TYPE_ARG_TYPES (fntype) == 0
11772 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11773 == void_type_node))
11775 error ("%<va_start%> used in function with fixed args");
11781 if (va_start_p && (nargs != 2))
11783 error ("wrong number of arguments to function %<va_start%>");
11786 arg = CALL_EXPR_ARG (exp, 1);
11788 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11789 when we checked the arguments and if needed issued a warning. */
11794 /* Evidently an out of date version of <stdarg.h>; can't validate
11795 va_start's second argument, but can still work as intended. */
11796 warning (0, "%<__builtin_next_arg%> called without an argument");
11799 else if (nargs > 1)
11801 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11804 arg = CALL_EXPR_ARG (exp, 0);
11807 if (TREE_CODE (arg) == SSA_NAME)
11808 arg = SSA_NAME_VAR (arg);
11810 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11811 or __builtin_next_arg (0) the first time we see it, after checking
11812 the arguments and if needed issuing a warning. */
11813 if (!integer_zerop (arg))
11815 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11817 /* Strip off all nops for the sake of the comparison. This
11818 is not quite the same as STRIP_NOPS. It does more.
11819 We must also strip off INDIRECT_EXPR for C++ reference
11821 while (CONVERT_EXPR_P (arg)
11822 || TREE_CODE (arg) == INDIRECT_REF)
11823 arg = TREE_OPERAND (arg, 0);
11824 if (arg != last_parm)
11826 /* FIXME: Sometimes with the tree optimizers we can get the
11827 not the last argument even though the user used the last
11828 argument. We just warn and set the arg to be the last
11829 argument so that we will get wrong-code because of
11831 warning (0, "second parameter of %<va_start%> not last named argument");
11834 /* Undefined by C99 7.15.1.4p4 (va_start):
11835 "If the parameter parmN is declared with the register storage
11836 class, with a function or array type, or with a type that is
11837 not compatible with the type that results after application of
11838 the default argument promotions, the behavior is undefined."
11840 else if (DECL_REGISTER (arg))
11841 warning (0, "undefined behaviour when second parameter of "
11842 "%<va_start%> is declared with %<register%> storage");
11844 /* We want to verify the second parameter just once before the tree
11845 optimizers are run and then avoid keeping it in the tree,
11846 as otherwise we could warn even for correct code like:
11847 void foo (int i, ...)
11848 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11850 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11852 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11858 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11859 ORIG may be null if this is a 2-argument call. We don't attempt to
11860 simplify calls with more than 3 arguments.
11862 Return NULL_TREE if no simplification was possible, otherwise return the
11863 simplified form of the call as a tree. If IGNORED is true, it means that
11864 the caller does not use the returned value of the function. */
11867 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11870 const char *fmt_str = NULL;
11872 /* Verify the required arguments in the original call. We deal with two
11873 types of sprintf() calls: 'sprintf (str, fmt)' and
11874 'sprintf (dest, "%s", orig)'. */
11875 if (!validate_arg (dest, POINTER_TYPE)
11876 || !validate_arg (fmt, POINTER_TYPE))
11878 if (orig && !validate_arg (orig, POINTER_TYPE))
11881 /* Check whether the format is a literal string constant. */
11882 fmt_str = c_getstr (fmt);
11883 if (fmt_str == NULL)
11887 retval = NULL_TREE;
11889 if (!init_target_chars ())
11892 /* If the format doesn't contain % args or %%, use strcpy. */
11893 if (strchr (fmt_str, target_percent) == NULL)
11895 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11900 /* Don't optimize sprintf (buf, "abc", ptr++). */
11904 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11905 'format' is known to contain no % formats. */
11906 call = build_call_expr (fn, 2, dest, fmt);
11908 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11911 /* If the format is "%s", use strcpy if the result isn't used. */
11912 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11915 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11920 /* Don't crash on sprintf (str1, "%s"). */
11924 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11927 retval = c_strlen (orig, 1);
11928 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11931 call = build_call_expr (fn, 2, dest, orig);
11934 if (call && retval)
11936 retval = fold_convert
11937 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11939 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11945 /* Expand a call EXP to __builtin_object_size. */
11948 expand_builtin_object_size (tree exp)
11951 int object_size_type;
11952 tree fndecl = get_callee_fndecl (exp);
11954 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11956 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11958 expand_builtin_trap ();
11962 ost = CALL_EXPR_ARG (exp, 1);
11965 if (TREE_CODE (ost) != INTEGER_CST
11966 || tree_int_cst_sgn (ost) < 0
11967 || compare_tree_int (ost, 3) > 0)
11969 error ("%Klast argument of %D is not integer constant between 0 and 3",
11971 expand_builtin_trap ();
11975 object_size_type = tree_low_cst (ost, 0);
11977 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11980 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11981 FCODE is the BUILT_IN_* to use.
11982 Return NULL_RTX if we failed; the caller should emit a normal call,
11983 otherwise try to get the result in TARGET, if convenient (and in
11984 mode MODE if that's convenient). */
11987 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11988 enum built_in_function fcode)
11990 tree dest, src, len, size;
11992 if (!validate_arglist (exp,
11994 fcode == BUILT_IN_MEMSET_CHK
11995 ? INTEGER_TYPE : POINTER_TYPE,
11996 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11999 dest = CALL_EXPR_ARG (exp, 0);
12000 src = CALL_EXPR_ARG (exp, 1);
12001 len = CALL_EXPR_ARG (exp, 2);
12002 size = CALL_EXPR_ARG (exp, 3);
12004 if (! host_integerp (size, 1))
12007 if (host_integerp (len, 1) || integer_all_onesp (size))
12011 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12013 warning_at (tree_nonartificial_location (exp),
12014 0, "%Kcall to %D will always overflow destination buffer",
12015 exp, get_callee_fndecl (exp));
12020 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12021 mem{cpy,pcpy,move,set} is available. */
12024 case BUILT_IN_MEMCPY_CHK:
12025 fn = built_in_decls[BUILT_IN_MEMCPY];
12027 case BUILT_IN_MEMPCPY_CHK:
12028 fn = built_in_decls[BUILT_IN_MEMPCPY];
12030 case BUILT_IN_MEMMOVE_CHK:
12031 fn = built_in_decls[BUILT_IN_MEMMOVE];
12033 case BUILT_IN_MEMSET_CHK:
12034 fn = built_in_decls[BUILT_IN_MEMSET];
12043 fn = build_call_expr (fn, 3, dest, src, len);
12044 STRIP_TYPE_NOPS (fn);
12045 while (TREE_CODE (fn) == COMPOUND_EXPR)
12047 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12049 fn = TREE_OPERAND (fn, 1);
12051 if (TREE_CODE (fn) == CALL_EXPR)
12052 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12053 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12055 else if (fcode == BUILT_IN_MEMSET_CHK)
12059 unsigned int dest_align
12060 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
12062 /* If DEST is not a pointer type, call the normal function. */
12063 if (dest_align == 0)
12066 /* If SRC and DEST are the same (and not volatile), do nothing. */
12067 if (operand_equal_p (src, dest, 0))
12071 if (fcode != BUILT_IN_MEMPCPY_CHK)
12073 /* Evaluate and ignore LEN in case it has side-effects. */
12074 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12075 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12078 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12079 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12082 /* __memmove_chk special case. */
12083 if (fcode == BUILT_IN_MEMMOVE_CHK)
12085 unsigned int src_align
12086 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
12088 if (src_align == 0)
12091 /* If src is categorized for a readonly section we can use
12092 normal __memcpy_chk. */
12093 if (readonly_data_expr (src))
12095 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12098 fn = build_call_expr (fn, 4, dest, src, len, size);
12099 STRIP_TYPE_NOPS (fn);
12100 while (TREE_CODE (fn) == COMPOUND_EXPR)
12102 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
12104 fn = TREE_OPERAND (fn, 1);
12106 if (TREE_CODE (fn) == CALL_EXPR)
12107 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12108 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12115 /* Emit warning if a buffer overflow is detected at compile time. */
12118 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12122 location_t loc = tree_nonartificial_location (exp);
12126 case BUILT_IN_STRCPY_CHK:
12127 case BUILT_IN_STPCPY_CHK:
12128 /* For __strcat_chk the warning will be emitted only if overflowing
12129 by at least strlen (dest) + 1 bytes. */
12130 case BUILT_IN_STRCAT_CHK:
12131 len = CALL_EXPR_ARG (exp, 1);
12132 size = CALL_EXPR_ARG (exp, 2);
12135 case BUILT_IN_STRNCAT_CHK:
12136 case BUILT_IN_STRNCPY_CHK:
12137 len = CALL_EXPR_ARG (exp, 2);
12138 size = CALL_EXPR_ARG (exp, 3);
12140 case BUILT_IN_SNPRINTF_CHK:
12141 case BUILT_IN_VSNPRINTF_CHK:
12142 len = CALL_EXPR_ARG (exp, 1);
12143 size = CALL_EXPR_ARG (exp, 3);
12146 gcc_unreachable ();
12152 if (! host_integerp (size, 1) || integer_all_onesp (size))
12157 len = c_strlen (len, 1);
12158 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12161 else if (fcode == BUILT_IN_STRNCAT_CHK)
12163 tree src = CALL_EXPR_ARG (exp, 1);
12164 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12166 src = c_strlen (src, 1);
12167 if (! src || ! host_integerp (src, 1))
12169 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12170 exp, get_callee_fndecl (exp));
12173 else if (tree_int_cst_lt (src, size))
12176 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12179 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12180 exp, get_callee_fndecl (exp));
12183 /* Emit warning if a buffer overflow is detected at compile time
12184 in __sprintf_chk/__vsprintf_chk calls. */
12187 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12189 tree dest, size, len, fmt, flag;
12190 const char *fmt_str;
12191 int nargs = call_expr_nargs (exp);
12193 /* Verify the required arguments in the original call. */
12197 dest = CALL_EXPR_ARG (exp, 0);
12198 flag = CALL_EXPR_ARG (exp, 1);
12199 size = CALL_EXPR_ARG (exp, 2);
12200 fmt = CALL_EXPR_ARG (exp, 3);
12202 if (! host_integerp (size, 1) || integer_all_onesp (size))
12205 /* Check whether the format is a literal string constant. */
12206 fmt_str = c_getstr (fmt);
12207 if (fmt_str == NULL)
12210 if (!init_target_chars ())
12213 /* If the format doesn't contain % args or %%, we know its size. */
12214 if (strchr (fmt_str, target_percent) == 0)
12215 len = build_int_cstu (size_type_node, strlen (fmt_str));
12216 /* If the format is "%s" and first ... argument is a string literal,
12218 else if (fcode == BUILT_IN_SPRINTF_CHK
12219 && strcmp (fmt_str, target_percent_s) == 0)
12225 arg = CALL_EXPR_ARG (exp, 4);
12226 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12229 len = c_strlen (arg, 1);
12230 if (!len || ! host_integerp (len, 1))
12236 if (! tree_int_cst_lt (len, size))
12237 warning_at (tree_nonartificial_location (exp),
12238 0, "%Kcall to %D will always overflow destination buffer",
12239 exp, get_callee_fndecl (exp));
12242 /* Emit warning if a free is called with address of a variable. */
12245 maybe_emit_free_warning (tree exp)
12247 tree arg = CALL_EXPR_ARG (exp, 0);
12250 if (TREE_CODE (arg) != ADDR_EXPR)
12253 arg = get_base_address (TREE_OPERAND (arg, 0));
12254 if (arg == NULL || INDIRECT_REF_P (arg))
12257 if (SSA_VAR_P (arg))
12258 warning_at (tree_nonartificial_location (exp),
12259 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12261 warning_at (tree_nonartificial_location (exp),
12262 0, "%Kattempt to free a non-heap object", exp);
12265 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12269 fold_builtin_object_size (tree ptr, tree ost)
12271 tree ret = NULL_TREE;
12272 int object_size_type;
12274 if (!validate_arg (ptr, POINTER_TYPE)
12275 || !validate_arg (ost, INTEGER_TYPE))
12280 if (TREE_CODE (ost) != INTEGER_CST
12281 || tree_int_cst_sgn (ost) < 0
12282 || compare_tree_int (ost, 3) > 0)
12285 object_size_type = tree_low_cst (ost, 0);
12287 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12288 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12289 and (size_t) 0 for types 2 and 3. */
12290 if (TREE_SIDE_EFFECTS (ptr))
12291 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12293 if (TREE_CODE (ptr) == ADDR_EXPR)
12294 ret = build_int_cstu (size_type_node,
12295 compute_builtin_object_size (ptr, object_size_type));
12297 else if (TREE_CODE (ptr) == SSA_NAME)
12299 unsigned HOST_WIDE_INT bytes;
12301 /* If object size is not known yet, delay folding until
12302 later. Maybe subsequent passes will help determining
12304 bytes = compute_builtin_object_size (ptr, object_size_type);
12305 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12307 ret = build_int_cstu (size_type_node, bytes);
12312 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12313 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12314 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12321 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12322 DEST, SRC, LEN, and SIZE are the arguments to the call.
12323 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12324 code of the builtin. If MAXLEN is not NULL, it is maximum length
12325 passed as third argument. */
12328 fold_builtin_memory_chk (tree fndecl,
12329 tree dest, tree src, tree len, tree size,
12330 tree maxlen, bool ignore,
12331 enum built_in_function fcode)
12335 if (!validate_arg (dest, POINTER_TYPE)
12336 || !validate_arg (src,
12337 (fcode == BUILT_IN_MEMSET_CHK
12338 ? INTEGER_TYPE : POINTER_TYPE))
12339 || !validate_arg (len, INTEGER_TYPE)
12340 || !validate_arg (size, INTEGER_TYPE))
12343 /* If SRC and DEST are the same (and not volatile), return DEST
12344 (resp. DEST+LEN for __mempcpy_chk). */
12345 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12347 if (fcode != BUILT_IN_MEMPCPY_CHK)
12348 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12351 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12352 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
12356 if (! host_integerp (size, 1))
12359 if (! integer_all_onesp (size))
12361 if (! host_integerp (len, 1))
12363 /* If LEN is not constant, try MAXLEN too.
12364 For MAXLEN only allow optimizing into non-_ocs function
12365 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12366 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12368 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12370 /* (void) __mempcpy_chk () can be optimized into
12371 (void) __memcpy_chk (). */
12372 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12376 return build_call_expr (fn, 4, dest, src, len, size);
12384 if (tree_int_cst_lt (size, maxlen))
12389 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12390 mem{cpy,pcpy,move,set} is available. */
12393 case BUILT_IN_MEMCPY_CHK:
12394 fn = built_in_decls[BUILT_IN_MEMCPY];
12396 case BUILT_IN_MEMPCPY_CHK:
12397 fn = built_in_decls[BUILT_IN_MEMPCPY];
12399 case BUILT_IN_MEMMOVE_CHK:
12400 fn = built_in_decls[BUILT_IN_MEMMOVE];
12402 case BUILT_IN_MEMSET_CHK:
12403 fn = built_in_decls[BUILT_IN_MEMSET];
12412 return build_call_expr (fn, 3, dest, src, len);
12415 /* Fold a call to the __st[rp]cpy_chk builtin.
12416 DEST, SRC, and SIZE are the arguments to the call.
12417 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12418 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12419 strings passed as second argument. */
12422 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12423 tree maxlen, bool ignore,
12424 enum built_in_function fcode)
12428 if (!validate_arg (dest, POINTER_TYPE)
12429 || !validate_arg (src, POINTER_TYPE)
12430 || !validate_arg (size, INTEGER_TYPE))
12433 /* If SRC and DEST are the same (and not volatile), return DEST. */
12434 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12435 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12437 if (! host_integerp (size, 1))
12440 if (! integer_all_onesp (size))
12442 len = c_strlen (src, 1);
12443 if (! len || ! host_integerp (len, 1))
12445 /* If LEN is not constant, try MAXLEN too.
12446 For MAXLEN only allow optimizing into non-_ocs function
12447 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12448 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12450 if (fcode == BUILT_IN_STPCPY_CHK)
12455 /* If return value of __stpcpy_chk is ignored,
12456 optimize into __strcpy_chk. */
12457 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12461 return build_call_expr (fn, 3, dest, src, size);
12464 if (! len || TREE_SIDE_EFFECTS (len))
12467 /* If c_strlen returned something, but not a constant,
12468 transform __strcpy_chk into __memcpy_chk. */
12469 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12473 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12474 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12475 build_call_expr (fn, 4,
12476 dest, src, len, size));
12482 if (! tree_int_cst_lt (maxlen, size))
12486 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12487 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12488 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12492 return build_call_expr (fn, 2, dest, src);
12495 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12496 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12497 length passed as third argument. */
12500 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12505 if (!validate_arg (dest, POINTER_TYPE)
12506 || !validate_arg (src, POINTER_TYPE)
12507 || !validate_arg (len, INTEGER_TYPE)
12508 || !validate_arg (size, INTEGER_TYPE))
12511 if (! host_integerp (size, 1))
12514 if (! integer_all_onesp (size))
12516 if (! host_integerp (len, 1))
12518 /* If LEN is not constant, try MAXLEN too.
12519 For MAXLEN only allow optimizing into non-_ocs function
12520 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12521 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12527 if (tree_int_cst_lt (size, maxlen))
12531 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12532 fn = built_in_decls[BUILT_IN_STRNCPY];
12536 return build_call_expr (fn, 3, dest, src, len);
12539 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12540 are the arguments to the call. */
12543 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12548 if (!validate_arg (dest, POINTER_TYPE)
12549 || !validate_arg (src, POINTER_TYPE)
12550 || !validate_arg (size, INTEGER_TYPE))
12553 p = c_getstr (src);
12554 /* If the SRC parameter is "", return DEST. */
12555 if (p && *p == '\0')
12556 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12558 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12561 /* If __builtin_strcat_chk is used, assume strcat is available. */
12562 fn = built_in_decls[BUILT_IN_STRCAT];
12566 return build_call_expr (fn, 2, dest, src);
12569 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12573 fold_builtin_strncat_chk (tree fndecl,
12574 tree dest, tree src, tree len, tree size)
12579 if (!validate_arg (dest, POINTER_TYPE)
12580 || !validate_arg (src, POINTER_TYPE)
12581 || !validate_arg (size, INTEGER_TYPE)
12582 || !validate_arg (size, INTEGER_TYPE))
12585 p = c_getstr (src);
12586 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12587 if (p && *p == '\0')
12588 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12589 else if (integer_zerop (len))
12590 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12592 if (! host_integerp (size, 1))
12595 if (! integer_all_onesp (size))
12597 tree src_len = c_strlen (src, 1);
12599 && host_integerp (src_len, 1)
12600 && host_integerp (len, 1)
12601 && ! tree_int_cst_lt (len, src_len))
12603 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12604 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12608 return build_call_expr (fn, 3, dest, src, size);
12613 /* If __builtin_strncat_chk is used, assume strncat is available. */
12614 fn = built_in_decls[BUILT_IN_STRNCAT];
12618 return build_call_expr (fn, 3, dest, src, len);
12621 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12622 a normal call should be emitted rather than expanding the function
12623 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12626 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12628 tree dest, size, len, fn, fmt, flag;
12629 const char *fmt_str;
12630 int nargs = call_expr_nargs (exp);
12632 /* Verify the required arguments in the original call. */
12635 dest = CALL_EXPR_ARG (exp, 0);
12636 if (!validate_arg (dest, POINTER_TYPE))
12638 flag = CALL_EXPR_ARG (exp, 1);
12639 if (!validate_arg (flag, INTEGER_TYPE))
12641 size = CALL_EXPR_ARG (exp, 2);
12642 if (!validate_arg (size, INTEGER_TYPE))
12644 fmt = CALL_EXPR_ARG (exp, 3);
12645 if (!validate_arg (fmt, POINTER_TYPE))
12648 if (! host_integerp (size, 1))
12653 if (!init_target_chars ())
12656 /* Check whether the format is a literal string constant. */
12657 fmt_str = c_getstr (fmt);
12658 if (fmt_str != NULL)
12660 /* If the format doesn't contain % args or %%, we know the size. */
12661 if (strchr (fmt_str, target_percent) == 0)
12663 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12664 len = build_int_cstu (size_type_node, strlen (fmt_str));
12666 /* If the format is "%s" and first ... argument is a string literal,
12667 we know the size too. */
12668 else if (fcode == BUILT_IN_SPRINTF_CHK
12669 && strcmp (fmt_str, target_percent_s) == 0)
12675 arg = CALL_EXPR_ARG (exp, 4);
12676 if (validate_arg (arg, POINTER_TYPE))
12678 len = c_strlen (arg, 1);
12679 if (! len || ! host_integerp (len, 1))
12686 if (! integer_all_onesp (size))
12688 if (! len || ! tree_int_cst_lt (len, size))
12692 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12693 or if format doesn't contain % chars or is "%s". */
12694 if (! integer_zerop (flag))
12696 if (fmt_str == NULL)
12698 if (strchr (fmt_str, target_percent) != NULL
12699 && strcmp (fmt_str, target_percent_s))
12703 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12704 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12705 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12709 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12712 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12713 a normal call should be emitted rather than expanding the function
12714 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12715 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12716 passed as second argument. */
12719 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12720 enum built_in_function fcode)
12722 tree dest, size, len, fn, fmt, flag;
12723 const char *fmt_str;
12725 /* Verify the required arguments in the original call. */
12726 if (call_expr_nargs (exp) < 5)
12728 dest = CALL_EXPR_ARG (exp, 0);
12729 if (!validate_arg (dest, POINTER_TYPE))
12731 len = CALL_EXPR_ARG (exp, 1);
12732 if (!validate_arg (len, INTEGER_TYPE))
12734 flag = CALL_EXPR_ARG (exp, 2);
12735 if (!validate_arg (flag, INTEGER_TYPE))
12737 size = CALL_EXPR_ARG (exp, 3);
12738 if (!validate_arg (size, INTEGER_TYPE))
12740 fmt = CALL_EXPR_ARG (exp, 4);
12741 if (!validate_arg (fmt, POINTER_TYPE))
12744 if (! host_integerp (size, 1))
12747 if (! integer_all_onesp (size))
12749 if (! host_integerp (len, 1))
12751 /* If LEN is not constant, try MAXLEN too.
12752 For MAXLEN only allow optimizing into non-_ocs function
12753 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12754 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12760 if (tree_int_cst_lt (size, maxlen))
12764 if (!init_target_chars ())
12767 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12768 or if format doesn't contain % chars or is "%s". */
12769 if (! integer_zerop (flag))
12771 fmt_str = c_getstr (fmt);
12772 if (fmt_str == NULL)
12774 if (strchr (fmt_str, target_percent) != NULL
12775 && strcmp (fmt_str, target_percent_s))
12779 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12781 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12782 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12786 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12789 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12790 FMT and ARG are the arguments to the call; we don't fold cases with
12791 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12793 Return NULL_TREE if no simplification was possible, otherwise return the
12794 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12795 code of the function to be simplified. */
12798 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12799 enum built_in_function fcode)
12801 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12802 const char *fmt_str = NULL;
12804 /* If the return value is used, don't do the transformation. */
12808 /* Verify the required arguments in the original call. */
12809 if (!validate_arg (fmt, POINTER_TYPE))
12812 /* Check whether the format is a literal string constant. */
12813 fmt_str = c_getstr (fmt);
12814 if (fmt_str == NULL)
12817 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12819 /* If we're using an unlocked function, assume the other
12820 unlocked functions exist explicitly. */
12821 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12822 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12826 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12827 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12830 if (!init_target_chars ())
12833 if (strcmp (fmt_str, target_percent_s) == 0
12834 || strchr (fmt_str, target_percent) == NULL)
12838 if (strcmp (fmt_str, target_percent_s) == 0)
12840 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12843 if (!arg || !validate_arg (arg, POINTER_TYPE))
12846 str = c_getstr (arg);
12852 /* The format specifier doesn't contain any '%' characters. */
12853 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12859 /* If the string was "", printf does nothing. */
12860 if (str[0] == '\0')
12861 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12863 /* If the string has length of 1, call putchar. */
12864 if (str[1] == '\0')
12866 /* Given printf("c"), (where c is any one character,)
12867 convert "c"[0] to an int and pass that to the replacement
12869 newarg = build_int_cst (NULL_TREE, str[0]);
12871 call = build_call_expr (fn_putchar, 1, newarg);
12875 /* If the string was "string\n", call puts("string"). */
12876 size_t len = strlen (str);
12877 if ((unsigned char)str[len - 1] == target_newline)
12879 /* Create a NUL-terminated string that's one char shorter
12880 than the original, stripping off the trailing '\n'. */
12881 char *newstr = XALLOCAVEC (char, len);
12882 memcpy (newstr, str, len - 1);
12883 newstr[len - 1] = 0;
12885 newarg = build_string_literal (len, newstr);
12887 call = build_call_expr (fn_puts, 1, newarg);
12890 /* We'd like to arrange to call fputs(string,stdout) here,
12891 but we need stdout and don't have a way to get it yet. */
12896 /* The other optimizations can be done only on the non-va_list variants. */
12897 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12900 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12901 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12903 if (!arg || !validate_arg (arg, POINTER_TYPE))
12906 call = build_call_expr (fn_puts, 1, arg);
12909 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12910 else if (strcmp (fmt_str, target_percent_c) == 0)
12912 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12915 call = build_call_expr (fn_putchar, 1, arg);
12921 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12924 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12925 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12926 more than 3 arguments, and ARG may be null in the 2-argument case.
12928 Return NULL_TREE if no simplification was possible, otherwise return the
12929 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12930 code of the function to be simplified. */
12933 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12934 enum built_in_function fcode)
12936 tree fn_fputc, fn_fputs, call = NULL_TREE;
12937 const char *fmt_str = NULL;
12939 /* If the return value is used, don't do the transformation. */
12943 /* Verify the required arguments in the original call. */
12944 if (!validate_arg (fp, POINTER_TYPE))
12946 if (!validate_arg (fmt, POINTER_TYPE))
12949 /* Check whether the format is a literal string constant. */
12950 fmt_str = c_getstr (fmt);
12951 if (fmt_str == NULL)
12954 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12956 /* If we're using an unlocked function, assume the other
12957 unlocked functions exist explicitly. */
12958 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12959 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12963 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12964 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12967 if (!init_target_chars ())
12970 /* If the format doesn't contain % args or %%, use strcpy. */
12971 if (strchr (fmt_str, target_percent) == NULL)
12973 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12977 /* If the format specifier was "", fprintf does nothing. */
12978 if (fmt_str[0] == '\0')
12980 /* If FP has side-effects, just wait until gimplification is
12982 if (TREE_SIDE_EFFECTS (fp))
12985 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12988 /* When "string" doesn't contain %, replace all cases of
12989 fprintf (fp, string) with fputs (string, fp). The fputs
12990 builtin will take care of special cases like length == 1. */
12992 call = build_call_expr (fn_fputs, 2, fmt, fp);
12995 /* The other optimizations can be done only on the non-va_list variants. */
12996 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12999 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13000 else if (strcmp (fmt_str, target_percent_s) == 0)
13002 if (!arg || !validate_arg (arg, POINTER_TYPE))
13005 call = build_call_expr (fn_fputs, 2, arg, fp);
13008 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13009 else if (strcmp (fmt_str, target_percent_c) == 0)
13011 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13014 call = build_call_expr (fn_fputc, 2, arg, fp);
13019 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
13022 /* Initialize format string characters in the target charset. */
13025 init_target_chars (void)
13030 target_newline = lang_hooks.to_target_charset ('\n');
13031 target_percent = lang_hooks.to_target_charset ('%');
13032 target_c = lang_hooks.to_target_charset ('c');
13033 target_s = lang_hooks.to_target_charset ('s');
13034 if (target_newline == 0 || target_percent == 0 || target_c == 0
13038 target_percent_c[0] = target_percent;
13039 target_percent_c[1] = target_c;
13040 target_percent_c[2] = '\0';
13042 target_percent_s[0] = target_percent;
13043 target_percent_s[1] = target_s;
13044 target_percent_s[2] = '\0';
13046 target_percent_s_newline[0] = target_percent;
13047 target_percent_s_newline[1] = target_s;
13048 target_percent_s_newline[2] = target_newline;
13049 target_percent_s_newline[3] = '\0';
13056 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13057 and no overflow/underflow occurred. INEXACT is true if M was not
13058 exactly calculated. TYPE is the tree type for the result. This
13059 function assumes that you cleared the MPFR flags and then
13060 calculated M to see if anything subsequently set a flag prior to
13061 entering this function. Return NULL_TREE if any checks fail. */
13064 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13066 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13067 overflow/underflow occurred. If -frounding-math, proceed iff the
13068 result of calling FUNC was exact. */
13069 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13070 && (!flag_rounding_math || !inexact))
13072 REAL_VALUE_TYPE rr;
13074 real_from_mpfr (&rr, m, type, GMP_RNDN);
13075 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13076 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13077 but the mpft_t is not, then we underflowed in the
13079 if (real_isfinite (&rr)
13080 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13082 REAL_VALUE_TYPE rmode;
13084 real_convert (&rmode, TYPE_MODE (type), &rr);
13085 /* Proceed iff the specified mode can hold the value. */
13086 if (real_identical (&rmode, &rr))
13087 return build_real (type, rmode);
13093 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13094 FUNC on it and return the resulting value as a tree with type TYPE.
13095 If MIN and/or MAX are not NULL, then the supplied ARG must be
13096 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13097 acceptable values, otherwise they are not. The mpfr precision is
13098 set to the precision of TYPE. We assume that function FUNC returns
13099 zero if the result could be calculated exactly within the requested
13103 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13104 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13107 tree result = NULL_TREE;
13111 /* To proceed, MPFR must exactly represent the target floating point
13112 format, which only happens when the target base equals two. */
13113 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13114 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13116 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13118 if (real_isfinite (ra)
13119 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13120 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13122 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13123 const int prec = fmt->p;
13124 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13128 mpfr_init2 (m, prec);
13129 mpfr_from_real (m, ra, GMP_RNDN);
13130 mpfr_clear_flags ();
13131 inexact = func (m, m, rnd);
13132 result = do_mpfr_ckconv (m, type, inexact);
13140 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13141 FUNC on it and return the resulting value as a tree with type TYPE.
13142 The mpfr precision is set to the precision of TYPE. We assume that
13143 function FUNC returns zero if the result could be calculated
13144 exactly within the requested precision. */
13147 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13148 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13150 tree result = NULL_TREE;
13155 /* To proceed, MPFR must exactly represent the target floating point
13156 format, which only happens when the target base equals two. */
13157 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13158 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13159 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13161 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13162 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13164 if (real_isfinite (ra1) && real_isfinite (ra2))
13166 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13167 const int prec = fmt->p;
13168 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13172 mpfr_inits2 (prec, m1, m2, NULL);
13173 mpfr_from_real (m1, ra1, GMP_RNDN);
13174 mpfr_from_real (m2, ra2, GMP_RNDN);
13175 mpfr_clear_flags ();
13176 inexact = func (m1, m1, m2, rnd);
13177 result = do_mpfr_ckconv (m1, type, inexact);
13178 mpfr_clears (m1, m2, NULL);
13185 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13186 FUNC on it and return the resulting value as a tree with type TYPE.
13187 The mpfr precision is set to the precision of TYPE. We assume that
13188 function FUNC returns zero if the result could be calculated
13189 exactly within the requested precision. */
13192 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13193 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13195 tree result = NULL_TREE;
13201 /* To proceed, MPFR must exactly represent the target floating point
13202 format, which only happens when the target base equals two. */
13203 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13204 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13205 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13206 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13208 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13209 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13210 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13212 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13214 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13215 const int prec = fmt->p;
13216 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13220 mpfr_inits2 (prec, m1, m2, m3, NULL);
13221 mpfr_from_real (m1, ra1, GMP_RNDN);
13222 mpfr_from_real (m2, ra2, GMP_RNDN);
13223 mpfr_from_real (m3, ra3, GMP_RNDN);
13224 mpfr_clear_flags ();
13225 inexact = func (m1, m1, m2, m3, rnd);
13226 result = do_mpfr_ckconv (m1, type, inexact);
13227 mpfr_clears (m1, m2, m3, NULL);
13234 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13235 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13236 If ARG_SINP and ARG_COSP are NULL then the result is returned
13237 as a complex value.
13238 The type is taken from the type of ARG and is used for setting the
13239 precision of the calculation and results. */
13242 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13244 tree const type = TREE_TYPE (arg);
13245 tree result = NULL_TREE;
13249 /* To proceed, MPFR must exactly represent the target floating point
13250 format, which only happens when the target base equals two. */
13251 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13252 && TREE_CODE (arg) == REAL_CST
13253 && !TREE_OVERFLOW (arg))
13255 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13257 if (real_isfinite (ra))
13259 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13260 const int prec = fmt->p;
13261 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13262 tree result_s, result_c;
13266 mpfr_inits2 (prec, m, ms, mc, NULL);
13267 mpfr_from_real (m, ra, GMP_RNDN);
13268 mpfr_clear_flags ();
13269 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13270 result_s = do_mpfr_ckconv (ms, type, inexact);
13271 result_c = do_mpfr_ckconv (mc, type, inexact);
13272 mpfr_clears (m, ms, mc, NULL);
13273 if (result_s && result_c)
13275 /* If we are to return in a complex value do so. */
13276 if (!arg_sinp && !arg_cosp)
13277 return build_complex (build_complex_type (type),
13278 result_c, result_s);
13280 /* Dereference the sin/cos pointer arguments. */
13281 arg_sinp = build_fold_indirect_ref (arg_sinp);
13282 arg_cosp = build_fold_indirect_ref (arg_cosp);
13283 /* Proceed if valid pointer type were passed in. */
13284 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13285 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13287 /* Set the values. */
13288 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13290 TREE_SIDE_EFFECTS (result_s) = 1;
13291 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13293 TREE_SIDE_EFFECTS (result_c) = 1;
13294 /* Combine the assignments into a compound expr. */
13295 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13296 result_s, result_c));
13304 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13305 two-argument mpfr order N Bessel function FUNC on them and return
13306 the resulting value as a tree with type TYPE. The mpfr precision
13307 is set to the precision of TYPE. We assume that function FUNC
13308 returns zero if the result could be calculated exactly within the
13309 requested precision. */
13311 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13312 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13313 const REAL_VALUE_TYPE *min, bool inclusive)
13315 tree result = NULL_TREE;
13320 /* To proceed, MPFR must exactly represent the target floating point
13321 format, which only happens when the target base equals two. */
13322 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13323 && host_integerp (arg1, 0)
13324 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13326 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13327 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13330 && real_isfinite (ra)
13331 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13333 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13334 const int prec = fmt->p;
13335 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13339 mpfr_init2 (m, prec);
13340 mpfr_from_real (m, ra, GMP_RNDN);
13341 mpfr_clear_flags ();
13342 inexact = func (m, n, m, rnd);
13343 result = do_mpfr_ckconv (m, type, inexact);
13351 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13352 the pointer *(ARG_QUO) and return the result. The type is taken
13353 from the type of ARG0 and is used for setting the precision of the
13354 calculation and results. */
13357 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13359 tree const type = TREE_TYPE (arg0);
13360 tree result = NULL_TREE;
13365 /* To proceed, MPFR must exactly represent the target floating point
13366 format, which only happens when the target base equals two. */
13367 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13368 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13369 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13371 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13372 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13374 if (real_isfinite (ra0) && real_isfinite (ra1))
13376 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13377 const int prec = fmt->p;
13378 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13383 mpfr_inits2 (prec, m0, m1, NULL);
13384 mpfr_from_real (m0, ra0, GMP_RNDN);
13385 mpfr_from_real (m1, ra1, GMP_RNDN);
13386 mpfr_clear_flags ();
13387 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13388 /* Remquo is independent of the rounding mode, so pass
13389 inexact=0 to do_mpfr_ckconv(). */
13390 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13391 mpfr_clears (m0, m1, NULL);
13394 /* MPFR calculates quo in the host's long so it may
13395 return more bits in quo than the target int can hold
13396 if sizeof(host long) > sizeof(target int). This can
13397 happen even for native compilers in LP64 mode. In
13398 these cases, modulo the quo value with the largest
13399 number that the target int can hold while leaving one
13400 bit for the sign. */
13401 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13402 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13404 /* Dereference the quo pointer argument. */
13405 arg_quo = build_fold_indirect_ref (arg_quo);
13406 /* Proceed iff a valid pointer type was passed in. */
13407 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13409 /* Set the value. */
13410 tree result_quo = fold_build2 (MODIFY_EXPR,
13411 TREE_TYPE (arg_quo), arg_quo,
13412 build_int_cst (NULL, integer_quo));
13413 TREE_SIDE_EFFECTS (result_quo) = 1;
13414 /* Combine the quo assignment with the rem. */
13415 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13416 result_quo, result_rem));
13424 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13425 resulting value as a tree with type TYPE. The mpfr precision is
13426 set to the precision of TYPE. We assume that this mpfr function
13427 returns zero if the result could be calculated exactly within the
13428 requested precision. In addition, the integer pointer represented
13429 by ARG_SG will be dereferenced and set to the appropriate signgam
13433 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13435 tree result = NULL_TREE;
13439 /* To proceed, MPFR must exactly represent the target floating point
13440 format, which only happens when the target base equals two. Also
13441 verify ARG is a constant and that ARG_SG is an int pointer. */
13442 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13443 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13444 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13445 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13447 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13449 /* In addition to NaN and Inf, the argument cannot be zero or a
13450 negative integer. */
13451 if (real_isfinite (ra)
13452 && ra->cl != rvc_zero
13453 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13455 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13456 const int prec = fmt->p;
13457 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13462 mpfr_init2 (m, prec);
13463 mpfr_from_real (m, ra, GMP_RNDN);
13464 mpfr_clear_flags ();
13465 inexact = mpfr_lgamma (m, &sg, m, rnd);
13466 result_lg = do_mpfr_ckconv (m, type, inexact);
13472 /* Dereference the arg_sg pointer argument. */
13473 arg_sg = build_fold_indirect_ref (arg_sg);
13474 /* Assign the signgam value into *arg_sg. */
13475 result_sg = fold_build2 (MODIFY_EXPR,
13476 TREE_TYPE (arg_sg), arg_sg,
13477 build_int_cst (NULL, sg));
13478 TREE_SIDE_EFFECTS (result_sg) = 1;
13479 /* Combine the signgam assignment with the lgamma result. */
13480 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13481 result_sg, result_lg));
13490 The functions below provide an alternate interface for folding
13491 builtin function calls presented as GIMPLE_CALL statements rather
13492 than as CALL_EXPRs. The folded result is still expressed as a
13493 tree. There is too much code duplication in the handling of
13494 varargs functions, and a more intrusive re-factoring would permit
13495 better sharing of code between the tree and statement-based
13496 versions of these functions. */
13498 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13499 along with N new arguments specified as the "..." parameters. SKIP
13500 is the number of arguments in STMT to be omitted. This function is used
13501 to do varargs-to-varargs transformations. */
13504 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13506 int oldnargs = gimple_call_num_args (stmt);
13507 int nargs = oldnargs - skip + n;
13508 tree fntype = TREE_TYPE (fndecl);
13509 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13514 buffer = XALLOCAVEC (tree, nargs);
13516 for (i = 0; i < n; i++)
13517 buffer[i] = va_arg (ap, tree);
13519 for (j = skip; j < oldnargs; j++, i++)
13520 buffer[i] = gimple_call_arg (stmt, j);
13522 return fold (build_call_array (TREE_TYPE (fntype), fn, nargs, buffer));
13525 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13526 a normal call should be emitted rather than expanding the function
13527 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13530 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13532 tree dest, size, len, fn, fmt, flag;
13533 const char *fmt_str;
13534 int nargs = gimple_call_num_args (stmt);
13536 /* Verify the required arguments in the original call. */
13539 dest = gimple_call_arg (stmt, 0);
13540 if (!validate_arg (dest, POINTER_TYPE))
13542 flag = gimple_call_arg (stmt, 1);
13543 if (!validate_arg (flag, INTEGER_TYPE))
13545 size = gimple_call_arg (stmt, 2);
13546 if (!validate_arg (size, INTEGER_TYPE))
13548 fmt = gimple_call_arg (stmt, 3);
13549 if (!validate_arg (fmt, POINTER_TYPE))
13552 if (! host_integerp (size, 1))
13557 if (!init_target_chars ())
13560 /* Check whether the format is a literal string constant. */
13561 fmt_str = c_getstr (fmt);
13562 if (fmt_str != NULL)
13564 /* If the format doesn't contain % args or %%, we know the size. */
13565 if (strchr (fmt_str, target_percent) == 0)
13567 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13568 len = build_int_cstu (size_type_node, strlen (fmt_str));
13570 /* If the format is "%s" and first ... argument is a string literal,
13571 we know the size too. */
13572 else if (fcode == BUILT_IN_SPRINTF_CHK
13573 && strcmp (fmt_str, target_percent_s) == 0)
13579 arg = gimple_call_arg (stmt, 4);
13580 if (validate_arg (arg, POINTER_TYPE))
13582 len = c_strlen (arg, 1);
13583 if (! len || ! host_integerp (len, 1))
13590 if (! integer_all_onesp (size))
13592 if (! len || ! tree_int_cst_lt (len, size))
13596 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13597 or if format doesn't contain % chars or is "%s". */
13598 if (! integer_zerop (flag))
13600 if (fmt_str == NULL)
13602 if (strchr (fmt_str, target_percent) != NULL
13603 && strcmp (fmt_str, target_percent_s))
13607 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13608 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13609 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13613 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13616 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13617 a normal call should be emitted rather than expanding the function
13618 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13619 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13620 passed as second argument. */
13623 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13624 enum built_in_function fcode)
13626 tree dest, size, len, fn, fmt, flag;
13627 const char *fmt_str;
13629 /* Verify the required arguments in the original call. */
13630 if (gimple_call_num_args (stmt) < 5)
13632 dest = gimple_call_arg (stmt, 0);
13633 if (!validate_arg (dest, POINTER_TYPE))
13635 len = gimple_call_arg (stmt, 1);
13636 if (!validate_arg (len, INTEGER_TYPE))
13638 flag = gimple_call_arg (stmt, 2);
13639 if (!validate_arg (flag, INTEGER_TYPE))
13641 size = gimple_call_arg (stmt, 3);
13642 if (!validate_arg (size, INTEGER_TYPE))
13644 fmt = gimple_call_arg (stmt, 4);
13645 if (!validate_arg (fmt, POINTER_TYPE))
13648 if (! host_integerp (size, 1))
13651 if (! integer_all_onesp (size))
13653 if (! host_integerp (len, 1))
13655 /* If LEN is not constant, try MAXLEN too.
13656 For MAXLEN only allow optimizing into non-_ocs function
13657 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13658 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13664 if (tree_int_cst_lt (size, maxlen))
13668 if (!init_target_chars ())
13671 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13672 or if format doesn't contain % chars or is "%s". */
13673 if (! integer_zerop (flag))
13675 fmt_str = c_getstr (fmt);
13676 if (fmt_str == NULL)
13678 if (strchr (fmt_str, target_percent) != NULL
13679 && strcmp (fmt_str, target_percent_s))
13683 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13685 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13686 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13690 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13693 /* Builtins with folding operations that operate on "..." arguments
13694 need special handling; we need to store the arguments in a convenient
13695 data structure before attempting any folding. Fortunately there are
13696 only a few builtins that fall into this category. FNDECL is the
13697 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13698 result of the function call is ignored. */
13701 gimple_fold_builtin_varargs (tree fndecl, gimple stmt, bool ignore ATTRIBUTE_UNUSED)
13703 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13704 tree ret = NULL_TREE;
13708 case BUILT_IN_SPRINTF_CHK:
13709 case BUILT_IN_VSPRINTF_CHK:
13710 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13713 case BUILT_IN_SNPRINTF_CHK:
13714 case BUILT_IN_VSNPRINTF_CHK:
13715 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13722 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13723 TREE_NO_WARNING (ret) = 1;
13729 /* A wrapper function for builtin folding that prevents warnings for
13730 "statement without effect" and the like, caused by removing the
13731 call node earlier than the warning is generated. */
13734 fold_call_stmt (gimple stmt, bool ignore)
13736 tree ret = NULL_TREE;
13737 tree fndecl = gimple_call_fndecl (stmt);
13739 && TREE_CODE (fndecl) == FUNCTION_DECL
13740 && DECL_BUILT_IN (fndecl)
13741 && !gimple_call_va_arg_pack_p (stmt))
13743 int nargs = gimple_call_num_args (stmt);
13745 if (avoid_folding_inline_builtin (fndecl))
13747 /* FIXME: Don't use a list in this interface. */
13748 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13750 tree arglist = NULL_TREE;
13752 for (i = nargs - 1; i >= 0; i--)
13753 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13754 return targetm.fold_builtin (fndecl, arglist, ignore);
13758 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13760 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13762 for (i = 0; i < nargs; i++)
13763 args[i] = gimple_call_arg (stmt, i);
13764 ret = fold_builtin_n (fndecl, args, nargs, ignore);
13767 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13770 /* Propagate location information from original call to
13771 expansion of builtin. Otherwise things like
13772 maybe_emit_chk_warning, that operate on the expansion
13773 of a builtin, will use the wrong location information. */
13774 if (gimple_has_location (stmt))
13776 tree realret = ret;
13777 if (TREE_CODE (ret) == NOP_EXPR)
13778 realret = TREE_OPERAND (ret, 0);
13779 if (CAN_HAVE_LOCATION_P (realret)
13780 && !EXPR_HAS_LOCATION (realret))
13781 SET_EXPR_LOCATION (realret, gimple_location (stmt));