1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
30 #include "tree-gimple.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names[(int) END_BUILTINS] =
65 #include "builtins.def"
69 /* Setup an array of _DECL trees, make sure each element is
70 initialized to NULL_TREE. */
71 tree built_in_decls[(int) END_BUILTINS];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 It may be NULL_TREE when this is invalid (for instance runtime is not
74 required to implement the function call in all cases). */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
129 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
132 static rtx expand_builtin_bzero (tree);
133 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_alloca (tree, rtx);
139 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static rtx expand_builtin_fputs (tree, rtx, bool);
142 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
143 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_expect (tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (tree);
151 static tree fold_builtin_inf (tree, int);
152 static tree fold_builtin_nan (tree, tree, int);
153 static tree rewrite_call_expr (tree, int, tree, int, ...);
154 static bool validate_arg (const_tree, enum tree_code code);
155 static bool integer_valued_real_p (tree);
156 static tree fold_trunc_transparent_mathfn (tree, tree);
157 static bool readonly_data_expr (tree);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_signbit (tree, rtx);
160 static tree fold_builtin_sqrt (tree, tree);
161 static tree fold_builtin_cbrt (tree, tree);
162 static tree fold_builtin_pow (tree, tree, tree, tree);
163 static tree fold_builtin_powi (tree, tree, tree, tree);
164 static tree fold_builtin_cos (tree, tree, tree);
165 static tree fold_builtin_cosh (tree, tree, tree);
166 static tree fold_builtin_tan (tree, tree);
167 static tree fold_builtin_trunc (tree, tree);
168 static tree fold_builtin_floor (tree, tree);
169 static tree fold_builtin_ceil (tree, tree);
170 static tree fold_builtin_round (tree, tree);
171 static tree fold_builtin_int_roundingfn (tree, tree);
172 static tree fold_builtin_bitop (tree, tree);
173 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
174 static tree fold_builtin_strchr (tree, tree, tree);
175 static tree fold_builtin_memchr (tree, tree, tree, tree);
176 static tree fold_builtin_memcmp (tree, tree, tree);
177 static tree fold_builtin_strcmp (tree, tree);
178 static tree fold_builtin_strncmp (tree, tree, tree);
179 static tree fold_builtin_signbit (tree, tree);
180 static tree fold_builtin_copysign (tree, tree, tree, tree);
181 static tree fold_builtin_isascii (tree);
182 static tree fold_builtin_toascii (tree);
183 static tree fold_builtin_isdigit (tree);
184 static tree fold_builtin_fabs (tree, tree);
185 static tree fold_builtin_abs (tree, tree);
186 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
188 static tree fold_builtin_n (tree, tree *, int, bool);
189 static tree fold_builtin_0 (tree, bool);
190 static tree fold_builtin_1 (tree, tree, bool);
191 static tree fold_builtin_2 (tree, tree, tree, bool);
192 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
193 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
194 static tree fold_builtin_varargs (tree, tree, bool);
196 static tree fold_builtin_strpbrk (tree, tree, tree);
197 static tree fold_builtin_strstr (tree, tree, tree);
198 static tree fold_builtin_strrchr (tree, tree, tree);
199 static tree fold_builtin_strcat (tree, tree);
200 static tree fold_builtin_strncat (tree, tree, tree);
201 static tree fold_builtin_strspn (tree, tree);
202 static tree fold_builtin_strcspn (tree, tree);
203 static tree fold_builtin_sprintf (tree, tree, tree, int);
205 static rtx expand_builtin_object_size (tree);
206 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
207 enum built_in_function);
208 static void maybe_emit_chk_warning (tree, enum built_in_function);
209 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
210 static tree fold_builtin_object_size (tree, tree);
211 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
212 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
213 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
214 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
215 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
216 enum built_in_function);
217 static bool init_target_chars (void);
219 static unsigned HOST_WIDE_INT target_newline;
220 static unsigned HOST_WIDE_INT target_percent;
221 static unsigned HOST_WIDE_INT target_c;
222 static unsigned HOST_WIDE_INT target_s;
223 static char target_percent_c[3];
224 static char target_percent_s[3];
225 static char target_percent_s_newline[4];
226 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_arg2 (tree, tree, tree,
229 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
230 static tree do_mpfr_arg3 (tree, tree, tree, tree,
231 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
232 static tree do_mpfr_sincos (tree, tree, tree);
233 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
234 static tree do_mpfr_bessel_n (tree, tree, tree,
235 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
236 const REAL_VALUE_TYPE *, bool);
237 static tree do_mpfr_remquo (tree, tree, tree);
238 static tree do_mpfr_lgamma_r (tree, tree, tree);
241 /* Return true if NODE should be considered for inline expansion regardless
242 of the optimization level. This means whenever a function is invoked with
243 its "internal" name, which normally contains the prefix "__builtin". */
245 static bool called_as_built_in (tree node)
247 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
248 if (strncmp (name, "__builtin_", 10) == 0)
250 if (strncmp (name, "__sync_", 7) == 0)
255 /* Return the alignment in bits of EXP, a pointer valued expression.
256 But don't return more than MAX_ALIGN no matter what.
257 The alignment returned is, by default, the alignment of the thing that
258 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
260 Otherwise, look at the expression to see if we can do better, i.e., if the
261 expression is actually pointing at an object whose alignment is tighter. */
264 get_pointer_alignment (tree exp, unsigned int max_align)
266 unsigned int align, inner;
268 /* We rely on TER to compute accurate alignment information. */
269 if (!(optimize && flag_tree_ter))
272 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
275 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
276 align = MIN (align, max_align);
280 switch (TREE_CODE (exp))
284 case NON_LVALUE_EXPR:
285 exp = TREE_OPERAND (exp, 0);
286 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
289 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
290 align = MIN (inner, max_align);
293 case POINTER_PLUS_EXPR:
294 /* If sum of pointer + int, restrict our maximum alignment to that
295 imposed by the integer. If not, we can't do any better than
297 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
300 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
301 & (max_align / BITS_PER_UNIT - 1))
305 exp = TREE_OPERAND (exp, 0);
309 /* See what we are pointing at and look at its alignment. */
310 exp = TREE_OPERAND (exp, 0);
312 if (handled_component_p (exp))
314 HOST_WIDE_INT bitsize, bitpos;
316 enum machine_mode mode;
317 int unsignedp, volatilep;
319 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
320 &mode, &unsignedp, &volatilep, true);
322 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
323 if (offset && TREE_CODE (offset) == PLUS_EXPR
324 && host_integerp (TREE_OPERAND (offset, 1), 1))
326 /* Any overflow in calculating offset_bits won't change
329 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
333 inner = MIN (inner, (offset_bits & -offset_bits));
334 offset = TREE_OPERAND (offset, 0);
336 if (offset && TREE_CODE (offset) == MULT_EXPR
337 && host_integerp (TREE_OPERAND (offset, 1), 1))
339 /* Any overflow in calculating offset_factor won't change
341 unsigned offset_factor
342 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
346 inner = MIN (inner, (offset_factor & -offset_factor));
349 inner = MIN (inner, BITS_PER_UNIT);
352 align = MIN (inner, DECL_ALIGN (exp));
353 #ifdef CONSTANT_ALIGNMENT
354 else if (CONSTANT_CLASS_P (exp))
355 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
357 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
358 || TREE_CODE (exp) == INDIRECT_REF)
359 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
361 align = MIN (align, inner);
362 return MIN (align, max_align);
370 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
371 way, because it could contain a zero byte in the middle.
372 TREE_STRING_LENGTH is the size of the character array, not the string.
374 ONLY_VALUE should be nonzero if the result is not going to be emitted
375 into the instruction stream and zero if it is going to be expanded.
376 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
377 is returned, otherwise NULL, since
378 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
379 evaluate the side-effects.
381 The value returned is of type `ssizetype'.
383 Unfortunately, string_constant can't access the values of const char
384 arrays with initializers, so neither can we do so here. */
387 c_strlen (tree src, int only_value)
390 HOST_WIDE_INT offset;
395 if (TREE_CODE (src) == COND_EXPR
396 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
400 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
401 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
402 if (tree_int_cst_equal (len1, len2))
406 if (TREE_CODE (src) == COMPOUND_EXPR
407 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
408 return c_strlen (TREE_OPERAND (src, 1), only_value);
410 src = string_constant (src, &offset_node);
414 max = TREE_STRING_LENGTH (src) - 1;
415 ptr = TREE_STRING_POINTER (src);
417 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
419 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
420 compute the offset to the following null if we don't know where to
421 start searching for it. */
424 for (i = 0; i < max; i++)
428 /* We don't know the starting offset, but we do know that the string
429 has no internal zero bytes. We can assume that the offset falls
430 within the bounds of the string; otherwise, the programmer deserves
431 what he gets. Subtract the offset from the length of the string,
432 and return that. This would perhaps not be valid if we were dealing
433 with named arrays in addition to literal string constants. */
435 return size_diffop (size_int (max), offset_node);
438 /* We have a known offset into the string. Start searching there for
439 a null character if we can represent it as a single HOST_WIDE_INT. */
440 if (offset_node == 0)
442 else if (! host_integerp (offset_node, 0))
445 offset = tree_low_cst (offset_node, 0);
447 /* If the offset is known to be out of bounds, warn, and call strlen at
449 if (offset < 0 || offset > max)
451 warning (0, "offset outside bounds of constant string");
455 /* Use strlen to search for the first zero byte. Since any strings
456 constructed with build_string will have nulls appended, we win even
457 if we get handed something like (char[4])"abcd".
459 Since OFFSET is our starting index into the string, no further
460 calculation is needed. */
461 return ssize_int (strlen (ptr + offset));
464 /* Return a char pointer for a C string if it is a string constant
465 or sum of string constant and integer constant. */
472 src = string_constant (src, &offset_node);
476 if (offset_node == 0)
477 return TREE_STRING_POINTER (src);
478 else if (!host_integerp (offset_node, 1)
479 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
482 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
485 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
486 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
489 c_readstr (const char *str, enum machine_mode mode)
495 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
500 for (i = 0; i < GET_MODE_SIZE (mode); i++)
503 if (WORDS_BIG_ENDIAN)
504 j = GET_MODE_SIZE (mode) - i - 1;
505 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
506 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
507 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
509 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
512 ch = (unsigned char) str[i];
513 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
515 return immed_double_const (c[0], c[1], mode);
518 /* Cast a target constant CST to target CHAR and if that value fits into
519 host char type, return zero and put that value into variable pointed to by
523 target_char_cast (tree cst, char *p)
525 unsigned HOST_WIDE_INT val, hostval;
527 if (!host_integerp (cst, 1)
528 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
531 val = tree_low_cst (cst, 1);
532 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
533 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
536 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
537 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
546 /* Similar to save_expr, but assumes that arbitrary code is not executed
547 in between the multiple evaluations. In particular, we assume that a
548 non-addressable local variable will not be modified. */
551 builtin_save_expr (tree exp)
553 if (TREE_ADDRESSABLE (exp) == 0
554 && (TREE_CODE (exp) == PARM_DECL
555 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
558 return save_expr (exp);
561 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
562 times to get the address of either a higher stack frame, or a return
563 address located within it (depending on FNDECL_CODE). */
566 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
570 #ifdef INITIAL_FRAME_ADDRESS_RTX
571 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
575 /* For a zero count with __builtin_return_address, we don't care what
576 frame address we return, because target-specific definitions will
577 override us. Therefore frame pointer elimination is OK, and using
578 the soft frame pointer is OK.
580 For a nonzero count, or a zero count with __builtin_frame_address,
581 we require a stable offset from the current frame pointer to the
582 previous one, so we must use the hard frame pointer, and
583 we must disable frame pointer elimination. */
584 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
585 tem = frame_pointer_rtx;
588 tem = hard_frame_pointer_rtx;
590 /* Tell reload not to eliminate the frame pointer. */
591 current_function_accesses_prior_frames = 1;
595 /* Some machines need special handling before we can access
596 arbitrary frames. For example, on the SPARC, we must first flush
597 all register windows to the stack. */
598 #ifdef SETUP_FRAME_ADDRESSES
600 SETUP_FRAME_ADDRESSES ();
603 /* On the SPARC, the return address is not in the frame, it is in a
604 register. There is no way to access it off of the current frame
605 pointer, but it can be accessed off the previous frame pointer by
606 reading the value from the register window save area. */
607 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
608 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
612 /* Scan back COUNT frames to the specified frame. */
613 for (i = 0; i < count; i++)
615 /* Assume the dynamic chain pointer is in the word that the
616 frame address points to, unless otherwise specified. */
617 #ifdef DYNAMIC_CHAIN_ADDRESS
618 tem = DYNAMIC_CHAIN_ADDRESS (tem);
620 tem = memory_address (Pmode, tem);
621 tem = gen_frame_mem (Pmode, tem);
622 tem = copy_to_reg (tem);
625 /* For __builtin_frame_address, return what we've got. But, on
626 the SPARC for example, we may have to add a bias. */
627 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
628 #ifdef FRAME_ADDR_RTX
629 return FRAME_ADDR_RTX (tem);
634 /* For __builtin_return_address, get the return address from that frame. */
635 #ifdef RETURN_ADDR_RTX
636 tem = RETURN_ADDR_RTX (count, tem);
638 tem = memory_address (Pmode,
639 plus_constant (tem, GET_MODE_SIZE (Pmode)));
640 tem = gen_frame_mem (Pmode, tem);
645 /* Alias set used for setjmp buffer. */
646 static alias_set_type setjmp_alias_set = -1;
648 /* Construct the leading half of a __builtin_setjmp call. Control will
649 return to RECEIVER_LABEL. This is also called directly by the SJLJ
650 exception handling code. */
653 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
655 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
659 if (setjmp_alias_set == -1)
660 setjmp_alias_set = new_alias_set ();
662 buf_addr = convert_memory_address (Pmode, buf_addr);
664 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
666 /* We store the frame pointer and the address of receiver_label in
667 the buffer and use the rest of it for the stack save area, which
668 is machine-dependent. */
670 mem = gen_rtx_MEM (Pmode, buf_addr);
671 set_mem_alias_set (mem, setjmp_alias_set);
672 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
674 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
675 set_mem_alias_set (mem, setjmp_alias_set);
677 emit_move_insn (validize_mem (mem),
678 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
680 stack_save = gen_rtx_MEM (sa_mode,
681 plus_constant (buf_addr,
682 2 * GET_MODE_SIZE (Pmode)));
683 set_mem_alias_set (stack_save, setjmp_alias_set);
684 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
686 /* If there is further processing to do, do it. */
687 #ifdef HAVE_builtin_setjmp_setup
688 if (HAVE_builtin_setjmp_setup)
689 emit_insn (gen_builtin_setjmp_setup (buf_addr));
692 /* Tell optimize_save_area_alloca that extra work is going to
693 need to go on during alloca. */
694 current_function_calls_setjmp = 1;
696 /* We have a nonlocal label. */
697 current_function_has_nonlocal_label = 1;
700 /* Construct the trailing part of a __builtin_setjmp call. This is
701 also called directly by the SJLJ exception handling code. */
704 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
706 /* Clobber the FP when we get here, so we have to make sure it's
707 marked as used by this function. */
708 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
710 /* Mark the static chain as clobbered here so life information
711 doesn't get messed up for it. */
712 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
714 /* Now put in the code to restore the frame pointer, and argument
715 pointer, if needed. */
716 #ifdef HAVE_nonlocal_goto
717 if (! HAVE_nonlocal_goto)
720 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
721 /* This might change the hard frame pointer in ways that aren't
722 apparent to early optimization passes, so force a clobber. */
723 emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
726 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
727 if (fixed_regs[ARG_POINTER_REGNUM])
729 #ifdef ELIMINABLE_REGS
731 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
733 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
734 if (elim_regs[i].from == ARG_POINTER_REGNUM
735 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
738 if (i == ARRAY_SIZE (elim_regs))
741 /* Now restore our arg pointer from the address at which it
742 was saved in our stack frame. */
743 emit_move_insn (virtual_incoming_args_rtx,
744 copy_to_reg (get_arg_pointer_save_area (cfun)));
749 #ifdef HAVE_builtin_setjmp_receiver
750 if (HAVE_builtin_setjmp_receiver)
751 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
754 #ifdef HAVE_nonlocal_goto_receiver
755 if (HAVE_nonlocal_goto_receiver)
756 emit_insn (gen_nonlocal_goto_receiver ());
761 /* We must not allow the code we just generated to be reordered by
762 scheduling. Specifically, the update of the frame pointer must
763 happen immediately, not later. */
764 emit_insn (gen_blockage ());
767 /* __builtin_longjmp is passed a pointer to an array of five words (not
768 all will be used on all machines). It operates similarly to the C
769 library function of the same name, but is more efficient. Much of
770 the code below is copied from the handling of non-local gotos. */
773 expand_builtin_longjmp (rtx buf_addr, rtx value)
775 rtx fp, lab, stack, insn, last;
776 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
778 if (setjmp_alias_set == -1)
779 setjmp_alias_set = new_alias_set ();
781 buf_addr = convert_memory_address (Pmode, buf_addr);
783 buf_addr = force_reg (Pmode, buf_addr);
785 /* We used to store value in static_chain_rtx, but that fails if pointers
786 are smaller than integers. We instead require that the user must pass
787 a second argument of 1, because that is what builtin_setjmp will
788 return. This also makes EH slightly more efficient, since we are no
789 longer copying around a value that we don't care about. */
790 gcc_assert (value == const1_rtx);
792 last = get_last_insn ();
793 #ifdef HAVE_builtin_longjmp
794 if (HAVE_builtin_longjmp)
795 emit_insn (gen_builtin_longjmp (buf_addr));
799 fp = gen_rtx_MEM (Pmode, buf_addr);
800 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
801 GET_MODE_SIZE (Pmode)));
803 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
804 2 * GET_MODE_SIZE (Pmode)));
805 set_mem_alias_set (fp, setjmp_alias_set);
806 set_mem_alias_set (lab, setjmp_alias_set);
807 set_mem_alias_set (stack, setjmp_alias_set);
809 /* Pick up FP, label, and SP from the block and jump. This code is
810 from expand_goto in stmt.c; see there for detailed comments. */
811 #ifdef HAVE_nonlocal_goto
812 if (HAVE_nonlocal_goto)
813 /* We have to pass a value to the nonlocal_goto pattern that will
814 get copied into the static_chain pointer, but it does not matter
815 what that value is, because builtin_setjmp does not use it. */
816 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
820 lab = copy_to_reg (lab);
822 emit_insn (gen_rtx_CLOBBER (VOIDmode,
823 gen_rtx_MEM (BLKmode,
824 gen_rtx_SCRATCH (VOIDmode))));
825 emit_insn (gen_rtx_CLOBBER (VOIDmode,
826 gen_rtx_MEM (BLKmode,
827 hard_frame_pointer_rtx)));
829 emit_move_insn (hard_frame_pointer_rtx, fp);
830 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
832 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
833 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
834 emit_indirect_jump (lab);
838 /* Search backwards and mark the jump insn as a non-local goto.
839 Note that this precludes the use of __builtin_longjmp to a
840 __builtin_setjmp target in the same function. However, we've
841 already cautioned the user that these functions are for
842 internal exception handling use only. */
843 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
845 gcc_assert (insn != last);
849 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
853 else if (CALL_P (insn))
858 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
859 and the address of the save area. */
862 expand_builtin_nonlocal_goto (tree exp)
864 tree t_label, t_save_area;
865 rtx r_label, r_save_area, r_fp, r_sp, insn;
867 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
870 t_label = CALL_EXPR_ARG (exp, 0);
871 t_save_area = CALL_EXPR_ARG (exp, 1);
873 r_label = expand_normal (t_label);
874 r_label = convert_memory_address (Pmode, r_label);
875 r_save_area = expand_normal (t_save_area);
876 r_save_area = convert_memory_address (Pmode, r_save_area);
877 r_fp = gen_rtx_MEM (Pmode, r_save_area);
878 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
879 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
881 current_function_has_nonlocal_goto = 1;
883 #ifdef HAVE_nonlocal_goto
884 /* ??? We no longer need to pass the static chain value, afaik. */
885 if (HAVE_nonlocal_goto)
886 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
890 r_label = copy_to_reg (r_label);
892 emit_insn (gen_rtx_CLOBBER (VOIDmode,
893 gen_rtx_MEM (BLKmode,
894 gen_rtx_SCRATCH (VOIDmode))));
896 emit_insn (gen_rtx_CLOBBER (VOIDmode,
897 gen_rtx_MEM (BLKmode,
898 hard_frame_pointer_rtx)));
900 /* Restore frame pointer for containing function.
901 This sets the actual hard register used for the frame pointer
902 to the location of the function's incoming static chain info.
903 The non-local goto handler will then adjust it to contain the
904 proper value and reload the argument pointer, if needed. */
905 emit_move_insn (hard_frame_pointer_rtx, r_fp);
906 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
908 /* USE of hard_frame_pointer_rtx added for consistency;
909 not clear if really needed. */
910 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
911 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
912 emit_indirect_jump (r_label);
915 /* Search backwards to the jump insn and mark it as a
917 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
921 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
922 const0_rtx, REG_NOTES (insn));
925 else if (CALL_P (insn))
932 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
933 (not all will be used on all machines) that was passed to __builtin_setjmp.
934 It updates the stack pointer in that block to correspond to the current
938 expand_builtin_update_setjmp_buf (rtx buf_addr)
940 enum machine_mode sa_mode = Pmode;
944 #ifdef HAVE_save_stack_nonlocal
945 if (HAVE_save_stack_nonlocal)
946 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
948 #ifdef STACK_SAVEAREA_MODE
949 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
953 = gen_rtx_MEM (sa_mode,
956 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
960 emit_insn (gen_setjmp ());
963 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
966 /* Expand a call to __builtin_prefetch. For a target that does not support
967 data prefetch, evaluate the memory address argument in case it has side
971 expand_builtin_prefetch (tree exp)
973 tree arg0, arg1, arg2;
977 if (!validate_arglist (exp, POINTER_TYPE, 0))
980 arg0 = CALL_EXPR_ARG (exp, 0);
982 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
983 zero (read) and argument 2 (locality) defaults to 3 (high degree of
985 nargs = call_expr_nargs (exp);
987 arg1 = CALL_EXPR_ARG (exp, 1);
989 arg1 = integer_zero_node;
991 arg2 = CALL_EXPR_ARG (exp, 2);
993 arg2 = build_int_cst (NULL_TREE, 3);
995 /* Argument 0 is an address. */
996 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
998 /* Argument 1 (read/write flag) must be a compile-time constant int. */
999 if (TREE_CODE (arg1) != INTEGER_CST)
1001 error ("second argument to %<__builtin_prefetch%> must be a constant");
1002 arg1 = integer_zero_node;
1004 op1 = expand_normal (arg1);
1005 /* Argument 1 must be either zero or one. */
1006 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1008 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1013 /* Argument 2 (locality) must be a compile-time constant int. */
1014 if (TREE_CODE (arg2) != INTEGER_CST)
1016 error ("third argument to %<__builtin_prefetch%> must be a constant");
1017 arg2 = integer_zero_node;
1019 op2 = expand_normal (arg2);
1020 /* Argument 2 must be 0, 1, 2, or 3. */
1021 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1023 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1027 #ifdef HAVE_prefetch
1030 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1032 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1033 || (GET_MODE (op0) != Pmode))
1035 op0 = convert_memory_address (Pmode, op0);
1036 op0 = force_reg (Pmode, op0);
1038 emit_insn (gen_prefetch (op0, op1, op2));
1042 /* Don't do anything with direct references to volatile memory, but
1043 generate code to handle other side effects. */
1044 if (!MEM_P (op0) && side_effects_p (op0))
1048 /* Get a MEM rtx for expression EXP which is the address of an operand
1049 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1050 the maximum length of the block of memory that might be accessed or
1054 get_memory_rtx (tree exp, tree len)
1056 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1057 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1059 /* Get an expression we can use to find the attributes to assign to MEM.
1060 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1061 we can. First remove any nops. */
1062 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
1063 || TREE_CODE (exp) == NON_LVALUE_EXPR)
1064 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1065 exp = TREE_OPERAND (exp, 0);
1067 if (TREE_CODE (exp) == ADDR_EXPR)
1068 exp = TREE_OPERAND (exp, 0);
1069 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1070 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1074 /* Honor attributes derived from exp, except for the alias set
1075 (as builtin stringops may alias with anything) and the size
1076 (as stringops may access multiple array elements). */
1079 set_mem_attributes (mem, exp, 0);
1081 /* Allow the string and memory builtins to overflow from one
1082 field into another, see http://gcc.gnu.org/PR23561.
1083 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1084 memory accessed by the string or memory builtin will fit
1085 within the field. */
1086 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1088 tree mem_expr = MEM_EXPR (mem);
1089 HOST_WIDE_INT offset = -1, length = -1;
1092 while (TREE_CODE (inner) == ARRAY_REF
1093 || TREE_CODE (inner) == NOP_EXPR
1094 || TREE_CODE (inner) == CONVERT_EXPR
1095 || TREE_CODE (inner) == NON_LVALUE_EXPR
1096 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1097 || TREE_CODE (inner) == SAVE_EXPR)
1098 inner = TREE_OPERAND (inner, 0);
1100 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1102 if (MEM_OFFSET (mem)
1103 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1104 offset = INTVAL (MEM_OFFSET (mem));
1106 if (offset >= 0 && len && host_integerp (len, 0))
1107 length = tree_low_cst (len, 0);
1109 while (TREE_CODE (inner) == COMPONENT_REF)
1111 tree field = TREE_OPERAND (inner, 1);
1112 gcc_assert (! DECL_BIT_FIELD (field));
1113 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1114 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1117 && TYPE_SIZE_UNIT (TREE_TYPE (inner))
1118 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
1121 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
1122 /* If we can prove the memory starting at XEXP (mem, 0)
1123 and ending at XEXP (mem, 0) + LENGTH will fit into
1124 this field, we can keep that COMPONENT_REF in MEM_EXPR. */
1127 && offset + length <= size)
1132 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1133 offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
1134 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1142 mem_expr = TREE_OPERAND (mem_expr, 0);
1143 inner = TREE_OPERAND (inner, 0);
1146 if (mem_expr == NULL)
1148 if (mem_expr != MEM_EXPR (mem))
1150 set_mem_expr (mem, mem_expr);
1151 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1154 set_mem_alias_set (mem, 0);
1155 set_mem_size (mem, NULL_RTX);
1161 /* Built-in functions to perform an untyped call and return. */
1163 /* For each register that may be used for calling a function, this
1164 gives a mode used to copy the register's value. VOIDmode indicates
1165 the register is not used for calling a function. If the machine
1166 has register windows, this gives only the outbound registers.
1167 INCOMING_REGNO gives the corresponding inbound register. */
1168 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1170 /* For each register that may be used for returning values, this gives
1171 a mode used to copy the register's value. VOIDmode indicates the
1172 register is not used for returning values. If the machine has
1173 register windows, this gives only the outbound registers.
1174 INCOMING_REGNO gives the corresponding inbound register. */
1175 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1177 /* For each register that may be used for calling a function, this
1178 gives the offset of that register into the block returned by
1179 __builtin_apply_args. 0 indicates that the register is not
1180 used for calling a function. */
1181 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1183 /* Return the size required for the block returned by __builtin_apply_args,
1184 and initialize apply_args_mode. */
1187 apply_args_size (void)
1189 static int size = -1;
1192 enum machine_mode mode;
1194 /* The values computed by this function never change. */
1197 /* The first value is the incoming arg-pointer. */
1198 size = GET_MODE_SIZE (Pmode);
1200 /* The second value is the structure value address unless this is
1201 passed as an "invisible" first argument. */
1202 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1203 size += GET_MODE_SIZE (Pmode);
1205 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1206 if (FUNCTION_ARG_REGNO_P (regno))
1208 mode = reg_raw_mode[regno];
1210 gcc_assert (mode != VOIDmode);
1212 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1213 if (size % align != 0)
1214 size = CEIL (size, align) * align;
1215 apply_args_reg_offset[regno] = size;
1216 size += GET_MODE_SIZE (mode);
1217 apply_args_mode[regno] = mode;
1221 apply_args_mode[regno] = VOIDmode;
1222 apply_args_reg_offset[regno] = 0;
1228 /* Return the size required for the block returned by __builtin_apply,
1229 and initialize apply_result_mode. */
1232 apply_result_size (void)
1234 static int size = -1;
1236 enum machine_mode mode;
1238 /* The values computed by this function never change. */
1243 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1244 if (FUNCTION_VALUE_REGNO_P (regno))
1246 mode = reg_raw_mode[regno];
1248 gcc_assert (mode != VOIDmode);
1250 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1251 if (size % align != 0)
1252 size = CEIL (size, align) * align;
1253 size += GET_MODE_SIZE (mode);
1254 apply_result_mode[regno] = mode;
1257 apply_result_mode[regno] = VOIDmode;
1259 /* Allow targets that use untyped_call and untyped_return to override
1260 the size so that machine-specific information can be stored here. */
1261 #ifdef APPLY_RESULT_SIZE
1262 size = APPLY_RESULT_SIZE;
1268 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1269 /* Create a vector describing the result block RESULT. If SAVEP is true,
1270 the result block is used to save the values; otherwise it is used to
1271 restore the values. */
1274 result_vector (int savep, rtx result)
1276 int regno, size, align, nelts;
1277 enum machine_mode mode;
1279 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1282 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1283 if ((mode = apply_result_mode[regno]) != VOIDmode)
1285 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1286 if (size % align != 0)
1287 size = CEIL (size, align) * align;
1288 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1289 mem = adjust_address (result, mode, size);
1290 savevec[nelts++] = (savep
1291 ? gen_rtx_SET (VOIDmode, mem, reg)
1292 : gen_rtx_SET (VOIDmode, reg, mem));
1293 size += GET_MODE_SIZE (mode);
1295 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1297 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1299 /* Save the state required to perform an untyped call with the same
1300 arguments as were passed to the current function. */
1303 expand_builtin_apply_args_1 (void)
1306 int size, align, regno;
1307 enum machine_mode mode;
1308 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1310 /* Create a block where the arg-pointer, structure value address,
1311 and argument registers can be saved. */
1312 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1314 /* Walk past the arg-pointer and structure value address. */
1315 size = GET_MODE_SIZE (Pmode);
1316 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1317 size += GET_MODE_SIZE (Pmode);
1319 /* Save each register used in calling a function to the block. */
1320 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1321 if ((mode = apply_args_mode[regno]) != VOIDmode)
1323 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1324 if (size % align != 0)
1325 size = CEIL (size, align) * align;
1327 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1329 emit_move_insn (adjust_address (registers, mode, size), tem);
1330 size += GET_MODE_SIZE (mode);
1333 /* Save the arg pointer to the block. */
1334 tem = copy_to_reg (virtual_incoming_args_rtx);
1335 #ifdef STACK_GROWS_DOWNWARD
1336 /* We need the pointer as the caller actually passed them to us, not
1337 as we might have pretended they were passed. Make sure it's a valid
1338 operand, as emit_move_insn isn't expected to handle a PLUS. */
1340 = force_operand (plus_constant (tem, current_function_pretend_args_size),
1343 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1345 size = GET_MODE_SIZE (Pmode);
1347 /* Save the structure value address unless this is passed as an
1348 "invisible" first argument. */
1349 if (struct_incoming_value)
1351 emit_move_insn (adjust_address (registers, Pmode, size),
1352 copy_to_reg (struct_incoming_value));
1353 size += GET_MODE_SIZE (Pmode);
1356 /* Return the address of the block. */
1357 return copy_addr_to_reg (XEXP (registers, 0));
1360 /* __builtin_apply_args returns block of memory allocated on
1361 the stack into which is stored the arg pointer, structure
1362 value address, static chain, and all the registers that might
1363 possibly be used in performing a function call. The code is
1364 moved to the start of the function so the incoming values are
1368 expand_builtin_apply_args (void)
1370 /* Don't do __builtin_apply_args more than once in a function.
1371 Save the result of the first call and reuse it. */
1372 if (apply_args_value != 0)
1373 return apply_args_value;
1375 /* When this function is called, it means that registers must be
1376 saved on entry to this function. So we migrate the
1377 call to the first insn of this function. */
1382 temp = expand_builtin_apply_args_1 ();
1386 apply_args_value = temp;
1388 /* Put the insns after the NOTE that starts the function.
1389 If this is inside a start_sequence, make the outer-level insn
1390 chain current, so the code is placed at the start of the
1392 push_topmost_sequence ();
1393 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1394 pop_topmost_sequence ();
1399 /* Perform an untyped call and save the state required to perform an
1400 untyped return of whatever value was returned by the given function. */
1403 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1405 int size, align, regno;
1406 enum machine_mode mode;
1407 rtx incoming_args, result, reg, dest, src, call_insn;
1408 rtx old_stack_level = 0;
1409 rtx call_fusage = 0;
1410 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1412 arguments = convert_memory_address (Pmode, arguments);
1414 /* Create a block where the return registers can be saved. */
1415 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1417 /* Fetch the arg pointer from the ARGUMENTS block. */
1418 incoming_args = gen_reg_rtx (Pmode);
1419 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1420 #ifndef STACK_GROWS_DOWNWARD
1421 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1422 incoming_args, 0, OPTAB_LIB_WIDEN);
1425 /* Push a new argument block and copy the arguments. Do not allow
1426 the (potential) memcpy call below to interfere with our stack
1428 do_pending_stack_adjust ();
1431 /* Save the stack with nonlocal if available. */
1432 #ifdef HAVE_save_stack_nonlocal
1433 if (HAVE_save_stack_nonlocal)
1434 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1437 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1439 /* Allocate a block of memory onto the stack and copy the memory
1440 arguments to the outgoing arguments address. */
1441 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1442 dest = virtual_outgoing_args_rtx;
1443 #ifndef STACK_GROWS_DOWNWARD
1444 if (GET_CODE (argsize) == CONST_INT)
1445 dest = plus_constant (dest, -INTVAL (argsize));
1447 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1449 dest = gen_rtx_MEM (BLKmode, dest);
1450 set_mem_align (dest, PARM_BOUNDARY);
1451 src = gen_rtx_MEM (BLKmode, incoming_args);
1452 set_mem_align (src, PARM_BOUNDARY);
1453 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1455 /* Refer to the argument block. */
1457 arguments = gen_rtx_MEM (BLKmode, arguments);
1458 set_mem_align (arguments, PARM_BOUNDARY);
1460 /* Walk past the arg-pointer and structure value address. */
1461 size = GET_MODE_SIZE (Pmode);
1463 size += GET_MODE_SIZE (Pmode);
1465 /* Restore each of the registers previously saved. Make USE insns
1466 for each of these registers for use in making the call. */
1467 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1468 if ((mode = apply_args_mode[regno]) != VOIDmode)
1470 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1471 if (size % align != 0)
1472 size = CEIL (size, align) * align;
1473 reg = gen_rtx_REG (mode, regno);
1474 emit_move_insn (reg, adjust_address (arguments, mode, size));
1475 use_reg (&call_fusage, reg);
1476 size += GET_MODE_SIZE (mode);
1479 /* Restore the structure value address unless this is passed as an
1480 "invisible" first argument. */
1481 size = GET_MODE_SIZE (Pmode);
1484 rtx value = gen_reg_rtx (Pmode);
1485 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1486 emit_move_insn (struct_value, value);
1487 if (REG_P (struct_value))
1488 use_reg (&call_fusage, struct_value);
1489 size += GET_MODE_SIZE (Pmode);
1492 /* All arguments and registers used for the call are set up by now! */
1493 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1495 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1496 and we don't want to load it into a register as an optimization,
1497 because prepare_call_address already did it if it should be done. */
1498 if (GET_CODE (function) != SYMBOL_REF)
1499 function = memory_address (FUNCTION_MODE, function);
1501 /* Generate the actual call instruction and save the return value. */
1502 #ifdef HAVE_untyped_call
1503 if (HAVE_untyped_call)
1504 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1505 result, result_vector (1, result)));
1508 #ifdef HAVE_call_value
1509 if (HAVE_call_value)
1513 /* Locate the unique return register. It is not possible to
1514 express a call that sets more than one return register using
1515 call_value; use untyped_call for that. In fact, untyped_call
1516 only needs to save the return registers in the given block. */
1517 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1518 if ((mode = apply_result_mode[regno]) != VOIDmode)
1520 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1522 valreg = gen_rtx_REG (mode, regno);
1525 emit_call_insn (GEN_CALL_VALUE (valreg,
1526 gen_rtx_MEM (FUNCTION_MODE, function),
1527 const0_rtx, NULL_RTX, const0_rtx));
1529 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1535 /* Find the CALL insn we just emitted, and attach the register usage
1537 call_insn = last_call_insn ();
1538 add_function_usage_to (call_insn, call_fusage);
1540 /* Restore the stack. */
1541 #ifdef HAVE_save_stack_nonlocal
1542 if (HAVE_save_stack_nonlocal)
1543 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1546 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1550 /* Return the address of the result block. */
1551 result = copy_addr_to_reg (XEXP (result, 0));
1552 return convert_memory_address (ptr_mode, result);
1555 /* Perform an untyped return. */
1558 expand_builtin_return (rtx result)
1560 int size, align, regno;
1561 enum machine_mode mode;
1563 rtx call_fusage = 0;
1565 result = convert_memory_address (Pmode, result);
1567 apply_result_size ();
1568 result = gen_rtx_MEM (BLKmode, result);
1570 #ifdef HAVE_untyped_return
1571 if (HAVE_untyped_return)
1573 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1579 /* Restore the return value and note that each value is used. */
1581 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1582 if ((mode = apply_result_mode[regno]) != VOIDmode)
1584 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1585 if (size % align != 0)
1586 size = CEIL (size, align) * align;
1587 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1588 emit_move_insn (reg, adjust_address (result, mode, size));
1590 push_to_sequence (call_fusage);
1591 emit_insn (gen_rtx_USE (VOIDmode, reg));
1592 call_fusage = get_insns ();
1594 size += GET_MODE_SIZE (mode);
1597 /* Put the USE insns before the return. */
1598 emit_insn (call_fusage);
1600 /* Return whatever values was restored by jumping directly to the end
1602 expand_naked_return ();
1605 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1607 static enum type_class
1608 type_to_class (tree type)
1610 switch (TREE_CODE (type))
1612 case VOID_TYPE: return void_type_class;
1613 case INTEGER_TYPE: return integer_type_class;
1614 case ENUMERAL_TYPE: return enumeral_type_class;
1615 case BOOLEAN_TYPE: return boolean_type_class;
1616 case POINTER_TYPE: return pointer_type_class;
1617 case REFERENCE_TYPE: return reference_type_class;
1618 case OFFSET_TYPE: return offset_type_class;
1619 case REAL_TYPE: return real_type_class;
1620 case COMPLEX_TYPE: return complex_type_class;
1621 case FUNCTION_TYPE: return function_type_class;
1622 case METHOD_TYPE: return method_type_class;
1623 case RECORD_TYPE: return record_type_class;
1625 case QUAL_UNION_TYPE: return union_type_class;
1626 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1627 ? string_type_class : array_type_class);
1628 case LANG_TYPE: return lang_type_class;
1629 default: return no_type_class;
1633 /* Expand a call EXP to __builtin_classify_type. */
1636 expand_builtin_classify_type (tree exp)
1638 if (call_expr_nargs (exp))
1639 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1640 return GEN_INT (no_type_class);
1643 /* This helper macro, meant to be used in mathfn_built_in below,
1644 determines which among a set of three builtin math functions is
1645 appropriate for a given type mode. The `F' and `L' cases are
1646 automatically generated from the `double' case. */
1647 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1648 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1649 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1650 fcodel = BUILT_IN_MATHFN##L ; break;
1651 /* Similar to above, but appends _R after any F/L suffix. */
1652 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1653 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1654 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1655 fcodel = BUILT_IN_MATHFN##L_R ; break;
1657 /* Return mathematic function equivalent to FN but operating directly
1658 on TYPE, if available. If we can't do the conversion, return zero. */
1660 mathfn_built_in (tree type, enum built_in_function fn)
1662 enum built_in_function fcode, fcodef, fcodel;
1666 CASE_MATHFN (BUILT_IN_ACOS)
1667 CASE_MATHFN (BUILT_IN_ACOSH)
1668 CASE_MATHFN (BUILT_IN_ASIN)
1669 CASE_MATHFN (BUILT_IN_ASINH)
1670 CASE_MATHFN (BUILT_IN_ATAN)
1671 CASE_MATHFN (BUILT_IN_ATAN2)
1672 CASE_MATHFN (BUILT_IN_ATANH)
1673 CASE_MATHFN (BUILT_IN_CBRT)
1674 CASE_MATHFN (BUILT_IN_CEIL)
1675 CASE_MATHFN (BUILT_IN_CEXPI)
1676 CASE_MATHFN (BUILT_IN_COPYSIGN)
1677 CASE_MATHFN (BUILT_IN_COS)
1678 CASE_MATHFN (BUILT_IN_COSH)
1679 CASE_MATHFN (BUILT_IN_DREM)
1680 CASE_MATHFN (BUILT_IN_ERF)
1681 CASE_MATHFN (BUILT_IN_ERFC)
1682 CASE_MATHFN (BUILT_IN_EXP)
1683 CASE_MATHFN (BUILT_IN_EXP10)
1684 CASE_MATHFN (BUILT_IN_EXP2)
1685 CASE_MATHFN (BUILT_IN_EXPM1)
1686 CASE_MATHFN (BUILT_IN_FABS)
1687 CASE_MATHFN (BUILT_IN_FDIM)
1688 CASE_MATHFN (BUILT_IN_FLOOR)
1689 CASE_MATHFN (BUILT_IN_FMA)
1690 CASE_MATHFN (BUILT_IN_FMAX)
1691 CASE_MATHFN (BUILT_IN_FMIN)
1692 CASE_MATHFN (BUILT_IN_FMOD)
1693 CASE_MATHFN (BUILT_IN_FREXP)
1694 CASE_MATHFN (BUILT_IN_GAMMA)
1695 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1696 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1697 CASE_MATHFN (BUILT_IN_HYPOT)
1698 CASE_MATHFN (BUILT_IN_ILOGB)
1699 CASE_MATHFN (BUILT_IN_INF)
1700 CASE_MATHFN (BUILT_IN_ISINF)
1701 CASE_MATHFN (BUILT_IN_J0)
1702 CASE_MATHFN (BUILT_IN_J1)
1703 CASE_MATHFN (BUILT_IN_JN)
1704 CASE_MATHFN (BUILT_IN_LCEIL)
1705 CASE_MATHFN (BUILT_IN_LDEXP)
1706 CASE_MATHFN (BUILT_IN_LFLOOR)
1707 CASE_MATHFN (BUILT_IN_LGAMMA)
1708 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1709 CASE_MATHFN (BUILT_IN_LLCEIL)
1710 CASE_MATHFN (BUILT_IN_LLFLOOR)
1711 CASE_MATHFN (BUILT_IN_LLRINT)
1712 CASE_MATHFN (BUILT_IN_LLROUND)
1713 CASE_MATHFN (BUILT_IN_LOG)
1714 CASE_MATHFN (BUILT_IN_LOG10)
1715 CASE_MATHFN (BUILT_IN_LOG1P)
1716 CASE_MATHFN (BUILT_IN_LOG2)
1717 CASE_MATHFN (BUILT_IN_LOGB)
1718 CASE_MATHFN (BUILT_IN_LRINT)
1719 CASE_MATHFN (BUILT_IN_LROUND)
1720 CASE_MATHFN (BUILT_IN_MODF)
1721 CASE_MATHFN (BUILT_IN_NAN)
1722 CASE_MATHFN (BUILT_IN_NANS)
1723 CASE_MATHFN (BUILT_IN_NEARBYINT)
1724 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1725 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1726 CASE_MATHFN (BUILT_IN_POW)
1727 CASE_MATHFN (BUILT_IN_POWI)
1728 CASE_MATHFN (BUILT_IN_POW10)
1729 CASE_MATHFN (BUILT_IN_REMAINDER)
1730 CASE_MATHFN (BUILT_IN_REMQUO)
1731 CASE_MATHFN (BUILT_IN_RINT)
1732 CASE_MATHFN (BUILT_IN_ROUND)
1733 CASE_MATHFN (BUILT_IN_SCALB)
1734 CASE_MATHFN (BUILT_IN_SCALBLN)
1735 CASE_MATHFN (BUILT_IN_SCALBN)
1736 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1737 CASE_MATHFN (BUILT_IN_SIN)
1738 CASE_MATHFN (BUILT_IN_SINCOS)
1739 CASE_MATHFN (BUILT_IN_SINH)
1740 CASE_MATHFN (BUILT_IN_SQRT)
1741 CASE_MATHFN (BUILT_IN_TAN)
1742 CASE_MATHFN (BUILT_IN_TANH)
1743 CASE_MATHFN (BUILT_IN_TGAMMA)
1744 CASE_MATHFN (BUILT_IN_TRUNC)
1745 CASE_MATHFN (BUILT_IN_Y0)
1746 CASE_MATHFN (BUILT_IN_Y1)
1747 CASE_MATHFN (BUILT_IN_YN)
1753 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1754 return implicit_built_in_decls[fcode];
1755 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1756 return implicit_built_in_decls[fcodef];
1757 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1758 return implicit_built_in_decls[fcodel];
1763 /* If errno must be maintained, expand the RTL to check if the result,
1764 TARGET, of a built-in function call, EXP, is NaN, and if so set
1768 expand_errno_check (tree exp, rtx target)
1770 rtx lab = gen_label_rtx ();
1772 /* Test the result; if it is NaN, set errno=EDOM because
1773 the argument was not in the domain. */
1774 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1778 /* If this built-in doesn't throw an exception, set errno directly. */
1779 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1781 #ifdef GEN_ERRNO_RTX
1782 rtx errno_rtx = GEN_ERRNO_RTX;
1785 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1787 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1793 /* We can't set errno=EDOM directly; let the library call do it.
1794 Pop the arguments right away in case the call gets deleted. */
1796 expand_call (exp, target, 0);
1801 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1802 Return NULL_RTX if a normal call should be emitted rather than expanding
1803 the function in-line. EXP is the expression that is a call to the builtin
1804 function; if convenient, the result should be placed in TARGET.
1805 SUBTARGET may be used as the target for computing one of EXP's operands. */
1808 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1810 optab builtin_optab;
1811 rtx op0, insns, before_call;
1812 tree fndecl = get_callee_fndecl (exp);
1813 enum machine_mode mode;
1814 bool errno_set = false;
1817 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1820 arg = CALL_EXPR_ARG (exp, 0);
1822 switch (DECL_FUNCTION_CODE (fndecl))
1824 CASE_FLT_FN (BUILT_IN_SQRT):
1825 errno_set = ! tree_expr_nonnegative_p (arg);
1826 builtin_optab = sqrt_optab;
1828 CASE_FLT_FN (BUILT_IN_EXP):
1829 errno_set = true; builtin_optab = exp_optab; break;
1830 CASE_FLT_FN (BUILT_IN_EXP10):
1831 CASE_FLT_FN (BUILT_IN_POW10):
1832 errno_set = true; builtin_optab = exp10_optab; break;
1833 CASE_FLT_FN (BUILT_IN_EXP2):
1834 errno_set = true; builtin_optab = exp2_optab; break;
1835 CASE_FLT_FN (BUILT_IN_EXPM1):
1836 errno_set = true; builtin_optab = expm1_optab; break;
1837 CASE_FLT_FN (BUILT_IN_LOGB):
1838 errno_set = true; builtin_optab = logb_optab; break;
1839 CASE_FLT_FN (BUILT_IN_LOG):
1840 errno_set = true; builtin_optab = log_optab; break;
1841 CASE_FLT_FN (BUILT_IN_LOG10):
1842 errno_set = true; builtin_optab = log10_optab; break;
1843 CASE_FLT_FN (BUILT_IN_LOG2):
1844 errno_set = true; builtin_optab = log2_optab; break;
1845 CASE_FLT_FN (BUILT_IN_LOG1P):
1846 errno_set = true; builtin_optab = log1p_optab; break;
1847 CASE_FLT_FN (BUILT_IN_ASIN):
1848 builtin_optab = asin_optab; break;
1849 CASE_FLT_FN (BUILT_IN_ACOS):
1850 builtin_optab = acos_optab; break;
1851 CASE_FLT_FN (BUILT_IN_TAN):
1852 builtin_optab = tan_optab; break;
1853 CASE_FLT_FN (BUILT_IN_ATAN):
1854 builtin_optab = atan_optab; break;
1855 CASE_FLT_FN (BUILT_IN_FLOOR):
1856 builtin_optab = floor_optab; break;
1857 CASE_FLT_FN (BUILT_IN_CEIL):
1858 builtin_optab = ceil_optab; break;
1859 CASE_FLT_FN (BUILT_IN_TRUNC):
1860 builtin_optab = btrunc_optab; break;
1861 CASE_FLT_FN (BUILT_IN_ROUND):
1862 builtin_optab = round_optab; break;
1863 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1864 builtin_optab = nearbyint_optab;
1865 if (flag_trapping_math)
1867 /* Else fallthrough and expand as rint. */
1868 CASE_FLT_FN (BUILT_IN_RINT):
1869 builtin_optab = rint_optab; break;
1874 /* Make a suitable register to place result in. */
1875 mode = TYPE_MODE (TREE_TYPE (exp));
1877 if (! flag_errno_math || ! HONOR_NANS (mode))
1880 /* Before working hard, check whether the instruction is available. */
1881 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1883 target = gen_reg_rtx (mode);
1885 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1886 need to expand the argument again. This way, we will not perform
1887 side-effects more the once. */
1888 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1890 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1894 /* Compute into TARGET.
1895 Set TARGET to wherever the result comes back. */
1896 target = expand_unop (mode, builtin_optab, op0, target, 0);
1901 expand_errno_check (exp, target);
1903 /* Output the entire sequence. */
1904 insns = get_insns ();
1910 /* If we were unable to expand via the builtin, stop the sequence
1911 (without outputting the insns) and call to the library function
1912 with the stabilized argument list. */
1916 before_call = get_last_insn ();
1918 target = expand_call (exp, target, target == const0_rtx);
1920 /* If this is a sqrt operation and we don't care about errno, try to
1921 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1922 This allows the semantics of the libcall to be visible to the RTL
1924 if (builtin_optab == sqrt_optab && !errno_set)
1926 /* Search backwards through the insns emitted by expand_call looking
1927 for the instruction with the REG_RETVAL note. */
1928 rtx last = get_last_insn ();
1929 while (last != before_call)
1931 if (find_reg_note (last, REG_RETVAL, NULL))
1933 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1934 /* Check that the REQ_EQUAL note is an EXPR_LIST with
1935 two elements, i.e. symbol_ref(sqrt) and the operand. */
1937 && GET_CODE (note) == EXPR_LIST
1938 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1939 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1940 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1942 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1943 /* Check operand is a register with expected mode. */
1946 && GET_MODE (operand) == mode)
1948 /* Replace the REG_EQUAL note with a SQRT rtx. */
1949 rtx equiv = gen_rtx_SQRT (mode, operand);
1950 set_unique_reg_note (last, REG_EQUAL, equiv);
1955 last = PREV_INSN (last);
1962 /* Expand a call to the builtin binary math functions (pow and atan2).
1963 Return NULL_RTX if a normal call should be emitted rather than expanding the
1964 function in-line. EXP is the expression that is a call to the builtin
1965 function; if convenient, the result should be placed in TARGET.
1966 SUBTARGET may be used as the target for computing one of EXP's
1970 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1972 optab builtin_optab;
1973 rtx op0, op1, insns;
1974 int op1_type = REAL_TYPE;
1975 tree fndecl = get_callee_fndecl (exp);
1977 enum machine_mode mode;
1978 bool errno_set = true;
1980 switch (DECL_FUNCTION_CODE (fndecl))
1982 CASE_FLT_FN (BUILT_IN_SCALBN):
1983 CASE_FLT_FN (BUILT_IN_SCALBLN):
1984 CASE_FLT_FN (BUILT_IN_LDEXP):
1985 op1_type = INTEGER_TYPE;
1990 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
1993 arg0 = CALL_EXPR_ARG (exp, 0);
1994 arg1 = CALL_EXPR_ARG (exp, 1);
1996 switch (DECL_FUNCTION_CODE (fndecl))
1998 CASE_FLT_FN (BUILT_IN_POW):
1999 builtin_optab = pow_optab; break;
2000 CASE_FLT_FN (BUILT_IN_ATAN2):
2001 builtin_optab = atan2_optab; break;
2002 CASE_FLT_FN (BUILT_IN_SCALB):
2003 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2005 builtin_optab = scalb_optab; break;
2006 CASE_FLT_FN (BUILT_IN_SCALBN):
2007 CASE_FLT_FN (BUILT_IN_SCALBLN):
2008 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2010 /* Fall through... */
2011 CASE_FLT_FN (BUILT_IN_LDEXP):
2012 builtin_optab = ldexp_optab; break;
2013 CASE_FLT_FN (BUILT_IN_FMOD):
2014 builtin_optab = fmod_optab; break;
2015 CASE_FLT_FN (BUILT_IN_REMAINDER):
2016 CASE_FLT_FN (BUILT_IN_DREM):
2017 builtin_optab = remainder_optab; break;
2022 /* Make a suitable register to place result in. */
2023 mode = TYPE_MODE (TREE_TYPE (exp));
2025 /* Before working hard, check whether the instruction is available. */
2026 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2029 target = gen_reg_rtx (mode);
2031 if (! flag_errno_math || ! HONOR_NANS (mode))
2034 /* Always stabilize the argument list. */
2035 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2036 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2038 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2039 op1 = expand_normal (arg1);
2043 /* Compute into TARGET.
2044 Set TARGET to wherever the result comes back. */
2045 target = expand_binop (mode, builtin_optab, op0, op1,
2046 target, 0, OPTAB_DIRECT);
2048 /* If we were unable to expand via the builtin, stop the sequence
2049 (without outputting the insns) and call to the library function
2050 with the stabilized argument list. */
2054 return expand_call (exp, target, target == const0_rtx);
2058 expand_errno_check (exp, target);
2060 /* Output the entire sequence. */
2061 insns = get_insns ();
2068 /* Expand a call to the builtin sin and cos math functions.
2069 Return NULL_RTX if a normal call should be emitted rather than expanding the
2070 function in-line. EXP is the expression that is a call to the builtin
2071 function; if convenient, the result should be placed in TARGET.
2072 SUBTARGET may be used as the target for computing one of EXP's
2076 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2078 optab builtin_optab;
2080 tree fndecl = get_callee_fndecl (exp);
2081 enum machine_mode mode;
2084 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2087 arg = CALL_EXPR_ARG (exp, 0);
2089 switch (DECL_FUNCTION_CODE (fndecl))
2091 CASE_FLT_FN (BUILT_IN_SIN):
2092 CASE_FLT_FN (BUILT_IN_COS):
2093 builtin_optab = sincos_optab; break;
2098 /* Make a suitable register to place result in. */
2099 mode = TYPE_MODE (TREE_TYPE (exp));
2101 /* Check if sincos insn is available, otherwise fallback
2102 to sin or cos insn. */
2103 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2104 switch (DECL_FUNCTION_CODE (fndecl))
2106 CASE_FLT_FN (BUILT_IN_SIN):
2107 builtin_optab = sin_optab; break;
2108 CASE_FLT_FN (BUILT_IN_COS):
2109 builtin_optab = cos_optab; break;
2114 /* Before working hard, check whether the instruction is available. */
2115 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2117 target = gen_reg_rtx (mode);
2119 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2120 need to expand the argument again. This way, we will not perform
2121 side-effects more the once. */
2122 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2124 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2128 /* Compute into TARGET.
2129 Set TARGET to wherever the result comes back. */
2130 if (builtin_optab == sincos_optab)
2134 switch (DECL_FUNCTION_CODE (fndecl))
2136 CASE_FLT_FN (BUILT_IN_SIN):
2137 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2139 CASE_FLT_FN (BUILT_IN_COS):
2140 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2145 gcc_assert (result);
2149 target = expand_unop (mode, builtin_optab, op0, target, 0);
2154 /* Output the entire sequence. */
2155 insns = get_insns ();
2161 /* If we were unable to expand via the builtin, stop the sequence
2162 (without outputting the insns) and call to the library function
2163 with the stabilized argument list. */
2167 target = expand_call (exp, target, target == const0_rtx);
2172 /* Expand a call to one of the builtin math functions that operate on
2173 floating point argument and output an integer result (ilogb, isinf,
2175 Return 0 if a normal call should be emitted rather than expanding the
2176 function in-line. EXP is the expression that is a call to the builtin
2177 function; if convenient, the result should be placed in TARGET.
2178 SUBTARGET may be used as the target for computing one of EXP's operands. */
2181 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2183 optab builtin_optab = 0;
2184 enum insn_code icode = CODE_FOR_nothing;
2186 tree fndecl = get_callee_fndecl (exp);
2187 enum machine_mode mode;
2188 bool errno_set = false;
2191 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2194 arg = CALL_EXPR_ARG (exp, 0);
2196 switch (DECL_FUNCTION_CODE (fndecl))
2198 CASE_FLT_FN (BUILT_IN_ILOGB):
2199 errno_set = true; builtin_optab = ilogb_optab; break;
2200 CASE_FLT_FN (BUILT_IN_ISINF):
2201 builtin_optab = isinf_optab; break;
2202 case BUILT_IN_ISNORMAL:
2203 case BUILT_IN_ISFINITE:
2204 CASE_FLT_FN (BUILT_IN_FINITE):
2205 /* These builtins have no optabs (yet). */
2211 /* There's no easy way to detect the case we need to set EDOM. */
2212 if (flag_errno_math && errno_set)
2215 /* Optab mode depends on the mode of the input argument. */
2216 mode = TYPE_MODE (TREE_TYPE (arg));
2219 icode = optab_handler (builtin_optab, mode)->insn_code;
2221 /* Before working hard, check whether the instruction is available. */
2222 if (icode != CODE_FOR_nothing)
2224 /* Make a suitable register to place result in. */
2226 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2227 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2229 gcc_assert (insn_data[icode].operand[0].predicate
2230 (target, GET_MODE (target)));
2232 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2233 need to expand the argument again. This way, we will not perform
2234 side-effects more the once. */
2235 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2237 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2239 if (mode != GET_MODE (op0))
2240 op0 = convert_to_mode (mode, op0, 0);
2242 /* Compute into TARGET.
2243 Set TARGET to wherever the result comes back. */
2244 emit_unop_insn (icode, target, op0, UNKNOWN);
2248 /* If there is no optab, try generic code. */
2249 switch (DECL_FUNCTION_CODE (fndecl))
2253 CASE_FLT_FN (BUILT_IN_ISINF):
2255 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2256 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2257 tree const type = TREE_TYPE (arg);
2261 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2262 real_from_string (&r, buf);
2263 result = build_call_expr (isgr_fn, 2,
2264 fold_build1 (ABS_EXPR, type, arg),
2265 build_real (type, r));
2266 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2268 CASE_FLT_FN (BUILT_IN_FINITE):
2269 case BUILT_IN_ISFINITE:
2271 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2272 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2273 tree const type = TREE_TYPE (arg);
2277 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2278 real_from_string (&r, buf);
2279 result = build_call_expr (isle_fn, 2,
2280 fold_build1 (ABS_EXPR, type, arg),
2281 build_real (type, r));
2282 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2284 case BUILT_IN_ISNORMAL:
2286 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2287 islessequal(fabs(x),DBL_MAX). */
2288 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2289 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2290 tree const type = TREE_TYPE (arg);
2291 REAL_VALUE_TYPE rmax, rmin;
2294 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2295 real_from_string (&rmax, buf);
2296 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2297 real_from_string (&rmin, buf);
2298 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2299 result = build_call_expr (isle_fn, 2, arg,
2300 build_real (type, rmax));
2301 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2302 build_call_expr (isge_fn, 2, arg,
2303 build_real (type, rmin)));
2304 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2310 target = expand_call (exp, target, target == const0_rtx);
2315 /* Expand a call to the builtin sincos math function.
2316 Return NULL_RTX if a normal call should be emitted rather than expanding the
2317 function in-line. EXP is the expression that is a call to the builtin
2321 expand_builtin_sincos (tree exp)
2323 rtx op0, op1, op2, target1, target2;
2324 enum machine_mode mode;
2325 tree arg, sinp, cosp;
2328 if (!validate_arglist (exp, REAL_TYPE,
2329 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2332 arg = CALL_EXPR_ARG (exp, 0);
2333 sinp = CALL_EXPR_ARG (exp, 1);
2334 cosp = CALL_EXPR_ARG (exp, 2);
2336 /* Make a suitable register to place result in. */
2337 mode = TYPE_MODE (TREE_TYPE (arg));
2339 /* Check if sincos insn is available, otherwise emit the call. */
2340 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2343 target1 = gen_reg_rtx (mode);
2344 target2 = gen_reg_rtx (mode);
2346 op0 = expand_normal (arg);
2347 op1 = expand_normal (build_fold_indirect_ref (sinp));
2348 op2 = expand_normal (build_fold_indirect_ref (cosp));
2350 /* Compute into target1 and target2.
2351 Set TARGET to wherever the result comes back. */
2352 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2353 gcc_assert (result);
2355 /* Move target1 and target2 to the memory locations indicated
2357 emit_move_insn (op1, target1);
2358 emit_move_insn (op2, target2);
2363 /* Expand a call to the internal cexpi builtin to the sincos math function.
2364 EXP is the expression that is a call to the builtin function; if convenient,
2365 the result should be placed in TARGET. SUBTARGET may be used as the target
2366 for computing one of EXP's operands. */
2369 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2371 tree fndecl = get_callee_fndecl (exp);
2373 enum machine_mode mode;
2376 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2379 arg = CALL_EXPR_ARG (exp, 0);
2380 type = TREE_TYPE (arg);
2381 mode = TYPE_MODE (TREE_TYPE (arg));
2383 /* Try expanding via a sincos optab, fall back to emitting a libcall
2384 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2385 is only generated from sincos, cexp or if we have either of them. */
2386 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2388 op1 = gen_reg_rtx (mode);
2389 op2 = gen_reg_rtx (mode);
2391 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2393 /* Compute into op1 and op2. */
2394 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2396 else if (TARGET_HAS_SINCOS)
2398 tree call, fn = NULL_TREE;
2402 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2403 fn = built_in_decls[BUILT_IN_SINCOSF];
2404 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2405 fn = built_in_decls[BUILT_IN_SINCOS];
2406 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2407 fn = built_in_decls[BUILT_IN_SINCOSL];
2411 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2412 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2413 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2414 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2415 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2416 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2418 /* Make sure not to fold the sincos call again. */
2419 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2420 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2421 call, 3, arg, top1, top2));
2425 tree call, fn = NULL_TREE, narg;
2426 tree ctype = build_complex_type (type);
2428 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2429 fn = built_in_decls[BUILT_IN_CEXPF];
2430 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2431 fn = built_in_decls[BUILT_IN_CEXP];
2432 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2433 fn = built_in_decls[BUILT_IN_CEXPL];
2437 /* If we don't have a decl for cexp create one. This is the
2438 friendliest fallback if the user calls __builtin_cexpi
2439 without full target C99 function support. */
2440 if (fn == NULL_TREE)
2443 const char *name = NULL;
2445 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2447 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2449 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2452 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2453 fn = build_fn_decl (name, fntype);
2456 narg = fold_build2 (COMPLEX_EXPR, ctype,
2457 build_real (type, dconst0), arg);
2459 /* Make sure not to fold the cexp call again. */
2460 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2461 return expand_expr (build_call_nary (ctype, call, 1, narg),
2462 target, VOIDmode, EXPAND_NORMAL);
2465 /* Now build the proper return type. */
2466 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2467 make_tree (TREE_TYPE (arg), op2),
2468 make_tree (TREE_TYPE (arg), op1)),
2469 target, VOIDmode, EXPAND_NORMAL);
2472 /* Expand a call to one of the builtin rounding functions gcc defines
2473 as an extension (lfloor and lceil). As these are gcc extensions we
2474 do not need to worry about setting errno to EDOM.
2475 If expanding via optab fails, lower expression to (int)(floor(x)).
2476 EXP is the expression that is a call to the builtin function;
2477 if convenient, the result should be placed in TARGET. SUBTARGET may
2478 be used as the target for computing one of EXP's operands. */
2481 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2483 convert_optab builtin_optab;
2484 rtx op0, insns, tmp;
2485 tree fndecl = get_callee_fndecl (exp);
2486 enum built_in_function fallback_fn;
2487 tree fallback_fndecl;
2488 enum machine_mode mode;
2491 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2494 arg = CALL_EXPR_ARG (exp, 0);
2496 switch (DECL_FUNCTION_CODE (fndecl))
2498 CASE_FLT_FN (BUILT_IN_LCEIL):
2499 CASE_FLT_FN (BUILT_IN_LLCEIL):
2500 builtin_optab = lceil_optab;
2501 fallback_fn = BUILT_IN_CEIL;
2504 CASE_FLT_FN (BUILT_IN_LFLOOR):
2505 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2506 builtin_optab = lfloor_optab;
2507 fallback_fn = BUILT_IN_FLOOR;
2514 /* Make a suitable register to place result in. */
2515 mode = TYPE_MODE (TREE_TYPE (exp));
2517 target = gen_reg_rtx (mode);
2519 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2520 need to expand the argument again. This way, we will not perform
2521 side-effects more the once. */
2522 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2524 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2528 /* Compute into TARGET. */
2529 if (expand_sfix_optab (target, op0, builtin_optab))
2531 /* Output the entire sequence. */
2532 insns = get_insns ();
2538 /* If we were unable to expand via the builtin, stop the sequence
2539 (without outputting the insns). */
2542 /* Fall back to floating point rounding optab. */
2543 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2545 /* For non-C99 targets we may end up without a fallback fndecl here
2546 if the user called __builtin_lfloor directly. In this case emit
2547 a call to the floor/ceil variants nevertheless. This should result
2548 in the best user experience for not full C99 targets. */
2549 if (fallback_fndecl == NULL_TREE)
2552 const char *name = NULL;
2554 switch (DECL_FUNCTION_CODE (fndecl))
2556 case BUILT_IN_LCEIL:
2557 case BUILT_IN_LLCEIL:
2560 case BUILT_IN_LCEILF:
2561 case BUILT_IN_LLCEILF:
2564 case BUILT_IN_LCEILL:
2565 case BUILT_IN_LLCEILL:
2568 case BUILT_IN_LFLOOR:
2569 case BUILT_IN_LLFLOOR:
2572 case BUILT_IN_LFLOORF:
2573 case BUILT_IN_LLFLOORF:
2576 case BUILT_IN_LFLOORL:
2577 case BUILT_IN_LLFLOORL:
2584 fntype = build_function_type_list (TREE_TYPE (arg),
2585 TREE_TYPE (arg), NULL_TREE);
2586 fallback_fndecl = build_fn_decl (name, fntype);
2589 exp = build_call_expr (fallback_fndecl, 1, arg);
2591 tmp = expand_normal (exp);
2593 /* Truncate the result of floating point optab to integer
2594 via expand_fix (). */
2595 target = gen_reg_rtx (mode);
2596 expand_fix (target, tmp, 0);
2601 /* Expand a call to one of the builtin math functions doing integer
2603 Return 0 if a normal call should be emitted rather than expanding the
2604 function in-line. EXP is the expression that is a call to the builtin
2605 function; if convenient, the result should be placed in TARGET.
2606 SUBTARGET may be used as the target for computing one of EXP's operands. */
2609 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2611 convert_optab builtin_optab;
2613 tree fndecl = get_callee_fndecl (exp);
2615 enum machine_mode mode;
2617 /* There's no easy way to detect the case we need to set EDOM. */
2618 if (flag_errno_math)
2621 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2624 arg = CALL_EXPR_ARG (exp, 0);
2626 switch (DECL_FUNCTION_CODE (fndecl))
2628 CASE_FLT_FN (BUILT_IN_LRINT):
2629 CASE_FLT_FN (BUILT_IN_LLRINT):
2630 builtin_optab = lrint_optab; break;
2631 CASE_FLT_FN (BUILT_IN_LROUND):
2632 CASE_FLT_FN (BUILT_IN_LLROUND):
2633 builtin_optab = lround_optab; break;
2638 /* Make a suitable register to place result in. */
2639 mode = TYPE_MODE (TREE_TYPE (exp));
2641 target = gen_reg_rtx (mode);
2643 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2644 need to expand the argument again. This way, we will not perform
2645 side-effects more the once. */
2646 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2648 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2652 if (expand_sfix_optab (target, op0, builtin_optab))
2654 /* Output the entire sequence. */
2655 insns = get_insns ();
2661 /* If we were unable to expand via the builtin, stop the sequence
2662 (without outputting the insns) and call to the library function
2663 with the stabilized argument list. */
2666 target = expand_call (exp, target, target == const0_rtx);
2671 /* To evaluate powi(x,n), the floating point value x raised to the
2672 constant integer exponent n, we use a hybrid algorithm that
2673 combines the "window method" with look-up tables. For an
2674 introduction to exponentiation algorithms and "addition chains",
2675 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2676 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2677 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2678 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2680 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2681 multiplications to inline before calling the system library's pow
2682 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2683 so this default never requires calling pow, powf or powl. */
2685 #ifndef POWI_MAX_MULTS
2686 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2689 /* The size of the "optimal power tree" lookup table. All
2690 exponents less than this value are simply looked up in the
2691 powi_table below. This threshold is also used to size the
2692 cache of pseudo registers that hold intermediate results. */
2693 #define POWI_TABLE_SIZE 256
2695 /* The size, in bits of the window, used in the "window method"
2696 exponentiation algorithm. This is equivalent to a radix of
2697 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2698 #define POWI_WINDOW_SIZE 3
2700 /* The following table is an efficient representation of an
2701 "optimal power tree". For each value, i, the corresponding
2702 value, j, in the table states than an optimal evaluation
2703 sequence for calculating pow(x,i) can be found by evaluating
2704 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2705 100 integers is given in Knuth's "Seminumerical algorithms". */
2707 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2709 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2710 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2711 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2712 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2713 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2714 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2715 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2716 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2717 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2718 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2719 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2720 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2721 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2722 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2723 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2724 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2725 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2726 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2727 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2728 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2729 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2730 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2731 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2732 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2733 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2734 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2735 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2736 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2737 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2738 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2739 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2740 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2744 /* Return the number of multiplications required to calculate
2745 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2746 subroutine of powi_cost. CACHE is an array indicating
2747 which exponents have already been calculated. */
2750 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2752 /* If we've already calculated this exponent, then this evaluation
2753 doesn't require any additional multiplications. */
2758 return powi_lookup_cost (n - powi_table[n], cache)
2759 + powi_lookup_cost (powi_table[n], cache) + 1;
2762 /* Return the number of multiplications required to calculate
2763 powi(x,n) for an arbitrary x, given the exponent N. This
2764 function needs to be kept in sync with expand_powi below. */
2767 powi_cost (HOST_WIDE_INT n)
2769 bool cache[POWI_TABLE_SIZE];
2770 unsigned HOST_WIDE_INT digit;
2771 unsigned HOST_WIDE_INT val;
2777 /* Ignore the reciprocal when calculating the cost. */
2778 val = (n < 0) ? -n : n;
2780 /* Initialize the exponent cache. */
2781 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2786 while (val >= POWI_TABLE_SIZE)
2790 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2791 result += powi_lookup_cost (digit, cache)
2792 + POWI_WINDOW_SIZE + 1;
2793 val >>= POWI_WINDOW_SIZE;
2802 return result + powi_lookup_cost (val, cache);
2805 /* Recursive subroutine of expand_powi. This function takes the array,
2806 CACHE, of already calculated exponents and an exponent N and returns
2807 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2810 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2812 unsigned HOST_WIDE_INT digit;
2816 if (n < POWI_TABLE_SIZE)
2821 target = gen_reg_rtx (mode);
2824 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2825 op1 = expand_powi_1 (mode, powi_table[n], cache);
2829 target = gen_reg_rtx (mode);
2830 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2831 op0 = expand_powi_1 (mode, n - digit, cache);
2832 op1 = expand_powi_1 (mode, digit, cache);
2836 target = gen_reg_rtx (mode);
2837 op0 = expand_powi_1 (mode, n >> 1, cache);
2841 result = expand_mult (mode, op0, op1, target, 0);
2842 if (result != target)
2843 emit_move_insn (target, result);
2847 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2848 floating point operand in mode MODE, and N is the exponent. This
2849 function needs to be kept in sync with powi_cost above. */
2852 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2854 unsigned HOST_WIDE_INT val;
2855 rtx cache[POWI_TABLE_SIZE];
2859 return CONST1_RTX (mode);
2861 val = (n < 0) ? -n : n;
2863 memset (cache, 0, sizeof (cache));
2866 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2868 /* If the original exponent was negative, reciprocate the result. */
2870 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2871 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2876 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2877 a normal call should be emitted rather than expanding the function
2878 in-line. EXP is the expression that is a call to the builtin
2879 function; if convenient, the result should be placed in TARGET. */
2882 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2886 tree type = TREE_TYPE (exp);
2887 REAL_VALUE_TYPE cint, c, c2;
2890 enum machine_mode mode = TYPE_MODE (type);
2892 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2895 arg0 = CALL_EXPR_ARG (exp, 0);
2896 arg1 = CALL_EXPR_ARG (exp, 1);
2898 if (TREE_CODE (arg1) != REAL_CST
2899 || TREE_OVERFLOW (arg1))
2900 return expand_builtin_mathfn_2 (exp, target, subtarget);
2902 /* Handle constant exponents. */
2904 /* For integer valued exponents we can expand to an optimal multiplication
2905 sequence using expand_powi. */
2906 c = TREE_REAL_CST (arg1);
2907 n = real_to_integer (&c);
2908 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2909 if (real_identical (&c, &cint)
2910 && ((n >= -1 && n <= 2)
2911 || (flag_unsafe_math_optimizations
2913 && powi_cost (n) <= POWI_MAX_MULTS)))
2915 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2918 op = force_reg (mode, op);
2919 op = expand_powi (op, mode, n);
2924 narg0 = builtin_save_expr (arg0);
2926 /* If the exponent is not integer valued, check if it is half of an integer.
2927 In this case we can expand to sqrt (x) * x**(n/2). */
2928 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2929 if (fn != NULL_TREE)
2931 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2932 n = real_to_integer (&c2);
2933 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2934 if (real_identical (&c2, &cint)
2935 && ((flag_unsafe_math_optimizations
2937 && powi_cost (n/2) <= POWI_MAX_MULTS)
2940 tree call_expr = build_call_expr (fn, 1, narg0);
2941 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2944 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2945 op2 = force_reg (mode, op2);
2946 op2 = expand_powi (op2, mode, abs (n / 2));
2947 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2948 0, OPTAB_LIB_WIDEN);
2949 /* If the original exponent was negative, reciprocate the
2952 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2953 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2959 /* Try if the exponent is a third of an integer. In this case
2960 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2961 different from pow (x, 1./3.) due to rounding and behavior
2962 with negative x we need to constrain this transformation to
2963 unsafe math and positive x or finite math. */
2964 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2966 && flag_unsafe_math_optimizations
2967 && (tree_expr_nonnegative_p (arg0)
2968 || !HONOR_NANS (mode)))
2970 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2971 real_round (&c2, mode, &c2);
2972 n = real_to_integer (&c2);
2973 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2974 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
2975 real_convert (&c2, mode, &c2);
2976 if (real_identical (&c2, &c)
2978 && powi_cost (n/3) <= POWI_MAX_MULTS)
2981 tree call_expr = build_call_expr (fn, 1,narg0);
2982 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2983 if (abs (n) % 3 == 2)
2984 op = expand_simple_binop (mode, MULT, op, op, op,
2985 0, OPTAB_LIB_WIDEN);
2988 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2989 op2 = force_reg (mode, op2);
2990 op2 = expand_powi (op2, mode, abs (n / 3));
2991 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2992 0, OPTAB_LIB_WIDEN);
2993 /* If the original exponent was negative, reciprocate the
2996 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2997 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3003 /* Fall back to optab expansion. */
3004 return expand_builtin_mathfn_2 (exp, target, subtarget);
3007 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3008 a normal call should be emitted rather than expanding the function
3009 in-line. EXP is the expression that is a call to the builtin
3010 function; if convenient, the result should be placed in TARGET. */
3013 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3017 enum machine_mode mode;
3018 enum machine_mode mode2;
3020 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3023 arg0 = CALL_EXPR_ARG (exp, 0);
3024 arg1 = CALL_EXPR_ARG (exp, 1);
3025 mode = TYPE_MODE (TREE_TYPE (exp));
3027 /* Handle constant power. */
3029 if (TREE_CODE (arg1) == INTEGER_CST
3030 && !TREE_OVERFLOW (arg1))
3032 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3034 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3035 Otherwise, check the number of multiplications required. */
3036 if ((TREE_INT_CST_HIGH (arg1) == 0
3037 || TREE_INT_CST_HIGH (arg1) == -1)
3038 && ((n >= -1 && n <= 2)
3040 && powi_cost (n) <= POWI_MAX_MULTS)))
3042 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3043 op0 = force_reg (mode, op0);
3044 return expand_powi (op0, mode, n);
3048 /* Emit a libcall to libgcc. */
3050 /* Mode of the 2nd argument must match that of an int. */
3051 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3053 if (target == NULL_RTX)
3054 target = gen_reg_rtx (mode);
3056 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3057 if (GET_MODE (op0) != mode)
3058 op0 = convert_to_mode (mode, op0, 0);
3059 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3060 if (GET_MODE (op1) != mode2)
3061 op1 = convert_to_mode (mode2, op1, 0);
3063 target = emit_library_call_value (optab_handler (powi_optab, mode)->libfunc,
3064 target, LCT_CONST_MAKE_BLOCK, mode, 2,
3065 op0, mode, op1, mode2);
3070 /* Expand expression EXP which is a call to the strlen builtin. Return
3071 NULL_RTX if we failed the caller should emit a normal call, otherwise
3072 try to get the result in TARGET, if convenient. */
3075 expand_builtin_strlen (tree exp, rtx target,
3076 enum machine_mode target_mode)
3078 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3084 tree src = CALL_EXPR_ARG (exp, 0);
3085 rtx result, src_reg, char_rtx, before_strlen;
3086 enum machine_mode insn_mode = target_mode, char_mode;
3087 enum insn_code icode = CODE_FOR_nothing;
3090 /* If the length can be computed at compile-time, return it. */
3091 len = c_strlen (src, 0);
3093 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3095 /* If the length can be computed at compile-time and is constant
3096 integer, but there are side-effects in src, evaluate
3097 src for side-effects, then return len.
3098 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3099 can be optimized into: i++; x = 3; */
3100 len = c_strlen (src, 1);
3101 if (len && TREE_CODE (len) == INTEGER_CST)
3103 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3104 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3107 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3109 /* If SRC is not a pointer type, don't do this operation inline. */
3113 /* Bail out if we can't compute strlen in the right mode. */
3114 while (insn_mode != VOIDmode)
3116 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3117 if (icode != CODE_FOR_nothing)
3120 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3122 if (insn_mode == VOIDmode)
3125 /* Make a place to write the result of the instruction. */
3129 && GET_MODE (result) == insn_mode
3130 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3131 result = gen_reg_rtx (insn_mode);
3133 /* Make a place to hold the source address. We will not expand
3134 the actual source until we are sure that the expansion will
3135 not fail -- there are trees that cannot be expanded twice. */
3136 src_reg = gen_reg_rtx (Pmode);
3138 /* Mark the beginning of the strlen sequence so we can emit the
3139 source operand later. */
3140 before_strlen = get_last_insn ();
3142 char_rtx = const0_rtx;
3143 char_mode = insn_data[(int) icode].operand[2].mode;
3144 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3146 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3148 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3149 char_rtx, GEN_INT (align));
3154 /* Now that we are assured of success, expand the source. */
3156 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3158 emit_move_insn (src_reg, pat);
3163 emit_insn_after (pat, before_strlen);
3165 emit_insn_before (pat, get_insns ());
3167 /* Return the value in the proper mode for this function. */
3168 if (GET_MODE (result) == target_mode)
3170 else if (target != 0)
3171 convert_move (target, result, 0);
3173 target = convert_to_mode (target_mode, result, 0);
3179 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3180 caller should emit a normal call, otherwise try to get the result
3181 in TARGET, if convenient (and in mode MODE if that's convenient). */
3184 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3186 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3188 tree type = TREE_TYPE (exp);
3189 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3190 CALL_EXPR_ARG (exp, 1), type);
3192 return expand_expr (result, target, mode, EXPAND_NORMAL);
3197 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3198 caller should emit a normal call, otherwise try to get the result
3199 in TARGET, if convenient (and in mode MODE if that's convenient). */
3202 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3204 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3206 tree type = TREE_TYPE (exp);
3207 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3208 CALL_EXPR_ARG (exp, 1), type);
3210 return expand_expr (result, target, mode, EXPAND_NORMAL);
3212 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3217 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3218 caller should emit a normal call, otherwise try to get the result
3219 in TARGET, if convenient (and in mode MODE if that's convenient). */
3222 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3224 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3226 tree type = TREE_TYPE (exp);
3227 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3228 CALL_EXPR_ARG (exp, 1), type);
3230 return expand_expr (result, target, mode, EXPAND_NORMAL);
3235 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3236 caller should emit a normal call, otherwise try to get the result
3237 in TARGET, if convenient (and in mode MODE if that's convenient). */
3240 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3242 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3244 tree type = TREE_TYPE (exp);
3245 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3246 CALL_EXPR_ARG (exp, 1), type);
3248 return expand_expr (result, target, mode, EXPAND_NORMAL);
3253 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3254 bytes from constant string DATA + OFFSET and return it as target
3258 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3259 enum machine_mode mode)
3261 const char *str = (const char *) data;
3263 gcc_assert (offset >= 0
3264 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3265 <= strlen (str) + 1));
3267 return c_readstr (str + offset, mode);
3270 /* Expand a call EXP to the memcpy builtin.
3271 Return NULL_RTX if we failed, the caller should emit a normal call,
3272 otherwise try to get the result in TARGET, if convenient (and in
3273 mode MODE if that's convenient). */
3276 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3278 tree fndecl = get_callee_fndecl (exp);
3280 if (!validate_arglist (exp,
3281 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3285 tree dest = CALL_EXPR_ARG (exp, 0);
3286 tree src = CALL_EXPR_ARG (exp, 1);
3287 tree len = CALL_EXPR_ARG (exp, 2);
3288 const char *src_str;
3289 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3290 unsigned int dest_align
3291 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3292 rtx dest_mem, src_mem, dest_addr, len_rtx;
3293 tree result = fold_builtin_memory_op (dest, src, len,
3294 TREE_TYPE (TREE_TYPE (fndecl)),
3296 HOST_WIDE_INT expected_size = -1;
3297 unsigned int expected_align = 0;
3301 while (TREE_CODE (result) == COMPOUND_EXPR)
3303 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3305 result = TREE_OPERAND (result, 1);
3307 return expand_expr (result, target, mode, EXPAND_NORMAL);
3310 /* If DEST is not a pointer type, call the normal function. */
3311 if (dest_align == 0)
3314 /* If either SRC is not a pointer type, don't do this
3315 operation in-line. */
3319 stringop_block_profile (exp, &expected_align, &expected_size);
3320 if (expected_align < dest_align)
3321 expected_align = dest_align;
3322 dest_mem = get_memory_rtx (dest, len);
3323 set_mem_align (dest_mem, dest_align);
3324 len_rtx = expand_normal (len);
3325 src_str = c_getstr (src);
3327 /* If SRC is a string constant and block move would be done
3328 by pieces, we can avoid loading the string from memory
3329 and only stored the computed constants. */
3331 && GET_CODE (len_rtx) == CONST_INT
3332 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3333 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3334 (void *) src_str, dest_align))
3336 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3337 builtin_memcpy_read_str,
3338 (void *) src_str, dest_align, 0);
3339 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3340 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3344 src_mem = get_memory_rtx (src, len);
3345 set_mem_align (src_mem, src_align);
3347 /* Copy word part most expediently. */
3348 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3349 CALL_EXPR_TAILCALL (exp)
3350 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3351 expected_align, expected_size);
3355 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3356 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3362 /* Expand a call EXP to the mempcpy builtin.
3363 Return NULL_RTX if we failed; the caller should emit a normal call,
3364 otherwise try to get the result in TARGET, if convenient (and in
3365 mode MODE if that's convenient). If ENDP is 0 return the
3366 destination pointer, if ENDP is 1 return the end pointer ala
3367 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3371 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3373 if (!validate_arglist (exp,
3374 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3378 tree dest = CALL_EXPR_ARG (exp, 0);
3379 tree src = CALL_EXPR_ARG (exp, 1);
3380 tree len = CALL_EXPR_ARG (exp, 2);
3381 return expand_builtin_mempcpy_args (dest, src, len,
3383 target, mode, /*endp=*/ 1);
3387 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3388 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3389 so that this can also be called without constructing an actual CALL_EXPR.
3390 TYPE is the return type of the call. The other arguments and return value
3391 are the same as for expand_builtin_mempcpy. */
3394 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3395 rtx target, enum machine_mode mode, int endp)
3397 /* If return value is ignored, transform mempcpy into memcpy. */
3398 if (target == const0_rtx)
3400 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3405 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3406 target, mode, EXPAND_NORMAL);
3410 const char *src_str;
3411 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3412 unsigned int dest_align
3413 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3414 rtx dest_mem, src_mem, len_rtx;
3415 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3419 while (TREE_CODE (result) == COMPOUND_EXPR)
3421 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3423 result = TREE_OPERAND (result, 1);
3425 return expand_expr (result, target, mode, EXPAND_NORMAL);
3428 /* If either SRC or DEST is not a pointer type, don't do this
3429 operation in-line. */
3430 if (dest_align == 0 || src_align == 0)
3433 /* If LEN is not constant, call the normal function. */
3434 if (! host_integerp (len, 1))
3437 len_rtx = expand_normal (len);
3438 src_str = c_getstr (src);
3440 /* If SRC is a string constant and block move would be done
3441 by pieces, we can avoid loading the string from memory
3442 and only stored the computed constants. */
3444 && GET_CODE (len_rtx) == CONST_INT
3445 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3446 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3447 (void *) src_str, dest_align))
3449 dest_mem = get_memory_rtx (dest, len);
3450 set_mem_align (dest_mem, dest_align);
3451 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3452 builtin_memcpy_read_str,
3453 (void *) src_str, dest_align, endp);
3454 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3455 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3459 if (GET_CODE (len_rtx) == CONST_INT
3460 && can_move_by_pieces (INTVAL (len_rtx),
3461 MIN (dest_align, src_align)))
3463 dest_mem = get_memory_rtx (dest, len);
3464 set_mem_align (dest_mem, dest_align);
3465 src_mem = get_memory_rtx (src, len);
3466 set_mem_align (src_mem, src_align);
3467 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3468 MIN (dest_align, src_align), endp);
3469 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3470 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3478 /* Expand expression EXP, which is a call to the memmove builtin. Return
3479 NULL_RTX if we failed; the caller should emit a normal call. */
3482 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3484 if (!validate_arglist (exp,
3485 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3489 tree dest = CALL_EXPR_ARG (exp, 0);
3490 tree src = CALL_EXPR_ARG (exp, 1);
3491 tree len = CALL_EXPR_ARG (exp, 2);
3492 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3493 target, mode, ignore);
3497 /* Helper function to do the actual work for expand_builtin_memmove. The
3498 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3499 so that this can also be called without constructing an actual CALL_EXPR.
3500 TYPE is the return type of the call. The other arguments and return value
3501 are the same as for expand_builtin_memmove. */
3504 expand_builtin_memmove_args (tree dest, tree src, tree len,
3505 tree type, rtx target, enum machine_mode mode,
3508 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3512 STRIP_TYPE_NOPS (result);
3513 while (TREE_CODE (result) == COMPOUND_EXPR)
3515 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3517 result = TREE_OPERAND (result, 1);
3519 return expand_expr (result, target, mode, EXPAND_NORMAL);
3522 /* Otherwise, call the normal function. */
3526 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3527 NULL_RTX if we failed the caller should emit a normal call. */
3530 expand_builtin_bcopy (tree exp, int ignore)
3532 tree type = TREE_TYPE (exp);
3533 tree src, dest, size;
3535 if (!validate_arglist (exp,
3536 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3539 src = CALL_EXPR_ARG (exp, 0);
3540 dest = CALL_EXPR_ARG (exp, 1);
3541 size = CALL_EXPR_ARG (exp, 2);
3543 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3544 This is done this way so that if it isn't expanded inline, we fall
3545 back to calling bcopy instead of memmove. */
3546 return expand_builtin_memmove_args (dest, src,
3547 fold_convert (sizetype, size),
3548 type, const0_rtx, VOIDmode,
3553 # define HAVE_movstr 0
3554 # define CODE_FOR_movstr CODE_FOR_nothing
3557 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3558 we failed, the caller should emit a normal call, otherwise try to
3559 get the result in TARGET, if convenient. If ENDP is 0 return the
3560 destination pointer, if ENDP is 1 return the end pointer ala
3561 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3565 expand_movstr (tree dest, tree src, rtx target, int endp)
3571 const struct insn_data * data;
3576 dest_mem = get_memory_rtx (dest, NULL);
3577 src_mem = get_memory_rtx (src, NULL);
3580 target = force_reg (Pmode, XEXP (dest_mem, 0));
3581 dest_mem = replace_equiv_address (dest_mem, target);
3582 end = gen_reg_rtx (Pmode);
3586 if (target == 0 || target == const0_rtx)
3588 end = gen_reg_rtx (Pmode);
3596 data = insn_data + CODE_FOR_movstr;
3598 if (data->operand[0].mode != VOIDmode)
3599 end = gen_lowpart (data->operand[0].mode, end);
3601 insn = data->genfun (end, dest_mem, src_mem);
3607 /* movstr is supposed to set end to the address of the NUL
3608 terminator. If the caller requested a mempcpy-like return value,
3610 if (endp == 1 && target != const0_rtx)
3612 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3613 emit_move_insn (target, force_operand (tem, NULL_RTX));
3619 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3620 NULL_RTX if we failed the caller should emit a normal call, otherwise
3621 try to get the result in TARGET, if convenient (and in mode MODE if that's
3625 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3627 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3629 tree dest = CALL_EXPR_ARG (exp, 0);
3630 tree src = CALL_EXPR_ARG (exp, 1);
3631 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3636 /* Helper function to do the actual work for expand_builtin_strcpy. The
3637 arguments to the builtin_strcpy call DEST and SRC are broken out
3638 so that this can also be called without constructing an actual CALL_EXPR.
3639 The other arguments and return value are the same as for
3640 expand_builtin_strcpy. */
3643 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3644 rtx target, enum machine_mode mode)
3646 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3648 return expand_expr (result, target, mode, EXPAND_NORMAL);
3649 return expand_movstr (dest, src, target, /*endp=*/0);
3653 /* Expand a call EXP to the stpcpy builtin.
3654 Return NULL_RTX if we failed the caller should emit a normal call,
3655 otherwise try to get the result in TARGET, if convenient (and in
3656 mode MODE if that's convenient). */
3659 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3663 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3666 dst = CALL_EXPR_ARG (exp, 0);
3667 src = CALL_EXPR_ARG (exp, 1);
3669 /* If return value is ignored, transform stpcpy into strcpy. */
3670 if (target == const0_rtx)
3672 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3676 return expand_expr (build_call_expr (fn, 2, dst, src),
3677 target, mode, EXPAND_NORMAL);
3684 /* Ensure we get an actual string whose length can be evaluated at
3685 compile-time, not an expression containing a string. This is
3686 because the latter will potentially produce pessimized code
3687 when used to produce the return value. */
3688 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3689 return expand_movstr (dst, src, target, /*endp=*/2);
3691 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3692 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3693 target, mode, /*endp=*/2);
3698 if (TREE_CODE (len) == INTEGER_CST)
3700 rtx len_rtx = expand_normal (len);
3702 if (GET_CODE (len_rtx) == CONST_INT)
3704 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3705 dst, src, target, mode);
3711 if (mode != VOIDmode)
3712 target = gen_reg_rtx (mode);
3714 target = gen_reg_rtx (GET_MODE (ret));
3716 if (GET_MODE (target) != GET_MODE (ret))
3717 ret = gen_lowpart (GET_MODE (target), ret);
3719 ret = plus_constant (ret, INTVAL (len_rtx));
3720 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3728 return expand_movstr (dst, src, target, /*endp=*/2);
3732 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3733 bytes from constant string DATA + OFFSET and return it as target
3737 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3738 enum machine_mode mode)
3740 const char *str = (const char *) data;
3742 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3745 return c_readstr (str + offset, mode);
3748 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3749 NULL_RTX if we failed the caller should emit a normal call. */
3752 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3754 tree fndecl = get_callee_fndecl (exp);
3756 if (validate_arglist (exp,
3757 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3759 tree dest = CALL_EXPR_ARG (exp, 0);
3760 tree src = CALL_EXPR_ARG (exp, 1);
3761 tree len = CALL_EXPR_ARG (exp, 2);
3762 tree slen = c_strlen (src, 1);
3763 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3767 while (TREE_CODE (result) == COMPOUND_EXPR)
3769 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3771 result = TREE_OPERAND (result, 1);
3773 return expand_expr (result, target, mode, EXPAND_NORMAL);
3776 /* We must be passed a constant len and src parameter. */
3777 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3780 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3782 /* We're required to pad with trailing zeros if the requested
3783 len is greater than strlen(s2)+1. In that case try to
3784 use store_by_pieces, if it fails, punt. */
3785 if (tree_int_cst_lt (slen, len))
3787 unsigned int dest_align
3788 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3789 const char *p = c_getstr (src);
3792 if (!p || dest_align == 0 || !host_integerp (len, 1)
3793 || !can_store_by_pieces (tree_low_cst (len, 1),
3794 builtin_strncpy_read_str,
3795 (void *) p, dest_align))
3798 dest_mem = get_memory_rtx (dest, len);
3799 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3800 builtin_strncpy_read_str,
3801 (void *) p, dest_align, 0);
3802 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3803 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3810 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3811 bytes from constant string DATA + OFFSET and return it as target
3815 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3816 enum machine_mode mode)
3818 const char *c = (const char *) data;
3819 char *p = alloca (GET_MODE_SIZE (mode));
3821 memset (p, *c, GET_MODE_SIZE (mode));
3823 return c_readstr (p, mode);
3826 /* Callback routine for store_by_pieces. Return the RTL of a register
3827 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3828 char value given in the RTL register data. For example, if mode is
3829 4 bytes wide, return the RTL for 0x01010101*data. */
3832 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3833 enum machine_mode mode)
3839 size = GET_MODE_SIZE (mode);
3844 memset (p, 1, size);
3845 coeff = c_readstr (p, mode);
3847 target = convert_to_mode (mode, (rtx) data, 1);
3848 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3849 return force_reg (mode, target);
3852 /* Expand expression EXP, which is a call to the memset builtin. Return
3853 NULL_RTX if we failed the caller should emit a normal call, otherwise
3854 try to get the result in TARGET, if convenient (and in mode MODE if that's
3858 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3860 if (!validate_arglist (exp,
3861 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3865 tree dest = CALL_EXPR_ARG (exp, 0);
3866 tree val = CALL_EXPR_ARG (exp, 1);
3867 tree len = CALL_EXPR_ARG (exp, 2);
3868 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3872 /* Helper function to do the actual work for expand_builtin_memset. The
3873 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3874 so that this can also be called without constructing an actual CALL_EXPR.
3875 The other arguments and return value are the same as for
3876 expand_builtin_memset. */
3879 expand_builtin_memset_args (tree dest, tree val, tree len,
3880 rtx target, enum machine_mode mode, tree orig_exp)
3883 enum built_in_function fcode;
3885 unsigned int dest_align;
3886 rtx dest_mem, dest_addr, len_rtx;
3887 HOST_WIDE_INT expected_size = -1;
3888 unsigned int expected_align = 0;
3890 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3892 /* If DEST is not a pointer type, don't do this operation in-line. */
3893 if (dest_align == 0)
3896 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3897 if (expected_align < dest_align)
3898 expected_align = dest_align;
3900 /* If the LEN parameter is zero, return DEST. */
3901 if (integer_zerop (len))
3903 /* Evaluate and ignore VAL in case it has side-effects. */
3904 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3905 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3908 /* Stabilize the arguments in case we fail. */
3909 dest = builtin_save_expr (dest);
3910 val = builtin_save_expr (val);
3911 len = builtin_save_expr (len);
3913 len_rtx = expand_normal (len);
3914 dest_mem = get_memory_rtx (dest, len);
3916 if (TREE_CODE (val) != INTEGER_CST)
3920 val_rtx = expand_normal (val);
3921 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3924 /* Assume that we can memset by pieces if we can store
3925 * the coefficients by pieces (in the required modes).
3926 * We can't pass builtin_memset_gen_str as that emits RTL. */
3928 if (host_integerp (len, 1)
3929 && !(optimize_size && tree_low_cst (len, 1) > 1)
3930 && can_store_by_pieces (tree_low_cst (len, 1),
3931 builtin_memset_read_str, &c, dest_align))
3933 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3935 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3936 builtin_memset_gen_str, val_rtx, dest_align, 0);
3938 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3939 dest_align, expected_align,
3943 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3944 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3948 if (target_char_cast (val, &c))
3953 if (host_integerp (len, 1)
3954 && !(optimize_size && tree_low_cst (len, 1) > 1)
3955 && can_store_by_pieces (tree_low_cst (len, 1),
3956 builtin_memset_read_str, &c, dest_align))
3957 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3958 builtin_memset_read_str, &c, dest_align, 0);
3959 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3960 dest_align, expected_align,
3964 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3965 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3969 set_mem_align (dest_mem, dest_align);
3970 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3971 CALL_EXPR_TAILCALL (orig_exp)
3972 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3973 expected_align, expected_size);
3977 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3978 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3984 fndecl = get_callee_fndecl (orig_exp);
3985 fcode = DECL_FUNCTION_CODE (fndecl);
3986 if (fcode == BUILT_IN_MEMSET)
3987 fn = build_call_expr (fndecl, 3, dest, val, len);
3988 else if (fcode == BUILT_IN_BZERO)
3989 fn = build_call_expr (fndecl, 2, dest, len);
3992 if (TREE_CODE (fn) == CALL_EXPR)
3993 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3994 return expand_call (fn, target, target == const0_rtx);
3997 /* Expand expression EXP, which is a call to the bzero builtin. Return
3998 NULL_RTX if we failed the caller should emit a normal call. */
4001 expand_builtin_bzero (tree exp)
4005 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4008 dest = CALL_EXPR_ARG (exp, 0);
4009 size = CALL_EXPR_ARG (exp, 1);
4011 /* New argument list transforming bzero(ptr x, int y) to
4012 memset(ptr x, int 0, size_t y). This is done this way
4013 so that if it isn't expanded inline, we fallback to
4014 calling bzero instead of memset. */
4016 return expand_builtin_memset_args (dest, integer_zero_node,
4017 fold_convert (sizetype, size),
4018 const0_rtx, VOIDmode, exp);
4021 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4022 caller should emit a normal call, otherwise try to get the result
4023 in TARGET, if convenient (and in mode MODE if that's convenient). */
4026 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4028 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4029 INTEGER_TYPE, VOID_TYPE))
4031 tree type = TREE_TYPE (exp);
4032 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4033 CALL_EXPR_ARG (exp, 1),
4034 CALL_EXPR_ARG (exp, 2), type);
4036 return expand_expr (result, target, mode, EXPAND_NORMAL);
4041 /* Expand expression EXP, which is a call to the memcmp built-in function.
4042 Return NULL_RTX if we failed and the
4043 caller should emit a normal call, otherwise try to get the result in
4044 TARGET, if convenient (and in mode MODE, if that's convenient). */
4047 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4049 if (!validate_arglist (exp,
4050 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4054 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4055 CALL_EXPR_ARG (exp, 1),
4056 CALL_EXPR_ARG (exp, 2));
4058 return expand_expr (result, target, mode, EXPAND_NORMAL);
4061 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4063 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4066 tree arg1 = CALL_EXPR_ARG (exp, 0);
4067 tree arg2 = CALL_EXPR_ARG (exp, 1);
4068 tree len = CALL_EXPR_ARG (exp, 2);
4071 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4073 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4074 enum machine_mode insn_mode;
4076 #ifdef HAVE_cmpmemsi
4078 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4081 #ifdef HAVE_cmpstrnsi
4083 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4088 /* If we don't have POINTER_TYPE, call the function. */
4089 if (arg1_align == 0 || arg2_align == 0)
4092 /* Make a place to write the result of the instruction. */
4095 && REG_P (result) && GET_MODE (result) == insn_mode
4096 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4097 result = gen_reg_rtx (insn_mode);
4099 arg1_rtx = get_memory_rtx (arg1, len);
4100 arg2_rtx = get_memory_rtx (arg2, len);
4101 arg3_rtx = expand_normal (len);
4103 /* Set MEM_SIZE as appropriate. */
4104 if (GET_CODE (arg3_rtx) == CONST_INT)
4106 set_mem_size (arg1_rtx, arg3_rtx);
4107 set_mem_size (arg2_rtx, arg3_rtx);
4110 #ifdef HAVE_cmpmemsi
4112 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4113 GEN_INT (MIN (arg1_align, arg2_align)));
4116 #ifdef HAVE_cmpstrnsi
4118 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4119 GEN_INT (MIN (arg1_align, arg2_align)));
4127 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
4128 TYPE_MODE (integer_type_node), 3,
4129 XEXP (arg1_rtx, 0), Pmode,
4130 XEXP (arg2_rtx, 0), Pmode,
4131 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4132 TYPE_UNSIGNED (sizetype)),
4133 TYPE_MODE (sizetype));
4135 /* Return the value in the proper mode for this function. */
4136 mode = TYPE_MODE (TREE_TYPE (exp));
4137 if (GET_MODE (result) == mode)
4139 else if (target != 0)
4141 convert_move (target, result, 0);
4145 return convert_to_mode (mode, result, 0);
4152 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4153 if we failed the caller should emit a normal call, otherwise try to get
4154 the result in TARGET, if convenient. */
4157 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4159 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4163 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4164 CALL_EXPR_ARG (exp, 1));
4166 return expand_expr (result, target, mode, EXPAND_NORMAL);
4169 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4170 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4171 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4173 rtx arg1_rtx, arg2_rtx;
4174 rtx result, insn = NULL_RTX;
4176 tree arg1 = CALL_EXPR_ARG (exp, 0);
4177 tree arg2 = CALL_EXPR_ARG (exp, 1);
4180 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4182 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4184 /* If we don't have POINTER_TYPE, call the function. */
4185 if (arg1_align == 0 || arg2_align == 0)
4188 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4189 arg1 = builtin_save_expr (arg1);
4190 arg2 = builtin_save_expr (arg2);
4192 arg1_rtx = get_memory_rtx (arg1, NULL);
4193 arg2_rtx = get_memory_rtx (arg2, NULL);
4195 #ifdef HAVE_cmpstrsi
4196 /* Try to call cmpstrsi. */
4199 enum machine_mode insn_mode
4200 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4202 /* Make a place to write the result of the instruction. */
4205 && REG_P (result) && GET_MODE (result) == insn_mode
4206 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4207 result = gen_reg_rtx (insn_mode);
4209 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4210 GEN_INT (MIN (arg1_align, arg2_align)));
4213 #ifdef HAVE_cmpstrnsi
4214 /* Try to determine at least one length and call cmpstrnsi. */
4215 if (!insn && HAVE_cmpstrnsi)
4220 enum machine_mode insn_mode
4221 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4222 tree len1 = c_strlen (arg1, 1);
4223 tree len2 = c_strlen (arg2, 1);
4226 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4228 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4230 /* If we don't have a constant length for the first, use the length
4231 of the second, if we know it. We don't require a constant for
4232 this case; some cost analysis could be done if both are available
4233 but neither is constant. For now, assume they're equally cheap,
4234 unless one has side effects. If both strings have constant lengths,
4241 else if (TREE_SIDE_EFFECTS (len1))
4243 else if (TREE_SIDE_EFFECTS (len2))
4245 else if (TREE_CODE (len1) != INTEGER_CST)
4247 else if (TREE_CODE (len2) != INTEGER_CST)
4249 else if (tree_int_cst_lt (len1, len2))
4254 /* If both arguments have side effects, we cannot optimize. */
4255 if (!len || TREE_SIDE_EFFECTS (len))
4258 arg3_rtx = expand_normal (len);
4260 /* Make a place to write the result of the instruction. */
4263 && REG_P (result) && GET_MODE (result) == insn_mode
4264 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4265 result = gen_reg_rtx (insn_mode);
4267 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4268 GEN_INT (MIN (arg1_align, arg2_align)));
4276 /* Return the value in the proper mode for this function. */
4277 mode = TYPE_MODE (TREE_TYPE (exp));
4278 if (GET_MODE (result) == mode)
4281 return convert_to_mode (mode, result, 0);
4282 convert_move (target, result, 0);
4286 /* Expand the library call ourselves using a stabilized argument
4287 list to avoid re-evaluating the function's arguments twice. */
4288 #ifdef HAVE_cmpstrnsi
4291 fndecl = get_callee_fndecl (exp);
4292 fn = build_call_expr (fndecl, 2, arg1, arg2);
4293 if (TREE_CODE (fn) == CALL_EXPR)
4294 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4295 return expand_call (fn, target, target == const0_rtx);
4301 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4302 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4303 the result in TARGET, if convenient. */
4306 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4308 if (!validate_arglist (exp,
4309 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4313 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4314 CALL_EXPR_ARG (exp, 1),
4315 CALL_EXPR_ARG (exp, 2));
4317 return expand_expr (result, target, mode, EXPAND_NORMAL);
4320 /* If c_strlen can determine an expression for one of the string
4321 lengths, and it doesn't have side effects, then emit cmpstrnsi
4322 using length MIN(strlen(string)+1, arg3). */
4323 #ifdef HAVE_cmpstrnsi
4326 tree len, len1, len2;
4327 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4330 tree arg1 = CALL_EXPR_ARG (exp, 0);
4331 tree arg2 = CALL_EXPR_ARG (exp, 1);
4332 tree arg3 = CALL_EXPR_ARG (exp, 2);
4335 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4337 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4338 enum machine_mode insn_mode
4339 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4341 len1 = c_strlen (arg1, 1);
4342 len2 = c_strlen (arg2, 1);
4345 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4347 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4349 /* If we don't have a constant length for the first, use the length
4350 of the second, if we know it. We don't require a constant for
4351 this case; some cost analysis could be done if both are available
4352 but neither is constant. For now, assume they're equally cheap,
4353 unless one has side effects. If both strings have constant lengths,
4360 else if (TREE_SIDE_EFFECTS (len1))
4362 else if (TREE_SIDE_EFFECTS (len2))
4364 else if (TREE_CODE (len1) != INTEGER_CST)
4366 else if (TREE_CODE (len2) != INTEGER_CST)
4368 else if (tree_int_cst_lt (len1, len2))
4373 /* If both arguments have side effects, we cannot optimize. */
4374 if (!len || TREE_SIDE_EFFECTS (len))
4377 /* The actual new length parameter is MIN(len,arg3). */
4378 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4379 fold_convert (TREE_TYPE (len), arg3));
4381 /* If we don't have POINTER_TYPE, call the function. */
4382 if (arg1_align == 0 || arg2_align == 0)
4385 /* Make a place to write the result of the instruction. */
4388 && REG_P (result) && GET_MODE (result) == insn_mode
4389 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4390 result = gen_reg_rtx (insn_mode);
4392 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4393 arg1 = builtin_save_expr (arg1);
4394 arg2 = builtin_save_expr (arg2);
4395 len = builtin_save_expr (len);
4397 arg1_rtx = get_memory_rtx (arg1, len);
4398 arg2_rtx = get_memory_rtx (arg2, len);
4399 arg3_rtx = expand_normal (len);
4400 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4401 GEN_INT (MIN (arg1_align, arg2_align)));
4406 /* Return the value in the proper mode for this function. */
4407 mode = TYPE_MODE (TREE_TYPE (exp));
4408 if (GET_MODE (result) == mode)
4411 return convert_to_mode (mode, result, 0);
4412 convert_move (target, result, 0);
4416 /* Expand the library call ourselves using a stabilized argument
4417 list to avoid re-evaluating the function's arguments twice. */
4418 fndecl = get_callee_fndecl (exp);
4419 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4420 if (TREE_CODE (fn) == CALL_EXPR)
4421 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4422 return expand_call (fn, target, target == const0_rtx);
4428 /* Expand expression EXP, which is a call to the strcat builtin.
4429 Return NULL_RTX if we failed the caller should emit a normal call,
4430 otherwise try to get the result in TARGET, if convenient. */
4433 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4435 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4439 tree dst = CALL_EXPR_ARG (exp, 0);
4440 tree src = CALL_EXPR_ARG (exp, 1);
4441 const char *p = c_getstr (src);
4443 /* If the string length is zero, return the dst parameter. */
4444 if (p && *p == '\0')
4445 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4449 /* See if we can store by pieces into (dst + strlen(dst)). */
4450 tree newsrc, newdst,
4451 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4454 /* Stabilize the argument list. */
4455 newsrc = builtin_save_expr (src);
4456 dst = builtin_save_expr (dst);
4460 /* Create strlen (dst). */
4461 newdst = build_call_expr (strlen_fn, 1, dst);
4462 /* Create (dst p+ strlen (dst)). */
4464 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4465 newdst = builtin_save_expr (newdst);
4467 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4469 end_sequence (); /* Stop sequence. */
4473 /* Output the entire sequence. */
4474 insns = get_insns ();
4478 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4485 /* Expand expression EXP, which is a call to the strncat builtin.
4486 Return NULL_RTX if we failed the caller should emit a normal call,
4487 otherwise try to get the result in TARGET, if convenient. */
4490 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4492 if (validate_arglist (exp,
4493 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4495 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4496 CALL_EXPR_ARG (exp, 1),
4497 CALL_EXPR_ARG (exp, 2));
4499 return expand_expr (result, target, mode, EXPAND_NORMAL);
4504 /* Expand expression EXP, which is a call to the strspn builtin.
4505 Return NULL_RTX if we failed the caller should emit a normal call,
4506 otherwise try to get the result in TARGET, if convenient. */
4509 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4511 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4513 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4514 CALL_EXPR_ARG (exp, 1));
4516 return expand_expr (result, target, mode, EXPAND_NORMAL);
4521 /* Expand expression EXP, which is a call to the strcspn builtin.
4522 Return NULL_RTX if we failed the caller should emit a normal call,
4523 otherwise try to get the result in TARGET, if convenient. */
4526 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4528 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4530 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4531 CALL_EXPR_ARG (exp, 1));
4533 return expand_expr (result, target, mode, EXPAND_NORMAL);
4538 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4539 if that's convenient. */
4542 expand_builtin_saveregs (void)
4546 /* Don't do __builtin_saveregs more than once in a function.
4547 Save the result of the first call and reuse it. */
4548 if (saveregs_value != 0)
4549 return saveregs_value;
4551 /* When this function is called, it means that registers must be
4552 saved on entry to this function. So we migrate the call to the
4553 first insn of this function. */
4557 /* Do whatever the machine needs done in this case. */
4558 val = targetm.calls.expand_builtin_saveregs ();
4563 saveregs_value = val;
4565 /* Put the insns after the NOTE that starts the function. If this
4566 is inside a start_sequence, make the outer-level insn chain current, so
4567 the code is placed at the start of the function. */
4568 push_topmost_sequence ();
4569 emit_insn_after (seq, entry_of_function ());
4570 pop_topmost_sequence ();
4575 /* __builtin_args_info (N) returns word N of the arg space info
4576 for the current function. The number and meanings of words
4577 is controlled by the definition of CUMULATIVE_ARGS. */
4580 expand_builtin_args_info (tree exp)
4582 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4583 int *word_ptr = (int *) ¤t_function_args_info;
4585 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4587 if (call_expr_nargs (exp) != 0)
4589 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4590 error ("argument of %<__builtin_args_info%> must be constant");
4593 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4595 if (wordnum < 0 || wordnum >= nwords)
4596 error ("argument of %<__builtin_args_info%> out of range");
4598 return GEN_INT (word_ptr[wordnum]);
4602 error ("missing argument in %<__builtin_args_info%>");
4607 /* Expand a call to __builtin_next_arg. */
4610 expand_builtin_next_arg (void)
4612 /* Checking arguments is already done in fold_builtin_next_arg
4613 that must be called before this function. */
4614 return expand_binop (ptr_mode, add_optab,
4615 current_function_internal_arg_pointer,
4616 current_function_arg_offset_rtx,
4617 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4620 /* Make it easier for the backends by protecting the valist argument
4621 from multiple evaluations. */
4624 stabilize_va_list (tree valist, int needs_lvalue)
4626 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4628 if (TREE_SIDE_EFFECTS (valist))
4629 valist = save_expr (valist);
4631 /* For this case, the backends will be expecting a pointer to
4632 TREE_TYPE (va_list_type_node), but it's possible we've
4633 actually been given an array (an actual va_list_type_node).
4635 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4637 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4638 valist = build_fold_addr_expr_with_type (valist, p1);
4647 if (! TREE_SIDE_EFFECTS (valist))
4650 pt = build_pointer_type (va_list_type_node);
4651 valist = fold_build1 (ADDR_EXPR, pt, valist);
4652 TREE_SIDE_EFFECTS (valist) = 1;
4655 if (TREE_SIDE_EFFECTS (valist))
4656 valist = save_expr (valist);
4657 valist = build_fold_indirect_ref (valist);
4663 /* The "standard" definition of va_list is void*. */
4666 std_build_builtin_va_list (void)
4668 return ptr_type_node;
4671 /* The "standard" implementation of va_start: just assign `nextarg' to
4675 std_expand_builtin_va_start (tree valist, rtx nextarg)
4677 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4678 convert_move (va_r, nextarg, 0);
4681 /* Expand EXP, a call to __builtin_va_start. */
4684 expand_builtin_va_start (tree exp)
4689 if (call_expr_nargs (exp) < 2)
4691 error ("too few arguments to function %<va_start%>");
4695 if (fold_builtin_next_arg (exp, true))
4698 nextarg = expand_builtin_next_arg ();
4699 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4701 #ifdef EXPAND_BUILTIN_VA_START
4702 EXPAND_BUILTIN_VA_START (valist, nextarg);
4704 std_expand_builtin_va_start (valist, nextarg);
4710 /* The "standard" implementation of va_arg: read the value from the
4711 current (padded) address and increment by the (padded) size. */
4714 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4716 tree addr, t, type_size, rounded_size, valist_tmp;
4717 unsigned HOST_WIDE_INT align, boundary;
4720 #ifdef ARGS_GROW_DOWNWARD
4721 /* All of the alignment and movement below is for args-grow-up machines.
4722 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4723 implement their own specialized gimplify_va_arg_expr routines. */
4727 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4729 type = build_pointer_type (type);
4731 align = PARM_BOUNDARY / BITS_PER_UNIT;
4732 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4734 /* Hoist the valist value into a temporary for the moment. */
4735 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4737 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4738 requires greater alignment, we must perform dynamic alignment. */
4739 if (boundary > align
4740 && !integer_zerop (TYPE_SIZE (type)))
4742 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4743 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4744 valist_tmp, size_int (boundary - 1)));
4745 gimplify_and_add (t, pre_p);
4747 t = fold_convert (sizetype, valist_tmp);
4748 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4749 fold_convert (TREE_TYPE (valist),
4750 fold_build2 (BIT_AND_EXPR, sizetype, t,
4751 size_int (-boundary))));
4752 gimplify_and_add (t, pre_p);
4757 /* If the actual alignment is less than the alignment of the type,
4758 adjust the type accordingly so that we don't assume strict alignment
4759 when deferencing the pointer. */
4760 boundary *= BITS_PER_UNIT;
4761 if (boundary < TYPE_ALIGN (type))
4763 type = build_variant_type_copy (type);
4764 TYPE_ALIGN (type) = boundary;
4767 /* Compute the rounded size of the type. */
4768 type_size = size_in_bytes (type);
4769 rounded_size = round_up (type_size, align);
4771 /* Reduce rounded_size so it's sharable with the postqueue. */
4772 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4776 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4778 /* Small args are padded downward. */
4779 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4780 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4781 size_binop (MINUS_EXPR, rounded_size, type_size));
4782 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4785 /* Compute new value for AP. */
4786 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4787 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4788 gimplify_and_add (t, pre_p);
4790 addr = fold_convert (build_pointer_type (type), addr);
4793 addr = build_va_arg_indirect_ref (addr);
4795 return build_va_arg_indirect_ref (addr);
4798 /* Build an indirect-ref expression over the given TREE, which represents a
4799 piece of a va_arg() expansion. */
4801 build_va_arg_indirect_ref (tree addr)
4803 addr = build_fold_indirect_ref (addr);
4805 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4811 /* Return a dummy expression of type TYPE in order to keep going after an
4815 dummy_object (tree type)
4817 tree t = build_int_cst (build_pointer_type (type), 0);
4818 return build1 (INDIRECT_REF, type, t);
4821 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4822 builtin function, but a very special sort of operator. */
4824 enum gimplify_status
4825 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4827 tree promoted_type, want_va_type, have_va_type;
4828 tree valist = TREE_OPERAND (*expr_p, 0);
4829 tree type = TREE_TYPE (*expr_p);
4832 /* Verify that valist is of the proper type. */
4833 want_va_type = va_list_type_node;
4834 have_va_type = TREE_TYPE (valist);
4836 if (have_va_type == error_mark_node)
4839 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4841 /* If va_list is an array type, the argument may have decayed
4842 to a pointer type, e.g. by being passed to another function.
4843 In that case, unwrap both types so that we can compare the
4844 underlying records. */
4845 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4846 || POINTER_TYPE_P (have_va_type))
4848 want_va_type = TREE_TYPE (want_va_type);
4849 have_va_type = TREE_TYPE (have_va_type);
4853 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4855 error ("first argument to %<va_arg%> not of type %<va_list%>");
4859 /* Generate a diagnostic for requesting data of a type that cannot
4860 be passed through `...' due to type promotion at the call site. */
4861 else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4864 static bool gave_help;
4866 /* Unfortunately, this is merely undefined, rather than a constraint
4867 violation, so we cannot make this an error. If this call is never
4868 executed, the program is still strictly conforming. */
4869 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4870 type, promoted_type);
4874 warning (0, "(so you should pass %qT not %qT to %<va_arg%>)",
4875 promoted_type, type);
4878 /* We can, however, treat "undefined" any way we please.
4879 Call abort to encourage the user to fix the program. */
4880 inform ("if this code is reached, the program will abort");
4881 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4882 append_to_statement_list (t, pre_p);
4884 /* This is dead code, but go ahead and finish so that the
4885 mode of the result comes out right. */
4886 *expr_p = dummy_object (type);
4891 /* Make it easier for the backends by protecting the valist argument
4892 from multiple evaluations. */
4893 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4895 /* For this case, the backends will be expecting a pointer to
4896 TREE_TYPE (va_list_type_node), but it's possible we've
4897 actually been given an array (an actual va_list_type_node).
4899 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4901 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4902 valist = build_fold_addr_expr_with_type (valist, p1);
4904 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4907 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4909 if (!targetm.gimplify_va_arg_expr)
4910 /* FIXME:Once most targets are converted we should merely
4911 assert this is non-null. */
4914 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4919 /* Expand EXP, a call to __builtin_va_end. */
4922 expand_builtin_va_end (tree exp)
4924 tree valist = CALL_EXPR_ARG (exp, 0);
4926 /* Evaluate for side effects, if needed. I hate macros that don't
4928 if (TREE_SIDE_EFFECTS (valist))
4929 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4934 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4935 builtin rather than just as an assignment in stdarg.h because of the
4936 nastiness of array-type va_list types. */
4939 expand_builtin_va_copy (tree exp)
4943 dst = CALL_EXPR_ARG (exp, 0);
4944 src = CALL_EXPR_ARG (exp, 1);
4946 dst = stabilize_va_list (dst, 1);
4947 src = stabilize_va_list (src, 0);
4949 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4951 t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4952 TREE_SIDE_EFFECTS (t) = 1;
4953 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4957 rtx dstb, srcb, size;
4959 /* Evaluate to pointers. */
4960 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4961 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4962 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4963 VOIDmode, EXPAND_NORMAL);
4965 dstb = convert_memory_address (Pmode, dstb);
4966 srcb = convert_memory_address (Pmode, srcb);
4968 /* "Dereference" to BLKmode memories. */
4969 dstb = gen_rtx_MEM (BLKmode, dstb);
4970 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4971 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
4972 srcb = gen_rtx_MEM (BLKmode, srcb);
4973 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4974 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
4977 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4983 /* Expand a call to one of the builtin functions __builtin_frame_address or
4984 __builtin_return_address. */
4987 expand_builtin_frame_address (tree fndecl, tree exp)
4989 /* The argument must be a nonnegative integer constant.
4990 It counts the number of frames to scan up the stack.
4991 The value is the return address saved in that frame. */
4992 if (call_expr_nargs (exp) == 0)
4993 /* Warning about missing arg was already issued. */
4995 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4997 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4998 error ("invalid argument to %<__builtin_frame_address%>");
5000 error ("invalid argument to %<__builtin_return_address%>");
5006 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5007 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5009 /* Some ports cannot access arbitrary stack frames. */
5012 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5013 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5015 warning (0, "unsupported argument to %<__builtin_return_address%>");
5019 /* For __builtin_frame_address, return what we've got. */
5020 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5024 && ! CONSTANT_P (tem))
5025 tem = copy_to_mode_reg (Pmode, tem);
5030 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5031 we failed and the caller should emit a normal call, otherwise try to get
5032 the result in TARGET, if convenient. */
5035 expand_builtin_alloca (tree exp, rtx target)
5040 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5041 should always expand to function calls. These can be intercepted
5046 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5049 /* Compute the argument. */
5050 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5052 /* Allocate the desired space. */
5053 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5054 result = convert_memory_address (ptr_mode, result);
5059 /* Expand a call to a bswap builtin with argument ARG0. MODE
5060 is the mode to expand with. */
5063 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5065 enum machine_mode mode;
5069 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5072 arg = CALL_EXPR_ARG (exp, 0);
5073 mode = TYPE_MODE (TREE_TYPE (arg));
5074 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5076 target = expand_unop (mode, bswap_optab, op0, target, 1);
5078 gcc_assert (target);
5080 return convert_to_mode (mode, target, 0);
5083 /* Expand a call to a unary builtin in EXP.
5084 Return NULL_RTX if a normal call should be emitted rather than expanding the
5085 function in-line. If convenient, the result should be placed in TARGET.
5086 SUBTARGET may be used as the target for computing one of EXP's operands. */
5089 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5090 rtx subtarget, optab op_optab)
5094 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5097 /* Compute the argument. */
5098 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5099 VOIDmode, EXPAND_NORMAL);
5100 /* Compute op, into TARGET if possible.
5101 Set TARGET to wherever the result comes back. */
5102 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5103 op_optab, op0, target, 1);
5104 gcc_assert (target);
5106 return convert_to_mode (target_mode, target, 0);
5109 /* If the string passed to fputs is a constant and is one character
5110 long, we attempt to transform this call into __builtin_fputc(). */
5113 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5115 /* Verify the arguments in the original call. */
5116 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5118 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5119 CALL_EXPR_ARG (exp, 1),
5120 (target == const0_rtx),
5121 unlocked, NULL_TREE);
5123 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5128 /* Expand a call to __builtin_expect. We just return our argument
5129 as the builtin_expect semantic should've been already executed by
5130 tree branch prediction pass. */
5133 expand_builtin_expect (tree exp, rtx target)
5137 if (call_expr_nargs (exp) < 2)
5139 arg = CALL_EXPR_ARG (exp, 0);
5140 c = CALL_EXPR_ARG (exp, 1);
5142 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5143 /* When guessing was done, the hints should be already stripped away. */
5144 gcc_assert (!flag_guess_branch_prob
5145 || optimize == 0 || errorcount || sorrycount);
5150 expand_builtin_trap (void)
5154 emit_insn (gen_trap ());
5157 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5161 /* Expand EXP, a call to fabs, fabsf or fabsl.
5162 Return NULL_RTX if a normal call should be emitted rather than expanding
5163 the function inline. If convenient, the result should be placed
5164 in TARGET. SUBTARGET may be used as the target for computing
5168 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5170 enum machine_mode mode;
5174 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5177 arg = CALL_EXPR_ARG (exp, 0);
5178 mode = TYPE_MODE (TREE_TYPE (arg));
5179 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5180 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5183 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5184 Return NULL is a normal call should be emitted rather than expanding the
5185 function inline. If convenient, the result should be placed in TARGET.
5186 SUBTARGET may be used as the target for computing the operand. */
5189 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5194 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5197 arg = CALL_EXPR_ARG (exp, 0);
5198 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5200 arg = CALL_EXPR_ARG (exp, 1);
5201 op1 = expand_normal (arg);
5203 return expand_copysign (op0, op1, target);
5206 /* Create a new constant string literal and return a char* pointer to it.
5207 The STRING_CST value is the LEN characters at STR. */
5209 build_string_literal (int len, const char *str)
5211 tree t, elem, index, type;
5213 t = build_string (len, str);
5214 elem = build_type_variant (char_type_node, 1, 0);
5215 index = build_index_type (build_int_cst (NULL_TREE, len - 1));
5216 type = build_array_type (elem, index);
5217 TREE_TYPE (t) = type;
5218 TREE_CONSTANT (t) = 1;
5219 TREE_INVARIANT (t) = 1;
5220 TREE_READONLY (t) = 1;
5221 TREE_STATIC (t) = 1;
5223 type = build_pointer_type (type);
5224 t = build1 (ADDR_EXPR, type, t);
5226 type = build_pointer_type (elem);
5227 t = build1 (NOP_EXPR, type, t);
5231 /* Expand EXP, a call to printf or printf_unlocked.
5232 Return NULL_RTX if a normal call should be emitted rather than transforming
5233 the function inline. If convenient, the result should be placed in
5234 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5237 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5240 /* If we're using an unlocked function, assume the other unlocked
5241 functions exist explicitly. */
5242 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5243 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5244 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5245 : implicit_built_in_decls[BUILT_IN_PUTS];
5246 const char *fmt_str;
5249 int nargs = call_expr_nargs (exp);
5251 /* If the return value is used, don't do the transformation. */
5252 if (target != const0_rtx)
5255 /* Verify the required arguments in the original call. */
5258 fmt = CALL_EXPR_ARG (exp, 0);
5259 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5262 /* Check whether the format is a literal string constant. */
5263 fmt_str = c_getstr (fmt);
5264 if (fmt_str == NULL)
5267 if (!init_target_chars ())
5270 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5271 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5274 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5277 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5279 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5280 else if (strcmp (fmt_str, target_percent_c) == 0)
5283 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5286 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5290 /* We can't handle anything else with % args or %% ... yet. */
5291 if (strchr (fmt_str, target_percent))
5297 /* If the format specifier was "", printf does nothing. */
5298 if (fmt_str[0] == '\0')
5300 /* If the format specifier has length of 1, call putchar. */
5301 if (fmt_str[1] == '\0')
5303 /* Given printf("c"), (where c is any one character,)
5304 convert "c"[0] to an int and pass that to the replacement
5306 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5308 fn = build_call_expr (fn_putchar, 1, arg);
5312 /* If the format specifier was "string\n", call puts("string"). */
5313 size_t len = strlen (fmt_str);
5314 if ((unsigned char)fmt_str[len - 1] == target_newline)
5316 /* Create a NUL-terminated string that's one char shorter
5317 than the original, stripping off the trailing '\n'. */
5318 char *newstr = alloca (len);
5319 memcpy (newstr, fmt_str, len - 1);
5320 newstr[len - 1] = 0;
5321 arg = build_string_literal (len, newstr);
5323 fn = build_call_expr (fn_puts, 1, arg);
5326 /* We'd like to arrange to call fputs(string,stdout) here,
5327 but we need stdout and don't have a way to get it yet. */
5334 if (TREE_CODE (fn) == CALL_EXPR)
5335 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5336 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5339 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5340 Return NULL_RTX if a normal call should be emitted rather than transforming
5341 the function inline. If convenient, the result should be placed in
5342 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5345 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5348 /* If we're using an unlocked function, assume the other unlocked
5349 functions exist explicitly. */
5350 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5351 : implicit_built_in_decls[BUILT_IN_FPUTC];
5352 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5353 : implicit_built_in_decls[BUILT_IN_FPUTS];
5354 const char *fmt_str;
5357 int nargs = call_expr_nargs (exp);
5359 /* If the return value is used, don't do the transformation. */
5360 if (target != const0_rtx)
5363 /* Verify the required arguments in the original call. */
5366 fp = CALL_EXPR_ARG (exp, 0);
5367 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5369 fmt = CALL_EXPR_ARG (exp, 1);
5370 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5373 /* Check whether the format is a literal string constant. */
5374 fmt_str = c_getstr (fmt);
5375 if (fmt_str == NULL)
5378 if (!init_target_chars ())
5381 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5382 if (strcmp (fmt_str, target_percent_s) == 0)
5385 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5387 arg = CALL_EXPR_ARG (exp, 2);
5389 fn = build_call_expr (fn_fputs, 2, arg, fp);
5391 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5392 else if (strcmp (fmt_str, target_percent_c) == 0)
5395 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5397 arg = CALL_EXPR_ARG (exp, 2);
5399 fn = build_call_expr (fn_fputc, 2, arg, fp);
5403 /* We can't handle anything else with % args or %% ... yet. */
5404 if (strchr (fmt_str, target_percent))
5410 /* If the format specifier was "", fprintf does nothing. */
5411 if (fmt_str[0] == '\0')
5413 /* Evaluate and ignore FILE* argument for side-effects. */
5414 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5418 /* When "string" doesn't contain %, replace all cases of
5419 fprintf(stream,string) with fputs(string,stream). The fputs
5420 builtin will take care of special cases like length == 1. */
5422 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5427 if (TREE_CODE (fn) == CALL_EXPR)
5428 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5429 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5432 /* Expand a call EXP to sprintf. Return NULL_RTX if
5433 a normal call should be emitted rather than expanding the function
5434 inline. If convenient, the result should be placed in TARGET with
5438 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5441 const char *fmt_str;
5442 int nargs = call_expr_nargs (exp);
5444 /* Verify the required arguments in the original call. */
5447 dest = CALL_EXPR_ARG (exp, 0);
5448 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5450 fmt = CALL_EXPR_ARG (exp, 0);
5451 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5454 /* Check whether the format is a literal string constant. */
5455 fmt_str = c_getstr (fmt);
5456 if (fmt_str == NULL)
5459 if (!init_target_chars ())
5462 /* If the format doesn't contain % args or %%, use strcpy. */
5463 if (strchr (fmt_str, target_percent) == 0)
5465 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5468 if ((nargs > 2) || ! fn)
5470 expand_expr (build_call_expr (fn, 2, dest, fmt),
5471 const0_rtx, VOIDmode, EXPAND_NORMAL);
5472 if (target == const0_rtx)
5474 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5475 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5477 /* If the format is "%s", use strcpy if the result isn't used. */
5478 else if (strcmp (fmt_str, target_percent_s) == 0)
5481 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5487 arg = CALL_EXPR_ARG (exp, 2);
5488 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5491 if (target != const0_rtx)
5493 len = c_strlen (arg, 1);
5494 if (! len || TREE_CODE (len) != INTEGER_CST)
5500 expand_expr (build_call_expr (fn, 2, dest, arg),
5501 const0_rtx, VOIDmode, EXPAND_NORMAL);
5503 if (target == const0_rtx)
5505 return expand_expr (len, target, mode, EXPAND_NORMAL);
5511 /* Expand a call to either the entry or exit function profiler. */
5514 expand_builtin_profile_func (bool exitp)
5518 this = DECL_RTL (current_function_decl);
5519 gcc_assert (MEM_P (this));
5520 this = XEXP (this, 0);
5523 which = profile_function_exit_libfunc;
5525 which = profile_function_entry_libfunc;
5527 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5528 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5535 /* Expand a call to __builtin___clear_cache. */
5538 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5540 #ifndef HAVE_clear_cache
5541 #ifdef CLEAR_INSN_CACHE
5542 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5543 does something. Just do the default expansion to a call to
5547 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5548 does nothing. There is no need to call it. Do nothing. */
5550 #endif /* CLEAR_INSN_CACHE */
5552 /* We have a "clear_cache" insn, and it will handle everything. */
5554 rtx begin_rtx, end_rtx;
5555 enum insn_code icode;
5557 /* We must not expand to a library call. If we did, any
5558 fallback library function in libgcc that might contain a call to
5559 __builtin___clear_cache() would recurse infinitely. */
5560 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5562 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5566 if (HAVE_clear_cache)
5568 icode = CODE_FOR_clear_cache;
5570 begin = CALL_EXPR_ARG (exp, 0);
5571 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5572 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5573 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5574 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5576 end = CALL_EXPR_ARG (exp, 1);
5577 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5578 end_rtx = convert_memory_address (Pmode, end_rtx);
5579 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5580 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5582 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5585 #endif /* HAVE_clear_cache */
5588 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5591 round_trampoline_addr (rtx tramp)
5593 rtx temp, addend, mask;
5595 /* If we don't need too much alignment, we'll have been guaranteed
5596 proper alignment by get_trampoline_type. */
5597 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5600 /* Round address up to desired boundary. */
5601 temp = gen_reg_rtx (Pmode);
5602 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5603 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5605 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5606 temp, 0, OPTAB_LIB_WIDEN);
5607 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5608 temp, 0, OPTAB_LIB_WIDEN);
5614 expand_builtin_init_trampoline (tree exp)
5616 tree t_tramp, t_func, t_chain;
5617 rtx r_tramp, r_func, r_chain;
5618 #ifdef TRAMPOLINE_TEMPLATE
5622 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5623 POINTER_TYPE, VOID_TYPE))
5626 t_tramp = CALL_EXPR_ARG (exp, 0);
5627 t_func = CALL_EXPR_ARG (exp, 1);
5628 t_chain = CALL_EXPR_ARG (exp, 2);
5630 r_tramp = expand_normal (t_tramp);
5631 r_func = expand_normal (t_func);
5632 r_chain = expand_normal (t_chain);
5634 /* Generate insns to initialize the trampoline. */
5635 r_tramp = round_trampoline_addr (r_tramp);
5636 #ifdef TRAMPOLINE_TEMPLATE
5637 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5638 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5639 emit_block_move (blktramp, assemble_trampoline_template (),
5640 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5642 trampolines_created = 1;
5643 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5649 expand_builtin_adjust_trampoline (tree exp)
5653 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5656 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5657 tramp = round_trampoline_addr (tramp);
5658 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5659 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5665 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5666 function. The function first checks whether the back end provides
5667 an insn to implement signbit for the respective mode. If not, it
5668 checks whether the floating point format of the value is such that
5669 the sign bit can be extracted. If that is not the case, the
5670 function returns NULL_RTX to indicate that a normal call should be
5671 emitted rather than expanding the function in-line. EXP is the
5672 expression that is a call to the builtin function; if convenient,
5673 the result should be placed in TARGET. */
5675 expand_builtin_signbit (tree exp, rtx target)
5677 const struct real_format *fmt;
5678 enum machine_mode fmode, imode, rmode;
5679 HOST_WIDE_INT hi, lo;
5682 enum insn_code icode;
5685 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5688 arg = CALL_EXPR_ARG (exp, 0);
5689 fmode = TYPE_MODE (TREE_TYPE (arg));
5690 rmode = TYPE_MODE (TREE_TYPE (exp));
5691 fmt = REAL_MODE_FORMAT (fmode);
5693 arg = builtin_save_expr (arg);
5695 /* Expand the argument yielding a RTX expression. */
5696 temp = expand_normal (arg);
5698 /* Check if the back end provides an insn that handles signbit for the
5700 icode = signbit_optab->handlers [(int) fmode].insn_code;
5701 if (icode != CODE_FOR_nothing)
5703 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5704 emit_unop_insn (icode, target, temp, UNKNOWN);
5708 /* For floating point formats without a sign bit, implement signbit
5710 bitpos = fmt->signbit_ro;
5713 /* But we can't do this if the format supports signed zero. */
5714 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5717 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5718 build_real (TREE_TYPE (arg), dconst0));
5719 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5722 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5724 imode = int_mode_for_mode (fmode);
5725 if (imode == BLKmode)
5727 temp = gen_lowpart (imode, temp);
5732 /* Handle targets with different FP word orders. */
5733 if (FLOAT_WORDS_BIG_ENDIAN)
5734 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5736 word = bitpos / BITS_PER_WORD;
5737 temp = operand_subword_force (temp, word, fmode);
5738 bitpos = bitpos % BITS_PER_WORD;
5741 /* Force the intermediate word_mode (or narrower) result into a
5742 register. This avoids attempting to create paradoxical SUBREGs
5743 of floating point modes below. */
5744 temp = force_reg (imode, temp);
5746 /* If the bitpos is within the "result mode" lowpart, the operation
5747 can be implement with a single bitwise AND. Otherwise, we need
5748 a right shift and an AND. */
5750 if (bitpos < GET_MODE_BITSIZE (rmode))
5752 if (bitpos < HOST_BITS_PER_WIDE_INT)
5755 lo = (HOST_WIDE_INT) 1 << bitpos;
5759 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5764 temp = gen_lowpart (rmode, temp);
5765 temp = expand_binop (rmode, and_optab, temp,
5766 immed_double_const (lo, hi, rmode),
5767 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5771 /* Perform a logical right shift to place the signbit in the least
5772 significant bit, then truncate the result to the desired mode
5773 and mask just this bit. */
5774 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5775 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5776 temp = gen_lowpart (rmode, temp);
5777 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5778 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5784 /* Expand fork or exec calls. TARGET is the desired target of the
5785 call. EXP is the call. FN is the
5786 identificator of the actual function. IGNORE is nonzero if the
5787 value is to be ignored. */
5790 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5795 /* If we are not profiling, just call the function. */
5796 if (!profile_arc_flag)
5799 /* Otherwise call the wrapper. This should be equivalent for the rest of
5800 compiler, so the code does not diverge, and the wrapper may run the
5801 code necessary for keeping the profiling sane. */
5803 switch (DECL_FUNCTION_CODE (fn))
5806 id = get_identifier ("__gcov_fork");
5809 case BUILT_IN_EXECL:
5810 id = get_identifier ("__gcov_execl");
5813 case BUILT_IN_EXECV:
5814 id = get_identifier ("__gcov_execv");
5817 case BUILT_IN_EXECLP:
5818 id = get_identifier ("__gcov_execlp");
5821 case BUILT_IN_EXECLE:
5822 id = get_identifier ("__gcov_execle");
5825 case BUILT_IN_EXECVP:
5826 id = get_identifier ("__gcov_execvp");
5829 case BUILT_IN_EXECVE:
5830 id = get_identifier ("__gcov_execve");
5837 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5838 DECL_EXTERNAL (decl) = 1;
5839 TREE_PUBLIC (decl) = 1;
5840 DECL_ARTIFICIAL (decl) = 1;
5841 TREE_NOTHROW (decl) = 1;
5842 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5843 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5844 call = rewrite_call_expr (exp, 0, decl, 0);
5845 return expand_call (call, target, ignore);
5850 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5851 the pointer in these functions is void*, the tree optimizers may remove
5852 casts. The mode computed in expand_builtin isn't reliable either, due
5853 to __sync_bool_compare_and_swap.
5855 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5856 group of builtins. This gives us log2 of the mode size. */
5858 static inline enum machine_mode
5859 get_builtin_sync_mode (int fcode_diff)
5861 /* The size is not negotiable, so ask not to get BLKmode in return
5862 if the target indicates that a smaller size would be better. */
5863 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5866 /* Expand the memory expression LOC and return the appropriate memory operand
5867 for the builtin_sync operations. */
5870 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5874 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5876 /* Note that we explicitly do not want any alias information for this
5877 memory, so that we kill all other live memories. Otherwise we don't
5878 satisfy the full barrier semantics of the intrinsic. */
5879 mem = validize_mem (gen_rtx_MEM (mode, addr));
5881 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5882 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5883 MEM_VOLATILE_P (mem) = 1;
5888 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5889 EXP is the CALL_EXPR. CODE is the rtx code
5890 that corresponds to the arithmetic or logical operation from the name;
5891 an exception here is that NOT actually means NAND. TARGET is an optional
5892 place for us to store the results; AFTER is true if this is the
5893 fetch_and_xxx form. IGNORE is true if we don't actually care about
5894 the result of the operation at all. */
5897 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5898 enum rtx_code code, bool after,
5899 rtx target, bool ignore)
5902 enum machine_mode old_mode;
5904 /* Expand the operands. */
5905 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5907 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5908 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5909 of CONST_INTs, where we know the old_mode only from the call argument. */
5910 old_mode = GET_MODE (val);
5911 if (old_mode == VOIDmode)
5912 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5913 val = convert_modes (mode, old_mode, val, 1);
5916 return expand_sync_operation (mem, val, code);
5918 return expand_sync_fetch_operation (mem, val, code, after, target);
5921 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5922 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5923 true if this is the boolean form. TARGET is a place for us to store the
5924 results; this is NOT optional if IS_BOOL is true. */
5927 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5928 bool is_bool, rtx target)
5930 rtx old_val, new_val, mem;
5931 enum machine_mode old_mode;
5933 /* Expand the operands. */
5934 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5937 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5938 mode, EXPAND_NORMAL);
5939 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5940 of CONST_INTs, where we know the old_mode only from the call argument. */
5941 old_mode = GET_MODE (old_val);
5942 if (old_mode == VOIDmode)
5943 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5944 old_val = convert_modes (mode, old_mode, old_val, 1);
5946 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5947 mode, EXPAND_NORMAL);
5948 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5949 of CONST_INTs, where we know the old_mode only from the call argument. */
5950 old_mode = GET_MODE (new_val);
5951 if (old_mode == VOIDmode)
5952 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5953 new_val = convert_modes (mode, old_mode, new_val, 1);
5956 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5958 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5961 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5962 general form is actually an atomic exchange, and some targets only
5963 support a reduced form with the second argument being a constant 1.
5964 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5968 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5972 enum machine_mode old_mode;
5974 /* Expand the operands. */
5975 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5976 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5977 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5978 of CONST_INTs, where we know the old_mode only from the call argument. */
5979 old_mode = GET_MODE (val);
5980 if (old_mode == VOIDmode)
5981 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5982 val = convert_modes (mode, old_mode, val, 1);
5984 return expand_sync_lock_test_and_set (mem, val, target);
5987 /* Expand the __sync_synchronize intrinsic. */
5990 expand_builtin_synchronize (void)
5994 #ifdef HAVE_memory_barrier
5995 if (HAVE_memory_barrier)
5997 emit_insn (gen_memory_barrier ());
6002 /* If no explicit memory barrier instruction is available, create an
6003 empty asm stmt with a memory clobber. */
6004 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6005 tree_cons (NULL, build_string (6, "memory"), NULL));
6006 ASM_VOLATILE_P (x) = 1;
6007 expand_asm_expr (x);
6010 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6013 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6015 enum insn_code icode;
6017 rtx val = const0_rtx;
6019 /* Expand the operands. */
6020 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6022 /* If there is an explicit operation in the md file, use it. */
6023 icode = sync_lock_release[mode];
6024 if (icode != CODE_FOR_nothing)
6026 if (!insn_data[icode].operand[1].predicate (val, mode))
6027 val = force_reg (mode, val);
6029 insn = GEN_FCN (icode) (mem, val);
6037 /* Otherwise we can implement this operation by emitting a barrier
6038 followed by a store of zero. */
6039 expand_builtin_synchronize ();
6040 emit_move_insn (mem, val);
6043 /* Expand an expression EXP that calls a built-in function,
6044 with result going to TARGET if that's convenient
6045 (and in mode MODE if that's convenient).
6046 SUBTARGET may be used as the target for computing one of EXP's operands.
6047 IGNORE is nonzero if the value is to be ignored. */
6050 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6053 tree fndecl = get_callee_fndecl (exp);
6054 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6055 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6057 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6058 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6060 /* When not optimizing, generate calls to library functions for a certain
6063 && !called_as_built_in (fndecl)
6064 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6065 && fcode != BUILT_IN_ALLOCA)
6066 return expand_call (exp, target, ignore);
6068 /* The built-in function expanders test for target == const0_rtx
6069 to determine whether the function's result will be ignored. */
6071 target = const0_rtx;
6073 /* If the result of a pure or const built-in function is ignored, and
6074 none of its arguments are volatile, we can avoid expanding the
6075 built-in call and just evaluate the arguments for side-effects. */
6076 if (target == const0_rtx
6077 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
6079 bool volatilep = false;
6081 call_expr_arg_iterator iter;
6083 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6084 if (TREE_THIS_VOLATILE (arg))
6092 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6093 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6100 CASE_FLT_FN (BUILT_IN_FABS):
6101 target = expand_builtin_fabs (exp, target, subtarget);
6106 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6107 target = expand_builtin_copysign (exp, target, subtarget);
6112 /* Just do a normal library call if we were unable to fold
6114 CASE_FLT_FN (BUILT_IN_CABS):
6117 CASE_FLT_FN (BUILT_IN_EXP):
6118 CASE_FLT_FN (BUILT_IN_EXP10):
6119 CASE_FLT_FN (BUILT_IN_POW10):
6120 CASE_FLT_FN (BUILT_IN_EXP2):
6121 CASE_FLT_FN (BUILT_IN_EXPM1):
6122 CASE_FLT_FN (BUILT_IN_LOGB):
6123 CASE_FLT_FN (BUILT_IN_LOG):
6124 CASE_FLT_FN (BUILT_IN_LOG10):
6125 CASE_FLT_FN (BUILT_IN_LOG2):
6126 CASE_FLT_FN (BUILT_IN_LOG1P):
6127 CASE_FLT_FN (BUILT_IN_TAN):
6128 CASE_FLT_FN (BUILT_IN_ASIN):
6129 CASE_FLT_FN (BUILT_IN_ACOS):
6130 CASE_FLT_FN (BUILT_IN_ATAN):
6131 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6132 because of possible accuracy problems. */
6133 if (! flag_unsafe_math_optimizations)
6135 CASE_FLT_FN (BUILT_IN_SQRT):
6136 CASE_FLT_FN (BUILT_IN_FLOOR):
6137 CASE_FLT_FN (BUILT_IN_CEIL):
6138 CASE_FLT_FN (BUILT_IN_TRUNC):
6139 CASE_FLT_FN (BUILT_IN_ROUND):
6140 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6141 CASE_FLT_FN (BUILT_IN_RINT):
6142 target = expand_builtin_mathfn (exp, target, subtarget);
6147 CASE_FLT_FN (BUILT_IN_ILOGB):
6148 if (! flag_unsafe_math_optimizations)
6150 CASE_FLT_FN (BUILT_IN_ISINF):
6151 CASE_FLT_FN (BUILT_IN_FINITE):
6152 case BUILT_IN_ISFINITE:
6153 case BUILT_IN_ISNORMAL:
6154 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6159 CASE_FLT_FN (BUILT_IN_LCEIL):
6160 CASE_FLT_FN (BUILT_IN_LLCEIL):
6161 CASE_FLT_FN (BUILT_IN_LFLOOR):
6162 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6163 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6168 CASE_FLT_FN (BUILT_IN_LRINT):
6169 CASE_FLT_FN (BUILT_IN_LLRINT):
6170 CASE_FLT_FN (BUILT_IN_LROUND):
6171 CASE_FLT_FN (BUILT_IN_LLROUND):
6172 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6177 CASE_FLT_FN (BUILT_IN_POW):
6178 target = expand_builtin_pow (exp, target, subtarget);
6183 CASE_FLT_FN (BUILT_IN_POWI):
6184 target = expand_builtin_powi (exp, target, subtarget);
6189 CASE_FLT_FN (BUILT_IN_ATAN2):
6190 CASE_FLT_FN (BUILT_IN_LDEXP):
6191 CASE_FLT_FN (BUILT_IN_SCALB):
6192 CASE_FLT_FN (BUILT_IN_SCALBN):
6193 CASE_FLT_FN (BUILT_IN_SCALBLN):
6194 if (! flag_unsafe_math_optimizations)
6197 CASE_FLT_FN (BUILT_IN_FMOD):
6198 CASE_FLT_FN (BUILT_IN_REMAINDER):
6199 CASE_FLT_FN (BUILT_IN_DREM):
6200 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6205 CASE_FLT_FN (BUILT_IN_CEXPI):
6206 target = expand_builtin_cexpi (exp, target, subtarget);
6207 gcc_assert (target);
6210 CASE_FLT_FN (BUILT_IN_SIN):
6211 CASE_FLT_FN (BUILT_IN_COS):
6212 if (! flag_unsafe_math_optimizations)
6214 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6219 CASE_FLT_FN (BUILT_IN_SINCOS):
6220 if (! flag_unsafe_math_optimizations)
6222 target = expand_builtin_sincos (exp);
6227 case BUILT_IN_APPLY_ARGS:
6228 return expand_builtin_apply_args ();
6230 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6231 FUNCTION with a copy of the parameters described by
6232 ARGUMENTS, and ARGSIZE. It returns a block of memory
6233 allocated on the stack into which is stored all the registers
6234 that might possibly be used for returning the result of a
6235 function. ARGUMENTS is the value returned by
6236 __builtin_apply_args. ARGSIZE is the number of bytes of
6237 arguments that must be copied. ??? How should this value be
6238 computed? We'll also need a safe worst case value for varargs
6240 case BUILT_IN_APPLY:
6241 if (!validate_arglist (exp, POINTER_TYPE,
6242 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6243 && !validate_arglist (exp, REFERENCE_TYPE,
6244 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6250 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6251 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6252 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6254 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6257 /* __builtin_return (RESULT) causes the function to return the
6258 value described by RESULT. RESULT is address of the block of
6259 memory returned by __builtin_apply. */
6260 case BUILT_IN_RETURN:
6261 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6262 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6265 case BUILT_IN_SAVEREGS:
6266 return expand_builtin_saveregs ();
6268 case BUILT_IN_ARGS_INFO:
6269 return expand_builtin_args_info (exp);
6271 /* Return the address of the first anonymous stack arg. */
6272 case BUILT_IN_NEXT_ARG:
6273 if (fold_builtin_next_arg (exp, false))
6275 return expand_builtin_next_arg ();
6277 case BUILT_IN_CLEAR_CACHE:
6278 target = expand_builtin___clear_cache (exp);
6283 case BUILT_IN_CLASSIFY_TYPE:
6284 return expand_builtin_classify_type (exp);
6286 case BUILT_IN_CONSTANT_P:
6289 case BUILT_IN_FRAME_ADDRESS:
6290 case BUILT_IN_RETURN_ADDRESS:
6291 return expand_builtin_frame_address (fndecl, exp);
6293 /* Returns the address of the area where the structure is returned.
6295 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6296 if (call_expr_nargs (exp) != 0
6297 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6298 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6301 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6303 case BUILT_IN_ALLOCA:
6304 target = expand_builtin_alloca (exp, target);
6309 case BUILT_IN_STACK_SAVE:
6310 return expand_stack_save ();
6312 case BUILT_IN_STACK_RESTORE:
6313 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6316 case BUILT_IN_BSWAP32:
6317 case BUILT_IN_BSWAP64:
6318 target = expand_builtin_bswap (exp, target, subtarget);
6324 CASE_INT_FN (BUILT_IN_FFS):
6325 case BUILT_IN_FFSIMAX:
6326 target = expand_builtin_unop (target_mode, exp, target,
6327 subtarget, ffs_optab);
6332 CASE_INT_FN (BUILT_IN_CLZ):
6333 case BUILT_IN_CLZIMAX:
6334 target = expand_builtin_unop (target_mode, exp, target,
6335 subtarget, clz_optab);
6340 CASE_INT_FN (BUILT_IN_CTZ):
6341 case BUILT_IN_CTZIMAX:
6342 target = expand_builtin_unop (target_mode, exp, target,
6343 subtarget, ctz_optab);
6348 CASE_INT_FN (BUILT_IN_POPCOUNT):
6349 case BUILT_IN_POPCOUNTIMAX:
6350 target = expand_builtin_unop (target_mode, exp, target,
6351 subtarget, popcount_optab);
6356 CASE_INT_FN (BUILT_IN_PARITY):
6357 case BUILT_IN_PARITYIMAX:
6358 target = expand_builtin_unop (target_mode, exp, target,
6359 subtarget, parity_optab);
6364 case BUILT_IN_STRLEN:
6365 target = expand_builtin_strlen (exp, target, target_mode);
6370 case BUILT_IN_STRCPY:
6371 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6376 case BUILT_IN_STRNCPY:
6377 target = expand_builtin_strncpy (exp, target, mode);
6382 case BUILT_IN_STPCPY:
6383 target = expand_builtin_stpcpy (exp, target, mode);
6388 case BUILT_IN_STRCAT:
6389 target = expand_builtin_strcat (fndecl, exp, target, mode);
6394 case BUILT_IN_STRNCAT:
6395 target = expand_builtin_strncat (exp, target, mode);
6400 case BUILT_IN_STRSPN:
6401 target = expand_builtin_strspn (exp, target, mode);
6406 case BUILT_IN_STRCSPN:
6407 target = expand_builtin_strcspn (exp, target, mode);
6412 case BUILT_IN_STRSTR:
6413 target = expand_builtin_strstr (exp, target, mode);
6418 case BUILT_IN_STRPBRK:
6419 target = expand_builtin_strpbrk (exp, target, mode);
6424 case BUILT_IN_INDEX:
6425 case BUILT_IN_STRCHR:
6426 target = expand_builtin_strchr (exp, target, mode);
6431 case BUILT_IN_RINDEX:
6432 case BUILT_IN_STRRCHR:
6433 target = expand_builtin_strrchr (exp, target, mode);
6438 case BUILT_IN_MEMCPY:
6439 target = expand_builtin_memcpy (exp, target, mode);
6444 case BUILT_IN_MEMPCPY:
6445 target = expand_builtin_mempcpy (exp, target, mode);
6450 case BUILT_IN_MEMMOVE:
6451 target = expand_builtin_memmove (exp, target, mode, ignore);
6456 case BUILT_IN_BCOPY:
6457 target = expand_builtin_bcopy (exp, ignore);
6462 case BUILT_IN_MEMSET:
6463 target = expand_builtin_memset (exp, target, mode);
6468 case BUILT_IN_BZERO:
6469 target = expand_builtin_bzero (exp);
6474 case BUILT_IN_STRCMP:
6475 target = expand_builtin_strcmp (exp, target, mode);
6480 case BUILT_IN_STRNCMP:
6481 target = expand_builtin_strncmp (exp, target, mode);
6486 case BUILT_IN_MEMCHR:
6487 target = expand_builtin_memchr (exp, target, mode);
6493 case BUILT_IN_MEMCMP:
6494 target = expand_builtin_memcmp (exp, target, mode);
6499 case BUILT_IN_SETJMP:
6500 /* This should have been lowered to the builtins below. */
6503 case BUILT_IN_SETJMP_SETUP:
6504 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6505 and the receiver label. */
6506 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6508 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6509 VOIDmode, EXPAND_NORMAL);
6510 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6511 rtx label_r = label_rtx (label);
6513 /* This is copied from the handling of non-local gotos. */
6514 expand_builtin_setjmp_setup (buf_addr, label_r);
6515 nonlocal_goto_handler_labels
6516 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6517 nonlocal_goto_handler_labels);
6518 /* ??? Do not let expand_label treat us as such since we would
6519 not want to be both on the list of non-local labels and on
6520 the list of forced labels. */
6521 FORCED_LABEL (label) = 0;
6526 case BUILT_IN_SETJMP_DISPATCHER:
6527 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6528 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6530 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6531 rtx label_r = label_rtx (label);
6533 /* Remove the dispatcher label from the list of non-local labels
6534 since the receiver labels have been added to it above. */
6535 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6540 case BUILT_IN_SETJMP_RECEIVER:
6541 /* __builtin_setjmp_receiver is passed the receiver label. */
6542 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6544 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6545 rtx label_r = label_rtx (label);
6547 expand_builtin_setjmp_receiver (label_r);
6552 /* __builtin_longjmp is passed a pointer to an array of five words.
6553 It's similar to the C library longjmp function but works with
6554 __builtin_setjmp above. */
6555 case BUILT_IN_LONGJMP:
6556 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6558 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6559 VOIDmode, EXPAND_NORMAL);
6560 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6562 if (value != const1_rtx)
6564 error ("%<__builtin_longjmp%> second argument must be 1");
6568 expand_builtin_longjmp (buf_addr, value);
6573 case BUILT_IN_NONLOCAL_GOTO:
6574 target = expand_builtin_nonlocal_goto (exp);
6579 /* This updates the setjmp buffer that is its argument with the value
6580 of the current stack pointer. */
6581 case BUILT_IN_UPDATE_SETJMP_BUF:
6582 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6585 = expand_normal (CALL_EXPR_ARG (exp, 0));
6587 expand_builtin_update_setjmp_buf (buf_addr);
6593 expand_builtin_trap ();
6596 case BUILT_IN_PRINTF:
6597 target = expand_builtin_printf (exp, target, mode, false);
6602 case BUILT_IN_PRINTF_UNLOCKED:
6603 target = expand_builtin_printf (exp, target, mode, true);
6608 case BUILT_IN_FPUTS:
6609 target = expand_builtin_fputs (exp, target, false);
6613 case BUILT_IN_FPUTS_UNLOCKED:
6614 target = expand_builtin_fputs (exp, target, true);
6619 case BUILT_IN_FPRINTF:
6620 target = expand_builtin_fprintf (exp, target, mode, false);
6625 case BUILT_IN_FPRINTF_UNLOCKED:
6626 target = expand_builtin_fprintf (exp, target, mode, true);
6631 case BUILT_IN_SPRINTF:
6632 target = expand_builtin_sprintf (exp, target, mode);
6637 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6638 case BUILT_IN_SIGNBITD32:
6639 case BUILT_IN_SIGNBITD64:
6640 case BUILT_IN_SIGNBITD128:
6641 target = expand_builtin_signbit (exp, target);
6646 /* Various hooks for the DWARF 2 __throw routine. */
6647 case BUILT_IN_UNWIND_INIT:
6648 expand_builtin_unwind_init ();
6650 case BUILT_IN_DWARF_CFA:
6651 return virtual_cfa_rtx;
6652 #ifdef DWARF2_UNWIND_INFO
6653 case BUILT_IN_DWARF_SP_COLUMN:
6654 return expand_builtin_dwarf_sp_column ();
6655 case BUILT_IN_INIT_DWARF_REG_SIZES:
6656 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6659 case BUILT_IN_FROB_RETURN_ADDR:
6660 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6661 case BUILT_IN_EXTRACT_RETURN_ADDR:
6662 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6663 case BUILT_IN_EH_RETURN:
6664 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6665 CALL_EXPR_ARG (exp, 1));
6667 #ifdef EH_RETURN_DATA_REGNO
6668 case BUILT_IN_EH_RETURN_DATA_REGNO:
6669 return expand_builtin_eh_return_data_regno (exp);
6671 case BUILT_IN_EXTEND_POINTER:
6672 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6674 case BUILT_IN_VA_START:
6675 case BUILT_IN_STDARG_START:
6676 return expand_builtin_va_start (exp);
6677 case BUILT_IN_VA_END:
6678 return expand_builtin_va_end (exp);
6679 case BUILT_IN_VA_COPY:
6680 return expand_builtin_va_copy (exp);
6681 case BUILT_IN_EXPECT:
6682 return expand_builtin_expect (exp, target);
6683 case BUILT_IN_PREFETCH:
6684 expand_builtin_prefetch (exp);
6687 case BUILT_IN_PROFILE_FUNC_ENTER:
6688 return expand_builtin_profile_func (false);
6689 case BUILT_IN_PROFILE_FUNC_EXIT:
6690 return expand_builtin_profile_func (true);
6692 case BUILT_IN_INIT_TRAMPOLINE:
6693 return expand_builtin_init_trampoline (exp);
6694 case BUILT_IN_ADJUST_TRAMPOLINE:
6695 return expand_builtin_adjust_trampoline (exp);
6698 case BUILT_IN_EXECL:
6699 case BUILT_IN_EXECV:
6700 case BUILT_IN_EXECLP:
6701 case BUILT_IN_EXECLE:
6702 case BUILT_IN_EXECVP:
6703 case BUILT_IN_EXECVE:
6704 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6709 case BUILT_IN_FETCH_AND_ADD_1:
6710 case BUILT_IN_FETCH_AND_ADD_2:
6711 case BUILT_IN_FETCH_AND_ADD_4:
6712 case BUILT_IN_FETCH_AND_ADD_8:
6713 case BUILT_IN_FETCH_AND_ADD_16:
6714 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6715 target = expand_builtin_sync_operation (mode, exp, PLUS,
6716 false, target, ignore);
6721 case BUILT_IN_FETCH_AND_SUB_1:
6722 case BUILT_IN_FETCH_AND_SUB_2:
6723 case BUILT_IN_FETCH_AND_SUB_4:
6724 case BUILT_IN_FETCH_AND_SUB_8:
6725 case BUILT_IN_FETCH_AND_SUB_16:
6726 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6727 target = expand_builtin_sync_operation (mode, exp, MINUS,
6728 false, target, ignore);
6733 case BUILT_IN_FETCH_AND_OR_1:
6734 case BUILT_IN_FETCH_AND_OR_2:
6735 case BUILT_IN_FETCH_AND_OR_4:
6736 case BUILT_IN_FETCH_AND_OR_8:
6737 case BUILT_IN_FETCH_AND_OR_16:
6738 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6739 target = expand_builtin_sync_operation (mode, exp, IOR,
6740 false, target, ignore);
6745 case BUILT_IN_FETCH_AND_AND_1:
6746 case BUILT_IN_FETCH_AND_AND_2:
6747 case BUILT_IN_FETCH_AND_AND_4:
6748 case BUILT_IN_FETCH_AND_AND_8:
6749 case BUILT_IN_FETCH_AND_AND_16:
6750 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6751 target = expand_builtin_sync_operation (mode, exp, AND,
6752 false, target, ignore);
6757 case BUILT_IN_FETCH_AND_XOR_1:
6758 case BUILT_IN_FETCH_AND_XOR_2:
6759 case BUILT_IN_FETCH_AND_XOR_4:
6760 case BUILT_IN_FETCH_AND_XOR_8:
6761 case BUILT_IN_FETCH_AND_XOR_16:
6762 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6763 target = expand_builtin_sync_operation (mode, exp, XOR,
6764 false, target, ignore);
6769 case BUILT_IN_FETCH_AND_NAND_1:
6770 case BUILT_IN_FETCH_AND_NAND_2:
6771 case BUILT_IN_FETCH_AND_NAND_4:
6772 case BUILT_IN_FETCH_AND_NAND_8:
6773 case BUILT_IN_FETCH_AND_NAND_16:
6774 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6775 target = expand_builtin_sync_operation (mode, exp, NOT,
6776 false, target, ignore);
6781 case BUILT_IN_ADD_AND_FETCH_1:
6782 case BUILT_IN_ADD_AND_FETCH_2:
6783 case BUILT_IN_ADD_AND_FETCH_4:
6784 case BUILT_IN_ADD_AND_FETCH_8:
6785 case BUILT_IN_ADD_AND_FETCH_16:
6786 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6787 target = expand_builtin_sync_operation (mode, exp, PLUS,
6788 true, target, ignore);
6793 case BUILT_IN_SUB_AND_FETCH_1:
6794 case BUILT_IN_SUB_AND_FETCH_2:
6795 case BUILT_IN_SUB_AND_FETCH_4:
6796 case BUILT_IN_SUB_AND_FETCH_8:
6797 case BUILT_IN_SUB_AND_FETCH_16:
6798 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6799 target = expand_builtin_sync_operation (mode, exp, MINUS,
6800 true, target, ignore);
6805 case BUILT_IN_OR_AND_FETCH_1:
6806 case BUILT_IN_OR_AND_FETCH_2:
6807 case BUILT_IN_OR_AND_FETCH_4:
6808 case BUILT_IN_OR_AND_FETCH_8:
6809 case BUILT_IN_OR_AND_FETCH_16:
6810 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6811 target = expand_builtin_sync_operation (mode, exp, IOR,
6812 true, target, ignore);
6817 case BUILT_IN_AND_AND_FETCH_1:
6818 case BUILT_IN_AND_AND_FETCH_2:
6819 case BUILT_IN_AND_AND_FETCH_4:
6820 case BUILT_IN_AND_AND_FETCH_8:
6821 case BUILT_IN_AND_AND_FETCH_16:
6822 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6823 target = expand_builtin_sync_operation (mode, exp, AND,
6824 true, target, ignore);
6829 case BUILT_IN_XOR_AND_FETCH_1:
6830 case BUILT_IN_XOR_AND_FETCH_2:
6831 case BUILT_IN_XOR_AND_FETCH_4:
6832 case BUILT_IN_XOR_AND_FETCH_8:
6833 case BUILT_IN_XOR_AND_FETCH_16:
6834 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6835 target = expand_builtin_sync_operation (mode, exp, XOR,
6836 true, target, ignore);
6841 case BUILT_IN_NAND_AND_FETCH_1:
6842 case BUILT_IN_NAND_AND_FETCH_2:
6843 case BUILT_IN_NAND_AND_FETCH_4:
6844 case BUILT_IN_NAND_AND_FETCH_8:
6845 case BUILT_IN_NAND_AND_FETCH_16:
6846 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6847 target = expand_builtin_sync_operation (mode, exp, NOT,
6848 true, target, ignore);
6853 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6854 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6855 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6856 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6857 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6858 if (mode == VOIDmode)
6859 mode = TYPE_MODE (boolean_type_node);
6860 if (!target || !register_operand (target, mode))
6861 target = gen_reg_rtx (mode);
6863 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6864 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6869 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6870 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6871 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6872 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6873 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6874 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6875 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6880 case BUILT_IN_LOCK_TEST_AND_SET_1:
6881 case BUILT_IN_LOCK_TEST_AND_SET_2:
6882 case BUILT_IN_LOCK_TEST_AND_SET_4:
6883 case BUILT_IN_LOCK_TEST_AND_SET_8:
6884 case BUILT_IN_LOCK_TEST_AND_SET_16:
6885 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6886 target = expand_builtin_lock_test_and_set (mode, exp, target);
6891 case BUILT_IN_LOCK_RELEASE_1:
6892 case BUILT_IN_LOCK_RELEASE_2:
6893 case BUILT_IN_LOCK_RELEASE_4:
6894 case BUILT_IN_LOCK_RELEASE_8:
6895 case BUILT_IN_LOCK_RELEASE_16:
6896 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6897 expand_builtin_lock_release (mode, exp);
6900 case BUILT_IN_SYNCHRONIZE:
6901 expand_builtin_synchronize ();
6904 case BUILT_IN_OBJECT_SIZE:
6905 return expand_builtin_object_size (exp);
6907 case BUILT_IN_MEMCPY_CHK:
6908 case BUILT_IN_MEMPCPY_CHK:
6909 case BUILT_IN_MEMMOVE_CHK:
6910 case BUILT_IN_MEMSET_CHK:
6911 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6916 case BUILT_IN_STRCPY_CHK:
6917 case BUILT_IN_STPCPY_CHK:
6918 case BUILT_IN_STRNCPY_CHK:
6919 case BUILT_IN_STRCAT_CHK:
6920 case BUILT_IN_STRNCAT_CHK:
6921 case BUILT_IN_SNPRINTF_CHK:
6922 case BUILT_IN_VSNPRINTF_CHK:
6923 maybe_emit_chk_warning (exp, fcode);
6926 case BUILT_IN_SPRINTF_CHK:
6927 case BUILT_IN_VSPRINTF_CHK:
6928 maybe_emit_sprintf_chk_warning (exp, fcode);
6931 default: /* just do library call, if unknown builtin */
6935 /* The switch statement above can drop through to cause the function
6936 to be called normally. */
6937 return expand_call (exp, target, ignore);
6940 /* Determine whether a tree node represents a call to a built-in
6941 function. If the tree T is a call to a built-in function with
6942 the right number of arguments of the appropriate types, return
6943 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6944 Otherwise the return value is END_BUILTINS. */
6946 enum built_in_function
6947 builtin_mathfn_code (const_tree t)
6949 const_tree fndecl, arg, parmlist;
6950 const_tree argtype, parmtype;
6951 const_call_expr_arg_iterator iter;
6953 if (TREE_CODE (t) != CALL_EXPR
6954 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6955 return END_BUILTINS;
6957 fndecl = get_callee_fndecl (t);
6958 if (fndecl == NULL_TREE
6959 || TREE_CODE (fndecl) != FUNCTION_DECL
6960 || ! DECL_BUILT_IN (fndecl)
6961 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6962 return END_BUILTINS;
6964 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6965 init_const_call_expr_arg_iterator (t, &iter);
6966 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6968 /* If a function doesn't take a variable number of arguments,
6969 the last element in the list will have type `void'. */
6970 parmtype = TREE_VALUE (parmlist);
6971 if (VOID_TYPE_P (parmtype))
6973 if (more_const_call_expr_args_p (&iter))
6974 return END_BUILTINS;
6975 return DECL_FUNCTION_CODE (fndecl);
6978 if (! more_const_call_expr_args_p (&iter))
6979 return END_BUILTINS;
6981 arg = next_const_call_expr_arg (&iter);
6982 argtype = TREE_TYPE (arg);
6984 if (SCALAR_FLOAT_TYPE_P (parmtype))
6986 if (! SCALAR_FLOAT_TYPE_P (argtype))
6987 return END_BUILTINS;
6989 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6991 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6992 return END_BUILTINS;
6994 else if (POINTER_TYPE_P (parmtype))
6996 if (! POINTER_TYPE_P (argtype))
6997 return END_BUILTINS;
6999 else if (INTEGRAL_TYPE_P (parmtype))
7001 if (! INTEGRAL_TYPE_P (argtype))
7002 return END_BUILTINS;
7005 return END_BUILTINS;
7008 /* Variable-length argument list. */
7009 return DECL_FUNCTION_CODE (fndecl);
7012 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7013 evaluate to a constant. */
7016 fold_builtin_constant_p (tree arg)
7018 /* We return 1 for a numeric type that's known to be a constant
7019 value at compile-time or for an aggregate type that's a
7020 literal constant. */
7023 /* If we know this is a constant, emit the constant of one. */
7024 if (CONSTANT_CLASS_P (arg)
7025 || (TREE_CODE (arg) == CONSTRUCTOR
7026 && TREE_CONSTANT (arg)))
7027 return integer_one_node;
7028 if (TREE_CODE (arg) == ADDR_EXPR)
7030 tree op = TREE_OPERAND (arg, 0);
7031 if (TREE_CODE (op) == STRING_CST
7032 || (TREE_CODE (op) == ARRAY_REF
7033 && integer_zerop (TREE_OPERAND (op, 1))
7034 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7035 return integer_one_node;
7038 /* If this expression has side effects, show we don't know it to be a
7039 constant. Likewise if it's a pointer or aggregate type since in
7040 those case we only want literals, since those are only optimized
7041 when generating RTL, not later.
7042 And finally, if we are compiling an initializer, not code, we
7043 need to return a definite result now; there's not going to be any
7044 more optimization done. */
7045 if (TREE_SIDE_EFFECTS (arg)
7046 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7047 || POINTER_TYPE_P (TREE_TYPE (arg))
7049 || folding_initializer)
7050 return integer_zero_node;
7055 /* Fold a call to __builtin_expect with argument ARG, if we expect that a
7056 comparison against the argument will fold to a constant. In practice,
7057 this means a true constant or the address of a non-weak symbol. */
7060 fold_builtin_expect (tree arg)
7064 /* If the argument isn't invariant, then there's nothing we can do. */
7065 if (!TREE_INVARIANT (arg))
7068 /* If we're looking at an address of a weak decl, then do not fold. */
7071 if (TREE_CODE (inner) == ADDR_EXPR)
7075 inner = TREE_OPERAND (inner, 0);
7077 while (TREE_CODE (inner) == COMPONENT_REF
7078 || TREE_CODE (inner) == ARRAY_REF);
7079 if (DECL_P (inner) && DECL_WEAK (inner))
7083 /* Otherwise, ARG already has the proper type for the return value. */
7087 /* Fold a call to __builtin_classify_type with argument ARG. */
7090 fold_builtin_classify_type (tree arg)
7093 return build_int_cst (NULL_TREE, no_type_class);
7095 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7098 /* Fold a call to __builtin_strlen with argument ARG. */
7101 fold_builtin_strlen (tree arg)
7103 if (!validate_arg (arg, POINTER_TYPE))
7107 tree len = c_strlen (arg, 0);
7111 /* Convert from the internal "sizetype" type to "size_t". */
7113 len = fold_convert (size_type_node, len);
7121 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7124 fold_builtin_inf (tree type, int warn)
7126 REAL_VALUE_TYPE real;
7128 /* __builtin_inff is intended to be usable to define INFINITY on all
7129 targets. If an infinity is not available, INFINITY expands "to a
7130 positive constant of type float that overflows at translation
7131 time", footnote "In this case, using INFINITY will violate the
7132 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7133 Thus we pedwarn to ensure this constraint violation is
7135 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7136 pedwarn ("target format does not support infinity");
7139 return build_real (type, real);
7142 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7145 fold_builtin_nan (tree arg, tree type, int quiet)
7147 REAL_VALUE_TYPE real;
7150 if (!validate_arg (arg, POINTER_TYPE))
7152 str = c_getstr (arg);
7156 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7159 return build_real (type, real);
7162 /* Return true if the floating point expression T has an integer value.
7163 We also allow +Inf, -Inf and NaN to be considered integer values. */
7166 integer_valued_real_p (tree t)
7168 switch (TREE_CODE (t))
7175 case NON_LVALUE_EXPR:
7176 return integer_valued_real_p (TREE_OPERAND (t, 0));
7181 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
7188 return integer_valued_real_p (TREE_OPERAND (t, 0))
7189 && integer_valued_real_p (TREE_OPERAND (t, 1));
7192 return integer_valued_real_p (TREE_OPERAND (t, 1))
7193 && integer_valued_real_p (TREE_OPERAND (t, 2));
7196 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7200 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7201 if (TREE_CODE (type) == INTEGER_TYPE)
7203 if (TREE_CODE (type) == REAL_TYPE)
7204 return integer_valued_real_p (TREE_OPERAND (t, 0));
7209 switch (builtin_mathfn_code (t))
7211 CASE_FLT_FN (BUILT_IN_CEIL):
7212 CASE_FLT_FN (BUILT_IN_FLOOR):
7213 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7214 CASE_FLT_FN (BUILT_IN_RINT):
7215 CASE_FLT_FN (BUILT_IN_ROUND):
7216 CASE_FLT_FN (BUILT_IN_TRUNC):
7219 CASE_FLT_FN (BUILT_IN_FMIN):
7220 CASE_FLT_FN (BUILT_IN_FMAX):
7221 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7222 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7235 /* FNDECL is assumed to be a builtin where truncation can be propagated
7236 across (for instance floor((double)f) == (double)floorf (f).
7237 Do the transformation for a call with argument ARG. */
7240 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7242 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7244 if (!validate_arg (arg, REAL_TYPE))
7247 /* Integer rounding functions are idempotent. */
7248 if (fcode == builtin_mathfn_code (arg))
7251 /* If argument is already integer valued, and we don't need to worry
7252 about setting errno, there's no need to perform rounding. */
7253 if (! flag_errno_math && integer_valued_real_p (arg))
7258 tree arg0 = strip_float_extensions (arg);
7259 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7260 tree newtype = TREE_TYPE (arg0);
7263 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7264 && (decl = mathfn_built_in (newtype, fcode)))
7265 return fold_convert (ftype,
7266 build_call_expr (decl, 1,
7267 fold_convert (newtype, arg0)));
7272 /* FNDECL is assumed to be builtin which can narrow the FP type of
7273 the argument, for instance lround((double)f) -> lroundf (f).
7274 Do the transformation for a call with argument ARG. */
7277 fold_fixed_mathfn (tree fndecl, tree arg)
7279 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7281 if (!validate_arg (arg, REAL_TYPE))
7284 /* If argument is already integer valued, and we don't need to worry
7285 about setting errno, there's no need to perform rounding. */
7286 if (! flag_errno_math && integer_valued_real_p (arg))
7287 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7291 tree ftype = TREE_TYPE (arg);
7292 tree arg0 = strip_float_extensions (arg);
7293 tree newtype = TREE_TYPE (arg0);
7296 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7297 && (decl = mathfn_built_in (newtype, fcode)))
7298 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7301 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7302 sizeof (long long) == sizeof (long). */
7303 if (TYPE_PRECISION (long_long_integer_type_node)
7304 == TYPE_PRECISION (long_integer_type_node))
7306 tree newfn = NULL_TREE;
7309 CASE_FLT_FN (BUILT_IN_LLCEIL):
7310 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7313 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7314 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7317 CASE_FLT_FN (BUILT_IN_LLROUND):
7318 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7321 CASE_FLT_FN (BUILT_IN_LLRINT):
7322 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7331 tree newcall = build_call_expr(newfn, 1, arg);
7332 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7339 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7340 return type. Return NULL_TREE if no simplification can be made. */
7343 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7347 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7348 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7351 /* Calculate the result when the argument is a constant. */
7352 if (TREE_CODE (arg) == COMPLEX_CST
7353 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7357 if (TREE_CODE (arg) == COMPLEX_EXPR)
7359 tree real = TREE_OPERAND (arg, 0);
7360 tree imag = TREE_OPERAND (arg, 1);
7362 /* If either part is zero, cabs is fabs of the other. */
7363 if (real_zerop (real))
7364 return fold_build1 (ABS_EXPR, type, imag);
7365 if (real_zerop (imag))
7366 return fold_build1 (ABS_EXPR, type, real);
7368 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7369 if (flag_unsafe_math_optimizations
7370 && operand_equal_p (real, imag, OEP_PURE_SAME))
7372 const REAL_VALUE_TYPE sqrt2_trunc
7373 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
7375 return fold_build2 (MULT_EXPR, type,
7376 fold_build1 (ABS_EXPR, type, real),
7377 build_real (type, sqrt2_trunc));
7381 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7382 if (TREE_CODE (arg) == NEGATE_EXPR
7383 || TREE_CODE (arg) == CONJ_EXPR)
7384 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7386 /* Don't do this when optimizing for size. */
7387 if (flag_unsafe_math_optimizations
7388 && optimize && !optimize_size)
7390 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7392 if (sqrtfn != NULL_TREE)
7394 tree rpart, ipart, result;
7396 arg = builtin_save_expr (arg);
7398 rpart = fold_build1 (REALPART_EXPR, type, arg);
7399 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7401 rpart = builtin_save_expr (rpart);
7402 ipart = builtin_save_expr (ipart);
7404 result = fold_build2 (PLUS_EXPR, type,
7405 fold_build2 (MULT_EXPR, type,
7407 fold_build2 (MULT_EXPR, type,
7410 return build_call_expr (sqrtfn, 1, result);
7417 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7418 Return NULL_TREE if no simplification can be made. */
7421 fold_builtin_sqrt (tree arg, tree type)
7424 enum built_in_function fcode;
7427 if (!validate_arg (arg, REAL_TYPE))
7430 /* Calculate the result when the argument is a constant. */
7431 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7434 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7435 fcode = builtin_mathfn_code (arg);
7436 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7438 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7439 arg = fold_build2 (MULT_EXPR, type,
7440 CALL_EXPR_ARG (arg, 0),
7441 build_real (type, dconsthalf));
7442 return build_call_expr (expfn, 1, arg);
7445 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7446 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7448 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7452 tree arg0 = CALL_EXPR_ARG (arg, 0);
7454 /* The inner root was either sqrt or cbrt. */
7455 REAL_VALUE_TYPE dconstroot =
7456 BUILTIN_SQRT_P (fcode) ? dconsthalf : dconstthird;
7458 /* Adjust for the outer root. */
7459 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7460 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7461 tree_root = build_real (type, dconstroot);
7462 return build_call_expr (powfn, 2, arg0, tree_root);
7466 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7467 if (flag_unsafe_math_optimizations
7468 && (fcode == BUILT_IN_POW
7469 || fcode == BUILT_IN_POWF
7470 || fcode == BUILT_IN_POWL))
7472 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7473 tree arg0 = CALL_EXPR_ARG (arg, 0);
7474 tree arg1 = CALL_EXPR_ARG (arg, 1);
7476 if (!tree_expr_nonnegative_p (arg0))
7477 arg0 = build1 (ABS_EXPR, type, arg0);
7478 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7479 build_real (type, dconsthalf));
7480 return build_call_expr (powfn, 2, arg0, narg1);
7486 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7487 Return NULL_TREE if no simplification can be made. */
7490 fold_builtin_cbrt (tree arg, tree type)
7492 const enum built_in_function fcode = builtin_mathfn_code (arg);
7495 if (!validate_arg (arg, REAL_TYPE))
7498 /* Calculate the result when the argument is a constant. */
7499 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7502 if (flag_unsafe_math_optimizations)
7504 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7505 if (BUILTIN_EXPONENT_P (fcode))
7507 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7508 const REAL_VALUE_TYPE third_trunc =
7509 real_value_truncate (TYPE_MODE (type), dconstthird);
7510 arg = fold_build2 (MULT_EXPR, type,
7511 CALL_EXPR_ARG (arg, 0),
7512 build_real (type, third_trunc));
7513 return build_call_expr (expfn, 1, arg);
7516 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7517 if (BUILTIN_SQRT_P (fcode))
7519 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7523 tree arg0 = CALL_EXPR_ARG (arg, 0);
7525 REAL_VALUE_TYPE dconstroot = dconstthird;
7527 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7528 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7529 tree_root = build_real (type, dconstroot);
7530 return build_call_expr (powfn, 2, arg0, tree_root);
7534 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7535 if (BUILTIN_CBRT_P (fcode))
7537 tree arg0 = CALL_EXPR_ARG (arg, 0);
7538 if (tree_expr_nonnegative_p (arg0))
7540 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7545 REAL_VALUE_TYPE dconstroot;
7547 real_arithmetic (&dconstroot, MULT_EXPR, &dconstthird, &dconstthird);
7548 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7549 tree_root = build_real (type, dconstroot);
7550 return build_call_expr (powfn, 2, arg0, tree_root);
7555 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7556 if (fcode == BUILT_IN_POW
7557 || fcode == BUILT_IN_POWF
7558 || fcode == BUILT_IN_POWL)
7560 tree arg00 = CALL_EXPR_ARG (arg, 0);
7561 tree arg01 = CALL_EXPR_ARG (arg, 1);
7562 if (tree_expr_nonnegative_p (arg00))
7564 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7565 const REAL_VALUE_TYPE dconstroot
7566 = real_value_truncate (TYPE_MODE (type), dconstthird);
7567 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7568 build_real (type, dconstroot));
7569 return build_call_expr (powfn, 2, arg00, narg01);
7576 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7577 TYPE is the type of the return value. Return NULL_TREE if no
7578 simplification can be made. */
7581 fold_builtin_cos (tree arg, tree type, tree fndecl)
7585 if (!validate_arg (arg, REAL_TYPE))
7588 /* Calculate the result when the argument is a constant. */
7589 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7592 /* Optimize cos(-x) into cos (x). */
7593 if ((narg = fold_strip_sign_ops (arg)))
7594 return build_call_expr (fndecl, 1, narg);
7599 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7600 Return NULL_TREE if no simplification can be made. */
7603 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7605 if (validate_arg (arg, REAL_TYPE))
7609 /* Calculate the result when the argument is a constant. */
7610 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7613 /* Optimize cosh(-x) into cosh (x). */
7614 if ((narg = fold_strip_sign_ops (arg)))
7615 return build_call_expr (fndecl, 1, narg);
7621 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7622 Return NULL_TREE if no simplification can be made. */
7625 fold_builtin_tan (tree arg, tree type)
7627 enum built_in_function fcode;
7630 if (!validate_arg (arg, REAL_TYPE))
7633 /* Calculate the result when the argument is a constant. */
7634 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7637 /* Optimize tan(atan(x)) = x. */
7638 fcode = builtin_mathfn_code (arg);
7639 if (flag_unsafe_math_optimizations
7640 && (fcode == BUILT_IN_ATAN
7641 || fcode == BUILT_IN_ATANF
7642 || fcode == BUILT_IN_ATANL))
7643 return CALL_EXPR_ARG (arg, 0);
7648 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7649 NULL_TREE if no simplification can be made. */
7652 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7657 if (!validate_arg (arg0, REAL_TYPE)
7658 || !validate_arg (arg1, POINTER_TYPE)
7659 || !validate_arg (arg2, POINTER_TYPE))
7662 type = TREE_TYPE (arg0);
7664 /* Calculate the result when the argument is a constant. */
7665 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7668 /* Canonicalize sincos to cexpi. */
7669 if (!TARGET_C99_FUNCTIONS)
7671 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7675 call = build_call_expr (fn, 1, arg0);
7676 call = builtin_save_expr (call);
7678 return build2 (COMPOUND_EXPR, type,
7679 build2 (MODIFY_EXPR, void_type_node,
7680 build_fold_indirect_ref (arg1),
7681 build1 (IMAGPART_EXPR, type, call)),
7682 build2 (MODIFY_EXPR, void_type_node,
7683 build_fold_indirect_ref (arg2),
7684 build1 (REALPART_EXPR, type, call)));
7687 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7688 NULL_TREE if no simplification can be made. */
7691 fold_builtin_cexp (tree arg0, tree type)
7694 tree realp, imagp, ifn;
7696 if (!validate_arg (arg0, COMPLEX_TYPE))
7699 rtype = TREE_TYPE (TREE_TYPE (arg0));
7701 /* In case we can figure out the real part of arg0 and it is constant zero
7703 if (!TARGET_C99_FUNCTIONS)
7705 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7709 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7710 && real_zerop (realp))
7712 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7713 return build_call_expr (ifn, 1, narg);
7716 /* In case we can easily decompose real and imaginary parts split cexp
7717 to exp (r) * cexpi (i). */
7718 if (flag_unsafe_math_optimizations
7721 tree rfn, rcall, icall;
7723 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7727 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7731 icall = build_call_expr (ifn, 1, imagp);
7732 icall = builtin_save_expr (icall);
7733 rcall = build_call_expr (rfn, 1, realp);
7734 rcall = builtin_save_expr (rcall);
7735 return build2 (COMPLEX_EXPR, type,
7736 build2 (MULT_EXPR, rtype,
7738 build1 (REALPART_EXPR, rtype, icall)),
7739 build2 (MULT_EXPR, rtype,
7741 build1 (IMAGPART_EXPR, rtype, icall)));
7747 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7748 Return NULL_TREE if no simplification can be made. */
7751 fold_builtin_trunc (tree fndecl, tree arg)
7753 if (!validate_arg (arg, REAL_TYPE))
7756 /* Optimize trunc of constant value. */
7757 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7759 REAL_VALUE_TYPE r, x;
7760 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7762 x = TREE_REAL_CST (arg);
7763 real_trunc (&r, TYPE_MODE (type), &x);
7764 return build_real (type, r);
7767 return fold_trunc_transparent_mathfn (fndecl, arg);
7770 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7771 Return NULL_TREE if no simplification can be made. */
7774 fold_builtin_floor (tree fndecl, tree arg)
7776 if (!validate_arg (arg, REAL_TYPE))
7779 /* Optimize floor of constant value. */
7780 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7784 x = TREE_REAL_CST (arg);
7785 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7787 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7790 real_floor (&r, TYPE_MODE (type), &x);
7791 return build_real (type, r);
7795 /* Fold floor (x) where x is nonnegative to trunc (x). */
7796 if (tree_expr_nonnegative_p (arg))
7798 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7800 return build_call_expr (truncfn, 1, arg);
7803 return fold_trunc_transparent_mathfn (fndecl, arg);
7806 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7807 Return NULL_TREE if no simplification can be made. */
7810 fold_builtin_ceil (tree fndecl, tree arg)
7812 if (!validate_arg (arg, REAL_TYPE))
7815 /* Optimize ceil of constant value. */
7816 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7820 x = TREE_REAL_CST (arg);
7821 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7823 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7826 real_ceil (&r, TYPE_MODE (type), &x);
7827 return build_real (type, r);
7831 return fold_trunc_transparent_mathfn (fndecl, arg);
7834 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7835 Return NULL_TREE if no simplification can be made. */
7838 fold_builtin_round (tree fndecl, tree arg)
7840 if (!validate_arg (arg, REAL_TYPE))
7843 /* Optimize round of constant value. */
7844 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7848 x = TREE_REAL_CST (arg);
7849 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7851 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7854 real_round (&r, TYPE_MODE (type), &x);
7855 return build_real (type, r);
7859 return fold_trunc_transparent_mathfn (fndecl, arg);
7862 /* Fold function call to builtin lround, lroundf or lroundl (or the
7863 corresponding long long versions) and other rounding functions. ARG
7864 is the argument to the call. Return NULL_TREE if no simplification
7868 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7870 if (!validate_arg (arg, REAL_TYPE))
7873 /* Optimize lround of constant value. */
7874 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7876 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7878 if (real_isfinite (&x))
7880 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7881 tree ftype = TREE_TYPE (arg);
7882 unsigned HOST_WIDE_INT lo2;
7883 HOST_WIDE_INT hi, lo;
7886 switch (DECL_FUNCTION_CODE (fndecl))
7888 CASE_FLT_FN (BUILT_IN_LFLOOR):
7889 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7890 real_floor (&r, TYPE_MODE (ftype), &x);
7893 CASE_FLT_FN (BUILT_IN_LCEIL):
7894 CASE_FLT_FN (BUILT_IN_LLCEIL):
7895 real_ceil (&r, TYPE_MODE (ftype), &x);
7898 CASE_FLT_FN (BUILT_IN_LROUND):
7899 CASE_FLT_FN (BUILT_IN_LLROUND):
7900 real_round (&r, TYPE_MODE (ftype), &x);
7907 REAL_VALUE_TO_INT (&lo, &hi, r);
7908 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7909 return build_int_cst_wide (itype, lo2, hi);
7913 switch (DECL_FUNCTION_CODE (fndecl))
7915 CASE_FLT_FN (BUILT_IN_LFLOOR):
7916 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7917 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7918 if (tree_expr_nonnegative_p (arg))
7919 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
7925 return fold_fixed_mathfn (fndecl, arg);
7928 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7929 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7930 the argument to the call. Return NULL_TREE if no simplification can
7934 fold_builtin_bitop (tree fndecl, tree arg)
7936 if (!validate_arg (arg, INTEGER_TYPE))
7939 /* Optimize for constant argument. */
7940 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7942 HOST_WIDE_INT hi, width, result;
7943 unsigned HOST_WIDE_INT lo;
7946 type = TREE_TYPE (arg);
7947 width = TYPE_PRECISION (type);
7948 lo = TREE_INT_CST_LOW (arg);
7950 /* Clear all the bits that are beyond the type's precision. */
7951 if (width > HOST_BITS_PER_WIDE_INT)
7953 hi = TREE_INT_CST_HIGH (arg);
7954 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7955 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7960 if (width < HOST_BITS_PER_WIDE_INT)
7961 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7964 switch (DECL_FUNCTION_CODE (fndecl))
7966 CASE_INT_FN (BUILT_IN_FFS):
7968 result = exact_log2 (lo & -lo) + 1;
7970 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7975 CASE_INT_FN (BUILT_IN_CLZ):
7977 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7979 result = width - floor_log2 (lo) - 1;
7980 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7984 CASE_INT_FN (BUILT_IN_CTZ):
7986 result = exact_log2 (lo & -lo);
7988 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7989 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7993 CASE_INT_FN (BUILT_IN_POPCOUNT):
7996 result++, lo &= lo - 1;
7998 result++, hi &= hi - 1;
8001 CASE_INT_FN (BUILT_IN_PARITY):
8004 result++, lo &= lo - 1;
8006 result++, hi &= hi - 1;
8014 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8020 /* Fold function call to builtin_bswap and the long and long long
8021 variants. Return NULL_TREE if no simplification can be made. */
8023 fold_builtin_bswap (tree fndecl, tree arg)
8025 if (! validate_arg (arg, INTEGER_TYPE))
8028 /* Optimize constant value. */
8029 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8031 HOST_WIDE_INT hi, width, r_hi = 0;
8032 unsigned HOST_WIDE_INT lo, r_lo = 0;
8035 type = TREE_TYPE (arg);
8036 width = TYPE_PRECISION (type);
8037 lo = TREE_INT_CST_LOW (arg);
8038 hi = TREE_INT_CST_HIGH (arg);
8040 switch (DECL_FUNCTION_CODE (fndecl))
8042 case BUILT_IN_BSWAP32:
8043 case BUILT_IN_BSWAP64:
8047 for (s = 0; s < width; s += 8)
8049 int d = width - s - 8;
8050 unsigned HOST_WIDE_INT byte;
8052 if (s < HOST_BITS_PER_WIDE_INT)
8053 byte = (lo >> s) & 0xff;
8055 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8057 if (d < HOST_BITS_PER_WIDE_INT)
8060 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8070 if (width < HOST_BITS_PER_WIDE_INT)
8071 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8073 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8079 /* Return true if EXPR is the real constant contained in VALUE. */
8082 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8086 return ((TREE_CODE (expr) == REAL_CST
8087 && !TREE_OVERFLOW (expr)
8088 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8089 || (TREE_CODE (expr) == COMPLEX_CST
8090 && real_dconstp (TREE_REALPART (expr), value)
8091 && real_zerop (TREE_IMAGPART (expr))));
8094 /* A subroutine of fold_builtin to fold the various logarithmic
8095 functions. Return NULL_TREE if no simplification can me made.
8096 FUNC is the corresponding MPFR logarithm function. */
8099 fold_builtin_logarithm (tree fndecl, tree arg,
8100 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8102 if (validate_arg (arg, REAL_TYPE))
8104 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8106 const enum built_in_function fcode = builtin_mathfn_code (arg);
8108 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8109 instead we'll look for 'e' truncated to MODE. So only do
8110 this if flag_unsafe_math_optimizations is set. */
8111 if (flag_unsafe_math_optimizations && func == mpfr_log)
8113 const REAL_VALUE_TYPE e_truncated =
8114 real_value_truncate (TYPE_MODE (type), dconste);
8115 if (real_dconstp (arg, &e_truncated))
8116 return build_real (type, dconst1);
8119 /* Calculate the result when the argument is a constant. */
8120 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8123 /* Special case, optimize logN(expN(x)) = x. */
8124 if (flag_unsafe_math_optimizations
8125 && ((func == mpfr_log
8126 && (fcode == BUILT_IN_EXP
8127 || fcode == BUILT_IN_EXPF
8128 || fcode == BUILT_IN_EXPL))
8129 || (func == mpfr_log2
8130 && (fcode == BUILT_IN_EXP2
8131 || fcode == BUILT_IN_EXP2F
8132 || fcode == BUILT_IN_EXP2L))
8133 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8134 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8136 /* Optimize logN(func()) for various exponential functions. We
8137 want to determine the value "x" and the power "exponent" in
8138 order to transform logN(x**exponent) into exponent*logN(x). */
8139 if (flag_unsafe_math_optimizations)
8141 tree exponent = 0, x = 0;
8145 CASE_FLT_FN (BUILT_IN_EXP):
8146 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8147 x = build_real (type,
8148 real_value_truncate (TYPE_MODE (type), dconste));
8149 exponent = CALL_EXPR_ARG (arg, 0);
8151 CASE_FLT_FN (BUILT_IN_EXP2):
8152 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8153 x = build_real (type, dconst2);
8154 exponent = CALL_EXPR_ARG (arg, 0);
8156 CASE_FLT_FN (BUILT_IN_EXP10):
8157 CASE_FLT_FN (BUILT_IN_POW10):
8158 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8159 x = build_real (type, dconst10);
8160 exponent = CALL_EXPR_ARG (arg, 0);
8162 CASE_FLT_FN (BUILT_IN_SQRT):
8163 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8164 x = CALL_EXPR_ARG (arg, 0);
8165 exponent = build_real (type, dconsthalf);
8167 CASE_FLT_FN (BUILT_IN_CBRT):
8168 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8169 x = CALL_EXPR_ARG (arg, 0);
8170 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8173 CASE_FLT_FN (BUILT_IN_POW):
8174 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8175 x = CALL_EXPR_ARG (arg, 0);
8176 exponent = CALL_EXPR_ARG (arg, 1);
8182 /* Now perform the optimization. */
8185 tree logfn = build_call_expr (fndecl, 1, x);
8186 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8194 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8195 NULL_TREE if no simplification can be made. */
8198 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8200 tree res, narg0, narg1;
8202 if (!validate_arg (arg0, REAL_TYPE)
8203 || !validate_arg (arg1, REAL_TYPE))
8206 /* Calculate the result when the argument is a constant. */
8207 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8210 /* If either argument to hypot has a negate or abs, strip that off.
8211 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8212 narg0 = fold_strip_sign_ops (arg0);
8213 narg1 = fold_strip_sign_ops (arg1);
8216 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8217 narg1 ? narg1 : arg1);
8220 /* If either argument is zero, hypot is fabs of the other. */
8221 if (real_zerop (arg0))
8222 return fold_build1 (ABS_EXPR, type, arg1);
8223 else if (real_zerop (arg1))
8224 return fold_build1 (ABS_EXPR, type, arg0);
8226 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8227 if (flag_unsafe_math_optimizations
8228 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8230 const REAL_VALUE_TYPE sqrt2_trunc
8231 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
8232 return fold_build2 (MULT_EXPR, type,
8233 fold_build1 (ABS_EXPR, type, arg0),
8234 build_real (type, sqrt2_trunc));
8241 /* Fold a builtin function call to pow, powf, or powl. Return
8242 NULL_TREE if no simplification can be made. */
8244 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8248 if (!validate_arg (arg0, REAL_TYPE)
8249 || !validate_arg (arg1, REAL_TYPE))
8252 /* Calculate the result when the argument is a constant. */
8253 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8256 /* Optimize pow(1.0,y) = 1.0. */
8257 if (real_onep (arg0))
8258 return omit_one_operand (type, build_real (type, dconst1), arg1);
8260 if (TREE_CODE (arg1) == REAL_CST
8261 && !TREE_OVERFLOW (arg1))
8263 REAL_VALUE_TYPE cint;
8267 c = TREE_REAL_CST (arg1);
8269 /* Optimize pow(x,0.0) = 1.0. */
8270 if (REAL_VALUES_EQUAL (c, dconst0))
8271 return omit_one_operand (type, build_real (type, dconst1),
8274 /* Optimize pow(x,1.0) = x. */
8275 if (REAL_VALUES_EQUAL (c, dconst1))
8278 /* Optimize pow(x,-1.0) = 1.0/x. */
8279 if (REAL_VALUES_EQUAL (c, dconstm1))
8280 return fold_build2 (RDIV_EXPR, type,
8281 build_real (type, dconst1), arg0);
8283 /* Optimize pow(x,0.5) = sqrt(x). */
8284 if (flag_unsafe_math_optimizations
8285 && REAL_VALUES_EQUAL (c, dconsthalf))
8287 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8289 if (sqrtfn != NULL_TREE)
8290 return build_call_expr (sqrtfn, 1, arg0);
8293 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8294 if (flag_unsafe_math_optimizations)
8296 const REAL_VALUE_TYPE dconstroot
8297 = real_value_truncate (TYPE_MODE (type), dconstthird);
8299 if (REAL_VALUES_EQUAL (c, dconstroot))
8301 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8302 if (cbrtfn != NULL_TREE)
8303 return build_call_expr (cbrtfn, 1, arg0);
8307 /* Check for an integer exponent. */
8308 n = real_to_integer (&c);
8309 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8310 if (real_identical (&c, &cint))
8312 /* Attempt to evaluate pow at compile-time. */
8313 if (TREE_CODE (arg0) == REAL_CST
8314 && !TREE_OVERFLOW (arg0))
8319 x = TREE_REAL_CST (arg0);
8320 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8321 if (flag_unsafe_math_optimizations || !inexact)
8322 return build_real (type, x);
8325 /* Strip sign ops from even integer powers. */
8326 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8328 tree narg0 = fold_strip_sign_ops (arg0);
8330 return build_call_expr (fndecl, 2, narg0, arg1);
8335 if (flag_unsafe_math_optimizations)
8337 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8339 /* Optimize pow(expN(x),y) = expN(x*y). */
8340 if (BUILTIN_EXPONENT_P (fcode))
8342 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8343 tree arg = CALL_EXPR_ARG (arg0, 0);
8344 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8345 return build_call_expr (expfn, 1, arg);
8348 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8349 if (BUILTIN_SQRT_P (fcode))
8351 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8352 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8353 build_real (type, dconsthalf));
8354 return build_call_expr (fndecl, 2, narg0, narg1);
8357 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8358 if (BUILTIN_CBRT_P (fcode))
8360 tree arg = CALL_EXPR_ARG (arg0, 0);
8361 if (tree_expr_nonnegative_p (arg))
8363 const REAL_VALUE_TYPE dconstroot
8364 = real_value_truncate (TYPE_MODE (type), dconstthird);
8365 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8366 build_real (type, dconstroot));
8367 return build_call_expr (fndecl, 2, arg, narg1);
8371 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8372 if (fcode == BUILT_IN_POW
8373 || fcode == BUILT_IN_POWF
8374 || fcode == BUILT_IN_POWL)
8376 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8377 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8378 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8379 return build_call_expr (fndecl, 2, arg00, narg1);
8386 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8387 Return NULL_TREE if no simplification can be made. */
8389 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8390 tree arg0, tree arg1, tree type)
8392 if (!validate_arg (arg0, REAL_TYPE)
8393 || !validate_arg (arg1, INTEGER_TYPE))
8396 /* Optimize pow(1.0,y) = 1.0. */
8397 if (real_onep (arg0))
8398 return omit_one_operand (type, build_real (type, dconst1), arg1);
8400 if (host_integerp (arg1, 0))
8402 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8404 /* Evaluate powi at compile-time. */
8405 if (TREE_CODE (arg0) == REAL_CST
8406 && !TREE_OVERFLOW (arg0))
8409 x = TREE_REAL_CST (arg0);
8410 real_powi (&x, TYPE_MODE (type), &x, c);
8411 return build_real (type, x);
8414 /* Optimize pow(x,0) = 1.0. */
8416 return omit_one_operand (type, build_real (type, dconst1),
8419 /* Optimize pow(x,1) = x. */
8423 /* Optimize pow(x,-1) = 1.0/x. */
8425 return fold_build2 (RDIV_EXPR, type,
8426 build_real (type, dconst1), arg0);
8432 /* A subroutine of fold_builtin to fold the various exponent
8433 functions. Return NULL_TREE if no simplification can be made.
8434 FUNC is the corresponding MPFR exponent function. */
8437 fold_builtin_exponent (tree fndecl, tree arg,
8438 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8440 if (validate_arg (arg, REAL_TYPE))
8442 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8445 /* Calculate the result when the argument is a constant. */
8446 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8449 /* Optimize expN(logN(x)) = x. */
8450 if (flag_unsafe_math_optimizations)
8452 const enum built_in_function fcode = builtin_mathfn_code (arg);
8454 if ((func == mpfr_exp
8455 && (fcode == BUILT_IN_LOG
8456 || fcode == BUILT_IN_LOGF
8457 || fcode == BUILT_IN_LOGL))
8458 || (func == mpfr_exp2
8459 && (fcode == BUILT_IN_LOG2
8460 || fcode == BUILT_IN_LOG2F
8461 || fcode == BUILT_IN_LOG2L))
8462 || (func == mpfr_exp10
8463 && (fcode == BUILT_IN_LOG10
8464 || fcode == BUILT_IN_LOG10F
8465 || fcode == BUILT_IN_LOG10L)))
8466 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8473 /* Return true if VAR is a VAR_DECL or a component thereof. */
8476 var_decl_component_p (tree var)
8479 while (handled_component_p (inner))
8480 inner = TREE_OPERAND (inner, 0);
8481 return SSA_VAR_P (inner);
8484 /* Fold function call to builtin memset. Return
8485 NULL_TREE if no simplification can be made. */
8488 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8491 unsigned HOST_WIDE_INT length, cval;
8493 if (! validate_arg (dest, POINTER_TYPE)
8494 || ! validate_arg (c, INTEGER_TYPE)
8495 || ! validate_arg (len, INTEGER_TYPE))
8498 if (! host_integerp (len, 1))
8501 /* If the LEN parameter is zero, return DEST. */
8502 if (integer_zerop (len))
8503 return omit_one_operand (type, dest, c);
8505 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8510 if (TREE_CODE (var) != ADDR_EXPR)
8513 var = TREE_OPERAND (var, 0);
8514 if (TREE_THIS_VOLATILE (var))
8517 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8518 && !POINTER_TYPE_P (TREE_TYPE (var)))
8521 if (! var_decl_component_p (var))
8524 length = tree_low_cst (len, 1);
8525 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8526 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8530 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8533 if (integer_zerop (c))
8537 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8540 cval = tree_low_cst (c, 1);
8544 cval |= (cval << 31) << 1;
8547 ret = build_int_cst_type (TREE_TYPE (var), cval);
8548 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8552 return omit_one_operand (type, dest, ret);
8555 /* Fold function call to builtin memset. Return
8556 NULL_TREE if no simplification can be made. */
8559 fold_builtin_bzero (tree dest, tree size, bool ignore)
8561 if (! validate_arg (dest, POINTER_TYPE)
8562 || ! validate_arg (size, INTEGER_TYPE))
8568 /* New argument list transforming bzero(ptr x, int y) to
8569 memset(ptr x, int 0, size_t y). This is done this way
8570 so that if it isn't expanded inline, we fallback to
8571 calling bzero instead of memset. */
8573 return fold_builtin_memset (dest, integer_zero_node,
8574 fold_convert (sizetype, size),
8575 void_type_node, ignore);
8578 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8579 NULL_TREE if no simplification can be made.
8580 If ENDP is 0, return DEST (like memcpy).
8581 If ENDP is 1, return DEST+LEN (like mempcpy).
8582 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8583 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8587 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8589 tree destvar, srcvar, expr;
8591 if (! validate_arg (dest, POINTER_TYPE)
8592 || ! validate_arg (src, POINTER_TYPE)
8593 || ! validate_arg (len, INTEGER_TYPE))
8596 /* If the LEN parameter is zero, return DEST. */
8597 if (integer_zerop (len))
8598 return omit_one_operand (type, dest, src);
8600 /* If SRC and DEST are the same (and not volatile), return
8601 DEST{,+LEN,+LEN-1}. */
8602 if (operand_equal_p (src, dest, 0))
8606 tree srctype, desttype;
8609 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8610 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8612 /* Both DEST and SRC must be pointer types.
8613 ??? This is what old code did. Is the testing for pointer types
8616 If either SRC is readonly or length is 1, we can use memcpy. */
8617 if (dest_align && src_align
8618 && (readonly_data_expr (src)
8619 || (host_integerp (len, 1)
8620 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8621 tree_low_cst (len, 1)))))
8623 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8626 return build_call_expr (fn, 3, dest, src, len);
8631 if (!host_integerp (len, 0))
8634 This logic lose for arguments like (type *)malloc (sizeof (type)),
8635 since we strip the casts of up to VOID return value from malloc.
8636 Perhaps we ought to inherit type from non-VOID argument here? */
8639 srctype = TREE_TYPE (TREE_TYPE (src));
8640 desttype = TREE_TYPE (TREE_TYPE (dest));
8641 if (!srctype || !desttype
8642 || !TYPE_SIZE_UNIT (srctype)
8643 || !TYPE_SIZE_UNIT (desttype)
8644 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8645 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8646 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8647 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8650 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8651 < (int) TYPE_ALIGN (desttype)
8652 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8653 < (int) TYPE_ALIGN (srctype)))
8657 dest = builtin_save_expr (dest);
8659 srcvar = build_fold_indirect_ref (src);
8660 if (TREE_THIS_VOLATILE (srcvar))
8662 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8664 /* With memcpy, it is possible to bypass aliasing rules, so without
8665 this check i. e. execute/20060930-2.c would be misoptimized, because
8666 it use conflicting alias set to hold argument for the memcpy call.
8667 This check is probably unnecesary with -fno-strict-aliasing.
8668 Similarly for destvar. See also PR29286. */
8669 if (!var_decl_component_p (srcvar)
8670 /* Accept: memcpy (*char_var, "test", 1); that simplify
8672 || is_gimple_min_invariant (srcvar)
8673 || readonly_data_expr (src))
8676 destvar = build_fold_indirect_ref (dest);
8677 if (TREE_THIS_VOLATILE (destvar))
8679 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8681 if (!var_decl_component_p (destvar))
8684 if (srctype == desttype
8685 || (gimple_in_ssa_p (cfun)
8686 && useless_type_conversion_p (desttype, srctype)))
8688 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8689 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8690 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8691 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8692 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8694 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8695 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8701 if (endp == 0 || endp == 3)
8702 return omit_one_operand (type, dest, expr);
8708 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8711 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8712 dest = fold_convert (type, dest);
8714 dest = omit_one_operand (type, dest, expr);
8718 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8719 If LEN is not NULL, it represents the length of the string to be
8720 copied. Return NULL_TREE if no simplification can be made. */
8723 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8727 if (!validate_arg (dest, POINTER_TYPE)
8728 || !validate_arg (src, POINTER_TYPE))
8731 /* If SRC and DEST are the same (and not volatile), return DEST. */
8732 if (operand_equal_p (src, dest, 0))
8733 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8738 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8744 len = c_strlen (src, 1);
8745 if (! len || TREE_SIDE_EFFECTS (len))
8749 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8750 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8751 build_call_expr (fn, 3, dest, src, len));
8754 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8755 If SLEN is not NULL, it represents the length of the source string.
8756 Return NULL_TREE if no simplification can be made. */
8759 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8763 if (!validate_arg (dest, POINTER_TYPE)
8764 || !validate_arg (src, POINTER_TYPE)
8765 || !validate_arg (len, INTEGER_TYPE))
8768 /* If the LEN parameter is zero, return DEST. */
8769 if (integer_zerop (len))
8770 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8772 /* We can't compare slen with len as constants below if len is not a
8774 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8778 slen = c_strlen (src, 1);
8780 /* Now, we must be passed a constant src ptr parameter. */
8781 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8784 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8786 /* We do not support simplification of this case, though we do
8787 support it when expanding trees into RTL. */
8788 /* FIXME: generate a call to __builtin_memset. */
8789 if (tree_int_cst_lt (slen, len))
8792 /* OK transform into builtin memcpy. */
8793 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8796 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8797 build_call_expr (fn, 3, dest, src, len));
8800 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8801 arguments to the call, and TYPE is its return type.
8802 Return NULL_TREE if no simplification can be made. */
8805 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8807 if (!validate_arg (arg1, POINTER_TYPE)
8808 || !validate_arg (arg2, INTEGER_TYPE)
8809 || !validate_arg (len, INTEGER_TYPE))
8815 if (TREE_CODE (arg2) != INTEGER_CST
8816 || !host_integerp (len, 1))
8819 p1 = c_getstr (arg1);
8820 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8826 if (target_char_cast (arg2, &c))
8829 r = memchr (p1, c, tree_low_cst (len, 1));
8832 return build_int_cst (TREE_TYPE (arg1), 0);
8834 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8836 return fold_convert (type, tem);
8842 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8843 Return NULL_TREE if no simplification can be made. */
8846 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8848 const char *p1, *p2;
8850 if (!validate_arg (arg1, POINTER_TYPE)
8851 || !validate_arg (arg2, POINTER_TYPE)
8852 || !validate_arg (len, INTEGER_TYPE))
8855 /* If the LEN parameter is zero, return zero. */
8856 if (integer_zerop (len))
8857 return omit_two_operands (integer_type_node, integer_zero_node,
8860 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8861 if (operand_equal_p (arg1, arg2, 0))
8862 return omit_one_operand (integer_type_node, integer_zero_node, len);
8864 p1 = c_getstr (arg1);
8865 p2 = c_getstr (arg2);
8867 /* If all arguments are constant, and the value of len is not greater
8868 than the lengths of arg1 and arg2, evaluate at compile-time. */
8869 if (host_integerp (len, 1) && p1 && p2
8870 && compare_tree_int (len, strlen (p1) + 1) <= 0
8871 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8873 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8876 return integer_one_node;
8878 return integer_minus_one_node;
8880 return integer_zero_node;
8883 /* If len parameter is one, return an expression corresponding to
8884 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8885 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8887 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8888 tree cst_uchar_ptr_node
8889 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8891 tree ind1 = fold_convert (integer_type_node,
8892 build1 (INDIRECT_REF, cst_uchar_node,
8893 fold_convert (cst_uchar_ptr_node,
8895 tree ind2 = fold_convert (integer_type_node,
8896 build1 (INDIRECT_REF, cst_uchar_node,
8897 fold_convert (cst_uchar_ptr_node,
8899 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
8905 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8906 Return NULL_TREE if no simplification can be made. */
8909 fold_builtin_strcmp (tree arg1, tree arg2)
8911 const char *p1, *p2;
8913 if (!validate_arg (arg1, POINTER_TYPE)
8914 || !validate_arg (arg2, POINTER_TYPE))
8917 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8918 if (operand_equal_p (arg1, arg2, 0))
8919 return integer_zero_node;
8921 p1 = c_getstr (arg1);
8922 p2 = c_getstr (arg2);
8926 const int i = strcmp (p1, p2);
8928 return integer_minus_one_node;
8930 return integer_one_node;
8932 return integer_zero_node;
8935 /* If the second arg is "", return *(const unsigned char*)arg1. */
8936 if (p2 && *p2 == '\0')
8938 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8939 tree cst_uchar_ptr_node
8940 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8942 return fold_convert (integer_type_node,
8943 build1 (INDIRECT_REF, cst_uchar_node,
8944 fold_convert (cst_uchar_ptr_node,
8948 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8949 if (p1 && *p1 == '\0')
8951 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8952 tree cst_uchar_ptr_node
8953 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8955 tree temp = fold_convert (integer_type_node,
8956 build1 (INDIRECT_REF, cst_uchar_node,
8957 fold_convert (cst_uchar_ptr_node,
8959 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
8965 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8966 Return NULL_TREE if no simplification can be made. */
8969 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
8971 const char *p1, *p2;
8973 if (!validate_arg (arg1, POINTER_TYPE)
8974 || !validate_arg (arg2, POINTER_TYPE)
8975 || !validate_arg (len, INTEGER_TYPE))
8978 /* If the LEN parameter is zero, return zero. */
8979 if (integer_zerop (len))
8980 return omit_two_operands (integer_type_node, integer_zero_node,
8983 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8984 if (operand_equal_p (arg1, arg2, 0))
8985 return omit_one_operand (integer_type_node, integer_zero_node, len);
8987 p1 = c_getstr (arg1);
8988 p2 = c_getstr (arg2);
8990 if (host_integerp (len, 1) && p1 && p2)
8992 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8994 return integer_one_node;
8996 return integer_minus_one_node;
8998 return integer_zero_node;
9001 /* If the second arg is "", and the length is greater than zero,
9002 return *(const unsigned char*)arg1. */
9003 if (p2 && *p2 == '\0'
9004 && TREE_CODE (len) == INTEGER_CST
9005 && tree_int_cst_sgn (len) == 1)
9007 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9008 tree cst_uchar_ptr_node
9009 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9011 return fold_convert (integer_type_node,
9012 build1 (INDIRECT_REF, cst_uchar_node,
9013 fold_convert (cst_uchar_ptr_node,
9017 /* If the first arg is "", and the length is greater than zero,
9018 return -*(const unsigned char*)arg2. */
9019 if (p1 && *p1 == '\0'
9020 && TREE_CODE (len) == INTEGER_CST
9021 && tree_int_cst_sgn (len) == 1)
9023 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9024 tree cst_uchar_ptr_node
9025 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9027 tree temp = fold_convert (integer_type_node,
9028 build1 (INDIRECT_REF, cst_uchar_node,
9029 fold_convert (cst_uchar_ptr_node,
9031 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9034 /* If len parameter is one, return an expression corresponding to
9035 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9036 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9038 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9039 tree cst_uchar_ptr_node
9040 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9042 tree ind1 = fold_convert (integer_type_node,
9043 build1 (INDIRECT_REF, cst_uchar_node,
9044 fold_convert (cst_uchar_ptr_node,
9046 tree ind2 = fold_convert (integer_type_node,
9047 build1 (INDIRECT_REF, cst_uchar_node,
9048 fold_convert (cst_uchar_ptr_node,
9050 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9056 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9057 ARG. Return NULL_TREE if no simplification can be made. */
9060 fold_builtin_signbit (tree arg, tree type)
9064 if (!validate_arg (arg, REAL_TYPE))
9067 /* If ARG is a compile-time constant, determine the result. */
9068 if (TREE_CODE (arg) == REAL_CST
9069 && !TREE_OVERFLOW (arg))
9073 c = TREE_REAL_CST (arg);
9074 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9075 return fold_convert (type, temp);
9078 /* If ARG is non-negative, the result is always zero. */
9079 if (tree_expr_nonnegative_p (arg))
9080 return omit_one_operand (type, integer_zero_node, arg);
9082 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9083 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9084 return fold_build2 (LT_EXPR, type, arg,
9085 build_real (TREE_TYPE (arg), dconst0));
9090 /* Fold function call to builtin copysign, copysignf or copysignl with
9091 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9095 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9099 if (!validate_arg (arg1, REAL_TYPE)
9100 || !validate_arg (arg2, REAL_TYPE))
9103 /* copysign(X,X) is X. */
9104 if (operand_equal_p (arg1, arg2, 0))
9105 return fold_convert (type, arg1);
9107 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9108 if (TREE_CODE (arg1) == REAL_CST
9109 && TREE_CODE (arg2) == REAL_CST
9110 && !TREE_OVERFLOW (arg1)
9111 && !TREE_OVERFLOW (arg2))
9113 REAL_VALUE_TYPE c1, c2;
9115 c1 = TREE_REAL_CST (arg1);
9116 c2 = TREE_REAL_CST (arg2);
9117 /* c1.sign := c2.sign. */
9118 real_copysign (&c1, &c2);
9119 return build_real (type, c1);
9122 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9123 Remember to evaluate Y for side-effects. */
9124 if (tree_expr_nonnegative_p (arg2))
9125 return omit_one_operand (type,
9126 fold_build1 (ABS_EXPR, type, arg1),
9129 /* Strip sign changing operations for the first argument. */
9130 tem = fold_strip_sign_ops (arg1);
9132 return build_call_expr (fndecl, 2, tem, arg2);
9137 /* Fold a call to builtin isascii with argument ARG. */
9140 fold_builtin_isascii (tree arg)
9142 if (!validate_arg (arg, INTEGER_TYPE))
9146 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9147 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9148 build_int_cst (NULL_TREE,
9149 ~ (unsigned HOST_WIDE_INT) 0x7f));
9150 return fold_build2 (EQ_EXPR, integer_type_node,
9151 arg, integer_zero_node);
9155 /* Fold a call to builtin toascii with argument ARG. */
9158 fold_builtin_toascii (tree arg)
9160 if (!validate_arg (arg, INTEGER_TYPE))
9163 /* Transform toascii(c) -> (c & 0x7f). */
9164 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9165 build_int_cst (NULL_TREE, 0x7f));
9168 /* Fold a call to builtin isdigit with argument ARG. */
9171 fold_builtin_isdigit (tree arg)
9173 if (!validate_arg (arg, INTEGER_TYPE))
9177 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9178 /* According to the C standard, isdigit is unaffected by locale.
9179 However, it definitely is affected by the target character set. */
9180 unsigned HOST_WIDE_INT target_digit0
9181 = lang_hooks.to_target_charset ('0');
9183 if (target_digit0 == 0)
9186 arg = fold_convert (unsigned_type_node, arg);
9187 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9188 build_int_cst (unsigned_type_node, target_digit0));
9189 return fold_build2 (LE_EXPR, integer_type_node, arg,
9190 build_int_cst (unsigned_type_node, 9));
9194 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9197 fold_builtin_fabs (tree arg, tree type)
9199 if (!validate_arg (arg, REAL_TYPE))
9202 arg = fold_convert (type, arg);
9203 if (TREE_CODE (arg) == REAL_CST)
9204 return fold_abs_const (arg, type);
9205 return fold_build1 (ABS_EXPR, type, arg);
9208 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9211 fold_builtin_abs (tree arg, tree type)
9213 if (!validate_arg (arg, INTEGER_TYPE))
9216 arg = fold_convert (type, arg);
9217 if (TREE_CODE (arg) == INTEGER_CST)
9218 return fold_abs_const (arg, type);
9219 return fold_build1 (ABS_EXPR, type, arg);
9222 /* Fold a call to builtin fmin or fmax. */
9225 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9227 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9229 /* Calculate the result when the argument is a constant. */
9230 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9235 /* If either argument is NaN, return the other one. Avoid the
9236 transformation if we get (and honor) a signalling NaN. Using
9237 omit_one_operand() ensures we create a non-lvalue. */
9238 if (TREE_CODE (arg0) == REAL_CST
9239 && real_isnan (&TREE_REAL_CST (arg0))
9240 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9241 || ! TREE_REAL_CST (arg0).signalling))
9242 return omit_one_operand (type, arg1, arg0);
9243 if (TREE_CODE (arg1) == REAL_CST
9244 && real_isnan (&TREE_REAL_CST (arg1))
9245 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9246 || ! TREE_REAL_CST (arg1).signalling))
9247 return omit_one_operand (type, arg0, arg1);
9249 /* Transform fmin/fmax(x,x) -> x. */
9250 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9251 return omit_one_operand (type, arg0, arg1);
9253 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9254 functions to return the numeric arg if the other one is NaN.
9255 These tree codes don't honor that, so only transform if
9256 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9257 handled, so we don't have to worry about it either. */
9258 if (flag_finite_math_only)
9259 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9260 fold_convert (type, arg0),
9261 fold_convert (type, arg1));
9266 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9269 fold_builtin_carg (tree arg, tree type)
9271 if (validate_arg (arg, COMPLEX_TYPE))
9273 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9277 tree new_arg = builtin_save_expr (arg);
9278 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9279 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9280 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9287 /* Fold a call to builtin logb/ilogb. */
9290 fold_builtin_logb (tree arg, tree rettype)
9292 if (! validate_arg (arg, REAL_TYPE))
9297 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9299 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9305 /* If arg is Inf or NaN and we're logb, return it. */
9306 if (TREE_CODE (rettype) == REAL_TYPE)
9307 return fold_convert (rettype, arg);
9308 /* Fall through... */
9310 /* Zero may set errno and/or raise an exception for logb, also
9311 for ilogb we don't know FP_ILOGB0. */
9314 /* For normal numbers, proceed iff radix == 2. In GCC,
9315 normalized significands are in the range [0.5, 1.0). We
9316 want the exponent as if they were [1.0, 2.0) so get the
9317 exponent and subtract 1. */
9318 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9319 return fold_convert (rettype, build_int_cst (NULL_TREE,
9320 REAL_EXP (value)-1));
9328 /* Fold a call to builtin significand, if radix == 2. */
9331 fold_builtin_significand (tree arg, tree rettype)
9333 if (! validate_arg (arg, REAL_TYPE))
9338 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9340 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9347 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9348 return fold_convert (rettype, arg);
9350 /* For normal numbers, proceed iff radix == 2. */
9351 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9353 REAL_VALUE_TYPE result = *value;
9354 /* In GCC, normalized significands are in the range [0.5,
9355 1.0). We want them to be [1.0, 2.0) so set the
9357 SET_REAL_EXP (&result, 1);
9358 return build_real (rettype, result);
9367 /* Fold a call to builtin frexp, we can assume the base is 2. */
9370 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9372 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9377 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9380 arg1 = build_fold_indirect_ref (arg1);
9382 /* Proceed if a valid pointer type was passed in. */
9383 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9385 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9391 /* For +-0, return (*exp = 0, +-0). */
9392 exp = integer_zero_node;
9397 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9398 return omit_one_operand (rettype, arg0, arg1);
9401 /* Since the frexp function always expects base 2, and in
9402 GCC normalized significands are already in the range
9403 [0.5, 1.0), we have exactly what frexp wants. */
9404 REAL_VALUE_TYPE frac_rvt = *value;
9405 SET_REAL_EXP (&frac_rvt, 0);
9406 frac = build_real (rettype, frac_rvt);
9407 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9414 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9415 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9416 TREE_SIDE_EFFECTS (arg1) = 1;
9417 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9423 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9424 then we can assume the base is two. If it's false, then we have to
9425 check the mode of the TYPE parameter in certain cases. */
9428 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9430 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9435 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9436 if (real_zerop (arg0) || integer_zerop (arg1)
9437 || (TREE_CODE (arg0) == REAL_CST
9438 && !real_isfinite (&TREE_REAL_CST (arg0))))
9439 return omit_one_operand (type, arg0, arg1);
9441 /* If both arguments are constant, then try to evaluate it. */
9442 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9443 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9444 && host_integerp (arg1, 0))
9446 /* Bound the maximum adjustment to twice the range of the
9447 mode's valid exponents. Use abs to ensure the range is
9448 positive as a sanity check. */
9449 const long max_exp_adj = 2 *
9450 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9451 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9453 /* Get the user-requested adjustment. */
9454 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9456 /* The requested adjustment must be inside this range. This
9457 is a preliminary cap to avoid things like overflow, we
9458 may still fail to compute the result for other reasons. */
9459 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9461 REAL_VALUE_TYPE initial_result;
9463 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9465 /* Ensure we didn't overflow. */
9466 if (! real_isinf (&initial_result))
9468 const REAL_VALUE_TYPE trunc_result
9469 = real_value_truncate (TYPE_MODE (type), initial_result);
9471 /* Only proceed if the target mode can hold the
9473 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9474 return build_real (type, trunc_result);
9483 /* Fold a call to builtin modf. */
9486 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9488 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9493 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9496 arg1 = build_fold_indirect_ref (arg1);
9498 /* Proceed if a valid pointer type was passed in. */
9499 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9501 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9502 REAL_VALUE_TYPE trunc, frac;
9508 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9509 trunc = frac = *value;
9512 /* For +-Inf, return (*arg1 = arg0, +-0). */
9514 frac.sign = value->sign;
9518 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9519 real_trunc (&trunc, VOIDmode, value);
9520 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9521 /* If the original number was negative and already
9522 integral, then the fractional part is -0.0. */
9523 if (value->sign && frac.cl == rvc_zero)
9524 frac.sign = value->sign;
9528 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9529 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9530 build_real (rettype, trunc));
9531 TREE_SIDE_EFFECTS (arg1) = 1;
9532 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9533 build_real (rettype, frac));
9539 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9540 ARG is the argument for the call. */
9543 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9545 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9548 if (!validate_arg (arg, REAL_TYPE))
9550 error ("non-floating-point argument to function %qs",
9551 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9552 return error_mark_node;
9555 switch (builtin_index)
9557 case BUILT_IN_ISINF:
9558 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9559 return omit_one_operand (type, integer_zero_node, arg);
9561 if (TREE_CODE (arg) == REAL_CST)
9563 r = TREE_REAL_CST (arg);
9564 if (real_isinf (&r))
9565 return real_compare (GT_EXPR, &r, &dconst0)
9566 ? integer_one_node : integer_minus_one_node;
9568 return integer_zero_node;
9573 case BUILT_IN_ISFINITE:
9574 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9575 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9576 return omit_one_operand (type, integer_one_node, arg);
9578 if (TREE_CODE (arg) == REAL_CST)
9580 r = TREE_REAL_CST (arg);
9581 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9586 case BUILT_IN_ISNAN:
9587 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9588 return omit_one_operand (type, integer_zero_node, arg);
9590 if (TREE_CODE (arg) == REAL_CST)
9592 r = TREE_REAL_CST (arg);
9593 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9596 arg = builtin_save_expr (arg);
9597 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9604 /* Fold a call to an unordered comparison function such as
9605 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9606 being called and ARG0 and ARG1 are the arguments for the call.
9607 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9608 the opposite of the desired result. UNORDERED_CODE is used
9609 for modes that can hold NaNs and ORDERED_CODE is used for
9613 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9614 enum tree_code unordered_code,
9615 enum tree_code ordered_code)
9617 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9618 enum tree_code code;
9620 enum tree_code code0, code1;
9621 tree cmp_type = NULL_TREE;
9623 type0 = TREE_TYPE (arg0);
9624 type1 = TREE_TYPE (arg1);
9626 code0 = TREE_CODE (type0);
9627 code1 = TREE_CODE (type1);
9629 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9630 /* Choose the wider of two real types. */
9631 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9633 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9635 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9639 error ("non-floating-point argument to function %qs",
9640 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9641 return error_mark_node;
9644 arg0 = fold_convert (cmp_type, arg0);
9645 arg1 = fold_convert (cmp_type, arg1);
9647 if (unordered_code == UNORDERED_EXPR)
9649 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9650 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9651 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9654 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9656 return fold_build1 (TRUTH_NOT_EXPR, type,
9657 fold_build2 (code, type, arg0, arg1));
9660 /* Fold a call to built-in function FNDECL with 0 arguments.
9661 IGNORE is true if the result of the function call is ignored. This
9662 function returns NULL_TREE if no simplification was possible. */
9665 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9667 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9668 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9671 CASE_FLT_FN (BUILT_IN_INF):
9672 case BUILT_IN_INFD32:
9673 case BUILT_IN_INFD64:
9674 case BUILT_IN_INFD128:
9675 return fold_builtin_inf (type, true);
9677 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9678 return fold_builtin_inf (type, false);
9680 case BUILT_IN_CLASSIFY_TYPE:
9681 return fold_builtin_classify_type (NULL_TREE);
9689 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9690 IGNORE is true if the result of the function call is ignored. This
9691 function returns NULL_TREE if no simplification was possible. */
9694 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9696 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9697 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9701 case BUILT_IN_CONSTANT_P:
9703 tree val = fold_builtin_constant_p (arg0);
9705 /* Gimplification will pull the CALL_EXPR for the builtin out of
9706 an if condition. When not optimizing, we'll not CSE it back.
9707 To avoid link error types of regressions, return false now. */
9708 if (!val && !optimize)
9709 val = integer_zero_node;
9714 case BUILT_IN_CLASSIFY_TYPE:
9715 return fold_builtin_classify_type (arg0);
9717 case BUILT_IN_STRLEN:
9718 return fold_builtin_strlen (arg0);
9720 CASE_FLT_FN (BUILT_IN_FABS):
9721 return fold_builtin_fabs (arg0, type);
9725 case BUILT_IN_LLABS:
9726 case BUILT_IN_IMAXABS:
9727 return fold_builtin_abs (arg0, type);
9729 CASE_FLT_FN (BUILT_IN_CONJ):
9730 if (validate_arg (arg0, COMPLEX_TYPE))
9731 return fold_build1 (CONJ_EXPR, type, arg0);
9734 CASE_FLT_FN (BUILT_IN_CREAL):
9735 if (validate_arg (arg0, COMPLEX_TYPE))
9736 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
9739 CASE_FLT_FN (BUILT_IN_CIMAG):
9740 if (validate_arg (arg0, COMPLEX_TYPE))
9741 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9744 CASE_FLT_FN (BUILT_IN_CCOS):
9745 CASE_FLT_FN (BUILT_IN_CCOSH):
9746 /* These functions are "even", i.e. f(x) == f(-x). */
9747 if (validate_arg (arg0, COMPLEX_TYPE))
9749 tree narg = fold_strip_sign_ops (arg0);
9751 return build_call_expr (fndecl, 1, narg);
9755 CASE_FLT_FN (BUILT_IN_CABS):
9756 return fold_builtin_cabs (arg0, type, fndecl);
9758 CASE_FLT_FN (BUILT_IN_CARG):
9759 return fold_builtin_carg (arg0, type);
9761 CASE_FLT_FN (BUILT_IN_SQRT):
9762 return fold_builtin_sqrt (arg0, type);
9764 CASE_FLT_FN (BUILT_IN_CBRT):
9765 return fold_builtin_cbrt (arg0, type);
9767 CASE_FLT_FN (BUILT_IN_ASIN):
9768 if (validate_arg (arg0, REAL_TYPE))
9769 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9770 &dconstm1, &dconst1, true);
9773 CASE_FLT_FN (BUILT_IN_ACOS):
9774 if (validate_arg (arg0, REAL_TYPE))
9775 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9776 &dconstm1, &dconst1, true);
9779 CASE_FLT_FN (BUILT_IN_ATAN):
9780 if (validate_arg (arg0, REAL_TYPE))
9781 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9784 CASE_FLT_FN (BUILT_IN_ASINH):
9785 if (validate_arg (arg0, REAL_TYPE))
9786 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9789 CASE_FLT_FN (BUILT_IN_ACOSH):
9790 if (validate_arg (arg0, REAL_TYPE))
9791 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9792 &dconst1, NULL, true);
9795 CASE_FLT_FN (BUILT_IN_ATANH):
9796 if (validate_arg (arg0, REAL_TYPE))
9797 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9798 &dconstm1, &dconst1, false);
9801 CASE_FLT_FN (BUILT_IN_SIN):
9802 if (validate_arg (arg0, REAL_TYPE))
9803 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9806 CASE_FLT_FN (BUILT_IN_COS):
9807 return fold_builtin_cos (arg0, type, fndecl);
9810 CASE_FLT_FN (BUILT_IN_TAN):
9811 return fold_builtin_tan (arg0, type);
9813 CASE_FLT_FN (BUILT_IN_CEXP):
9814 return fold_builtin_cexp (arg0, type);
9816 CASE_FLT_FN (BUILT_IN_CEXPI):
9817 if (validate_arg (arg0, REAL_TYPE))
9818 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9821 CASE_FLT_FN (BUILT_IN_SINH):
9822 if (validate_arg (arg0, REAL_TYPE))
9823 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9826 CASE_FLT_FN (BUILT_IN_COSH):
9827 return fold_builtin_cosh (arg0, type, fndecl);
9829 CASE_FLT_FN (BUILT_IN_TANH):
9830 if (validate_arg (arg0, REAL_TYPE))
9831 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9834 CASE_FLT_FN (BUILT_IN_ERF):
9835 if (validate_arg (arg0, REAL_TYPE))
9836 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9839 CASE_FLT_FN (BUILT_IN_ERFC):
9840 if (validate_arg (arg0, REAL_TYPE))
9841 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9844 CASE_FLT_FN (BUILT_IN_TGAMMA):
9845 if (validate_arg (arg0, REAL_TYPE))
9846 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9849 CASE_FLT_FN (BUILT_IN_EXP):
9850 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
9852 CASE_FLT_FN (BUILT_IN_EXP2):
9853 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
9855 CASE_FLT_FN (BUILT_IN_EXP10):
9856 CASE_FLT_FN (BUILT_IN_POW10):
9857 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
9859 CASE_FLT_FN (BUILT_IN_EXPM1):
9860 if (validate_arg (arg0, REAL_TYPE))
9861 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9864 CASE_FLT_FN (BUILT_IN_LOG):
9865 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
9867 CASE_FLT_FN (BUILT_IN_LOG2):
9868 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
9870 CASE_FLT_FN (BUILT_IN_LOG10):
9871 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
9873 CASE_FLT_FN (BUILT_IN_LOG1P):
9874 if (validate_arg (arg0, REAL_TYPE))
9875 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9876 &dconstm1, NULL, false);
9879 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9880 CASE_FLT_FN (BUILT_IN_J0):
9881 if (validate_arg (arg0, REAL_TYPE))
9882 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9886 CASE_FLT_FN (BUILT_IN_J1):
9887 if (validate_arg (arg0, REAL_TYPE))
9888 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9892 CASE_FLT_FN (BUILT_IN_Y0):
9893 if (validate_arg (arg0, REAL_TYPE))
9894 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9895 &dconst0, NULL, false);
9898 CASE_FLT_FN (BUILT_IN_Y1):
9899 if (validate_arg (arg0, REAL_TYPE))
9900 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9901 &dconst0, NULL, false);
9905 CASE_FLT_FN (BUILT_IN_NAN):
9906 case BUILT_IN_NAND32:
9907 case BUILT_IN_NAND64:
9908 case BUILT_IN_NAND128:
9909 return fold_builtin_nan (arg0, type, true);
9911 CASE_FLT_FN (BUILT_IN_NANS):
9912 return fold_builtin_nan (arg0, type, false);
9914 CASE_FLT_FN (BUILT_IN_FLOOR):
9915 return fold_builtin_floor (fndecl, arg0);
9917 CASE_FLT_FN (BUILT_IN_CEIL):
9918 return fold_builtin_ceil (fndecl, arg0);
9920 CASE_FLT_FN (BUILT_IN_TRUNC):
9921 return fold_builtin_trunc (fndecl, arg0);
9923 CASE_FLT_FN (BUILT_IN_ROUND):
9924 return fold_builtin_round (fndecl, arg0);
9926 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9927 CASE_FLT_FN (BUILT_IN_RINT):
9928 return fold_trunc_transparent_mathfn (fndecl, arg0);
9930 CASE_FLT_FN (BUILT_IN_LCEIL):
9931 CASE_FLT_FN (BUILT_IN_LLCEIL):
9932 CASE_FLT_FN (BUILT_IN_LFLOOR):
9933 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9934 CASE_FLT_FN (BUILT_IN_LROUND):
9935 CASE_FLT_FN (BUILT_IN_LLROUND):
9936 return fold_builtin_int_roundingfn (fndecl, arg0);
9938 CASE_FLT_FN (BUILT_IN_LRINT):
9939 CASE_FLT_FN (BUILT_IN_LLRINT):
9940 return fold_fixed_mathfn (fndecl, arg0);
9942 case BUILT_IN_BSWAP32:
9943 case BUILT_IN_BSWAP64:
9944 return fold_builtin_bswap (fndecl, arg0);
9946 CASE_INT_FN (BUILT_IN_FFS):
9947 CASE_INT_FN (BUILT_IN_CLZ):
9948 CASE_INT_FN (BUILT_IN_CTZ):
9949 CASE_INT_FN (BUILT_IN_POPCOUNT):
9950 CASE_INT_FN (BUILT_IN_PARITY):
9951 return fold_builtin_bitop (fndecl, arg0);
9953 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9954 return fold_builtin_signbit (arg0, type);
9956 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9957 return fold_builtin_significand (arg0, type);
9959 CASE_FLT_FN (BUILT_IN_ILOGB):
9960 CASE_FLT_FN (BUILT_IN_LOGB):
9961 return fold_builtin_logb (arg0, type);
9963 case BUILT_IN_ISASCII:
9964 return fold_builtin_isascii (arg0);
9966 case BUILT_IN_TOASCII:
9967 return fold_builtin_toascii (arg0);
9969 case BUILT_IN_ISDIGIT:
9970 return fold_builtin_isdigit (arg0);
9972 CASE_FLT_FN (BUILT_IN_FINITE):
9973 case BUILT_IN_FINITED32:
9974 case BUILT_IN_FINITED64:
9975 case BUILT_IN_FINITED128:
9976 case BUILT_IN_ISFINITE:
9977 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
9979 CASE_FLT_FN (BUILT_IN_ISINF):
9980 case BUILT_IN_ISINFD32:
9981 case BUILT_IN_ISINFD64:
9982 case BUILT_IN_ISINFD128:
9983 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
9985 CASE_FLT_FN (BUILT_IN_ISNAN):
9986 case BUILT_IN_ISNAND32:
9987 case BUILT_IN_ISNAND64:
9988 case BUILT_IN_ISNAND128:
9989 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
9991 case BUILT_IN_PRINTF:
9992 case BUILT_IN_PRINTF_UNLOCKED:
9993 case BUILT_IN_VPRINTF:
9994 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10004 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10005 IGNORE is true if the result of the function call is ignored. This
10006 function returns NULL_TREE if no simplification was possible. */
10009 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10011 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10012 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10016 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10017 CASE_FLT_FN (BUILT_IN_JN):
10018 if (validate_arg (arg0, INTEGER_TYPE)
10019 && validate_arg (arg1, REAL_TYPE))
10020 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10023 CASE_FLT_FN (BUILT_IN_YN):
10024 if (validate_arg (arg0, INTEGER_TYPE)
10025 && validate_arg (arg1, REAL_TYPE))
10026 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10030 CASE_FLT_FN (BUILT_IN_DREM):
10031 CASE_FLT_FN (BUILT_IN_REMAINDER):
10032 if (validate_arg (arg0, REAL_TYPE)
10033 && validate_arg(arg1, REAL_TYPE))
10034 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10037 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10038 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10039 if (validate_arg (arg0, REAL_TYPE)
10040 && validate_arg(arg1, POINTER_TYPE))
10041 return do_mpfr_lgamma_r (arg0, arg1, type);
10045 CASE_FLT_FN (BUILT_IN_ATAN2):
10046 if (validate_arg (arg0, REAL_TYPE)
10047 && validate_arg(arg1, REAL_TYPE))
10048 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10051 CASE_FLT_FN (BUILT_IN_FDIM):
10052 if (validate_arg (arg0, REAL_TYPE)
10053 && validate_arg(arg1, REAL_TYPE))
10054 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10057 CASE_FLT_FN (BUILT_IN_HYPOT):
10058 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10060 CASE_FLT_FN (BUILT_IN_LDEXP):
10061 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10062 CASE_FLT_FN (BUILT_IN_SCALBN):
10063 CASE_FLT_FN (BUILT_IN_SCALBLN):
10064 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10066 CASE_FLT_FN (BUILT_IN_FREXP):
10067 return fold_builtin_frexp (arg0, arg1, type);
10069 CASE_FLT_FN (BUILT_IN_MODF):
10070 return fold_builtin_modf (arg0, arg1, type);
10072 case BUILT_IN_BZERO:
10073 return fold_builtin_bzero (arg0, arg1, ignore);
10075 case BUILT_IN_FPUTS:
10076 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10078 case BUILT_IN_FPUTS_UNLOCKED:
10079 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10081 case BUILT_IN_STRSTR:
10082 return fold_builtin_strstr (arg0, arg1, type);
10084 case BUILT_IN_STRCAT:
10085 return fold_builtin_strcat (arg0, arg1);
10087 case BUILT_IN_STRSPN:
10088 return fold_builtin_strspn (arg0, arg1);
10090 case BUILT_IN_STRCSPN:
10091 return fold_builtin_strcspn (arg0, arg1);
10093 case BUILT_IN_STRCHR:
10094 case BUILT_IN_INDEX:
10095 return fold_builtin_strchr (arg0, arg1, type);
10097 case BUILT_IN_STRRCHR:
10098 case BUILT_IN_RINDEX:
10099 return fold_builtin_strrchr (arg0, arg1, type);
10101 case BUILT_IN_STRCPY:
10102 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10104 case BUILT_IN_STRCMP:
10105 return fold_builtin_strcmp (arg0, arg1);
10107 case BUILT_IN_STRPBRK:
10108 return fold_builtin_strpbrk (arg0, arg1, type);
10110 case BUILT_IN_EXPECT:
10111 return fold_builtin_expect (arg0);
10113 CASE_FLT_FN (BUILT_IN_POW):
10114 return fold_builtin_pow (fndecl, arg0, arg1, type);
10116 CASE_FLT_FN (BUILT_IN_POWI):
10117 return fold_builtin_powi (fndecl, arg0, arg1, type);
10119 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10120 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10122 CASE_FLT_FN (BUILT_IN_FMIN):
10123 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10125 CASE_FLT_FN (BUILT_IN_FMAX):
10126 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10128 case BUILT_IN_ISGREATER:
10129 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10130 case BUILT_IN_ISGREATEREQUAL:
10131 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10132 case BUILT_IN_ISLESS:
10133 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10134 case BUILT_IN_ISLESSEQUAL:
10135 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10136 case BUILT_IN_ISLESSGREATER:
10137 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10138 case BUILT_IN_ISUNORDERED:
10139 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10142 /* We do the folding for va_start in the expander. */
10143 case BUILT_IN_VA_START:
10146 case BUILT_IN_SPRINTF:
10147 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10149 case BUILT_IN_OBJECT_SIZE:
10150 return fold_builtin_object_size (arg0, arg1);
10152 case BUILT_IN_PRINTF:
10153 case BUILT_IN_PRINTF_UNLOCKED:
10154 case BUILT_IN_VPRINTF:
10155 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10157 case BUILT_IN_PRINTF_CHK:
10158 case BUILT_IN_VPRINTF_CHK:
10159 if (!validate_arg (arg0, INTEGER_TYPE)
10160 || TREE_SIDE_EFFECTS (arg0))
10163 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10166 case BUILT_IN_FPRINTF:
10167 case BUILT_IN_FPRINTF_UNLOCKED:
10168 case BUILT_IN_VFPRINTF:
10169 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10178 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10179 and ARG2. IGNORE is true if the result of the function call is ignored.
10180 This function returns NULL_TREE if no simplification was possible. */
10183 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10185 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10186 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10190 CASE_FLT_FN (BUILT_IN_SINCOS):
10191 return fold_builtin_sincos (arg0, arg1, arg2);
10193 CASE_FLT_FN (BUILT_IN_FMA):
10194 if (validate_arg (arg0, REAL_TYPE)
10195 && validate_arg(arg1, REAL_TYPE)
10196 && validate_arg(arg2, REAL_TYPE))
10197 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10200 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10201 CASE_FLT_FN (BUILT_IN_REMQUO):
10202 if (validate_arg (arg0, REAL_TYPE)
10203 && validate_arg(arg1, REAL_TYPE)
10204 && validate_arg(arg2, POINTER_TYPE))
10205 return do_mpfr_remquo (arg0, arg1, arg2);
10209 case BUILT_IN_MEMSET:
10210 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10212 case BUILT_IN_BCOPY:
10213 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10215 case BUILT_IN_MEMCPY:
10216 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10218 case BUILT_IN_MEMPCPY:
10219 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10221 case BUILT_IN_MEMMOVE:
10222 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10224 case BUILT_IN_STRNCAT:
10225 return fold_builtin_strncat (arg0, arg1, arg2);
10227 case BUILT_IN_STRNCPY:
10228 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10230 case BUILT_IN_STRNCMP:
10231 return fold_builtin_strncmp (arg0, arg1, arg2);
10233 case BUILT_IN_MEMCHR:
10234 return fold_builtin_memchr (arg0, arg1, arg2, type);
10236 case BUILT_IN_BCMP:
10237 case BUILT_IN_MEMCMP:
10238 return fold_builtin_memcmp (arg0, arg1, arg2);;
10240 case BUILT_IN_SPRINTF:
10241 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10243 case BUILT_IN_STRCPY_CHK:
10244 case BUILT_IN_STPCPY_CHK:
10245 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10248 case BUILT_IN_STRCAT_CHK:
10249 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10251 case BUILT_IN_PRINTF_CHK:
10252 case BUILT_IN_VPRINTF_CHK:
10253 if (!validate_arg (arg0, INTEGER_TYPE)
10254 || TREE_SIDE_EFFECTS (arg0))
10257 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10260 case BUILT_IN_FPRINTF:
10261 case BUILT_IN_FPRINTF_UNLOCKED:
10262 case BUILT_IN_VFPRINTF:
10263 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10265 case BUILT_IN_FPRINTF_CHK:
10266 case BUILT_IN_VFPRINTF_CHK:
10267 if (!validate_arg (arg1, INTEGER_TYPE)
10268 || TREE_SIDE_EFFECTS (arg1))
10271 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10280 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10281 ARG2, and ARG3. IGNORE is true if the result of the function call is
10282 ignored. This function returns NULL_TREE if no simplification was
10286 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10289 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10293 case BUILT_IN_MEMCPY_CHK:
10294 case BUILT_IN_MEMPCPY_CHK:
10295 case BUILT_IN_MEMMOVE_CHK:
10296 case BUILT_IN_MEMSET_CHK:
10297 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10299 DECL_FUNCTION_CODE (fndecl));
10301 case BUILT_IN_STRNCPY_CHK:
10302 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10304 case BUILT_IN_STRNCAT_CHK:
10305 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10307 case BUILT_IN_FPRINTF_CHK:
10308 case BUILT_IN_VFPRINTF_CHK:
10309 if (!validate_arg (arg1, INTEGER_TYPE)
10310 || TREE_SIDE_EFFECTS (arg1))
10313 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10323 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10324 arguments, where NARGS <= 4. IGNORE is true if the result of the
10325 function call is ignored. This function returns NULL_TREE if no
10326 simplification was possible. Note that this only folds builtins with
10327 fixed argument patterns. Foldings that do varargs-to-varargs
10328 transformations, or that match calls with more than 4 arguments,
10329 need to be handled with fold_builtin_varargs instead. */
10331 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10334 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10336 tree ret = NULL_TREE;
10340 ret = fold_builtin_0 (fndecl, ignore);
10343 ret = fold_builtin_1 (fndecl, args[0], ignore);
10346 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10349 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10352 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10360 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10361 TREE_NO_WARNING (ret) = 1;
10367 /* Builtins with folding operations that operate on "..." arguments
10368 need special handling; we need to store the arguments in a convenient
10369 data structure before attempting any folding. Fortunately there are
10370 only a few builtins that fall into this category. FNDECL is the
10371 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10372 result of the function call is ignored. */
10375 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10377 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10378 tree ret = NULL_TREE;
10382 case BUILT_IN_SPRINTF_CHK:
10383 case BUILT_IN_VSPRINTF_CHK:
10384 ret = fold_builtin_sprintf_chk (exp, fcode);
10387 case BUILT_IN_SNPRINTF_CHK:
10388 case BUILT_IN_VSNPRINTF_CHK:
10389 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10396 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10397 TREE_NO_WARNING (ret) = 1;
10403 /* A wrapper function for builtin folding that prevents warnings for
10404 "statement without effect" and the like, caused by removing the
10405 call node earlier than the warning is generated. */
10408 fold_call_expr (tree exp, bool ignore)
10410 tree ret = NULL_TREE;
10411 tree fndecl = get_callee_fndecl (exp);
10413 && TREE_CODE (fndecl) == FUNCTION_DECL
10414 && DECL_BUILT_IN (fndecl))
10416 /* FIXME: Don't use a list in this interface. */
10417 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10418 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10421 int nargs = call_expr_nargs (exp);
10422 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10424 tree *args = CALL_EXPR_ARGP (exp);
10425 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10428 ret = fold_builtin_varargs (fndecl, exp, ignore);
10431 /* Propagate location information from original call to
10432 expansion of builtin. Otherwise things like
10433 maybe_emit_chk_warning, that operate on the expansion
10434 of a builtin, will use the wrong location information. */
10435 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10437 tree realret = ret;
10438 if (TREE_CODE (ret) == NOP_EXPR)
10439 realret = TREE_OPERAND (ret, 0);
10440 if (CAN_HAVE_LOCATION_P (realret)
10441 && !EXPR_HAS_LOCATION (realret))
10442 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10451 /* Conveniently construct a function call expression. FNDECL names the
10452 function to be called and ARGLIST is a TREE_LIST of arguments. */
10455 build_function_call_expr (tree fndecl, tree arglist)
10457 tree fntype = TREE_TYPE (fndecl);
10458 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10459 int n = list_length (arglist);
10460 tree *argarray = (tree *) alloca (n * sizeof (tree));
10463 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10464 argarray[i] = TREE_VALUE (arglist);
10465 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10468 /* Conveniently construct a function call expression. FNDECL names the
10469 function to be called, N is the number of arguments, and the "..."
10470 parameters are the argument expressions. */
10473 build_call_expr (tree fndecl, int n, ...)
10476 tree fntype = TREE_TYPE (fndecl);
10477 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10478 tree *argarray = (tree *) alloca (n * sizeof (tree));
10482 for (i = 0; i < n; i++)
10483 argarray[i] = va_arg (ap, tree);
10485 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10488 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10489 N arguments are passed in the array ARGARRAY. */
10492 fold_builtin_call_array (tree type,
10497 tree ret = NULL_TREE;
10501 if (TREE_CODE (fn) == ADDR_EXPR)
10503 tree fndecl = TREE_OPERAND (fn, 0);
10504 if (TREE_CODE (fndecl) == FUNCTION_DECL
10505 && DECL_BUILT_IN (fndecl))
10507 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10509 tree arglist = NULL_TREE;
10510 for (i = n - 1; i >= 0; i--)
10511 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10512 ret = targetm.fold_builtin (fndecl, arglist, false);
10516 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10518 /* First try the transformations that don't require consing up
10520 ret = fold_builtin_n (fndecl, argarray, n, false);
10525 /* If we got this far, we need to build an exp. */
10526 exp = build_call_array (type, fn, n, argarray);
10527 ret = fold_builtin_varargs (fndecl, exp, false);
10528 return ret ? ret : exp;
10532 return build_call_array (type, fn, n, argarray);
10535 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10536 along with N new arguments specified as the "..." parameters. SKIP
10537 is the number of arguments in EXP to be omitted. This function is used
10538 to do varargs-to-varargs transformations. */
10541 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10543 int oldnargs = call_expr_nargs (exp);
10544 int nargs = oldnargs - skip + n;
10545 tree fntype = TREE_TYPE (fndecl);
10546 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10554 buffer = alloca (nargs * sizeof (tree));
10556 for (i = 0; i < n; i++)
10557 buffer[i] = va_arg (ap, tree);
10559 for (j = skip; j < oldnargs; j++, i++)
10560 buffer[i] = CALL_EXPR_ARG (exp, j);
10563 buffer = CALL_EXPR_ARGP (exp) + skip;
10565 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10568 /* Validate a single argument ARG against a tree code CODE representing
10572 validate_arg (const_tree arg, enum tree_code code)
10576 else if (code == POINTER_TYPE)
10577 return POINTER_TYPE_P (TREE_TYPE (arg));
10578 return code == TREE_CODE (TREE_TYPE (arg));
10581 /* This function validates the types of a function call argument list
10582 against a specified list of tree_codes. If the last specifier is a 0,
10583 that represents an ellipses, otherwise the last specifier must be a
10587 validate_arglist (const_tree callexpr, ...)
10589 enum tree_code code;
10592 const_call_expr_arg_iterator iter;
10595 va_start (ap, callexpr);
10596 init_const_call_expr_arg_iterator (callexpr, &iter);
10600 code = va_arg (ap, enum tree_code);
10604 /* This signifies an ellipses, any further arguments are all ok. */
10608 /* This signifies an endlink, if no arguments remain, return
10609 true, otherwise return false. */
10610 res = !more_const_call_expr_args_p (&iter);
10613 /* If no parameters remain or the parameter's code does not
10614 match the specified code, return false. Otherwise continue
10615 checking any remaining arguments. */
10616 arg = next_const_call_expr_arg (&iter);
10617 if (!validate_arg (arg, code))
10624 /* We need gotos here since we can only have one VA_CLOSE in a
10632 /* Default target-specific builtin expander that does nothing. */
10635 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10636 rtx target ATTRIBUTE_UNUSED,
10637 rtx subtarget ATTRIBUTE_UNUSED,
10638 enum machine_mode mode ATTRIBUTE_UNUSED,
10639 int ignore ATTRIBUTE_UNUSED)
10644 /* Returns true is EXP represents data that would potentially reside
10645 in a readonly section. */
10648 readonly_data_expr (tree exp)
10652 if (TREE_CODE (exp) != ADDR_EXPR)
10655 exp = get_base_address (TREE_OPERAND (exp, 0));
10659 /* Make sure we call decl_readonly_section only for trees it
10660 can handle (since it returns true for everything it doesn't
10662 if (TREE_CODE (exp) == STRING_CST
10663 || TREE_CODE (exp) == CONSTRUCTOR
10664 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10665 return decl_readonly_section (exp, 0);
10670 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10671 to the call, and TYPE is its return type.
10673 Return NULL_TREE if no simplification was possible, otherwise return the
10674 simplified form of the call as a tree.
10676 The simplified form may be a constant or other expression which
10677 computes the same value, but in a more efficient manner (including
10678 calls to other builtin functions).
10680 The call may contain arguments which need to be evaluated, but
10681 which are not useful to determine the result of the call. In
10682 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10683 COMPOUND_EXPR will be an argument which must be evaluated.
10684 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10685 COMPOUND_EXPR in the chain will contain the tree for the simplified
10686 form of the builtin function call. */
10689 fold_builtin_strstr (tree s1, tree s2, tree type)
10691 if (!validate_arg (s1, POINTER_TYPE)
10692 || !validate_arg (s2, POINTER_TYPE))
10697 const char *p1, *p2;
10699 p2 = c_getstr (s2);
10703 p1 = c_getstr (s1);
10706 const char *r = strstr (p1, p2);
10710 return build_int_cst (TREE_TYPE (s1), 0);
10712 /* Return an offset into the constant string argument. */
10713 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10714 s1, size_int (r - p1));
10715 return fold_convert (type, tem);
10718 /* The argument is const char *, and the result is char *, so we need
10719 a type conversion here to avoid a warning. */
10721 return fold_convert (type, s1);
10726 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10730 /* New argument list transforming strstr(s1, s2) to
10731 strchr(s1, s2[0]). */
10732 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10736 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10737 the call, and TYPE is its return type.
10739 Return NULL_TREE if no simplification was possible, otherwise return the
10740 simplified form of the call as a tree.
10742 The simplified form may be a constant or other expression which
10743 computes the same value, but in a more efficient manner (including
10744 calls to other builtin functions).
10746 The call may contain arguments which need to be evaluated, but
10747 which are not useful to determine the result of the call. In
10748 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10749 COMPOUND_EXPR will be an argument which must be evaluated.
10750 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10751 COMPOUND_EXPR in the chain will contain the tree for the simplified
10752 form of the builtin function call. */
10755 fold_builtin_strchr (tree s1, tree s2, tree type)
10757 if (!validate_arg (s1, POINTER_TYPE)
10758 || !validate_arg (s2, INTEGER_TYPE))
10764 if (TREE_CODE (s2) != INTEGER_CST)
10767 p1 = c_getstr (s1);
10774 if (target_char_cast (s2, &c))
10777 r = strchr (p1, c);
10780 return build_int_cst (TREE_TYPE (s1), 0);
10782 /* Return an offset into the constant string argument. */
10783 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10784 s1, size_int (r - p1));
10785 return fold_convert (type, tem);
10791 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10792 the call, and TYPE is its return type.
10794 Return NULL_TREE if no simplification was possible, otherwise return the
10795 simplified form of the call as a tree.
10797 The simplified form may be a constant or other expression which
10798 computes the same value, but in a more efficient manner (including
10799 calls to other builtin functions).
10801 The call may contain arguments which need to be evaluated, but
10802 which are not useful to determine the result of the call. In
10803 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10804 COMPOUND_EXPR will be an argument which must be evaluated.
10805 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10806 COMPOUND_EXPR in the chain will contain the tree for the simplified
10807 form of the builtin function call. */
10810 fold_builtin_strrchr (tree s1, tree s2, tree type)
10812 if (!validate_arg (s1, POINTER_TYPE)
10813 || !validate_arg (s2, INTEGER_TYPE))
10820 if (TREE_CODE (s2) != INTEGER_CST)
10823 p1 = c_getstr (s1);
10830 if (target_char_cast (s2, &c))
10833 r = strrchr (p1, c);
10836 return build_int_cst (TREE_TYPE (s1), 0);
10838 /* Return an offset into the constant string argument. */
10839 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10840 s1, size_int (r - p1));
10841 return fold_convert (type, tem);
10844 if (! integer_zerop (s2))
10847 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10851 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10852 return build_call_expr (fn, 2, s1, s2);
10856 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10857 to the call, and TYPE is its return type.
10859 Return NULL_TREE if no simplification was possible, otherwise return the
10860 simplified form of the call as a tree.
10862 The simplified form may be a constant or other expression which
10863 computes the same value, but in a more efficient manner (including
10864 calls to other builtin functions).
10866 The call may contain arguments which need to be evaluated, but
10867 which are not useful to determine the result of the call. In
10868 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10869 COMPOUND_EXPR will be an argument which must be evaluated.
10870 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10871 COMPOUND_EXPR in the chain will contain the tree for the simplified
10872 form of the builtin function call. */
10875 fold_builtin_strpbrk (tree s1, tree s2, tree type)
10877 if (!validate_arg (s1, POINTER_TYPE)
10878 || !validate_arg (s2, POINTER_TYPE))
10883 const char *p1, *p2;
10885 p2 = c_getstr (s2);
10889 p1 = c_getstr (s1);
10892 const char *r = strpbrk (p1, p2);
10896 return build_int_cst (TREE_TYPE (s1), 0);
10898 /* Return an offset into the constant string argument. */
10899 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10900 s1, size_int (r - p1));
10901 return fold_convert (type, tem);
10905 /* strpbrk(x, "") == NULL.
10906 Evaluate and ignore s1 in case it had side-effects. */
10907 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
10910 return NULL_TREE; /* Really call strpbrk. */
10912 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10916 /* New argument list transforming strpbrk(s1, s2) to
10917 strchr(s1, s2[0]). */
10918 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10922 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
10925 Return NULL_TREE if no simplification was possible, otherwise return the
10926 simplified form of the call as a tree.
10928 The simplified form may be a constant or other expression which
10929 computes the same value, but in a more efficient manner (including
10930 calls to other builtin functions).
10932 The call may contain arguments which need to be evaluated, but
10933 which are not useful to determine the result of the call. In
10934 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10935 COMPOUND_EXPR will be an argument which must be evaluated.
10936 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10937 COMPOUND_EXPR in the chain will contain the tree for the simplified
10938 form of the builtin function call. */
10941 fold_builtin_strcat (tree dst, tree src)
10943 if (!validate_arg (dst, POINTER_TYPE)
10944 || !validate_arg (src, POINTER_TYPE))
10948 const char *p = c_getstr (src);
10950 /* If the string length is zero, return the dst parameter. */
10951 if (p && *p == '\0')
10958 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10959 arguments to the call.
10961 Return NULL_TREE if no simplification was possible, otherwise return the
10962 simplified form of the call as a tree.
10964 The simplified form may be a constant or other expression which
10965 computes the same value, but in a more efficient manner (including
10966 calls to other builtin functions).
10968 The call may contain arguments which need to be evaluated, but
10969 which are not useful to determine the result of the call. In
10970 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10971 COMPOUND_EXPR will be an argument which must be evaluated.
10972 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10973 COMPOUND_EXPR in the chain will contain the tree for the simplified
10974 form of the builtin function call. */
10977 fold_builtin_strncat (tree dst, tree src, tree len)
10979 if (!validate_arg (dst, POINTER_TYPE)
10980 || !validate_arg (src, POINTER_TYPE)
10981 || !validate_arg (len, INTEGER_TYPE))
10985 const char *p = c_getstr (src);
10987 /* If the requested length is zero, or the src parameter string
10988 length is zero, return the dst parameter. */
10989 if (integer_zerop (len) || (p && *p == '\0'))
10990 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
10992 /* If the requested len is greater than or equal to the string
10993 length, call strcat. */
10994 if (TREE_CODE (len) == INTEGER_CST && p
10995 && compare_tree_int (len, strlen (p)) >= 0)
10997 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
10999 /* If the replacement _DECL isn't initialized, don't do the
11004 return build_call_expr (fn, 2, dst, src);
11010 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11013 Return NULL_TREE if no simplification was possible, otherwise return the
11014 simplified form of the call as a tree.
11016 The simplified form may be a constant or other expression which
11017 computes the same value, but in a more efficient manner (including
11018 calls to other builtin functions).
11020 The call may contain arguments which need to be evaluated, but
11021 which are not useful to determine the result of the call. In
11022 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11023 COMPOUND_EXPR will be an argument which must be evaluated.
11024 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11025 COMPOUND_EXPR in the chain will contain the tree for the simplified
11026 form of the builtin function call. */
11029 fold_builtin_strspn (tree s1, tree s2)
11031 if (!validate_arg (s1, POINTER_TYPE)
11032 || !validate_arg (s2, POINTER_TYPE))
11036 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11038 /* If both arguments are constants, evaluate at compile-time. */
11041 const size_t r = strspn (p1, p2);
11042 return size_int (r);
11045 /* If either argument is "", return NULL_TREE. */
11046 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11047 /* Evaluate and ignore both arguments in case either one has
11049 return omit_two_operands (integer_type_node, integer_zero_node,
11055 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11058 Return NULL_TREE if no simplification was possible, otherwise return the
11059 simplified form of the call as a tree.
11061 The simplified form may be a constant or other expression which
11062 computes the same value, but in a more efficient manner (including
11063 calls to other builtin functions).
11065 The call may contain arguments which need to be evaluated, but
11066 which are not useful to determine the result of the call. In
11067 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11068 COMPOUND_EXPR will be an argument which must be evaluated.
11069 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11070 COMPOUND_EXPR in the chain will contain the tree for the simplified
11071 form of the builtin function call. */
11074 fold_builtin_strcspn (tree s1, tree s2)
11076 if (!validate_arg (s1, POINTER_TYPE)
11077 || !validate_arg (s2, POINTER_TYPE))
11081 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11083 /* If both arguments are constants, evaluate at compile-time. */
11086 const size_t r = strcspn (p1, p2);
11087 return size_int (r);
11090 /* If the first argument is "", return NULL_TREE. */
11091 if (p1 && *p1 == '\0')
11093 /* Evaluate and ignore argument s2 in case it has
11095 return omit_one_operand (integer_type_node,
11096 integer_zero_node, s2);
11099 /* If the second argument is "", return __builtin_strlen(s1). */
11100 if (p2 && *p2 == '\0')
11102 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11104 /* If the replacement _DECL isn't initialized, don't do the
11109 return build_call_expr (fn, 1, s1);
11115 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11116 to the call. IGNORE is true if the value returned
11117 by the builtin will be ignored. UNLOCKED is true is true if this
11118 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11119 the known length of the string. Return NULL_TREE if no simplification
11123 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11125 /* If we're using an unlocked function, assume the other unlocked
11126 functions exist explicitly. */
11127 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11128 : implicit_built_in_decls[BUILT_IN_FPUTC];
11129 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11130 : implicit_built_in_decls[BUILT_IN_FWRITE];
11132 /* If the return value is used, don't do the transformation. */
11136 /* Verify the arguments in the original call. */
11137 if (!validate_arg (arg0, POINTER_TYPE)
11138 || !validate_arg (arg1, POINTER_TYPE))
11142 len = c_strlen (arg0, 0);
11144 /* Get the length of the string passed to fputs. If the length
11145 can't be determined, punt. */
11147 || TREE_CODE (len) != INTEGER_CST)
11150 switch (compare_tree_int (len, 1))
11152 case -1: /* length is 0, delete the call entirely . */
11153 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11155 case 0: /* length is 1, call fputc. */
11157 const char *p = c_getstr (arg0);
11162 return build_call_expr (fn_fputc, 2,
11163 build_int_cst (NULL_TREE, p[0]), arg1);
11169 case 1: /* length is greater than 1, call fwrite. */
11171 /* If optimizing for size keep fputs. */
11174 /* New argument list transforming fputs(string, stream) to
11175 fwrite(string, 1, len, stream). */
11177 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11182 gcc_unreachable ();
11187 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11188 produced. False otherwise. This is done so that we don't output the error
11189 or warning twice or three times. */
11191 fold_builtin_next_arg (tree exp, bool va_start_p)
11193 tree fntype = TREE_TYPE (current_function_decl);
11194 int nargs = call_expr_nargs (exp);
11197 if (TYPE_ARG_TYPES (fntype) == 0
11198 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11199 == void_type_node))
11201 error ("%<va_start%> used in function with fixed args");
11207 if (va_start_p && (nargs != 2))
11209 error ("wrong number of arguments to function %<va_start%>");
11212 arg = CALL_EXPR_ARG (exp, 1);
11214 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11215 when we checked the arguments and if needed issued a warning. */
11220 /* Evidently an out of date version of <stdarg.h>; can't validate
11221 va_start's second argument, but can still work as intended. */
11222 warning (0, "%<__builtin_next_arg%> called without an argument");
11225 else if (nargs > 1)
11227 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11230 arg = CALL_EXPR_ARG (exp, 0);
11233 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11234 or __builtin_next_arg (0) the first time we see it, after checking
11235 the arguments and if needed issuing a warning. */
11236 if (!integer_zerop (arg))
11238 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11240 /* Strip off all nops for the sake of the comparison. This
11241 is not quite the same as STRIP_NOPS. It does more.
11242 We must also strip off INDIRECT_EXPR for C++ reference
11244 while (TREE_CODE (arg) == NOP_EXPR
11245 || TREE_CODE (arg) == CONVERT_EXPR
11246 || TREE_CODE (arg) == NON_LVALUE_EXPR
11247 || TREE_CODE (arg) == INDIRECT_REF)
11248 arg = TREE_OPERAND (arg, 0);
11249 if (arg != last_parm)
11251 /* FIXME: Sometimes with the tree optimizers we can get the
11252 not the last argument even though the user used the last
11253 argument. We just warn and set the arg to be the last
11254 argument so that we will get wrong-code because of
11256 warning (0, "second parameter of %<va_start%> not last named argument");
11258 /* We want to verify the second parameter just once before the tree
11259 optimizers are run and then avoid keeping it in the tree,
11260 as otherwise we could warn even for correct code like:
11261 void foo (int i, ...)
11262 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11264 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11266 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11272 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11273 ORIG may be null if this is a 2-argument call. We don't attempt to
11274 simplify calls with more than 3 arguments.
11276 Return NULL_TREE if no simplification was possible, otherwise return the
11277 simplified form of the call as a tree. If IGNORED is true, it means that
11278 the caller does not use the returned value of the function. */
11281 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11284 const char *fmt_str = NULL;
11286 /* Verify the required arguments in the original call. We deal with two
11287 types of sprintf() calls: 'sprintf (str, fmt)' and
11288 'sprintf (dest, "%s", orig)'. */
11289 if (!validate_arg (dest, POINTER_TYPE)
11290 || !validate_arg (fmt, POINTER_TYPE))
11292 if (orig && !validate_arg (orig, POINTER_TYPE))
11295 /* Check whether the format is a literal string constant. */
11296 fmt_str = c_getstr (fmt);
11297 if (fmt_str == NULL)
11301 retval = NULL_TREE;
11303 if (!init_target_chars ())
11306 /* If the format doesn't contain % args or %%, use strcpy. */
11307 if (strchr (fmt_str, target_percent) == NULL)
11309 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11314 /* Don't optimize sprintf (buf, "abc", ptr++). */
11318 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11319 'format' is known to contain no % formats. */
11320 call = build_call_expr (fn, 2, dest, fmt);
11322 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11325 /* If the format is "%s", use strcpy if the result isn't used. */
11326 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11329 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11334 /* Don't crash on sprintf (str1, "%s"). */
11338 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11341 retval = c_strlen (orig, 1);
11342 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11345 call = build_call_expr (fn, 2, dest, orig);
11348 if (call && retval)
11350 retval = fold_convert
11351 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11353 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11359 /* Expand a call EXP to __builtin_object_size. */
11362 expand_builtin_object_size (tree exp)
11365 int object_size_type;
11366 tree fndecl = get_callee_fndecl (exp);
11367 location_t locus = EXPR_LOCATION (exp);
11369 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11371 error ("%Hfirst argument of %D must be a pointer, second integer constant",
11373 expand_builtin_trap ();
11377 ost = CALL_EXPR_ARG (exp, 1);
11380 if (TREE_CODE (ost) != INTEGER_CST
11381 || tree_int_cst_sgn (ost) < 0
11382 || compare_tree_int (ost, 3) > 0)
11384 error ("%Hlast argument of %D is not integer constant between 0 and 3",
11386 expand_builtin_trap ();
11390 object_size_type = tree_low_cst (ost, 0);
11392 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11395 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11396 FCODE is the BUILT_IN_* to use.
11397 Return NULL_RTX if we failed; the caller should emit a normal call,
11398 otherwise try to get the result in TARGET, if convenient (and in
11399 mode MODE if that's convenient). */
11402 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11403 enum built_in_function fcode)
11405 tree dest, src, len, size;
11407 if (!validate_arglist (exp,
11409 fcode == BUILT_IN_MEMSET_CHK
11410 ? INTEGER_TYPE : POINTER_TYPE,
11411 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11414 dest = CALL_EXPR_ARG (exp, 0);
11415 src = CALL_EXPR_ARG (exp, 1);
11416 len = CALL_EXPR_ARG (exp, 2);
11417 size = CALL_EXPR_ARG (exp, 3);
11419 if (! host_integerp (size, 1))
11422 if (host_integerp (len, 1) || integer_all_onesp (size))
11426 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11428 location_t locus = EXPR_LOCATION (exp);
11429 warning (0, "%Hcall to %D will always overflow destination buffer",
11430 &locus, get_callee_fndecl (exp));
11435 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11436 mem{cpy,pcpy,move,set} is available. */
11439 case BUILT_IN_MEMCPY_CHK:
11440 fn = built_in_decls[BUILT_IN_MEMCPY];
11442 case BUILT_IN_MEMPCPY_CHK:
11443 fn = built_in_decls[BUILT_IN_MEMPCPY];
11445 case BUILT_IN_MEMMOVE_CHK:
11446 fn = built_in_decls[BUILT_IN_MEMMOVE];
11448 case BUILT_IN_MEMSET_CHK:
11449 fn = built_in_decls[BUILT_IN_MEMSET];
11458 fn = build_call_expr (fn, 3, dest, src, len);
11459 if (TREE_CODE (fn) == CALL_EXPR)
11460 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11461 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11463 else if (fcode == BUILT_IN_MEMSET_CHK)
11467 unsigned int dest_align
11468 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11470 /* If DEST is not a pointer type, call the normal function. */
11471 if (dest_align == 0)
11474 /* If SRC and DEST are the same (and not volatile), do nothing. */
11475 if (operand_equal_p (src, dest, 0))
11479 if (fcode != BUILT_IN_MEMPCPY_CHK)
11481 /* Evaluate and ignore LEN in case it has side-effects. */
11482 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11483 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11486 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11487 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11490 /* __memmove_chk special case. */
11491 if (fcode == BUILT_IN_MEMMOVE_CHK)
11493 unsigned int src_align
11494 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11496 if (src_align == 0)
11499 /* If src is categorized for a readonly section we can use
11500 normal __memcpy_chk. */
11501 if (readonly_data_expr (src))
11503 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11506 fn = build_call_expr (fn, 4, dest, src, len, size);
11507 if (TREE_CODE (fn) == CALL_EXPR)
11508 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11509 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11516 /* Emit warning if a buffer overflow is detected at compile time. */
11519 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11527 case BUILT_IN_STRCPY_CHK:
11528 case BUILT_IN_STPCPY_CHK:
11529 /* For __strcat_chk the warning will be emitted only if overflowing
11530 by at least strlen (dest) + 1 bytes. */
11531 case BUILT_IN_STRCAT_CHK:
11532 len = CALL_EXPR_ARG (exp, 1);
11533 size = CALL_EXPR_ARG (exp, 2);
11536 case BUILT_IN_STRNCAT_CHK:
11537 case BUILT_IN_STRNCPY_CHK:
11538 len = CALL_EXPR_ARG (exp, 2);
11539 size = CALL_EXPR_ARG (exp, 3);
11541 case BUILT_IN_SNPRINTF_CHK:
11542 case BUILT_IN_VSNPRINTF_CHK:
11543 len = CALL_EXPR_ARG (exp, 1);
11544 size = CALL_EXPR_ARG (exp, 3);
11547 gcc_unreachable ();
11553 if (! host_integerp (size, 1) || integer_all_onesp (size))
11558 len = c_strlen (len, 1);
11559 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11562 else if (fcode == BUILT_IN_STRNCAT_CHK)
11564 tree src = CALL_EXPR_ARG (exp, 1);
11565 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11567 src = c_strlen (src, 1);
11568 if (! src || ! host_integerp (src, 1))
11570 locus = EXPR_LOCATION (exp);
11571 warning (0, "%Hcall to %D might overflow destination buffer",
11572 &locus, get_callee_fndecl (exp));
11575 else if (tree_int_cst_lt (src, size))
11578 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11581 locus = EXPR_LOCATION (exp);
11582 warning (0, "%Hcall to %D will always overflow destination buffer",
11583 &locus, get_callee_fndecl (exp));
11586 /* Emit warning if a buffer overflow is detected at compile time
11587 in __sprintf_chk/__vsprintf_chk calls. */
11590 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11592 tree dest, size, len, fmt, flag;
11593 const char *fmt_str;
11594 int nargs = call_expr_nargs (exp);
11596 /* Verify the required arguments in the original call. */
11600 dest = CALL_EXPR_ARG (exp, 0);
11601 flag = CALL_EXPR_ARG (exp, 1);
11602 size = CALL_EXPR_ARG (exp, 2);
11603 fmt = CALL_EXPR_ARG (exp, 3);
11605 if (! host_integerp (size, 1) || integer_all_onesp (size))
11608 /* Check whether the format is a literal string constant. */
11609 fmt_str = c_getstr (fmt);
11610 if (fmt_str == NULL)
11613 if (!init_target_chars ())
11616 /* If the format doesn't contain % args or %%, we know its size. */
11617 if (strchr (fmt_str, target_percent) == 0)
11618 len = build_int_cstu (size_type_node, strlen (fmt_str));
11619 /* If the format is "%s" and first ... argument is a string literal,
11621 else if (fcode == BUILT_IN_SPRINTF_CHK
11622 && strcmp (fmt_str, target_percent_s) == 0)
11628 arg = CALL_EXPR_ARG (exp, 4);
11629 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11632 len = c_strlen (arg, 1);
11633 if (!len || ! host_integerp (len, 1))
11639 if (! tree_int_cst_lt (len, size))
11641 location_t locus = EXPR_LOCATION (exp);
11642 warning (0, "%Hcall to %D will always overflow destination buffer",
11643 &locus, get_callee_fndecl (exp));
11647 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11651 fold_builtin_object_size (tree ptr, tree ost)
11653 tree ret = NULL_TREE;
11654 int object_size_type;
11656 if (!validate_arg (ptr, POINTER_TYPE)
11657 || !validate_arg (ost, INTEGER_TYPE))
11662 if (TREE_CODE (ost) != INTEGER_CST
11663 || tree_int_cst_sgn (ost) < 0
11664 || compare_tree_int (ost, 3) > 0)
11667 object_size_type = tree_low_cst (ost, 0);
11669 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11670 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11671 and (size_t) 0 for types 2 and 3. */
11672 if (TREE_SIDE_EFFECTS (ptr))
11673 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11675 if (TREE_CODE (ptr) == ADDR_EXPR)
11676 ret = build_int_cstu (size_type_node,
11677 compute_builtin_object_size (ptr, object_size_type));
11679 else if (TREE_CODE (ptr) == SSA_NAME)
11681 unsigned HOST_WIDE_INT bytes;
11683 /* If object size is not known yet, delay folding until
11684 later. Maybe subsequent passes will help determining
11686 bytes = compute_builtin_object_size (ptr, object_size_type);
11687 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11689 ret = build_int_cstu (size_type_node, bytes);
11694 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11695 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11696 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11703 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11704 DEST, SRC, LEN, and SIZE are the arguments to the call.
11705 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11706 code of the builtin. If MAXLEN is not NULL, it is maximum length
11707 passed as third argument. */
11710 fold_builtin_memory_chk (tree fndecl,
11711 tree dest, tree src, tree len, tree size,
11712 tree maxlen, bool ignore,
11713 enum built_in_function fcode)
11717 if (!validate_arg (dest, POINTER_TYPE)
11718 || !validate_arg (src,
11719 (fcode == BUILT_IN_MEMSET_CHK
11720 ? INTEGER_TYPE : POINTER_TYPE))
11721 || !validate_arg (len, INTEGER_TYPE)
11722 || !validate_arg (size, INTEGER_TYPE))
11725 /* If SRC and DEST are the same (and not volatile), return DEST
11726 (resp. DEST+LEN for __mempcpy_chk). */
11727 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11729 if (fcode != BUILT_IN_MEMPCPY_CHK)
11730 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11733 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11734 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
11738 if (! host_integerp (size, 1))
11741 if (! integer_all_onesp (size))
11743 if (! host_integerp (len, 1))
11745 /* If LEN is not constant, try MAXLEN too.
11746 For MAXLEN only allow optimizing into non-_ocs function
11747 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11748 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11750 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11752 /* (void) __mempcpy_chk () can be optimized into
11753 (void) __memcpy_chk (). */
11754 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11758 return build_call_expr (fn, 4, dest, src, len, size);
11766 if (tree_int_cst_lt (size, maxlen))
11771 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11772 mem{cpy,pcpy,move,set} is available. */
11775 case BUILT_IN_MEMCPY_CHK:
11776 fn = built_in_decls[BUILT_IN_MEMCPY];
11778 case BUILT_IN_MEMPCPY_CHK:
11779 fn = built_in_decls[BUILT_IN_MEMPCPY];
11781 case BUILT_IN_MEMMOVE_CHK:
11782 fn = built_in_decls[BUILT_IN_MEMMOVE];
11784 case BUILT_IN_MEMSET_CHK:
11785 fn = built_in_decls[BUILT_IN_MEMSET];
11794 return build_call_expr (fn, 3, dest, src, len);
11797 /* Fold a call to the __st[rp]cpy_chk builtin.
11798 DEST, SRC, and SIZE are the arguments to the call.
11799 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
11800 code of the builtin. If MAXLEN is not NULL, it is maximum length of
11801 strings passed as second argument. */
11804 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
11805 tree maxlen, bool ignore,
11806 enum built_in_function fcode)
11810 if (!validate_arg (dest, POINTER_TYPE)
11811 || !validate_arg (src, POINTER_TYPE)
11812 || !validate_arg (size, INTEGER_TYPE))
11815 /* If SRC and DEST are the same (and not volatile), return DEST. */
11816 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
11817 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
11819 if (! host_integerp (size, 1))
11822 if (! integer_all_onesp (size))
11824 len = c_strlen (src, 1);
11825 if (! len || ! host_integerp (len, 1))
11827 /* If LEN is not constant, try MAXLEN too.
11828 For MAXLEN only allow optimizing into non-_ocs function
11829 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11830 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11832 if (fcode == BUILT_IN_STPCPY_CHK)
11837 /* If return value of __stpcpy_chk is ignored,
11838 optimize into __strcpy_chk. */
11839 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
11843 return build_call_expr (fn, 3, dest, src, size);
11846 if (! len || TREE_SIDE_EFFECTS (len))
11849 /* If c_strlen returned something, but not a constant,
11850 transform __strcpy_chk into __memcpy_chk. */
11851 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11855 len = size_binop (PLUS_EXPR, len, ssize_int (1));
11856 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
11857 build_call_expr (fn, 4,
11858 dest, src, len, size));
11864 if (! tree_int_cst_lt (maxlen, size))
11868 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
11869 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
11870 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
11874 return build_call_expr (fn, 2, dest, src);
11877 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
11878 are the arguments to the call. If MAXLEN is not NULL, it is maximum
11879 length passed as third argument. */
11882 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
11887 if (!validate_arg (dest, POINTER_TYPE)
11888 || !validate_arg (src, POINTER_TYPE)
11889 || !validate_arg (len, INTEGER_TYPE)
11890 || !validate_arg (size, INTEGER_TYPE))
11893 if (! host_integerp (size, 1))
11896 if (! integer_all_onesp (size))
11898 if (! host_integerp (len, 1))
11900 /* If LEN is not constant, try MAXLEN too.
11901 For MAXLEN only allow optimizing into non-_ocs function
11902 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11903 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11909 if (tree_int_cst_lt (size, maxlen))
11913 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
11914 fn = built_in_decls[BUILT_IN_STRNCPY];
11918 return build_call_expr (fn, 3, dest, src, len);
11921 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
11922 are the arguments to the call. */
11925 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
11930 if (!validate_arg (dest, POINTER_TYPE)
11931 || !validate_arg (src, POINTER_TYPE)
11932 || !validate_arg (size, INTEGER_TYPE))
11935 p = c_getstr (src);
11936 /* If the SRC parameter is "", return DEST. */
11937 if (p && *p == '\0')
11938 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11940 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
11943 /* If __builtin_strcat_chk is used, assume strcat is available. */
11944 fn = built_in_decls[BUILT_IN_STRCAT];
11948 return build_call_expr (fn, 2, dest, src);
11951 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
11955 fold_builtin_strncat_chk (tree fndecl,
11956 tree dest, tree src, tree len, tree size)
11961 if (!validate_arg (dest, POINTER_TYPE)
11962 || !validate_arg (src, POINTER_TYPE)
11963 || !validate_arg (size, INTEGER_TYPE)
11964 || !validate_arg (size, INTEGER_TYPE))
11967 p = c_getstr (src);
11968 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
11969 if (p && *p == '\0')
11970 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11971 else if (integer_zerop (len))
11972 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11974 if (! host_integerp (size, 1))
11977 if (! integer_all_onesp (size))
11979 tree src_len = c_strlen (src, 1);
11981 && host_integerp (src_len, 1)
11982 && host_integerp (len, 1)
11983 && ! tree_int_cst_lt (len, src_len))
11985 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
11986 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
11990 return build_call_expr (fn, 3, dest, src, size);
11995 /* If __builtin_strncat_chk is used, assume strncat is available. */
11996 fn = built_in_decls[BUILT_IN_STRNCAT];
12000 return build_call_expr (fn, 3, dest, src, len);
12003 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12004 a normal call should be emitted rather than expanding the function
12005 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12008 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12010 tree dest, size, len, fn, fmt, flag;
12011 const char *fmt_str;
12012 int nargs = call_expr_nargs (exp);
12014 /* Verify the required arguments in the original call. */
12017 dest = CALL_EXPR_ARG (exp, 0);
12018 if (!validate_arg (dest, POINTER_TYPE))
12020 flag = CALL_EXPR_ARG (exp, 1);
12021 if (!validate_arg (flag, INTEGER_TYPE))
12023 size = CALL_EXPR_ARG (exp, 2);
12024 if (!validate_arg (size, INTEGER_TYPE))
12026 fmt = CALL_EXPR_ARG (exp, 3);
12027 if (!validate_arg (fmt, POINTER_TYPE))
12030 if (! host_integerp (size, 1))
12035 if (!init_target_chars ())
12038 /* Check whether the format is a literal string constant. */
12039 fmt_str = c_getstr (fmt);
12040 if (fmt_str != NULL)
12042 /* If the format doesn't contain % args or %%, we know the size. */
12043 if (strchr (fmt_str, target_percent) == 0)
12045 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12046 len = build_int_cstu (size_type_node, strlen (fmt_str));
12048 /* If the format is "%s" and first ... argument is a string literal,
12049 we know the size too. */
12050 else if (fcode == BUILT_IN_SPRINTF_CHK
12051 && strcmp (fmt_str, target_percent_s) == 0)
12057 arg = CALL_EXPR_ARG (exp, 4);
12058 if (validate_arg (arg, POINTER_TYPE))
12060 len = c_strlen (arg, 1);
12061 if (! len || ! host_integerp (len, 1))
12068 if (! integer_all_onesp (size))
12070 if (! len || ! tree_int_cst_lt (len, size))
12074 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12075 or if format doesn't contain % chars or is "%s". */
12076 if (! integer_zerop (flag))
12078 if (fmt_str == NULL)
12080 if (strchr (fmt_str, target_percent) != NULL
12081 && strcmp (fmt_str, target_percent_s))
12085 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12086 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12087 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12091 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12094 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12095 a normal call should be emitted rather than expanding the function
12096 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12097 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12098 passed as second argument. */
12101 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12102 enum built_in_function fcode)
12104 tree dest, size, len, fn, fmt, flag;
12105 const char *fmt_str;
12107 /* Verify the required arguments in the original call. */
12108 if (call_expr_nargs (exp) < 5)
12110 dest = CALL_EXPR_ARG (exp, 0);
12111 if (!validate_arg (dest, POINTER_TYPE))
12113 len = CALL_EXPR_ARG (exp, 1);
12114 if (!validate_arg (len, INTEGER_TYPE))
12116 flag = CALL_EXPR_ARG (exp, 2);
12117 if (!validate_arg (flag, INTEGER_TYPE))
12119 size = CALL_EXPR_ARG (exp, 3);
12120 if (!validate_arg (size, INTEGER_TYPE))
12122 fmt = CALL_EXPR_ARG (exp, 4);
12123 if (!validate_arg (fmt, POINTER_TYPE))
12126 if (! host_integerp (size, 1))
12129 if (! integer_all_onesp (size))
12131 if (! host_integerp (len, 1))
12133 /* If LEN is not constant, try MAXLEN too.
12134 For MAXLEN only allow optimizing into non-_ocs function
12135 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12136 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12142 if (tree_int_cst_lt (size, maxlen))
12146 if (!init_target_chars ())
12149 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12150 or if format doesn't contain % chars or is "%s". */
12151 if (! integer_zerop (flag))
12153 fmt_str = c_getstr (fmt);
12154 if (fmt_str == NULL)
12156 if (strchr (fmt_str, target_percent) != NULL
12157 && strcmp (fmt_str, target_percent_s))
12161 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12163 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12164 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12168 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12171 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12172 FMT and ARG are the arguments to the call; we don't fold cases with
12173 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12175 Return NULL_TREE if no simplification was possible, otherwise return the
12176 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12177 code of the function to be simplified. */
12180 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12181 enum built_in_function fcode)
12183 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12184 const char *fmt_str = NULL;
12186 /* If the return value is used, don't do the transformation. */
12190 /* Verify the required arguments in the original call. */
12191 if (!validate_arg (fmt, POINTER_TYPE))
12194 /* Check whether the format is a literal string constant. */
12195 fmt_str = c_getstr (fmt);
12196 if (fmt_str == NULL)
12199 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12201 /* If we're using an unlocked function, assume the other
12202 unlocked functions exist explicitly. */
12203 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12204 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12208 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12209 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12212 if (!init_target_chars ())
12215 if (strcmp (fmt_str, target_percent_s) == 0
12216 || strchr (fmt_str, target_percent) == NULL)
12220 if (strcmp (fmt_str, target_percent_s) == 0)
12222 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12225 if (!arg || !validate_arg (arg, POINTER_TYPE))
12228 str = c_getstr (arg);
12234 /* The format specifier doesn't contain any '%' characters. */
12235 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12241 /* If the string was "", printf does nothing. */
12242 if (str[0] == '\0')
12243 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12245 /* If the string has length of 1, call putchar. */
12246 if (str[1] == '\0')
12248 /* Given printf("c"), (where c is any one character,)
12249 convert "c"[0] to an int and pass that to the replacement
12251 newarg = build_int_cst (NULL_TREE, str[0]);
12253 call = build_call_expr (fn_putchar, 1, newarg);
12257 /* If the string was "string\n", call puts("string"). */
12258 size_t len = strlen (str);
12259 if ((unsigned char)str[len - 1] == target_newline)
12261 /* Create a NUL-terminated string that's one char shorter
12262 than the original, stripping off the trailing '\n'. */
12263 char *newstr = alloca (len);
12264 memcpy (newstr, str, len - 1);
12265 newstr[len - 1] = 0;
12267 newarg = build_string_literal (len, newstr);
12269 call = build_call_expr (fn_puts, 1, newarg);
12272 /* We'd like to arrange to call fputs(string,stdout) here,
12273 but we need stdout and don't have a way to get it yet. */
12278 /* The other optimizations can be done only on the non-va_list variants. */
12279 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12282 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12283 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12285 if (!arg || !validate_arg (arg, POINTER_TYPE))
12288 call = build_call_expr (fn_puts, 1, arg);
12291 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12292 else if (strcmp (fmt_str, target_percent_c) == 0)
12294 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12297 call = build_call_expr (fn_putchar, 1, arg);
12303 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12306 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12307 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12308 more than 3 arguments, and ARG may be null in the 2-argument case.
12310 Return NULL_TREE if no simplification was possible, otherwise return the
12311 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12312 code of the function to be simplified. */
12315 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12316 enum built_in_function fcode)
12318 tree fn_fputc, fn_fputs, call = NULL_TREE;
12319 const char *fmt_str = NULL;
12321 /* If the return value is used, don't do the transformation. */
12325 /* Verify the required arguments in the original call. */
12326 if (!validate_arg (fp, POINTER_TYPE))
12328 if (!validate_arg (fmt, POINTER_TYPE))
12331 /* Check whether the format is a literal string constant. */
12332 fmt_str = c_getstr (fmt);
12333 if (fmt_str == NULL)
12336 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12338 /* If we're using an unlocked function, assume the other
12339 unlocked functions exist explicitly. */
12340 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12341 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12345 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12346 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12349 if (!init_target_chars ())
12352 /* If the format doesn't contain % args or %%, use strcpy. */
12353 if (strchr (fmt_str, target_percent) == NULL)
12355 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12359 /* If the format specifier was "", fprintf does nothing. */
12360 if (fmt_str[0] == '\0')
12362 /* If FP has side-effects, just wait until gimplification is
12364 if (TREE_SIDE_EFFECTS (fp))
12367 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12370 /* When "string" doesn't contain %, replace all cases of
12371 fprintf (fp, string) with fputs (string, fp). The fputs
12372 builtin will take care of special cases like length == 1. */
12374 call = build_call_expr (fn_fputs, 2, fmt, fp);
12377 /* The other optimizations can be done only on the non-va_list variants. */
12378 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12381 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12382 else if (strcmp (fmt_str, target_percent_s) == 0)
12384 if (!arg || !validate_arg (arg, POINTER_TYPE))
12387 call = build_call_expr (fn_fputs, 2, arg, fp);
12390 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12391 else if (strcmp (fmt_str, target_percent_c) == 0)
12393 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12396 call = build_call_expr (fn_fputc, 2, arg, fp);
12401 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12404 /* Initialize format string characters in the target charset. */
12407 init_target_chars (void)
12412 target_newline = lang_hooks.to_target_charset ('\n');
12413 target_percent = lang_hooks.to_target_charset ('%');
12414 target_c = lang_hooks.to_target_charset ('c');
12415 target_s = lang_hooks.to_target_charset ('s');
12416 if (target_newline == 0 || target_percent == 0 || target_c == 0
12420 target_percent_c[0] = target_percent;
12421 target_percent_c[1] = target_c;
12422 target_percent_c[2] = '\0';
12424 target_percent_s[0] = target_percent;
12425 target_percent_s[1] = target_s;
12426 target_percent_s[2] = '\0';
12428 target_percent_s_newline[0] = target_percent;
12429 target_percent_s_newline[1] = target_s;
12430 target_percent_s_newline[2] = target_newline;
12431 target_percent_s_newline[3] = '\0';
12438 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12439 and no overflow/underflow occurred. INEXACT is true if M was not
12440 exactly calculated. TYPE is the tree type for the result. This
12441 function assumes that you cleared the MPFR flags and then
12442 calculated M to see if anything subsequently set a flag prior to
12443 entering this function. Return NULL_TREE if any checks fail. */
12446 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12448 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12449 overflow/underflow occurred. If -frounding-math, proceed iff the
12450 result of calling FUNC was exact. */
12451 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12452 && (!flag_rounding_math || !inexact))
12454 REAL_VALUE_TYPE rr;
12456 real_from_mpfr (&rr, m, type, GMP_RNDN);
12457 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12458 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12459 but the mpft_t is not, then we underflowed in the
12461 if (real_isfinite (&rr)
12462 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12464 REAL_VALUE_TYPE rmode;
12466 real_convert (&rmode, TYPE_MODE (type), &rr);
12467 /* Proceed iff the specified mode can hold the value. */
12468 if (real_identical (&rmode, &rr))
12469 return build_real (type, rmode);
12475 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12476 FUNC on it and return the resulting value as a tree with type TYPE.
12477 If MIN and/or MAX are not NULL, then the supplied ARG must be
12478 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12479 acceptable values, otherwise they are not. The mpfr precision is
12480 set to the precision of TYPE. We assume that function FUNC returns
12481 zero if the result could be calculated exactly within the requested
12485 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12486 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12489 tree result = NULL_TREE;
12493 /* To proceed, MPFR must exactly represent the target floating point
12494 format, which only happens when the target base equals two. */
12495 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12496 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12498 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12500 if (real_isfinite (ra)
12501 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12502 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12504 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12508 mpfr_init2 (m, prec);
12509 mpfr_from_real (m, ra, GMP_RNDN);
12510 mpfr_clear_flags ();
12511 inexact = func (m, m, GMP_RNDN);
12512 result = do_mpfr_ckconv (m, type, inexact);
12520 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12521 FUNC on it and return the resulting value as a tree with type TYPE.
12522 The mpfr precision is set to the precision of TYPE. We assume that
12523 function FUNC returns zero if the result could be calculated
12524 exactly within the requested precision. */
12527 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12528 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12530 tree result = NULL_TREE;
12535 /* To proceed, MPFR must exactly represent the target floating point
12536 format, which only happens when the target base equals two. */
12537 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12538 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12539 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12541 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12542 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12544 if (real_isfinite (ra1) && real_isfinite (ra2))
12546 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12550 mpfr_inits2 (prec, m1, m2, NULL);
12551 mpfr_from_real (m1, ra1, GMP_RNDN);
12552 mpfr_from_real (m2, ra2, GMP_RNDN);
12553 mpfr_clear_flags ();
12554 inexact = func (m1, m1, m2, GMP_RNDN);
12555 result = do_mpfr_ckconv (m1, type, inexact);
12556 mpfr_clears (m1, m2, NULL);
12563 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12564 FUNC on it and return the resulting value as a tree with type TYPE.
12565 The mpfr precision is set to the precision of TYPE. We assume that
12566 function FUNC returns zero if the result could be calculated
12567 exactly within the requested precision. */
12570 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12571 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12573 tree result = NULL_TREE;
12579 /* To proceed, MPFR must exactly represent the target floating point
12580 format, which only happens when the target base equals two. */
12581 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12582 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12583 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12584 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12586 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12587 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12588 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12590 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12592 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12596 mpfr_inits2 (prec, m1, m2, m3, NULL);
12597 mpfr_from_real (m1, ra1, GMP_RNDN);
12598 mpfr_from_real (m2, ra2, GMP_RNDN);
12599 mpfr_from_real (m3, ra3, GMP_RNDN);
12600 mpfr_clear_flags ();
12601 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12602 result = do_mpfr_ckconv (m1, type, inexact);
12603 mpfr_clears (m1, m2, m3, NULL);
12610 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12611 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12612 If ARG_SINP and ARG_COSP are NULL then the result is returned
12613 as a complex value.
12614 The type is taken from the type of ARG and is used for setting the
12615 precision of the calculation and results. */
12618 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12620 tree const type = TREE_TYPE (arg);
12621 tree result = NULL_TREE;
12625 /* To proceed, MPFR must exactly represent the target floating point
12626 format, which only happens when the target base equals two. */
12627 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12628 && TREE_CODE (arg) == REAL_CST
12629 && !TREE_OVERFLOW (arg))
12631 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12633 if (real_isfinite (ra))
12635 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12636 tree result_s, result_c;
12640 mpfr_inits2 (prec, m, ms, mc, NULL);
12641 mpfr_from_real (m, ra, GMP_RNDN);
12642 mpfr_clear_flags ();
12643 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12644 result_s = do_mpfr_ckconv (ms, type, inexact);
12645 result_c = do_mpfr_ckconv (mc, type, inexact);
12646 mpfr_clears (m, ms, mc, NULL);
12647 if (result_s && result_c)
12649 /* If we are to return in a complex value do so. */
12650 if (!arg_sinp && !arg_cosp)
12651 return build_complex (build_complex_type (type),
12652 result_c, result_s);
12654 /* Dereference the sin/cos pointer arguments. */
12655 arg_sinp = build_fold_indirect_ref (arg_sinp);
12656 arg_cosp = build_fold_indirect_ref (arg_cosp);
12657 /* Proceed if valid pointer type were passed in. */
12658 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12659 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12661 /* Set the values. */
12662 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12664 TREE_SIDE_EFFECTS (result_s) = 1;
12665 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12667 TREE_SIDE_EFFECTS (result_c) = 1;
12668 /* Combine the assignments into a compound expr. */
12669 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12670 result_s, result_c));
12678 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
12679 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12680 two-argument mpfr order N Bessel function FUNC on them and return
12681 the resulting value as a tree with type TYPE. The mpfr precision
12682 is set to the precision of TYPE. We assume that function FUNC
12683 returns zero if the result could be calculated exactly within the
12684 requested precision. */
12686 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12687 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12688 const REAL_VALUE_TYPE *min, bool inclusive)
12690 tree result = NULL_TREE;
12695 /* To proceed, MPFR must exactly represent the target floating point
12696 format, which only happens when the target base equals two. */
12697 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12698 && host_integerp (arg1, 0)
12699 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12701 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12702 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12705 && real_isfinite (ra)
12706 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12708 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12712 mpfr_init2 (m, prec);
12713 mpfr_from_real (m, ra, GMP_RNDN);
12714 mpfr_clear_flags ();
12715 inexact = func (m, n, m, GMP_RNDN);
12716 result = do_mpfr_ckconv (m, type, inexact);
12724 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12725 the pointer *(ARG_QUO) and return the result. The type is taken
12726 from the type of ARG0 and is used for setting the precision of the
12727 calculation and results. */
12730 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12732 tree const type = TREE_TYPE (arg0);
12733 tree result = NULL_TREE;
12738 /* To proceed, MPFR must exactly represent the target floating point
12739 format, which only happens when the target base equals two. */
12740 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12741 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12742 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12744 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12745 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12747 if (real_isfinite (ra0) && real_isfinite (ra1))
12749 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12754 mpfr_inits2 (prec, m0, m1, NULL);
12755 mpfr_from_real (m0, ra0, GMP_RNDN);
12756 mpfr_from_real (m1, ra1, GMP_RNDN);
12757 mpfr_clear_flags ();
12758 mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
12759 /* Remquo is independent of the rounding mode, so pass
12760 inexact=0 to do_mpfr_ckconv(). */
12761 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12762 mpfr_clears (m0, m1, NULL);
12765 /* MPFR calculates quo in the host's long so it may
12766 return more bits in quo than the target int can hold
12767 if sizeof(host long) > sizeof(target int). This can
12768 happen even for native compilers in LP64 mode. In
12769 these cases, modulo the quo value with the largest
12770 number that the target int can hold while leaving one
12771 bit for the sign. */
12772 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12773 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12775 /* Dereference the quo pointer argument. */
12776 arg_quo = build_fold_indirect_ref (arg_quo);
12777 /* Proceed iff a valid pointer type was passed in. */
12778 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12780 /* Set the value. */
12781 tree result_quo = fold_build2 (MODIFY_EXPR,
12782 TREE_TYPE (arg_quo), arg_quo,
12783 build_int_cst (NULL, integer_quo));
12784 TREE_SIDE_EFFECTS (result_quo) = 1;
12785 /* Combine the quo assignment with the rem. */
12786 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12787 result_quo, result_rem));
12795 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12796 resulting value as a tree with type TYPE. The mpfr precision is
12797 set to the precision of TYPE. We assume that this mpfr function
12798 returns zero if the result could be calculated exactly within the
12799 requested precision. In addition, the integer pointer represented
12800 by ARG_SG will be dereferenced and set to the appropriate signgam
12804 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12806 tree result = NULL_TREE;
12810 /* To proceed, MPFR must exactly represent the target floating point
12811 format, which only happens when the target base equals two. Also
12812 verify ARG is a constant and that ARG_SG is an int pointer. */
12813 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12814 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12815 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12816 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12818 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12820 /* In addition to NaN and Inf, the argument cannot be zero or a
12821 negative integer. */
12822 if (real_isfinite (ra)
12823 && ra->cl != rvc_zero
12824 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
12826 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12831 mpfr_init2 (m, prec);
12832 mpfr_from_real (m, ra, GMP_RNDN);
12833 mpfr_clear_flags ();
12834 inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
12835 result_lg = do_mpfr_ckconv (m, type, inexact);
12841 /* Dereference the arg_sg pointer argument. */
12842 arg_sg = build_fold_indirect_ref (arg_sg);
12843 /* Assign the signgam value into *arg_sg. */
12844 result_sg = fold_build2 (MODIFY_EXPR,
12845 TREE_TYPE (arg_sg), arg_sg,
12846 build_int_cst (NULL, sg));
12847 TREE_SIDE_EFFECTS (result_sg) = 1;
12848 /* Combine the signgam assignment with the lgamma result. */
12849 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12850 result_sg, result_lg));