1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
30 #include "tree-gimple.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names[(int) END_BUILTINS] =
65 #include "builtins.def"
69 /* Setup an array of _DECL trees, make sure each element is
70 initialized to NULL_TREE. */
71 tree built_in_decls[(int) END_BUILTINS];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 It may be NULL_TREE when this is invalid (for instance runtime is not
74 required to implement the function call in all cases). */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
129 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
132 static rtx expand_builtin_bzero (tree);
133 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_alloca (tree, rtx);
139 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static rtx expand_builtin_fputs (tree, rtx, bool);
142 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
143 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_expect (tree, tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (tree);
151 static tree fold_builtin_inf (tree, int);
152 static tree fold_builtin_nan (tree, tree, int);
153 static tree rewrite_call_expr (tree, int, tree, int, ...);
154 static bool validate_arg (const_tree, enum tree_code code);
155 static bool integer_valued_real_p (tree);
156 static tree fold_trunc_transparent_mathfn (tree, tree);
157 static bool readonly_data_expr (tree);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_signbit (tree, rtx);
160 static tree fold_builtin_sqrt (tree, tree);
161 static tree fold_builtin_cbrt (tree, tree);
162 static tree fold_builtin_pow (tree, tree, tree, tree);
163 static tree fold_builtin_powi (tree, tree, tree, tree);
164 static tree fold_builtin_cos (tree, tree, tree);
165 static tree fold_builtin_cosh (tree, tree, tree);
166 static tree fold_builtin_tan (tree, tree);
167 static tree fold_builtin_trunc (tree, tree);
168 static tree fold_builtin_floor (tree, tree);
169 static tree fold_builtin_ceil (tree, tree);
170 static tree fold_builtin_round (tree, tree);
171 static tree fold_builtin_int_roundingfn (tree, tree);
172 static tree fold_builtin_bitop (tree, tree);
173 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
174 static tree fold_builtin_strchr (tree, tree, tree);
175 static tree fold_builtin_memchr (tree, tree, tree, tree);
176 static tree fold_builtin_memcmp (tree, tree, tree);
177 static tree fold_builtin_strcmp (tree, tree);
178 static tree fold_builtin_strncmp (tree, tree, tree);
179 static tree fold_builtin_signbit (tree, tree);
180 static tree fold_builtin_copysign (tree, tree, tree, tree);
181 static tree fold_builtin_isascii (tree);
182 static tree fold_builtin_toascii (tree);
183 static tree fold_builtin_isdigit (tree);
184 static tree fold_builtin_fabs (tree, tree);
185 static tree fold_builtin_abs (tree, tree);
186 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
188 static tree fold_builtin_n (tree, tree *, int, bool);
189 static tree fold_builtin_0 (tree, bool);
190 static tree fold_builtin_1 (tree, tree, bool);
191 static tree fold_builtin_2 (tree, tree, tree, bool);
192 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
193 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
194 static tree fold_builtin_varargs (tree, tree, bool);
196 static tree fold_builtin_strpbrk (tree, tree, tree);
197 static tree fold_builtin_strstr (tree, tree, tree);
198 static tree fold_builtin_strrchr (tree, tree, tree);
199 static tree fold_builtin_strcat (tree, tree);
200 static tree fold_builtin_strncat (tree, tree, tree);
201 static tree fold_builtin_strspn (tree, tree);
202 static tree fold_builtin_strcspn (tree, tree);
203 static tree fold_builtin_sprintf (tree, tree, tree, int);
205 static rtx expand_builtin_object_size (tree);
206 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
207 enum built_in_function);
208 static void maybe_emit_chk_warning (tree, enum built_in_function);
209 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
210 static tree fold_builtin_object_size (tree, tree);
211 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
212 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
213 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
214 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
215 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
216 enum built_in_function);
217 static bool init_target_chars (void);
219 static unsigned HOST_WIDE_INT target_newline;
220 static unsigned HOST_WIDE_INT target_percent;
221 static unsigned HOST_WIDE_INT target_c;
222 static unsigned HOST_WIDE_INT target_s;
223 static char target_percent_c[3];
224 static char target_percent_s[3];
225 static char target_percent_s_newline[4];
226 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_arg2 (tree, tree, tree,
229 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
230 static tree do_mpfr_arg3 (tree, tree, tree, tree,
231 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
232 static tree do_mpfr_sincos (tree, tree, tree);
233 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
234 static tree do_mpfr_bessel_n (tree, tree, tree,
235 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
236 const REAL_VALUE_TYPE *, bool);
237 static tree do_mpfr_remquo (tree, tree, tree);
238 static tree do_mpfr_lgamma_r (tree, tree, tree);
241 /* Return true if NODE should be considered for inline expansion regardless
242 of the optimization level. This means whenever a function is invoked with
243 its "internal" name, which normally contains the prefix "__builtin". */
245 static bool called_as_built_in (tree node)
247 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
248 if (strncmp (name, "__builtin_", 10) == 0)
250 if (strncmp (name, "__sync_", 7) == 0)
255 /* Return the alignment in bits of EXP, a pointer valued expression.
256 But don't return more than MAX_ALIGN no matter what.
257 The alignment returned is, by default, the alignment of the thing that
258 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
260 Otherwise, look at the expression to see if we can do better, i.e., if the
261 expression is actually pointing at an object whose alignment is tighter. */
264 get_pointer_alignment (tree exp, unsigned int max_align)
266 unsigned int align, inner;
268 /* We rely on TER to compute accurate alignment information. */
269 if (!(optimize && flag_tree_ter))
272 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
275 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
276 align = MIN (align, max_align);
280 switch (TREE_CODE (exp))
283 exp = TREE_OPERAND (exp, 0);
284 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
287 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
288 align = MIN (inner, max_align);
291 case POINTER_PLUS_EXPR:
292 /* If sum of pointer + int, restrict our maximum alignment to that
293 imposed by the integer. If not, we can't do any better than
295 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
298 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
299 & (max_align / BITS_PER_UNIT - 1))
303 exp = TREE_OPERAND (exp, 0);
307 /* See what we are pointing at and look at its alignment. */
308 exp = TREE_OPERAND (exp, 0);
310 if (handled_component_p (exp))
312 HOST_WIDE_INT bitsize, bitpos;
314 enum machine_mode mode;
315 int unsignedp, volatilep;
317 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
318 &mode, &unsignedp, &volatilep, true);
320 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
321 if (offset && TREE_CODE (offset) == PLUS_EXPR
322 && host_integerp (TREE_OPERAND (offset, 1), 1))
324 /* Any overflow in calculating offset_bits won't change
327 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
331 inner = MIN (inner, (offset_bits & -offset_bits));
332 offset = TREE_OPERAND (offset, 0);
334 if (offset && TREE_CODE (offset) == MULT_EXPR
335 && host_integerp (TREE_OPERAND (offset, 1), 1))
337 /* Any overflow in calculating offset_factor won't change
339 unsigned offset_factor
340 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
344 inner = MIN (inner, (offset_factor & -offset_factor));
347 inner = MIN (inner, BITS_PER_UNIT);
350 align = MIN (inner, DECL_ALIGN (exp));
351 #ifdef CONSTANT_ALIGNMENT
352 else if (CONSTANT_CLASS_P (exp))
353 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
355 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
356 || TREE_CODE (exp) == INDIRECT_REF)
357 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
359 align = MIN (align, inner);
360 return MIN (align, max_align);
368 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
369 way, because it could contain a zero byte in the middle.
370 TREE_STRING_LENGTH is the size of the character array, not the string.
372 ONLY_VALUE should be nonzero if the result is not going to be emitted
373 into the instruction stream and zero if it is going to be expanded.
374 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
375 is returned, otherwise NULL, since
376 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
377 evaluate the side-effects.
379 The value returned is of type `ssizetype'.
381 Unfortunately, string_constant can't access the values of const char
382 arrays with initializers, so neither can we do so here. */
385 c_strlen (tree src, int only_value)
388 HOST_WIDE_INT offset;
393 if (TREE_CODE (src) == COND_EXPR
394 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
398 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
399 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
400 if (tree_int_cst_equal (len1, len2))
404 if (TREE_CODE (src) == COMPOUND_EXPR
405 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
406 return c_strlen (TREE_OPERAND (src, 1), only_value);
408 src = string_constant (src, &offset_node);
412 max = TREE_STRING_LENGTH (src) - 1;
413 ptr = TREE_STRING_POINTER (src);
415 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
417 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
418 compute the offset to the following null if we don't know where to
419 start searching for it. */
422 for (i = 0; i < max; i++)
426 /* We don't know the starting offset, but we do know that the string
427 has no internal zero bytes. We can assume that the offset falls
428 within the bounds of the string; otherwise, the programmer deserves
429 what he gets. Subtract the offset from the length of the string,
430 and return that. This would perhaps not be valid if we were dealing
431 with named arrays in addition to literal string constants. */
433 return size_diffop (size_int (max), offset_node);
436 /* We have a known offset into the string. Start searching there for
437 a null character if we can represent it as a single HOST_WIDE_INT. */
438 if (offset_node == 0)
440 else if (! host_integerp (offset_node, 0))
443 offset = tree_low_cst (offset_node, 0);
445 /* If the offset is known to be out of bounds, warn, and call strlen at
447 if (offset < 0 || offset > max)
449 /* Suppress multiple warnings for propagated constant strings. */
450 if (! TREE_NO_WARNING (src))
452 warning (0, "offset outside bounds of constant string");
453 TREE_NO_WARNING (src) = 1;
458 /* Use strlen to search for the first zero byte. Since any strings
459 constructed with build_string will have nulls appended, we win even
460 if we get handed something like (char[4])"abcd".
462 Since OFFSET is our starting index into the string, no further
463 calculation is needed. */
464 return ssize_int (strlen (ptr + offset));
467 /* Return a char pointer for a C string if it is a string constant
468 or sum of string constant and integer constant. */
475 src = string_constant (src, &offset_node);
479 if (offset_node == 0)
480 return TREE_STRING_POINTER (src);
481 else if (!host_integerp (offset_node, 1)
482 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
485 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
488 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
489 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
492 c_readstr (const char *str, enum machine_mode mode)
498 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
503 for (i = 0; i < GET_MODE_SIZE (mode); i++)
506 if (WORDS_BIG_ENDIAN)
507 j = GET_MODE_SIZE (mode) - i - 1;
508 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
509 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
510 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
512 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
515 ch = (unsigned char) str[i];
516 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
518 return immed_double_const (c[0], c[1], mode);
521 /* Cast a target constant CST to target CHAR and if that value fits into
522 host char type, return zero and put that value into variable pointed to by
526 target_char_cast (tree cst, char *p)
528 unsigned HOST_WIDE_INT val, hostval;
530 if (!host_integerp (cst, 1)
531 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
534 val = tree_low_cst (cst, 1);
535 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
536 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
539 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
540 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
549 /* Similar to save_expr, but assumes that arbitrary code is not executed
550 in between the multiple evaluations. In particular, we assume that a
551 non-addressable local variable will not be modified. */
554 builtin_save_expr (tree exp)
556 if (TREE_ADDRESSABLE (exp) == 0
557 && (TREE_CODE (exp) == PARM_DECL
558 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
561 return save_expr (exp);
564 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
565 times to get the address of either a higher stack frame, or a return
566 address located within it (depending on FNDECL_CODE). */
569 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
573 #ifdef INITIAL_FRAME_ADDRESS_RTX
574 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
578 /* For a zero count with __builtin_return_address, we don't care what
579 frame address we return, because target-specific definitions will
580 override us. Therefore frame pointer elimination is OK, and using
581 the soft frame pointer is OK.
583 For a nonzero count, or a zero count with __builtin_frame_address,
584 we require a stable offset from the current frame pointer to the
585 previous one, so we must use the hard frame pointer, and
586 we must disable frame pointer elimination. */
587 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
588 tem = frame_pointer_rtx;
591 tem = hard_frame_pointer_rtx;
593 /* Tell reload not to eliminate the frame pointer. */
594 crtl->accesses_prior_frames = 1;
598 /* Some machines need special handling before we can access
599 arbitrary frames. For example, on the SPARC, we must first flush
600 all register windows to the stack. */
601 #ifdef SETUP_FRAME_ADDRESSES
603 SETUP_FRAME_ADDRESSES ();
606 /* On the SPARC, the return address is not in the frame, it is in a
607 register. There is no way to access it off of the current frame
608 pointer, but it can be accessed off the previous frame pointer by
609 reading the value from the register window save area. */
610 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
611 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
615 /* Scan back COUNT frames to the specified frame. */
616 for (i = 0; i < count; i++)
618 /* Assume the dynamic chain pointer is in the word that the
619 frame address points to, unless otherwise specified. */
620 #ifdef DYNAMIC_CHAIN_ADDRESS
621 tem = DYNAMIC_CHAIN_ADDRESS (tem);
623 tem = memory_address (Pmode, tem);
624 tem = gen_frame_mem (Pmode, tem);
625 tem = copy_to_reg (tem);
628 /* For __builtin_frame_address, return what we've got. But, on
629 the SPARC for example, we may have to add a bias. */
630 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
631 #ifdef FRAME_ADDR_RTX
632 return FRAME_ADDR_RTX (tem);
637 /* For __builtin_return_address, get the return address from that frame. */
638 #ifdef RETURN_ADDR_RTX
639 tem = RETURN_ADDR_RTX (count, tem);
641 tem = memory_address (Pmode,
642 plus_constant (tem, GET_MODE_SIZE (Pmode)));
643 tem = gen_frame_mem (Pmode, tem);
648 /* Alias set used for setjmp buffer. */
649 static alias_set_type setjmp_alias_set = -1;
651 /* Construct the leading half of a __builtin_setjmp call. Control will
652 return to RECEIVER_LABEL. This is also called directly by the SJLJ
653 exception handling code. */
656 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
658 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
662 if (setjmp_alias_set == -1)
663 setjmp_alias_set = new_alias_set ();
665 buf_addr = convert_memory_address (Pmode, buf_addr);
667 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
669 /* We store the frame pointer and the address of receiver_label in
670 the buffer and use the rest of it for the stack save area, which
671 is machine-dependent. */
673 mem = gen_rtx_MEM (Pmode, buf_addr);
674 set_mem_alias_set (mem, setjmp_alias_set);
675 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
677 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
678 set_mem_alias_set (mem, setjmp_alias_set);
680 emit_move_insn (validize_mem (mem),
681 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
683 stack_save = gen_rtx_MEM (sa_mode,
684 plus_constant (buf_addr,
685 2 * GET_MODE_SIZE (Pmode)));
686 set_mem_alias_set (stack_save, setjmp_alias_set);
687 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
689 /* If there is further processing to do, do it. */
690 #ifdef HAVE_builtin_setjmp_setup
691 if (HAVE_builtin_setjmp_setup)
692 emit_insn (gen_builtin_setjmp_setup (buf_addr));
695 /* Tell optimize_save_area_alloca that extra work is going to
696 need to go on during alloca. */
697 cfun->calls_setjmp = 1;
699 /* We have a nonlocal label. */
700 cfun->has_nonlocal_label = 1;
703 /* Construct the trailing part of a __builtin_setjmp call. This is
704 also called directly by the SJLJ exception handling code. */
707 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
709 /* Clobber the FP when we get here, so we have to make sure it's
710 marked as used by this function. */
711 emit_use (hard_frame_pointer_rtx);
713 /* Mark the static chain as clobbered here so life information
714 doesn't get messed up for it. */
715 emit_clobber (static_chain_rtx);
717 /* Now put in the code to restore the frame pointer, and argument
718 pointer, if needed. */
719 #ifdef HAVE_nonlocal_goto
720 if (! HAVE_nonlocal_goto)
723 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
724 /* This might change the hard frame pointer in ways that aren't
725 apparent to early optimization passes, so force a clobber. */
726 emit_clobber (hard_frame_pointer_rtx);
729 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
730 if (fixed_regs[ARG_POINTER_REGNUM])
732 #ifdef ELIMINABLE_REGS
734 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
736 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
737 if (elim_regs[i].from == ARG_POINTER_REGNUM
738 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
741 if (i == ARRAY_SIZE (elim_regs))
744 /* Now restore our arg pointer from the address at which it
745 was saved in our stack frame. */
746 emit_move_insn (virtual_incoming_args_rtx,
747 copy_to_reg (get_arg_pointer_save_area ()));
752 #ifdef HAVE_builtin_setjmp_receiver
753 if (HAVE_builtin_setjmp_receiver)
754 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
757 #ifdef HAVE_nonlocal_goto_receiver
758 if (HAVE_nonlocal_goto_receiver)
759 emit_insn (gen_nonlocal_goto_receiver ());
764 /* We must not allow the code we just generated to be reordered by
765 scheduling. Specifically, the update of the frame pointer must
766 happen immediately, not later. */
767 emit_insn (gen_blockage ());
770 /* __builtin_longjmp is passed a pointer to an array of five words (not
771 all will be used on all machines). It operates similarly to the C
772 library function of the same name, but is more efficient. Much of
773 the code below is copied from the handling of non-local gotos. */
776 expand_builtin_longjmp (rtx buf_addr, rtx value)
778 rtx fp, lab, stack, insn, last;
779 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
781 if (setjmp_alias_set == -1)
782 setjmp_alias_set = new_alias_set ();
784 buf_addr = convert_memory_address (Pmode, buf_addr);
786 buf_addr = force_reg (Pmode, buf_addr);
788 /* We used to store value in static_chain_rtx, but that fails if pointers
789 are smaller than integers. We instead require that the user must pass
790 a second argument of 1, because that is what builtin_setjmp will
791 return. This also makes EH slightly more efficient, since we are no
792 longer copying around a value that we don't care about. */
793 gcc_assert (value == const1_rtx);
795 last = get_last_insn ();
796 #ifdef HAVE_builtin_longjmp
797 if (HAVE_builtin_longjmp)
798 emit_insn (gen_builtin_longjmp (buf_addr));
802 fp = gen_rtx_MEM (Pmode, buf_addr);
803 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
804 GET_MODE_SIZE (Pmode)));
806 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
807 2 * GET_MODE_SIZE (Pmode)));
808 set_mem_alias_set (fp, setjmp_alias_set);
809 set_mem_alias_set (lab, setjmp_alias_set);
810 set_mem_alias_set (stack, setjmp_alias_set);
812 /* Pick up FP, label, and SP from the block and jump. This code is
813 from expand_goto in stmt.c; see there for detailed comments. */
814 #ifdef HAVE_nonlocal_goto
815 if (HAVE_nonlocal_goto)
816 /* We have to pass a value to the nonlocal_goto pattern that will
817 get copied into the static_chain pointer, but it does not matter
818 what that value is, because builtin_setjmp does not use it. */
819 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
823 lab = copy_to_reg (lab);
825 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
826 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
828 emit_move_insn (hard_frame_pointer_rtx, fp);
829 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
831 emit_use (hard_frame_pointer_rtx);
832 emit_use (stack_pointer_rtx);
833 emit_indirect_jump (lab);
837 /* Search backwards and mark the jump insn as a non-local goto.
838 Note that this precludes the use of __builtin_longjmp to a
839 __builtin_setjmp target in the same function. However, we've
840 already cautioned the user that these functions are for
841 internal exception handling use only. */
842 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
844 gcc_assert (insn != last);
848 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
852 else if (CALL_P (insn))
857 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
858 and the address of the save area. */
861 expand_builtin_nonlocal_goto (tree exp)
863 tree t_label, t_save_area;
864 rtx r_label, r_save_area, r_fp, r_sp, insn;
866 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
869 t_label = CALL_EXPR_ARG (exp, 0);
870 t_save_area = CALL_EXPR_ARG (exp, 1);
872 r_label = expand_normal (t_label);
873 r_label = convert_memory_address (Pmode, r_label);
874 r_save_area = expand_normal (t_save_area);
875 r_save_area = convert_memory_address (Pmode, r_save_area);
876 r_fp = gen_rtx_MEM (Pmode, r_save_area);
877 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
878 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
880 crtl->has_nonlocal_goto = 1;
882 #ifdef HAVE_nonlocal_goto
883 /* ??? We no longer need to pass the static chain value, afaik. */
884 if (HAVE_nonlocal_goto)
885 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
889 r_label = copy_to_reg (r_label);
891 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
892 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
894 /* Restore frame pointer for containing function.
895 This sets the actual hard register used for the frame pointer
896 to the location of the function's incoming static chain info.
897 The non-local goto handler will then adjust it to contain the
898 proper value and reload the argument pointer, if needed. */
899 emit_move_insn (hard_frame_pointer_rtx, r_fp);
900 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
902 /* USE of hard_frame_pointer_rtx added for consistency;
903 not clear if really needed. */
904 emit_use (hard_frame_pointer_rtx);
905 emit_use (stack_pointer_rtx);
907 /* If the architecture is using a GP register, we must
908 conservatively assume that the target function makes use of it.
909 The prologue of functions with nonlocal gotos must therefore
910 initialize the GP register to the appropriate value, and we
911 must then make sure that this value is live at the point
912 of the jump. (Note that this doesn't necessarily apply
913 to targets with a nonlocal_goto pattern; they are free
914 to implement it in their own way. Note also that this is
915 a no-op if the GP register is a global invariant.) */
916 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
917 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
918 emit_use (pic_offset_table_rtx);
920 emit_indirect_jump (r_label);
923 /* Search backwards to the jump insn and mark it as a
925 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
929 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
930 const0_rtx, REG_NOTES (insn));
933 else if (CALL_P (insn))
940 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
941 (not all will be used on all machines) that was passed to __builtin_setjmp.
942 It updates the stack pointer in that block to correspond to the current
946 expand_builtin_update_setjmp_buf (rtx buf_addr)
948 enum machine_mode sa_mode = Pmode;
952 #ifdef HAVE_save_stack_nonlocal
953 if (HAVE_save_stack_nonlocal)
954 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
956 #ifdef STACK_SAVEAREA_MODE
957 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
961 = gen_rtx_MEM (sa_mode,
964 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
968 emit_insn (gen_setjmp ());
971 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
974 /* Expand a call to __builtin_prefetch. For a target that does not support
975 data prefetch, evaluate the memory address argument in case it has side
979 expand_builtin_prefetch (tree exp)
981 tree arg0, arg1, arg2;
985 if (!validate_arglist (exp, POINTER_TYPE, 0))
988 arg0 = CALL_EXPR_ARG (exp, 0);
990 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
991 zero (read) and argument 2 (locality) defaults to 3 (high degree of
993 nargs = call_expr_nargs (exp);
995 arg1 = CALL_EXPR_ARG (exp, 1);
997 arg1 = integer_zero_node;
999 arg2 = CALL_EXPR_ARG (exp, 2);
1001 arg2 = build_int_cst (NULL_TREE, 3);
1003 /* Argument 0 is an address. */
1004 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1006 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1007 if (TREE_CODE (arg1) != INTEGER_CST)
1009 error ("second argument to %<__builtin_prefetch%> must be a constant");
1010 arg1 = integer_zero_node;
1012 op1 = expand_normal (arg1);
1013 /* Argument 1 must be either zero or one. */
1014 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1016 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1021 /* Argument 2 (locality) must be a compile-time constant int. */
1022 if (TREE_CODE (arg2) != INTEGER_CST)
1024 error ("third argument to %<__builtin_prefetch%> must be a constant");
1025 arg2 = integer_zero_node;
1027 op2 = expand_normal (arg2);
1028 /* Argument 2 must be 0, 1, 2, or 3. */
1029 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1031 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1035 #ifdef HAVE_prefetch
1038 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1040 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1041 || (GET_MODE (op0) != Pmode))
1043 op0 = convert_memory_address (Pmode, op0);
1044 op0 = force_reg (Pmode, op0);
1046 emit_insn (gen_prefetch (op0, op1, op2));
1050 /* Don't do anything with direct references to volatile memory, but
1051 generate code to handle other side effects. */
1052 if (!MEM_P (op0) && side_effects_p (op0))
1056 /* Get a MEM rtx for expression EXP which is the address of an operand
1057 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1058 the maximum length of the block of memory that might be accessed or
1062 get_memory_rtx (tree exp, tree len)
1064 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1065 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1067 /* Get an expression we can use to find the attributes to assign to MEM.
1068 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1069 we can. First remove any nops. */
1070 while (CONVERT_EXPR_P (exp)
1071 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1072 exp = TREE_OPERAND (exp, 0);
1074 if (TREE_CODE (exp) == ADDR_EXPR)
1075 exp = TREE_OPERAND (exp, 0);
1076 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1077 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1081 /* Honor attributes derived from exp, except for the alias set
1082 (as builtin stringops may alias with anything) and the size
1083 (as stringops may access multiple array elements). */
1086 set_mem_attributes (mem, exp, 0);
1088 /* Allow the string and memory builtins to overflow from one
1089 field into another, see http://gcc.gnu.org/PR23561.
1090 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1091 memory accessed by the string or memory builtin will fit
1092 within the field. */
1093 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1095 tree mem_expr = MEM_EXPR (mem);
1096 HOST_WIDE_INT offset = -1, length = -1;
1099 while (TREE_CODE (inner) == ARRAY_REF
1100 || CONVERT_EXPR_P (inner)
1101 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1102 || TREE_CODE (inner) == SAVE_EXPR)
1103 inner = TREE_OPERAND (inner, 0);
1105 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1107 if (MEM_OFFSET (mem)
1108 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1109 offset = INTVAL (MEM_OFFSET (mem));
1111 if (offset >= 0 && len && host_integerp (len, 0))
1112 length = tree_low_cst (len, 0);
1114 while (TREE_CODE (inner) == COMPONENT_REF)
1116 tree field = TREE_OPERAND (inner, 1);
1117 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1118 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1120 /* Bitfields are generally not byte-addressable. */
1121 gcc_assert (!DECL_BIT_FIELD (field)
1122 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1123 % BITS_PER_UNIT) == 0
1124 && host_integerp (DECL_SIZE (field), 0)
1125 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1126 % BITS_PER_UNIT) == 0));
1129 && host_integerp (DECL_SIZE_UNIT (field), 0))
1132 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1133 /* If we can prove the memory starting at XEXP (mem, 0)
1134 and ending at XEXP (mem, 0) + LENGTH will fit into
1135 this field, we can keep that COMPONENT_REF in MEM_EXPR. */
1138 && offset + length <= size)
1143 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1144 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1145 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1153 mem_expr = TREE_OPERAND (mem_expr, 0);
1154 inner = TREE_OPERAND (inner, 0);
1157 if (mem_expr == NULL)
1159 if (mem_expr != MEM_EXPR (mem))
1161 set_mem_expr (mem, mem_expr);
1162 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1165 set_mem_alias_set (mem, 0);
1166 set_mem_size (mem, NULL_RTX);
1172 /* Built-in functions to perform an untyped call and return. */
1174 /* For each register that may be used for calling a function, this
1175 gives a mode used to copy the register's value. VOIDmode indicates
1176 the register is not used for calling a function. If the machine
1177 has register windows, this gives only the outbound registers.
1178 INCOMING_REGNO gives the corresponding inbound register. */
1179 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1181 /* For each register that may be used for returning values, this gives
1182 a mode used to copy the register's value. VOIDmode indicates the
1183 register is not used for returning values. If the machine has
1184 register windows, this gives only the outbound registers.
1185 INCOMING_REGNO gives the corresponding inbound register. */
1186 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1188 /* For each register that may be used for calling a function, this
1189 gives the offset of that register into the block returned by
1190 __builtin_apply_args. 0 indicates that the register is not
1191 used for calling a function. */
1192 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1194 /* Return the size required for the block returned by __builtin_apply_args,
1195 and initialize apply_args_mode. */
1198 apply_args_size (void)
1200 static int size = -1;
1203 enum machine_mode mode;
1205 /* The values computed by this function never change. */
1208 /* The first value is the incoming arg-pointer. */
1209 size = GET_MODE_SIZE (Pmode);
1211 /* The second value is the structure value address unless this is
1212 passed as an "invisible" first argument. */
1213 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1214 size += GET_MODE_SIZE (Pmode);
1216 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1217 if (FUNCTION_ARG_REGNO_P (regno))
1219 mode = reg_raw_mode[regno];
1221 gcc_assert (mode != VOIDmode);
1223 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1224 if (size % align != 0)
1225 size = CEIL (size, align) * align;
1226 apply_args_reg_offset[regno] = size;
1227 size += GET_MODE_SIZE (mode);
1228 apply_args_mode[regno] = mode;
1232 apply_args_mode[regno] = VOIDmode;
1233 apply_args_reg_offset[regno] = 0;
1239 /* Return the size required for the block returned by __builtin_apply,
1240 and initialize apply_result_mode. */
1243 apply_result_size (void)
1245 static int size = -1;
1247 enum machine_mode mode;
1249 /* The values computed by this function never change. */
1254 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1255 if (FUNCTION_VALUE_REGNO_P (regno))
1257 mode = reg_raw_mode[regno];
1259 gcc_assert (mode != VOIDmode);
1261 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1262 if (size % align != 0)
1263 size = CEIL (size, align) * align;
1264 size += GET_MODE_SIZE (mode);
1265 apply_result_mode[regno] = mode;
1268 apply_result_mode[regno] = VOIDmode;
1270 /* Allow targets that use untyped_call and untyped_return to override
1271 the size so that machine-specific information can be stored here. */
1272 #ifdef APPLY_RESULT_SIZE
1273 size = APPLY_RESULT_SIZE;
1279 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1280 /* Create a vector describing the result block RESULT. If SAVEP is true,
1281 the result block is used to save the values; otherwise it is used to
1282 restore the values. */
1285 result_vector (int savep, rtx result)
1287 int regno, size, align, nelts;
1288 enum machine_mode mode;
1290 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1293 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1294 if ((mode = apply_result_mode[regno]) != VOIDmode)
1296 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1297 if (size % align != 0)
1298 size = CEIL (size, align) * align;
1299 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1300 mem = adjust_address (result, mode, size);
1301 savevec[nelts++] = (savep
1302 ? gen_rtx_SET (VOIDmode, mem, reg)
1303 : gen_rtx_SET (VOIDmode, reg, mem));
1304 size += GET_MODE_SIZE (mode);
1306 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1308 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1310 /* Save the state required to perform an untyped call with the same
1311 arguments as were passed to the current function. */
1314 expand_builtin_apply_args_1 (void)
1317 int size, align, regno;
1318 enum machine_mode mode;
1319 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1321 /* Create a block where the arg-pointer, structure value address,
1322 and argument registers can be saved. */
1323 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1325 /* Walk past the arg-pointer and structure value address. */
1326 size = GET_MODE_SIZE (Pmode);
1327 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1328 size += GET_MODE_SIZE (Pmode);
1330 /* Save each register used in calling a function to the block. */
1331 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1332 if ((mode = apply_args_mode[regno]) != VOIDmode)
1334 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1335 if (size % align != 0)
1336 size = CEIL (size, align) * align;
1338 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1340 emit_move_insn (adjust_address (registers, mode, size), tem);
1341 size += GET_MODE_SIZE (mode);
1344 /* Save the arg pointer to the block. */
1345 tem = copy_to_reg (virtual_incoming_args_rtx);
1346 #ifdef STACK_GROWS_DOWNWARD
1347 /* We need the pointer as the caller actually passed them to us, not
1348 as we might have pretended they were passed. Make sure it's a valid
1349 operand, as emit_move_insn isn't expected to handle a PLUS. */
1351 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1354 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1356 size = GET_MODE_SIZE (Pmode);
1358 /* Save the structure value address unless this is passed as an
1359 "invisible" first argument. */
1360 if (struct_incoming_value)
1362 emit_move_insn (adjust_address (registers, Pmode, size),
1363 copy_to_reg (struct_incoming_value));
1364 size += GET_MODE_SIZE (Pmode);
1367 /* Return the address of the block. */
1368 return copy_addr_to_reg (XEXP (registers, 0));
1371 /* __builtin_apply_args returns block of memory allocated on
1372 the stack into which is stored the arg pointer, structure
1373 value address, static chain, and all the registers that might
1374 possibly be used in performing a function call. The code is
1375 moved to the start of the function so the incoming values are
1379 expand_builtin_apply_args (void)
1381 /* Don't do __builtin_apply_args more than once in a function.
1382 Save the result of the first call and reuse it. */
1383 if (apply_args_value != 0)
1384 return apply_args_value;
1386 /* When this function is called, it means that registers must be
1387 saved on entry to this function. So we migrate the
1388 call to the first insn of this function. */
1393 temp = expand_builtin_apply_args_1 ();
1397 apply_args_value = temp;
1399 /* Put the insns after the NOTE that starts the function.
1400 If this is inside a start_sequence, make the outer-level insn
1401 chain current, so the code is placed at the start of the
1403 push_topmost_sequence ();
1404 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1405 pop_topmost_sequence ();
1410 /* Perform an untyped call and save the state required to perform an
1411 untyped return of whatever value was returned by the given function. */
1414 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1416 int size, align, regno;
1417 enum machine_mode mode;
1418 rtx incoming_args, result, reg, dest, src, call_insn;
1419 rtx old_stack_level = 0;
1420 rtx call_fusage = 0;
1421 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1423 arguments = convert_memory_address (Pmode, arguments);
1425 /* Create a block where the return registers can be saved. */
1426 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1428 /* Fetch the arg pointer from the ARGUMENTS block. */
1429 incoming_args = gen_reg_rtx (Pmode);
1430 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1431 #ifndef STACK_GROWS_DOWNWARD
1432 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1433 incoming_args, 0, OPTAB_LIB_WIDEN);
1436 /* Push a new argument block and copy the arguments. Do not allow
1437 the (potential) memcpy call below to interfere with our stack
1439 do_pending_stack_adjust ();
1442 /* Save the stack with nonlocal if available. */
1443 #ifdef HAVE_save_stack_nonlocal
1444 if (HAVE_save_stack_nonlocal)
1445 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1448 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1450 /* Allocate a block of memory onto the stack and copy the memory
1451 arguments to the outgoing arguments address. */
1452 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1453 dest = virtual_outgoing_args_rtx;
1454 #ifndef STACK_GROWS_DOWNWARD
1455 if (GET_CODE (argsize) == CONST_INT)
1456 dest = plus_constant (dest, -INTVAL (argsize));
1458 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1460 dest = gen_rtx_MEM (BLKmode, dest);
1461 set_mem_align (dest, PARM_BOUNDARY);
1462 src = gen_rtx_MEM (BLKmode, incoming_args);
1463 set_mem_align (src, PARM_BOUNDARY);
1464 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1466 /* Refer to the argument block. */
1468 arguments = gen_rtx_MEM (BLKmode, arguments);
1469 set_mem_align (arguments, PARM_BOUNDARY);
1471 /* Walk past the arg-pointer and structure value address. */
1472 size = GET_MODE_SIZE (Pmode);
1474 size += GET_MODE_SIZE (Pmode);
1476 /* Restore each of the registers previously saved. Make USE insns
1477 for each of these registers for use in making the call. */
1478 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1479 if ((mode = apply_args_mode[regno]) != VOIDmode)
1481 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1482 if (size % align != 0)
1483 size = CEIL (size, align) * align;
1484 reg = gen_rtx_REG (mode, regno);
1485 emit_move_insn (reg, adjust_address (arguments, mode, size));
1486 use_reg (&call_fusage, reg);
1487 size += GET_MODE_SIZE (mode);
1490 /* Restore the structure value address unless this is passed as an
1491 "invisible" first argument. */
1492 size = GET_MODE_SIZE (Pmode);
1495 rtx value = gen_reg_rtx (Pmode);
1496 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1497 emit_move_insn (struct_value, value);
1498 if (REG_P (struct_value))
1499 use_reg (&call_fusage, struct_value);
1500 size += GET_MODE_SIZE (Pmode);
1503 /* All arguments and registers used for the call are set up by now! */
1504 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1506 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1507 and we don't want to load it into a register as an optimization,
1508 because prepare_call_address already did it if it should be done. */
1509 if (GET_CODE (function) != SYMBOL_REF)
1510 function = memory_address (FUNCTION_MODE, function);
1512 /* Generate the actual call instruction and save the return value. */
1513 #ifdef HAVE_untyped_call
1514 if (HAVE_untyped_call)
1515 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1516 result, result_vector (1, result)));
1519 #ifdef HAVE_call_value
1520 if (HAVE_call_value)
1524 /* Locate the unique return register. It is not possible to
1525 express a call that sets more than one return register using
1526 call_value; use untyped_call for that. In fact, untyped_call
1527 only needs to save the return registers in the given block. */
1528 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1529 if ((mode = apply_result_mode[regno]) != VOIDmode)
1531 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1533 valreg = gen_rtx_REG (mode, regno);
1536 emit_call_insn (GEN_CALL_VALUE (valreg,
1537 gen_rtx_MEM (FUNCTION_MODE, function),
1538 const0_rtx, NULL_RTX, const0_rtx));
1540 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1546 /* Find the CALL insn we just emitted, and attach the register usage
1548 call_insn = last_call_insn ();
1549 add_function_usage_to (call_insn, call_fusage);
1551 /* Restore the stack. */
1552 #ifdef HAVE_save_stack_nonlocal
1553 if (HAVE_save_stack_nonlocal)
1554 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1557 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1561 /* Return the address of the result block. */
1562 result = copy_addr_to_reg (XEXP (result, 0));
1563 return convert_memory_address (ptr_mode, result);
1566 /* Perform an untyped return. */
1569 expand_builtin_return (rtx result)
1571 int size, align, regno;
1572 enum machine_mode mode;
1574 rtx call_fusage = 0;
1576 result = convert_memory_address (Pmode, result);
1578 apply_result_size ();
1579 result = gen_rtx_MEM (BLKmode, result);
1581 #ifdef HAVE_untyped_return
1582 if (HAVE_untyped_return)
1584 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1590 /* Restore the return value and note that each value is used. */
1592 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1593 if ((mode = apply_result_mode[regno]) != VOIDmode)
1595 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1596 if (size % align != 0)
1597 size = CEIL (size, align) * align;
1598 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1599 emit_move_insn (reg, adjust_address (result, mode, size));
1601 push_to_sequence (call_fusage);
1603 call_fusage = get_insns ();
1605 size += GET_MODE_SIZE (mode);
1608 /* Put the USE insns before the return. */
1609 emit_insn (call_fusage);
1611 /* Return whatever values was restored by jumping directly to the end
1613 expand_naked_return ();
1616 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1618 static enum type_class
1619 type_to_class (tree type)
1621 switch (TREE_CODE (type))
1623 case VOID_TYPE: return void_type_class;
1624 case INTEGER_TYPE: return integer_type_class;
1625 case ENUMERAL_TYPE: return enumeral_type_class;
1626 case BOOLEAN_TYPE: return boolean_type_class;
1627 case POINTER_TYPE: return pointer_type_class;
1628 case REFERENCE_TYPE: return reference_type_class;
1629 case OFFSET_TYPE: return offset_type_class;
1630 case REAL_TYPE: return real_type_class;
1631 case COMPLEX_TYPE: return complex_type_class;
1632 case FUNCTION_TYPE: return function_type_class;
1633 case METHOD_TYPE: return method_type_class;
1634 case RECORD_TYPE: return record_type_class;
1636 case QUAL_UNION_TYPE: return union_type_class;
1637 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1638 ? string_type_class : array_type_class);
1639 case LANG_TYPE: return lang_type_class;
1640 default: return no_type_class;
1644 /* Expand a call EXP to __builtin_classify_type. */
1647 expand_builtin_classify_type (tree exp)
1649 if (call_expr_nargs (exp))
1650 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1651 return GEN_INT (no_type_class);
1654 /* This helper macro, meant to be used in mathfn_built_in below,
1655 determines which among a set of three builtin math functions is
1656 appropriate for a given type mode. The `F' and `L' cases are
1657 automatically generated from the `double' case. */
1658 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1659 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1660 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1661 fcodel = BUILT_IN_MATHFN##L ; break;
1662 /* Similar to above, but appends _R after any F/L suffix. */
1663 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1664 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1665 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1666 fcodel = BUILT_IN_MATHFN##L_R ; break;
1668 /* Return mathematic function equivalent to FN but operating directly
1669 on TYPE, if available. If IMPLICIT is true find the function in
1670 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1671 can't do the conversion, return zero. */
1674 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1676 tree const *const fn_arr
1677 = implicit ? implicit_built_in_decls : built_in_decls;
1678 enum built_in_function fcode, fcodef, fcodel;
1682 CASE_MATHFN (BUILT_IN_ACOS)
1683 CASE_MATHFN (BUILT_IN_ACOSH)
1684 CASE_MATHFN (BUILT_IN_ASIN)
1685 CASE_MATHFN (BUILT_IN_ASINH)
1686 CASE_MATHFN (BUILT_IN_ATAN)
1687 CASE_MATHFN (BUILT_IN_ATAN2)
1688 CASE_MATHFN (BUILT_IN_ATANH)
1689 CASE_MATHFN (BUILT_IN_CBRT)
1690 CASE_MATHFN (BUILT_IN_CEIL)
1691 CASE_MATHFN (BUILT_IN_CEXPI)
1692 CASE_MATHFN (BUILT_IN_COPYSIGN)
1693 CASE_MATHFN (BUILT_IN_COS)
1694 CASE_MATHFN (BUILT_IN_COSH)
1695 CASE_MATHFN (BUILT_IN_DREM)
1696 CASE_MATHFN (BUILT_IN_ERF)
1697 CASE_MATHFN (BUILT_IN_ERFC)
1698 CASE_MATHFN (BUILT_IN_EXP)
1699 CASE_MATHFN (BUILT_IN_EXP10)
1700 CASE_MATHFN (BUILT_IN_EXP2)
1701 CASE_MATHFN (BUILT_IN_EXPM1)
1702 CASE_MATHFN (BUILT_IN_FABS)
1703 CASE_MATHFN (BUILT_IN_FDIM)
1704 CASE_MATHFN (BUILT_IN_FLOOR)
1705 CASE_MATHFN (BUILT_IN_FMA)
1706 CASE_MATHFN (BUILT_IN_FMAX)
1707 CASE_MATHFN (BUILT_IN_FMIN)
1708 CASE_MATHFN (BUILT_IN_FMOD)
1709 CASE_MATHFN (BUILT_IN_FREXP)
1710 CASE_MATHFN (BUILT_IN_GAMMA)
1711 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1712 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1713 CASE_MATHFN (BUILT_IN_HYPOT)
1714 CASE_MATHFN (BUILT_IN_ILOGB)
1715 CASE_MATHFN (BUILT_IN_INF)
1716 CASE_MATHFN (BUILT_IN_ISINF)
1717 CASE_MATHFN (BUILT_IN_J0)
1718 CASE_MATHFN (BUILT_IN_J1)
1719 CASE_MATHFN (BUILT_IN_JN)
1720 CASE_MATHFN (BUILT_IN_LCEIL)
1721 CASE_MATHFN (BUILT_IN_LDEXP)
1722 CASE_MATHFN (BUILT_IN_LFLOOR)
1723 CASE_MATHFN (BUILT_IN_LGAMMA)
1724 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1725 CASE_MATHFN (BUILT_IN_LLCEIL)
1726 CASE_MATHFN (BUILT_IN_LLFLOOR)
1727 CASE_MATHFN (BUILT_IN_LLRINT)
1728 CASE_MATHFN (BUILT_IN_LLROUND)
1729 CASE_MATHFN (BUILT_IN_LOG)
1730 CASE_MATHFN (BUILT_IN_LOG10)
1731 CASE_MATHFN (BUILT_IN_LOG1P)
1732 CASE_MATHFN (BUILT_IN_LOG2)
1733 CASE_MATHFN (BUILT_IN_LOGB)
1734 CASE_MATHFN (BUILT_IN_LRINT)
1735 CASE_MATHFN (BUILT_IN_LROUND)
1736 CASE_MATHFN (BUILT_IN_MODF)
1737 CASE_MATHFN (BUILT_IN_NAN)
1738 CASE_MATHFN (BUILT_IN_NANS)
1739 CASE_MATHFN (BUILT_IN_NEARBYINT)
1740 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1741 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1742 CASE_MATHFN (BUILT_IN_POW)
1743 CASE_MATHFN (BUILT_IN_POWI)
1744 CASE_MATHFN (BUILT_IN_POW10)
1745 CASE_MATHFN (BUILT_IN_REMAINDER)
1746 CASE_MATHFN (BUILT_IN_REMQUO)
1747 CASE_MATHFN (BUILT_IN_RINT)
1748 CASE_MATHFN (BUILT_IN_ROUND)
1749 CASE_MATHFN (BUILT_IN_SCALB)
1750 CASE_MATHFN (BUILT_IN_SCALBLN)
1751 CASE_MATHFN (BUILT_IN_SCALBN)
1752 CASE_MATHFN (BUILT_IN_SIGNBIT)
1753 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1754 CASE_MATHFN (BUILT_IN_SIN)
1755 CASE_MATHFN (BUILT_IN_SINCOS)
1756 CASE_MATHFN (BUILT_IN_SINH)
1757 CASE_MATHFN (BUILT_IN_SQRT)
1758 CASE_MATHFN (BUILT_IN_TAN)
1759 CASE_MATHFN (BUILT_IN_TANH)
1760 CASE_MATHFN (BUILT_IN_TGAMMA)
1761 CASE_MATHFN (BUILT_IN_TRUNC)
1762 CASE_MATHFN (BUILT_IN_Y0)
1763 CASE_MATHFN (BUILT_IN_Y1)
1764 CASE_MATHFN (BUILT_IN_YN)
1770 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1771 return fn_arr[fcode];
1772 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1773 return fn_arr[fcodef];
1774 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1775 return fn_arr[fcodel];
1780 /* Like mathfn_built_in_1(), but always use the implicit array. */
1783 mathfn_built_in (tree type, enum built_in_function fn)
1785 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1788 /* If errno must be maintained, expand the RTL to check if the result,
1789 TARGET, of a built-in function call, EXP, is NaN, and if so set
1793 expand_errno_check (tree exp, rtx target)
1795 rtx lab = gen_label_rtx ();
1797 /* Test the result; if it is NaN, set errno=EDOM because
1798 the argument was not in the domain. */
1799 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1803 /* If this built-in doesn't throw an exception, set errno directly. */
1804 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1806 #ifdef GEN_ERRNO_RTX
1807 rtx errno_rtx = GEN_ERRNO_RTX;
1810 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1812 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1818 /* Make sure the library call isn't expanded as a tail call. */
1819 CALL_EXPR_TAILCALL (exp) = 0;
1821 /* We can't set errno=EDOM directly; let the library call do it.
1822 Pop the arguments right away in case the call gets deleted. */
1824 expand_call (exp, target, 0);
1829 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1830 Return NULL_RTX if a normal call should be emitted rather than expanding
1831 the function in-line. EXP is the expression that is a call to the builtin
1832 function; if convenient, the result should be placed in TARGET.
1833 SUBTARGET may be used as the target for computing one of EXP's operands. */
1836 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1838 optab builtin_optab;
1839 rtx op0, insns, before_call;
1840 tree fndecl = get_callee_fndecl (exp);
1841 enum machine_mode mode;
1842 bool errno_set = false;
1845 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1848 arg = CALL_EXPR_ARG (exp, 0);
1850 switch (DECL_FUNCTION_CODE (fndecl))
1852 CASE_FLT_FN (BUILT_IN_SQRT):
1853 errno_set = ! tree_expr_nonnegative_p (arg);
1854 builtin_optab = sqrt_optab;
1856 CASE_FLT_FN (BUILT_IN_EXP):
1857 errno_set = true; builtin_optab = exp_optab; break;
1858 CASE_FLT_FN (BUILT_IN_EXP10):
1859 CASE_FLT_FN (BUILT_IN_POW10):
1860 errno_set = true; builtin_optab = exp10_optab; break;
1861 CASE_FLT_FN (BUILT_IN_EXP2):
1862 errno_set = true; builtin_optab = exp2_optab; break;
1863 CASE_FLT_FN (BUILT_IN_EXPM1):
1864 errno_set = true; builtin_optab = expm1_optab; break;
1865 CASE_FLT_FN (BUILT_IN_LOGB):
1866 errno_set = true; builtin_optab = logb_optab; break;
1867 CASE_FLT_FN (BUILT_IN_LOG):
1868 errno_set = true; builtin_optab = log_optab; break;
1869 CASE_FLT_FN (BUILT_IN_LOG10):
1870 errno_set = true; builtin_optab = log10_optab; break;
1871 CASE_FLT_FN (BUILT_IN_LOG2):
1872 errno_set = true; builtin_optab = log2_optab; break;
1873 CASE_FLT_FN (BUILT_IN_LOG1P):
1874 errno_set = true; builtin_optab = log1p_optab; break;
1875 CASE_FLT_FN (BUILT_IN_ASIN):
1876 builtin_optab = asin_optab; break;
1877 CASE_FLT_FN (BUILT_IN_ACOS):
1878 builtin_optab = acos_optab; break;
1879 CASE_FLT_FN (BUILT_IN_TAN):
1880 builtin_optab = tan_optab; break;
1881 CASE_FLT_FN (BUILT_IN_ATAN):
1882 builtin_optab = atan_optab; break;
1883 CASE_FLT_FN (BUILT_IN_FLOOR):
1884 builtin_optab = floor_optab; break;
1885 CASE_FLT_FN (BUILT_IN_CEIL):
1886 builtin_optab = ceil_optab; break;
1887 CASE_FLT_FN (BUILT_IN_TRUNC):
1888 builtin_optab = btrunc_optab; break;
1889 CASE_FLT_FN (BUILT_IN_ROUND):
1890 builtin_optab = round_optab; break;
1891 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1892 builtin_optab = nearbyint_optab;
1893 if (flag_trapping_math)
1895 /* Else fallthrough and expand as rint. */
1896 CASE_FLT_FN (BUILT_IN_RINT):
1897 builtin_optab = rint_optab; break;
1902 /* Make a suitable register to place result in. */
1903 mode = TYPE_MODE (TREE_TYPE (exp));
1905 if (! flag_errno_math || ! HONOR_NANS (mode))
1908 /* Before working hard, check whether the instruction is available. */
1909 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1911 target = gen_reg_rtx (mode);
1913 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1914 need to expand the argument again. This way, we will not perform
1915 side-effects more the once. */
1916 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1918 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1922 /* Compute into TARGET.
1923 Set TARGET to wherever the result comes back. */
1924 target = expand_unop (mode, builtin_optab, op0, target, 0);
1929 expand_errno_check (exp, target);
1931 /* Output the entire sequence. */
1932 insns = get_insns ();
1938 /* If we were unable to expand via the builtin, stop the sequence
1939 (without outputting the insns) and call to the library function
1940 with the stabilized argument list. */
1944 before_call = get_last_insn ();
1946 target = expand_call (exp, target, target == const0_rtx);
1948 /* If this is a sqrt operation and we don't care about errno, try to
1949 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1950 This allows the semantics of the libcall to be visible to the RTL
1952 if (builtin_optab == sqrt_optab && !errno_set)
1954 /* Search backwards through the insns emitted by expand_call looking
1955 for the instruction with the REG_RETVAL note. */
1956 rtx last = get_last_insn ();
1957 while (last != before_call)
1959 if (find_reg_note (last, REG_RETVAL, NULL))
1961 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1962 /* Check that the REQ_EQUAL note is an EXPR_LIST with
1963 two elements, i.e. symbol_ref(sqrt) and the operand. */
1965 && GET_CODE (note) == EXPR_LIST
1966 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1967 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1968 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1970 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1971 /* Check operand is a register with expected mode. */
1974 && GET_MODE (operand) == mode)
1976 /* Replace the REG_EQUAL note with a SQRT rtx. */
1977 rtx equiv = gen_rtx_SQRT (mode, operand);
1978 set_unique_reg_note (last, REG_EQUAL, equiv);
1983 last = PREV_INSN (last);
1990 /* Expand a call to the builtin binary math functions (pow and atan2).
1991 Return NULL_RTX if a normal call should be emitted rather than expanding the
1992 function in-line. EXP is the expression that is a call to the builtin
1993 function; if convenient, the result should be placed in TARGET.
1994 SUBTARGET may be used as the target for computing one of EXP's
1998 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2000 optab builtin_optab;
2001 rtx op0, op1, insns;
2002 int op1_type = REAL_TYPE;
2003 tree fndecl = get_callee_fndecl (exp);
2005 enum machine_mode mode;
2006 bool errno_set = true;
2008 switch (DECL_FUNCTION_CODE (fndecl))
2010 CASE_FLT_FN (BUILT_IN_SCALBN):
2011 CASE_FLT_FN (BUILT_IN_SCALBLN):
2012 CASE_FLT_FN (BUILT_IN_LDEXP):
2013 op1_type = INTEGER_TYPE;
2018 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2021 arg0 = CALL_EXPR_ARG (exp, 0);
2022 arg1 = CALL_EXPR_ARG (exp, 1);
2024 switch (DECL_FUNCTION_CODE (fndecl))
2026 CASE_FLT_FN (BUILT_IN_POW):
2027 builtin_optab = pow_optab; break;
2028 CASE_FLT_FN (BUILT_IN_ATAN2):
2029 builtin_optab = atan2_optab; break;
2030 CASE_FLT_FN (BUILT_IN_SCALB):
2031 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2033 builtin_optab = scalb_optab; break;
2034 CASE_FLT_FN (BUILT_IN_SCALBN):
2035 CASE_FLT_FN (BUILT_IN_SCALBLN):
2036 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2038 /* Fall through... */
2039 CASE_FLT_FN (BUILT_IN_LDEXP):
2040 builtin_optab = ldexp_optab; break;
2041 CASE_FLT_FN (BUILT_IN_FMOD):
2042 builtin_optab = fmod_optab; break;
2043 CASE_FLT_FN (BUILT_IN_REMAINDER):
2044 CASE_FLT_FN (BUILT_IN_DREM):
2045 builtin_optab = remainder_optab; break;
2050 /* Make a suitable register to place result in. */
2051 mode = TYPE_MODE (TREE_TYPE (exp));
2053 /* Before working hard, check whether the instruction is available. */
2054 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2057 target = gen_reg_rtx (mode);
2059 if (! flag_errno_math || ! HONOR_NANS (mode))
2062 /* Always stabilize the argument list. */
2063 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2064 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2066 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2067 op1 = expand_normal (arg1);
2071 /* Compute into TARGET.
2072 Set TARGET to wherever the result comes back. */
2073 target = expand_binop (mode, builtin_optab, op0, op1,
2074 target, 0, OPTAB_DIRECT);
2076 /* If we were unable to expand via the builtin, stop the sequence
2077 (without outputting the insns) and call to the library function
2078 with the stabilized argument list. */
2082 return expand_call (exp, target, target == const0_rtx);
2086 expand_errno_check (exp, target);
2088 /* Output the entire sequence. */
2089 insns = get_insns ();
2096 /* Expand a call to the builtin sin and cos math functions.
2097 Return NULL_RTX if a normal call should be emitted rather than expanding the
2098 function in-line. EXP is the expression that is a call to the builtin
2099 function; if convenient, the result should be placed in TARGET.
2100 SUBTARGET may be used as the target for computing one of EXP's
2104 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2106 optab builtin_optab;
2108 tree fndecl = get_callee_fndecl (exp);
2109 enum machine_mode mode;
2112 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2115 arg = CALL_EXPR_ARG (exp, 0);
2117 switch (DECL_FUNCTION_CODE (fndecl))
2119 CASE_FLT_FN (BUILT_IN_SIN):
2120 CASE_FLT_FN (BUILT_IN_COS):
2121 builtin_optab = sincos_optab; break;
2126 /* Make a suitable register to place result in. */
2127 mode = TYPE_MODE (TREE_TYPE (exp));
2129 /* Check if sincos insn is available, otherwise fallback
2130 to sin or cos insn. */
2131 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2132 switch (DECL_FUNCTION_CODE (fndecl))
2134 CASE_FLT_FN (BUILT_IN_SIN):
2135 builtin_optab = sin_optab; break;
2136 CASE_FLT_FN (BUILT_IN_COS):
2137 builtin_optab = cos_optab; break;
2142 /* Before working hard, check whether the instruction is available. */
2143 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2145 target = gen_reg_rtx (mode);
2147 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2148 need to expand the argument again. This way, we will not perform
2149 side-effects more the once. */
2150 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2152 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2156 /* Compute into TARGET.
2157 Set TARGET to wherever the result comes back. */
2158 if (builtin_optab == sincos_optab)
2162 switch (DECL_FUNCTION_CODE (fndecl))
2164 CASE_FLT_FN (BUILT_IN_SIN):
2165 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2167 CASE_FLT_FN (BUILT_IN_COS):
2168 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2173 gcc_assert (result);
2177 target = expand_unop (mode, builtin_optab, op0, target, 0);
2182 /* Output the entire sequence. */
2183 insns = get_insns ();
2189 /* If we were unable to expand via the builtin, stop the sequence
2190 (without outputting the insns) and call to the library function
2191 with the stabilized argument list. */
2195 target = expand_call (exp, target, target == const0_rtx);
2200 /* Expand a call to one of the builtin math functions that operate on
2201 floating point argument and output an integer result (ilogb, isinf,
2203 Return 0 if a normal call should be emitted rather than expanding the
2204 function in-line. EXP is the expression that is a call to the builtin
2205 function; if convenient, the result should be placed in TARGET.
2206 SUBTARGET may be used as the target for computing one of EXP's operands. */
2209 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2211 optab builtin_optab = 0;
2212 enum insn_code icode = CODE_FOR_nothing;
2214 tree fndecl = get_callee_fndecl (exp);
2215 enum machine_mode mode;
2216 bool errno_set = false;
2219 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2222 arg = CALL_EXPR_ARG (exp, 0);
2224 switch (DECL_FUNCTION_CODE (fndecl))
2226 CASE_FLT_FN (BUILT_IN_ILOGB):
2227 errno_set = true; builtin_optab = ilogb_optab; break;
2228 CASE_FLT_FN (BUILT_IN_ISINF):
2229 builtin_optab = isinf_optab; break;
2230 case BUILT_IN_ISNORMAL:
2231 case BUILT_IN_ISFINITE:
2232 CASE_FLT_FN (BUILT_IN_FINITE):
2233 /* These builtins have no optabs (yet). */
2239 /* There's no easy way to detect the case we need to set EDOM. */
2240 if (flag_errno_math && errno_set)
2243 /* Optab mode depends on the mode of the input argument. */
2244 mode = TYPE_MODE (TREE_TYPE (arg));
2247 icode = optab_handler (builtin_optab, mode)->insn_code;
2249 /* Before working hard, check whether the instruction is available. */
2250 if (icode != CODE_FOR_nothing)
2252 /* Make a suitable register to place result in. */
2254 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2255 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2257 gcc_assert (insn_data[icode].operand[0].predicate
2258 (target, GET_MODE (target)));
2260 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2261 need to expand the argument again. This way, we will not perform
2262 side-effects more the once. */
2263 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2265 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2267 if (mode != GET_MODE (op0))
2268 op0 = convert_to_mode (mode, op0, 0);
2270 /* Compute into TARGET.
2271 Set TARGET to wherever the result comes back. */
2272 emit_unop_insn (icode, target, op0, UNKNOWN);
2276 /* If there is no optab, try generic code. */
2277 switch (DECL_FUNCTION_CODE (fndecl))
2281 CASE_FLT_FN (BUILT_IN_ISINF):
2283 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2284 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2285 tree const type = TREE_TYPE (arg);
2289 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2290 real_from_string (&r, buf);
2291 result = build_call_expr (isgr_fn, 2,
2292 fold_build1 (ABS_EXPR, type, arg),
2293 build_real (type, r));
2294 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2296 CASE_FLT_FN (BUILT_IN_FINITE):
2297 case BUILT_IN_ISFINITE:
2299 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2300 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2301 tree const type = TREE_TYPE (arg);
2305 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2306 real_from_string (&r, buf);
2307 result = build_call_expr (isle_fn, 2,
2308 fold_build1 (ABS_EXPR, type, arg),
2309 build_real (type, r));
2310 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2312 case BUILT_IN_ISNORMAL:
2314 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2315 islessequal(fabs(x),DBL_MAX). */
2316 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2317 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2318 tree const type = TREE_TYPE (arg);
2319 REAL_VALUE_TYPE rmax, rmin;
2322 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2323 real_from_string (&rmax, buf);
2324 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2325 real_from_string (&rmin, buf);
2326 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2327 result = build_call_expr (isle_fn, 2, arg,
2328 build_real (type, rmax));
2329 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2330 build_call_expr (isge_fn, 2, arg,
2331 build_real (type, rmin)));
2332 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2338 target = expand_call (exp, target, target == const0_rtx);
2343 /* Expand a call to the builtin sincos math function.
2344 Return NULL_RTX if a normal call should be emitted rather than expanding the
2345 function in-line. EXP is the expression that is a call to the builtin
2349 expand_builtin_sincos (tree exp)
2351 rtx op0, op1, op2, target1, target2;
2352 enum machine_mode mode;
2353 tree arg, sinp, cosp;
2356 if (!validate_arglist (exp, REAL_TYPE,
2357 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2360 arg = CALL_EXPR_ARG (exp, 0);
2361 sinp = CALL_EXPR_ARG (exp, 1);
2362 cosp = CALL_EXPR_ARG (exp, 2);
2364 /* Make a suitable register to place result in. */
2365 mode = TYPE_MODE (TREE_TYPE (arg));
2367 /* Check if sincos insn is available, otherwise emit the call. */
2368 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2371 target1 = gen_reg_rtx (mode);
2372 target2 = gen_reg_rtx (mode);
2374 op0 = expand_normal (arg);
2375 op1 = expand_normal (build_fold_indirect_ref (sinp));
2376 op2 = expand_normal (build_fold_indirect_ref (cosp));
2378 /* Compute into target1 and target2.
2379 Set TARGET to wherever the result comes back. */
2380 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2381 gcc_assert (result);
2383 /* Move target1 and target2 to the memory locations indicated
2385 emit_move_insn (op1, target1);
2386 emit_move_insn (op2, target2);
2391 /* Expand a call to the internal cexpi builtin to the sincos math function.
2392 EXP is the expression that is a call to the builtin function; if convenient,
2393 the result should be placed in TARGET. SUBTARGET may be used as the target
2394 for computing one of EXP's operands. */
2397 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2399 tree fndecl = get_callee_fndecl (exp);
2401 enum machine_mode mode;
2404 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2407 arg = CALL_EXPR_ARG (exp, 0);
2408 type = TREE_TYPE (arg);
2409 mode = TYPE_MODE (TREE_TYPE (arg));
2411 /* Try expanding via a sincos optab, fall back to emitting a libcall
2412 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2413 is only generated from sincos, cexp or if we have either of them. */
2414 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2416 op1 = gen_reg_rtx (mode);
2417 op2 = gen_reg_rtx (mode);
2419 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2421 /* Compute into op1 and op2. */
2422 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2424 else if (TARGET_HAS_SINCOS)
2426 tree call, fn = NULL_TREE;
2430 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2431 fn = built_in_decls[BUILT_IN_SINCOSF];
2432 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2433 fn = built_in_decls[BUILT_IN_SINCOS];
2434 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2435 fn = built_in_decls[BUILT_IN_SINCOSL];
2439 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2440 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2441 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2442 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2443 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2444 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2446 /* Make sure not to fold the sincos call again. */
2447 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2448 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2449 call, 3, arg, top1, top2));
2453 tree call, fn = NULL_TREE, narg;
2454 tree ctype = build_complex_type (type);
2456 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2457 fn = built_in_decls[BUILT_IN_CEXPF];
2458 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2459 fn = built_in_decls[BUILT_IN_CEXP];
2460 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2461 fn = built_in_decls[BUILT_IN_CEXPL];
2465 /* If we don't have a decl for cexp create one. This is the
2466 friendliest fallback if the user calls __builtin_cexpi
2467 without full target C99 function support. */
2468 if (fn == NULL_TREE)
2471 const char *name = NULL;
2473 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2475 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2477 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2480 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2481 fn = build_fn_decl (name, fntype);
2484 narg = fold_build2 (COMPLEX_EXPR, ctype,
2485 build_real (type, dconst0), arg);
2487 /* Make sure not to fold the cexp call again. */
2488 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2489 return expand_expr (build_call_nary (ctype, call, 1, narg),
2490 target, VOIDmode, EXPAND_NORMAL);
2493 /* Now build the proper return type. */
2494 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2495 make_tree (TREE_TYPE (arg), op2),
2496 make_tree (TREE_TYPE (arg), op1)),
2497 target, VOIDmode, EXPAND_NORMAL);
2500 /* Expand a call to one of the builtin rounding functions gcc defines
2501 as an extension (lfloor and lceil). As these are gcc extensions we
2502 do not need to worry about setting errno to EDOM.
2503 If expanding via optab fails, lower expression to (int)(floor(x)).
2504 EXP is the expression that is a call to the builtin function;
2505 if convenient, the result should be placed in TARGET. SUBTARGET may
2506 be used as the target for computing one of EXP's operands. */
2509 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2511 convert_optab builtin_optab;
2512 rtx op0, insns, tmp;
2513 tree fndecl = get_callee_fndecl (exp);
2514 enum built_in_function fallback_fn;
2515 tree fallback_fndecl;
2516 enum machine_mode mode;
2519 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2522 arg = CALL_EXPR_ARG (exp, 0);
2524 switch (DECL_FUNCTION_CODE (fndecl))
2526 CASE_FLT_FN (BUILT_IN_LCEIL):
2527 CASE_FLT_FN (BUILT_IN_LLCEIL):
2528 builtin_optab = lceil_optab;
2529 fallback_fn = BUILT_IN_CEIL;
2532 CASE_FLT_FN (BUILT_IN_LFLOOR):
2533 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2534 builtin_optab = lfloor_optab;
2535 fallback_fn = BUILT_IN_FLOOR;
2542 /* Make a suitable register to place result in. */
2543 mode = TYPE_MODE (TREE_TYPE (exp));
2545 target = gen_reg_rtx (mode);
2547 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2548 need to expand the argument again. This way, we will not perform
2549 side-effects more the once. */
2550 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2552 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2556 /* Compute into TARGET. */
2557 if (expand_sfix_optab (target, op0, builtin_optab))
2559 /* Output the entire sequence. */
2560 insns = get_insns ();
2566 /* If we were unable to expand via the builtin, stop the sequence
2567 (without outputting the insns). */
2570 /* Fall back to floating point rounding optab. */
2571 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2573 /* For non-C99 targets we may end up without a fallback fndecl here
2574 if the user called __builtin_lfloor directly. In this case emit
2575 a call to the floor/ceil variants nevertheless. This should result
2576 in the best user experience for not full C99 targets. */
2577 if (fallback_fndecl == NULL_TREE)
2580 const char *name = NULL;
2582 switch (DECL_FUNCTION_CODE (fndecl))
2584 case BUILT_IN_LCEIL:
2585 case BUILT_IN_LLCEIL:
2588 case BUILT_IN_LCEILF:
2589 case BUILT_IN_LLCEILF:
2592 case BUILT_IN_LCEILL:
2593 case BUILT_IN_LLCEILL:
2596 case BUILT_IN_LFLOOR:
2597 case BUILT_IN_LLFLOOR:
2600 case BUILT_IN_LFLOORF:
2601 case BUILT_IN_LLFLOORF:
2604 case BUILT_IN_LFLOORL:
2605 case BUILT_IN_LLFLOORL:
2612 fntype = build_function_type_list (TREE_TYPE (arg),
2613 TREE_TYPE (arg), NULL_TREE);
2614 fallback_fndecl = build_fn_decl (name, fntype);
2617 exp = build_call_expr (fallback_fndecl, 1, arg);
2619 tmp = expand_normal (exp);
2621 /* Truncate the result of floating point optab to integer
2622 via expand_fix (). */
2623 target = gen_reg_rtx (mode);
2624 expand_fix (target, tmp, 0);
2629 /* Expand a call to one of the builtin math functions doing integer
2631 Return 0 if a normal call should be emitted rather than expanding the
2632 function in-line. EXP is the expression that is a call to the builtin
2633 function; if convenient, the result should be placed in TARGET.
2634 SUBTARGET may be used as the target for computing one of EXP's operands. */
2637 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2639 convert_optab builtin_optab;
2641 tree fndecl = get_callee_fndecl (exp);
2643 enum machine_mode mode;
2645 /* There's no easy way to detect the case we need to set EDOM. */
2646 if (flag_errno_math)
2649 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2652 arg = CALL_EXPR_ARG (exp, 0);
2654 switch (DECL_FUNCTION_CODE (fndecl))
2656 CASE_FLT_FN (BUILT_IN_LRINT):
2657 CASE_FLT_FN (BUILT_IN_LLRINT):
2658 builtin_optab = lrint_optab; break;
2659 CASE_FLT_FN (BUILT_IN_LROUND):
2660 CASE_FLT_FN (BUILT_IN_LLROUND):
2661 builtin_optab = lround_optab; break;
2666 /* Make a suitable register to place result in. */
2667 mode = TYPE_MODE (TREE_TYPE (exp));
2669 target = gen_reg_rtx (mode);
2671 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2672 need to expand the argument again. This way, we will not perform
2673 side-effects more the once. */
2674 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2676 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2680 if (expand_sfix_optab (target, op0, builtin_optab))
2682 /* Output the entire sequence. */
2683 insns = get_insns ();
2689 /* If we were unable to expand via the builtin, stop the sequence
2690 (without outputting the insns) and call to the library function
2691 with the stabilized argument list. */
2694 target = expand_call (exp, target, target == const0_rtx);
2699 /* To evaluate powi(x,n), the floating point value x raised to the
2700 constant integer exponent n, we use a hybrid algorithm that
2701 combines the "window method" with look-up tables. For an
2702 introduction to exponentiation algorithms and "addition chains",
2703 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2704 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2705 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2706 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2708 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2709 multiplications to inline before calling the system library's pow
2710 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2711 so this default never requires calling pow, powf or powl. */
2713 #ifndef POWI_MAX_MULTS
2714 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2717 /* The size of the "optimal power tree" lookup table. All
2718 exponents less than this value are simply looked up in the
2719 powi_table below. This threshold is also used to size the
2720 cache of pseudo registers that hold intermediate results. */
2721 #define POWI_TABLE_SIZE 256
2723 /* The size, in bits of the window, used in the "window method"
2724 exponentiation algorithm. This is equivalent to a radix of
2725 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2726 #define POWI_WINDOW_SIZE 3
2728 /* The following table is an efficient representation of an
2729 "optimal power tree". For each value, i, the corresponding
2730 value, j, in the table states than an optimal evaluation
2731 sequence for calculating pow(x,i) can be found by evaluating
2732 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2733 100 integers is given in Knuth's "Seminumerical algorithms". */
2735 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2737 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2738 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2739 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2740 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2741 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2742 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2743 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2744 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2745 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2746 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2747 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2748 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2749 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2750 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2751 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2752 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2753 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2754 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2755 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2756 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2757 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2758 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2759 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2760 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2761 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2762 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2763 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2764 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2765 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2766 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2767 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2768 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2772 /* Return the number of multiplications required to calculate
2773 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2774 subroutine of powi_cost. CACHE is an array indicating
2775 which exponents have already been calculated. */
2778 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2780 /* If we've already calculated this exponent, then this evaluation
2781 doesn't require any additional multiplications. */
2786 return powi_lookup_cost (n - powi_table[n], cache)
2787 + powi_lookup_cost (powi_table[n], cache) + 1;
2790 /* Return the number of multiplications required to calculate
2791 powi(x,n) for an arbitrary x, given the exponent N. This
2792 function needs to be kept in sync with expand_powi below. */
2795 powi_cost (HOST_WIDE_INT n)
2797 bool cache[POWI_TABLE_SIZE];
2798 unsigned HOST_WIDE_INT digit;
2799 unsigned HOST_WIDE_INT val;
2805 /* Ignore the reciprocal when calculating the cost. */
2806 val = (n < 0) ? -n : n;
2808 /* Initialize the exponent cache. */
2809 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2814 while (val >= POWI_TABLE_SIZE)
2818 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2819 result += powi_lookup_cost (digit, cache)
2820 + POWI_WINDOW_SIZE + 1;
2821 val >>= POWI_WINDOW_SIZE;
2830 return result + powi_lookup_cost (val, cache);
2833 /* Recursive subroutine of expand_powi. This function takes the array,
2834 CACHE, of already calculated exponents and an exponent N and returns
2835 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2838 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2840 unsigned HOST_WIDE_INT digit;
2844 if (n < POWI_TABLE_SIZE)
2849 target = gen_reg_rtx (mode);
2852 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2853 op1 = expand_powi_1 (mode, powi_table[n], cache);
2857 target = gen_reg_rtx (mode);
2858 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2859 op0 = expand_powi_1 (mode, n - digit, cache);
2860 op1 = expand_powi_1 (mode, digit, cache);
2864 target = gen_reg_rtx (mode);
2865 op0 = expand_powi_1 (mode, n >> 1, cache);
2869 result = expand_mult (mode, op0, op1, target, 0);
2870 if (result != target)
2871 emit_move_insn (target, result);
2875 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2876 floating point operand in mode MODE, and N is the exponent. This
2877 function needs to be kept in sync with powi_cost above. */
2880 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2882 unsigned HOST_WIDE_INT val;
2883 rtx cache[POWI_TABLE_SIZE];
2887 return CONST1_RTX (mode);
2889 val = (n < 0) ? -n : n;
2891 memset (cache, 0, sizeof (cache));
2894 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2896 /* If the original exponent was negative, reciprocate the result. */
2898 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2899 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2904 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2905 a normal call should be emitted rather than expanding the function
2906 in-line. EXP is the expression that is a call to the builtin
2907 function; if convenient, the result should be placed in TARGET. */
2910 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2914 tree type = TREE_TYPE (exp);
2915 REAL_VALUE_TYPE cint, c, c2;
2918 enum machine_mode mode = TYPE_MODE (type);
2920 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2923 arg0 = CALL_EXPR_ARG (exp, 0);
2924 arg1 = CALL_EXPR_ARG (exp, 1);
2926 if (TREE_CODE (arg1) != REAL_CST
2927 || TREE_OVERFLOW (arg1))
2928 return expand_builtin_mathfn_2 (exp, target, subtarget);
2930 /* Handle constant exponents. */
2932 /* For integer valued exponents we can expand to an optimal multiplication
2933 sequence using expand_powi. */
2934 c = TREE_REAL_CST (arg1);
2935 n = real_to_integer (&c);
2936 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2937 if (real_identical (&c, &cint)
2938 && ((n >= -1 && n <= 2)
2939 || (flag_unsafe_math_optimizations
2941 && powi_cost (n) <= POWI_MAX_MULTS)))
2943 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2946 op = force_reg (mode, op);
2947 op = expand_powi (op, mode, n);
2952 narg0 = builtin_save_expr (arg0);
2954 /* If the exponent is not integer valued, check if it is half of an integer.
2955 In this case we can expand to sqrt (x) * x**(n/2). */
2956 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2957 if (fn != NULL_TREE)
2959 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2960 n = real_to_integer (&c2);
2961 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2962 if (real_identical (&c2, &cint)
2963 && ((flag_unsafe_math_optimizations
2965 && powi_cost (n/2) <= POWI_MAX_MULTS)
2968 tree call_expr = build_call_expr (fn, 1, narg0);
2969 /* Use expand_expr in case the newly built call expression
2970 was folded to a non-call. */
2971 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2974 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2975 op2 = force_reg (mode, op2);
2976 op2 = expand_powi (op2, mode, abs (n / 2));
2977 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2978 0, OPTAB_LIB_WIDEN);
2979 /* If the original exponent was negative, reciprocate the
2982 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2983 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2989 /* Try if the exponent is a third of an integer. In this case
2990 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2991 different from pow (x, 1./3.) due to rounding and behavior
2992 with negative x we need to constrain this transformation to
2993 unsafe math and positive x or finite math. */
2994 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2996 && flag_unsafe_math_optimizations
2997 && (tree_expr_nonnegative_p (arg0)
2998 || !HONOR_NANS (mode)))
3000 REAL_VALUE_TYPE dconst3;
3001 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3002 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3003 real_round (&c2, mode, &c2);
3004 n = real_to_integer (&c2);
3005 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3006 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3007 real_convert (&c2, mode, &c2);
3008 if (real_identical (&c2, &c)
3010 && powi_cost (n/3) <= POWI_MAX_MULTS)
3013 tree call_expr = build_call_expr (fn, 1,narg0);
3014 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3015 if (abs (n) % 3 == 2)
3016 op = expand_simple_binop (mode, MULT, op, op, op,
3017 0, OPTAB_LIB_WIDEN);
3020 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3021 op2 = force_reg (mode, op2);
3022 op2 = expand_powi (op2, mode, abs (n / 3));
3023 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3024 0, OPTAB_LIB_WIDEN);
3025 /* If the original exponent was negative, reciprocate the
3028 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3029 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3035 /* Fall back to optab expansion. */
3036 return expand_builtin_mathfn_2 (exp, target, subtarget);
3039 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3040 a normal call should be emitted rather than expanding the function
3041 in-line. EXP is the expression that is a call to the builtin
3042 function; if convenient, the result should be placed in TARGET. */
3045 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3049 enum machine_mode mode;
3050 enum machine_mode mode2;
3052 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3055 arg0 = CALL_EXPR_ARG (exp, 0);
3056 arg1 = CALL_EXPR_ARG (exp, 1);
3057 mode = TYPE_MODE (TREE_TYPE (exp));
3059 /* Handle constant power. */
3061 if (TREE_CODE (arg1) == INTEGER_CST
3062 && !TREE_OVERFLOW (arg1))
3064 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3066 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3067 Otherwise, check the number of multiplications required. */
3068 if ((TREE_INT_CST_HIGH (arg1) == 0
3069 || TREE_INT_CST_HIGH (arg1) == -1)
3070 && ((n >= -1 && n <= 2)
3072 && powi_cost (n) <= POWI_MAX_MULTS)))
3074 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3075 op0 = force_reg (mode, op0);
3076 return expand_powi (op0, mode, n);
3080 /* Emit a libcall to libgcc. */
3082 /* Mode of the 2nd argument must match that of an int. */
3083 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3085 if (target == NULL_RTX)
3086 target = gen_reg_rtx (mode);
3088 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3089 if (GET_MODE (op0) != mode)
3090 op0 = convert_to_mode (mode, op0, 0);
3091 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3092 if (GET_MODE (op1) != mode2)
3093 op1 = convert_to_mode (mode2, op1, 0);
3095 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3096 target, LCT_CONST, mode, 2,
3097 op0, mode, op1, mode2);
3102 /* Expand expression EXP which is a call to the strlen builtin. Return
3103 NULL_RTX if we failed the caller should emit a normal call, otherwise
3104 try to get the result in TARGET, if convenient. */
3107 expand_builtin_strlen (tree exp, rtx target,
3108 enum machine_mode target_mode)
3110 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3116 tree src = CALL_EXPR_ARG (exp, 0);
3117 rtx result, src_reg, char_rtx, before_strlen;
3118 enum machine_mode insn_mode = target_mode, char_mode;
3119 enum insn_code icode = CODE_FOR_nothing;
3122 /* If the length can be computed at compile-time, return it. */
3123 len = c_strlen (src, 0);
3125 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3127 /* If the length can be computed at compile-time and is constant
3128 integer, but there are side-effects in src, evaluate
3129 src for side-effects, then return len.
3130 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3131 can be optimized into: i++; x = 3; */
3132 len = c_strlen (src, 1);
3133 if (len && TREE_CODE (len) == INTEGER_CST)
3135 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3136 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3139 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3141 /* If SRC is not a pointer type, don't do this operation inline. */
3145 /* Bail out if we can't compute strlen in the right mode. */
3146 while (insn_mode != VOIDmode)
3148 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3149 if (icode != CODE_FOR_nothing)
3152 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3154 if (insn_mode == VOIDmode)
3157 /* Make a place to write the result of the instruction. */
3161 && GET_MODE (result) == insn_mode
3162 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3163 result = gen_reg_rtx (insn_mode);
3165 /* Make a place to hold the source address. We will not expand
3166 the actual source until we are sure that the expansion will
3167 not fail -- there are trees that cannot be expanded twice. */
3168 src_reg = gen_reg_rtx (Pmode);
3170 /* Mark the beginning of the strlen sequence so we can emit the
3171 source operand later. */
3172 before_strlen = get_last_insn ();
3174 char_rtx = const0_rtx;
3175 char_mode = insn_data[(int) icode].operand[2].mode;
3176 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3178 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3180 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3181 char_rtx, GEN_INT (align));
3186 /* Now that we are assured of success, expand the source. */
3188 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3190 emit_move_insn (src_reg, pat);
3195 emit_insn_after (pat, before_strlen);
3197 emit_insn_before (pat, get_insns ());
3199 /* Return the value in the proper mode for this function. */
3200 if (GET_MODE (result) == target_mode)
3202 else if (target != 0)
3203 convert_move (target, result, 0);
3205 target = convert_to_mode (target_mode, result, 0);
3211 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3212 caller should emit a normal call, otherwise try to get the result
3213 in TARGET, if convenient (and in mode MODE if that's convenient). */
3216 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3218 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3220 tree type = TREE_TYPE (exp);
3221 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3222 CALL_EXPR_ARG (exp, 1), type);
3224 return expand_expr (result, target, mode, EXPAND_NORMAL);
3229 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3230 caller should emit a normal call, otherwise try to get the result
3231 in TARGET, if convenient (and in mode MODE if that's convenient). */
3234 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3236 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3238 tree type = TREE_TYPE (exp);
3239 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3240 CALL_EXPR_ARG (exp, 1), type);
3242 return expand_expr (result, target, mode, EXPAND_NORMAL);
3244 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3249 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3250 caller should emit a normal call, otherwise try to get the result
3251 in TARGET, if convenient (and in mode MODE if that's convenient). */
3254 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3256 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3258 tree type = TREE_TYPE (exp);
3259 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3260 CALL_EXPR_ARG (exp, 1), type);
3262 return expand_expr (result, target, mode, EXPAND_NORMAL);
3267 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3268 caller should emit a normal call, otherwise try to get the result
3269 in TARGET, if convenient (and in mode MODE if that's convenient). */
3272 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3274 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3276 tree type = TREE_TYPE (exp);
3277 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3278 CALL_EXPR_ARG (exp, 1), type);
3280 return expand_expr (result, target, mode, EXPAND_NORMAL);
3285 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3286 bytes from constant string DATA + OFFSET and return it as target
3290 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3291 enum machine_mode mode)
3293 const char *str = (const char *) data;
3295 gcc_assert (offset >= 0
3296 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3297 <= strlen (str) + 1));
3299 return c_readstr (str + offset, mode);
3302 /* Expand a call EXP to the memcpy builtin.
3303 Return NULL_RTX if we failed, the caller should emit a normal call,
3304 otherwise try to get the result in TARGET, if convenient (and in
3305 mode MODE if that's convenient). */
3308 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3310 tree fndecl = get_callee_fndecl (exp);
3312 if (!validate_arglist (exp,
3313 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3317 tree dest = CALL_EXPR_ARG (exp, 0);
3318 tree src = CALL_EXPR_ARG (exp, 1);
3319 tree len = CALL_EXPR_ARG (exp, 2);
3320 const char *src_str;
3321 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3322 unsigned int dest_align
3323 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3324 rtx dest_mem, src_mem, dest_addr, len_rtx;
3325 tree result = fold_builtin_memory_op (dest, src, len,
3326 TREE_TYPE (TREE_TYPE (fndecl)),
3328 HOST_WIDE_INT expected_size = -1;
3329 unsigned int expected_align = 0;
3333 while (TREE_CODE (result) == COMPOUND_EXPR)
3335 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3337 result = TREE_OPERAND (result, 1);
3339 return expand_expr (result, target, mode, EXPAND_NORMAL);
3342 /* If DEST is not a pointer type, call the normal function. */
3343 if (dest_align == 0)
3346 /* If either SRC is not a pointer type, don't do this
3347 operation in-line. */
3351 stringop_block_profile (exp, &expected_align, &expected_size);
3352 if (expected_align < dest_align)
3353 expected_align = dest_align;
3354 dest_mem = get_memory_rtx (dest, len);
3355 set_mem_align (dest_mem, dest_align);
3356 len_rtx = expand_normal (len);
3357 src_str = c_getstr (src);
3359 /* If SRC is a string constant and block move would be done
3360 by pieces, we can avoid loading the string from memory
3361 and only stored the computed constants. */
3363 && GET_CODE (len_rtx) == CONST_INT
3364 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3365 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3366 (void *) src_str, dest_align, false))
3368 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3369 builtin_memcpy_read_str,
3370 (void *) src_str, dest_align, false, 0);
3371 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3372 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3376 src_mem = get_memory_rtx (src, len);
3377 set_mem_align (src_mem, src_align);
3379 /* Copy word part most expediently. */
3380 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3381 CALL_EXPR_TAILCALL (exp)
3382 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3383 expected_align, expected_size);
3387 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3388 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3394 /* Expand a call EXP to the mempcpy builtin.
3395 Return NULL_RTX if we failed; the caller should emit a normal call,
3396 otherwise try to get the result in TARGET, if convenient (and in
3397 mode MODE if that's convenient). If ENDP is 0 return the
3398 destination pointer, if ENDP is 1 return the end pointer ala
3399 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3403 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3405 if (!validate_arglist (exp,
3406 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3410 tree dest = CALL_EXPR_ARG (exp, 0);
3411 tree src = CALL_EXPR_ARG (exp, 1);
3412 tree len = CALL_EXPR_ARG (exp, 2);
3413 return expand_builtin_mempcpy_args (dest, src, len,
3415 target, mode, /*endp=*/ 1);
3419 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3420 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3421 so that this can also be called without constructing an actual CALL_EXPR.
3422 TYPE is the return type of the call. The other arguments and return value
3423 are the same as for expand_builtin_mempcpy. */
3426 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3427 rtx target, enum machine_mode mode, int endp)
3429 /* If return value is ignored, transform mempcpy into memcpy. */
3430 if (target == const0_rtx)
3432 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3437 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3438 target, mode, EXPAND_NORMAL);
3442 const char *src_str;
3443 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3444 unsigned int dest_align
3445 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3446 rtx dest_mem, src_mem, len_rtx;
3447 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3451 while (TREE_CODE (result) == COMPOUND_EXPR)
3453 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3455 result = TREE_OPERAND (result, 1);
3457 return expand_expr (result, target, mode, EXPAND_NORMAL);
3460 /* If either SRC or DEST is not a pointer type, don't do this
3461 operation in-line. */
3462 if (dest_align == 0 || src_align == 0)
3465 /* If LEN is not constant, call the normal function. */
3466 if (! host_integerp (len, 1))
3469 len_rtx = expand_normal (len);
3470 src_str = c_getstr (src);
3472 /* If SRC is a string constant and block move would be done
3473 by pieces, we can avoid loading the string from memory
3474 and only stored the computed constants. */
3476 && GET_CODE (len_rtx) == CONST_INT
3477 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3478 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3479 (void *) src_str, dest_align, false))
3481 dest_mem = get_memory_rtx (dest, len);
3482 set_mem_align (dest_mem, dest_align);
3483 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3484 builtin_memcpy_read_str,
3485 (void *) src_str, dest_align,
3487 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3488 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3492 if (GET_CODE (len_rtx) == CONST_INT
3493 && can_move_by_pieces (INTVAL (len_rtx),
3494 MIN (dest_align, src_align)))
3496 dest_mem = get_memory_rtx (dest, len);
3497 set_mem_align (dest_mem, dest_align);
3498 src_mem = get_memory_rtx (src, len);
3499 set_mem_align (src_mem, src_align);
3500 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3501 MIN (dest_align, src_align), endp);
3502 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3503 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3511 /* Expand expression EXP, which is a call to the memmove builtin. Return
3512 NULL_RTX if we failed; the caller should emit a normal call. */
3515 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3517 if (!validate_arglist (exp,
3518 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3522 tree dest = CALL_EXPR_ARG (exp, 0);
3523 tree src = CALL_EXPR_ARG (exp, 1);
3524 tree len = CALL_EXPR_ARG (exp, 2);
3525 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3526 target, mode, ignore);
3530 /* Helper function to do the actual work for expand_builtin_memmove. The
3531 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3532 so that this can also be called without constructing an actual CALL_EXPR.
3533 TYPE is the return type of the call. The other arguments and return value
3534 are the same as for expand_builtin_memmove. */
3537 expand_builtin_memmove_args (tree dest, tree src, tree len,
3538 tree type, rtx target, enum machine_mode mode,
3541 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3545 STRIP_TYPE_NOPS (result);
3546 while (TREE_CODE (result) == COMPOUND_EXPR)
3548 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3550 result = TREE_OPERAND (result, 1);
3552 return expand_expr (result, target, mode, EXPAND_NORMAL);
3555 /* Otherwise, call the normal function. */
3559 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3560 NULL_RTX if we failed the caller should emit a normal call. */
3563 expand_builtin_bcopy (tree exp, int ignore)
3565 tree type = TREE_TYPE (exp);
3566 tree src, dest, size;
3568 if (!validate_arglist (exp,
3569 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3572 src = CALL_EXPR_ARG (exp, 0);
3573 dest = CALL_EXPR_ARG (exp, 1);
3574 size = CALL_EXPR_ARG (exp, 2);
3576 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3577 This is done this way so that if it isn't expanded inline, we fall
3578 back to calling bcopy instead of memmove. */
3579 return expand_builtin_memmove_args (dest, src,
3580 fold_convert (sizetype, size),
3581 type, const0_rtx, VOIDmode,
3586 # define HAVE_movstr 0
3587 # define CODE_FOR_movstr CODE_FOR_nothing
3590 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3591 we failed, the caller should emit a normal call, otherwise try to
3592 get the result in TARGET, if convenient. If ENDP is 0 return the
3593 destination pointer, if ENDP is 1 return the end pointer ala
3594 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3598 expand_movstr (tree dest, tree src, rtx target, int endp)
3604 const struct insn_data * data;
3609 dest_mem = get_memory_rtx (dest, NULL);
3610 src_mem = get_memory_rtx (src, NULL);
3613 target = force_reg (Pmode, XEXP (dest_mem, 0));
3614 dest_mem = replace_equiv_address (dest_mem, target);
3615 end = gen_reg_rtx (Pmode);
3619 if (target == 0 || target == const0_rtx)
3621 end = gen_reg_rtx (Pmode);
3629 data = insn_data + CODE_FOR_movstr;
3631 if (data->operand[0].mode != VOIDmode)
3632 end = gen_lowpart (data->operand[0].mode, end);
3634 insn = data->genfun (end, dest_mem, src_mem);
3640 /* movstr is supposed to set end to the address of the NUL
3641 terminator. If the caller requested a mempcpy-like return value,
3643 if (endp == 1 && target != const0_rtx)
3645 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3646 emit_move_insn (target, force_operand (tem, NULL_RTX));
3652 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3653 NULL_RTX if we failed the caller should emit a normal call, otherwise
3654 try to get the result in TARGET, if convenient (and in mode MODE if that's
3658 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3660 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3662 tree dest = CALL_EXPR_ARG (exp, 0);
3663 tree src = CALL_EXPR_ARG (exp, 1);
3664 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3669 /* Helper function to do the actual work for expand_builtin_strcpy. The
3670 arguments to the builtin_strcpy call DEST and SRC are broken out
3671 so that this can also be called without constructing an actual CALL_EXPR.
3672 The other arguments and return value are the same as for
3673 expand_builtin_strcpy. */
3676 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3677 rtx target, enum machine_mode mode)
3679 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3681 return expand_expr (result, target, mode, EXPAND_NORMAL);
3682 return expand_movstr (dest, src, target, /*endp=*/0);
3686 /* Expand a call EXP to the stpcpy builtin.
3687 Return NULL_RTX if we failed the caller should emit a normal call,
3688 otherwise try to get the result in TARGET, if convenient (and in
3689 mode MODE if that's convenient). */
3692 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3696 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3699 dst = CALL_EXPR_ARG (exp, 0);
3700 src = CALL_EXPR_ARG (exp, 1);
3702 /* If return value is ignored, transform stpcpy into strcpy. */
3703 if (target == const0_rtx)
3705 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3709 return expand_expr (build_call_expr (fn, 2, dst, src),
3710 target, mode, EXPAND_NORMAL);
3717 /* Ensure we get an actual string whose length can be evaluated at
3718 compile-time, not an expression containing a string. This is
3719 because the latter will potentially produce pessimized code
3720 when used to produce the return value. */
3721 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3722 return expand_movstr (dst, src, target, /*endp=*/2);
3724 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3725 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3726 target, mode, /*endp=*/2);
3731 if (TREE_CODE (len) == INTEGER_CST)
3733 rtx len_rtx = expand_normal (len);
3735 if (GET_CODE (len_rtx) == CONST_INT)
3737 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3738 dst, src, target, mode);
3744 if (mode != VOIDmode)
3745 target = gen_reg_rtx (mode);
3747 target = gen_reg_rtx (GET_MODE (ret));
3749 if (GET_MODE (target) != GET_MODE (ret))
3750 ret = gen_lowpart (GET_MODE (target), ret);
3752 ret = plus_constant (ret, INTVAL (len_rtx));
3753 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3761 return expand_movstr (dst, src, target, /*endp=*/2);
3765 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3766 bytes from constant string DATA + OFFSET and return it as target
3770 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3771 enum machine_mode mode)
3773 const char *str = (const char *) data;
3775 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3778 return c_readstr (str + offset, mode);
3781 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3782 NULL_RTX if we failed the caller should emit a normal call. */
3785 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3787 tree fndecl = get_callee_fndecl (exp);
3789 if (validate_arglist (exp,
3790 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3792 tree dest = CALL_EXPR_ARG (exp, 0);
3793 tree src = CALL_EXPR_ARG (exp, 1);
3794 tree len = CALL_EXPR_ARG (exp, 2);
3795 tree slen = c_strlen (src, 1);
3796 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3800 while (TREE_CODE (result) == COMPOUND_EXPR)
3802 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3804 result = TREE_OPERAND (result, 1);
3806 return expand_expr (result, target, mode, EXPAND_NORMAL);
3809 /* We must be passed a constant len and src parameter. */
3810 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3813 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3815 /* We're required to pad with trailing zeros if the requested
3816 len is greater than strlen(s2)+1. In that case try to
3817 use store_by_pieces, if it fails, punt. */
3818 if (tree_int_cst_lt (slen, len))
3820 unsigned int dest_align
3821 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3822 const char *p = c_getstr (src);
3825 if (!p || dest_align == 0 || !host_integerp (len, 1)
3826 || !can_store_by_pieces (tree_low_cst (len, 1),
3827 builtin_strncpy_read_str,
3828 (void *) p, dest_align, false))
3831 dest_mem = get_memory_rtx (dest, len);
3832 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3833 builtin_strncpy_read_str,
3834 (void *) p, dest_align, false, 0);
3835 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3836 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3843 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3844 bytes from constant string DATA + OFFSET and return it as target
3848 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3849 enum machine_mode mode)
3851 const char *c = (const char *) data;
3852 char *p = alloca (GET_MODE_SIZE (mode));
3854 memset (p, *c, GET_MODE_SIZE (mode));
3856 return c_readstr (p, mode);
3859 /* Callback routine for store_by_pieces. Return the RTL of a register
3860 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3861 char value given in the RTL register data. For example, if mode is
3862 4 bytes wide, return the RTL for 0x01010101*data. */
3865 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3866 enum machine_mode mode)
3872 size = GET_MODE_SIZE (mode);
3877 memset (p, 1, size);
3878 coeff = c_readstr (p, mode);
3880 target = convert_to_mode (mode, (rtx) data, 1);
3881 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3882 return force_reg (mode, target);
3885 /* Expand expression EXP, which is a call to the memset builtin. Return
3886 NULL_RTX if we failed the caller should emit a normal call, otherwise
3887 try to get the result in TARGET, if convenient (and in mode MODE if that's
3891 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3893 if (!validate_arglist (exp,
3894 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3898 tree dest = CALL_EXPR_ARG (exp, 0);
3899 tree val = CALL_EXPR_ARG (exp, 1);
3900 tree len = CALL_EXPR_ARG (exp, 2);
3901 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3905 /* Helper function to do the actual work for expand_builtin_memset. The
3906 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3907 so that this can also be called without constructing an actual CALL_EXPR.
3908 The other arguments and return value are the same as for
3909 expand_builtin_memset. */
3912 expand_builtin_memset_args (tree dest, tree val, tree len,
3913 rtx target, enum machine_mode mode, tree orig_exp)
3916 enum built_in_function fcode;
3918 unsigned int dest_align;
3919 rtx dest_mem, dest_addr, len_rtx;
3920 HOST_WIDE_INT expected_size = -1;
3921 unsigned int expected_align = 0;
3923 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3925 /* If DEST is not a pointer type, don't do this operation in-line. */
3926 if (dest_align == 0)
3929 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3930 if (expected_align < dest_align)
3931 expected_align = dest_align;
3933 /* If the LEN parameter is zero, return DEST. */
3934 if (integer_zerop (len))
3936 /* Evaluate and ignore VAL in case it has side-effects. */
3937 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3938 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3941 /* Stabilize the arguments in case we fail. */
3942 dest = builtin_save_expr (dest);
3943 val = builtin_save_expr (val);
3944 len = builtin_save_expr (len);
3946 len_rtx = expand_normal (len);
3947 dest_mem = get_memory_rtx (dest, len);
3949 if (TREE_CODE (val) != INTEGER_CST)
3953 val_rtx = expand_normal (val);
3954 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3957 /* Assume that we can memset by pieces if we can store
3958 * the coefficients by pieces (in the required modes).
3959 * We can't pass builtin_memset_gen_str as that emits RTL. */
3961 if (host_integerp (len, 1)
3962 && can_store_by_pieces (tree_low_cst (len, 1),
3963 builtin_memset_read_str, &c, dest_align,
3966 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3968 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3969 builtin_memset_gen_str, val_rtx, dest_align,
3972 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3973 dest_align, expected_align,
3977 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3978 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3982 if (target_char_cast (val, &c))
3987 if (host_integerp (len, 1)
3988 && can_store_by_pieces (tree_low_cst (len, 1),
3989 builtin_memset_read_str, &c, dest_align,
3991 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3992 builtin_memset_read_str, &c, dest_align, true, 0);
3993 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3994 dest_align, expected_align,
3998 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3999 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4003 set_mem_align (dest_mem, dest_align);
4004 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4005 CALL_EXPR_TAILCALL (orig_exp)
4006 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4007 expected_align, expected_size);
4011 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4012 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4018 fndecl = get_callee_fndecl (orig_exp);
4019 fcode = DECL_FUNCTION_CODE (fndecl);
4020 if (fcode == BUILT_IN_MEMSET)
4021 fn = build_call_expr (fndecl, 3, dest, val, len);
4022 else if (fcode == BUILT_IN_BZERO)
4023 fn = build_call_expr (fndecl, 2, dest, len);
4026 if (TREE_CODE (fn) == CALL_EXPR)
4027 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4028 return expand_call (fn, target, target == const0_rtx);
4031 /* Expand expression EXP, which is a call to the bzero builtin. Return
4032 NULL_RTX if we failed the caller should emit a normal call. */
4035 expand_builtin_bzero (tree exp)
4039 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4042 dest = CALL_EXPR_ARG (exp, 0);
4043 size = CALL_EXPR_ARG (exp, 1);
4045 /* New argument list transforming bzero(ptr x, int y) to
4046 memset(ptr x, int 0, size_t y). This is done this way
4047 so that if it isn't expanded inline, we fallback to
4048 calling bzero instead of memset. */
4050 return expand_builtin_memset_args (dest, integer_zero_node,
4051 fold_convert (sizetype, size),
4052 const0_rtx, VOIDmode, exp);
4055 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4056 caller should emit a normal call, otherwise try to get the result
4057 in TARGET, if convenient (and in mode MODE if that's convenient). */
4060 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4062 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4063 INTEGER_TYPE, VOID_TYPE))
4065 tree type = TREE_TYPE (exp);
4066 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4067 CALL_EXPR_ARG (exp, 1),
4068 CALL_EXPR_ARG (exp, 2), type);
4070 return expand_expr (result, target, mode, EXPAND_NORMAL);
4075 /* Expand expression EXP, which is a call to the memcmp built-in function.
4076 Return NULL_RTX if we failed and the
4077 caller should emit a normal call, otherwise try to get the result in
4078 TARGET, if convenient (and in mode MODE, if that's convenient). */
4081 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4083 if (!validate_arglist (exp,
4084 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4088 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4089 CALL_EXPR_ARG (exp, 1),
4090 CALL_EXPR_ARG (exp, 2));
4092 return expand_expr (result, target, mode, EXPAND_NORMAL);
4095 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4097 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4100 tree arg1 = CALL_EXPR_ARG (exp, 0);
4101 tree arg2 = CALL_EXPR_ARG (exp, 1);
4102 tree len = CALL_EXPR_ARG (exp, 2);
4105 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4107 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4108 enum machine_mode insn_mode;
4110 #ifdef HAVE_cmpmemsi
4112 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4115 #ifdef HAVE_cmpstrnsi
4117 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4122 /* If we don't have POINTER_TYPE, call the function. */
4123 if (arg1_align == 0 || arg2_align == 0)
4126 /* Make a place to write the result of the instruction. */
4129 && REG_P (result) && GET_MODE (result) == insn_mode
4130 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4131 result = gen_reg_rtx (insn_mode);
4133 arg1_rtx = get_memory_rtx (arg1, len);
4134 arg2_rtx = get_memory_rtx (arg2, len);
4135 arg3_rtx = expand_normal (len);
4137 /* Set MEM_SIZE as appropriate. */
4138 if (GET_CODE (arg3_rtx) == CONST_INT)
4140 set_mem_size (arg1_rtx, arg3_rtx);
4141 set_mem_size (arg2_rtx, arg3_rtx);
4144 #ifdef HAVE_cmpmemsi
4146 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4147 GEN_INT (MIN (arg1_align, arg2_align)));
4150 #ifdef HAVE_cmpstrnsi
4152 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4153 GEN_INT (MIN (arg1_align, arg2_align)));
4161 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4162 TYPE_MODE (integer_type_node), 3,
4163 XEXP (arg1_rtx, 0), Pmode,
4164 XEXP (arg2_rtx, 0), Pmode,
4165 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4166 TYPE_UNSIGNED (sizetype)),
4167 TYPE_MODE (sizetype));
4169 /* Return the value in the proper mode for this function. */
4170 mode = TYPE_MODE (TREE_TYPE (exp));
4171 if (GET_MODE (result) == mode)
4173 else if (target != 0)
4175 convert_move (target, result, 0);
4179 return convert_to_mode (mode, result, 0);
4186 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4187 if we failed the caller should emit a normal call, otherwise try to get
4188 the result in TARGET, if convenient. */
4191 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4193 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4197 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4198 CALL_EXPR_ARG (exp, 1));
4200 return expand_expr (result, target, mode, EXPAND_NORMAL);
4203 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4204 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4205 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4207 rtx arg1_rtx, arg2_rtx;
4208 rtx result, insn = NULL_RTX;
4210 tree arg1 = CALL_EXPR_ARG (exp, 0);
4211 tree arg2 = CALL_EXPR_ARG (exp, 1);
4214 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4216 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4218 /* If we don't have POINTER_TYPE, call the function. */
4219 if (arg1_align == 0 || arg2_align == 0)
4222 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4223 arg1 = builtin_save_expr (arg1);
4224 arg2 = builtin_save_expr (arg2);
4226 arg1_rtx = get_memory_rtx (arg1, NULL);
4227 arg2_rtx = get_memory_rtx (arg2, NULL);
4229 #ifdef HAVE_cmpstrsi
4230 /* Try to call cmpstrsi. */
4233 enum machine_mode insn_mode
4234 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4236 /* Make a place to write the result of the instruction. */
4239 && REG_P (result) && GET_MODE (result) == insn_mode
4240 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4241 result = gen_reg_rtx (insn_mode);
4243 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4244 GEN_INT (MIN (arg1_align, arg2_align)));
4247 #ifdef HAVE_cmpstrnsi
4248 /* Try to determine at least one length and call cmpstrnsi. */
4249 if (!insn && HAVE_cmpstrnsi)
4254 enum machine_mode insn_mode
4255 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4256 tree len1 = c_strlen (arg1, 1);
4257 tree len2 = c_strlen (arg2, 1);
4260 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4262 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4264 /* If we don't have a constant length for the first, use the length
4265 of the second, if we know it. We don't require a constant for
4266 this case; some cost analysis could be done if both are available
4267 but neither is constant. For now, assume they're equally cheap,
4268 unless one has side effects. If both strings have constant lengths,
4275 else if (TREE_SIDE_EFFECTS (len1))
4277 else if (TREE_SIDE_EFFECTS (len2))
4279 else if (TREE_CODE (len1) != INTEGER_CST)
4281 else if (TREE_CODE (len2) != INTEGER_CST)
4283 else if (tree_int_cst_lt (len1, len2))
4288 /* If both arguments have side effects, we cannot optimize. */
4289 if (!len || TREE_SIDE_EFFECTS (len))
4292 arg3_rtx = expand_normal (len);
4294 /* Make a place to write the result of the instruction. */
4297 && REG_P (result) && GET_MODE (result) == insn_mode
4298 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4299 result = gen_reg_rtx (insn_mode);
4301 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4302 GEN_INT (MIN (arg1_align, arg2_align)));
4310 /* Return the value in the proper mode for this function. */
4311 mode = TYPE_MODE (TREE_TYPE (exp));
4312 if (GET_MODE (result) == mode)
4315 return convert_to_mode (mode, result, 0);
4316 convert_move (target, result, 0);
4320 /* Expand the library call ourselves using a stabilized argument
4321 list to avoid re-evaluating the function's arguments twice. */
4322 #ifdef HAVE_cmpstrnsi
4325 fndecl = get_callee_fndecl (exp);
4326 fn = build_call_expr (fndecl, 2, arg1, arg2);
4327 if (TREE_CODE (fn) == CALL_EXPR)
4328 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4329 return expand_call (fn, target, target == const0_rtx);
4335 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4336 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4337 the result in TARGET, if convenient. */
4340 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4342 if (!validate_arglist (exp,
4343 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4347 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4348 CALL_EXPR_ARG (exp, 1),
4349 CALL_EXPR_ARG (exp, 2));
4351 return expand_expr (result, target, mode, EXPAND_NORMAL);
4354 /* If c_strlen can determine an expression for one of the string
4355 lengths, and it doesn't have side effects, then emit cmpstrnsi
4356 using length MIN(strlen(string)+1, arg3). */
4357 #ifdef HAVE_cmpstrnsi
4360 tree len, len1, len2;
4361 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4364 tree arg1 = CALL_EXPR_ARG (exp, 0);
4365 tree arg2 = CALL_EXPR_ARG (exp, 1);
4366 tree arg3 = CALL_EXPR_ARG (exp, 2);
4369 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4371 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4372 enum machine_mode insn_mode
4373 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4375 len1 = c_strlen (arg1, 1);
4376 len2 = c_strlen (arg2, 1);
4379 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4381 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4383 /* If we don't have a constant length for the first, use the length
4384 of the second, if we know it. We don't require a constant for
4385 this case; some cost analysis could be done if both are available
4386 but neither is constant. For now, assume they're equally cheap,
4387 unless one has side effects. If both strings have constant lengths,
4394 else if (TREE_SIDE_EFFECTS (len1))
4396 else if (TREE_SIDE_EFFECTS (len2))
4398 else if (TREE_CODE (len1) != INTEGER_CST)
4400 else if (TREE_CODE (len2) != INTEGER_CST)
4402 else if (tree_int_cst_lt (len1, len2))
4407 /* If both arguments have side effects, we cannot optimize. */
4408 if (!len || TREE_SIDE_EFFECTS (len))
4411 /* The actual new length parameter is MIN(len,arg3). */
4412 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4413 fold_convert (TREE_TYPE (len), arg3));
4415 /* If we don't have POINTER_TYPE, call the function. */
4416 if (arg1_align == 0 || arg2_align == 0)
4419 /* Make a place to write the result of the instruction. */
4422 && REG_P (result) && GET_MODE (result) == insn_mode
4423 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4424 result = gen_reg_rtx (insn_mode);
4426 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4427 arg1 = builtin_save_expr (arg1);
4428 arg2 = builtin_save_expr (arg2);
4429 len = builtin_save_expr (len);
4431 arg1_rtx = get_memory_rtx (arg1, len);
4432 arg2_rtx = get_memory_rtx (arg2, len);
4433 arg3_rtx = expand_normal (len);
4434 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4435 GEN_INT (MIN (arg1_align, arg2_align)));
4440 /* Return the value in the proper mode for this function. */
4441 mode = TYPE_MODE (TREE_TYPE (exp));
4442 if (GET_MODE (result) == mode)
4445 return convert_to_mode (mode, result, 0);
4446 convert_move (target, result, 0);
4450 /* Expand the library call ourselves using a stabilized argument
4451 list to avoid re-evaluating the function's arguments twice. */
4452 fndecl = get_callee_fndecl (exp);
4453 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4454 if (TREE_CODE (fn) == CALL_EXPR)
4455 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4456 return expand_call (fn, target, target == const0_rtx);
4462 /* Expand expression EXP, which is a call to the strcat builtin.
4463 Return NULL_RTX if we failed the caller should emit a normal call,
4464 otherwise try to get the result in TARGET, if convenient. */
4467 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4469 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4473 tree dst = CALL_EXPR_ARG (exp, 0);
4474 tree src = CALL_EXPR_ARG (exp, 1);
4475 const char *p = c_getstr (src);
4477 /* If the string length is zero, return the dst parameter. */
4478 if (p && *p == '\0')
4479 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4483 /* See if we can store by pieces into (dst + strlen(dst)). */
4484 tree newsrc, newdst,
4485 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4488 /* Stabilize the argument list. */
4489 newsrc = builtin_save_expr (src);
4490 dst = builtin_save_expr (dst);
4494 /* Create strlen (dst). */
4495 newdst = build_call_expr (strlen_fn, 1, dst);
4496 /* Create (dst p+ strlen (dst)). */
4498 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4499 newdst = builtin_save_expr (newdst);
4501 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4503 end_sequence (); /* Stop sequence. */
4507 /* Output the entire sequence. */
4508 insns = get_insns ();
4512 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4519 /* Expand expression EXP, which is a call to the strncat builtin.
4520 Return NULL_RTX if we failed the caller should emit a normal call,
4521 otherwise try to get the result in TARGET, if convenient. */
4524 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4526 if (validate_arglist (exp,
4527 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4529 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4530 CALL_EXPR_ARG (exp, 1),
4531 CALL_EXPR_ARG (exp, 2));
4533 return expand_expr (result, target, mode, EXPAND_NORMAL);
4538 /* Expand expression EXP, which is a call to the strspn builtin.
4539 Return NULL_RTX if we failed the caller should emit a normal call,
4540 otherwise try to get the result in TARGET, if convenient. */
4543 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4545 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4547 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4548 CALL_EXPR_ARG (exp, 1));
4550 return expand_expr (result, target, mode, EXPAND_NORMAL);
4555 /* Expand expression EXP, which is a call to the strcspn builtin.
4556 Return NULL_RTX if we failed the caller should emit a normal call,
4557 otherwise try to get the result in TARGET, if convenient. */
4560 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4562 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4564 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4565 CALL_EXPR_ARG (exp, 1));
4567 return expand_expr (result, target, mode, EXPAND_NORMAL);
4572 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4573 if that's convenient. */
4576 expand_builtin_saveregs (void)
4580 /* Don't do __builtin_saveregs more than once in a function.
4581 Save the result of the first call and reuse it. */
4582 if (saveregs_value != 0)
4583 return saveregs_value;
4585 /* When this function is called, it means that registers must be
4586 saved on entry to this function. So we migrate the call to the
4587 first insn of this function. */
4591 /* Do whatever the machine needs done in this case. */
4592 val = targetm.calls.expand_builtin_saveregs ();
4597 saveregs_value = val;
4599 /* Put the insns after the NOTE that starts the function. If this
4600 is inside a start_sequence, make the outer-level insn chain current, so
4601 the code is placed at the start of the function. */
4602 push_topmost_sequence ();
4603 emit_insn_after (seq, entry_of_function ());
4604 pop_topmost_sequence ();
4609 /* __builtin_args_info (N) returns word N of the arg space info
4610 for the current function. The number and meanings of words
4611 is controlled by the definition of CUMULATIVE_ARGS. */
4614 expand_builtin_args_info (tree exp)
4616 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4617 int *word_ptr = (int *) &crtl->args.info;
4619 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4621 if (call_expr_nargs (exp) != 0)
4623 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4624 error ("argument of %<__builtin_args_info%> must be constant");
4627 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4629 if (wordnum < 0 || wordnum >= nwords)
4630 error ("argument of %<__builtin_args_info%> out of range");
4632 return GEN_INT (word_ptr[wordnum]);
4636 error ("missing argument in %<__builtin_args_info%>");
4641 /* Expand a call to __builtin_next_arg. */
4644 expand_builtin_next_arg (void)
4646 /* Checking arguments is already done in fold_builtin_next_arg
4647 that must be called before this function. */
4648 return expand_binop (ptr_mode, add_optab,
4649 crtl->args.internal_arg_pointer,
4650 crtl->args.arg_offset_rtx,
4651 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4654 /* Make it easier for the backends by protecting the valist argument
4655 from multiple evaluations. */
4658 stabilize_va_list (tree valist, int needs_lvalue)
4660 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4662 if (TREE_SIDE_EFFECTS (valist))
4663 valist = save_expr (valist);
4665 /* For this case, the backends will be expecting a pointer to
4666 TREE_TYPE (va_list_type_node), but it's possible we've
4667 actually been given an array (an actual va_list_type_node).
4669 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4671 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4672 valist = build_fold_addr_expr_with_type (valist, p1);
4681 if (! TREE_SIDE_EFFECTS (valist))
4684 pt = build_pointer_type (va_list_type_node);
4685 valist = fold_build1 (ADDR_EXPR, pt, valist);
4686 TREE_SIDE_EFFECTS (valist) = 1;
4689 if (TREE_SIDE_EFFECTS (valist))
4690 valist = save_expr (valist);
4691 valist = build_fold_indirect_ref (valist);
4697 /* The "standard" definition of va_list is void*. */
4700 std_build_builtin_va_list (void)
4702 return ptr_type_node;
4705 /* The "standard" implementation of va_start: just assign `nextarg' to
4709 std_expand_builtin_va_start (tree valist, rtx nextarg)
4711 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4712 convert_move (va_r, nextarg, 0);
4715 /* Expand EXP, a call to __builtin_va_start. */
4718 expand_builtin_va_start (tree exp)
4723 if (call_expr_nargs (exp) < 2)
4725 error ("too few arguments to function %<va_start%>");
4729 if (fold_builtin_next_arg (exp, true))
4732 nextarg = expand_builtin_next_arg ();
4733 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4735 if (targetm.expand_builtin_va_start)
4736 targetm.expand_builtin_va_start (valist, nextarg);
4738 std_expand_builtin_va_start (valist, nextarg);
4743 /* The "standard" implementation of va_arg: read the value from the
4744 current (padded) address and increment by the (padded) size. */
4747 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4749 tree addr, t, type_size, rounded_size, valist_tmp;
4750 unsigned HOST_WIDE_INT align, boundary;
4753 #ifdef ARGS_GROW_DOWNWARD
4754 /* All of the alignment and movement below is for args-grow-up machines.
4755 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4756 implement their own specialized gimplify_va_arg_expr routines. */
4760 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4762 type = build_pointer_type (type);
4764 align = PARM_BOUNDARY / BITS_PER_UNIT;
4765 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4767 /* Hoist the valist value into a temporary for the moment. */
4768 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4770 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4771 requires greater alignment, we must perform dynamic alignment. */
4772 if (boundary > align
4773 && !integer_zerop (TYPE_SIZE (type)))
4775 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4776 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4777 valist_tmp, size_int (boundary - 1)));
4778 gimplify_and_add (t, pre_p);
4780 t = fold_convert (sizetype, valist_tmp);
4781 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4782 fold_convert (TREE_TYPE (valist),
4783 fold_build2 (BIT_AND_EXPR, sizetype, t,
4784 size_int (-boundary))));
4785 gimplify_and_add (t, pre_p);
4790 /* If the actual alignment is less than the alignment of the type,
4791 adjust the type accordingly so that we don't assume strict alignment
4792 when dereferencing the pointer. */
4793 boundary *= BITS_PER_UNIT;
4794 if (boundary < TYPE_ALIGN (type))
4796 type = build_variant_type_copy (type);
4797 TYPE_ALIGN (type) = boundary;
4800 /* Compute the rounded size of the type. */
4801 type_size = size_in_bytes (type);
4802 rounded_size = round_up (type_size, align);
4804 /* Reduce rounded_size so it's sharable with the postqueue. */
4805 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4809 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4811 /* Small args are padded downward. */
4812 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4813 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4814 size_binop (MINUS_EXPR, rounded_size, type_size));
4815 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4818 /* Compute new value for AP. */
4819 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4820 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4821 gimplify_and_add (t, pre_p);
4823 addr = fold_convert (build_pointer_type (type), addr);
4826 addr = build_va_arg_indirect_ref (addr);
4828 return build_va_arg_indirect_ref (addr);
4831 /* Build an indirect-ref expression over the given TREE, which represents a
4832 piece of a va_arg() expansion. */
4834 build_va_arg_indirect_ref (tree addr)
4836 addr = build_fold_indirect_ref (addr);
4838 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4844 /* Return a dummy expression of type TYPE in order to keep going after an
4848 dummy_object (tree type)
4850 tree t = build_int_cst (build_pointer_type (type), 0);
4851 return build1 (INDIRECT_REF, type, t);
4854 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4855 builtin function, but a very special sort of operator. */
4857 enum gimplify_status
4858 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4860 tree promoted_type, want_va_type, have_va_type;
4861 tree valist = TREE_OPERAND (*expr_p, 0);
4862 tree type = TREE_TYPE (*expr_p);
4865 /* Verify that valist is of the proper type. */
4866 want_va_type = va_list_type_node;
4867 have_va_type = TREE_TYPE (valist);
4869 if (have_va_type == error_mark_node)
4872 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4874 /* If va_list is an array type, the argument may have decayed
4875 to a pointer type, e.g. by being passed to another function.
4876 In that case, unwrap both types so that we can compare the
4877 underlying records. */
4878 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4879 || POINTER_TYPE_P (have_va_type))
4881 want_va_type = TREE_TYPE (want_va_type);
4882 have_va_type = TREE_TYPE (have_va_type);
4886 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4888 error ("first argument to %<va_arg%> not of type %<va_list%>");
4892 /* Generate a diagnostic for requesting data of a type that cannot
4893 be passed through `...' due to type promotion at the call site. */
4894 else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4897 static bool gave_help;
4899 /* Unfortunately, this is merely undefined, rather than a constraint
4900 violation, so we cannot make this an error. If this call is never
4901 executed, the program is still strictly conforming. */
4902 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4903 type, promoted_type);
4907 inform ("(so you should pass %qT not %qT to %<va_arg%>)",
4908 promoted_type, type);
4911 /* We can, however, treat "undefined" any way we please.
4912 Call abort to encourage the user to fix the program. */
4913 inform ("if this code is reached, the program will abort");
4914 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4915 append_to_statement_list (t, pre_p);
4917 /* This is dead code, but go ahead and finish so that the
4918 mode of the result comes out right. */
4919 *expr_p = dummy_object (type);
4924 /* Make it easier for the backends by protecting the valist argument
4925 from multiple evaluations. */
4926 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4928 /* For this case, the backends will be expecting a pointer to
4929 TREE_TYPE (va_list_type_node), but it's possible we've
4930 actually been given an array (an actual va_list_type_node).
4932 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4934 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4935 valist = build_fold_addr_expr_with_type (valist, p1);
4937 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4940 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4942 if (!targetm.gimplify_va_arg_expr)
4943 /* FIXME:Once most targets are converted we should merely
4944 assert this is non-null. */
4947 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4952 /* Expand EXP, a call to __builtin_va_end. */
4955 expand_builtin_va_end (tree exp)
4957 tree valist = CALL_EXPR_ARG (exp, 0);
4959 /* Evaluate for side effects, if needed. I hate macros that don't
4961 if (TREE_SIDE_EFFECTS (valist))
4962 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4967 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4968 builtin rather than just as an assignment in stdarg.h because of the
4969 nastiness of array-type va_list types. */
4972 expand_builtin_va_copy (tree exp)
4976 dst = CALL_EXPR_ARG (exp, 0);
4977 src = CALL_EXPR_ARG (exp, 1);
4979 dst = stabilize_va_list (dst, 1);
4980 src = stabilize_va_list (src, 0);
4982 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4984 t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4985 TREE_SIDE_EFFECTS (t) = 1;
4986 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4990 rtx dstb, srcb, size;
4992 /* Evaluate to pointers. */
4993 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4994 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4995 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4996 VOIDmode, EXPAND_NORMAL);
4998 dstb = convert_memory_address (Pmode, dstb);
4999 srcb = convert_memory_address (Pmode, srcb);
5001 /* "Dereference" to BLKmode memories. */
5002 dstb = gen_rtx_MEM (BLKmode, dstb);
5003 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5004 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
5005 srcb = gen_rtx_MEM (BLKmode, srcb);
5006 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5007 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
5010 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5016 /* Expand a call to one of the builtin functions __builtin_frame_address or
5017 __builtin_return_address. */
5020 expand_builtin_frame_address (tree fndecl, tree exp)
5022 /* The argument must be a nonnegative integer constant.
5023 It counts the number of frames to scan up the stack.
5024 The value is the return address saved in that frame. */
5025 if (call_expr_nargs (exp) == 0)
5026 /* Warning about missing arg was already issued. */
5028 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5030 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5031 error ("invalid argument to %<__builtin_frame_address%>");
5033 error ("invalid argument to %<__builtin_return_address%>");
5039 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5040 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5042 /* Some ports cannot access arbitrary stack frames. */
5045 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5046 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5048 warning (0, "unsupported argument to %<__builtin_return_address%>");
5052 /* For __builtin_frame_address, return what we've got. */
5053 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5057 && ! CONSTANT_P (tem))
5058 tem = copy_to_mode_reg (Pmode, tem);
5063 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5064 we failed and the caller should emit a normal call, otherwise try to get
5065 the result in TARGET, if convenient. */
5068 expand_builtin_alloca (tree exp, rtx target)
5073 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5074 should always expand to function calls. These can be intercepted
5079 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5082 /* Compute the argument. */
5083 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5085 /* Allocate the desired space. */
5086 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5087 result = convert_memory_address (ptr_mode, result);
5092 /* Expand a call to a bswap builtin with argument ARG0. MODE
5093 is the mode to expand with. */
5096 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5098 enum machine_mode mode;
5102 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5105 arg = CALL_EXPR_ARG (exp, 0);
5106 mode = TYPE_MODE (TREE_TYPE (arg));
5107 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5109 target = expand_unop (mode, bswap_optab, op0, target, 1);
5111 gcc_assert (target);
5113 return convert_to_mode (mode, target, 0);
5116 /* Expand a call to a unary builtin in EXP.
5117 Return NULL_RTX if a normal call should be emitted rather than expanding the
5118 function in-line. If convenient, the result should be placed in TARGET.
5119 SUBTARGET may be used as the target for computing one of EXP's operands. */
5122 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5123 rtx subtarget, optab op_optab)
5127 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5130 /* Compute the argument. */
5131 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5132 VOIDmode, EXPAND_NORMAL);
5133 /* Compute op, into TARGET if possible.
5134 Set TARGET to wherever the result comes back. */
5135 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5136 op_optab, op0, target, 1);
5137 gcc_assert (target);
5139 return convert_to_mode (target_mode, target, 0);
5142 /* If the string passed to fputs is a constant and is one character
5143 long, we attempt to transform this call into __builtin_fputc(). */
5146 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5148 /* Verify the arguments in the original call. */
5149 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5151 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5152 CALL_EXPR_ARG (exp, 1),
5153 (target == const0_rtx),
5154 unlocked, NULL_TREE);
5156 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5161 /* Expand a call to __builtin_expect. We just return our argument
5162 as the builtin_expect semantic should've been already executed by
5163 tree branch prediction pass. */
5166 expand_builtin_expect (tree exp, rtx target)
5170 if (call_expr_nargs (exp) < 2)
5172 arg = CALL_EXPR_ARG (exp, 0);
5173 c = CALL_EXPR_ARG (exp, 1);
5175 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5176 /* When guessing was done, the hints should be already stripped away. */
5177 gcc_assert (!flag_guess_branch_prob
5178 || optimize == 0 || errorcount || sorrycount);
5183 expand_builtin_trap (void)
5187 emit_insn (gen_trap ());
5190 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5194 /* Expand EXP, a call to fabs, fabsf or fabsl.
5195 Return NULL_RTX if a normal call should be emitted rather than expanding
5196 the function inline. If convenient, the result should be placed
5197 in TARGET. SUBTARGET may be used as the target for computing
5201 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5203 enum machine_mode mode;
5207 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5210 arg = CALL_EXPR_ARG (exp, 0);
5211 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5212 mode = TYPE_MODE (TREE_TYPE (arg));
5213 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5214 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5217 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5218 Return NULL is a normal call should be emitted rather than expanding the
5219 function inline. If convenient, the result should be placed in TARGET.
5220 SUBTARGET may be used as the target for computing the operand. */
5223 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5228 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5231 arg = CALL_EXPR_ARG (exp, 0);
5232 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5234 arg = CALL_EXPR_ARG (exp, 1);
5235 op1 = expand_normal (arg);
5237 return expand_copysign (op0, op1, target);
5240 /* Create a new constant string literal and return a char* pointer to it.
5241 The STRING_CST value is the LEN characters at STR. */
5243 build_string_literal (int len, const char *str)
5245 tree t, elem, index, type;
5247 t = build_string (len, str);
5248 elem = build_type_variant (char_type_node, 1, 0);
5249 index = build_index_type (size_int (len - 1));
5250 type = build_array_type (elem, index);
5251 TREE_TYPE (t) = type;
5252 TREE_CONSTANT (t) = 1;
5253 TREE_READONLY (t) = 1;
5254 TREE_STATIC (t) = 1;
5256 type = build_pointer_type (elem);
5257 t = build1 (ADDR_EXPR, type,
5258 build4 (ARRAY_REF, elem,
5259 t, integer_zero_node, NULL_TREE, NULL_TREE));
5263 /* Expand EXP, a call to printf or printf_unlocked.
5264 Return NULL_RTX if a normal call should be emitted rather than transforming
5265 the function inline. If convenient, the result should be placed in
5266 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5269 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5272 /* If we're using an unlocked function, assume the other unlocked
5273 functions exist explicitly. */
5274 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5275 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5276 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5277 : implicit_built_in_decls[BUILT_IN_PUTS];
5278 const char *fmt_str;
5281 int nargs = call_expr_nargs (exp);
5283 /* If the return value is used, don't do the transformation. */
5284 if (target != const0_rtx)
5287 /* Verify the required arguments in the original call. */
5290 fmt = CALL_EXPR_ARG (exp, 0);
5291 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5294 /* Check whether the format is a literal string constant. */
5295 fmt_str = c_getstr (fmt);
5296 if (fmt_str == NULL)
5299 if (!init_target_chars ())
5302 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5303 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5306 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5309 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5311 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5312 else if (strcmp (fmt_str, target_percent_c) == 0)
5315 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5318 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5322 /* We can't handle anything else with % args or %% ... yet. */
5323 if (strchr (fmt_str, target_percent))
5329 /* If the format specifier was "", printf does nothing. */
5330 if (fmt_str[0] == '\0')
5332 /* If the format specifier has length of 1, call putchar. */
5333 if (fmt_str[1] == '\0')
5335 /* Given printf("c"), (where c is any one character,)
5336 convert "c"[0] to an int and pass that to the replacement
5338 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5340 fn = build_call_expr (fn_putchar, 1, arg);
5344 /* If the format specifier was "string\n", call puts("string"). */
5345 size_t len = strlen (fmt_str);
5346 if ((unsigned char)fmt_str[len - 1] == target_newline)
5348 /* Create a NUL-terminated string that's one char shorter
5349 than the original, stripping off the trailing '\n'. */
5350 char *newstr = alloca (len);
5351 memcpy (newstr, fmt_str, len - 1);
5352 newstr[len - 1] = 0;
5353 arg = build_string_literal (len, newstr);
5355 fn = build_call_expr (fn_puts, 1, arg);
5358 /* We'd like to arrange to call fputs(string,stdout) here,
5359 but we need stdout and don't have a way to get it yet. */
5366 if (TREE_CODE (fn) == CALL_EXPR)
5367 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5368 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5371 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5372 Return NULL_RTX if a normal call should be emitted rather than transforming
5373 the function inline. If convenient, the result should be placed in
5374 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5377 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5380 /* If we're using an unlocked function, assume the other unlocked
5381 functions exist explicitly. */
5382 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5383 : implicit_built_in_decls[BUILT_IN_FPUTC];
5384 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5385 : implicit_built_in_decls[BUILT_IN_FPUTS];
5386 const char *fmt_str;
5389 int nargs = call_expr_nargs (exp);
5391 /* If the return value is used, don't do the transformation. */
5392 if (target != const0_rtx)
5395 /* Verify the required arguments in the original call. */
5398 fp = CALL_EXPR_ARG (exp, 0);
5399 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5401 fmt = CALL_EXPR_ARG (exp, 1);
5402 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5405 /* Check whether the format is a literal string constant. */
5406 fmt_str = c_getstr (fmt);
5407 if (fmt_str == NULL)
5410 if (!init_target_chars ())
5413 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5414 if (strcmp (fmt_str, target_percent_s) == 0)
5417 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5419 arg = CALL_EXPR_ARG (exp, 2);
5421 fn = build_call_expr (fn_fputs, 2, arg, fp);
5423 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5424 else if (strcmp (fmt_str, target_percent_c) == 0)
5427 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5429 arg = CALL_EXPR_ARG (exp, 2);
5431 fn = build_call_expr (fn_fputc, 2, arg, fp);
5435 /* We can't handle anything else with % args or %% ... yet. */
5436 if (strchr (fmt_str, target_percent))
5442 /* If the format specifier was "", fprintf does nothing. */
5443 if (fmt_str[0] == '\0')
5445 /* Evaluate and ignore FILE* argument for side-effects. */
5446 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5450 /* When "string" doesn't contain %, replace all cases of
5451 fprintf(stream,string) with fputs(string,stream). The fputs
5452 builtin will take care of special cases like length == 1. */
5454 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5459 if (TREE_CODE (fn) == CALL_EXPR)
5460 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5461 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5464 /* Expand a call EXP to sprintf. Return NULL_RTX if
5465 a normal call should be emitted rather than expanding the function
5466 inline. If convenient, the result should be placed in TARGET with
5470 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5473 const char *fmt_str;
5474 int nargs = call_expr_nargs (exp);
5476 /* Verify the required arguments in the original call. */
5479 dest = CALL_EXPR_ARG (exp, 0);
5480 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5482 fmt = CALL_EXPR_ARG (exp, 0);
5483 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5486 /* Check whether the format is a literal string constant. */
5487 fmt_str = c_getstr (fmt);
5488 if (fmt_str == NULL)
5491 if (!init_target_chars ())
5494 /* If the format doesn't contain % args or %%, use strcpy. */
5495 if (strchr (fmt_str, target_percent) == 0)
5497 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5500 if ((nargs > 2) || ! fn)
5502 expand_expr (build_call_expr (fn, 2, dest, fmt),
5503 const0_rtx, VOIDmode, EXPAND_NORMAL);
5504 if (target == const0_rtx)
5506 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5507 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5509 /* If the format is "%s", use strcpy if the result isn't used. */
5510 else if (strcmp (fmt_str, target_percent_s) == 0)
5513 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5519 arg = CALL_EXPR_ARG (exp, 2);
5520 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5523 if (target != const0_rtx)
5525 len = c_strlen (arg, 1);
5526 if (! len || TREE_CODE (len) != INTEGER_CST)
5532 expand_expr (build_call_expr (fn, 2, dest, arg),
5533 const0_rtx, VOIDmode, EXPAND_NORMAL);
5535 if (target == const0_rtx)
5537 return expand_expr (len, target, mode, EXPAND_NORMAL);
5543 /* Expand a call to either the entry or exit function profiler. */
5546 expand_builtin_profile_func (bool exitp)
5550 this = DECL_RTL (current_function_decl);
5551 gcc_assert (MEM_P (this));
5552 this = XEXP (this, 0);
5555 which = profile_function_exit_libfunc;
5557 which = profile_function_entry_libfunc;
5559 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5560 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5567 /* Expand a call to __builtin___clear_cache. */
5570 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5572 #ifndef HAVE_clear_cache
5573 #ifdef CLEAR_INSN_CACHE
5574 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5575 does something. Just do the default expansion to a call to
5579 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5580 does nothing. There is no need to call it. Do nothing. */
5582 #endif /* CLEAR_INSN_CACHE */
5584 /* We have a "clear_cache" insn, and it will handle everything. */
5586 rtx begin_rtx, end_rtx;
5587 enum insn_code icode;
5589 /* We must not expand to a library call. If we did, any
5590 fallback library function in libgcc that might contain a call to
5591 __builtin___clear_cache() would recurse infinitely. */
5592 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5594 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5598 if (HAVE_clear_cache)
5600 icode = CODE_FOR_clear_cache;
5602 begin = CALL_EXPR_ARG (exp, 0);
5603 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5604 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5605 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5606 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5608 end = CALL_EXPR_ARG (exp, 1);
5609 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5610 end_rtx = convert_memory_address (Pmode, end_rtx);
5611 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5612 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5614 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5617 #endif /* HAVE_clear_cache */
5620 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5623 round_trampoline_addr (rtx tramp)
5625 rtx temp, addend, mask;
5627 /* If we don't need too much alignment, we'll have been guaranteed
5628 proper alignment by get_trampoline_type. */
5629 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5632 /* Round address up to desired boundary. */
5633 temp = gen_reg_rtx (Pmode);
5634 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5635 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5637 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5638 temp, 0, OPTAB_LIB_WIDEN);
5639 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5640 temp, 0, OPTAB_LIB_WIDEN);
5646 expand_builtin_init_trampoline (tree exp)
5648 tree t_tramp, t_func, t_chain;
5649 rtx r_tramp, r_func, r_chain;
5650 #ifdef TRAMPOLINE_TEMPLATE
5654 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5655 POINTER_TYPE, VOID_TYPE))
5658 t_tramp = CALL_EXPR_ARG (exp, 0);
5659 t_func = CALL_EXPR_ARG (exp, 1);
5660 t_chain = CALL_EXPR_ARG (exp, 2);
5662 r_tramp = expand_normal (t_tramp);
5663 r_func = expand_normal (t_func);
5664 r_chain = expand_normal (t_chain);
5666 /* Generate insns to initialize the trampoline. */
5667 r_tramp = round_trampoline_addr (r_tramp);
5668 #ifdef TRAMPOLINE_TEMPLATE
5669 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5670 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5671 emit_block_move (blktramp, assemble_trampoline_template (),
5672 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5674 trampolines_created = 1;
5675 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5681 expand_builtin_adjust_trampoline (tree exp)
5685 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5688 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5689 tramp = round_trampoline_addr (tramp);
5690 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5691 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5697 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5698 function. The function first checks whether the back end provides
5699 an insn to implement signbit for the respective mode. If not, it
5700 checks whether the floating point format of the value is such that
5701 the sign bit can be extracted. If that is not the case, the
5702 function returns NULL_RTX to indicate that a normal call should be
5703 emitted rather than expanding the function in-line. EXP is the
5704 expression that is a call to the builtin function; if convenient,
5705 the result should be placed in TARGET. */
5707 expand_builtin_signbit (tree exp, rtx target)
5709 const struct real_format *fmt;
5710 enum machine_mode fmode, imode, rmode;
5711 HOST_WIDE_INT hi, lo;
5714 enum insn_code icode;
5717 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5720 arg = CALL_EXPR_ARG (exp, 0);
5721 fmode = TYPE_MODE (TREE_TYPE (arg));
5722 rmode = TYPE_MODE (TREE_TYPE (exp));
5723 fmt = REAL_MODE_FORMAT (fmode);
5725 arg = builtin_save_expr (arg);
5727 /* Expand the argument yielding a RTX expression. */
5728 temp = expand_normal (arg);
5730 /* Check if the back end provides an insn that handles signbit for the
5732 icode = signbit_optab->handlers [(int) fmode].insn_code;
5733 if (icode != CODE_FOR_nothing)
5735 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5736 emit_unop_insn (icode, target, temp, UNKNOWN);
5740 /* For floating point formats without a sign bit, implement signbit
5742 bitpos = fmt->signbit_ro;
5745 /* But we can't do this if the format supports signed zero. */
5746 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5749 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5750 build_real (TREE_TYPE (arg), dconst0));
5751 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5754 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5756 imode = int_mode_for_mode (fmode);
5757 if (imode == BLKmode)
5759 temp = gen_lowpart (imode, temp);
5764 /* Handle targets with different FP word orders. */
5765 if (FLOAT_WORDS_BIG_ENDIAN)
5766 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5768 word = bitpos / BITS_PER_WORD;
5769 temp = operand_subword_force (temp, word, fmode);
5770 bitpos = bitpos % BITS_PER_WORD;
5773 /* Force the intermediate word_mode (or narrower) result into a
5774 register. This avoids attempting to create paradoxical SUBREGs
5775 of floating point modes below. */
5776 temp = force_reg (imode, temp);
5778 /* If the bitpos is within the "result mode" lowpart, the operation
5779 can be implement with a single bitwise AND. Otherwise, we need
5780 a right shift and an AND. */
5782 if (bitpos < GET_MODE_BITSIZE (rmode))
5784 if (bitpos < HOST_BITS_PER_WIDE_INT)
5787 lo = (HOST_WIDE_INT) 1 << bitpos;
5791 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5795 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5796 temp = gen_lowpart (rmode, temp);
5797 temp = expand_binop (rmode, and_optab, temp,
5798 immed_double_const (lo, hi, rmode),
5799 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5803 /* Perform a logical right shift to place the signbit in the least
5804 significant bit, then truncate the result to the desired mode
5805 and mask just this bit. */
5806 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5807 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5808 temp = gen_lowpart (rmode, temp);
5809 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5810 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5816 /* Expand fork or exec calls. TARGET is the desired target of the
5817 call. EXP is the call. FN is the
5818 identificator of the actual function. IGNORE is nonzero if the
5819 value is to be ignored. */
5822 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5827 /* If we are not profiling, just call the function. */
5828 if (!profile_arc_flag)
5831 /* Otherwise call the wrapper. This should be equivalent for the rest of
5832 compiler, so the code does not diverge, and the wrapper may run the
5833 code necessary for keeping the profiling sane. */
5835 switch (DECL_FUNCTION_CODE (fn))
5838 id = get_identifier ("__gcov_fork");
5841 case BUILT_IN_EXECL:
5842 id = get_identifier ("__gcov_execl");
5845 case BUILT_IN_EXECV:
5846 id = get_identifier ("__gcov_execv");
5849 case BUILT_IN_EXECLP:
5850 id = get_identifier ("__gcov_execlp");
5853 case BUILT_IN_EXECLE:
5854 id = get_identifier ("__gcov_execle");
5857 case BUILT_IN_EXECVP:
5858 id = get_identifier ("__gcov_execvp");
5861 case BUILT_IN_EXECVE:
5862 id = get_identifier ("__gcov_execve");
5869 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5870 DECL_EXTERNAL (decl) = 1;
5871 TREE_PUBLIC (decl) = 1;
5872 DECL_ARTIFICIAL (decl) = 1;
5873 TREE_NOTHROW (decl) = 1;
5874 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5875 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5876 call = rewrite_call_expr (exp, 0, decl, 0);
5877 return expand_call (call, target, ignore);
5882 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5883 the pointer in these functions is void*, the tree optimizers may remove
5884 casts. The mode computed in expand_builtin isn't reliable either, due
5885 to __sync_bool_compare_and_swap.
5887 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5888 group of builtins. This gives us log2 of the mode size. */
5890 static inline enum machine_mode
5891 get_builtin_sync_mode (int fcode_diff)
5893 /* The size is not negotiable, so ask not to get BLKmode in return
5894 if the target indicates that a smaller size would be better. */
5895 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5898 /* Expand the memory expression LOC and return the appropriate memory operand
5899 for the builtin_sync operations. */
5902 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5906 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5908 /* Note that we explicitly do not want any alias information for this
5909 memory, so that we kill all other live memories. Otherwise we don't
5910 satisfy the full barrier semantics of the intrinsic. */
5911 mem = validize_mem (gen_rtx_MEM (mode, addr));
5913 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5914 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5915 MEM_VOLATILE_P (mem) = 1;
5920 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5921 EXP is the CALL_EXPR. CODE is the rtx code
5922 that corresponds to the arithmetic or logical operation from the name;
5923 an exception here is that NOT actually means NAND. TARGET is an optional
5924 place for us to store the results; AFTER is true if this is the
5925 fetch_and_xxx form. IGNORE is true if we don't actually care about
5926 the result of the operation at all. */
5929 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5930 enum rtx_code code, bool after,
5931 rtx target, bool ignore)
5934 enum machine_mode old_mode;
5936 /* Expand the operands. */
5937 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5939 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5940 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5941 of CONST_INTs, where we know the old_mode only from the call argument. */
5942 old_mode = GET_MODE (val);
5943 if (old_mode == VOIDmode)
5944 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5945 val = convert_modes (mode, old_mode, val, 1);
5948 return expand_sync_operation (mem, val, code);
5950 return expand_sync_fetch_operation (mem, val, code, after, target);
5953 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5954 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5955 true if this is the boolean form. TARGET is a place for us to store the
5956 results; this is NOT optional if IS_BOOL is true. */
5959 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5960 bool is_bool, rtx target)
5962 rtx old_val, new_val, mem;
5963 enum machine_mode old_mode;
5965 /* Expand the operands. */
5966 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5969 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5970 mode, EXPAND_NORMAL);
5971 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5972 of CONST_INTs, where we know the old_mode only from the call argument. */
5973 old_mode = GET_MODE (old_val);
5974 if (old_mode == VOIDmode)
5975 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5976 old_val = convert_modes (mode, old_mode, old_val, 1);
5978 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5979 mode, EXPAND_NORMAL);
5980 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5981 of CONST_INTs, where we know the old_mode only from the call argument. */
5982 old_mode = GET_MODE (new_val);
5983 if (old_mode == VOIDmode)
5984 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5985 new_val = convert_modes (mode, old_mode, new_val, 1);
5988 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5990 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5993 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5994 general form is actually an atomic exchange, and some targets only
5995 support a reduced form with the second argument being a constant 1.
5996 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6000 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6004 enum machine_mode old_mode;
6006 /* Expand the operands. */
6007 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6008 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6009 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6010 of CONST_INTs, where we know the old_mode only from the call argument. */
6011 old_mode = GET_MODE (val);
6012 if (old_mode == VOIDmode)
6013 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6014 val = convert_modes (mode, old_mode, val, 1);
6016 return expand_sync_lock_test_and_set (mem, val, target);
6019 /* Expand the __sync_synchronize intrinsic. */
6022 expand_builtin_synchronize (void)
6026 #ifdef HAVE_memory_barrier
6027 if (HAVE_memory_barrier)
6029 emit_insn (gen_memory_barrier ());
6034 /* If no explicit memory barrier instruction is available, create an
6035 empty asm stmt with a memory clobber. */
6036 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6037 tree_cons (NULL, build_string (6, "memory"), NULL));
6038 ASM_VOLATILE_P (x) = 1;
6039 expand_asm_expr (x);
6042 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6045 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6047 enum insn_code icode;
6049 rtx val = const0_rtx;
6051 /* Expand the operands. */
6052 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6054 /* If there is an explicit operation in the md file, use it. */
6055 icode = sync_lock_release[mode];
6056 if (icode != CODE_FOR_nothing)
6058 if (!insn_data[icode].operand[1].predicate (val, mode))
6059 val = force_reg (mode, val);
6061 insn = GEN_FCN (icode) (mem, val);
6069 /* Otherwise we can implement this operation by emitting a barrier
6070 followed by a store of zero. */
6071 expand_builtin_synchronize ();
6072 emit_move_insn (mem, val);
6075 /* Expand an expression EXP that calls a built-in function,
6076 with result going to TARGET if that's convenient
6077 (and in mode MODE if that's convenient).
6078 SUBTARGET may be used as the target for computing one of EXP's operands.
6079 IGNORE is nonzero if the value is to be ignored. */
6082 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6085 tree fndecl = get_callee_fndecl (exp);
6086 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6087 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6089 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6090 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6092 /* When not optimizing, generate calls to library functions for a certain
6095 && !called_as_built_in (fndecl)
6096 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6097 && fcode != BUILT_IN_ALLOCA)
6098 return expand_call (exp, target, ignore);
6100 /* The built-in function expanders test for target == const0_rtx
6101 to determine whether the function's result will be ignored. */
6103 target = const0_rtx;
6105 /* If the result of a pure or const built-in function is ignored, and
6106 none of its arguments are volatile, we can avoid expanding the
6107 built-in call and just evaluate the arguments for side-effects. */
6108 if (target == const0_rtx
6109 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6111 bool volatilep = false;
6113 call_expr_arg_iterator iter;
6115 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6116 if (TREE_THIS_VOLATILE (arg))
6124 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6125 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6132 CASE_FLT_FN (BUILT_IN_FABS):
6133 target = expand_builtin_fabs (exp, target, subtarget);
6138 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6139 target = expand_builtin_copysign (exp, target, subtarget);
6144 /* Just do a normal library call if we were unable to fold
6146 CASE_FLT_FN (BUILT_IN_CABS):
6149 CASE_FLT_FN (BUILT_IN_EXP):
6150 CASE_FLT_FN (BUILT_IN_EXP10):
6151 CASE_FLT_FN (BUILT_IN_POW10):
6152 CASE_FLT_FN (BUILT_IN_EXP2):
6153 CASE_FLT_FN (BUILT_IN_EXPM1):
6154 CASE_FLT_FN (BUILT_IN_LOGB):
6155 CASE_FLT_FN (BUILT_IN_LOG):
6156 CASE_FLT_FN (BUILT_IN_LOG10):
6157 CASE_FLT_FN (BUILT_IN_LOG2):
6158 CASE_FLT_FN (BUILT_IN_LOG1P):
6159 CASE_FLT_FN (BUILT_IN_TAN):
6160 CASE_FLT_FN (BUILT_IN_ASIN):
6161 CASE_FLT_FN (BUILT_IN_ACOS):
6162 CASE_FLT_FN (BUILT_IN_ATAN):
6163 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6164 because of possible accuracy problems. */
6165 if (! flag_unsafe_math_optimizations)
6167 CASE_FLT_FN (BUILT_IN_SQRT):
6168 CASE_FLT_FN (BUILT_IN_FLOOR):
6169 CASE_FLT_FN (BUILT_IN_CEIL):
6170 CASE_FLT_FN (BUILT_IN_TRUNC):
6171 CASE_FLT_FN (BUILT_IN_ROUND):
6172 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6173 CASE_FLT_FN (BUILT_IN_RINT):
6174 target = expand_builtin_mathfn (exp, target, subtarget);
6179 CASE_FLT_FN (BUILT_IN_ILOGB):
6180 if (! flag_unsafe_math_optimizations)
6182 CASE_FLT_FN (BUILT_IN_ISINF):
6183 CASE_FLT_FN (BUILT_IN_FINITE):
6184 case BUILT_IN_ISFINITE:
6185 case BUILT_IN_ISNORMAL:
6186 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6191 CASE_FLT_FN (BUILT_IN_LCEIL):
6192 CASE_FLT_FN (BUILT_IN_LLCEIL):
6193 CASE_FLT_FN (BUILT_IN_LFLOOR):
6194 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6195 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6200 CASE_FLT_FN (BUILT_IN_LRINT):
6201 CASE_FLT_FN (BUILT_IN_LLRINT):
6202 CASE_FLT_FN (BUILT_IN_LROUND):
6203 CASE_FLT_FN (BUILT_IN_LLROUND):
6204 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6209 CASE_FLT_FN (BUILT_IN_POW):
6210 target = expand_builtin_pow (exp, target, subtarget);
6215 CASE_FLT_FN (BUILT_IN_POWI):
6216 target = expand_builtin_powi (exp, target, subtarget);
6221 CASE_FLT_FN (BUILT_IN_ATAN2):
6222 CASE_FLT_FN (BUILT_IN_LDEXP):
6223 CASE_FLT_FN (BUILT_IN_SCALB):
6224 CASE_FLT_FN (BUILT_IN_SCALBN):
6225 CASE_FLT_FN (BUILT_IN_SCALBLN):
6226 if (! flag_unsafe_math_optimizations)
6229 CASE_FLT_FN (BUILT_IN_FMOD):
6230 CASE_FLT_FN (BUILT_IN_REMAINDER):
6231 CASE_FLT_FN (BUILT_IN_DREM):
6232 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6237 CASE_FLT_FN (BUILT_IN_CEXPI):
6238 target = expand_builtin_cexpi (exp, target, subtarget);
6239 gcc_assert (target);
6242 CASE_FLT_FN (BUILT_IN_SIN):
6243 CASE_FLT_FN (BUILT_IN_COS):
6244 if (! flag_unsafe_math_optimizations)
6246 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6251 CASE_FLT_FN (BUILT_IN_SINCOS):
6252 if (! flag_unsafe_math_optimizations)
6254 target = expand_builtin_sincos (exp);
6259 case BUILT_IN_APPLY_ARGS:
6260 return expand_builtin_apply_args ();
6262 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6263 FUNCTION with a copy of the parameters described by
6264 ARGUMENTS, and ARGSIZE. It returns a block of memory
6265 allocated on the stack into which is stored all the registers
6266 that might possibly be used for returning the result of a
6267 function. ARGUMENTS is the value returned by
6268 __builtin_apply_args. ARGSIZE is the number of bytes of
6269 arguments that must be copied. ??? How should this value be
6270 computed? We'll also need a safe worst case value for varargs
6272 case BUILT_IN_APPLY:
6273 if (!validate_arglist (exp, POINTER_TYPE,
6274 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6275 && !validate_arglist (exp, REFERENCE_TYPE,
6276 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6282 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6283 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6284 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6286 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6289 /* __builtin_return (RESULT) causes the function to return the
6290 value described by RESULT. RESULT is address of the block of
6291 memory returned by __builtin_apply. */
6292 case BUILT_IN_RETURN:
6293 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6294 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6297 case BUILT_IN_SAVEREGS:
6298 return expand_builtin_saveregs ();
6300 case BUILT_IN_ARGS_INFO:
6301 return expand_builtin_args_info (exp);
6303 case BUILT_IN_VA_ARG_PACK:
6304 /* All valid uses of __builtin_va_arg_pack () are removed during
6306 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6309 case BUILT_IN_VA_ARG_PACK_LEN:
6310 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6312 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6315 /* Return the address of the first anonymous stack arg. */
6316 case BUILT_IN_NEXT_ARG:
6317 if (fold_builtin_next_arg (exp, false))
6319 return expand_builtin_next_arg ();
6321 case BUILT_IN_CLEAR_CACHE:
6322 target = expand_builtin___clear_cache (exp);
6327 case BUILT_IN_CLASSIFY_TYPE:
6328 return expand_builtin_classify_type (exp);
6330 case BUILT_IN_CONSTANT_P:
6333 case BUILT_IN_FRAME_ADDRESS:
6334 case BUILT_IN_RETURN_ADDRESS:
6335 return expand_builtin_frame_address (fndecl, exp);
6337 /* Returns the address of the area where the structure is returned.
6339 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6340 if (call_expr_nargs (exp) != 0
6341 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6342 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6345 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6347 case BUILT_IN_ALLOCA:
6348 target = expand_builtin_alloca (exp, target);
6353 case BUILT_IN_STACK_SAVE:
6354 return expand_stack_save ();
6356 case BUILT_IN_STACK_RESTORE:
6357 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6360 case BUILT_IN_BSWAP32:
6361 case BUILT_IN_BSWAP64:
6362 target = expand_builtin_bswap (exp, target, subtarget);
6368 CASE_INT_FN (BUILT_IN_FFS):
6369 case BUILT_IN_FFSIMAX:
6370 target = expand_builtin_unop (target_mode, exp, target,
6371 subtarget, ffs_optab);
6376 CASE_INT_FN (BUILT_IN_CLZ):
6377 case BUILT_IN_CLZIMAX:
6378 target = expand_builtin_unop (target_mode, exp, target,
6379 subtarget, clz_optab);
6384 CASE_INT_FN (BUILT_IN_CTZ):
6385 case BUILT_IN_CTZIMAX:
6386 target = expand_builtin_unop (target_mode, exp, target,
6387 subtarget, ctz_optab);
6392 CASE_INT_FN (BUILT_IN_POPCOUNT):
6393 case BUILT_IN_POPCOUNTIMAX:
6394 target = expand_builtin_unop (target_mode, exp, target,
6395 subtarget, popcount_optab);
6400 CASE_INT_FN (BUILT_IN_PARITY):
6401 case BUILT_IN_PARITYIMAX:
6402 target = expand_builtin_unop (target_mode, exp, target,
6403 subtarget, parity_optab);
6408 case BUILT_IN_STRLEN:
6409 target = expand_builtin_strlen (exp, target, target_mode);
6414 case BUILT_IN_STRCPY:
6415 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6420 case BUILT_IN_STRNCPY:
6421 target = expand_builtin_strncpy (exp, target, mode);
6426 case BUILT_IN_STPCPY:
6427 target = expand_builtin_stpcpy (exp, target, mode);
6432 case BUILT_IN_STRCAT:
6433 target = expand_builtin_strcat (fndecl, exp, target, mode);
6438 case BUILT_IN_STRNCAT:
6439 target = expand_builtin_strncat (exp, target, mode);
6444 case BUILT_IN_STRSPN:
6445 target = expand_builtin_strspn (exp, target, mode);
6450 case BUILT_IN_STRCSPN:
6451 target = expand_builtin_strcspn (exp, target, mode);
6456 case BUILT_IN_STRSTR:
6457 target = expand_builtin_strstr (exp, target, mode);
6462 case BUILT_IN_STRPBRK:
6463 target = expand_builtin_strpbrk (exp, target, mode);
6468 case BUILT_IN_INDEX:
6469 case BUILT_IN_STRCHR:
6470 target = expand_builtin_strchr (exp, target, mode);
6475 case BUILT_IN_RINDEX:
6476 case BUILT_IN_STRRCHR:
6477 target = expand_builtin_strrchr (exp, target, mode);
6482 case BUILT_IN_MEMCPY:
6483 target = expand_builtin_memcpy (exp, target, mode);
6488 case BUILT_IN_MEMPCPY:
6489 target = expand_builtin_mempcpy (exp, target, mode);
6494 case BUILT_IN_MEMMOVE:
6495 target = expand_builtin_memmove (exp, target, mode, ignore);
6500 case BUILT_IN_BCOPY:
6501 target = expand_builtin_bcopy (exp, ignore);
6506 case BUILT_IN_MEMSET:
6507 target = expand_builtin_memset (exp, target, mode);
6512 case BUILT_IN_BZERO:
6513 target = expand_builtin_bzero (exp);
6518 case BUILT_IN_STRCMP:
6519 target = expand_builtin_strcmp (exp, target, mode);
6524 case BUILT_IN_STRNCMP:
6525 target = expand_builtin_strncmp (exp, target, mode);
6530 case BUILT_IN_MEMCHR:
6531 target = expand_builtin_memchr (exp, target, mode);
6537 case BUILT_IN_MEMCMP:
6538 target = expand_builtin_memcmp (exp, target, mode);
6543 case BUILT_IN_SETJMP:
6544 /* This should have been lowered to the builtins below. */
6547 case BUILT_IN_SETJMP_SETUP:
6548 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6549 and the receiver label. */
6550 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6552 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6553 VOIDmode, EXPAND_NORMAL);
6554 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6555 rtx label_r = label_rtx (label);
6557 /* This is copied from the handling of non-local gotos. */
6558 expand_builtin_setjmp_setup (buf_addr, label_r);
6559 nonlocal_goto_handler_labels
6560 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6561 nonlocal_goto_handler_labels);
6562 /* ??? Do not let expand_label treat us as such since we would
6563 not want to be both on the list of non-local labels and on
6564 the list of forced labels. */
6565 FORCED_LABEL (label) = 0;
6570 case BUILT_IN_SETJMP_DISPATCHER:
6571 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6572 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6574 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6575 rtx label_r = label_rtx (label);
6577 /* Remove the dispatcher label from the list of non-local labels
6578 since the receiver labels have been added to it above. */
6579 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6584 case BUILT_IN_SETJMP_RECEIVER:
6585 /* __builtin_setjmp_receiver is passed the receiver label. */
6586 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6588 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6589 rtx label_r = label_rtx (label);
6591 expand_builtin_setjmp_receiver (label_r);
6596 /* __builtin_longjmp is passed a pointer to an array of five words.
6597 It's similar to the C library longjmp function but works with
6598 __builtin_setjmp above. */
6599 case BUILT_IN_LONGJMP:
6600 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6602 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6603 VOIDmode, EXPAND_NORMAL);
6604 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6606 if (value != const1_rtx)
6608 error ("%<__builtin_longjmp%> second argument must be 1");
6612 expand_builtin_longjmp (buf_addr, value);
6617 case BUILT_IN_NONLOCAL_GOTO:
6618 target = expand_builtin_nonlocal_goto (exp);
6623 /* This updates the setjmp buffer that is its argument with the value
6624 of the current stack pointer. */
6625 case BUILT_IN_UPDATE_SETJMP_BUF:
6626 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6629 = expand_normal (CALL_EXPR_ARG (exp, 0));
6631 expand_builtin_update_setjmp_buf (buf_addr);
6637 expand_builtin_trap ();
6640 case BUILT_IN_PRINTF:
6641 target = expand_builtin_printf (exp, target, mode, false);
6646 case BUILT_IN_PRINTF_UNLOCKED:
6647 target = expand_builtin_printf (exp, target, mode, true);
6652 case BUILT_IN_FPUTS:
6653 target = expand_builtin_fputs (exp, target, false);
6657 case BUILT_IN_FPUTS_UNLOCKED:
6658 target = expand_builtin_fputs (exp, target, true);
6663 case BUILT_IN_FPRINTF:
6664 target = expand_builtin_fprintf (exp, target, mode, false);
6669 case BUILT_IN_FPRINTF_UNLOCKED:
6670 target = expand_builtin_fprintf (exp, target, mode, true);
6675 case BUILT_IN_SPRINTF:
6676 target = expand_builtin_sprintf (exp, target, mode);
6681 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6682 case BUILT_IN_SIGNBITD32:
6683 case BUILT_IN_SIGNBITD64:
6684 case BUILT_IN_SIGNBITD128:
6685 target = expand_builtin_signbit (exp, target);
6690 /* Various hooks for the DWARF 2 __throw routine. */
6691 case BUILT_IN_UNWIND_INIT:
6692 expand_builtin_unwind_init ();
6694 case BUILT_IN_DWARF_CFA:
6695 return virtual_cfa_rtx;
6696 #ifdef DWARF2_UNWIND_INFO
6697 case BUILT_IN_DWARF_SP_COLUMN:
6698 return expand_builtin_dwarf_sp_column ();
6699 case BUILT_IN_INIT_DWARF_REG_SIZES:
6700 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6703 case BUILT_IN_FROB_RETURN_ADDR:
6704 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6705 case BUILT_IN_EXTRACT_RETURN_ADDR:
6706 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6707 case BUILT_IN_EH_RETURN:
6708 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6709 CALL_EXPR_ARG (exp, 1));
6711 #ifdef EH_RETURN_DATA_REGNO
6712 case BUILT_IN_EH_RETURN_DATA_REGNO:
6713 return expand_builtin_eh_return_data_regno (exp);
6715 case BUILT_IN_EXTEND_POINTER:
6716 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6718 case BUILT_IN_VA_START:
6719 return expand_builtin_va_start (exp);
6720 case BUILT_IN_VA_END:
6721 return expand_builtin_va_end (exp);
6722 case BUILT_IN_VA_COPY:
6723 return expand_builtin_va_copy (exp);
6724 case BUILT_IN_EXPECT:
6725 return expand_builtin_expect (exp, target);
6726 case BUILT_IN_PREFETCH:
6727 expand_builtin_prefetch (exp);
6730 case BUILT_IN_PROFILE_FUNC_ENTER:
6731 return expand_builtin_profile_func (false);
6732 case BUILT_IN_PROFILE_FUNC_EXIT:
6733 return expand_builtin_profile_func (true);
6735 case BUILT_IN_INIT_TRAMPOLINE:
6736 return expand_builtin_init_trampoline (exp);
6737 case BUILT_IN_ADJUST_TRAMPOLINE:
6738 return expand_builtin_adjust_trampoline (exp);
6741 case BUILT_IN_EXECL:
6742 case BUILT_IN_EXECV:
6743 case BUILT_IN_EXECLP:
6744 case BUILT_IN_EXECLE:
6745 case BUILT_IN_EXECVP:
6746 case BUILT_IN_EXECVE:
6747 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6752 case BUILT_IN_FETCH_AND_ADD_1:
6753 case BUILT_IN_FETCH_AND_ADD_2:
6754 case BUILT_IN_FETCH_AND_ADD_4:
6755 case BUILT_IN_FETCH_AND_ADD_8:
6756 case BUILT_IN_FETCH_AND_ADD_16:
6757 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6758 target = expand_builtin_sync_operation (mode, exp, PLUS,
6759 false, target, ignore);
6764 case BUILT_IN_FETCH_AND_SUB_1:
6765 case BUILT_IN_FETCH_AND_SUB_2:
6766 case BUILT_IN_FETCH_AND_SUB_4:
6767 case BUILT_IN_FETCH_AND_SUB_8:
6768 case BUILT_IN_FETCH_AND_SUB_16:
6769 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6770 target = expand_builtin_sync_operation (mode, exp, MINUS,
6771 false, target, ignore);
6776 case BUILT_IN_FETCH_AND_OR_1:
6777 case BUILT_IN_FETCH_AND_OR_2:
6778 case BUILT_IN_FETCH_AND_OR_4:
6779 case BUILT_IN_FETCH_AND_OR_8:
6780 case BUILT_IN_FETCH_AND_OR_16:
6781 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6782 target = expand_builtin_sync_operation (mode, exp, IOR,
6783 false, target, ignore);
6788 case BUILT_IN_FETCH_AND_AND_1:
6789 case BUILT_IN_FETCH_AND_AND_2:
6790 case BUILT_IN_FETCH_AND_AND_4:
6791 case BUILT_IN_FETCH_AND_AND_8:
6792 case BUILT_IN_FETCH_AND_AND_16:
6793 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6794 target = expand_builtin_sync_operation (mode, exp, AND,
6795 false, target, ignore);
6800 case BUILT_IN_FETCH_AND_XOR_1:
6801 case BUILT_IN_FETCH_AND_XOR_2:
6802 case BUILT_IN_FETCH_AND_XOR_4:
6803 case BUILT_IN_FETCH_AND_XOR_8:
6804 case BUILT_IN_FETCH_AND_XOR_16:
6805 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6806 target = expand_builtin_sync_operation (mode, exp, XOR,
6807 false, target, ignore);
6812 case BUILT_IN_FETCH_AND_NAND_1:
6813 case BUILT_IN_FETCH_AND_NAND_2:
6814 case BUILT_IN_FETCH_AND_NAND_4:
6815 case BUILT_IN_FETCH_AND_NAND_8:
6816 case BUILT_IN_FETCH_AND_NAND_16:
6817 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6818 target = expand_builtin_sync_operation (mode, exp, NOT,
6819 false, target, ignore);
6824 case BUILT_IN_ADD_AND_FETCH_1:
6825 case BUILT_IN_ADD_AND_FETCH_2:
6826 case BUILT_IN_ADD_AND_FETCH_4:
6827 case BUILT_IN_ADD_AND_FETCH_8:
6828 case BUILT_IN_ADD_AND_FETCH_16:
6829 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6830 target = expand_builtin_sync_operation (mode, exp, PLUS,
6831 true, target, ignore);
6836 case BUILT_IN_SUB_AND_FETCH_1:
6837 case BUILT_IN_SUB_AND_FETCH_2:
6838 case BUILT_IN_SUB_AND_FETCH_4:
6839 case BUILT_IN_SUB_AND_FETCH_8:
6840 case BUILT_IN_SUB_AND_FETCH_16:
6841 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6842 target = expand_builtin_sync_operation (mode, exp, MINUS,
6843 true, target, ignore);
6848 case BUILT_IN_OR_AND_FETCH_1:
6849 case BUILT_IN_OR_AND_FETCH_2:
6850 case BUILT_IN_OR_AND_FETCH_4:
6851 case BUILT_IN_OR_AND_FETCH_8:
6852 case BUILT_IN_OR_AND_FETCH_16:
6853 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6854 target = expand_builtin_sync_operation (mode, exp, IOR,
6855 true, target, ignore);
6860 case BUILT_IN_AND_AND_FETCH_1:
6861 case BUILT_IN_AND_AND_FETCH_2:
6862 case BUILT_IN_AND_AND_FETCH_4:
6863 case BUILT_IN_AND_AND_FETCH_8:
6864 case BUILT_IN_AND_AND_FETCH_16:
6865 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6866 target = expand_builtin_sync_operation (mode, exp, AND,
6867 true, target, ignore);
6872 case BUILT_IN_XOR_AND_FETCH_1:
6873 case BUILT_IN_XOR_AND_FETCH_2:
6874 case BUILT_IN_XOR_AND_FETCH_4:
6875 case BUILT_IN_XOR_AND_FETCH_8:
6876 case BUILT_IN_XOR_AND_FETCH_16:
6877 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6878 target = expand_builtin_sync_operation (mode, exp, XOR,
6879 true, target, ignore);
6884 case BUILT_IN_NAND_AND_FETCH_1:
6885 case BUILT_IN_NAND_AND_FETCH_2:
6886 case BUILT_IN_NAND_AND_FETCH_4:
6887 case BUILT_IN_NAND_AND_FETCH_8:
6888 case BUILT_IN_NAND_AND_FETCH_16:
6889 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6890 target = expand_builtin_sync_operation (mode, exp, NOT,
6891 true, target, ignore);
6896 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6897 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6898 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6899 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6900 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6901 if (mode == VOIDmode)
6902 mode = TYPE_MODE (boolean_type_node);
6903 if (!target || !register_operand (target, mode))
6904 target = gen_reg_rtx (mode);
6906 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6907 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6912 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6913 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6914 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6915 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6916 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6917 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6918 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6923 case BUILT_IN_LOCK_TEST_AND_SET_1:
6924 case BUILT_IN_LOCK_TEST_AND_SET_2:
6925 case BUILT_IN_LOCK_TEST_AND_SET_4:
6926 case BUILT_IN_LOCK_TEST_AND_SET_8:
6927 case BUILT_IN_LOCK_TEST_AND_SET_16:
6928 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6929 target = expand_builtin_lock_test_and_set (mode, exp, target);
6934 case BUILT_IN_LOCK_RELEASE_1:
6935 case BUILT_IN_LOCK_RELEASE_2:
6936 case BUILT_IN_LOCK_RELEASE_4:
6937 case BUILT_IN_LOCK_RELEASE_8:
6938 case BUILT_IN_LOCK_RELEASE_16:
6939 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6940 expand_builtin_lock_release (mode, exp);
6943 case BUILT_IN_SYNCHRONIZE:
6944 expand_builtin_synchronize ();
6947 case BUILT_IN_OBJECT_SIZE:
6948 return expand_builtin_object_size (exp);
6950 case BUILT_IN_MEMCPY_CHK:
6951 case BUILT_IN_MEMPCPY_CHK:
6952 case BUILT_IN_MEMMOVE_CHK:
6953 case BUILT_IN_MEMSET_CHK:
6954 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6959 case BUILT_IN_STRCPY_CHK:
6960 case BUILT_IN_STPCPY_CHK:
6961 case BUILT_IN_STRNCPY_CHK:
6962 case BUILT_IN_STRCAT_CHK:
6963 case BUILT_IN_STRNCAT_CHK:
6964 case BUILT_IN_SNPRINTF_CHK:
6965 case BUILT_IN_VSNPRINTF_CHK:
6966 maybe_emit_chk_warning (exp, fcode);
6969 case BUILT_IN_SPRINTF_CHK:
6970 case BUILT_IN_VSPRINTF_CHK:
6971 maybe_emit_sprintf_chk_warning (exp, fcode);
6974 default: /* just do library call, if unknown builtin */
6978 /* The switch statement above can drop through to cause the function
6979 to be called normally. */
6980 return expand_call (exp, target, ignore);
6983 /* Determine whether a tree node represents a call to a built-in
6984 function. If the tree T is a call to a built-in function with
6985 the right number of arguments of the appropriate types, return
6986 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6987 Otherwise the return value is END_BUILTINS. */
6989 enum built_in_function
6990 builtin_mathfn_code (const_tree t)
6992 const_tree fndecl, arg, parmlist;
6993 const_tree argtype, parmtype;
6994 const_call_expr_arg_iterator iter;
6996 if (TREE_CODE (t) != CALL_EXPR
6997 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6998 return END_BUILTINS;
7000 fndecl = get_callee_fndecl (t);
7001 if (fndecl == NULL_TREE
7002 || TREE_CODE (fndecl) != FUNCTION_DECL
7003 || ! DECL_BUILT_IN (fndecl)
7004 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7005 return END_BUILTINS;
7007 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7008 init_const_call_expr_arg_iterator (t, &iter);
7009 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7011 /* If a function doesn't take a variable number of arguments,
7012 the last element in the list will have type `void'. */
7013 parmtype = TREE_VALUE (parmlist);
7014 if (VOID_TYPE_P (parmtype))
7016 if (more_const_call_expr_args_p (&iter))
7017 return END_BUILTINS;
7018 return DECL_FUNCTION_CODE (fndecl);
7021 if (! more_const_call_expr_args_p (&iter))
7022 return END_BUILTINS;
7024 arg = next_const_call_expr_arg (&iter);
7025 argtype = TREE_TYPE (arg);
7027 if (SCALAR_FLOAT_TYPE_P (parmtype))
7029 if (! SCALAR_FLOAT_TYPE_P (argtype))
7030 return END_BUILTINS;
7032 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7034 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7035 return END_BUILTINS;
7037 else if (POINTER_TYPE_P (parmtype))
7039 if (! POINTER_TYPE_P (argtype))
7040 return END_BUILTINS;
7042 else if (INTEGRAL_TYPE_P (parmtype))
7044 if (! INTEGRAL_TYPE_P (argtype))
7045 return END_BUILTINS;
7048 return END_BUILTINS;
7051 /* Variable-length argument list. */
7052 return DECL_FUNCTION_CODE (fndecl);
7055 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7056 evaluate to a constant. */
7059 fold_builtin_constant_p (tree arg)
7061 /* We return 1 for a numeric type that's known to be a constant
7062 value at compile-time or for an aggregate type that's a
7063 literal constant. */
7066 /* If we know this is a constant, emit the constant of one. */
7067 if (CONSTANT_CLASS_P (arg)
7068 || (TREE_CODE (arg) == CONSTRUCTOR
7069 && TREE_CONSTANT (arg)))
7070 return integer_one_node;
7071 if (TREE_CODE (arg) == ADDR_EXPR)
7073 tree op = TREE_OPERAND (arg, 0);
7074 if (TREE_CODE (op) == STRING_CST
7075 || (TREE_CODE (op) == ARRAY_REF
7076 && integer_zerop (TREE_OPERAND (op, 1))
7077 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7078 return integer_one_node;
7081 /* If this expression has side effects, show we don't know it to be a
7082 constant. Likewise if it's a pointer or aggregate type since in
7083 those case we only want literals, since those are only optimized
7084 when generating RTL, not later.
7085 And finally, if we are compiling an initializer, not code, we
7086 need to return a definite result now; there's not going to be any
7087 more optimization done. */
7088 if (TREE_SIDE_EFFECTS (arg)
7089 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7090 || POINTER_TYPE_P (TREE_TYPE (arg))
7092 || folding_initializer)
7093 return integer_zero_node;
7098 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7099 return it as a truthvalue. */
7102 build_builtin_expect_predicate (tree pred, tree expected)
7104 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7106 fn = built_in_decls[BUILT_IN_EXPECT];
7107 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7108 ret_type = TREE_TYPE (TREE_TYPE (fn));
7109 pred_type = TREE_VALUE (arg_types);
7110 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7112 pred = fold_convert (pred_type, pred);
7113 expected = fold_convert (expected_type, expected);
7114 call_expr = build_call_expr (fn, 2, pred, expected);
7116 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7117 build_int_cst (ret_type, 0));
7120 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7121 NULL_TREE if no simplification is possible. */
7124 fold_builtin_expect (tree arg0, tree arg1)
7127 enum tree_code code;
7129 /* If this is a builtin_expect within a builtin_expect keep the
7130 inner one. See through a comparison against a constant. It
7131 might have been added to create a thruthvalue. */
7133 if (COMPARISON_CLASS_P (inner)
7134 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7135 inner = TREE_OPERAND (inner, 0);
7137 if (TREE_CODE (inner) == CALL_EXPR
7138 && (fndecl = get_callee_fndecl (inner))
7139 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7140 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7143 /* Distribute the expected value over short-circuiting operators.
7144 See through the cast from truthvalue_type_node to long. */
7146 while (TREE_CODE (inner) == NOP_EXPR
7147 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7148 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7149 inner = TREE_OPERAND (inner, 0);
7151 code = TREE_CODE (inner);
7152 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7154 tree op0 = TREE_OPERAND (inner, 0);
7155 tree op1 = TREE_OPERAND (inner, 1);
7157 op0 = build_builtin_expect_predicate (op0, arg1);
7158 op1 = build_builtin_expect_predicate (op1, arg1);
7159 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7161 return fold_convert (TREE_TYPE (arg0), inner);
7164 /* If the argument isn't invariant then there's nothing else we can do. */
7165 if (!TREE_CONSTANT (arg0))
7168 /* If we expect that a comparison against the argument will fold to
7169 a constant return the constant. In practice, this means a true
7170 constant or the address of a non-weak symbol. */
7173 if (TREE_CODE (inner) == ADDR_EXPR)
7177 inner = TREE_OPERAND (inner, 0);
7179 while (TREE_CODE (inner) == COMPONENT_REF
7180 || TREE_CODE (inner) == ARRAY_REF);
7181 if (DECL_P (inner) && DECL_WEAK (inner))
7185 /* Otherwise, ARG0 already has the proper type for the return value. */
7189 /* Fold a call to __builtin_classify_type with argument ARG. */
7192 fold_builtin_classify_type (tree arg)
7195 return build_int_cst (NULL_TREE, no_type_class);
7197 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7200 /* Fold a call to __builtin_strlen with argument ARG. */
7203 fold_builtin_strlen (tree arg)
7205 if (!validate_arg (arg, POINTER_TYPE))
7209 tree len = c_strlen (arg, 0);
7213 /* Convert from the internal "sizetype" type to "size_t". */
7215 len = fold_convert (size_type_node, len);
7223 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7226 fold_builtin_inf (tree type, int warn)
7228 REAL_VALUE_TYPE real;
7230 /* __builtin_inff is intended to be usable to define INFINITY on all
7231 targets. If an infinity is not available, INFINITY expands "to a
7232 positive constant of type float that overflows at translation
7233 time", footnote "In this case, using INFINITY will violate the
7234 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7235 Thus we pedwarn to ensure this constraint violation is
7237 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7238 pedwarn ("target format does not support infinity");
7241 return build_real (type, real);
7244 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7247 fold_builtin_nan (tree arg, tree type, int quiet)
7249 REAL_VALUE_TYPE real;
7252 if (!validate_arg (arg, POINTER_TYPE))
7254 str = c_getstr (arg);
7258 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7261 return build_real (type, real);
7264 /* Return true if the floating point expression T has an integer value.
7265 We also allow +Inf, -Inf and NaN to be considered integer values. */
7268 integer_valued_real_p (tree t)
7270 switch (TREE_CODE (t))
7277 return integer_valued_real_p (TREE_OPERAND (t, 0));
7282 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
7289 return integer_valued_real_p (TREE_OPERAND (t, 0))
7290 && integer_valued_real_p (TREE_OPERAND (t, 1));
7293 return integer_valued_real_p (TREE_OPERAND (t, 1))
7294 && integer_valued_real_p (TREE_OPERAND (t, 2));
7297 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7301 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7302 if (TREE_CODE (type) == INTEGER_TYPE)
7304 if (TREE_CODE (type) == REAL_TYPE)
7305 return integer_valued_real_p (TREE_OPERAND (t, 0));
7310 switch (builtin_mathfn_code (t))
7312 CASE_FLT_FN (BUILT_IN_CEIL):
7313 CASE_FLT_FN (BUILT_IN_FLOOR):
7314 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7315 CASE_FLT_FN (BUILT_IN_RINT):
7316 CASE_FLT_FN (BUILT_IN_ROUND):
7317 CASE_FLT_FN (BUILT_IN_TRUNC):
7320 CASE_FLT_FN (BUILT_IN_FMIN):
7321 CASE_FLT_FN (BUILT_IN_FMAX):
7322 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7323 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7336 /* FNDECL is assumed to be a builtin where truncation can be propagated
7337 across (for instance floor((double)f) == (double)floorf (f).
7338 Do the transformation for a call with argument ARG. */
7341 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7343 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7345 if (!validate_arg (arg, REAL_TYPE))
7348 /* Integer rounding functions are idempotent. */
7349 if (fcode == builtin_mathfn_code (arg))
7352 /* If argument is already integer valued, and we don't need to worry
7353 about setting errno, there's no need to perform rounding. */
7354 if (! flag_errno_math && integer_valued_real_p (arg))
7359 tree arg0 = strip_float_extensions (arg);
7360 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7361 tree newtype = TREE_TYPE (arg0);
7364 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7365 && (decl = mathfn_built_in (newtype, fcode)))
7366 return fold_convert (ftype,
7367 build_call_expr (decl, 1,
7368 fold_convert (newtype, arg0)));
7373 /* FNDECL is assumed to be builtin which can narrow the FP type of
7374 the argument, for instance lround((double)f) -> lroundf (f).
7375 Do the transformation for a call with argument ARG. */
7378 fold_fixed_mathfn (tree fndecl, tree arg)
7380 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7382 if (!validate_arg (arg, REAL_TYPE))
7385 /* If argument is already integer valued, and we don't need to worry
7386 about setting errno, there's no need to perform rounding. */
7387 if (! flag_errno_math && integer_valued_real_p (arg))
7388 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7392 tree ftype = TREE_TYPE (arg);
7393 tree arg0 = strip_float_extensions (arg);
7394 tree newtype = TREE_TYPE (arg0);
7397 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7398 && (decl = mathfn_built_in (newtype, fcode)))
7399 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7402 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7403 sizeof (long long) == sizeof (long). */
7404 if (TYPE_PRECISION (long_long_integer_type_node)
7405 == TYPE_PRECISION (long_integer_type_node))
7407 tree newfn = NULL_TREE;
7410 CASE_FLT_FN (BUILT_IN_LLCEIL):
7411 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7414 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7415 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7418 CASE_FLT_FN (BUILT_IN_LLROUND):
7419 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7422 CASE_FLT_FN (BUILT_IN_LLRINT):
7423 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7432 tree newcall = build_call_expr(newfn, 1, arg);
7433 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7440 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7441 return type. Return NULL_TREE if no simplification can be made. */
7444 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7448 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7449 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7452 /* Calculate the result when the argument is a constant. */
7453 if (TREE_CODE (arg) == COMPLEX_CST
7454 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7458 if (TREE_CODE (arg) == COMPLEX_EXPR)
7460 tree real = TREE_OPERAND (arg, 0);
7461 tree imag = TREE_OPERAND (arg, 1);
7463 /* If either part is zero, cabs is fabs of the other. */
7464 if (real_zerop (real))
7465 return fold_build1 (ABS_EXPR, type, imag);
7466 if (real_zerop (imag))
7467 return fold_build1 (ABS_EXPR, type, real);
7469 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7470 if (flag_unsafe_math_optimizations
7471 && operand_equal_p (real, imag, OEP_PURE_SAME))
7473 const REAL_VALUE_TYPE sqrt2_trunc
7474 = real_value_truncate (TYPE_MODE (type),
7475 *get_real_const (rv_sqrt2));
7477 return fold_build2 (MULT_EXPR, type,
7478 fold_build1 (ABS_EXPR, type, real),
7479 build_real (type, sqrt2_trunc));
7483 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7484 if (TREE_CODE (arg) == NEGATE_EXPR
7485 || TREE_CODE (arg) == CONJ_EXPR)
7486 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7488 /* Don't do this when optimizing for size. */
7489 if (flag_unsafe_math_optimizations
7490 && optimize && !optimize_size)
7492 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7494 if (sqrtfn != NULL_TREE)
7496 tree rpart, ipart, result;
7498 arg = builtin_save_expr (arg);
7500 rpart = fold_build1 (REALPART_EXPR, type, arg);
7501 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7503 rpart = builtin_save_expr (rpart);
7504 ipart = builtin_save_expr (ipart);
7506 result = fold_build2 (PLUS_EXPR, type,
7507 fold_build2 (MULT_EXPR, type,
7509 fold_build2 (MULT_EXPR, type,
7512 return build_call_expr (sqrtfn, 1, result);
7519 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7520 Return NULL_TREE if no simplification can be made. */
7523 fold_builtin_sqrt (tree arg, tree type)
7526 enum built_in_function fcode;
7529 if (!validate_arg (arg, REAL_TYPE))
7532 /* Calculate the result when the argument is a constant. */
7533 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7536 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7537 fcode = builtin_mathfn_code (arg);
7538 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7540 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7541 arg = fold_build2 (MULT_EXPR, type,
7542 CALL_EXPR_ARG (arg, 0),
7543 build_real (type, dconsthalf));
7544 return build_call_expr (expfn, 1, arg);
7547 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7548 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7550 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7554 tree arg0 = CALL_EXPR_ARG (arg, 0);
7556 /* The inner root was either sqrt or cbrt. */
7557 REAL_VALUE_TYPE dconstroot =
7558 BUILTIN_SQRT_P (fcode) ? dconsthalf : *get_real_const (rv_third);
7560 /* Adjust for the outer root. */
7561 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7562 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7563 tree_root = build_real (type, dconstroot);
7564 return build_call_expr (powfn, 2, arg0, tree_root);
7568 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7569 if (flag_unsafe_math_optimizations
7570 && (fcode == BUILT_IN_POW
7571 || fcode == BUILT_IN_POWF
7572 || fcode == BUILT_IN_POWL))
7574 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7575 tree arg0 = CALL_EXPR_ARG (arg, 0);
7576 tree arg1 = CALL_EXPR_ARG (arg, 1);
7578 if (!tree_expr_nonnegative_p (arg0))
7579 arg0 = build1 (ABS_EXPR, type, arg0);
7580 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7581 build_real (type, dconsthalf));
7582 return build_call_expr (powfn, 2, arg0, narg1);
7588 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7589 Return NULL_TREE if no simplification can be made. */
7592 fold_builtin_cbrt (tree arg, tree type)
7594 const enum built_in_function fcode = builtin_mathfn_code (arg);
7597 if (!validate_arg (arg, REAL_TYPE))
7600 /* Calculate the result when the argument is a constant. */
7601 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7604 if (flag_unsafe_math_optimizations)
7606 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7607 if (BUILTIN_EXPONENT_P (fcode))
7609 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7610 const REAL_VALUE_TYPE third_trunc =
7611 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_third));
7612 arg = fold_build2 (MULT_EXPR, type,
7613 CALL_EXPR_ARG (arg, 0),
7614 build_real (type, third_trunc));
7615 return build_call_expr (expfn, 1, arg);
7618 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7619 if (BUILTIN_SQRT_P (fcode))
7621 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7625 tree arg0 = CALL_EXPR_ARG (arg, 0);
7627 REAL_VALUE_TYPE dconstroot = *get_real_const (rv_third);
7629 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7630 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7631 tree_root = build_real (type, dconstroot);
7632 return build_call_expr (powfn, 2, arg0, tree_root);
7636 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7637 if (BUILTIN_CBRT_P (fcode))
7639 tree arg0 = CALL_EXPR_ARG (arg, 0);
7640 if (tree_expr_nonnegative_p (arg0))
7642 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7647 REAL_VALUE_TYPE dconstroot;
7649 real_arithmetic (&dconstroot, MULT_EXPR,
7650 get_real_const (rv_third),
7651 get_real_const (rv_third));
7652 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7653 tree_root = build_real (type, dconstroot);
7654 return build_call_expr (powfn, 2, arg0, tree_root);
7659 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7660 if (fcode == BUILT_IN_POW
7661 || fcode == BUILT_IN_POWF
7662 || fcode == BUILT_IN_POWL)
7664 tree arg00 = CALL_EXPR_ARG (arg, 0);
7665 tree arg01 = CALL_EXPR_ARG (arg, 1);
7666 if (tree_expr_nonnegative_p (arg00))
7668 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7669 const REAL_VALUE_TYPE dconstroot
7670 = real_value_truncate (TYPE_MODE (type),
7671 *get_real_const (rv_third));
7672 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7673 build_real (type, dconstroot));
7674 return build_call_expr (powfn, 2, arg00, narg01);
7681 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7682 TYPE is the type of the return value. Return NULL_TREE if no
7683 simplification can be made. */
7686 fold_builtin_cos (tree arg, tree type, tree fndecl)
7690 if (!validate_arg (arg, REAL_TYPE))
7693 /* Calculate the result when the argument is a constant. */
7694 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7697 /* Optimize cos(-x) into cos (x). */
7698 if ((narg = fold_strip_sign_ops (arg)))
7699 return build_call_expr (fndecl, 1, narg);
7704 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7705 Return NULL_TREE if no simplification can be made. */
7708 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7710 if (validate_arg (arg, REAL_TYPE))
7714 /* Calculate the result when the argument is a constant. */
7715 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7718 /* Optimize cosh(-x) into cosh (x). */
7719 if ((narg = fold_strip_sign_ops (arg)))
7720 return build_call_expr (fndecl, 1, narg);
7726 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7727 Return NULL_TREE if no simplification can be made. */
7730 fold_builtin_tan (tree arg, tree type)
7732 enum built_in_function fcode;
7735 if (!validate_arg (arg, REAL_TYPE))
7738 /* Calculate the result when the argument is a constant. */
7739 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7742 /* Optimize tan(atan(x)) = x. */
7743 fcode = builtin_mathfn_code (arg);
7744 if (flag_unsafe_math_optimizations
7745 && (fcode == BUILT_IN_ATAN
7746 || fcode == BUILT_IN_ATANF
7747 || fcode == BUILT_IN_ATANL))
7748 return CALL_EXPR_ARG (arg, 0);
7753 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7754 NULL_TREE if no simplification can be made. */
7757 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7762 if (!validate_arg (arg0, REAL_TYPE)
7763 || !validate_arg (arg1, POINTER_TYPE)
7764 || !validate_arg (arg2, POINTER_TYPE))
7767 type = TREE_TYPE (arg0);
7769 /* Calculate the result when the argument is a constant. */
7770 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7773 /* Canonicalize sincos to cexpi. */
7774 if (!TARGET_C99_FUNCTIONS)
7776 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7780 call = build_call_expr (fn, 1, arg0);
7781 call = builtin_save_expr (call);
7783 return build2 (COMPOUND_EXPR, type,
7784 build2 (MODIFY_EXPR, void_type_node,
7785 build_fold_indirect_ref (arg1),
7786 build1 (IMAGPART_EXPR, type, call)),
7787 build2 (MODIFY_EXPR, void_type_node,
7788 build_fold_indirect_ref (arg2),
7789 build1 (REALPART_EXPR, type, call)));
7792 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7793 NULL_TREE if no simplification can be made. */
7796 fold_builtin_cexp (tree arg0, tree type)
7799 tree realp, imagp, ifn;
7801 if (!validate_arg (arg0, COMPLEX_TYPE))
7804 rtype = TREE_TYPE (TREE_TYPE (arg0));
7806 /* In case we can figure out the real part of arg0 and it is constant zero
7808 if (!TARGET_C99_FUNCTIONS)
7810 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7814 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7815 && real_zerop (realp))
7817 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7818 return build_call_expr (ifn, 1, narg);
7821 /* In case we can easily decompose real and imaginary parts split cexp
7822 to exp (r) * cexpi (i). */
7823 if (flag_unsafe_math_optimizations
7826 tree rfn, rcall, icall;
7828 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7832 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7836 icall = build_call_expr (ifn, 1, imagp);
7837 icall = builtin_save_expr (icall);
7838 rcall = build_call_expr (rfn, 1, realp);
7839 rcall = builtin_save_expr (rcall);
7840 return fold_build2 (COMPLEX_EXPR, type,
7841 fold_build2 (MULT_EXPR, rtype,
7843 fold_build1 (REALPART_EXPR, rtype, icall)),
7844 fold_build2 (MULT_EXPR, rtype,
7846 fold_build1 (IMAGPART_EXPR, rtype, icall)));
7852 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7853 Return NULL_TREE if no simplification can be made. */
7856 fold_builtin_trunc (tree fndecl, tree arg)
7858 if (!validate_arg (arg, REAL_TYPE))
7861 /* Optimize trunc of constant value. */
7862 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7864 REAL_VALUE_TYPE r, x;
7865 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7867 x = TREE_REAL_CST (arg);
7868 real_trunc (&r, TYPE_MODE (type), &x);
7869 return build_real (type, r);
7872 return fold_trunc_transparent_mathfn (fndecl, arg);
7875 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7876 Return NULL_TREE if no simplification can be made. */
7879 fold_builtin_floor (tree fndecl, tree arg)
7881 if (!validate_arg (arg, REAL_TYPE))
7884 /* Optimize floor of constant value. */
7885 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7889 x = TREE_REAL_CST (arg);
7890 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7892 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7895 real_floor (&r, TYPE_MODE (type), &x);
7896 return build_real (type, r);
7900 /* Fold floor (x) where x is nonnegative to trunc (x). */
7901 if (tree_expr_nonnegative_p (arg))
7903 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7905 return build_call_expr (truncfn, 1, arg);
7908 return fold_trunc_transparent_mathfn (fndecl, arg);
7911 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7912 Return NULL_TREE if no simplification can be made. */
7915 fold_builtin_ceil (tree fndecl, tree arg)
7917 if (!validate_arg (arg, REAL_TYPE))
7920 /* Optimize ceil of constant value. */
7921 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7925 x = TREE_REAL_CST (arg);
7926 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7928 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7931 real_ceil (&r, TYPE_MODE (type), &x);
7932 return build_real (type, r);
7936 return fold_trunc_transparent_mathfn (fndecl, arg);
7939 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7940 Return NULL_TREE if no simplification can be made. */
7943 fold_builtin_round (tree fndecl, tree arg)
7945 if (!validate_arg (arg, REAL_TYPE))
7948 /* Optimize round of constant value. */
7949 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7953 x = TREE_REAL_CST (arg);
7954 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7956 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7959 real_round (&r, TYPE_MODE (type), &x);
7960 return build_real (type, r);
7964 return fold_trunc_transparent_mathfn (fndecl, arg);
7967 /* Fold function call to builtin lround, lroundf or lroundl (or the
7968 corresponding long long versions) and other rounding functions. ARG
7969 is the argument to the call. Return NULL_TREE if no simplification
7973 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7975 if (!validate_arg (arg, REAL_TYPE))
7978 /* Optimize lround of constant value. */
7979 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7981 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7983 if (real_isfinite (&x))
7985 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7986 tree ftype = TREE_TYPE (arg);
7987 unsigned HOST_WIDE_INT lo2;
7988 HOST_WIDE_INT hi, lo;
7991 switch (DECL_FUNCTION_CODE (fndecl))
7993 CASE_FLT_FN (BUILT_IN_LFLOOR):
7994 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7995 real_floor (&r, TYPE_MODE (ftype), &x);
7998 CASE_FLT_FN (BUILT_IN_LCEIL):
7999 CASE_FLT_FN (BUILT_IN_LLCEIL):
8000 real_ceil (&r, TYPE_MODE (ftype), &x);
8003 CASE_FLT_FN (BUILT_IN_LROUND):
8004 CASE_FLT_FN (BUILT_IN_LLROUND):
8005 real_round (&r, TYPE_MODE (ftype), &x);
8012 REAL_VALUE_TO_INT (&lo, &hi, r);
8013 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8014 return build_int_cst_wide (itype, lo2, hi);
8018 switch (DECL_FUNCTION_CODE (fndecl))
8020 CASE_FLT_FN (BUILT_IN_LFLOOR):
8021 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8022 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8023 if (tree_expr_nonnegative_p (arg))
8024 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8030 return fold_fixed_mathfn (fndecl, arg);
8033 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8034 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8035 the argument to the call. Return NULL_TREE if no simplification can
8039 fold_builtin_bitop (tree fndecl, tree arg)
8041 if (!validate_arg (arg, INTEGER_TYPE))
8044 /* Optimize for constant argument. */
8045 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8047 HOST_WIDE_INT hi, width, result;
8048 unsigned HOST_WIDE_INT lo;
8051 type = TREE_TYPE (arg);
8052 width = TYPE_PRECISION (type);
8053 lo = TREE_INT_CST_LOW (arg);
8055 /* Clear all the bits that are beyond the type's precision. */
8056 if (width > HOST_BITS_PER_WIDE_INT)
8058 hi = TREE_INT_CST_HIGH (arg);
8059 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8060 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8065 if (width < HOST_BITS_PER_WIDE_INT)
8066 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8069 switch (DECL_FUNCTION_CODE (fndecl))
8071 CASE_INT_FN (BUILT_IN_FFS):
8073 result = exact_log2 (lo & -lo) + 1;
8075 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8080 CASE_INT_FN (BUILT_IN_CLZ):
8082 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8084 result = width - floor_log2 (lo) - 1;
8085 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8089 CASE_INT_FN (BUILT_IN_CTZ):
8091 result = exact_log2 (lo & -lo);
8093 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8094 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8098 CASE_INT_FN (BUILT_IN_POPCOUNT):
8101 result++, lo &= lo - 1;
8103 result++, hi &= hi - 1;
8106 CASE_INT_FN (BUILT_IN_PARITY):
8109 result++, lo &= lo - 1;
8111 result++, hi &= hi - 1;
8119 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8125 /* Fold function call to builtin_bswap and the long and long long
8126 variants. Return NULL_TREE if no simplification can be made. */
8128 fold_builtin_bswap (tree fndecl, tree arg)
8130 if (! validate_arg (arg, INTEGER_TYPE))
8133 /* Optimize constant value. */
8134 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8136 HOST_WIDE_INT hi, width, r_hi = 0;
8137 unsigned HOST_WIDE_INT lo, r_lo = 0;
8140 type = TREE_TYPE (arg);
8141 width = TYPE_PRECISION (type);
8142 lo = TREE_INT_CST_LOW (arg);
8143 hi = TREE_INT_CST_HIGH (arg);
8145 switch (DECL_FUNCTION_CODE (fndecl))
8147 case BUILT_IN_BSWAP32:
8148 case BUILT_IN_BSWAP64:
8152 for (s = 0; s < width; s += 8)
8154 int d = width - s - 8;
8155 unsigned HOST_WIDE_INT byte;
8157 if (s < HOST_BITS_PER_WIDE_INT)
8158 byte = (lo >> s) & 0xff;
8160 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8162 if (d < HOST_BITS_PER_WIDE_INT)
8165 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8175 if (width < HOST_BITS_PER_WIDE_INT)
8176 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8178 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8184 /* Return true if EXPR is the real constant contained in VALUE. */
8187 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8191 return ((TREE_CODE (expr) == REAL_CST
8192 && !TREE_OVERFLOW (expr)
8193 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8194 || (TREE_CODE (expr) == COMPLEX_CST
8195 && real_dconstp (TREE_REALPART (expr), value)
8196 && real_zerop (TREE_IMAGPART (expr))));
8199 /* A subroutine of fold_builtin to fold the various logarithmic
8200 functions. Return NULL_TREE if no simplification can me made.
8201 FUNC is the corresponding MPFR logarithm function. */
8204 fold_builtin_logarithm (tree fndecl, tree arg,
8205 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8207 if (validate_arg (arg, REAL_TYPE))
8209 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8211 const enum built_in_function fcode = builtin_mathfn_code (arg);
8213 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8214 instead we'll look for 'e' truncated to MODE. So only do
8215 this if flag_unsafe_math_optimizations is set. */
8216 if (flag_unsafe_math_optimizations && func == mpfr_log)
8218 const REAL_VALUE_TYPE e_truncated =
8219 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_e));
8220 if (real_dconstp (arg, &e_truncated))
8221 return build_real (type, dconst1);
8224 /* Calculate the result when the argument is a constant. */
8225 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8228 /* Special case, optimize logN(expN(x)) = x. */
8229 if (flag_unsafe_math_optimizations
8230 && ((func == mpfr_log
8231 && (fcode == BUILT_IN_EXP
8232 || fcode == BUILT_IN_EXPF
8233 || fcode == BUILT_IN_EXPL))
8234 || (func == mpfr_log2
8235 && (fcode == BUILT_IN_EXP2
8236 || fcode == BUILT_IN_EXP2F
8237 || fcode == BUILT_IN_EXP2L))
8238 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8239 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8241 /* Optimize logN(func()) for various exponential functions. We
8242 want to determine the value "x" and the power "exponent" in
8243 order to transform logN(x**exponent) into exponent*logN(x). */
8244 if (flag_unsafe_math_optimizations)
8246 tree exponent = 0, x = 0;
8250 CASE_FLT_FN (BUILT_IN_EXP):
8251 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8252 x = build_real (type,
8253 real_value_truncate (TYPE_MODE (type),
8254 *get_real_const (rv_e)));
8255 exponent = CALL_EXPR_ARG (arg, 0);
8257 CASE_FLT_FN (BUILT_IN_EXP2):
8258 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8259 x = build_real (type, dconst2);
8260 exponent = CALL_EXPR_ARG (arg, 0);
8262 CASE_FLT_FN (BUILT_IN_EXP10):
8263 CASE_FLT_FN (BUILT_IN_POW10):
8264 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8266 REAL_VALUE_TYPE dconst10;
8267 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8268 x = build_real (type, dconst10);
8270 exponent = CALL_EXPR_ARG (arg, 0);
8272 CASE_FLT_FN (BUILT_IN_SQRT):
8273 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8274 x = CALL_EXPR_ARG (arg, 0);
8275 exponent = build_real (type, dconsthalf);
8277 CASE_FLT_FN (BUILT_IN_CBRT):
8278 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8279 x = CALL_EXPR_ARG (arg, 0);
8280 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8281 *get_real_const (rv_third)));
8283 CASE_FLT_FN (BUILT_IN_POW):
8284 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8285 x = CALL_EXPR_ARG (arg, 0);
8286 exponent = CALL_EXPR_ARG (arg, 1);
8292 /* Now perform the optimization. */
8295 tree logfn = build_call_expr (fndecl, 1, x);
8296 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8304 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8305 NULL_TREE if no simplification can be made. */
8308 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8310 tree res, narg0, narg1;
8312 if (!validate_arg (arg0, REAL_TYPE)
8313 || !validate_arg (arg1, REAL_TYPE))
8316 /* Calculate the result when the argument is a constant. */
8317 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8320 /* If either argument to hypot has a negate or abs, strip that off.
8321 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8322 narg0 = fold_strip_sign_ops (arg0);
8323 narg1 = fold_strip_sign_ops (arg1);
8326 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8327 narg1 ? narg1 : arg1);
8330 /* If either argument is zero, hypot is fabs of the other. */
8331 if (real_zerop (arg0))
8332 return fold_build1 (ABS_EXPR, type, arg1);
8333 else if (real_zerop (arg1))
8334 return fold_build1 (ABS_EXPR, type, arg0);
8336 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8337 if (flag_unsafe_math_optimizations
8338 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8340 const REAL_VALUE_TYPE sqrt2_trunc
8341 = real_value_truncate (TYPE_MODE (type), *get_real_const (rv_sqrt2));
8342 return fold_build2 (MULT_EXPR, type,
8343 fold_build1 (ABS_EXPR, type, arg0),
8344 build_real (type, sqrt2_trunc));
8351 /* Fold a builtin function call to pow, powf, or powl. Return
8352 NULL_TREE if no simplification can be made. */
8354 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8358 if (!validate_arg (arg0, REAL_TYPE)
8359 || !validate_arg (arg1, REAL_TYPE))
8362 /* Calculate the result when the argument is a constant. */
8363 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8366 /* Optimize pow(1.0,y) = 1.0. */
8367 if (real_onep (arg0))
8368 return omit_one_operand (type, build_real (type, dconst1), arg1);
8370 if (TREE_CODE (arg1) == REAL_CST
8371 && !TREE_OVERFLOW (arg1))
8373 REAL_VALUE_TYPE cint;
8377 c = TREE_REAL_CST (arg1);
8379 /* Optimize pow(x,0.0) = 1.0. */
8380 if (REAL_VALUES_EQUAL (c, dconst0))
8381 return omit_one_operand (type, build_real (type, dconst1),
8384 /* Optimize pow(x,1.0) = x. */
8385 if (REAL_VALUES_EQUAL (c, dconst1))
8388 /* Optimize pow(x,-1.0) = 1.0/x. */
8389 if (REAL_VALUES_EQUAL (c, dconstm1))
8390 return fold_build2 (RDIV_EXPR, type,
8391 build_real (type, dconst1), arg0);
8393 /* Optimize pow(x,0.5) = sqrt(x). */
8394 if (flag_unsafe_math_optimizations
8395 && REAL_VALUES_EQUAL (c, dconsthalf))
8397 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8399 if (sqrtfn != NULL_TREE)
8400 return build_call_expr (sqrtfn, 1, arg0);
8403 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8404 if (flag_unsafe_math_optimizations)
8406 const REAL_VALUE_TYPE dconstroot
8407 = real_value_truncate (TYPE_MODE (type),
8408 *get_real_const (rv_third));
8410 if (REAL_VALUES_EQUAL (c, dconstroot))
8412 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8413 if (cbrtfn != NULL_TREE)
8414 return build_call_expr (cbrtfn, 1, arg0);
8418 /* Check for an integer exponent. */
8419 n = real_to_integer (&c);
8420 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8421 if (real_identical (&c, &cint))
8423 /* Attempt to evaluate pow at compile-time. */
8424 if (TREE_CODE (arg0) == REAL_CST
8425 && !TREE_OVERFLOW (arg0))
8430 x = TREE_REAL_CST (arg0);
8431 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8432 if (flag_unsafe_math_optimizations || !inexact)
8433 return build_real (type, x);
8436 /* Strip sign ops from even integer powers. */
8437 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8439 tree narg0 = fold_strip_sign_ops (arg0);
8441 return build_call_expr (fndecl, 2, narg0, arg1);
8446 if (flag_unsafe_math_optimizations)
8448 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8450 /* Optimize pow(expN(x),y) = expN(x*y). */
8451 if (BUILTIN_EXPONENT_P (fcode))
8453 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8454 tree arg = CALL_EXPR_ARG (arg0, 0);
8455 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8456 return build_call_expr (expfn, 1, arg);
8459 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8460 if (BUILTIN_SQRT_P (fcode))
8462 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8463 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8464 build_real (type, dconsthalf));
8465 return build_call_expr (fndecl, 2, narg0, narg1);
8468 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8469 if (BUILTIN_CBRT_P (fcode))
8471 tree arg = CALL_EXPR_ARG (arg0, 0);
8472 if (tree_expr_nonnegative_p (arg))
8474 const REAL_VALUE_TYPE dconstroot
8475 = real_value_truncate (TYPE_MODE (type),
8476 *get_real_const (rv_third));
8477 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8478 build_real (type, dconstroot));
8479 return build_call_expr (fndecl, 2, arg, narg1);
8483 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8484 if (fcode == BUILT_IN_POW
8485 || fcode == BUILT_IN_POWF
8486 || fcode == BUILT_IN_POWL)
8488 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8489 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8490 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8491 return build_call_expr (fndecl, 2, arg00, narg1);
8498 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8499 Return NULL_TREE if no simplification can be made. */
8501 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8502 tree arg0, tree arg1, tree type)
8504 if (!validate_arg (arg0, REAL_TYPE)
8505 || !validate_arg (arg1, INTEGER_TYPE))
8508 /* Optimize pow(1.0,y) = 1.0. */
8509 if (real_onep (arg0))
8510 return omit_one_operand (type, build_real (type, dconst1), arg1);
8512 if (host_integerp (arg1, 0))
8514 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8516 /* Evaluate powi at compile-time. */
8517 if (TREE_CODE (arg0) == REAL_CST
8518 && !TREE_OVERFLOW (arg0))
8521 x = TREE_REAL_CST (arg0);
8522 real_powi (&x, TYPE_MODE (type), &x, c);
8523 return build_real (type, x);
8526 /* Optimize pow(x,0) = 1.0. */
8528 return omit_one_operand (type, build_real (type, dconst1),
8531 /* Optimize pow(x,1) = x. */
8535 /* Optimize pow(x,-1) = 1.0/x. */
8537 return fold_build2 (RDIV_EXPR, type,
8538 build_real (type, dconst1), arg0);
8544 /* A subroutine of fold_builtin to fold the various exponent
8545 functions. Return NULL_TREE if no simplification can be made.
8546 FUNC is the corresponding MPFR exponent function. */
8549 fold_builtin_exponent (tree fndecl, tree arg,
8550 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8552 if (validate_arg (arg, REAL_TYPE))
8554 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8557 /* Calculate the result when the argument is a constant. */
8558 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8561 /* Optimize expN(logN(x)) = x. */
8562 if (flag_unsafe_math_optimizations)
8564 const enum built_in_function fcode = builtin_mathfn_code (arg);
8566 if ((func == mpfr_exp
8567 && (fcode == BUILT_IN_LOG
8568 || fcode == BUILT_IN_LOGF
8569 || fcode == BUILT_IN_LOGL))
8570 || (func == mpfr_exp2
8571 && (fcode == BUILT_IN_LOG2
8572 || fcode == BUILT_IN_LOG2F
8573 || fcode == BUILT_IN_LOG2L))
8574 || (func == mpfr_exp10
8575 && (fcode == BUILT_IN_LOG10
8576 || fcode == BUILT_IN_LOG10F
8577 || fcode == BUILT_IN_LOG10L)))
8578 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8585 /* Return true if VAR is a VAR_DECL or a component thereof. */
8588 var_decl_component_p (tree var)
8591 while (handled_component_p (inner))
8592 inner = TREE_OPERAND (inner, 0);
8593 return SSA_VAR_P (inner);
8596 /* Fold function call to builtin memset. Return
8597 NULL_TREE if no simplification can be made. */
8600 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8603 unsigned HOST_WIDE_INT length, cval;
8605 if (! validate_arg (dest, POINTER_TYPE)
8606 || ! validate_arg (c, INTEGER_TYPE)
8607 || ! validate_arg (len, INTEGER_TYPE))
8610 if (! host_integerp (len, 1))
8613 /* If the LEN parameter is zero, return DEST. */
8614 if (integer_zerop (len))
8615 return omit_one_operand (type, dest, c);
8617 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8622 if (TREE_CODE (var) != ADDR_EXPR)
8625 var = TREE_OPERAND (var, 0);
8626 if (TREE_THIS_VOLATILE (var))
8629 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8630 && !POINTER_TYPE_P (TREE_TYPE (var)))
8633 if (! var_decl_component_p (var))
8636 length = tree_low_cst (len, 1);
8637 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8638 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8642 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8645 if (integer_zerop (c))
8649 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8652 cval = tree_low_cst (c, 1);
8656 cval |= (cval << 31) << 1;
8659 ret = build_int_cst_type (TREE_TYPE (var), cval);
8660 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8664 return omit_one_operand (type, dest, ret);
8667 /* Fold function call to builtin memset. Return
8668 NULL_TREE if no simplification can be made. */
8671 fold_builtin_bzero (tree dest, tree size, bool ignore)
8673 if (! validate_arg (dest, POINTER_TYPE)
8674 || ! validate_arg (size, INTEGER_TYPE))
8680 /* New argument list transforming bzero(ptr x, int y) to
8681 memset(ptr x, int 0, size_t y). This is done this way
8682 so that if it isn't expanded inline, we fallback to
8683 calling bzero instead of memset. */
8685 return fold_builtin_memset (dest, integer_zero_node,
8686 fold_convert (sizetype, size),
8687 void_type_node, ignore);
8690 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8691 NULL_TREE if no simplification can be made.
8692 If ENDP is 0, return DEST (like memcpy).
8693 If ENDP is 1, return DEST+LEN (like mempcpy).
8694 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8695 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8699 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8701 tree destvar, srcvar, expr;
8703 if (! validate_arg (dest, POINTER_TYPE)
8704 || ! validate_arg (src, POINTER_TYPE)
8705 || ! validate_arg (len, INTEGER_TYPE))
8708 /* If the LEN parameter is zero, return DEST. */
8709 if (integer_zerop (len))
8710 return omit_one_operand (type, dest, src);
8712 /* If SRC and DEST are the same (and not volatile), return
8713 DEST{,+LEN,+LEN-1}. */
8714 if (operand_equal_p (src, dest, 0))
8718 tree srctype, desttype;
8721 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8722 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8724 /* Both DEST and SRC must be pointer types.
8725 ??? This is what old code did. Is the testing for pointer types
8728 If either SRC is readonly or length is 1, we can use memcpy. */
8729 if (dest_align && src_align
8730 && (readonly_data_expr (src)
8731 || (host_integerp (len, 1)
8732 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8733 tree_low_cst (len, 1)))))
8735 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8738 return build_call_expr (fn, 3, dest, src, len);
8743 if (!host_integerp (len, 0))
8746 This logic lose for arguments like (type *)malloc (sizeof (type)),
8747 since we strip the casts of up to VOID return value from malloc.
8748 Perhaps we ought to inherit type from non-VOID argument here? */
8751 srctype = TREE_TYPE (TREE_TYPE (src));
8752 desttype = TREE_TYPE (TREE_TYPE (dest));
8753 if (!srctype || !desttype
8754 || !TYPE_SIZE_UNIT (srctype)
8755 || !TYPE_SIZE_UNIT (desttype)
8756 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8757 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8758 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8759 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8762 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8763 < (int) TYPE_ALIGN (desttype)
8764 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8765 < (int) TYPE_ALIGN (srctype)))
8769 dest = builtin_save_expr (dest);
8771 srcvar = build_fold_indirect_ref (src);
8772 if (TREE_THIS_VOLATILE (srcvar))
8774 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8776 /* With memcpy, it is possible to bypass aliasing rules, so without
8777 this check i.e. execute/20060930-2.c would be misoptimized, because
8778 it use conflicting alias set to hold argument for the memcpy call.
8779 This check is probably unnecessary with -fno-strict-aliasing.
8780 Similarly for destvar. See also PR29286. */
8781 if (!var_decl_component_p (srcvar)
8782 /* Accept: memcpy (*char_var, "test", 1); that simplify
8784 || is_gimple_min_invariant (srcvar)
8785 || readonly_data_expr (src))
8788 destvar = build_fold_indirect_ref (dest);
8789 if (TREE_THIS_VOLATILE (destvar))
8791 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8793 if (!var_decl_component_p (destvar))
8796 if (srctype == desttype
8797 || (gimple_in_ssa_p (cfun)
8798 && useless_type_conversion_p (desttype, srctype)))
8800 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8801 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8802 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8803 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8804 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8806 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8807 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8813 if (endp == 0 || endp == 3)
8814 return omit_one_operand (type, dest, expr);
8820 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8823 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8824 dest = fold_convert (type, dest);
8826 dest = omit_one_operand (type, dest, expr);
8830 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8831 If LEN is not NULL, it represents the length of the string to be
8832 copied. Return NULL_TREE if no simplification can be made. */
8835 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8839 if (!validate_arg (dest, POINTER_TYPE)
8840 || !validate_arg (src, POINTER_TYPE))
8843 /* If SRC and DEST are the same (and not volatile), return DEST. */
8844 if (operand_equal_p (src, dest, 0))
8845 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8850 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8856 len = c_strlen (src, 1);
8857 if (! len || TREE_SIDE_EFFECTS (len))
8861 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8862 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8863 build_call_expr (fn, 3, dest, src, len));
8866 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8867 If SLEN is not NULL, it represents the length of the source string.
8868 Return NULL_TREE if no simplification can be made. */
8871 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8875 if (!validate_arg (dest, POINTER_TYPE)
8876 || !validate_arg (src, POINTER_TYPE)
8877 || !validate_arg (len, INTEGER_TYPE))
8880 /* If the LEN parameter is zero, return DEST. */
8881 if (integer_zerop (len))
8882 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8884 /* We can't compare slen with len as constants below if len is not a
8886 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8890 slen = c_strlen (src, 1);
8892 /* Now, we must be passed a constant src ptr parameter. */
8893 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8896 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8898 /* We do not support simplification of this case, though we do
8899 support it when expanding trees into RTL. */
8900 /* FIXME: generate a call to __builtin_memset. */
8901 if (tree_int_cst_lt (slen, len))
8904 /* OK transform into builtin memcpy. */
8905 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8908 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8909 build_call_expr (fn, 3, dest, src, len));
8912 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8913 arguments to the call, and TYPE is its return type.
8914 Return NULL_TREE if no simplification can be made. */
8917 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8919 if (!validate_arg (arg1, POINTER_TYPE)
8920 || !validate_arg (arg2, INTEGER_TYPE)
8921 || !validate_arg (len, INTEGER_TYPE))
8927 if (TREE_CODE (arg2) != INTEGER_CST
8928 || !host_integerp (len, 1))
8931 p1 = c_getstr (arg1);
8932 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8938 if (target_char_cast (arg2, &c))
8941 r = memchr (p1, c, tree_low_cst (len, 1));
8944 return build_int_cst (TREE_TYPE (arg1), 0);
8946 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8948 return fold_convert (type, tem);
8954 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8955 Return NULL_TREE if no simplification can be made. */
8958 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8960 const char *p1, *p2;
8962 if (!validate_arg (arg1, POINTER_TYPE)
8963 || !validate_arg (arg2, POINTER_TYPE)
8964 || !validate_arg (len, INTEGER_TYPE))
8967 /* If the LEN parameter is zero, return zero. */
8968 if (integer_zerop (len))
8969 return omit_two_operands (integer_type_node, integer_zero_node,
8972 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8973 if (operand_equal_p (arg1, arg2, 0))
8974 return omit_one_operand (integer_type_node, integer_zero_node, len);
8976 p1 = c_getstr (arg1);
8977 p2 = c_getstr (arg2);
8979 /* If all arguments are constant, and the value of len is not greater
8980 than the lengths of arg1 and arg2, evaluate at compile-time. */
8981 if (host_integerp (len, 1) && p1 && p2
8982 && compare_tree_int (len, strlen (p1) + 1) <= 0
8983 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8985 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8988 return integer_one_node;
8990 return integer_minus_one_node;
8992 return integer_zero_node;
8995 /* If len parameter is one, return an expression corresponding to
8996 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8997 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8999 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9000 tree cst_uchar_ptr_node
9001 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9003 tree ind1 = fold_convert (integer_type_node,
9004 build1 (INDIRECT_REF, cst_uchar_node,
9005 fold_convert (cst_uchar_ptr_node,
9007 tree ind2 = fold_convert (integer_type_node,
9008 build1 (INDIRECT_REF, cst_uchar_node,
9009 fold_convert (cst_uchar_ptr_node,
9011 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9017 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9018 Return NULL_TREE if no simplification can be made. */
9021 fold_builtin_strcmp (tree arg1, tree arg2)
9023 const char *p1, *p2;
9025 if (!validate_arg (arg1, POINTER_TYPE)
9026 || !validate_arg (arg2, POINTER_TYPE))
9029 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9030 if (operand_equal_p (arg1, arg2, 0))
9031 return integer_zero_node;
9033 p1 = c_getstr (arg1);
9034 p2 = c_getstr (arg2);
9038 const int i = strcmp (p1, p2);
9040 return integer_minus_one_node;
9042 return integer_one_node;
9044 return integer_zero_node;
9047 /* If the second arg is "", return *(const unsigned char*)arg1. */
9048 if (p2 && *p2 == '\0')
9050 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9051 tree cst_uchar_ptr_node
9052 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9054 return fold_convert (integer_type_node,
9055 build1 (INDIRECT_REF, cst_uchar_node,
9056 fold_convert (cst_uchar_ptr_node,
9060 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9061 if (p1 && *p1 == '\0')
9063 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9064 tree cst_uchar_ptr_node
9065 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9067 tree temp = fold_convert (integer_type_node,
9068 build1 (INDIRECT_REF, cst_uchar_node,
9069 fold_convert (cst_uchar_ptr_node,
9071 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9077 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9078 Return NULL_TREE if no simplification can be made. */
9081 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9083 const char *p1, *p2;
9085 if (!validate_arg (arg1, POINTER_TYPE)
9086 || !validate_arg (arg2, POINTER_TYPE)
9087 || !validate_arg (len, INTEGER_TYPE))
9090 /* If the LEN parameter is zero, return zero. */
9091 if (integer_zerop (len))
9092 return omit_two_operands (integer_type_node, integer_zero_node,
9095 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9096 if (operand_equal_p (arg1, arg2, 0))
9097 return omit_one_operand (integer_type_node, integer_zero_node, len);
9099 p1 = c_getstr (arg1);
9100 p2 = c_getstr (arg2);
9102 if (host_integerp (len, 1) && p1 && p2)
9104 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9106 return integer_one_node;
9108 return integer_minus_one_node;
9110 return integer_zero_node;
9113 /* If the second arg is "", and the length is greater than zero,
9114 return *(const unsigned char*)arg1. */
9115 if (p2 && *p2 == '\0'
9116 && TREE_CODE (len) == INTEGER_CST
9117 && tree_int_cst_sgn (len) == 1)
9119 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9120 tree cst_uchar_ptr_node
9121 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9123 return fold_convert (integer_type_node,
9124 build1 (INDIRECT_REF, cst_uchar_node,
9125 fold_convert (cst_uchar_ptr_node,
9129 /* If the first arg is "", and the length is greater than zero,
9130 return -*(const unsigned char*)arg2. */
9131 if (p1 && *p1 == '\0'
9132 && TREE_CODE (len) == INTEGER_CST
9133 && tree_int_cst_sgn (len) == 1)
9135 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9136 tree cst_uchar_ptr_node
9137 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9139 tree temp = fold_convert (integer_type_node,
9140 build1 (INDIRECT_REF, cst_uchar_node,
9141 fold_convert (cst_uchar_ptr_node,
9143 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9146 /* If len parameter is one, return an expression corresponding to
9147 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9148 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9150 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9151 tree cst_uchar_ptr_node
9152 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9154 tree ind1 = fold_convert (integer_type_node,
9155 build1 (INDIRECT_REF, cst_uchar_node,
9156 fold_convert (cst_uchar_ptr_node,
9158 tree ind2 = fold_convert (integer_type_node,
9159 build1 (INDIRECT_REF, cst_uchar_node,
9160 fold_convert (cst_uchar_ptr_node,
9162 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9168 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9169 ARG. Return NULL_TREE if no simplification can be made. */
9172 fold_builtin_signbit (tree arg, tree type)
9176 if (!validate_arg (arg, REAL_TYPE))
9179 /* If ARG is a compile-time constant, determine the result. */
9180 if (TREE_CODE (arg) == REAL_CST
9181 && !TREE_OVERFLOW (arg))
9185 c = TREE_REAL_CST (arg);
9186 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9187 return fold_convert (type, temp);
9190 /* If ARG is non-negative, the result is always zero. */
9191 if (tree_expr_nonnegative_p (arg))
9192 return omit_one_operand (type, integer_zero_node, arg);
9194 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9195 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9196 return fold_build2 (LT_EXPR, type, arg,
9197 build_real (TREE_TYPE (arg), dconst0));
9202 /* Fold function call to builtin copysign, copysignf or copysignl with
9203 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9207 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9211 if (!validate_arg (arg1, REAL_TYPE)
9212 || !validate_arg (arg2, REAL_TYPE))
9215 /* copysign(X,X) is X. */
9216 if (operand_equal_p (arg1, arg2, 0))
9217 return fold_convert (type, arg1);
9219 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9220 if (TREE_CODE (arg1) == REAL_CST
9221 && TREE_CODE (arg2) == REAL_CST
9222 && !TREE_OVERFLOW (arg1)
9223 && !TREE_OVERFLOW (arg2))
9225 REAL_VALUE_TYPE c1, c2;
9227 c1 = TREE_REAL_CST (arg1);
9228 c2 = TREE_REAL_CST (arg2);
9229 /* c1.sign := c2.sign. */
9230 real_copysign (&c1, &c2);
9231 return build_real (type, c1);
9234 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9235 Remember to evaluate Y for side-effects. */
9236 if (tree_expr_nonnegative_p (arg2))
9237 return omit_one_operand (type,
9238 fold_build1 (ABS_EXPR, type, arg1),
9241 /* Strip sign changing operations for the first argument. */
9242 tem = fold_strip_sign_ops (arg1);
9244 return build_call_expr (fndecl, 2, tem, arg2);
9249 /* Fold a call to builtin isascii with argument ARG. */
9252 fold_builtin_isascii (tree arg)
9254 if (!validate_arg (arg, INTEGER_TYPE))
9258 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9259 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9260 build_int_cst (NULL_TREE,
9261 ~ (unsigned HOST_WIDE_INT) 0x7f));
9262 return fold_build2 (EQ_EXPR, integer_type_node,
9263 arg, integer_zero_node);
9267 /* Fold a call to builtin toascii with argument ARG. */
9270 fold_builtin_toascii (tree arg)
9272 if (!validate_arg (arg, INTEGER_TYPE))
9275 /* Transform toascii(c) -> (c & 0x7f). */
9276 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9277 build_int_cst (NULL_TREE, 0x7f));
9280 /* Fold a call to builtin isdigit with argument ARG. */
9283 fold_builtin_isdigit (tree arg)
9285 if (!validate_arg (arg, INTEGER_TYPE))
9289 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9290 /* According to the C standard, isdigit is unaffected by locale.
9291 However, it definitely is affected by the target character set. */
9292 unsigned HOST_WIDE_INT target_digit0
9293 = lang_hooks.to_target_charset ('0');
9295 if (target_digit0 == 0)
9298 arg = fold_convert (unsigned_type_node, arg);
9299 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9300 build_int_cst (unsigned_type_node, target_digit0));
9301 return fold_build2 (LE_EXPR, integer_type_node, arg,
9302 build_int_cst (unsigned_type_node, 9));
9306 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9309 fold_builtin_fabs (tree arg, tree type)
9311 if (!validate_arg (arg, REAL_TYPE))
9314 arg = fold_convert (type, arg);
9315 if (TREE_CODE (arg) == REAL_CST)
9316 return fold_abs_const (arg, type);
9317 return fold_build1 (ABS_EXPR, type, arg);
9320 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9323 fold_builtin_abs (tree arg, tree type)
9325 if (!validate_arg (arg, INTEGER_TYPE))
9328 arg = fold_convert (type, arg);
9329 if (TREE_CODE (arg) == INTEGER_CST)
9330 return fold_abs_const (arg, type);
9331 return fold_build1 (ABS_EXPR, type, arg);
9334 /* Fold a call to builtin fmin or fmax. */
9337 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9339 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9341 /* Calculate the result when the argument is a constant. */
9342 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9347 /* If either argument is NaN, return the other one. Avoid the
9348 transformation if we get (and honor) a signalling NaN. Using
9349 omit_one_operand() ensures we create a non-lvalue. */
9350 if (TREE_CODE (arg0) == REAL_CST
9351 && real_isnan (&TREE_REAL_CST (arg0))
9352 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9353 || ! TREE_REAL_CST (arg0).signalling))
9354 return omit_one_operand (type, arg1, arg0);
9355 if (TREE_CODE (arg1) == REAL_CST
9356 && real_isnan (&TREE_REAL_CST (arg1))
9357 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9358 || ! TREE_REAL_CST (arg1).signalling))
9359 return omit_one_operand (type, arg0, arg1);
9361 /* Transform fmin/fmax(x,x) -> x. */
9362 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9363 return omit_one_operand (type, arg0, arg1);
9365 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9366 functions to return the numeric arg if the other one is NaN.
9367 These tree codes don't honor that, so only transform if
9368 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9369 handled, so we don't have to worry about it either. */
9370 if (flag_finite_math_only)
9371 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9372 fold_convert (type, arg0),
9373 fold_convert (type, arg1));
9378 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9381 fold_builtin_carg (tree arg, tree type)
9383 if (validate_arg (arg, COMPLEX_TYPE))
9385 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9389 tree new_arg = builtin_save_expr (arg);
9390 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9391 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9392 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9399 /* Fold a call to builtin logb/ilogb. */
9402 fold_builtin_logb (tree arg, tree rettype)
9404 if (! validate_arg (arg, REAL_TYPE))
9409 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9411 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9417 /* If arg is Inf or NaN and we're logb, return it. */
9418 if (TREE_CODE (rettype) == REAL_TYPE)
9419 return fold_convert (rettype, arg);
9420 /* Fall through... */
9422 /* Zero may set errno and/or raise an exception for logb, also
9423 for ilogb we don't know FP_ILOGB0. */
9426 /* For normal numbers, proceed iff radix == 2. In GCC,
9427 normalized significands are in the range [0.5, 1.0). We
9428 want the exponent as if they were [1.0, 2.0) so get the
9429 exponent and subtract 1. */
9430 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9431 return fold_convert (rettype, build_int_cst (NULL_TREE,
9432 REAL_EXP (value)-1));
9440 /* Fold a call to builtin significand, if radix == 2. */
9443 fold_builtin_significand (tree arg, tree rettype)
9445 if (! validate_arg (arg, REAL_TYPE))
9450 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9452 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9459 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9460 return fold_convert (rettype, arg);
9462 /* For normal numbers, proceed iff radix == 2. */
9463 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9465 REAL_VALUE_TYPE result = *value;
9466 /* In GCC, normalized significands are in the range [0.5,
9467 1.0). We want them to be [1.0, 2.0) so set the
9469 SET_REAL_EXP (&result, 1);
9470 return build_real (rettype, result);
9479 /* Fold a call to builtin frexp, we can assume the base is 2. */
9482 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9484 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9489 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9492 arg1 = build_fold_indirect_ref (arg1);
9494 /* Proceed if a valid pointer type was passed in. */
9495 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9497 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9503 /* For +-0, return (*exp = 0, +-0). */
9504 exp = integer_zero_node;
9509 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9510 return omit_one_operand (rettype, arg0, arg1);
9513 /* Since the frexp function always expects base 2, and in
9514 GCC normalized significands are already in the range
9515 [0.5, 1.0), we have exactly what frexp wants. */
9516 REAL_VALUE_TYPE frac_rvt = *value;
9517 SET_REAL_EXP (&frac_rvt, 0);
9518 frac = build_real (rettype, frac_rvt);
9519 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9526 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9527 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9528 TREE_SIDE_EFFECTS (arg1) = 1;
9529 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9535 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9536 then we can assume the base is two. If it's false, then we have to
9537 check the mode of the TYPE parameter in certain cases. */
9540 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9542 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9547 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9548 if (real_zerop (arg0) || integer_zerop (arg1)
9549 || (TREE_CODE (arg0) == REAL_CST
9550 && !real_isfinite (&TREE_REAL_CST (arg0))))
9551 return omit_one_operand (type, arg0, arg1);
9553 /* If both arguments are constant, then try to evaluate it. */
9554 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9555 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9556 && host_integerp (arg1, 0))
9558 /* Bound the maximum adjustment to twice the range of the
9559 mode's valid exponents. Use abs to ensure the range is
9560 positive as a sanity check. */
9561 const long max_exp_adj = 2 *
9562 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9563 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9565 /* Get the user-requested adjustment. */
9566 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9568 /* The requested adjustment must be inside this range. This
9569 is a preliminary cap to avoid things like overflow, we
9570 may still fail to compute the result for other reasons. */
9571 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9573 REAL_VALUE_TYPE initial_result;
9575 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9577 /* Ensure we didn't overflow. */
9578 if (! real_isinf (&initial_result))
9580 const REAL_VALUE_TYPE trunc_result
9581 = real_value_truncate (TYPE_MODE (type), initial_result);
9583 /* Only proceed if the target mode can hold the
9585 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9586 return build_real (type, trunc_result);
9595 /* Fold a call to builtin modf. */
9598 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9600 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9605 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9608 arg1 = build_fold_indirect_ref (arg1);
9610 /* Proceed if a valid pointer type was passed in. */
9611 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9613 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9614 REAL_VALUE_TYPE trunc, frac;
9620 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9621 trunc = frac = *value;
9624 /* For +-Inf, return (*arg1 = arg0, +-0). */
9626 frac.sign = value->sign;
9630 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9631 real_trunc (&trunc, VOIDmode, value);
9632 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9633 /* If the original number was negative and already
9634 integral, then the fractional part is -0.0. */
9635 if (value->sign && frac.cl == rvc_zero)
9636 frac.sign = value->sign;
9640 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9641 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9642 build_real (rettype, trunc));
9643 TREE_SIDE_EFFECTS (arg1) = 1;
9644 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9645 build_real (rettype, frac));
9651 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9652 ARG is the argument for the call. */
9655 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9657 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9660 if (!validate_arg (arg, REAL_TYPE))
9663 switch (builtin_index)
9665 case BUILT_IN_ISINF:
9666 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9667 return omit_one_operand (type, integer_zero_node, arg);
9669 if (TREE_CODE (arg) == REAL_CST)
9671 r = TREE_REAL_CST (arg);
9672 if (real_isinf (&r))
9673 return real_compare (GT_EXPR, &r, &dconst0)
9674 ? integer_one_node : integer_minus_one_node;
9676 return integer_zero_node;
9681 case BUILT_IN_ISINF_SIGN:
9683 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9684 /* In a boolean context, GCC will fold the inner COND_EXPR to
9685 1. So e.g. "if (isinf_sign(x))" would be folded to just
9686 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9687 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9688 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9689 tree tmp = NULL_TREE;
9691 arg = builtin_save_expr (arg);
9693 if (signbit_fn && isinf_fn)
9695 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
9696 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
9698 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
9699 signbit_call, integer_zero_node);
9700 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
9701 isinf_call, integer_zero_node);
9703 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
9704 integer_minus_one_node, integer_one_node);
9705 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
9712 case BUILT_IN_ISFINITE:
9713 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9714 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9715 return omit_one_operand (type, integer_one_node, arg);
9717 if (TREE_CODE (arg) == REAL_CST)
9719 r = TREE_REAL_CST (arg);
9720 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9725 case BUILT_IN_ISNAN:
9726 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9727 return omit_one_operand (type, integer_zero_node, arg);
9729 if (TREE_CODE (arg) == REAL_CST)
9731 r = TREE_REAL_CST (arg);
9732 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9735 arg = builtin_save_expr (arg);
9736 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9743 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9744 This builtin will generate code to return the appropriate floating
9745 point classification depending on the value of the floating point
9746 number passed in. The possible return values must be supplied as
9747 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9748 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9749 one floating point argument which is "type generic". */
9752 fold_builtin_fpclassify (tree exp)
9754 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9755 arg, type, res, tmp;
9756 enum machine_mode mode;
9760 /* Verify the required arguments in the original call. */
9761 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9762 INTEGER_TYPE, INTEGER_TYPE,
9763 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9766 fp_nan = CALL_EXPR_ARG (exp, 0);
9767 fp_infinite = CALL_EXPR_ARG (exp, 1);
9768 fp_normal = CALL_EXPR_ARG (exp, 2);
9769 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9770 fp_zero = CALL_EXPR_ARG (exp, 4);
9771 arg = CALL_EXPR_ARG (exp, 5);
9772 type = TREE_TYPE (arg);
9773 mode = TYPE_MODE (type);
9774 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
9778 (fabs(x) == Inf ? FP_INFINITE :
9779 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9780 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9782 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9783 build_real (type, dconst0));
9784 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
9786 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9787 real_from_string (&r, buf);
9788 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
9789 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
9791 if (HONOR_INFINITIES (mode))
9794 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9795 build_real (type, r));
9796 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
9799 if (HONOR_NANS (mode))
9801 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
9802 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
9808 /* Fold a call to an unordered comparison function such as
9809 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9810 being called and ARG0 and ARG1 are the arguments for the call.
9811 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9812 the opposite of the desired result. UNORDERED_CODE is used
9813 for modes that can hold NaNs and ORDERED_CODE is used for
9817 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9818 enum tree_code unordered_code,
9819 enum tree_code ordered_code)
9821 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9822 enum tree_code code;
9824 enum tree_code code0, code1;
9825 tree cmp_type = NULL_TREE;
9827 type0 = TREE_TYPE (arg0);
9828 type1 = TREE_TYPE (arg1);
9830 code0 = TREE_CODE (type0);
9831 code1 = TREE_CODE (type1);
9833 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9834 /* Choose the wider of two real types. */
9835 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9837 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9839 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9842 arg0 = fold_convert (cmp_type, arg0);
9843 arg1 = fold_convert (cmp_type, arg1);
9845 if (unordered_code == UNORDERED_EXPR)
9847 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9848 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9849 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9852 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9854 return fold_build1 (TRUTH_NOT_EXPR, type,
9855 fold_build2 (code, type, arg0, arg1));
9858 /* Fold a call to built-in function FNDECL with 0 arguments.
9859 IGNORE is true if the result of the function call is ignored. This
9860 function returns NULL_TREE if no simplification was possible. */
9863 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9865 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9866 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9869 CASE_FLT_FN (BUILT_IN_INF):
9870 case BUILT_IN_INFD32:
9871 case BUILT_IN_INFD64:
9872 case BUILT_IN_INFD128:
9873 return fold_builtin_inf (type, true);
9875 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9876 return fold_builtin_inf (type, false);
9878 case BUILT_IN_CLASSIFY_TYPE:
9879 return fold_builtin_classify_type (NULL_TREE);
9887 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9888 IGNORE is true if the result of the function call is ignored. This
9889 function returns NULL_TREE if no simplification was possible. */
9892 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9894 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9895 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9899 case BUILT_IN_CONSTANT_P:
9901 tree val = fold_builtin_constant_p (arg0);
9903 /* Gimplification will pull the CALL_EXPR for the builtin out of
9904 an if condition. When not optimizing, we'll not CSE it back.
9905 To avoid link error types of regressions, return false now. */
9906 if (!val && !optimize)
9907 val = integer_zero_node;
9912 case BUILT_IN_CLASSIFY_TYPE:
9913 return fold_builtin_classify_type (arg0);
9915 case BUILT_IN_STRLEN:
9916 return fold_builtin_strlen (arg0);
9918 CASE_FLT_FN (BUILT_IN_FABS):
9919 return fold_builtin_fabs (arg0, type);
9923 case BUILT_IN_LLABS:
9924 case BUILT_IN_IMAXABS:
9925 return fold_builtin_abs (arg0, type);
9927 CASE_FLT_FN (BUILT_IN_CONJ):
9928 if (validate_arg (arg0, COMPLEX_TYPE))
9929 return fold_build1 (CONJ_EXPR, type, arg0);
9932 CASE_FLT_FN (BUILT_IN_CREAL):
9933 if (validate_arg (arg0, COMPLEX_TYPE))
9934 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
9937 CASE_FLT_FN (BUILT_IN_CIMAG):
9938 if (validate_arg (arg0, COMPLEX_TYPE))
9939 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9942 CASE_FLT_FN (BUILT_IN_CCOS):
9943 CASE_FLT_FN (BUILT_IN_CCOSH):
9944 /* These functions are "even", i.e. f(x) == f(-x). */
9945 if (validate_arg (arg0, COMPLEX_TYPE))
9947 tree narg = fold_strip_sign_ops (arg0);
9949 return build_call_expr (fndecl, 1, narg);
9953 CASE_FLT_FN (BUILT_IN_CABS):
9954 return fold_builtin_cabs (arg0, type, fndecl);
9956 CASE_FLT_FN (BUILT_IN_CARG):
9957 return fold_builtin_carg (arg0, type);
9959 CASE_FLT_FN (BUILT_IN_SQRT):
9960 return fold_builtin_sqrt (arg0, type);
9962 CASE_FLT_FN (BUILT_IN_CBRT):
9963 return fold_builtin_cbrt (arg0, type);
9965 CASE_FLT_FN (BUILT_IN_ASIN):
9966 if (validate_arg (arg0, REAL_TYPE))
9967 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9968 &dconstm1, &dconst1, true);
9971 CASE_FLT_FN (BUILT_IN_ACOS):
9972 if (validate_arg (arg0, REAL_TYPE))
9973 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9974 &dconstm1, &dconst1, true);
9977 CASE_FLT_FN (BUILT_IN_ATAN):
9978 if (validate_arg (arg0, REAL_TYPE))
9979 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9982 CASE_FLT_FN (BUILT_IN_ASINH):
9983 if (validate_arg (arg0, REAL_TYPE))
9984 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9987 CASE_FLT_FN (BUILT_IN_ACOSH):
9988 if (validate_arg (arg0, REAL_TYPE))
9989 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9990 &dconst1, NULL, true);
9993 CASE_FLT_FN (BUILT_IN_ATANH):
9994 if (validate_arg (arg0, REAL_TYPE))
9995 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9996 &dconstm1, &dconst1, false);
9999 CASE_FLT_FN (BUILT_IN_SIN):
10000 if (validate_arg (arg0, REAL_TYPE))
10001 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10004 CASE_FLT_FN (BUILT_IN_COS):
10005 return fold_builtin_cos (arg0, type, fndecl);
10008 CASE_FLT_FN (BUILT_IN_TAN):
10009 return fold_builtin_tan (arg0, type);
10011 CASE_FLT_FN (BUILT_IN_CEXP):
10012 return fold_builtin_cexp (arg0, type);
10014 CASE_FLT_FN (BUILT_IN_CEXPI):
10015 if (validate_arg (arg0, REAL_TYPE))
10016 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10019 CASE_FLT_FN (BUILT_IN_SINH):
10020 if (validate_arg (arg0, REAL_TYPE))
10021 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10024 CASE_FLT_FN (BUILT_IN_COSH):
10025 return fold_builtin_cosh (arg0, type, fndecl);
10027 CASE_FLT_FN (BUILT_IN_TANH):
10028 if (validate_arg (arg0, REAL_TYPE))
10029 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10032 CASE_FLT_FN (BUILT_IN_ERF):
10033 if (validate_arg (arg0, REAL_TYPE))
10034 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10037 CASE_FLT_FN (BUILT_IN_ERFC):
10038 if (validate_arg (arg0, REAL_TYPE))
10039 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10042 CASE_FLT_FN (BUILT_IN_TGAMMA):
10043 if (validate_arg (arg0, REAL_TYPE))
10044 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10047 CASE_FLT_FN (BUILT_IN_EXP):
10048 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10050 CASE_FLT_FN (BUILT_IN_EXP2):
10051 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10053 CASE_FLT_FN (BUILT_IN_EXP10):
10054 CASE_FLT_FN (BUILT_IN_POW10):
10055 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10057 CASE_FLT_FN (BUILT_IN_EXPM1):
10058 if (validate_arg (arg0, REAL_TYPE))
10059 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10062 CASE_FLT_FN (BUILT_IN_LOG):
10063 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10065 CASE_FLT_FN (BUILT_IN_LOG2):
10066 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10068 CASE_FLT_FN (BUILT_IN_LOG10):
10069 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10071 CASE_FLT_FN (BUILT_IN_LOG1P):
10072 if (validate_arg (arg0, REAL_TYPE))
10073 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10074 &dconstm1, NULL, false);
10077 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10078 CASE_FLT_FN (BUILT_IN_J0):
10079 if (validate_arg (arg0, REAL_TYPE))
10080 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10084 CASE_FLT_FN (BUILT_IN_J1):
10085 if (validate_arg (arg0, REAL_TYPE))
10086 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10090 CASE_FLT_FN (BUILT_IN_Y0):
10091 if (validate_arg (arg0, REAL_TYPE))
10092 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10093 &dconst0, NULL, false);
10096 CASE_FLT_FN (BUILT_IN_Y1):
10097 if (validate_arg (arg0, REAL_TYPE))
10098 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10099 &dconst0, NULL, false);
10103 CASE_FLT_FN (BUILT_IN_NAN):
10104 case BUILT_IN_NAND32:
10105 case BUILT_IN_NAND64:
10106 case BUILT_IN_NAND128:
10107 return fold_builtin_nan (arg0, type, true);
10109 CASE_FLT_FN (BUILT_IN_NANS):
10110 return fold_builtin_nan (arg0, type, false);
10112 CASE_FLT_FN (BUILT_IN_FLOOR):
10113 return fold_builtin_floor (fndecl, arg0);
10115 CASE_FLT_FN (BUILT_IN_CEIL):
10116 return fold_builtin_ceil (fndecl, arg0);
10118 CASE_FLT_FN (BUILT_IN_TRUNC):
10119 return fold_builtin_trunc (fndecl, arg0);
10121 CASE_FLT_FN (BUILT_IN_ROUND):
10122 return fold_builtin_round (fndecl, arg0);
10124 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10125 CASE_FLT_FN (BUILT_IN_RINT):
10126 return fold_trunc_transparent_mathfn (fndecl, arg0);
10128 CASE_FLT_FN (BUILT_IN_LCEIL):
10129 CASE_FLT_FN (BUILT_IN_LLCEIL):
10130 CASE_FLT_FN (BUILT_IN_LFLOOR):
10131 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10132 CASE_FLT_FN (BUILT_IN_LROUND):
10133 CASE_FLT_FN (BUILT_IN_LLROUND):
10134 return fold_builtin_int_roundingfn (fndecl, arg0);
10136 CASE_FLT_FN (BUILT_IN_LRINT):
10137 CASE_FLT_FN (BUILT_IN_LLRINT):
10138 return fold_fixed_mathfn (fndecl, arg0);
10140 case BUILT_IN_BSWAP32:
10141 case BUILT_IN_BSWAP64:
10142 return fold_builtin_bswap (fndecl, arg0);
10144 CASE_INT_FN (BUILT_IN_FFS):
10145 CASE_INT_FN (BUILT_IN_CLZ):
10146 CASE_INT_FN (BUILT_IN_CTZ):
10147 CASE_INT_FN (BUILT_IN_POPCOUNT):
10148 CASE_INT_FN (BUILT_IN_PARITY):
10149 return fold_builtin_bitop (fndecl, arg0);
10151 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10152 return fold_builtin_signbit (arg0, type);
10154 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10155 return fold_builtin_significand (arg0, type);
10157 CASE_FLT_FN (BUILT_IN_ILOGB):
10158 CASE_FLT_FN (BUILT_IN_LOGB):
10159 return fold_builtin_logb (arg0, type);
10161 case BUILT_IN_ISASCII:
10162 return fold_builtin_isascii (arg0);
10164 case BUILT_IN_TOASCII:
10165 return fold_builtin_toascii (arg0);
10167 case BUILT_IN_ISDIGIT:
10168 return fold_builtin_isdigit (arg0);
10170 CASE_FLT_FN (BUILT_IN_FINITE):
10171 case BUILT_IN_FINITED32:
10172 case BUILT_IN_FINITED64:
10173 case BUILT_IN_FINITED128:
10174 case BUILT_IN_ISFINITE:
10175 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10177 CASE_FLT_FN (BUILT_IN_ISINF):
10178 case BUILT_IN_ISINFD32:
10179 case BUILT_IN_ISINFD64:
10180 case BUILT_IN_ISINFD128:
10181 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10183 case BUILT_IN_ISINF_SIGN:
10184 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10186 CASE_FLT_FN (BUILT_IN_ISNAN):
10187 case BUILT_IN_ISNAND32:
10188 case BUILT_IN_ISNAND64:
10189 case BUILT_IN_ISNAND128:
10190 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10192 case BUILT_IN_PRINTF:
10193 case BUILT_IN_PRINTF_UNLOCKED:
10194 case BUILT_IN_VPRINTF:
10195 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10205 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10206 IGNORE is true if the result of the function call is ignored. This
10207 function returns NULL_TREE if no simplification was possible. */
10210 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10212 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10213 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10217 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10218 CASE_FLT_FN (BUILT_IN_JN):
10219 if (validate_arg (arg0, INTEGER_TYPE)
10220 && validate_arg (arg1, REAL_TYPE))
10221 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10224 CASE_FLT_FN (BUILT_IN_YN):
10225 if (validate_arg (arg0, INTEGER_TYPE)
10226 && validate_arg (arg1, REAL_TYPE))
10227 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10231 CASE_FLT_FN (BUILT_IN_DREM):
10232 CASE_FLT_FN (BUILT_IN_REMAINDER):
10233 if (validate_arg (arg0, REAL_TYPE)
10234 && validate_arg(arg1, REAL_TYPE))
10235 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10238 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10239 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10240 if (validate_arg (arg0, REAL_TYPE)
10241 && validate_arg(arg1, POINTER_TYPE))
10242 return do_mpfr_lgamma_r (arg0, arg1, type);
10246 CASE_FLT_FN (BUILT_IN_ATAN2):
10247 if (validate_arg (arg0, REAL_TYPE)
10248 && validate_arg(arg1, REAL_TYPE))
10249 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10252 CASE_FLT_FN (BUILT_IN_FDIM):
10253 if (validate_arg (arg0, REAL_TYPE)
10254 && validate_arg(arg1, REAL_TYPE))
10255 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10258 CASE_FLT_FN (BUILT_IN_HYPOT):
10259 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10261 CASE_FLT_FN (BUILT_IN_LDEXP):
10262 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10263 CASE_FLT_FN (BUILT_IN_SCALBN):
10264 CASE_FLT_FN (BUILT_IN_SCALBLN):
10265 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10267 CASE_FLT_FN (BUILT_IN_FREXP):
10268 return fold_builtin_frexp (arg0, arg1, type);
10270 CASE_FLT_FN (BUILT_IN_MODF):
10271 return fold_builtin_modf (arg0, arg1, type);
10273 case BUILT_IN_BZERO:
10274 return fold_builtin_bzero (arg0, arg1, ignore);
10276 case BUILT_IN_FPUTS:
10277 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10279 case BUILT_IN_FPUTS_UNLOCKED:
10280 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10282 case BUILT_IN_STRSTR:
10283 return fold_builtin_strstr (arg0, arg1, type);
10285 case BUILT_IN_STRCAT:
10286 return fold_builtin_strcat (arg0, arg1);
10288 case BUILT_IN_STRSPN:
10289 return fold_builtin_strspn (arg0, arg1);
10291 case BUILT_IN_STRCSPN:
10292 return fold_builtin_strcspn (arg0, arg1);
10294 case BUILT_IN_STRCHR:
10295 case BUILT_IN_INDEX:
10296 return fold_builtin_strchr (arg0, arg1, type);
10298 case BUILT_IN_STRRCHR:
10299 case BUILT_IN_RINDEX:
10300 return fold_builtin_strrchr (arg0, arg1, type);
10302 case BUILT_IN_STRCPY:
10303 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10305 case BUILT_IN_STRCMP:
10306 return fold_builtin_strcmp (arg0, arg1);
10308 case BUILT_IN_STRPBRK:
10309 return fold_builtin_strpbrk (arg0, arg1, type);
10311 case BUILT_IN_EXPECT:
10312 return fold_builtin_expect (arg0, arg1);
10314 CASE_FLT_FN (BUILT_IN_POW):
10315 return fold_builtin_pow (fndecl, arg0, arg1, type);
10317 CASE_FLT_FN (BUILT_IN_POWI):
10318 return fold_builtin_powi (fndecl, arg0, arg1, type);
10320 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10321 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10323 CASE_FLT_FN (BUILT_IN_FMIN):
10324 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10326 CASE_FLT_FN (BUILT_IN_FMAX):
10327 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10329 case BUILT_IN_ISGREATER:
10330 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10331 case BUILT_IN_ISGREATEREQUAL:
10332 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10333 case BUILT_IN_ISLESS:
10334 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10335 case BUILT_IN_ISLESSEQUAL:
10336 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10337 case BUILT_IN_ISLESSGREATER:
10338 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10339 case BUILT_IN_ISUNORDERED:
10340 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10343 /* We do the folding for va_start in the expander. */
10344 case BUILT_IN_VA_START:
10347 case BUILT_IN_SPRINTF:
10348 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10350 case BUILT_IN_OBJECT_SIZE:
10351 return fold_builtin_object_size (arg0, arg1);
10353 case BUILT_IN_PRINTF:
10354 case BUILT_IN_PRINTF_UNLOCKED:
10355 case BUILT_IN_VPRINTF:
10356 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10358 case BUILT_IN_PRINTF_CHK:
10359 case BUILT_IN_VPRINTF_CHK:
10360 if (!validate_arg (arg0, INTEGER_TYPE)
10361 || TREE_SIDE_EFFECTS (arg0))
10364 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10367 case BUILT_IN_FPRINTF:
10368 case BUILT_IN_FPRINTF_UNLOCKED:
10369 case BUILT_IN_VFPRINTF:
10370 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10379 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10380 and ARG2. IGNORE is true if the result of the function call is ignored.
10381 This function returns NULL_TREE if no simplification was possible. */
10384 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10386 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10387 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10391 CASE_FLT_FN (BUILT_IN_SINCOS):
10392 return fold_builtin_sincos (arg0, arg1, arg2);
10394 CASE_FLT_FN (BUILT_IN_FMA):
10395 if (validate_arg (arg0, REAL_TYPE)
10396 && validate_arg(arg1, REAL_TYPE)
10397 && validate_arg(arg2, REAL_TYPE))
10398 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10401 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10402 CASE_FLT_FN (BUILT_IN_REMQUO):
10403 if (validate_arg (arg0, REAL_TYPE)
10404 && validate_arg(arg1, REAL_TYPE)
10405 && validate_arg(arg2, POINTER_TYPE))
10406 return do_mpfr_remquo (arg0, arg1, arg2);
10410 case BUILT_IN_MEMSET:
10411 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10413 case BUILT_IN_BCOPY:
10414 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10416 case BUILT_IN_MEMCPY:
10417 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10419 case BUILT_IN_MEMPCPY:
10420 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10422 case BUILT_IN_MEMMOVE:
10423 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10425 case BUILT_IN_STRNCAT:
10426 return fold_builtin_strncat (arg0, arg1, arg2);
10428 case BUILT_IN_STRNCPY:
10429 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10431 case BUILT_IN_STRNCMP:
10432 return fold_builtin_strncmp (arg0, arg1, arg2);
10434 case BUILT_IN_MEMCHR:
10435 return fold_builtin_memchr (arg0, arg1, arg2, type);
10437 case BUILT_IN_BCMP:
10438 case BUILT_IN_MEMCMP:
10439 return fold_builtin_memcmp (arg0, arg1, arg2);;
10441 case BUILT_IN_SPRINTF:
10442 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10444 case BUILT_IN_STRCPY_CHK:
10445 case BUILT_IN_STPCPY_CHK:
10446 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10449 case BUILT_IN_STRCAT_CHK:
10450 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10452 case BUILT_IN_PRINTF_CHK:
10453 case BUILT_IN_VPRINTF_CHK:
10454 if (!validate_arg (arg0, INTEGER_TYPE)
10455 || TREE_SIDE_EFFECTS (arg0))
10458 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10461 case BUILT_IN_FPRINTF:
10462 case BUILT_IN_FPRINTF_UNLOCKED:
10463 case BUILT_IN_VFPRINTF:
10464 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10466 case BUILT_IN_FPRINTF_CHK:
10467 case BUILT_IN_VFPRINTF_CHK:
10468 if (!validate_arg (arg1, INTEGER_TYPE)
10469 || TREE_SIDE_EFFECTS (arg1))
10472 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10481 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10482 ARG2, and ARG3. IGNORE is true if the result of the function call is
10483 ignored. This function returns NULL_TREE if no simplification was
10487 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10490 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10494 case BUILT_IN_MEMCPY_CHK:
10495 case BUILT_IN_MEMPCPY_CHK:
10496 case BUILT_IN_MEMMOVE_CHK:
10497 case BUILT_IN_MEMSET_CHK:
10498 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10500 DECL_FUNCTION_CODE (fndecl));
10502 case BUILT_IN_STRNCPY_CHK:
10503 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10505 case BUILT_IN_STRNCAT_CHK:
10506 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10508 case BUILT_IN_FPRINTF_CHK:
10509 case BUILT_IN_VFPRINTF_CHK:
10510 if (!validate_arg (arg1, INTEGER_TYPE)
10511 || TREE_SIDE_EFFECTS (arg1))
10514 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10524 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10525 arguments, where NARGS <= 4. IGNORE is true if the result of the
10526 function call is ignored. This function returns NULL_TREE if no
10527 simplification was possible. Note that this only folds builtins with
10528 fixed argument patterns. Foldings that do varargs-to-varargs
10529 transformations, or that match calls with more than 4 arguments,
10530 need to be handled with fold_builtin_varargs instead. */
10532 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10535 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10537 tree ret = NULL_TREE;
10542 ret = fold_builtin_0 (fndecl, ignore);
10545 ret = fold_builtin_1 (fndecl, args[0], ignore);
10548 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10551 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10554 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10562 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10563 TREE_NO_WARNING (ret) = 1;
10569 /* Builtins with folding operations that operate on "..." arguments
10570 need special handling; we need to store the arguments in a convenient
10571 data structure before attempting any folding. Fortunately there are
10572 only a few builtins that fall into this category. FNDECL is the
10573 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10574 result of the function call is ignored. */
10577 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10579 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10580 tree ret = NULL_TREE;
10584 case BUILT_IN_SPRINTF_CHK:
10585 case BUILT_IN_VSPRINTF_CHK:
10586 ret = fold_builtin_sprintf_chk (exp, fcode);
10589 case BUILT_IN_SNPRINTF_CHK:
10590 case BUILT_IN_VSNPRINTF_CHK:
10591 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10594 case BUILT_IN_FPCLASSIFY:
10595 ret = fold_builtin_fpclassify (exp);
10603 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10604 TREE_NO_WARNING (ret) = 1;
10610 /* A wrapper function for builtin folding that prevents warnings for
10611 "statement without effect" and the like, caused by removing the
10612 call node earlier than the warning is generated. */
10615 fold_call_expr (tree exp, bool ignore)
10617 tree ret = NULL_TREE;
10618 tree fndecl = get_callee_fndecl (exp);
10620 && TREE_CODE (fndecl) == FUNCTION_DECL
10621 && DECL_BUILT_IN (fndecl)
10622 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10623 yet. Defer folding until we see all the arguments
10624 (after inlining). */
10625 && !CALL_EXPR_VA_ARG_PACK (exp))
10627 int nargs = call_expr_nargs (exp);
10629 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10630 instead last argument is __builtin_va_arg_pack (). Defer folding
10631 even in that case, until arguments are finalized. */
10632 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10634 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10636 && TREE_CODE (fndecl2) == FUNCTION_DECL
10637 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10638 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10642 /* FIXME: Don't use a list in this interface. */
10643 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10644 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10647 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10649 tree *args = CALL_EXPR_ARGP (exp);
10650 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10653 ret = fold_builtin_varargs (fndecl, exp, ignore);
10656 /* Propagate location information from original call to
10657 expansion of builtin. Otherwise things like
10658 maybe_emit_chk_warning, that operate on the expansion
10659 of a builtin, will use the wrong location information. */
10660 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10662 tree realret = ret;
10663 if (TREE_CODE (ret) == NOP_EXPR)
10664 realret = TREE_OPERAND (ret, 0);
10665 if (CAN_HAVE_LOCATION_P (realret)
10666 && !EXPR_HAS_LOCATION (realret))
10667 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10677 /* Conveniently construct a function call expression. FNDECL names the
10678 function to be called and ARGLIST is a TREE_LIST of arguments. */
10681 build_function_call_expr (tree fndecl, tree arglist)
10683 tree fntype = TREE_TYPE (fndecl);
10684 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10685 int n = list_length (arglist);
10686 tree *argarray = (tree *) alloca (n * sizeof (tree));
10689 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10690 argarray[i] = TREE_VALUE (arglist);
10691 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10694 /* Conveniently construct a function call expression. FNDECL names the
10695 function to be called, N is the number of arguments, and the "..."
10696 parameters are the argument expressions. */
10699 build_call_expr (tree fndecl, int n, ...)
10702 tree fntype = TREE_TYPE (fndecl);
10703 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10704 tree *argarray = (tree *) alloca (n * sizeof (tree));
10708 for (i = 0; i < n; i++)
10709 argarray[i] = va_arg (ap, tree);
10711 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10714 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10715 N arguments are passed in the array ARGARRAY. */
10718 fold_builtin_call_array (tree type,
10723 tree ret = NULL_TREE;
10727 if (TREE_CODE (fn) == ADDR_EXPR)
10729 tree fndecl = TREE_OPERAND (fn, 0);
10730 if (TREE_CODE (fndecl) == FUNCTION_DECL
10731 && DECL_BUILT_IN (fndecl))
10733 /* If last argument is __builtin_va_arg_pack (), arguments to this
10734 function are not finalized yet. Defer folding until they are. */
10735 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10737 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10739 && TREE_CODE (fndecl2) == FUNCTION_DECL
10740 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10741 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10742 return build_call_array (type, fn, n, argarray);
10744 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10746 tree arglist = NULL_TREE;
10747 for (i = n - 1; i >= 0; i--)
10748 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10749 ret = targetm.fold_builtin (fndecl, arglist, false);
10753 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10755 /* First try the transformations that don't require consing up
10757 ret = fold_builtin_n (fndecl, argarray, n, false);
10762 /* If we got this far, we need to build an exp. */
10763 exp = build_call_array (type, fn, n, argarray);
10764 ret = fold_builtin_varargs (fndecl, exp, false);
10765 return ret ? ret : exp;
10769 return build_call_array (type, fn, n, argarray);
10772 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10773 along with N new arguments specified as the "..." parameters. SKIP
10774 is the number of arguments in EXP to be omitted. This function is used
10775 to do varargs-to-varargs transformations. */
10778 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10780 int oldnargs = call_expr_nargs (exp);
10781 int nargs = oldnargs - skip + n;
10782 tree fntype = TREE_TYPE (fndecl);
10783 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10791 buffer = alloca (nargs * sizeof (tree));
10793 for (i = 0; i < n; i++)
10794 buffer[i] = va_arg (ap, tree);
10796 for (j = skip; j < oldnargs; j++, i++)
10797 buffer[i] = CALL_EXPR_ARG (exp, j);
10800 buffer = CALL_EXPR_ARGP (exp) + skip;
10802 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10805 /* Validate a single argument ARG against a tree code CODE representing
10809 validate_arg (const_tree arg, enum tree_code code)
10813 else if (code == POINTER_TYPE)
10814 return POINTER_TYPE_P (TREE_TYPE (arg));
10815 else if (code == INTEGER_TYPE)
10816 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10817 return code == TREE_CODE (TREE_TYPE (arg));
10820 /* This function validates the types of a function call argument list
10821 against a specified list of tree_codes. If the last specifier is a 0,
10822 that represents an ellipses, otherwise the last specifier must be a
10826 validate_arglist (const_tree callexpr, ...)
10828 enum tree_code code;
10831 const_call_expr_arg_iterator iter;
10834 va_start (ap, callexpr);
10835 init_const_call_expr_arg_iterator (callexpr, &iter);
10839 code = va_arg (ap, enum tree_code);
10843 /* This signifies an ellipses, any further arguments are all ok. */
10847 /* This signifies an endlink, if no arguments remain, return
10848 true, otherwise return false. */
10849 res = !more_const_call_expr_args_p (&iter);
10852 /* If no parameters remain or the parameter's code does not
10853 match the specified code, return false. Otherwise continue
10854 checking any remaining arguments. */
10855 arg = next_const_call_expr_arg (&iter);
10856 if (!validate_arg (arg, code))
10863 /* We need gotos here since we can only have one VA_CLOSE in a
10871 /* Default target-specific builtin expander that does nothing. */
10874 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10875 rtx target ATTRIBUTE_UNUSED,
10876 rtx subtarget ATTRIBUTE_UNUSED,
10877 enum machine_mode mode ATTRIBUTE_UNUSED,
10878 int ignore ATTRIBUTE_UNUSED)
10883 /* Returns true is EXP represents data that would potentially reside
10884 in a readonly section. */
10887 readonly_data_expr (tree exp)
10891 if (TREE_CODE (exp) != ADDR_EXPR)
10894 exp = get_base_address (TREE_OPERAND (exp, 0));
10898 /* Make sure we call decl_readonly_section only for trees it
10899 can handle (since it returns true for everything it doesn't
10901 if (TREE_CODE (exp) == STRING_CST
10902 || TREE_CODE (exp) == CONSTRUCTOR
10903 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10904 return decl_readonly_section (exp, 0);
10909 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10910 to the call, and TYPE is its return type.
10912 Return NULL_TREE if no simplification was possible, otherwise return the
10913 simplified form of the call as a tree.
10915 The simplified form may be a constant or other expression which
10916 computes the same value, but in a more efficient manner (including
10917 calls to other builtin functions).
10919 The call may contain arguments which need to be evaluated, but
10920 which are not useful to determine the result of the call. In
10921 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10922 COMPOUND_EXPR will be an argument which must be evaluated.
10923 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10924 COMPOUND_EXPR in the chain will contain the tree for the simplified
10925 form of the builtin function call. */
10928 fold_builtin_strstr (tree s1, tree s2, tree type)
10930 if (!validate_arg (s1, POINTER_TYPE)
10931 || !validate_arg (s2, POINTER_TYPE))
10936 const char *p1, *p2;
10938 p2 = c_getstr (s2);
10942 p1 = c_getstr (s1);
10945 const char *r = strstr (p1, p2);
10949 return build_int_cst (TREE_TYPE (s1), 0);
10951 /* Return an offset into the constant string argument. */
10952 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10953 s1, size_int (r - p1));
10954 return fold_convert (type, tem);
10957 /* The argument is const char *, and the result is char *, so we need
10958 a type conversion here to avoid a warning. */
10960 return fold_convert (type, s1);
10965 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10969 /* New argument list transforming strstr(s1, s2) to
10970 strchr(s1, s2[0]). */
10971 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10975 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10976 the call, and TYPE is its return type.
10978 Return NULL_TREE if no simplification was possible, otherwise return the
10979 simplified form of the call as a tree.
10981 The simplified form may be a constant or other expression which
10982 computes the same value, but in a more efficient manner (including
10983 calls to other builtin functions).
10985 The call may contain arguments which need to be evaluated, but
10986 which are not useful to determine the result of the call. In
10987 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10988 COMPOUND_EXPR will be an argument which must be evaluated.
10989 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10990 COMPOUND_EXPR in the chain will contain the tree for the simplified
10991 form of the builtin function call. */
10994 fold_builtin_strchr (tree s1, tree s2, tree type)
10996 if (!validate_arg (s1, POINTER_TYPE)
10997 || !validate_arg (s2, INTEGER_TYPE))
11003 if (TREE_CODE (s2) != INTEGER_CST)
11006 p1 = c_getstr (s1);
11013 if (target_char_cast (s2, &c))
11016 r = strchr (p1, c);
11019 return build_int_cst (TREE_TYPE (s1), 0);
11021 /* Return an offset into the constant string argument. */
11022 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11023 s1, size_int (r - p1));
11024 return fold_convert (type, tem);
11030 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11031 the call, and TYPE is its return type.
11033 Return NULL_TREE if no simplification was possible, otherwise return the
11034 simplified form of the call as a tree.
11036 The simplified form may be a constant or other expression which
11037 computes the same value, but in a more efficient manner (including
11038 calls to other builtin functions).
11040 The call may contain arguments which need to be evaluated, but
11041 which are not useful to determine the result of the call. In
11042 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11043 COMPOUND_EXPR will be an argument which must be evaluated.
11044 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11045 COMPOUND_EXPR in the chain will contain the tree for the simplified
11046 form of the builtin function call. */
11049 fold_builtin_strrchr (tree s1, tree s2, tree type)
11051 if (!validate_arg (s1, POINTER_TYPE)
11052 || !validate_arg (s2, INTEGER_TYPE))
11059 if (TREE_CODE (s2) != INTEGER_CST)
11062 p1 = c_getstr (s1);
11069 if (target_char_cast (s2, &c))
11072 r = strrchr (p1, c);
11075 return build_int_cst (TREE_TYPE (s1), 0);
11077 /* Return an offset into the constant string argument. */
11078 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11079 s1, size_int (r - p1));
11080 return fold_convert (type, tem);
11083 if (! integer_zerop (s2))
11086 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11090 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11091 return build_call_expr (fn, 2, s1, s2);
11095 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11096 to the call, and TYPE is its return type.
11098 Return NULL_TREE if no simplification was possible, otherwise return the
11099 simplified form of the call as a tree.
11101 The simplified form may be a constant or other expression which
11102 computes the same value, but in a more efficient manner (including
11103 calls to other builtin functions).
11105 The call may contain arguments which need to be evaluated, but
11106 which are not useful to determine the result of the call. In
11107 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11108 COMPOUND_EXPR will be an argument which must be evaluated.
11109 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11110 COMPOUND_EXPR in the chain will contain the tree for the simplified
11111 form of the builtin function call. */
11114 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11116 if (!validate_arg (s1, POINTER_TYPE)
11117 || !validate_arg (s2, POINTER_TYPE))
11122 const char *p1, *p2;
11124 p2 = c_getstr (s2);
11128 p1 = c_getstr (s1);
11131 const char *r = strpbrk (p1, p2);
11135 return build_int_cst (TREE_TYPE (s1), 0);
11137 /* Return an offset into the constant string argument. */
11138 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11139 s1, size_int (r - p1));
11140 return fold_convert (type, tem);
11144 /* strpbrk(x, "") == NULL.
11145 Evaluate and ignore s1 in case it had side-effects. */
11146 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11149 return NULL_TREE; /* Really call strpbrk. */
11151 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11155 /* New argument list transforming strpbrk(s1, s2) to
11156 strchr(s1, s2[0]). */
11157 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11161 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11164 Return NULL_TREE if no simplification was possible, otherwise return the
11165 simplified form of the call as a tree.
11167 The simplified form may be a constant or other expression which
11168 computes the same value, but in a more efficient manner (including
11169 calls to other builtin functions).
11171 The call may contain arguments which need to be evaluated, but
11172 which are not useful to determine the result of the call. In
11173 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11174 COMPOUND_EXPR will be an argument which must be evaluated.
11175 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11176 COMPOUND_EXPR in the chain will contain the tree for the simplified
11177 form of the builtin function call. */
11180 fold_builtin_strcat (tree dst, tree src)
11182 if (!validate_arg (dst, POINTER_TYPE)
11183 || !validate_arg (src, POINTER_TYPE))
11187 const char *p = c_getstr (src);
11189 /* If the string length is zero, return the dst parameter. */
11190 if (p && *p == '\0')
11197 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11198 arguments to the call.
11200 Return NULL_TREE if no simplification was possible, otherwise return the
11201 simplified form of the call as a tree.
11203 The simplified form may be a constant or other expression which
11204 computes the same value, but in a more efficient manner (including
11205 calls to other builtin functions).
11207 The call may contain arguments which need to be evaluated, but
11208 which are not useful to determine the result of the call. In
11209 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11210 COMPOUND_EXPR will be an argument which must be evaluated.
11211 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11212 COMPOUND_EXPR in the chain will contain the tree for the simplified
11213 form of the builtin function call. */
11216 fold_builtin_strncat (tree dst, tree src, tree len)
11218 if (!validate_arg (dst, POINTER_TYPE)
11219 || !validate_arg (src, POINTER_TYPE)
11220 || !validate_arg (len, INTEGER_TYPE))
11224 const char *p = c_getstr (src);
11226 /* If the requested length is zero, or the src parameter string
11227 length is zero, return the dst parameter. */
11228 if (integer_zerop (len) || (p && *p == '\0'))
11229 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11231 /* If the requested len is greater than or equal to the string
11232 length, call strcat. */
11233 if (TREE_CODE (len) == INTEGER_CST && p
11234 && compare_tree_int (len, strlen (p)) >= 0)
11236 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11238 /* If the replacement _DECL isn't initialized, don't do the
11243 return build_call_expr (fn, 2, dst, src);
11249 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11252 Return NULL_TREE if no simplification was possible, otherwise return the
11253 simplified form of the call as a tree.
11255 The simplified form may be a constant or other expression which
11256 computes the same value, but in a more efficient manner (including
11257 calls to other builtin functions).
11259 The call may contain arguments which need to be evaluated, but
11260 which are not useful to determine the result of the call. In
11261 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11262 COMPOUND_EXPR will be an argument which must be evaluated.
11263 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11264 COMPOUND_EXPR in the chain will contain the tree for the simplified
11265 form of the builtin function call. */
11268 fold_builtin_strspn (tree s1, tree s2)
11270 if (!validate_arg (s1, POINTER_TYPE)
11271 || !validate_arg (s2, POINTER_TYPE))
11275 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11277 /* If both arguments are constants, evaluate at compile-time. */
11280 const size_t r = strspn (p1, p2);
11281 return size_int (r);
11284 /* If either argument is "", return NULL_TREE. */
11285 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11286 /* Evaluate and ignore both arguments in case either one has
11288 return omit_two_operands (integer_type_node, integer_zero_node,
11294 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11297 Return NULL_TREE if no simplification was possible, otherwise return the
11298 simplified form of the call as a tree.
11300 The simplified form may be a constant or other expression which
11301 computes the same value, but in a more efficient manner (including
11302 calls to other builtin functions).
11304 The call may contain arguments which need to be evaluated, but
11305 which are not useful to determine the result of the call. In
11306 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11307 COMPOUND_EXPR will be an argument which must be evaluated.
11308 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11309 COMPOUND_EXPR in the chain will contain the tree for the simplified
11310 form of the builtin function call. */
11313 fold_builtin_strcspn (tree s1, tree s2)
11315 if (!validate_arg (s1, POINTER_TYPE)
11316 || !validate_arg (s2, POINTER_TYPE))
11320 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11322 /* If both arguments are constants, evaluate at compile-time. */
11325 const size_t r = strcspn (p1, p2);
11326 return size_int (r);
11329 /* If the first argument is "", return NULL_TREE. */
11330 if (p1 && *p1 == '\0')
11332 /* Evaluate and ignore argument s2 in case it has
11334 return omit_one_operand (integer_type_node,
11335 integer_zero_node, s2);
11338 /* If the second argument is "", return __builtin_strlen(s1). */
11339 if (p2 && *p2 == '\0')
11341 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11343 /* If the replacement _DECL isn't initialized, don't do the
11348 return build_call_expr (fn, 1, s1);
11354 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11355 to the call. IGNORE is true if the value returned
11356 by the builtin will be ignored. UNLOCKED is true is true if this
11357 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11358 the known length of the string. Return NULL_TREE if no simplification
11362 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11364 /* If we're using an unlocked function, assume the other unlocked
11365 functions exist explicitly. */
11366 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11367 : implicit_built_in_decls[BUILT_IN_FPUTC];
11368 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11369 : implicit_built_in_decls[BUILT_IN_FWRITE];
11371 /* If the return value is used, don't do the transformation. */
11375 /* Verify the arguments in the original call. */
11376 if (!validate_arg (arg0, POINTER_TYPE)
11377 || !validate_arg (arg1, POINTER_TYPE))
11381 len = c_strlen (arg0, 0);
11383 /* Get the length of the string passed to fputs. If the length
11384 can't be determined, punt. */
11386 || TREE_CODE (len) != INTEGER_CST)
11389 switch (compare_tree_int (len, 1))
11391 case -1: /* length is 0, delete the call entirely . */
11392 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11394 case 0: /* length is 1, call fputc. */
11396 const char *p = c_getstr (arg0);
11401 return build_call_expr (fn_fputc, 2,
11402 build_int_cst (NULL_TREE, p[0]), arg1);
11408 case 1: /* length is greater than 1, call fwrite. */
11410 /* If optimizing for size keep fputs. */
11413 /* New argument list transforming fputs(string, stream) to
11414 fwrite(string, 1, len, stream). */
11416 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11421 gcc_unreachable ();
11426 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11427 produced. False otherwise. This is done so that we don't output the error
11428 or warning twice or three times. */
11430 fold_builtin_next_arg (tree exp, bool va_start_p)
11432 tree fntype = TREE_TYPE (current_function_decl);
11433 int nargs = call_expr_nargs (exp);
11436 if (TYPE_ARG_TYPES (fntype) == 0
11437 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11438 == void_type_node))
11440 error ("%<va_start%> used in function with fixed args");
11446 if (va_start_p && (nargs != 2))
11448 error ("wrong number of arguments to function %<va_start%>");
11451 arg = CALL_EXPR_ARG (exp, 1);
11453 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11454 when we checked the arguments and if needed issued a warning. */
11459 /* Evidently an out of date version of <stdarg.h>; can't validate
11460 va_start's second argument, but can still work as intended. */
11461 warning (0, "%<__builtin_next_arg%> called without an argument");
11464 else if (nargs > 1)
11466 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11469 arg = CALL_EXPR_ARG (exp, 0);
11472 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11473 or __builtin_next_arg (0) the first time we see it, after checking
11474 the arguments and if needed issuing a warning. */
11475 if (!integer_zerop (arg))
11477 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11479 /* Strip off all nops for the sake of the comparison. This
11480 is not quite the same as STRIP_NOPS. It does more.
11481 We must also strip off INDIRECT_EXPR for C++ reference
11483 while (CONVERT_EXPR_P (arg)
11484 || TREE_CODE (arg) == INDIRECT_REF)
11485 arg = TREE_OPERAND (arg, 0);
11486 if (arg != last_parm)
11488 /* FIXME: Sometimes with the tree optimizers we can get the
11489 not the last argument even though the user used the last
11490 argument. We just warn and set the arg to be the last
11491 argument so that we will get wrong-code because of
11493 warning (0, "second parameter of %<va_start%> not last named argument");
11495 /* We want to verify the second parameter just once before the tree
11496 optimizers are run and then avoid keeping it in the tree,
11497 as otherwise we could warn even for correct code like:
11498 void foo (int i, ...)
11499 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11501 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11503 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11509 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11510 ORIG may be null if this is a 2-argument call. We don't attempt to
11511 simplify calls with more than 3 arguments.
11513 Return NULL_TREE if no simplification was possible, otherwise return the
11514 simplified form of the call as a tree. If IGNORED is true, it means that
11515 the caller does not use the returned value of the function. */
11518 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11521 const char *fmt_str = NULL;
11523 /* Verify the required arguments in the original call. We deal with two
11524 types of sprintf() calls: 'sprintf (str, fmt)' and
11525 'sprintf (dest, "%s", orig)'. */
11526 if (!validate_arg (dest, POINTER_TYPE)
11527 || !validate_arg (fmt, POINTER_TYPE))
11529 if (orig && !validate_arg (orig, POINTER_TYPE))
11532 /* Check whether the format is a literal string constant. */
11533 fmt_str = c_getstr (fmt);
11534 if (fmt_str == NULL)
11538 retval = NULL_TREE;
11540 if (!init_target_chars ())
11543 /* If the format doesn't contain % args or %%, use strcpy. */
11544 if (strchr (fmt_str, target_percent) == NULL)
11546 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11551 /* Don't optimize sprintf (buf, "abc", ptr++). */
11555 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11556 'format' is known to contain no % formats. */
11557 call = build_call_expr (fn, 2, dest, fmt);
11559 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11562 /* If the format is "%s", use strcpy if the result isn't used. */
11563 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11566 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11571 /* Don't crash on sprintf (str1, "%s"). */
11575 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11578 retval = c_strlen (orig, 1);
11579 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11582 call = build_call_expr (fn, 2, dest, orig);
11585 if (call && retval)
11587 retval = fold_convert
11588 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11590 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11596 /* Expand a call EXP to __builtin_object_size. */
11599 expand_builtin_object_size (tree exp)
11602 int object_size_type;
11603 tree fndecl = get_callee_fndecl (exp);
11605 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11607 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11609 expand_builtin_trap ();
11613 ost = CALL_EXPR_ARG (exp, 1);
11616 if (TREE_CODE (ost) != INTEGER_CST
11617 || tree_int_cst_sgn (ost) < 0
11618 || compare_tree_int (ost, 3) > 0)
11620 error ("%Klast argument of %D is not integer constant between 0 and 3",
11622 expand_builtin_trap ();
11626 object_size_type = tree_low_cst (ost, 0);
11628 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11631 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11632 FCODE is the BUILT_IN_* to use.
11633 Return NULL_RTX if we failed; the caller should emit a normal call,
11634 otherwise try to get the result in TARGET, if convenient (and in
11635 mode MODE if that's convenient). */
11638 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11639 enum built_in_function fcode)
11641 tree dest, src, len, size;
11643 if (!validate_arglist (exp,
11645 fcode == BUILT_IN_MEMSET_CHK
11646 ? INTEGER_TYPE : POINTER_TYPE,
11647 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11650 dest = CALL_EXPR_ARG (exp, 0);
11651 src = CALL_EXPR_ARG (exp, 1);
11652 len = CALL_EXPR_ARG (exp, 2);
11653 size = CALL_EXPR_ARG (exp, 3);
11655 if (! host_integerp (size, 1))
11658 if (host_integerp (len, 1) || integer_all_onesp (size))
11662 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11664 warning (0, "%Kcall to %D will always overflow destination buffer",
11665 exp, get_callee_fndecl (exp));
11670 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11671 mem{cpy,pcpy,move,set} is available. */
11674 case BUILT_IN_MEMCPY_CHK:
11675 fn = built_in_decls[BUILT_IN_MEMCPY];
11677 case BUILT_IN_MEMPCPY_CHK:
11678 fn = built_in_decls[BUILT_IN_MEMPCPY];
11680 case BUILT_IN_MEMMOVE_CHK:
11681 fn = built_in_decls[BUILT_IN_MEMMOVE];
11683 case BUILT_IN_MEMSET_CHK:
11684 fn = built_in_decls[BUILT_IN_MEMSET];
11693 fn = build_call_expr (fn, 3, dest, src, len);
11694 STRIP_TYPE_NOPS (fn);
11695 while (TREE_CODE (fn) == COMPOUND_EXPR)
11697 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11699 fn = TREE_OPERAND (fn, 1);
11701 if (TREE_CODE (fn) == CALL_EXPR)
11702 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11703 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11705 else if (fcode == BUILT_IN_MEMSET_CHK)
11709 unsigned int dest_align
11710 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11712 /* If DEST is not a pointer type, call the normal function. */
11713 if (dest_align == 0)
11716 /* If SRC and DEST are the same (and not volatile), do nothing. */
11717 if (operand_equal_p (src, dest, 0))
11721 if (fcode != BUILT_IN_MEMPCPY_CHK)
11723 /* Evaluate and ignore LEN in case it has side-effects. */
11724 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11725 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11728 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11729 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11732 /* __memmove_chk special case. */
11733 if (fcode == BUILT_IN_MEMMOVE_CHK)
11735 unsigned int src_align
11736 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11738 if (src_align == 0)
11741 /* If src is categorized for a readonly section we can use
11742 normal __memcpy_chk. */
11743 if (readonly_data_expr (src))
11745 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11748 fn = build_call_expr (fn, 4, dest, src, len, size);
11749 STRIP_TYPE_NOPS (fn);
11750 while (TREE_CODE (fn) == COMPOUND_EXPR)
11752 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11754 fn = TREE_OPERAND (fn, 1);
11756 if (TREE_CODE (fn) == CALL_EXPR)
11757 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11758 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11765 /* Emit warning if a buffer overflow is detected at compile time. */
11768 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11775 case BUILT_IN_STRCPY_CHK:
11776 case BUILT_IN_STPCPY_CHK:
11777 /* For __strcat_chk the warning will be emitted only if overflowing
11778 by at least strlen (dest) + 1 bytes. */
11779 case BUILT_IN_STRCAT_CHK:
11780 len = CALL_EXPR_ARG (exp, 1);
11781 size = CALL_EXPR_ARG (exp, 2);
11784 case BUILT_IN_STRNCAT_CHK:
11785 case BUILT_IN_STRNCPY_CHK:
11786 len = CALL_EXPR_ARG (exp, 2);
11787 size = CALL_EXPR_ARG (exp, 3);
11789 case BUILT_IN_SNPRINTF_CHK:
11790 case BUILT_IN_VSNPRINTF_CHK:
11791 len = CALL_EXPR_ARG (exp, 1);
11792 size = CALL_EXPR_ARG (exp, 3);
11795 gcc_unreachable ();
11801 if (! host_integerp (size, 1) || integer_all_onesp (size))
11806 len = c_strlen (len, 1);
11807 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11810 else if (fcode == BUILT_IN_STRNCAT_CHK)
11812 tree src = CALL_EXPR_ARG (exp, 1);
11813 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11815 src = c_strlen (src, 1);
11816 if (! src || ! host_integerp (src, 1))
11818 warning (0, "%Kcall to %D might overflow destination buffer",
11819 exp, get_callee_fndecl (exp));
11822 else if (tree_int_cst_lt (src, size))
11825 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11828 warning (0, "%Kcall to %D will always overflow destination buffer",
11829 exp, get_callee_fndecl (exp));
11832 /* Emit warning if a buffer overflow is detected at compile time
11833 in __sprintf_chk/__vsprintf_chk calls. */
11836 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11838 tree dest, size, len, fmt, flag;
11839 const char *fmt_str;
11840 int nargs = call_expr_nargs (exp);
11842 /* Verify the required arguments in the original call. */
11846 dest = CALL_EXPR_ARG (exp, 0);
11847 flag = CALL_EXPR_ARG (exp, 1);
11848 size = CALL_EXPR_ARG (exp, 2);
11849 fmt = CALL_EXPR_ARG (exp, 3);
11851 if (! host_integerp (size, 1) || integer_all_onesp (size))
11854 /* Check whether the format is a literal string constant. */
11855 fmt_str = c_getstr (fmt);
11856 if (fmt_str == NULL)
11859 if (!init_target_chars ())
11862 /* If the format doesn't contain % args or %%, we know its size. */
11863 if (strchr (fmt_str, target_percent) == 0)
11864 len = build_int_cstu (size_type_node, strlen (fmt_str));
11865 /* If the format is "%s" and first ... argument is a string literal,
11867 else if (fcode == BUILT_IN_SPRINTF_CHK
11868 && strcmp (fmt_str, target_percent_s) == 0)
11874 arg = CALL_EXPR_ARG (exp, 4);
11875 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11878 len = c_strlen (arg, 1);
11879 if (!len || ! host_integerp (len, 1))
11885 if (! tree_int_cst_lt (len, size))
11887 warning (0, "%Kcall to %D will always overflow destination buffer",
11888 exp, get_callee_fndecl (exp));
11892 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11896 fold_builtin_object_size (tree ptr, tree ost)
11898 tree ret = NULL_TREE;
11899 int object_size_type;
11901 if (!validate_arg (ptr, POINTER_TYPE)
11902 || !validate_arg (ost, INTEGER_TYPE))
11907 if (TREE_CODE (ost) != INTEGER_CST
11908 || tree_int_cst_sgn (ost) < 0
11909 || compare_tree_int (ost, 3) > 0)
11912 object_size_type = tree_low_cst (ost, 0);
11914 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11915 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11916 and (size_t) 0 for types 2 and 3. */
11917 if (TREE_SIDE_EFFECTS (ptr))
11918 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11920 if (TREE_CODE (ptr) == ADDR_EXPR)
11921 ret = build_int_cstu (size_type_node,
11922 compute_builtin_object_size (ptr, object_size_type));
11924 else if (TREE_CODE (ptr) == SSA_NAME)
11926 unsigned HOST_WIDE_INT bytes;
11928 /* If object size is not known yet, delay folding until
11929 later. Maybe subsequent passes will help determining
11931 bytes = compute_builtin_object_size (ptr, object_size_type);
11932 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11934 ret = build_int_cstu (size_type_node, bytes);
11939 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11940 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11941 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11948 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11949 DEST, SRC, LEN, and SIZE are the arguments to the call.
11950 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11951 code of the builtin. If MAXLEN is not NULL, it is maximum length
11952 passed as third argument. */
11955 fold_builtin_memory_chk (tree fndecl,
11956 tree dest, tree src, tree len, tree size,
11957 tree maxlen, bool ignore,
11958 enum built_in_function fcode)
11962 if (!validate_arg (dest, POINTER_TYPE)
11963 || !validate_arg (src,
11964 (fcode == BUILT_IN_MEMSET_CHK
11965 ? INTEGER_TYPE : POINTER_TYPE))
11966 || !validate_arg (len, INTEGER_TYPE)
11967 || !validate_arg (size, INTEGER_TYPE))
11970 /* If SRC and DEST are the same (and not volatile), return DEST
11971 (resp. DEST+LEN for __mempcpy_chk). */
11972 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11974 if (fcode != BUILT_IN_MEMPCPY_CHK)
11975 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11978 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11979 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
11983 if (! host_integerp (size, 1))
11986 if (! integer_all_onesp (size))
11988 if (! host_integerp (len, 1))
11990 /* If LEN is not constant, try MAXLEN too.
11991 For MAXLEN only allow optimizing into non-_ocs function
11992 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11993 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11995 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11997 /* (void) __mempcpy_chk () can be optimized into
11998 (void) __memcpy_chk (). */
11999 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12003 return build_call_expr (fn, 4, dest, src, len, size);
12011 if (tree_int_cst_lt (size, maxlen))
12016 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12017 mem{cpy,pcpy,move,set} is available. */
12020 case BUILT_IN_MEMCPY_CHK:
12021 fn = built_in_decls[BUILT_IN_MEMCPY];
12023 case BUILT_IN_MEMPCPY_CHK:
12024 fn = built_in_decls[BUILT_IN_MEMPCPY];
12026 case BUILT_IN_MEMMOVE_CHK:
12027 fn = built_in_decls[BUILT_IN_MEMMOVE];
12029 case BUILT_IN_MEMSET_CHK:
12030 fn = built_in_decls[BUILT_IN_MEMSET];
12039 return build_call_expr (fn, 3, dest, src, len);
12042 /* Fold a call to the __st[rp]cpy_chk builtin.
12043 DEST, SRC, and SIZE are the arguments to the call.
12044 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12045 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12046 strings passed as second argument. */
12049 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12050 tree maxlen, bool ignore,
12051 enum built_in_function fcode)
12055 if (!validate_arg (dest, POINTER_TYPE)
12056 || !validate_arg (src, POINTER_TYPE)
12057 || !validate_arg (size, INTEGER_TYPE))
12060 /* If SRC and DEST are the same (and not volatile), return DEST. */
12061 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12062 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12064 if (! host_integerp (size, 1))
12067 if (! integer_all_onesp (size))
12069 len = c_strlen (src, 1);
12070 if (! len || ! host_integerp (len, 1))
12072 /* If LEN is not constant, try MAXLEN too.
12073 For MAXLEN only allow optimizing into non-_ocs function
12074 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12075 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12077 if (fcode == BUILT_IN_STPCPY_CHK)
12082 /* If return value of __stpcpy_chk is ignored,
12083 optimize into __strcpy_chk. */
12084 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12088 return build_call_expr (fn, 3, dest, src, size);
12091 if (! len || TREE_SIDE_EFFECTS (len))
12094 /* If c_strlen returned something, but not a constant,
12095 transform __strcpy_chk into __memcpy_chk. */
12096 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12100 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12101 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12102 build_call_expr (fn, 4,
12103 dest, src, len, size));
12109 if (! tree_int_cst_lt (maxlen, size))
12113 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12114 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12115 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12119 return build_call_expr (fn, 2, dest, src);
12122 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12123 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12124 length passed as third argument. */
12127 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12132 if (!validate_arg (dest, POINTER_TYPE)
12133 || !validate_arg (src, POINTER_TYPE)
12134 || !validate_arg (len, INTEGER_TYPE)
12135 || !validate_arg (size, INTEGER_TYPE))
12138 if (! host_integerp (size, 1))
12141 if (! integer_all_onesp (size))
12143 if (! host_integerp (len, 1))
12145 /* If LEN is not constant, try MAXLEN too.
12146 For MAXLEN only allow optimizing into non-_ocs function
12147 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12148 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12154 if (tree_int_cst_lt (size, maxlen))
12158 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12159 fn = built_in_decls[BUILT_IN_STRNCPY];
12163 return build_call_expr (fn, 3, dest, src, len);
12166 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12167 are the arguments to the call. */
12170 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12175 if (!validate_arg (dest, POINTER_TYPE)
12176 || !validate_arg (src, POINTER_TYPE)
12177 || !validate_arg (size, INTEGER_TYPE))
12180 p = c_getstr (src);
12181 /* If the SRC parameter is "", return DEST. */
12182 if (p && *p == '\0')
12183 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12185 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12188 /* If __builtin_strcat_chk is used, assume strcat is available. */
12189 fn = built_in_decls[BUILT_IN_STRCAT];
12193 return build_call_expr (fn, 2, dest, src);
12196 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12200 fold_builtin_strncat_chk (tree fndecl,
12201 tree dest, tree src, tree len, tree size)
12206 if (!validate_arg (dest, POINTER_TYPE)
12207 || !validate_arg (src, POINTER_TYPE)
12208 || !validate_arg (size, INTEGER_TYPE)
12209 || !validate_arg (size, INTEGER_TYPE))
12212 p = c_getstr (src);
12213 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12214 if (p && *p == '\0')
12215 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12216 else if (integer_zerop (len))
12217 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12219 if (! host_integerp (size, 1))
12222 if (! integer_all_onesp (size))
12224 tree src_len = c_strlen (src, 1);
12226 && host_integerp (src_len, 1)
12227 && host_integerp (len, 1)
12228 && ! tree_int_cst_lt (len, src_len))
12230 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12231 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12235 return build_call_expr (fn, 3, dest, src, size);
12240 /* If __builtin_strncat_chk is used, assume strncat is available. */
12241 fn = built_in_decls[BUILT_IN_STRNCAT];
12245 return build_call_expr (fn, 3, dest, src, len);
12248 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12249 a normal call should be emitted rather than expanding the function
12250 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12253 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12255 tree dest, size, len, fn, fmt, flag;
12256 const char *fmt_str;
12257 int nargs = call_expr_nargs (exp);
12259 /* Verify the required arguments in the original call. */
12262 dest = CALL_EXPR_ARG (exp, 0);
12263 if (!validate_arg (dest, POINTER_TYPE))
12265 flag = CALL_EXPR_ARG (exp, 1);
12266 if (!validate_arg (flag, INTEGER_TYPE))
12268 size = CALL_EXPR_ARG (exp, 2);
12269 if (!validate_arg (size, INTEGER_TYPE))
12271 fmt = CALL_EXPR_ARG (exp, 3);
12272 if (!validate_arg (fmt, POINTER_TYPE))
12275 if (! host_integerp (size, 1))
12280 if (!init_target_chars ())
12283 /* Check whether the format is a literal string constant. */
12284 fmt_str = c_getstr (fmt);
12285 if (fmt_str != NULL)
12287 /* If the format doesn't contain % args or %%, we know the size. */
12288 if (strchr (fmt_str, target_percent) == 0)
12290 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12291 len = build_int_cstu (size_type_node, strlen (fmt_str));
12293 /* If the format is "%s" and first ... argument is a string literal,
12294 we know the size too. */
12295 else if (fcode == BUILT_IN_SPRINTF_CHK
12296 && strcmp (fmt_str, target_percent_s) == 0)
12302 arg = CALL_EXPR_ARG (exp, 4);
12303 if (validate_arg (arg, POINTER_TYPE))
12305 len = c_strlen (arg, 1);
12306 if (! len || ! host_integerp (len, 1))
12313 if (! integer_all_onesp (size))
12315 if (! len || ! tree_int_cst_lt (len, size))
12319 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12320 or if format doesn't contain % chars or is "%s". */
12321 if (! integer_zerop (flag))
12323 if (fmt_str == NULL)
12325 if (strchr (fmt_str, target_percent) != NULL
12326 && strcmp (fmt_str, target_percent_s))
12330 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12331 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12332 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12336 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12339 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12340 a normal call should be emitted rather than expanding the function
12341 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12342 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12343 passed as second argument. */
12346 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12347 enum built_in_function fcode)
12349 tree dest, size, len, fn, fmt, flag;
12350 const char *fmt_str;
12352 /* Verify the required arguments in the original call. */
12353 if (call_expr_nargs (exp) < 5)
12355 dest = CALL_EXPR_ARG (exp, 0);
12356 if (!validate_arg (dest, POINTER_TYPE))
12358 len = CALL_EXPR_ARG (exp, 1);
12359 if (!validate_arg (len, INTEGER_TYPE))
12361 flag = CALL_EXPR_ARG (exp, 2);
12362 if (!validate_arg (flag, INTEGER_TYPE))
12364 size = CALL_EXPR_ARG (exp, 3);
12365 if (!validate_arg (size, INTEGER_TYPE))
12367 fmt = CALL_EXPR_ARG (exp, 4);
12368 if (!validate_arg (fmt, POINTER_TYPE))
12371 if (! host_integerp (size, 1))
12374 if (! integer_all_onesp (size))
12376 if (! host_integerp (len, 1))
12378 /* If LEN is not constant, try MAXLEN too.
12379 For MAXLEN only allow optimizing into non-_ocs function
12380 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12381 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12387 if (tree_int_cst_lt (size, maxlen))
12391 if (!init_target_chars ())
12394 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12395 or if format doesn't contain % chars or is "%s". */
12396 if (! integer_zerop (flag))
12398 fmt_str = c_getstr (fmt);
12399 if (fmt_str == NULL)
12401 if (strchr (fmt_str, target_percent) != NULL
12402 && strcmp (fmt_str, target_percent_s))
12406 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12408 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12409 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12413 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12416 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12417 FMT and ARG are the arguments to the call; we don't fold cases with
12418 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12420 Return NULL_TREE if no simplification was possible, otherwise return the
12421 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12422 code of the function to be simplified. */
12425 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12426 enum built_in_function fcode)
12428 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12429 const char *fmt_str = NULL;
12431 /* If the return value is used, don't do the transformation. */
12435 /* Verify the required arguments in the original call. */
12436 if (!validate_arg (fmt, POINTER_TYPE))
12439 /* Check whether the format is a literal string constant. */
12440 fmt_str = c_getstr (fmt);
12441 if (fmt_str == NULL)
12444 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12446 /* If we're using an unlocked function, assume the other
12447 unlocked functions exist explicitly. */
12448 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12449 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12453 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12454 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12457 if (!init_target_chars ())
12460 if (strcmp (fmt_str, target_percent_s) == 0
12461 || strchr (fmt_str, target_percent) == NULL)
12465 if (strcmp (fmt_str, target_percent_s) == 0)
12467 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12470 if (!arg || !validate_arg (arg, POINTER_TYPE))
12473 str = c_getstr (arg);
12479 /* The format specifier doesn't contain any '%' characters. */
12480 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12486 /* If the string was "", printf does nothing. */
12487 if (str[0] == '\0')
12488 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12490 /* If the string has length of 1, call putchar. */
12491 if (str[1] == '\0')
12493 /* Given printf("c"), (where c is any one character,)
12494 convert "c"[0] to an int and pass that to the replacement
12496 newarg = build_int_cst (NULL_TREE, str[0]);
12498 call = build_call_expr (fn_putchar, 1, newarg);
12502 /* If the string was "string\n", call puts("string"). */
12503 size_t len = strlen (str);
12504 if ((unsigned char)str[len - 1] == target_newline)
12506 /* Create a NUL-terminated string that's one char shorter
12507 than the original, stripping off the trailing '\n'. */
12508 char *newstr = alloca (len);
12509 memcpy (newstr, str, len - 1);
12510 newstr[len - 1] = 0;
12512 newarg = build_string_literal (len, newstr);
12514 call = build_call_expr (fn_puts, 1, newarg);
12517 /* We'd like to arrange to call fputs(string,stdout) here,
12518 but we need stdout and don't have a way to get it yet. */
12523 /* The other optimizations can be done only on the non-va_list variants. */
12524 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12527 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12528 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12530 if (!arg || !validate_arg (arg, POINTER_TYPE))
12533 call = build_call_expr (fn_puts, 1, arg);
12536 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12537 else if (strcmp (fmt_str, target_percent_c) == 0)
12539 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12542 call = build_call_expr (fn_putchar, 1, arg);
12548 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12551 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12552 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12553 more than 3 arguments, and ARG may be null in the 2-argument case.
12555 Return NULL_TREE if no simplification was possible, otherwise return the
12556 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12557 code of the function to be simplified. */
12560 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12561 enum built_in_function fcode)
12563 tree fn_fputc, fn_fputs, call = NULL_TREE;
12564 const char *fmt_str = NULL;
12566 /* If the return value is used, don't do the transformation. */
12570 /* Verify the required arguments in the original call. */
12571 if (!validate_arg (fp, POINTER_TYPE))
12573 if (!validate_arg (fmt, POINTER_TYPE))
12576 /* Check whether the format is a literal string constant. */
12577 fmt_str = c_getstr (fmt);
12578 if (fmt_str == NULL)
12581 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12583 /* If we're using an unlocked function, assume the other
12584 unlocked functions exist explicitly. */
12585 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12586 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12590 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12591 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12594 if (!init_target_chars ())
12597 /* If the format doesn't contain % args or %%, use strcpy. */
12598 if (strchr (fmt_str, target_percent) == NULL)
12600 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12604 /* If the format specifier was "", fprintf does nothing. */
12605 if (fmt_str[0] == '\0')
12607 /* If FP has side-effects, just wait until gimplification is
12609 if (TREE_SIDE_EFFECTS (fp))
12612 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12615 /* When "string" doesn't contain %, replace all cases of
12616 fprintf (fp, string) with fputs (string, fp). The fputs
12617 builtin will take care of special cases like length == 1. */
12619 call = build_call_expr (fn_fputs, 2, fmt, fp);
12622 /* The other optimizations can be done only on the non-va_list variants. */
12623 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12626 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12627 else if (strcmp (fmt_str, target_percent_s) == 0)
12629 if (!arg || !validate_arg (arg, POINTER_TYPE))
12632 call = build_call_expr (fn_fputs, 2, arg, fp);
12635 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12636 else if (strcmp (fmt_str, target_percent_c) == 0)
12638 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12641 call = build_call_expr (fn_fputc, 2, arg, fp);
12646 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12649 /* Initialize format string characters in the target charset. */
12652 init_target_chars (void)
12657 target_newline = lang_hooks.to_target_charset ('\n');
12658 target_percent = lang_hooks.to_target_charset ('%');
12659 target_c = lang_hooks.to_target_charset ('c');
12660 target_s = lang_hooks.to_target_charset ('s');
12661 if (target_newline == 0 || target_percent == 0 || target_c == 0
12665 target_percent_c[0] = target_percent;
12666 target_percent_c[1] = target_c;
12667 target_percent_c[2] = '\0';
12669 target_percent_s[0] = target_percent;
12670 target_percent_s[1] = target_s;
12671 target_percent_s[2] = '\0';
12673 target_percent_s_newline[0] = target_percent;
12674 target_percent_s_newline[1] = target_s;
12675 target_percent_s_newline[2] = target_newline;
12676 target_percent_s_newline[3] = '\0';
12683 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12684 and no overflow/underflow occurred. INEXACT is true if M was not
12685 exactly calculated. TYPE is the tree type for the result. This
12686 function assumes that you cleared the MPFR flags and then
12687 calculated M to see if anything subsequently set a flag prior to
12688 entering this function. Return NULL_TREE if any checks fail. */
12691 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12693 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12694 overflow/underflow occurred. If -frounding-math, proceed iff the
12695 result of calling FUNC was exact. */
12696 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12697 && (!flag_rounding_math || !inexact))
12699 REAL_VALUE_TYPE rr;
12701 real_from_mpfr (&rr, m, type, GMP_RNDN);
12702 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12703 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12704 but the mpft_t is not, then we underflowed in the
12706 if (real_isfinite (&rr)
12707 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12709 REAL_VALUE_TYPE rmode;
12711 real_convert (&rmode, TYPE_MODE (type), &rr);
12712 /* Proceed iff the specified mode can hold the value. */
12713 if (real_identical (&rmode, &rr))
12714 return build_real (type, rmode);
12720 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12721 FUNC on it and return the resulting value as a tree with type TYPE.
12722 If MIN and/or MAX are not NULL, then the supplied ARG must be
12723 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12724 acceptable values, otherwise they are not. The mpfr precision is
12725 set to the precision of TYPE. We assume that function FUNC returns
12726 zero if the result could be calculated exactly within the requested
12730 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12731 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12734 tree result = NULL_TREE;
12738 /* To proceed, MPFR must exactly represent the target floating point
12739 format, which only happens when the target base equals two. */
12740 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12741 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12743 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12745 if (real_isfinite (ra)
12746 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12747 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12749 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12753 mpfr_init2 (m, prec);
12754 mpfr_from_real (m, ra, GMP_RNDN);
12755 mpfr_clear_flags ();
12756 inexact = func (m, m, GMP_RNDN);
12757 result = do_mpfr_ckconv (m, type, inexact);
12765 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12766 FUNC on it and return the resulting value as a tree with type TYPE.
12767 The mpfr precision is set to the precision of TYPE. We assume that
12768 function FUNC returns zero if the result could be calculated
12769 exactly within the requested precision. */
12772 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12773 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12775 tree result = NULL_TREE;
12780 /* To proceed, MPFR must exactly represent the target floating point
12781 format, which only happens when the target base equals two. */
12782 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12783 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12784 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12786 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12787 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12789 if (real_isfinite (ra1) && real_isfinite (ra2))
12791 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12795 mpfr_inits2 (prec, m1, m2, NULL);
12796 mpfr_from_real (m1, ra1, GMP_RNDN);
12797 mpfr_from_real (m2, ra2, GMP_RNDN);
12798 mpfr_clear_flags ();
12799 inexact = func (m1, m1, m2, GMP_RNDN);
12800 result = do_mpfr_ckconv (m1, type, inexact);
12801 mpfr_clears (m1, m2, NULL);
12808 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12809 FUNC on it and return the resulting value as a tree with type TYPE.
12810 The mpfr precision is set to the precision of TYPE. We assume that
12811 function FUNC returns zero if the result could be calculated
12812 exactly within the requested precision. */
12815 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12816 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12818 tree result = NULL_TREE;
12824 /* To proceed, MPFR must exactly represent the target floating point
12825 format, which only happens when the target base equals two. */
12826 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12827 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12828 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12829 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12831 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12832 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12833 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12835 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12837 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12841 mpfr_inits2 (prec, m1, m2, m3, NULL);
12842 mpfr_from_real (m1, ra1, GMP_RNDN);
12843 mpfr_from_real (m2, ra2, GMP_RNDN);
12844 mpfr_from_real (m3, ra3, GMP_RNDN);
12845 mpfr_clear_flags ();
12846 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12847 result = do_mpfr_ckconv (m1, type, inexact);
12848 mpfr_clears (m1, m2, m3, NULL);
12855 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12856 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12857 If ARG_SINP and ARG_COSP are NULL then the result is returned
12858 as a complex value.
12859 The type is taken from the type of ARG and is used for setting the
12860 precision of the calculation and results. */
12863 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12865 tree const type = TREE_TYPE (arg);
12866 tree result = NULL_TREE;
12870 /* To proceed, MPFR must exactly represent the target floating point
12871 format, which only happens when the target base equals two. */
12872 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12873 && TREE_CODE (arg) == REAL_CST
12874 && !TREE_OVERFLOW (arg))
12876 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12878 if (real_isfinite (ra))
12880 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12881 tree result_s, result_c;
12885 mpfr_inits2 (prec, m, ms, mc, NULL);
12886 mpfr_from_real (m, ra, GMP_RNDN);
12887 mpfr_clear_flags ();
12888 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12889 result_s = do_mpfr_ckconv (ms, type, inexact);
12890 result_c = do_mpfr_ckconv (mc, type, inexact);
12891 mpfr_clears (m, ms, mc, NULL);
12892 if (result_s && result_c)
12894 /* If we are to return in a complex value do so. */
12895 if (!arg_sinp && !arg_cosp)
12896 return build_complex (build_complex_type (type),
12897 result_c, result_s);
12899 /* Dereference the sin/cos pointer arguments. */
12900 arg_sinp = build_fold_indirect_ref (arg_sinp);
12901 arg_cosp = build_fold_indirect_ref (arg_cosp);
12902 /* Proceed if valid pointer type were passed in. */
12903 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12904 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12906 /* Set the values. */
12907 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12909 TREE_SIDE_EFFECTS (result_s) = 1;
12910 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12912 TREE_SIDE_EFFECTS (result_c) = 1;
12913 /* Combine the assignments into a compound expr. */
12914 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12915 result_s, result_c));
12923 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
12924 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12925 two-argument mpfr order N Bessel function FUNC on them and return
12926 the resulting value as a tree with type TYPE. The mpfr precision
12927 is set to the precision of TYPE. We assume that function FUNC
12928 returns zero if the result could be calculated exactly within the
12929 requested precision. */
12931 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12932 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12933 const REAL_VALUE_TYPE *min, bool inclusive)
12935 tree result = NULL_TREE;
12940 /* To proceed, MPFR must exactly represent the target floating point
12941 format, which only happens when the target base equals two. */
12942 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12943 && host_integerp (arg1, 0)
12944 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12946 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12947 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12950 && real_isfinite (ra)
12951 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12953 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12957 mpfr_init2 (m, prec);
12958 mpfr_from_real (m, ra, GMP_RNDN);
12959 mpfr_clear_flags ();
12960 inexact = func (m, n, m, GMP_RNDN);
12961 result = do_mpfr_ckconv (m, type, inexact);
12969 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12970 the pointer *(ARG_QUO) and return the result. The type is taken
12971 from the type of ARG0 and is used for setting the precision of the
12972 calculation and results. */
12975 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12977 tree const type = TREE_TYPE (arg0);
12978 tree result = NULL_TREE;
12983 /* To proceed, MPFR must exactly represent the target floating point
12984 format, which only happens when the target base equals two. */
12985 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12986 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12987 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12989 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12990 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12992 if (real_isfinite (ra0) && real_isfinite (ra1))
12994 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12999 mpfr_inits2 (prec, m0, m1, NULL);
13000 mpfr_from_real (m0, ra0, GMP_RNDN);
13001 mpfr_from_real (m1, ra1, GMP_RNDN);
13002 mpfr_clear_flags ();
13003 mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
13004 /* Remquo is independent of the rounding mode, so pass
13005 inexact=0 to do_mpfr_ckconv(). */
13006 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13007 mpfr_clears (m0, m1, NULL);
13010 /* MPFR calculates quo in the host's long so it may
13011 return more bits in quo than the target int can hold
13012 if sizeof(host long) > sizeof(target int). This can
13013 happen even for native compilers in LP64 mode. In
13014 these cases, modulo the quo value with the largest
13015 number that the target int can hold while leaving one
13016 bit for the sign. */
13017 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13018 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13020 /* Dereference the quo pointer argument. */
13021 arg_quo = build_fold_indirect_ref (arg_quo);
13022 /* Proceed iff a valid pointer type was passed in. */
13023 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13025 /* Set the value. */
13026 tree result_quo = fold_build2 (MODIFY_EXPR,
13027 TREE_TYPE (arg_quo), arg_quo,
13028 build_int_cst (NULL, integer_quo));
13029 TREE_SIDE_EFFECTS (result_quo) = 1;
13030 /* Combine the quo assignment with the rem. */
13031 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13032 result_quo, result_rem));
13040 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13041 resulting value as a tree with type TYPE. The mpfr precision is
13042 set to the precision of TYPE. We assume that this mpfr function
13043 returns zero if the result could be calculated exactly within the
13044 requested precision. In addition, the integer pointer represented
13045 by ARG_SG will be dereferenced and set to the appropriate signgam
13049 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13051 tree result = NULL_TREE;
13055 /* To proceed, MPFR must exactly represent the target floating point
13056 format, which only happens when the target base equals two. Also
13057 verify ARG is a constant and that ARG_SG is an int pointer. */
13058 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13059 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13060 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13061 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13063 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13065 /* In addition to NaN and Inf, the argument cannot be zero or a
13066 negative integer. */
13067 if (real_isfinite (ra)
13068 && ra->cl != rvc_zero
13069 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13071 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
13076 mpfr_init2 (m, prec);
13077 mpfr_from_real (m, ra, GMP_RNDN);
13078 mpfr_clear_flags ();
13079 inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
13080 result_lg = do_mpfr_ckconv (m, type, inexact);
13086 /* Dereference the arg_sg pointer argument. */
13087 arg_sg = build_fold_indirect_ref (arg_sg);
13088 /* Assign the signgam value into *arg_sg. */
13089 result_sg = fold_build2 (MODIFY_EXPR,
13090 TREE_TYPE (arg_sg), arg_sg,
13091 build_int_cst (NULL, sg));
13092 TREE_SIDE_EFFECTS (result_sg) = 1;
13093 /* Combine the signgam assignment with the lgamma result. */
13094 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13095 result_sg, result_lg));