1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
30 #include "tree-gimple.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names[(int) END_BUILTINS] =
65 #include "builtins.def"
69 /* Setup an array of _DECL trees, make sure each element is
70 initialized to NULL_TREE. */
71 tree built_in_decls[(int) END_BUILTINS];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 It may be NULL_TREE when this is invalid (for instance runtime is not
74 required to implement the function call in all cases). */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
129 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
132 static rtx expand_builtin_bzero (tree);
133 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_alloca (tree, rtx);
139 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static rtx expand_builtin_fputs (tree, rtx, bool);
142 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
143 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_expect (tree, tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (tree);
151 static tree fold_builtin_inf (tree, int);
152 static tree fold_builtin_nan (tree, tree, int);
153 static tree rewrite_call_expr (tree, int, tree, int, ...);
154 static bool validate_arg (const_tree, enum tree_code code);
155 static bool integer_valued_real_p (tree);
156 static tree fold_trunc_transparent_mathfn (tree, tree);
157 static bool readonly_data_expr (tree);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_signbit (tree, rtx);
160 static tree fold_builtin_sqrt (tree, tree);
161 static tree fold_builtin_cbrt (tree, tree);
162 static tree fold_builtin_pow (tree, tree, tree, tree);
163 static tree fold_builtin_powi (tree, tree, tree, tree);
164 static tree fold_builtin_cos (tree, tree, tree);
165 static tree fold_builtin_cosh (tree, tree, tree);
166 static tree fold_builtin_tan (tree, tree);
167 static tree fold_builtin_trunc (tree, tree);
168 static tree fold_builtin_floor (tree, tree);
169 static tree fold_builtin_ceil (tree, tree);
170 static tree fold_builtin_round (tree, tree);
171 static tree fold_builtin_int_roundingfn (tree, tree);
172 static tree fold_builtin_bitop (tree, tree);
173 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
174 static tree fold_builtin_strchr (tree, tree, tree);
175 static tree fold_builtin_memchr (tree, tree, tree, tree);
176 static tree fold_builtin_memcmp (tree, tree, tree);
177 static tree fold_builtin_strcmp (tree, tree);
178 static tree fold_builtin_strncmp (tree, tree, tree);
179 static tree fold_builtin_signbit (tree, tree);
180 static tree fold_builtin_copysign (tree, tree, tree, tree);
181 static tree fold_builtin_isascii (tree);
182 static tree fold_builtin_toascii (tree);
183 static tree fold_builtin_isdigit (tree);
184 static tree fold_builtin_fabs (tree, tree);
185 static tree fold_builtin_abs (tree, tree);
186 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
188 static tree fold_builtin_n (tree, tree *, int, bool);
189 static tree fold_builtin_0 (tree, bool);
190 static tree fold_builtin_1 (tree, tree, bool);
191 static tree fold_builtin_2 (tree, tree, tree, bool);
192 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
193 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
194 static tree fold_builtin_varargs (tree, tree, bool);
196 static tree fold_builtin_strpbrk (tree, tree, tree);
197 static tree fold_builtin_strstr (tree, tree, tree);
198 static tree fold_builtin_strrchr (tree, tree, tree);
199 static tree fold_builtin_strcat (tree, tree);
200 static tree fold_builtin_strncat (tree, tree, tree);
201 static tree fold_builtin_strspn (tree, tree);
202 static tree fold_builtin_strcspn (tree, tree);
203 static tree fold_builtin_sprintf (tree, tree, tree, int);
205 static rtx expand_builtin_object_size (tree);
206 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
207 enum built_in_function);
208 static void maybe_emit_chk_warning (tree, enum built_in_function);
209 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
210 static tree fold_builtin_object_size (tree, tree);
211 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
212 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
213 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
214 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
215 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
216 enum built_in_function);
217 static bool init_target_chars (void);
219 static unsigned HOST_WIDE_INT target_newline;
220 static unsigned HOST_WIDE_INT target_percent;
221 static unsigned HOST_WIDE_INT target_c;
222 static unsigned HOST_WIDE_INT target_s;
223 static char target_percent_c[3];
224 static char target_percent_s[3];
225 static char target_percent_s_newline[4];
226 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_arg2 (tree, tree, tree,
229 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
230 static tree do_mpfr_arg3 (tree, tree, tree, tree,
231 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
232 static tree do_mpfr_sincos (tree, tree, tree);
233 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
234 static tree do_mpfr_bessel_n (tree, tree, tree,
235 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
236 const REAL_VALUE_TYPE *, bool);
237 static tree do_mpfr_remquo (tree, tree, tree);
238 static tree do_mpfr_lgamma_r (tree, tree, tree);
241 /* Return true if NODE should be considered for inline expansion regardless
242 of the optimization level. This means whenever a function is invoked with
243 its "internal" name, which normally contains the prefix "__builtin". */
245 static bool called_as_built_in (tree node)
247 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
248 if (strncmp (name, "__builtin_", 10) == 0)
250 if (strncmp (name, "__sync_", 7) == 0)
255 /* Return the alignment in bits of EXP, a pointer valued expression.
256 But don't return more than MAX_ALIGN no matter what.
257 The alignment returned is, by default, the alignment of the thing that
258 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
260 Otherwise, look at the expression to see if we can do better, i.e., if the
261 expression is actually pointing at an object whose alignment is tighter. */
264 get_pointer_alignment (tree exp, unsigned int max_align)
266 unsigned int align, inner;
268 /* We rely on TER to compute accurate alignment information. */
269 if (!(optimize && flag_tree_ter))
272 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
275 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
276 align = MIN (align, max_align);
280 switch (TREE_CODE (exp))
283 exp = TREE_OPERAND (exp, 0);
284 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
287 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
288 align = MIN (inner, max_align);
291 case POINTER_PLUS_EXPR:
292 /* If sum of pointer + int, restrict our maximum alignment to that
293 imposed by the integer. If not, we can't do any better than
295 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
298 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
299 & (max_align / BITS_PER_UNIT - 1))
303 exp = TREE_OPERAND (exp, 0);
307 /* See what we are pointing at and look at its alignment. */
308 exp = TREE_OPERAND (exp, 0);
310 if (handled_component_p (exp))
312 HOST_WIDE_INT bitsize, bitpos;
314 enum machine_mode mode;
315 int unsignedp, volatilep;
317 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
318 &mode, &unsignedp, &volatilep, true);
320 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
321 if (offset && TREE_CODE (offset) == PLUS_EXPR
322 && host_integerp (TREE_OPERAND (offset, 1), 1))
324 /* Any overflow in calculating offset_bits won't change
327 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
331 inner = MIN (inner, (offset_bits & -offset_bits));
332 offset = TREE_OPERAND (offset, 0);
334 if (offset && TREE_CODE (offset) == MULT_EXPR
335 && host_integerp (TREE_OPERAND (offset, 1), 1))
337 /* Any overflow in calculating offset_factor won't change
339 unsigned offset_factor
340 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
344 inner = MIN (inner, (offset_factor & -offset_factor));
347 inner = MIN (inner, BITS_PER_UNIT);
350 align = MIN (inner, DECL_ALIGN (exp));
351 #ifdef CONSTANT_ALIGNMENT
352 else if (CONSTANT_CLASS_P (exp))
353 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
355 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
356 || TREE_CODE (exp) == INDIRECT_REF)
357 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
359 align = MIN (align, inner);
360 return MIN (align, max_align);
368 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
369 way, because it could contain a zero byte in the middle.
370 TREE_STRING_LENGTH is the size of the character array, not the string.
372 ONLY_VALUE should be nonzero if the result is not going to be emitted
373 into the instruction stream and zero if it is going to be expanded.
374 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
375 is returned, otherwise NULL, since
376 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
377 evaluate the side-effects.
379 The value returned is of type `ssizetype'.
381 Unfortunately, string_constant can't access the values of const char
382 arrays with initializers, so neither can we do so here. */
385 c_strlen (tree src, int only_value)
388 HOST_WIDE_INT offset;
393 if (TREE_CODE (src) == COND_EXPR
394 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
398 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
399 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
400 if (tree_int_cst_equal (len1, len2))
404 if (TREE_CODE (src) == COMPOUND_EXPR
405 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
406 return c_strlen (TREE_OPERAND (src, 1), only_value);
408 src = string_constant (src, &offset_node);
412 max = TREE_STRING_LENGTH (src) - 1;
413 ptr = TREE_STRING_POINTER (src);
415 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
417 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
418 compute the offset to the following null if we don't know where to
419 start searching for it. */
422 for (i = 0; i < max; i++)
426 /* We don't know the starting offset, but we do know that the string
427 has no internal zero bytes. We can assume that the offset falls
428 within the bounds of the string; otherwise, the programmer deserves
429 what he gets. Subtract the offset from the length of the string,
430 and return that. This would perhaps not be valid if we were dealing
431 with named arrays in addition to literal string constants. */
433 return size_diffop (size_int (max), offset_node);
436 /* We have a known offset into the string. Start searching there for
437 a null character if we can represent it as a single HOST_WIDE_INT. */
438 if (offset_node == 0)
440 else if (! host_integerp (offset_node, 0))
443 offset = tree_low_cst (offset_node, 0);
445 /* If the offset is known to be out of bounds, warn, and call strlen at
447 if (offset < 0 || offset > max)
449 /* Suppress multiple warnings for propagated constant strings. */
450 if (! TREE_NO_WARNING (src))
452 warning (0, "offset outside bounds of constant string");
453 TREE_NO_WARNING (src) = 1;
458 /* Use strlen to search for the first zero byte. Since any strings
459 constructed with build_string will have nulls appended, we win even
460 if we get handed something like (char[4])"abcd".
462 Since OFFSET is our starting index into the string, no further
463 calculation is needed. */
464 return ssize_int (strlen (ptr + offset));
467 /* Return a char pointer for a C string if it is a string constant
468 or sum of string constant and integer constant. */
475 src = string_constant (src, &offset_node);
479 if (offset_node == 0)
480 return TREE_STRING_POINTER (src);
481 else if (!host_integerp (offset_node, 1)
482 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
485 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
488 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
489 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
492 c_readstr (const char *str, enum machine_mode mode)
498 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
503 for (i = 0; i < GET_MODE_SIZE (mode); i++)
506 if (WORDS_BIG_ENDIAN)
507 j = GET_MODE_SIZE (mode) - i - 1;
508 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
509 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
510 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
512 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
515 ch = (unsigned char) str[i];
516 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
518 return immed_double_const (c[0], c[1], mode);
521 /* Cast a target constant CST to target CHAR and if that value fits into
522 host char type, return zero and put that value into variable pointed to by
526 target_char_cast (tree cst, char *p)
528 unsigned HOST_WIDE_INT val, hostval;
530 if (!host_integerp (cst, 1)
531 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
534 val = tree_low_cst (cst, 1);
535 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
536 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
539 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
540 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
549 /* Similar to save_expr, but assumes that arbitrary code is not executed
550 in between the multiple evaluations. In particular, we assume that a
551 non-addressable local variable will not be modified. */
554 builtin_save_expr (tree exp)
556 if (TREE_ADDRESSABLE (exp) == 0
557 && (TREE_CODE (exp) == PARM_DECL
558 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
561 return save_expr (exp);
564 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
565 times to get the address of either a higher stack frame, or a return
566 address located within it (depending on FNDECL_CODE). */
569 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
573 #ifdef INITIAL_FRAME_ADDRESS_RTX
574 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
578 /* For a zero count with __builtin_return_address, we don't care what
579 frame address we return, because target-specific definitions will
580 override us. Therefore frame pointer elimination is OK, and using
581 the soft frame pointer is OK.
583 For a nonzero count, or a zero count with __builtin_frame_address,
584 we require a stable offset from the current frame pointer to the
585 previous one, so we must use the hard frame pointer, and
586 we must disable frame pointer elimination. */
587 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
588 tem = frame_pointer_rtx;
591 tem = hard_frame_pointer_rtx;
593 /* Tell reload not to eliminate the frame pointer. */
594 crtl->accesses_prior_frames = 1;
598 /* Some machines need special handling before we can access
599 arbitrary frames. For example, on the SPARC, we must first flush
600 all register windows to the stack. */
601 #ifdef SETUP_FRAME_ADDRESSES
603 SETUP_FRAME_ADDRESSES ();
606 /* On the SPARC, the return address is not in the frame, it is in a
607 register. There is no way to access it off of the current frame
608 pointer, but it can be accessed off the previous frame pointer by
609 reading the value from the register window save area. */
610 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
611 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
615 /* Scan back COUNT frames to the specified frame. */
616 for (i = 0; i < count; i++)
618 /* Assume the dynamic chain pointer is in the word that the
619 frame address points to, unless otherwise specified. */
620 #ifdef DYNAMIC_CHAIN_ADDRESS
621 tem = DYNAMIC_CHAIN_ADDRESS (tem);
623 tem = memory_address (Pmode, tem);
624 tem = gen_frame_mem (Pmode, tem);
625 tem = copy_to_reg (tem);
628 /* For __builtin_frame_address, return what we've got. But, on
629 the SPARC for example, we may have to add a bias. */
630 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
631 #ifdef FRAME_ADDR_RTX
632 return FRAME_ADDR_RTX (tem);
637 /* For __builtin_return_address, get the return address from that frame. */
638 #ifdef RETURN_ADDR_RTX
639 tem = RETURN_ADDR_RTX (count, tem);
641 tem = memory_address (Pmode,
642 plus_constant (tem, GET_MODE_SIZE (Pmode)));
643 tem = gen_frame_mem (Pmode, tem);
648 /* Alias set used for setjmp buffer. */
649 static alias_set_type setjmp_alias_set = -1;
651 /* Construct the leading half of a __builtin_setjmp call. Control will
652 return to RECEIVER_LABEL. This is also called directly by the SJLJ
653 exception handling code. */
656 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
658 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
662 if (setjmp_alias_set == -1)
663 setjmp_alias_set = new_alias_set ();
665 buf_addr = convert_memory_address (Pmode, buf_addr);
667 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
669 /* We store the frame pointer and the address of receiver_label in
670 the buffer and use the rest of it for the stack save area, which
671 is machine-dependent. */
673 mem = gen_rtx_MEM (Pmode, buf_addr);
674 set_mem_alias_set (mem, setjmp_alias_set);
675 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
677 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
678 set_mem_alias_set (mem, setjmp_alias_set);
680 emit_move_insn (validize_mem (mem),
681 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
683 stack_save = gen_rtx_MEM (sa_mode,
684 plus_constant (buf_addr,
685 2 * GET_MODE_SIZE (Pmode)));
686 set_mem_alias_set (stack_save, setjmp_alias_set);
687 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
689 /* If there is further processing to do, do it. */
690 #ifdef HAVE_builtin_setjmp_setup
691 if (HAVE_builtin_setjmp_setup)
692 emit_insn (gen_builtin_setjmp_setup (buf_addr));
695 /* Tell optimize_save_area_alloca that extra work is going to
696 need to go on during alloca. */
697 cfun->calls_setjmp = 1;
699 /* We have a nonlocal label. */
700 cfun->has_nonlocal_label = 1;
703 /* Construct the trailing part of a __builtin_setjmp call. This is
704 also called directly by the SJLJ exception handling code. */
707 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
709 /* Clobber the FP when we get here, so we have to make sure it's
710 marked as used by this function. */
711 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
713 /* Mark the static chain as clobbered here so life information
714 doesn't get messed up for it. */
715 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
717 /* Now put in the code to restore the frame pointer, and argument
718 pointer, if needed. */
719 #ifdef HAVE_nonlocal_goto
720 if (! HAVE_nonlocal_goto)
723 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
724 /* This might change the hard frame pointer in ways that aren't
725 apparent to early optimization passes, so force a clobber. */
726 emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
729 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
730 if (fixed_regs[ARG_POINTER_REGNUM])
732 #ifdef ELIMINABLE_REGS
734 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
736 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
737 if (elim_regs[i].from == ARG_POINTER_REGNUM
738 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
741 if (i == ARRAY_SIZE (elim_regs))
744 /* Now restore our arg pointer from the address at which it
745 was saved in our stack frame. */
746 emit_move_insn (virtual_incoming_args_rtx,
747 copy_to_reg (get_arg_pointer_save_area ()));
752 #ifdef HAVE_builtin_setjmp_receiver
753 if (HAVE_builtin_setjmp_receiver)
754 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
757 #ifdef HAVE_nonlocal_goto_receiver
758 if (HAVE_nonlocal_goto_receiver)
759 emit_insn (gen_nonlocal_goto_receiver ());
764 /* We must not allow the code we just generated to be reordered by
765 scheduling. Specifically, the update of the frame pointer must
766 happen immediately, not later. */
767 emit_insn (gen_blockage ());
770 /* __builtin_longjmp is passed a pointer to an array of five words (not
771 all will be used on all machines). It operates similarly to the C
772 library function of the same name, but is more efficient. Much of
773 the code below is copied from the handling of non-local gotos. */
776 expand_builtin_longjmp (rtx buf_addr, rtx value)
778 rtx fp, lab, stack, insn, last;
779 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
781 if (setjmp_alias_set == -1)
782 setjmp_alias_set = new_alias_set ();
784 buf_addr = convert_memory_address (Pmode, buf_addr);
786 buf_addr = force_reg (Pmode, buf_addr);
788 /* We used to store value in static_chain_rtx, but that fails if pointers
789 are smaller than integers. We instead require that the user must pass
790 a second argument of 1, because that is what builtin_setjmp will
791 return. This also makes EH slightly more efficient, since we are no
792 longer copying around a value that we don't care about. */
793 gcc_assert (value == const1_rtx);
795 last = get_last_insn ();
796 #ifdef HAVE_builtin_longjmp
797 if (HAVE_builtin_longjmp)
798 emit_insn (gen_builtin_longjmp (buf_addr));
802 fp = gen_rtx_MEM (Pmode, buf_addr);
803 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
804 GET_MODE_SIZE (Pmode)));
806 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
807 2 * GET_MODE_SIZE (Pmode)));
808 set_mem_alias_set (fp, setjmp_alias_set);
809 set_mem_alias_set (lab, setjmp_alias_set);
810 set_mem_alias_set (stack, setjmp_alias_set);
812 /* Pick up FP, label, and SP from the block and jump. This code is
813 from expand_goto in stmt.c; see there for detailed comments. */
814 #ifdef HAVE_nonlocal_goto
815 if (HAVE_nonlocal_goto)
816 /* We have to pass a value to the nonlocal_goto pattern that will
817 get copied into the static_chain pointer, but it does not matter
818 what that value is, because builtin_setjmp does not use it. */
819 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
823 lab = copy_to_reg (lab);
825 emit_insn (gen_rtx_CLOBBER (VOIDmode,
826 gen_rtx_MEM (BLKmode,
827 gen_rtx_SCRATCH (VOIDmode))));
828 emit_insn (gen_rtx_CLOBBER (VOIDmode,
829 gen_rtx_MEM (BLKmode,
830 hard_frame_pointer_rtx)));
832 emit_move_insn (hard_frame_pointer_rtx, fp);
833 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
835 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
836 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
837 emit_indirect_jump (lab);
841 /* Search backwards and mark the jump insn as a non-local goto.
842 Note that this precludes the use of __builtin_longjmp to a
843 __builtin_setjmp target in the same function. However, we've
844 already cautioned the user that these functions are for
845 internal exception handling use only. */
846 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
848 gcc_assert (insn != last);
852 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
856 else if (CALL_P (insn))
861 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
862 and the address of the save area. */
865 expand_builtin_nonlocal_goto (tree exp)
867 tree t_label, t_save_area;
868 rtx r_label, r_save_area, r_fp, r_sp, insn;
870 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
873 t_label = CALL_EXPR_ARG (exp, 0);
874 t_save_area = CALL_EXPR_ARG (exp, 1);
876 r_label = expand_normal (t_label);
877 r_label = convert_memory_address (Pmode, r_label);
878 r_save_area = expand_normal (t_save_area);
879 r_save_area = convert_memory_address (Pmode, r_save_area);
880 r_fp = gen_rtx_MEM (Pmode, r_save_area);
881 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
882 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
884 crtl->has_nonlocal_goto = 1;
886 #ifdef HAVE_nonlocal_goto
887 /* ??? We no longer need to pass the static chain value, afaik. */
888 if (HAVE_nonlocal_goto)
889 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
893 r_label = copy_to_reg (r_label);
895 emit_insn (gen_rtx_CLOBBER (VOIDmode,
896 gen_rtx_MEM (BLKmode,
897 gen_rtx_SCRATCH (VOIDmode))));
899 emit_insn (gen_rtx_CLOBBER (VOIDmode,
900 gen_rtx_MEM (BLKmode,
901 hard_frame_pointer_rtx)));
903 /* Restore frame pointer for containing function.
904 This sets the actual hard register used for the frame pointer
905 to the location of the function's incoming static chain info.
906 The non-local goto handler will then adjust it to contain the
907 proper value and reload the argument pointer, if needed. */
908 emit_move_insn (hard_frame_pointer_rtx, r_fp);
909 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
911 /* USE of hard_frame_pointer_rtx added for consistency;
912 not clear if really needed. */
913 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
914 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
916 /* If the architecture is using a GP register, we must
917 conservatively assume that the target function makes use of it.
918 The prologue of functions with nonlocal gotos must therefore
919 initialize the GP register to the appropriate value, and we
920 must then make sure that this value is live at the point
921 of the jump. (Note that this doesn't necessarily apply
922 to targets with a nonlocal_goto pattern; they are free
923 to implement it in their own way. Note also that this is
924 a no-op if the GP register is a global invariant.) */
925 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
926 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
927 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
929 emit_indirect_jump (r_label);
932 /* Search backwards to the jump insn and mark it as a
934 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
938 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
939 const0_rtx, REG_NOTES (insn));
942 else if (CALL_P (insn))
949 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
950 (not all will be used on all machines) that was passed to __builtin_setjmp.
951 It updates the stack pointer in that block to correspond to the current
955 expand_builtin_update_setjmp_buf (rtx buf_addr)
957 enum machine_mode sa_mode = Pmode;
961 #ifdef HAVE_save_stack_nonlocal
962 if (HAVE_save_stack_nonlocal)
963 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
965 #ifdef STACK_SAVEAREA_MODE
966 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
970 = gen_rtx_MEM (sa_mode,
973 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
977 emit_insn (gen_setjmp ());
980 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
983 /* Expand a call to __builtin_prefetch. For a target that does not support
984 data prefetch, evaluate the memory address argument in case it has side
988 expand_builtin_prefetch (tree exp)
990 tree arg0, arg1, arg2;
994 if (!validate_arglist (exp, POINTER_TYPE, 0))
997 arg0 = CALL_EXPR_ARG (exp, 0);
999 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1000 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1002 nargs = call_expr_nargs (exp);
1004 arg1 = CALL_EXPR_ARG (exp, 1);
1006 arg1 = integer_zero_node;
1008 arg2 = CALL_EXPR_ARG (exp, 2);
1010 arg2 = build_int_cst (NULL_TREE, 3);
1012 /* Argument 0 is an address. */
1013 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1015 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1016 if (TREE_CODE (arg1) != INTEGER_CST)
1018 error ("second argument to %<__builtin_prefetch%> must be a constant");
1019 arg1 = integer_zero_node;
1021 op1 = expand_normal (arg1);
1022 /* Argument 1 must be either zero or one. */
1023 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1025 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1030 /* Argument 2 (locality) must be a compile-time constant int. */
1031 if (TREE_CODE (arg2) != INTEGER_CST)
1033 error ("third argument to %<__builtin_prefetch%> must be a constant");
1034 arg2 = integer_zero_node;
1036 op2 = expand_normal (arg2);
1037 /* Argument 2 must be 0, 1, 2, or 3. */
1038 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1040 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1044 #ifdef HAVE_prefetch
1047 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1049 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1050 || (GET_MODE (op0) != Pmode))
1052 op0 = convert_memory_address (Pmode, op0);
1053 op0 = force_reg (Pmode, op0);
1055 emit_insn (gen_prefetch (op0, op1, op2));
1059 /* Don't do anything with direct references to volatile memory, but
1060 generate code to handle other side effects. */
1061 if (!MEM_P (op0) && side_effects_p (op0))
1065 /* Get a MEM rtx for expression EXP which is the address of an operand
1066 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1067 the maximum length of the block of memory that might be accessed or
1071 get_memory_rtx (tree exp, tree len)
1073 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1074 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1076 /* Get an expression we can use to find the attributes to assign to MEM.
1077 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1078 we can. First remove any nops. */
1079 while (CONVERT_EXPR_P (exp)
1080 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1081 exp = TREE_OPERAND (exp, 0);
1083 if (TREE_CODE (exp) == ADDR_EXPR)
1084 exp = TREE_OPERAND (exp, 0);
1085 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1086 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1090 /* Honor attributes derived from exp, except for the alias set
1091 (as builtin stringops may alias with anything) and the size
1092 (as stringops may access multiple array elements). */
1095 set_mem_attributes (mem, exp, 0);
1097 /* Allow the string and memory builtins to overflow from one
1098 field into another, see http://gcc.gnu.org/PR23561.
1099 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1100 memory accessed by the string or memory builtin will fit
1101 within the field. */
1102 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1104 tree mem_expr = MEM_EXPR (mem);
1105 HOST_WIDE_INT offset = -1, length = -1;
1108 while (TREE_CODE (inner) == ARRAY_REF
1109 || CONVERT_EXPR_P (inner)
1110 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1111 || TREE_CODE (inner) == SAVE_EXPR)
1112 inner = TREE_OPERAND (inner, 0);
1114 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1116 if (MEM_OFFSET (mem)
1117 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1118 offset = INTVAL (MEM_OFFSET (mem));
1120 if (offset >= 0 && len && host_integerp (len, 0))
1121 length = tree_low_cst (len, 0);
1123 while (TREE_CODE (inner) == COMPONENT_REF)
1125 tree field = TREE_OPERAND (inner, 1);
1126 gcc_assert (! DECL_BIT_FIELD (field));
1127 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1128 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1131 && TYPE_SIZE_UNIT (TREE_TYPE (inner))
1132 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
1135 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
1136 /* If we can prove the memory starting at XEXP (mem, 0)
1137 and ending at XEXP (mem, 0) + LENGTH will fit into
1138 this field, we can keep that COMPONENT_REF in MEM_EXPR. */
1141 && offset + length <= size)
1146 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1147 offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
1148 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1156 mem_expr = TREE_OPERAND (mem_expr, 0);
1157 inner = TREE_OPERAND (inner, 0);
1160 if (mem_expr == NULL)
1162 if (mem_expr != MEM_EXPR (mem))
1164 set_mem_expr (mem, mem_expr);
1165 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1168 set_mem_alias_set (mem, 0);
1169 set_mem_size (mem, NULL_RTX);
1175 /* Built-in functions to perform an untyped call and return. */
1177 /* For each register that may be used for calling a function, this
1178 gives a mode used to copy the register's value. VOIDmode indicates
1179 the register is not used for calling a function. If the machine
1180 has register windows, this gives only the outbound registers.
1181 INCOMING_REGNO gives the corresponding inbound register. */
1182 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1184 /* For each register that may be used for returning values, this gives
1185 a mode used to copy the register's value. VOIDmode indicates the
1186 register is not used for returning values. If the machine has
1187 register windows, this gives only the outbound registers.
1188 INCOMING_REGNO gives the corresponding inbound register. */
1189 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1191 /* For each register that may be used for calling a function, this
1192 gives the offset of that register into the block returned by
1193 __builtin_apply_args. 0 indicates that the register is not
1194 used for calling a function. */
1195 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1197 /* Return the size required for the block returned by __builtin_apply_args,
1198 and initialize apply_args_mode. */
1201 apply_args_size (void)
1203 static int size = -1;
1206 enum machine_mode mode;
1208 /* The values computed by this function never change. */
1211 /* The first value is the incoming arg-pointer. */
1212 size = GET_MODE_SIZE (Pmode);
1214 /* The second value is the structure value address unless this is
1215 passed as an "invisible" first argument. */
1216 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1217 size += GET_MODE_SIZE (Pmode);
1219 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1220 if (FUNCTION_ARG_REGNO_P (regno))
1222 mode = reg_raw_mode[regno];
1224 gcc_assert (mode != VOIDmode);
1226 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1227 if (size % align != 0)
1228 size = CEIL (size, align) * align;
1229 apply_args_reg_offset[regno] = size;
1230 size += GET_MODE_SIZE (mode);
1231 apply_args_mode[regno] = mode;
1235 apply_args_mode[regno] = VOIDmode;
1236 apply_args_reg_offset[regno] = 0;
1242 /* Return the size required for the block returned by __builtin_apply,
1243 and initialize apply_result_mode. */
1246 apply_result_size (void)
1248 static int size = -1;
1250 enum machine_mode mode;
1252 /* The values computed by this function never change. */
1257 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1258 if (FUNCTION_VALUE_REGNO_P (regno))
1260 mode = reg_raw_mode[regno];
1262 gcc_assert (mode != VOIDmode);
1264 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1265 if (size % align != 0)
1266 size = CEIL (size, align) * align;
1267 size += GET_MODE_SIZE (mode);
1268 apply_result_mode[regno] = mode;
1271 apply_result_mode[regno] = VOIDmode;
1273 /* Allow targets that use untyped_call and untyped_return to override
1274 the size so that machine-specific information can be stored here. */
1275 #ifdef APPLY_RESULT_SIZE
1276 size = APPLY_RESULT_SIZE;
1282 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1283 /* Create a vector describing the result block RESULT. If SAVEP is true,
1284 the result block is used to save the values; otherwise it is used to
1285 restore the values. */
1288 result_vector (int savep, rtx result)
1290 int regno, size, align, nelts;
1291 enum machine_mode mode;
1293 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1296 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1297 if ((mode = apply_result_mode[regno]) != VOIDmode)
1299 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1300 if (size % align != 0)
1301 size = CEIL (size, align) * align;
1302 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1303 mem = adjust_address (result, mode, size);
1304 savevec[nelts++] = (savep
1305 ? gen_rtx_SET (VOIDmode, mem, reg)
1306 : gen_rtx_SET (VOIDmode, reg, mem));
1307 size += GET_MODE_SIZE (mode);
1309 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1311 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1313 /* Save the state required to perform an untyped call with the same
1314 arguments as were passed to the current function. */
1317 expand_builtin_apply_args_1 (void)
1320 int size, align, regno;
1321 enum machine_mode mode;
1322 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1324 /* Create a block where the arg-pointer, structure value address,
1325 and argument registers can be saved. */
1326 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1328 /* Walk past the arg-pointer and structure value address. */
1329 size = GET_MODE_SIZE (Pmode);
1330 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1331 size += GET_MODE_SIZE (Pmode);
1333 /* Save each register used in calling a function to the block. */
1334 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1335 if ((mode = apply_args_mode[regno]) != VOIDmode)
1337 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1338 if (size % align != 0)
1339 size = CEIL (size, align) * align;
1341 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1343 emit_move_insn (adjust_address (registers, mode, size), tem);
1344 size += GET_MODE_SIZE (mode);
1347 /* Save the arg pointer to the block. */
1348 tem = copy_to_reg (virtual_incoming_args_rtx);
1349 #ifdef STACK_GROWS_DOWNWARD
1350 /* We need the pointer as the caller actually passed them to us, not
1351 as we might have pretended they were passed. Make sure it's a valid
1352 operand, as emit_move_insn isn't expected to handle a PLUS. */
1354 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1357 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1359 size = GET_MODE_SIZE (Pmode);
1361 /* Save the structure value address unless this is passed as an
1362 "invisible" first argument. */
1363 if (struct_incoming_value)
1365 emit_move_insn (adjust_address (registers, Pmode, size),
1366 copy_to_reg (struct_incoming_value));
1367 size += GET_MODE_SIZE (Pmode);
1370 /* Return the address of the block. */
1371 return copy_addr_to_reg (XEXP (registers, 0));
1374 /* __builtin_apply_args returns block of memory allocated on
1375 the stack into which is stored the arg pointer, structure
1376 value address, static chain, and all the registers that might
1377 possibly be used in performing a function call. The code is
1378 moved to the start of the function so the incoming values are
1382 expand_builtin_apply_args (void)
1384 /* Don't do __builtin_apply_args more than once in a function.
1385 Save the result of the first call and reuse it. */
1386 if (apply_args_value != 0)
1387 return apply_args_value;
1389 /* When this function is called, it means that registers must be
1390 saved on entry to this function. So we migrate the
1391 call to the first insn of this function. */
1396 temp = expand_builtin_apply_args_1 ();
1400 apply_args_value = temp;
1402 /* Put the insns after the NOTE that starts the function.
1403 If this is inside a start_sequence, make the outer-level insn
1404 chain current, so the code is placed at the start of the
1406 push_topmost_sequence ();
1407 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1408 pop_topmost_sequence ();
1413 /* Perform an untyped call and save the state required to perform an
1414 untyped return of whatever value was returned by the given function. */
1417 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1419 int size, align, regno;
1420 enum machine_mode mode;
1421 rtx incoming_args, result, reg, dest, src, call_insn;
1422 rtx old_stack_level = 0;
1423 rtx call_fusage = 0;
1424 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1426 arguments = convert_memory_address (Pmode, arguments);
1428 /* Create a block where the return registers can be saved. */
1429 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1431 /* Fetch the arg pointer from the ARGUMENTS block. */
1432 incoming_args = gen_reg_rtx (Pmode);
1433 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1434 #ifndef STACK_GROWS_DOWNWARD
1435 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1436 incoming_args, 0, OPTAB_LIB_WIDEN);
1439 /* Push a new argument block and copy the arguments. Do not allow
1440 the (potential) memcpy call below to interfere with our stack
1442 do_pending_stack_adjust ();
1445 /* Save the stack with nonlocal if available. */
1446 #ifdef HAVE_save_stack_nonlocal
1447 if (HAVE_save_stack_nonlocal)
1448 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1451 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1453 /* Allocate a block of memory onto the stack and copy the memory
1454 arguments to the outgoing arguments address. */
1455 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1456 dest = virtual_outgoing_args_rtx;
1457 #ifndef STACK_GROWS_DOWNWARD
1458 if (GET_CODE (argsize) == CONST_INT)
1459 dest = plus_constant (dest, -INTVAL (argsize));
1461 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1463 dest = gen_rtx_MEM (BLKmode, dest);
1464 set_mem_align (dest, PARM_BOUNDARY);
1465 src = gen_rtx_MEM (BLKmode, incoming_args);
1466 set_mem_align (src, PARM_BOUNDARY);
1467 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1469 /* Refer to the argument block. */
1471 arguments = gen_rtx_MEM (BLKmode, arguments);
1472 set_mem_align (arguments, PARM_BOUNDARY);
1474 /* Walk past the arg-pointer and structure value address. */
1475 size = GET_MODE_SIZE (Pmode);
1477 size += GET_MODE_SIZE (Pmode);
1479 /* Restore each of the registers previously saved. Make USE insns
1480 for each of these registers for use in making the call. */
1481 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1482 if ((mode = apply_args_mode[regno]) != VOIDmode)
1484 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1485 if (size % align != 0)
1486 size = CEIL (size, align) * align;
1487 reg = gen_rtx_REG (mode, regno);
1488 emit_move_insn (reg, adjust_address (arguments, mode, size));
1489 use_reg (&call_fusage, reg);
1490 size += GET_MODE_SIZE (mode);
1493 /* Restore the structure value address unless this is passed as an
1494 "invisible" first argument. */
1495 size = GET_MODE_SIZE (Pmode);
1498 rtx value = gen_reg_rtx (Pmode);
1499 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1500 emit_move_insn (struct_value, value);
1501 if (REG_P (struct_value))
1502 use_reg (&call_fusage, struct_value);
1503 size += GET_MODE_SIZE (Pmode);
1506 /* All arguments and registers used for the call are set up by now! */
1507 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1509 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1510 and we don't want to load it into a register as an optimization,
1511 because prepare_call_address already did it if it should be done. */
1512 if (GET_CODE (function) != SYMBOL_REF)
1513 function = memory_address (FUNCTION_MODE, function);
1515 /* Generate the actual call instruction and save the return value. */
1516 #ifdef HAVE_untyped_call
1517 if (HAVE_untyped_call)
1518 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1519 result, result_vector (1, result)));
1522 #ifdef HAVE_call_value
1523 if (HAVE_call_value)
1527 /* Locate the unique return register. It is not possible to
1528 express a call that sets more than one return register using
1529 call_value; use untyped_call for that. In fact, untyped_call
1530 only needs to save the return registers in the given block. */
1531 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1532 if ((mode = apply_result_mode[regno]) != VOIDmode)
1534 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1536 valreg = gen_rtx_REG (mode, regno);
1539 emit_call_insn (GEN_CALL_VALUE (valreg,
1540 gen_rtx_MEM (FUNCTION_MODE, function),
1541 const0_rtx, NULL_RTX, const0_rtx));
1543 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1549 /* Find the CALL insn we just emitted, and attach the register usage
1551 call_insn = last_call_insn ();
1552 add_function_usage_to (call_insn, call_fusage);
1554 /* Restore the stack. */
1555 #ifdef HAVE_save_stack_nonlocal
1556 if (HAVE_save_stack_nonlocal)
1557 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1560 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1564 /* Return the address of the result block. */
1565 result = copy_addr_to_reg (XEXP (result, 0));
1566 return convert_memory_address (ptr_mode, result);
1569 /* Perform an untyped return. */
1572 expand_builtin_return (rtx result)
1574 int size, align, regno;
1575 enum machine_mode mode;
1577 rtx call_fusage = 0;
1579 result = convert_memory_address (Pmode, result);
1581 apply_result_size ();
1582 result = gen_rtx_MEM (BLKmode, result);
1584 #ifdef HAVE_untyped_return
1585 if (HAVE_untyped_return)
1587 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1593 /* Restore the return value and note that each value is used. */
1595 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1596 if ((mode = apply_result_mode[regno]) != VOIDmode)
1598 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1599 if (size % align != 0)
1600 size = CEIL (size, align) * align;
1601 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1602 emit_move_insn (reg, adjust_address (result, mode, size));
1604 push_to_sequence (call_fusage);
1605 emit_insn (gen_rtx_USE (VOIDmode, reg));
1606 call_fusage = get_insns ();
1608 size += GET_MODE_SIZE (mode);
1611 /* Put the USE insns before the return. */
1612 emit_insn (call_fusage);
1614 /* Return whatever values was restored by jumping directly to the end
1616 expand_naked_return ();
1619 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1621 static enum type_class
1622 type_to_class (tree type)
1624 switch (TREE_CODE (type))
1626 case VOID_TYPE: return void_type_class;
1627 case INTEGER_TYPE: return integer_type_class;
1628 case ENUMERAL_TYPE: return enumeral_type_class;
1629 case BOOLEAN_TYPE: return boolean_type_class;
1630 case POINTER_TYPE: return pointer_type_class;
1631 case REFERENCE_TYPE: return reference_type_class;
1632 case OFFSET_TYPE: return offset_type_class;
1633 case REAL_TYPE: return real_type_class;
1634 case COMPLEX_TYPE: return complex_type_class;
1635 case FUNCTION_TYPE: return function_type_class;
1636 case METHOD_TYPE: return method_type_class;
1637 case RECORD_TYPE: return record_type_class;
1639 case QUAL_UNION_TYPE: return union_type_class;
1640 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1641 ? string_type_class : array_type_class);
1642 case LANG_TYPE: return lang_type_class;
1643 default: return no_type_class;
1647 /* Expand a call EXP to __builtin_classify_type. */
1650 expand_builtin_classify_type (tree exp)
1652 if (call_expr_nargs (exp))
1653 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1654 return GEN_INT (no_type_class);
1657 /* This helper macro, meant to be used in mathfn_built_in below,
1658 determines which among a set of three builtin math functions is
1659 appropriate for a given type mode. The `F' and `L' cases are
1660 automatically generated from the `double' case. */
1661 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1662 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1663 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1664 fcodel = BUILT_IN_MATHFN##L ; break;
1665 /* Similar to above, but appends _R after any F/L suffix. */
1666 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1667 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1668 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1669 fcodel = BUILT_IN_MATHFN##L_R ; break;
1671 /* Return mathematic function equivalent to FN but operating directly
1672 on TYPE, if available. If IMPLICIT is true find the function in
1673 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1674 can't do the conversion, return zero. */
1677 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1679 tree const *const fn_arr
1680 = implicit ? implicit_built_in_decls : built_in_decls;
1681 enum built_in_function fcode, fcodef, fcodel;
1685 CASE_MATHFN (BUILT_IN_ACOS)
1686 CASE_MATHFN (BUILT_IN_ACOSH)
1687 CASE_MATHFN (BUILT_IN_ASIN)
1688 CASE_MATHFN (BUILT_IN_ASINH)
1689 CASE_MATHFN (BUILT_IN_ATAN)
1690 CASE_MATHFN (BUILT_IN_ATAN2)
1691 CASE_MATHFN (BUILT_IN_ATANH)
1692 CASE_MATHFN (BUILT_IN_CBRT)
1693 CASE_MATHFN (BUILT_IN_CEIL)
1694 CASE_MATHFN (BUILT_IN_CEXPI)
1695 CASE_MATHFN (BUILT_IN_COPYSIGN)
1696 CASE_MATHFN (BUILT_IN_COS)
1697 CASE_MATHFN (BUILT_IN_COSH)
1698 CASE_MATHFN (BUILT_IN_DREM)
1699 CASE_MATHFN (BUILT_IN_ERF)
1700 CASE_MATHFN (BUILT_IN_ERFC)
1701 CASE_MATHFN (BUILT_IN_EXP)
1702 CASE_MATHFN (BUILT_IN_EXP10)
1703 CASE_MATHFN (BUILT_IN_EXP2)
1704 CASE_MATHFN (BUILT_IN_EXPM1)
1705 CASE_MATHFN (BUILT_IN_FABS)
1706 CASE_MATHFN (BUILT_IN_FDIM)
1707 CASE_MATHFN (BUILT_IN_FLOOR)
1708 CASE_MATHFN (BUILT_IN_FMA)
1709 CASE_MATHFN (BUILT_IN_FMAX)
1710 CASE_MATHFN (BUILT_IN_FMIN)
1711 CASE_MATHFN (BUILT_IN_FMOD)
1712 CASE_MATHFN (BUILT_IN_FREXP)
1713 CASE_MATHFN (BUILT_IN_GAMMA)
1714 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1715 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1716 CASE_MATHFN (BUILT_IN_HYPOT)
1717 CASE_MATHFN (BUILT_IN_ILOGB)
1718 CASE_MATHFN (BUILT_IN_INF)
1719 CASE_MATHFN (BUILT_IN_ISINF)
1720 CASE_MATHFN (BUILT_IN_J0)
1721 CASE_MATHFN (BUILT_IN_J1)
1722 CASE_MATHFN (BUILT_IN_JN)
1723 CASE_MATHFN (BUILT_IN_LCEIL)
1724 CASE_MATHFN (BUILT_IN_LDEXP)
1725 CASE_MATHFN (BUILT_IN_LFLOOR)
1726 CASE_MATHFN (BUILT_IN_LGAMMA)
1727 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1728 CASE_MATHFN (BUILT_IN_LLCEIL)
1729 CASE_MATHFN (BUILT_IN_LLFLOOR)
1730 CASE_MATHFN (BUILT_IN_LLRINT)
1731 CASE_MATHFN (BUILT_IN_LLROUND)
1732 CASE_MATHFN (BUILT_IN_LOG)
1733 CASE_MATHFN (BUILT_IN_LOG10)
1734 CASE_MATHFN (BUILT_IN_LOG1P)
1735 CASE_MATHFN (BUILT_IN_LOG2)
1736 CASE_MATHFN (BUILT_IN_LOGB)
1737 CASE_MATHFN (BUILT_IN_LRINT)
1738 CASE_MATHFN (BUILT_IN_LROUND)
1739 CASE_MATHFN (BUILT_IN_MODF)
1740 CASE_MATHFN (BUILT_IN_NAN)
1741 CASE_MATHFN (BUILT_IN_NANS)
1742 CASE_MATHFN (BUILT_IN_NEARBYINT)
1743 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1744 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1745 CASE_MATHFN (BUILT_IN_POW)
1746 CASE_MATHFN (BUILT_IN_POWI)
1747 CASE_MATHFN (BUILT_IN_POW10)
1748 CASE_MATHFN (BUILT_IN_REMAINDER)
1749 CASE_MATHFN (BUILT_IN_REMQUO)
1750 CASE_MATHFN (BUILT_IN_RINT)
1751 CASE_MATHFN (BUILT_IN_ROUND)
1752 CASE_MATHFN (BUILT_IN_SCALB)
1753 CASE_MATHFN (BUILT_IN_SCALBLN)
1754 CASE_MATHFN (BUILT_IN_SCALBN)
1755 CASE_MATHFN (BUILT_IN_SIGNBIT)
1756 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1757 CASE_MATHFN (BUILT_IN_SIN)
1758 CASE_MATHFN (BUILT_IN_SINCOS)
1759 CASE_MATHFN (BUILT_IN_SINH)
1760 CASE_MATHFN (BUILT_IN_SQRT)
1761 CASE_MATHFN (BUILT_IN_TAN)
1762 CASE_MATHFN (BUILT_IN_TANH)
1763 CASE_MATHFN (BUILT_IN_TGAMMA)
1764 CASE_MATHFN (BUILT_IN_TRUNC)
1765 CASE_MATHFN (BUILT_IN_Y0)
1766 CASE_MATHFN (BUILT_IN_Y1)
1767 CASE_MATHFN (BUILT_IN_YN)
1773 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1774 return fn_arr[fcode];
1775 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1776 return fn_arr[fcodef];
1777 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1778 return fn_arr[fcodel];
1783 /* Like mathfn_built_in_1(), but always use the implicit array. */
1786 mathfn_built_in (tree type, enum built_in_function fn)
1788 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1791 /* If errno must be maintained, expand the RTL to check if the result,
1792 TARGET, of a built-in function call, EXP, is NaN, and if so set
1796 expand_errno_check (tree exp, rtx target)
1798 rtx lab = gen_label_rtx ();
1800 /* Test the result; if it is NaN, set errno=EDOM because
1801 the argument was not in the domain. */
1802 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1806 /* If this built-in doesn't throw an exception, set errno directly. */
1807 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1809 #ifdef GEN_ERRNO_RTX
1810 rtx errno_rtx = GEN_ERRNO_RTX;
1813 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1815 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1821 /* Make sure the library call isn't expanded as a tail call. */
1822 CALL_EXPR_TAILCALL (exp) = 0;
1824 /* We can't set errno=EDOM directly; let the library call do it.
1825 Pop the arguments right away in case the call gets deleted. */
1827 expand_call (exp, target, 0);
1832 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1833 Return NULL_RTX if a normal call should be emitted rather than expanding
1834 the function in-line. EXP is the expression that is a call to the builtin
1835 function; if convenient, the result should be placed in TARGET.
1836 SUBTARGET may be used as the target for computing one of EXP's operands. */
1839 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1841 optab builtin_optab;
1842 rtx op0, insns, before_call;
1843 tree fndecl = get_callee_fndecl (exp);
1844 enum machine_mode mode;
1845 bool errno_set = false;
1848 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1851 arg = CALL_EXPR_ARG (exp, 0);
1853 switch (DECL_FUNCTION_CODE (fndecl))
1855 CASE_FLT_FN (BUILT_IN_SQRT):
1856 errno_set = ! tree_expr_nonnegative_p (arg);
1857 builtin_optab = sqrt_optab;
1859 CASE_FLT_FN (BUILT_IN_EXP):
1860 errno_set = true; builtin_optab = exp_optab; break;
1861 CASE_FLT_FN (BUILT_IN_EXP10):
1862 CASE_FLT_FN (BUILT_IN_POW10):
1863 errno_set = true; builtin_optab = exp10_optab; break;
1864 CASE_FLT_FN (BUILT_IN_EXP2):
1865 errno_set = true; builtin_optab = exp2_optab; break;
1866 CASE_FLT_FN (BUILT_IN_EXPM1):
1867 errno_set = true; builtin_optab = expm1_optab; break;
1868 CASE_FLT_FN (BUILT_IN_LOGB):
1869 errno_set = true; builtin_optab = logb_optab; break;
1870 CASE_FLT_FN (BUILT_IN_LOG):
1871 errno_set = true; builtin_optab = log_optab; break;
1872 CASE_FLT_FN (BUILT_IN_LOG10):
1873 errno_set = true; builtin_optab = log10_optab; break;
1874 CASE_FLT_FN (BUILT_IN_LOG2):
1875 errno_set = true; builtin_optab = log2_optab; break;
1876 CASE_FLT_FN (BUILT_IN_LOG1P):
1877 errno_set = true; builtin_optab = log1p_optab; break;
1878 CASE_FLT_FN (BUILT_IN_ASIN):
1879 builtin_optab = asin_optab; break;
1880 CASE_FLT_FN (BUILT_IN_ACOS):
1881 builtin_optab = acos_optab; break;
1882 CASE_FLT_FN (BUILT_IN_TAN):
1883 builtin_optab = tan_optab; break;
1884 CASE_FLT_FN (BUILT_IN_ATAN):
1885 builtin_optab = atan_optab; break;
1886 CASE_FLT_FN (BUILT_IN_FLOOR):
1887 builtin_optab = floor_optab; break;
1888 CASE_FLT_FN (BUILT_IN_CEIL):
1889 builtin_optab = ceil_optab; break;
1890 CASE_FLT_FN (BUILT_IN_TRUNC):
1891 builtin_optab = btrunc_optab; break;
1892 CASE_FLT_FN (BUILT_IN_ROUND):
1893 builtin_optab = round_optab; break;
1894 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1895 builtin_optab = nearbyint_optab;
1896 if (flag_trapping_math)
1898 /* Else fallthrough and expand as rint. */
1899 CASE_FLT_FN (BUILT_IN_RINT):
1900 builtin_optab = rint_optab; break;
1905 /* Make a suitable register to place result in. */
1906 mode = TYPE_MODE (TREE_TYPE (exp));
1908 if (! flag_errno_math || ! HONOR_NANS (mode))
1911 /* Before working hard, check whether the instruction is available. */
1912 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1914 target = gen_reg_rtx (mode);
1916 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1917 need to expand the argument again. This way, we will not perform
1918 side-effects more the once. */
1919 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1921 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1925 /* Compute into TARGET.
1926 Set TARGET to wherever the result comes back. */
1927 target = expand_unop (mode, builtin_optab, op0, target, 0);
1932 expand_errno_check (exp, target);
1934 /* Output the entire sequence. */
1935 insns = get_insns ();
1941 /* If we were unable to expand via the builtin, stop the sequence
1942 (without outputting the insns) and call to the library function
1943 with the stabilized argument list. */
1947 before_call = get_last_insn ();
1949 target = expand_call (exp, target, target == const0_rtx);
1951 /* If this is a sqrt operation and we don't care about errno, try to
1952 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1953 This allows the semantics of the libcall to be visible to the RTL
1955 if (builtin_optab == sqrt_optab && !errno_set)
1957 /* Search backwards through the insns emitted by expand_call looking
1958 for the instruction with the REG_RETVAL note. */
1959 rtx last = get_last_insn ();
1960 while (last != before_call)
1962 if (find_reg_note (last, REG_RETVAL, NULL))
1964 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1965 /* Check that the REQ_EQUAL note is an EXPR_LIST with
1966 two elements, i.e. symbol_ref(sqrt) and the operand. */
1968 && GET_CODE (note) == EXPR_LIST
1969 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1970 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1971 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1973 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1974 /* Check operand is a register with expected mode. */
1977 && GET_MODE (operand) == mode)
1979 /* Replace the REG_EQUAL note with a SQRT rtx. */
1980 rtx equiv = gen_rtx_SQRT (mode, operand);
1981 set_unique_reg_note (last, REG_EQUAL, equiv);
1986 last = PREV_INSN (last);
1993 /* Expand a call to the builtin binary math functions (pow and atan2).
1994 Return NULL_RTX if a normal call should be emitted rather than expanding the
1995 function in-line. EXP is the expression that is a call to the builtin
1996 function; if convenient, the result should be placed in TARGET.
1997 SUBTARGET may be used as the target for computing one of EXP's
2001 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2003 optab builtin_optab;
2004 rtx op0, op1, insns;
2005 int op1_type = REAL_TYPE;
2006 tree fndecl = get_callee_fndecl (exp);
2008 enum machine_mode mode;
2009 bool errno_set = true;
2011 switch (DECL_FUNCTION_CODE (fndecl))
2013 CASE_FLT_FN (BUILT_IN_SCALBN):
2014 CASE_FLT_FN (BUILT_IN_SCALBLN):
2015 CASE_FLT_FN (BUILT_IN_LDEXP):
2016 op1_type = INTEGER_TYPE;
2021 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2024 arg0 = CALL_EXPR_ARG (exp, 0);
2025 arg1 = CALL_EXPR_ARG (exp, 1);
2027 switch (DECL_FUNCTION_CODE (fndecl))
2029 CASE_FLT_FN (BUILT_IN_POW):
2030 builtin_optab = pow_optab; break;
2031 CASE_FLT_FN (BUILT_IN_ATAN2):
2032 builtin_optab = atan2_optab; break;
2033 CASE_FLT_FN (BUILT_IN_SCALB):
2034 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2036 builtin_optab = scalb_optab; break;
2037 CASE_FLT_FN (BUILT_IN_SCALBN):
2038 CASE_FLT_FN (BUILT_IN_SCALBLN):
2039 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2041 /* Fall through... */
2042 CASE_FLT_FN (BUILT_IN_LDEXP):
2043 builtin_optab = ldexp_optab; break;
2044 CASE_FLT_FN (BUILT_IN_FMOD):
2045 builtin_optab = fmod_optab; break;
2046 CASE_FLT_FN (BUILT_IN_REMAINDER):
2047 CASE_FLT_FN (BUILT_IN_DREM):
2048 builtin_optab = remainder_optab; break;
2053 /* Make a suitable register to place result in. */
2054 mode = TYPE_MODE (TREE_TYPE (exp));
2056 /* Before working hard, check whether the instruction is available. */
2057 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2060 target = gen_reg_rtx (mode);
2062 if (! flag_errno_math || ! HONOR_NANS (mode))
2065 /* Always stabilize the argument list. */
2066 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2067 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2069 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2070 op1 = expand_normal (arg1);
2074 /* Compute into TARGET.
2075 Set TARGET to wherever the result comes back. */
2076 target = expand_binop (mode, builtin_optab, op0, op1,
2077 target, 0, OPTAB_DIRECT);
2079 /* If we were unable to expand via the builtin, stop the sequence
2080 (without outputting the insns) and call to the library function
2081 with the stabilized argument list. */
2085 return expand_call (exp, target, target == const0_rtx);
2089 expand_errno_check (exp, target);
2091 /* Output the entire sequence. */
2092 insns = get_insns ();
2099 /* Expand a call to the builtin sin and cos math functions.
2100 Return NULL_RTX if a normal call should be emitted rather than expanding the
2101 function in-line. EXP is the expression that is a call to the builtin
2102 function; if convenient, the result should be placed in TARGET.
2103 SUBTARGET may be used as the target for computing one of EXP's
2107 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2109 optab builtin_optab;
2111 tree fndecl = get_callee_fndecl (exp);
2112 enum machine_mode mode;
2115 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2118 arg = CALL_EXPR_ARG (exp, 0);
2120 switch (DECL_FUNCTION_CODE (fndecl))
2122 CASE_FLT_FN (BUILT_IN_SIN):
2123 CASE_FLT_FN (BUILT_IN_COS):
2124 builtin_optab = sincos_optab; break;
2129 /* Make a suitable register to place result in. */
2130 mode = TYPE_MODE (TREE_TYPE (exp));
2132 /* Check if sincos insn is available, otherwise fallback
2133 to sin or cos insn. */
2134 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2135 switch (DECL_FUNCTION_CODE (fndecl))
2137 CASE_FLT_FN (BUILT_IN_SIN):
2138 builtin_optab = sin_optab; break;
2139 CASE_FLT_FN (BUILT_IN_COS):
2140 builtin_optab = cos_optab; break;
2145 /* Before working hard, check whether the instruction is available. */
2146 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2148 target = gen_reg_rtx (mode);
2150 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2151 need to expand the argument again. This way, we will not perform
2152 side-effects more the once. */
2153 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2155 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2159 /* Compute into TARGET.
2160 Set TARGET to wherever the result comes back. */
2161 if (builtin_optab == sincos_optab)
2165 switch (DECL_FUNCTION_CODE (fndecl))
2167 CASE_FLT_FN (BUILT_IN_SIN):
2168 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2170 CASE_FLT_FN (BUILT_IN_COS):
2171 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2176 gcc_assert (result);
2180 target = expand_unop (mode, builtin_optab, op0, target, 0);
2185 /* Output the entire sequence. */
2186 insns = get_insns ();
2192 /* If we were unable to expand via the builtin, stop the sequence
2193 (without outputting the insns) and call to the library function
2194 with the stabilized argument list. */
2198 target = expand_call (exp, target, target == const0_rtx);
2203 /* Expand a call to one of the builtin math functions that operate on
2204 floating point argument and output an integer result (ilogb, isinf,
2206 Return 0 if a normal call should be emitted rather than expanding the
2207 function in-line. EXP is the expression that is a call to the builtin
2208 function; if convenient, the result should be placed in TARGET.
2209 SUBTARGET may be used as the target for computing one of EXP's operands. */
2212 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2214 optab builtin_optab = 0;
2215 enum insn_code icode = CODE_FOR_nothing;
2217 tree fndecl = get_callee_fndecl (exp);
2218 enum machine_mode mode;
2219 bool errno_set = false;
2222 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2225 arg = CALL_EXPR_ARG (exp, 0);
2227 switch (DECL_FUNCTION_CODE (fndecl))
2229 CASE_FLT_FN (BUILT_IN_ILOGB):
2230 errno_set = true; builtin_optab = ilogb_optab; break;
2231 CASE_FLT_FN (BUILT_IN_ISINF):
2232 builtin_optab = isinf_optab; break;
2233 case BUILT_IN_ISNORMAL:
2234 case BUILT_IN_ISFINITE:
2235 CASE_FLT_FN (BUILT_IN_FINITE):
2236 /* These builtins have no optabs (yet). */
2242 /* There's no easy way to detect the case we need to set EDOM. */
2243 if (flag_errno_math && errno_set)
2246 /* Optab mode depends on the mode of the input argument. */
2247 mode = TYPE_MODE (TREE_TYPE (arg));
2250 icode = optab_handler (builtin_optab, mode)->insn_code;
2252 /* Before working hard, check whether the instruction is available. */
2253 if (icode != CODE_FOR_nothing)
2255 /* Make a suitable register to place result in. */
2257 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2258 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2260 gcc_assert (insn_data[icode].operand[0].predicate
2261 (target, GET_MODE (target)));
2263 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2264 need to expand the argument again. This way, we will not perform
2265 side-effects more the once. */
2266 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2268 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2270 if (mode != GET_MODE (op0))
2271 op0 = convert_to_mode (mode, op0, 0);
2273 /* Compute into TARGET.
2274 Set TARGET to wherever the result comes back. */
2275 emit_unop_insn (icode, target, op0, UNKNOWN);
2279 /* If there is no optab, try generic code. */
2280 switch (DECL_FUNCTION_CODE (fndecl))
2284 CASE_FLT_FN (BUILT_IN_ISINF):
2286 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2287 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2288 tree const type = TREE_TYPE (arg);
2292 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2293 real_from_string (&r, buf);
2294 result = build_call_expr (isgr_fn, 2,
2295 fold_build1 (ABS_EXPR, type, arg),
2296 build_real (type, r));
2297 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2299 CASE_FLT_FN (BUILT_IN_FINITE):
2300 case BUILT_IN_ISFINITE:
2302 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2303 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2304 tree const type = TREE_TYPE (arg);
2308 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2309 real_from_string (&r, buf);
2310 result = build_call_expr (isle_fn, 2,
2311 fold_build1 (ABS_EXPR, type, arg),
2312 build_real (type, r));
2313 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2315 case BUILT_IN_ISNORMAL:
2317 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2318 islessequal(fabs(x),DBL_MAX). */
2319 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2320 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2321 tree const type = TREE_TYPE (arg);
2322 REAL_VALUE_TYPE rmax, rmin;
2325 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2326 real_from_string (&rmax, buf);
2327 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2328 real_from_string (&rmin, buf);
2329 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2330 result = build_call_expr (isle_fn, 2, arg,
2331 build_real (type, rmax));
2332 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2333 build_call_expr (isge_fn, 2, arg,
2334 build_real (type, rmin)));
2335 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2341 target = expand_call (exp, target, target == const0_rtx);
2346 /* Expand a call to the builtin sincos math function.
2347 Return NULL_RTX if a normal call should be emitted rather than expanding the
2348 function in-line. EXP is the expression that is a call to the builtin
2352 expand_builtin_sincos (tree exp)
2354 rtx op0, op1, op2, target1, target2;
2355 enum machine_mode mode;
2356 tree arg, sinp, cosp;
2359 if (!validate_arglist (exp, REAL_TYPE,
2360 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2363 arg = CALL_EXPR_ARG (exp, 0);
2364 sinp = CALL_EXPR_ARG (exp, 1);
2365 cosp = CALL_EXPR_ARG (exp, 2);
2367 /* Make a suitable register to place result in. */
2368 mode = TYPE_MODE (TREE_TYPE (arg));
2370 /* Check if sincos insn is available, otherwise emit the call. */
2371 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2374 target1 = gen_reg_rtx (mode);
2375 target2 = gen_reg_rtx (mode);
2377 op0 = expand_normal (arg);
2378 op1 = expand_normal (build_fold_indirect_ref (sinp));
2379 op2 = expand_normal (build_fold_indirect_ref (cosp));
2381 /* Compute into target1 and target2.
2382 Set TARGET to wherever the result comes back. */
2383 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2384 gcc_assert (result);
2386 /* Move target1 and target2 to the memory locations indicated
2388 emit_move_insn (op1, target1);
2389 emit_move_insn (op2, target2);
2394 /* Expand a call to the internal cexpi builtin to the sincos math function.
2395 EXP is the expression that is a call to the builtin function; if convenient,
2396 the result should be placed in TARGET. SUBTARGET may be used as the target
2397 for computing one of EXP's operands. */
2400 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2402 tree fndecl = get_callee_fndecl (exp);
2404 enum machine_mode mode;
2407 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2410 arg = CALL_EXPR_ARG (exp, 0);
2411 type = TREE_TYPE (arg);
2412 mode = TYPE_MODE (TREE_TYPE (arg));
2414 /* Try expanding via a sincos optab, fall back to emitting a libcall
2415 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2416 is only generated from sincos, cexp or if we have either of them. */
2417 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2419 op1 = gen_reg_rtx (mode);
2420 op2 = gen_reg_rtx (mode);
2422 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2424 /* Compute into op1 and op2. */
2425 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2427 else if (TARGET_HAS_SINCOS)
2429 tree call, fn = NULL_TREE;
2433 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2434 fn = built_in_decls[BUILT_IN_SINCOSF];
2435 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2436 fn = built_in_decls[BUILT_IN_SINCOS];
2437 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2438 fn = built_in_decls[BUILT_IN_SINCOSL];
2442 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2443 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2444 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2445 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2446 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2447 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2449 /* Make sure not to fold the sincos call again. */
2450 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2451 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2452 call, 3, arg, top1, top2));
2456 tree call, fn = NULL_TREE, narg;
2457 tree ctype = build_complex_type (type);
2459 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2460 fn = built_in_decls[BUILT_IN_CEXPF];
2461 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2462 fn = built_in_decls[BUILT_IN_CEXP];
2463 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2464 fn = built_in_decls[BUILT_IN_CEXPL];
2468 /* If we don't have a decl for cexp create one. This is the
2469 friendliest fallback if the user calls __builtin_cexpi
2470 without full target C99 function support. */
2471 if (fn == NULL_TREE)
2474 const char *name = NULL;
2476 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2478 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2480 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2483 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2484 fn = build_fn_decl (name, fntype);
2487 narg = fold_build2 (COMPLEX_EXPR, ctype,
2488 build_real (type, dconst0), arg);
2490 /* Make sure not to fold the cexp call again. */
2491 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2492 return expand_expr (build_call_nary (ctype, call, 1, narg),
2493 target, VOIDmode, EXPAND_NORMAL);
2496 /* Now build the proper return type. */
2497 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2498 make_tree (TREE_TYPE (arg), op2),
2499 make_tree (TREE_TYPE (arg), op1)),
2500 target, VOIDmode, EXPAND_NORMAL);
2503 /* Expand a call to one of the builtin rounding functions gcc defines
2504 as an extension (lfloor and lceil). As these are gcc extensions we
2505 do not need to worry about setting errno to EDOM.
2506 If expanding via optab fails, lower expression to (int)(floor(x)).
2507 EXP is the expression that is a call to the builtin function;
2508 if convenient, the result should be placed in TARGET. SUBTARGET may
2509 be used as the target for computing one of EXP's operands. */
2512 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2514 convert_optab builtin_optab;
2515 rtx op0, insns, tmp;
2516 tree fndecl = get_callee_fndecl (exp);
2517 enum built_in_function fallback_fn;
2518 tree fallback_fndecl;
2519 enum machine_mode mode;
2522 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2525 arg = CALL_EXPR_ARG (exp, 0);
2527 switch (DECL_FUNCTION_CODE (fndecl))
2529 CASE_FLT_FN (BUILT_IN_LCEIL):
2530 CASE_FLT_FN (BUILT_IN_LLCEIL):
2531 builtin_optab = lceil_optab;
2532 fallback_fn = BUILT_IN_CEIL;
2535 CASE_FLT_FN (BUILT_IN_LFLOOR):
2536 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2537 builtin_optab = lfloor_optab;
2538 fallback_fn = BUILT_IN_FLOOR;
2545 /* Make a suitable register to place result in. */
2546 mode = TYPE_MODE (TREE_TYPE (exp));
2548 target = gen_reg_rtx (mode);
2550 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2551 need to expand the argument again. This way, we will not perform
2552 side-effects more the once. */
2553 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2555 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2559 /* Compute into TARGET. */
2560 if (expand_sfix_optab (target, op0, builtin_optab))
2562 /* Output the entire sequence. */
2563 insns = get_insns ();
2569 /* If we were unable to expand via the builtin, stop the sequence
2570 (without outputting the insns). */
2573 /* Fall back to floating point rounding optab. */
2574 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2576 /* For non-C99 targets we may end up without a fallback fndecl here
2577 if the user called __builtin_lfloor directly. In this case emit
2578 a call to the floor/ceil variants nevertheless. This should result
2579 in the best user experience for not full C99 targets. */
2580 if (fallback_fndecl == NULL_TREE)
2583 const char *name = NULL;
2585 switch (DECL_FUNCTION_CODE (fndecl))
2587 case BUILT_IN_LCEIL:
2588 case BUILT_IN_LLCEIL:
2591 case BUILT_IN_LCEILF:
2592 case BUILT_IN_LLCEILF:
2595 case BUILT_IN_LCEILL:
2596 case BUILT_IN_LLCEILL:
2599 case BUILT_IN_LFLOOR:
2600 case BUILT_IN_LLFLOOR:
2603 case BUILT_IN_LFLOORF:
2604 case BUILT_IN_LLFLOORF:
2607 case BUILT_IN_LFLOORL:
2608 case BUILT_IN_LLFLOORL:
2615 fntype = build_function_type_list (TREE_TYPE (arg),
2616 TREE_TYPE (arg), NULL_TREE);
2617 fallback_fndecl = build_fn_decl (name, fntype);
2620 exp = build_call_expr (fallback_fndecl, 1, arg);
2622 tmp = expand_normal (exp);
2624 /* Truncate the result of floating point optab to integer
2625 via expand_fix (). */
2626 target = gen_reg_rtx (mode);
2627 expand_fix (target, tmp, 0);
2632 /* Expand a call to one of the builtin math functions doing integer
2634 Return 0 if a normal call should be emitted rather than expanding the
2635 function in-line. EXP is the expression that is a call to the builtin
2636 function; if convenient, the result should be placed in TARGET.
2637 SUBTARGET may be used as the target for computing one of EXP's operands. */
2640 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2642 convert_optab builtin_optab;
2644 tree fndecl = get_callee_fndecl (exp);
2646 enum machine_mode mode;
2648 /* There's no easy way to detect the case we need to set EDOM. */
2649 if (flag_errno_math)
2652 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2655 arg = CALL_EXPR_ARG (exp, 0);
2657 switch (DECL_FUNCTION_CODE (fndecl))
2659 CASE_FLT_FN (BUILT_IN_LRINT):
2660 CASE_FLT_FN (BUILT_IN_LLRINT):
2661 builtin_optab = lrint_optab; break;
2662 CASE_FLT_FN (BUILT_IN_LROUND):
2663 CASE_FLT_FN (BUILT_IN_LLROUND):
2664 builtin_optab = lround_optab; break;
2669 /* Make a suitable register to place result in. */
2670 mode = TYPE_MODE (TREE_TYPE (exp));
2672 target = gen_reg_rtx (mode);
2674 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2675 need to expand the argument again. This way, we will not perform
2676 side-effects more the once. */
2677 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2679 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2683 if (expand_sfix_optab (target, op0, builtin_optab))
2685 /* Output the entire sequence. */
2686 insns = get_insns ();
2692 /* If we were unable to expand via the builtin, stop the sequence
2693 (without outputting the insns) and call to the library function
2694 with the stabilized argument list. */
2697 target = expand_call (exp, target, target == const0_rtx);
2702 /* To evaluate powi(x,n), the floating point value x raised to the
2703 constant integer exponent n, we use a hybrid algorithm that
2704 combines the "window method" with look-up tables. For an
2705 introduction to exponentiation algorithms and "addition chains",
2706 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2707 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2708 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2709 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2711 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2712 multiplications to inline before calling the system library's pow
2713 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2714 so this default never requires calling pow, powf or powl. */
2716 #ifndef POWI_MAX_MULTS
2717 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2720 /* The size of the "optimal power tree" lookup table. All
2721 exponents less than this value are simply looked up in the
2722 powi_table below. This threshold is also used to size the
2723 cache of pseudo registers that hold intermediate results. */
2724 #define POWI_TABLE_SIZE 256
2726 /* The size, in bits of the window, used in the "window method"
2727 exponentiation algorithm. This is equivalent to a radix of
2728 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2729 #define POWI_WINDOW_SIZE 3
2731 /* The following table is an efficient representation of an
2732 "optimal power tree". For each value, i, the corresponding
2733 value, j, in the table states than an optimal evaluation
2734 sequence for calculating pow(x,i) can be found by evaluating
2735 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2736 100 integers is given in Knuth's "Seminumerical algorithms". */
2738 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2740 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2741 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2742 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2743 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2744 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2745 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2746 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2747 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2748 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2749 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2750 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2751 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2752 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2753 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2754 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2755 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2756 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2757 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2758 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2759 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2760 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2761 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2762 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2763 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2764 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2765 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2766 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2767 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2768 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2769 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2770 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2771 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2775 /* Return the number of multiplications required to calculate
2776 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2777 subroutine of powi_cost. CACHE is an array indicating
2778 which exponents have already been calculated. */
2781 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2783 /* If we've already calculated this exponent, then this evaluation
2784 doesn't require any additional multiplications. */
2789 return powi_lookup_cost (n - powi_table[n], cache)
2790 + powi_lookup_cost (powi_table[n], cache) + 1;
2793 /* Return the number of multiplications required to calculate
2794 powi(x,n) for an arbitrary x, given the exponent N. This
2795 function needs to be kept in sync with expand_powi below. */
2798 powi_cost (HOST_WIDE_INT n)
2800 bool cache[POWI_TABLE_SIZE];
2801 unsigned HOST_WIDE_INT digit;
2802 unsigned HOST_WIDE_INT val;
2808 /* Ignore the reciprocal when calculating the cost. */
2809 val = (n < 0) ? -n : n;
2811 /* Initialize the exponent cache. */
2812 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2817 while (val >= POWI_TABLE_SIZE)
2821 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2822 result += powi_lookup_cost (digit, cache)
2823 + POWI_WINDOW_SIZE + 1;
2824 val >>= POWI_WINDOW_SIZE;
2833 return result + powi_lookup_cost (val, cache);
2836 /* Recursive subroutine of expand_powi. This function takes the array,
2837 CACHE, of already calculated exponents and an exponent N and returns
2838 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2841 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2843 unsigned HOST_WIDE_INT digit;
2847 if (n < POWI_TABLE_SIZE)
2852 target = gen_reg_rtx (mode);
2855 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2856 op1 = expand_powi_1 (mode, powi_table[n], cache);
2860 target = gen_reg_rtx (mode);
2861 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2862 op0 = expand_powi_1 (mode, n - digit, cache);
2863 op1 = expand_powi_1 (mode, digit, cache);
2867 target = gen_reg_rtx (mode);
2868 op0 = expand_powi_1 (mode, n >> 1, cache);
2872 result = expand_mult (mode, op0, op1, target, 0);
2873 if (result != target)
2874 emit_move_insn (target, result);
2878 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2879 floating point operand in mode MODE, and N is the exponent. This
2880 function needs to be kept in sync with powi_cost above. */
2883 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2885 unsigned HOST_WIDE_INT val;
2886 rtx cache[POWI_TABLE_SIZE];
2890 return CONST1_RTX (mode);
2892 val = (n < 0) ? -n : n;
2894 memset (cache, 0, sizeof (cache));
2897 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2899 /* If the original exponent was negative, reciprocate the result. */
2901 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2902 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2907 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2908 a normal call should be emitted rather than expanding the function
2909 in-line. EXP is the expression that is a call to the builtin
2910 function; if convenient, the result should be placed in TARGET. */
2913 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2917 tree type = TREE_TYPE (exp);
2918 REAL_VALUE_TYPE cint, c, c2;
2921 enum machine_mode mode = TYPE_MODE (type);
2923 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2926 arg0 = CALL_EXPR_ARG (exp, 0);
2927 arg1 = CALL_EXPR_ARG (exp, 1);
2929 if (TREE_CODE (arg1) != REAL_CST
2930 || TREE_OVERFLOW (arg1))
2931 return expand_builtin_mathfn_2 (exp, target, subtarget);
2933 /* Handle constant exponents. */
2935 /* For integer valued exponents we can expand to an optimal multiplication
2936 sequence using expand_powi. */
2937 c = TREE_REAL_CST (arg1);
2938 n = real_to_integer (&c);
2939 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2940 if (real_identical (&c, &cint)
2941 && ((n >= -1 && n <= 2)
2942 || (flag_unsafe_math_optimizations
2944 && powi_cost (n) <= POWI_MAX_MULTS)))
2946 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2949 op = force_reg (mode, op);
2950 op = expand_powi (op, mode, n);
2955 narg0 = builtin_save_expr (arg0);
2957 /* If the exponent is not integer valued, check if it is half of an integer.
2958 In this case we can expand to sqrt (x) * x**(n/2). */
2959 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2960 if (fn != NULL_TREE)
2962 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2963 n = real_to_integer (&c2);
2964 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2965 if (real_identical (&c2, &cint)
2966 && ((flag_unsafe_math_optimizations
2968 && powi_cost (n/2) <= POWI_MAX_MULTS)
2971 tree call_expr = build_call_expr (fn, 1, narg0);
2972 /* Use expand_expr in case the newly built call expression
2973 was folded to a non-call. */
2974 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2977 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2978 op2 = force_reg (mode, op2);
2979 op2 = expand_powi (op2, mode, abs (n / 2));
2980 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2981 0, OPTAB_LIB_WIDEN);
2982 /* If the original exponent was negative, reciprocate the
2985 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2986 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2992 /* Try if the exponent is a third of an integer. In this case
2993 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2994 different from pow (x, 1./3.) due to rounding and behavior
2995 with negative x we need to constrain this transformation to
2996 unsafe math and positive x or finite math. */
2997 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2999 && flag_unsafe_math_optimizations
3000 && (tree_expr_nonnegative_p (arg0)
3001 || !HONOR_NANS (mode)))
3003 REAL_VALUE_TYPE dconst3;
3004 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3005 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3006 real_round (&c2, mode, &c2);
3007 n = real_to_integer (&c2);
3008 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3009 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3010 real_convert (&c2, mode, &c2);
3011 if (real_identical (&c2, &c)
3013 && powi_cost (n/3) <= POWI_MAX_MULTS)
3016 tree call_expr = build_call_expr (fn, 1,narg0);
3017 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3018 if (abs (n) % 3 == 2)
3019 op = expand_simple_binop (mode, MULT, op, op, op,
3020 0, OPTAB_LIB_WIDEN);
3023 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3024 op2 = force_reg (mode, op2);
3025 op2 = expand_powi (op2, mode, abs (n / 3));
3026 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3027 0, OPTAB_LIB_WIDEN);
3028 /* If the original exponent was negative, reciprocate the
3031 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3032 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3038 /* Fall back to optab expansion. */
3039 return expand_builtin_mathfn_2 (exp, target, subtarget);
3042 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3043 a normal call should be emitted rather than expanding the function
3044 in-line. EXP is the expression that is a call to the builtin
3045 function; if convenient, the result should be placed in TARGET. */
3048 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3052 enum machine_mode mode;
3053 enum machine_mode mode2;
3055 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3058 arg0 = CALL_EXPR_ARG (exp, 0);
3059 arg1 = CALL_EXPR_ARG (exp, 1);
3060 mode = TYPE_MODE (TREE_TYPE (exp));
3062 /* Handle constant power. */
3064 if (TREE_CODE (arg1) == INTEGER_CST
3065 && !TREE_OVERFLOW (arg1))
3067 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3069 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3070 Otherwise, check the number of multiplications required. */
3071 if ((TREE_INT_CST_HIGH (arg1) == 0
3072 || TREE_INT_CST_HIGH (arg1) == -1)
3073 && ((n >= -1 && n <= 2)
3075 && powi_cost (n) <= POWI_MAX_MULTS)))
3077 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3078 op0 = force_reg (mode, op0);
3079 return expand_powi (op0, mode, n);
3083 /* Emit a libcall to libgcc. */
3085 /* Mode of the 2nd argument must match that of an int. */
3086 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3088 if (target == NULL_RTX)
3089 target = gen_reg_rtx (mode);
3091 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3092 if (GET_MODE (op0) != mode)
3093 op0 = convert_to_mode (mode, op0, 0);
3094 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3095 if (GET_MODE (op1) != mode2)
3096 op1 = convert_to_mode (mode2, op1, 0);
3098 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3099 target, LCT_CONST, mode, 2,
3100 op0, mode, op1, mode2);
3105 /* Expand expression EXP which is a call to the strlen builtin. Return
3106 NULL_RTX if we failed the caller should emit a normal call, otherwise
3107 try to get the result in TARGET, if convenient. */
3110 expand_builtin_strlen (tree exp, rtx target,
3111 enum machine_mode target_mode)
3113 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3119 tree src = CALL_EXPR_ARG (exp, 0);
3120 rtx result, src_reg, char_rtx, before_strlen;
3121 enum machine_mode insn_mode = target_mode, char_mode;
3122 enum insn_code icode = CODE_FOR_nothing;
3125 /* If the length can be computed at compile-time, return it. */
3126 len = c_strlen (src, 0);
3128 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3130 /* If the length can be computed at compile-time and is constant
3131 integer, but there are side-effects in src, evaluate
3132 src for side-effects, then return len.
3133 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3134 can be optimized into: i++; x = 3; */
3135 len = c_strlen (src, 1);
3136 if (len && TREE_CODE (len) == INTEGER_CST)
3138 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3139 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3142 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3144 /* If SRC is not a pointer type, don't do this operation inline. */
3148 /* Bail out if we can't compute strlen in the right mode. */
3149 while (insn_mode != VOIDmode)
3151 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3152 if (icode != CODE_FOR_nothing)
3155 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3157 if (insn_mode == VOIDmode)
3160 /* Make a place to write the result of the instruction. */
3164 && GET_MODE (result) == insn_mode
3165 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3166 result = gen_reg_rtx (insn_mode);
3168 /* Make a place to hold the source address. We will not expand
3169 the actual source until we are sure that the expansion will
3170 not fail -- there are trees that cannot be expanded twice. */
3171 src_reg = gen_reg_rtx (Pmode);
3173 /* Mark the beginning of the strlen sequence so we can emit the
3174 source operand later. */
3175 before_strlen = get_last_insn ();
3177 char_rtx = const0_rtx;
3178 char_mode = insn_data[(int) icode].operand[2].mode;
3179 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3181 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3183 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3184 char_rtx, GEN_INT (align));
3189 /* Now that we are assured of success, expand the source. */
3191 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3193 emit_move_insn (src_reg, pat);
3198 emit_insn_after (pat, before_strlen);
3200 emit_insn_before (pat, get_insns ());
3202 /* Return the value in the proper mode for this function. */
3203 if (GET_MODE (result) == target_mode)
3205 else if (target != 0)
3206 convert_move (target, result, 0);
3208 target = convert_to_mode (target_mode, result, 0);
3214 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3215 caller should emit a normal call, otherwise try to get the result
3216 in TARGET, if convenient (and in mode MODE if that's convenient). */
3219 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3221 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3223 tree type = TREE_TYPE (exp);
3224 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3225 CALL_EXPR_ARG (exp, 1), type);
3227 return expand_expr (result, target, mode, EXPAND_NORMAL);
3232 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3233 caller should emit a normal call, otherwise try to get the result
3234 in TARGET, if convenient (and in mode MODE if that's convenient). */
3237 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3239 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3241 tree type = TREE_TYPE (exp);
3242 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3243 CALL_EXPR_ARG (exp, 1), type);
3245 return expand_expr (result, target, mode, EXPAND_NORMAL);
3247 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3252 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3253 caller should emit a normal call, otherwise try to get the result
3254 in TARGET, if convenient (and in mode MODE if that's convenient). */
3257 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3259 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3261 tree type = TREE_TYPE (exp);
3262 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3263 CALL_EXPR_ARG (exp, 1), type);
3265 return expand_expr (result, target, mode, EXPAND_NORMAL);
3270 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3271 caller should emit a normal call, otherwise try to get the result
3272 in TARGET, if convenient (and in mode MODE if that's convenient). */
3275 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3277 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3279 tree type = TREE_TYPE (exp);
3280 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3281 CALL_EXPR_ARG (exp, 1), type);
3283 return expand_expr (result, target, mode, EXPAND_NORMAL);
3288 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3289 bytes from constant string DATA + OFFSET and return it as target
3293 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3294 enum machine_mode mode)
3296 const char *str = (const char *) data;
3298 gcc_assert (offset >= 0
3299 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3300 <= strlen (str) + 1));
3302 return c_readstr (str + offset, mode);
3305 /* Expand a call EXP to the memcpy builtin.
3306 Return NULL_RTX if we failed, the caller should emit a normal call,
3307 otherwise try to get the result in TARGET, if convenient (and in
3308 mode MODE if that's convenient). */
3311 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3313 tree fndecl = get_callee_fndecl (exp);
3315 if (!validate_arglist (exp,
3316 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3320 tree dest = CALL_EXPR_ARG (exp, 0);
3321 tree src = CALL_EXPR_ARG (exp, 1);
3322 tree len = CALL_EXPR_ARG (exp, 2);
3323 const char *src_str;
3324 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3325 unsigned int dest_align
3326 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3327 rtx dest_mem, src_mem, dest_addr, len_rtx;
3328 tree result = fold_builtin_memory_op (dest, src, len,
3329 TREE_TYPE (TREE_TYPE (fndecl)),
3331 HOST_WIDE_INT expected_size = -1;
3332 unsigned int expected_align = 0;
3336 while (TREE_CODE (result) == COMPOUND_EXPR)
3338 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3340 result = TREE_OPERAND (result, 1);
3342 return expand_expr (result, target, mode, EXPAND_NORMAL);
3345 /* If DEST is not a pointer type, call the normal function. */
3346 if (dest_align == 0)
3349 /* If either SRC is not a pointer type, don't do this
3350 operation in-line. */
3354 stringop_block_profile (exp, &expected_align, &expected_size);
3355 if (expected_align < dest_align)
3356 expected_align = dest_align;
3357 dest_mem = get_memory_rtx (dest, len);
3358 set_mem_align (dest_mem, dest_align);
3359 len_rtx = expand_normal (len);
3360 src_str = c_getstr (src);
3362 /* If SRC is a string constant and block move would be done
3363 by pieces, we can avoid loading the string from memory
3364 and only stored the computed constants. */
3366 && GET_CODE (len_rtx) == CONST_INT
3367 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3368 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3369 (void *) src_str, dest_align, false))
3371 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3372 builtin_memcpy_read_str,
3373 (void *) src_str, dest_align, false, 0);
3374 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3375 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3379 src_mem = get_memory_rtx (src, len);
3380 set_mem_align (src_mem, src_align);
3382 /* Copy word part most expediently. */
3383 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3384 CALL_EXPR_TAILCALL (exp)
3385 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3386 expected_align, expected_size);
3390 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3391 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3397 /* Expand a call EXP to the mempcpy builtin.
3398 Return NULL_RTX if we failed; the caller should emit a normal call,
3399 otherwise try to get the result in TARGET, if convenient (and in
3400 mode MODE if that's convenient). If ENDP is 0 return the
3401 destination pointer, if ENDP is 1 return the end pointer ala
3402 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3406 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3408 if (!validate_arglist (exp,
3409 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3413 tree dest = CALL_EXPR_ARG (exp, 0);
3414 tree src = CALL_EXPR_ARG (exp, 1);
3415 tree len = CALL_EXPR_ARG (exp, 2);
3416 return expand_builtin_mempcpy_args (dest, src, len,
3418 target, mode, /*endp=*/ 1);
3422 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3423 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3424 so that this can also be called without constructing an actual CALL_EXPR.
3425 TYPE is the return type of the call. The other arguments and return value
3426 are the same as for expand_builtin_mempcpy. */
3429 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3430 rtx target, enum machine_mode mode, int endp)
3432 /* If return value is ignored, transform mempcpy into memcpy. */
3433 if (target == const0_rtx)
3435 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3440 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3441 target, mode, EXPAND_NORMAL);
3445 const char *src_str;
3446 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3447 unsigned int dest_align
3448 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3449 rtx dest_mem, src_mem, len_rtx;
3450 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3454 while (TREE_CODE (result) == COMPOUND_EXPR)
3456 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3458 result = TREE_OPERAND (result, 1);
3460 return expand_expr (result, target, mode, EXPAND_NORMAL);
3463 /* If either SRC or DEST is not a pointer type, don't do this
3464 operation in-line. */
3465 if (dest_align == 0 || src_align == 0)
3468 /* If LEN is not constant, call the normal function. */
3469 if (! host_integerp (len, 1))
3472 len_rtx = expand_normal (len);
3473 src_str = c_getstr (src);
3475 /* If SRC is a string constant and block move would be done
3476 by pieces, we can avoid loading the string from memory
3477 and only stored the computed constants. */
3479 && GET_CODE (len_rtx) == CONST_INT
3480 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3481 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3482 (void *) src_str, dest_align, false))
3484 dest_mem = get_memory_rtx (dest, len);
3485 set_mem_align (dest_mem, dest_align);
3486 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3487 builtin_memcpy_read_str,
3488 (void *) src_str, dest_align,
3490 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3491 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3495 if (GET_CODE (len_rtx) == CONST_INT
3496 && can_move_by_pieces (INTVAL (len_rtx),
3497 MIN (dest_align, src_align)))
3499 dest_mem = get_memory_rtx (dest, len);
3500 set_mem_align (dest_mem, dest_align);
3501 src_mem = get_memory_rtx (src, len);
3502 set_mem_align (src_mem, src_align);
3503 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3504 MIN (dest_align, src_align), endp);
3505 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3506 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3514 /* Expand expression EXP, which is a call to the memmove builtin. Return
3515 NULL_RTX if we failed; the caller should emit a normal call. */
3518 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3520 if (!validate_arglist (exp,
3521 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3525 tree dest = CALL_EXPR_ARG (exp, 0);
3526 tree src = CALL_EXPR_ARG (exp, 1);
3527 tree len = CALL_EXPR_ARG (exp, 2);
3528 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3529 target, mode, ignore);
3533 /* Helper function to do the actual work for expand_builtin_memmove. The
3534 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3535 so that this can also be called without constructing an actual CALL_EXPR.
3536 TYPE is the return type of the call. The other arguments and return value
3537 are the same as for expand_builtin_memmove. */
3540 expand_builtin_memmove_args (tree dest, tree src, tree len,
3541 tree type, rtx target, enum machine_mode mode,
3544 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3548 STRIP_TYPE_NOPS (result);
3549 while (TREE_CODE (result) == COMPOUND_EXPR)
3551 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3553 result = TREE_OPERAND (result, 1);
3555 return expand_expr (result, target, mode, EXPAND_NORMAL);
3558 /* Otherwise, call the normal function. */
3562 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3563 NULL_RTX if we failed the caller should emit a normal call. */
3566 expand_builtin_bcopy (tree exp, int ignore)
3568 tree type = TREE_TYPE (exp);
3569 tree src, dest, size;
3571 if (!validate_arglist (exp,
3572 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3575 src = CALL_EXPR_ARG (exp, 0);
3576 dest = CALL_EXPR_ARG (exp, 1);
3577 size = CALL_EXPR_ARG (exp, 2);
3579 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3580 This is done this way so that if it isn't expanded inline, we fall
3581 back to calling bcopy instead of memmove. */
3582 return expand_builtin_memmove_args (dest, src,
3583 fold_convert (sizetype, size),
3584 type, const0_rtx, VOIDmode,
3589 # define HAVE_movstr 0
3590 # define CODE_FOR_movstr CODE_FOR_nothing
3593 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3594 we failed, the caller should emit a normal call, otherwise try to
3595 get the result in TARGET, if convenient. If ENDP is 0 return the
3596 destination pointer, if ENDP is 1 return the end pointer ala
3597 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3601 expand_movstr (tree dest, tree src, rtx target, int endp)
3607 const struct insn_data * data;
3612 dest_mem = get_memory_rtx (dest, NULL);
3613 src_mem = get_memory_rtx (src, NULL);
3616 target = force_reg (Pmode, XEXP (dest_mem, 0));
3617 dest_mem = replace_equiv_address (dest_mem, target);
3618 end = gen_reg_rtx (Pmode);
3622 if (target == 0 || target == const0_rtx)
3624 end = gen_reg_rtx (Pmode);
3632 data = insn_data + CODE_FOR_movstr;
3634 if (data->operand[0].mode != VOIDmode)
3635 end = gen_lowpart (data->operand[0].mode, end);
3637 insn = data->genfun (end, dest_mem, src_mem);
3643 /* movstr is supposed to set end to the address of the NUL
3644 terminator. If the caller requested a mempcpy-like return value,
3646 if (endp == 1 && target != const0_rtx)
3648 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3649 emit_move_insn (target, force_operand (tem, NULL_RTX));
3655 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3656 NULL_RTX if we failed the caller should emit a normal call, otherwise
3657 try to get the result in TARGET, if convenient (and in mode MODE if that's
3661 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3663 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3665 tree dest = CALL_EXPR_ARG (exp, 0);
3666 tree src = CALL_EXPR_ARG (exp, 1);
3667 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3672 /* Helper function to do the actual work for expand_builtin_strcpy. The
3673 arguments to the builtin_strcpy call DEST and SRC are broken out
3674 so that this can also be called without constructing an actual CALL_EXPR.
3675 The other arguments and return value are the same as for
3676 expand_builtin_strcpy. */
3679 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3680 rtx target, enum machine_mode mode)
3682 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3684 return expand_expr (result, target, mode, EXPAND_NORMAL);
3685 return expand_movstr (dest, src, target, /*endp=*/0);
3689 /* Expand a call EXP to the stpcpy builtin.
3690 Return NULL_RTX if we failed the caller should emit a normal call,
3691 otherwise try to get the result in TARGET, if convenient (and in
3692 mode MODE if that's convenient). */
3695 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3699 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3702 dst = CALL_EXPR_ARG (exp, 0);
3703 src = CALL_EXPR_ARG (exp, 1);
3705 /* If return value is ignored, transform stpcpy into strcpy. */
3706 if (target == const0_rtx)
3708 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3712 return expand_expr (build_call_expr (fn, 2, dst, src),
3713 target, mode, EXPAND_NORMAL);
3720 /* Ensure we get an actual string whose length can be evaluated at
3721 compile-time, not an expression containing a string. This is
3722 because the latter will potentially produce pessimized code
3723 when used to produce the return value. */
3724 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3725 return expand_movstr (dst, src, target, /*endp=*/2);
3727 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3728 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3729 target, mode, /*endp=*/2);
3734 if (TREE_CODE (len) == INTEGER_CST)
3736 rtx len_rtx = expand_normal (len);
3738 if (GET_CODE (len_rtx) == CONST_INT)
3740 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3741 dst, src, target, mode);
3747 if (mode != VOIDmode)
3748 target = gen_reg_rtx (mode);
3750 target = gen_reg_rtx (GET_MODE (ret));
3752 if (GET_MODE (target) != GET_MODE (ret))
3753 ret = gen_lowpart (GET_MODE (target), ret);
3755 ret = plus_constant (ret, INTVAL (len_rtx));
3756 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3764 return expand_movstr (dst, src, target, /*endp=*/2);
3768 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3769 bytes from constant string DATA + OFFSET and return it as target
3773 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3774 enum machine_mode mode)
3776 const char *str = (const char *) data;
3778 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3781 return c_readstr (str + offset, mode);
3784 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3785 NULL_RTX if we failed the caller should emit a normal call. */
3788 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3790 tree fndecl = get_callee_fndecl (exp);
3792 if (validate_arglist (exp,
3793 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3795 tree dest = CALL_EXPR_ARG (exp, 0);
3796 tree src = CALL_EXPR_ARG (exp, 1);
3797 tree len = CALL_EXPR_ARG (exp, 2);
3798 tree slen = c_strlen (src, 1);
3799 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3803 while (TREE_CODE (result) == COMPOUND_EXPR)
3805 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3807 result = TREE_OPERAND (result, 1);
3809 return expand_expr (result, target, mode, EXPAND_NORMAL);
3812 /* We must be passed a constant len and src parameter. */
3813 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3816 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3818 /* We're required to pad with trailing zeros if the requested
3819 len is greater than strlen(s2)+1. In that case try to
3820 use store_by_pieces, if it fails, punt. */
3821 if (tree_int_cst_lt (slen, len))
3823 unsigned int dest_align
3824 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3825 const char *p = c_getstr (src);
3828 if (!p || dest_align == 0 || !host_integerp (len, 1)
3829 || !can_store_by_pieces (tree_low_cst (len, 1),
3830 builtin_strncpy_read_str,
3831 (void *) p, dest_align, false))
3834 dest_mem = get_memory_rtx (dest, len);
3835 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3836 builtin_strncpy_read_str,
3837 (void *) p, dest_align, false, 0);
3838 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3839 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3846 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3847 bytes from constant string DATA + OFFSET and return it as target
3851 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3852 enum machine_mode mode)
3854 const char *c = (const char *) data;
3855 char *p = alloca (GET_MODE_SIZE (mode));
3857 memset (p, *c, GET_MODE_SIZE (mode));
3859 return c_readstr (p, mode);
3862 /* Callback routine for store_by_pieces. Return the RTL of a register
3863 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3864 char value given in the RTL register data. For example, if mode is
3865 4 bytes wide, return the RTL for 0x01010101*data. */
3868 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3869 enum machine_mode mode)
3875 size = GET_MODE_SIZE (mode);
3880 memset (p, 1, size);
3881 coeff = c_readstr (p, mode);
3883 target = convert_to_mode (mode, (rtx) data, 1);
3884 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3885 return force_reg (mode, target);
3888 /* Expand expression EXP, which is a call to the memset builtin. Return
3889 NULL_RTX if we failed the caller should emit a normal call, otherwise
3890 try to get the result in TARGET, if convenient (and in mode MODE if that's
3894 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3896 if (!validate_arglist (exp,
3897 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3901 tree dest = CALL_EXPR_ARG (exp, 0);
3902 tree val = CALL_EXPR_ARG (exp, 1);
3903 tree len = CALL_EXPR_ARG (exp, 2);
3904 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3908 /* Helper function to do the actual work for expand_builtin_memset. The
3909 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3910 so that this can also be called without constructing an actual CALL_EXPR.
3911 The other arguments and return value are the same as for
3912 expand_builtin_memset. */
3915 expand_builtin_memset_args (tree dest, tree val, tree len,
3916 rtx target, enum machine_mode mode, tree orig_exp)
3919 enum built_in_function fcode;
3921 unsigned int dest_align;
3922 rtx dest_mem, dest_addr, len_rtx;
3923 HOST_WIDE_INT expected_size = -1;
3924 unsigned int expected_align = 0;
3926 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3928 /* If DEST is not a pointer type, don't do this operation in-line. */
3929 if (dest_align == 0)
3932 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3933 if (expected_align < dest_align)
3934 expected_align = dest_align;
3936 /* If the LEN parameter is zero, return DEST. */
3937 if (integer_zerop (len))
3939 /* Evaluate and ignore VAL in case it has side-effects. */
3940 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3941 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3944 /* Stabilize the arguments in case we fail. */
3945 dest = builtin_save_expr (dest);
3946 val = builtin_save_expr (val);
3947 len = builtin_save_expr (len);
3949 len_rtx = expand_normal (len);
3950 dest_mem = get_memory_rtx (dest, len);
3952 if (TREE_CODE (val) != INTEGER_CST)
3956 val_rtx = expand_normal (val);
3957 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3960 /* Assume that we can memset by pieces if we can store
3961 * the coefficients by pieces (in the required modes).
3962 * We can't pass builtin_memset_gen_str as that emits RTL. */
3964 if (host_integerp (len, 1)
3965 && can_store_by_pieces (tree_low_cst (len, 1),
3966 builtin_memset_read_str, &c, dest_align,
3969 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3971 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3972 builtin_memset_gen_str, val_rtx, dest_align,
3975 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3976 dest_align, expected_align,
3980 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3981 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3985 if (target_char_cast (val, &c))
3990 if (host_integerp (len, 1)
3991 && can_store_by_pieces (tree_low_cst (len, 1),
3992 builtin_memset_read_str, &c, dest_align,
3994 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3995 builtin_memset_read_str, &c, dest_align, true, 0);
3996 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3997 dest_align, expected_align,
4001 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4002 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4006 set_mem_align (dest_mem, dest_align);
4007 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4008 CALL_EXPR_TAILCALL (orig_exp)
4009 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4010 expected_align, expected_size);
4014 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4015 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4021 fndecl = get_callee_fndecl (orig_exp);
4022 fcode = DECL_FUNCTION_CODE (fndecl);
4023 if (fcode == BUILT_IN_MEMSET)
4024 fn = build_call_expr (fndecl, 3, dest, val, len);
4025 else if (fcode == BUILT_IN_BZERO)
4026 fn = build_call_expr (fndecl, 2, dest, len);
4029 if (TREE_CODE (fn) == CALL_EXPR)
4030 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4031 return expand_call (fn, target, target == const0_rtx);
4034 /* Expand expression EXP, which is a call to the bzero builtin. Return
4035 NULL_RTX if we failed the caller should emit a normal call. */
4038 expand_builtin_bzero (tree exp)
4042 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4045 dest = CALL_EXPR_ARG (exp, 0);
4046 size = CALL_EXPR_ARG (exp, 1);
4048 /* New argument list transforming bzero(ptr x, int y) to
4049 memset(ptr x, int 0, size_t y). This is done this way
4050 so that if it isn't expanded inline, we fallback to
4051 calling bzero instead of memset. */
4053 return expand_builtin_memset_args (dest, integer_zero_node,
4054 fold_convert (sizetype, size),
4055 const0_rtx, VOIDmode, exp);
4058 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4059 caller should emit a normal call, otherwise try to get the result
4060 in TARGET, if convenient (and in mode MODE if that's convenient). */
4063 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4065 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4066 INTEGER_TYPE, VOID_TYPE))
4068 tree type = TREE_TYPE (exp);
4069 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4070 CALL_EXPR_ARG (exp, 1),
4071 CALL_EXPR_ARG (exp, 2), type);
4073 return expand_expr (result, target, mode, EXPAND_NORMAL);
4078 /* Expand expression EXP, which is a call to the memcmp built-in function.
4079 Return NULL_RTX if we failed and the
4080 caller should emit a normal call, otherwise try to get the result in
4081 TARGET, if convenient (and in mode MODE, if that's convenient). */
4084 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4086 if (!validate_arglist (exp,
4087 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4091 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4092 CALL_EXPR_ARG (exp, 1),
4093 CALL_EXPR_ARG (exp, 2));
4095 return expand_expr (result, target, mode, EXPAND_NORMAL);
4098 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4100 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4103 tree arg1 = CALL_EXPR_ARG (exp, 0);
4104 tree arg2 = CALL_EXPR_ARG (exp, 1);
4105 tree len = CALL_EXPR_ARG (exp, 2);
4108 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4110 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4111 enum machine_mode insn_mode;
4113 #ifdef HAVE_cmpmemsi
4115 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4118 #ifdef HAVE_cmpstrnsi
4120 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4125 /* If we don't have POINTER_TYPE, call the function. */
4126 if (arg1_align == 0 || arg2_align == 0)
4129 /* Make a place to write the result of the instruction. */
4132 && REG_P (result) && GET_MODE (result) == insn_mode
4133 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4134 result = gen_reg_rtx (insn_mode);
4136 arg1_rtx = get_memory_rtx (arg1, len);
4137 arg2_rtx = get_memory_rtx (arg2, len);
4138 arg3_rtx = expand_normal (len);
4140 /* Set MEM_SIZE as appropriate. */
4141 if (GET_CODE (arg3_rtx) == CONST_INT)
4143 set_mem_size (arg1_rtx, arg3_rtx);
4144 set_mem_size (arg2_rtx, arg3_rtx);
4147 #ifdef HAVE_cmpmemsi
4149 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4150 GEN_INT (MIN (arg1_align, arg2_align)));
4153 #ifdef HAVE_cmpstrnsi
4155 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4156 GEN_INT (MIN (arg1_align, arg2_align)));
4164 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4165 TYPE_MODE (integer_type_node), 3,
4166 XEXP (arg1_rtx, 0), Pmode,
4167 XEXP (arg2_rtx, 0), Pmode,
4168 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4169 TYPE_UNSIGNED (sizetype)),
4170 TYPE_MODE (sizetype));
4172 /* Return the value in the proper mode for this function. */
4173 mode = TYPE_MODE (TREE_TYPE (exp));
4174 if (GET_MODE (result) == mode)
4176 else if (target != 0)
4178 convert_move (target, result, 0);
4182 return convert_to_mode (mode, result, 0);
4189 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4190 if we failed the caller should emit a normal call, otherwise try to get
4191 the result in TARGET, if convenient. */
4194 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4196 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4200 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4201 CALL_EXPR_ARG (exp, 1));
4203 return expand_expr (result, target, mode, EXPAND_NORMAL);
4206 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4207 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4208 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4210 rtx arg1_rtx, arg2_rtx;
4211 rtx result, insn = NULL_RTX;
4213 tree arg1 = CALL_EXPR_ARG (exp, 0);
4214 tree arg2 = CALL_EXPR_ARG (exp, 1);
4217 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4219 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4221 /* If we don't have POINTER_TYPE, call the function. */
4222 if (arg1_align == 0 || arg2_align == 0)
4225 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4226 arg1 = builtin_save_expr (arg1);
4227 arg2 = builtin_save_expr (arg2);
4229 arg1_rtx = get_memory_rtx (arg1, NULL);
4230 arg2_rtx = get_memory_rtx (arg2, NULL);
4232 #ifdef HAVE_cmpstrsi
4233 /* Try to call cmpstrsi. */
4236 enum machine_mode insn_mode
4237 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4239 /* Make a place to write the result of the instruction. */
4242 && REG_P (result) && GET_MODE (result) == insn_mode
4243 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4244 result = gen_reg_rtx (insn_mode);
4246 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4247 GEN_INT (MIN (arg1_align, arg2_align)));
4250 #ifdef HAVE_cmpstrnsi
4251 /* Try to determine at least one length and call cmpstrnsi. */
4252 if (!insn && HAVE_cmpstrnsi)
4257 enum machine_mode insn_mode
4258 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4259 tree len1 = c_strlen (arg1, 1);
4260 tree len2 = c_strlen (arg2, 1);
4263 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4265 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4267 /* If we don't have a constant length for the first, use the length
4268 of the second, if we know it. We don't require a constant for
4269 this case; some cost analysis could be done if both are available
4270 but neither is constant. For now, assume they're equally cheap,
4271 unless one has side effects. If both strings have constant lengths,
4278 else if (TREE_SIDE_EFFECTS (len1))
4280 else if (TREE_SIDE_EFFECTS (len2))
4282 else if (TREE_CODE (len1) != INTEGER_CST)
4284 else if (TREE_CODE (len2) != INTEGER_CST)
4286 else if (tree_int_cst_lt (len1, len2))
4291 /* If both arguments have side effects, we cannot optimize. */
4292 if (!len || TREE_SIDE_EFFECTS (len))
4295 arg3_rtx = expand_normal (len);
4297 /* Make a place to write the result of the instruction. */
4300 && REG_P (result) && GET_MODE (result) == insn_mode
4301 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4302 result = gen_reg_rtx (insn_mode);
4304 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4305 GEN_INT (MIN (arg1_align, arg2_align)));
4313 /* Return the value in the proper mode for this function. */
4314 mode = TYPE_MODE (TREE_TYPE (exp));
4315 if (GET_MODE (result) == mode)
4318 return convert_to_mode (mode, result, 0);
4319 convert_move (target, result, 0);
4323 /* Expand the library call ourselves using a stabilized argument
4324 list to avoid re-evaluating the function's arguments twice. */
4325 #ifdef HAVE_cmpstrnsi
4328 fndecl = get_callee_fndecl (exp);
4329 fn = build_call_expr (fndecl, 2, arg1, arg2);
4330 if (TREE_CODE (fn) == CALL_EXPR)
4331 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4332 return expand_call (fn, target, target == const0_rtx);
4338 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4339 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4340 the result in TARGET, if convenient. */
4343 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4345 if (!validate_arglist (exp,
4346 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4350 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4351 CALL_EXPR_ARG (exp, 1),
4352 CALL_EXPR_ARG (exp, 2));
4354 return expand_expr (result, target, mode, EXPAND_NORMAL);
4357 /* If c_strlen can determine an expression for one of the string
4358 lengths, and it doesn't have side effects, then emit cmpstrnsi
4359 using length MIN(strlen(string)+1, arg3). */
4360 #ifdef HAVE_cmpstrnsi
4363 tree len, len1, len2;
4364 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4367 tree arg1 = CALL_EXPR_ARG (exp, 0);
4368 tree arg2 = CALL_EXPR_ARG (exp, 1);
4369 tree arg3 = CALL_EXPR_ARG (exp, 2);
4372 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4374 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4375 enum machine_mode insn_mode
4376 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4378 len1 = c_strlen (arg1, 1);
4379 len2 = c_strlen (arg2, 1);
4382 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4384 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4386 /* If we don't have a constant length for the first, use the length
4387 of the second, if we know it. We don't require a constant for
4388 this case; some cost analysis could be done if both are available
4389 but neither is constant. For now, assume they're equally cheap,
4390 unless one has side effects. If both strings have constant lengths,
4397 else if (TREE_SIDE_EFFECTS (len1))
4399 else if (TREE_SIDE_EFFECTS (len2))
4401 else if (TREE_CODE (len1) != INTEGER_CST)
4403 else if (TREE_CODE (len2) != INTEGER_CST)
4405 else if (tree_int_cst_lt (len1, len2))
4410 /* If both arguments have side effects, we cannot optimize. */
4411 if (!len || TREE_SIDE_EFFECTS (len))
4414 /* The actual new length parameter is MIN(len,arg3). */
4415 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4416 fold_convert (TREE_TYPE (len), arg3));
4418 /* If we don't have POINTER_TYPE, call the function. */
4419 if (arg1_align == 0 || arg2_align == 0)
4422 /* Make a place to write the result of the instruction. */
4425 && REG_P (result) && GET_MODE (result) == insn_mode
4426 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4427 result = gen_reg_rtx (insn_mode);
4429 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4430 arg1 = builtin_save_expr (arg1);
4431 arg2 = builtin_save_expr (arg2);
4432 len = builtin_save_expr (len);
4434 arg1_rtx = get_memory_rtx (arg1, len);
4435 arg2_rtx = get_memory_rtx (arg2, len);
4436 arg3_rtx = expand_normal (len);
4437 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4438 GEN_INT (MIN (arg1_align, arg2_align)));
4443 /* Return the value in the proper mode for this function. */
4444 mode = TYPE_MODE (TREE_TYPE (exp));
4445 if (GET_MODE (result) == mode)
4448 return convert_to_mode (mode, result, 0);
4449 convert_move (target, result, 0);
4453 /* Expand the library call ourselves using a stabilized argument
4454 list to avoid re-evaluating the function's arguments twice. */
4455 fndecl = get_callee_fndecl (exp);
4456 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4457 if (TREE_CODE (fn) == CALL_EXPR)
4458 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4459 return expand_call (fn, target, target == const0_rtx);
4465 /* Expand expression EXP, which is a call to the strcat builtin.
4466 Return NULL_RTX if we failed the caller should emit a normal call,
4467 otherwise try to get the result in TARGET, if convenient. */
4470 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4472 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4476 tree dst = CALL_EXPR_ARG (exp, 0);
4477 tree src = CALL_EXPR_ARG (exp, 1);
4478 const char *p = c_getstr (src);
4480 /* If the string length is zero, return the dst parameter. */
4481 if (p && *p == '\0')
4482 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4486 /* See if we can store by pieces into (dst + strlen(dst)). */
4487 tree newsrc, newdst,
4488 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4491 /* Stabilize the argument list. */
4492 newsrc = builtin_save_expr (src);
4493 dst = builtin_save_expr (dst);
4497 /* Create strlen (dst). */
4498 newdst = build_call_expr (strlen_fn, 1, dst);
4499 /* Create (dst p+ strlen (dst)). */
4501 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4502 newdst = builtin_save_expr (newdst);
4504 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4506 end_sequence (); /* Stop sequence. */
4510 /* Output the entire sequence. */
4511 insns = get_insns ();
4515 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4522 /* Expand expression EXP, which is a call to the strncat builtin.
4523 Return NULL_RTX if we failed the caller should emit a normal call,
4524 otherwise try to get the result in TARGET, if convenient. */
4527 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4529 if (validate_arglist (exp,
4530 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4532 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4533 CALL_EXPR_ARG (exp, 1),
4534 CALL_EXPR_ARG (exp, 2));
4536 return expand_expr (result, target, mode, EXPAND_NORMAL);
4541 /* Expand expression EXP, which is a call to the strspn builtin.
4542 Return NULL_RTX if we failed the caller should emit a normal call,
4543 otherwise try to get the result in TARGET, if convenient. */
4546 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4548 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4550 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4551 CALL_EXPR_ARG (exp, 1));
4553 return expand_expr (result, target, mode, EXPAND_NORMAL);
4558 /* Expand expression EXP, which is a call to the strcspn builtin.
4559 Return NULL_RTX if we failed the caller should emit a normal call,
4560 otherwise try to get the result in TARGET, if convenient. */
4563 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4565 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4567 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4568 CALL_EXPR_ARG (exp, 1));
4570 return expand_expr (result, target, mode, EXPAND_NORMAL);
4575 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4576 if that's convenient. */
4579 expand_builtin_saveregs (void)
4583 /* Don't do __builtin_saveregs more than once in a function.
4584 Save the result of the first call and reuse it. */
4585 if (saveregs_value != 0)
4586 return saveregs_value;
4588 /* When this function is called, it means that registers must be
4589 saved on entry to this function. So we migrate the call to the
4590 first insn of this function. */
4594 /* Do whatever the machine needs done in this case. */
4595 val = targetm.calls.expand_builtin_saveregs ();
4600 saveregs_value = val;
4602 /* Put the insns after the NOTE that starts the function. If this
4603 is inside a start_sequence, make the outer-level insn chain current, so
4604 the code is placed at the start of the function. */
4605 push_topmost_sequence ();
4606 emit_insn_after (seq, entry_of_function ());
4607 pop_topmost_sequence ();
4612 /* __builtin_args_info (N) returns word N of the arg space info
4613 for the current function. The number and meanings of words
4614 is controlled by the definition of CUMULATIVE_ARGS. */
4617 expand_builtin_args_info (tree exp)
4619 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4620 int *word_ptr = (int *) &crtl->args.info;
4622 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4624 if (call_expr_nargs (exp) != 0)
4626 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4627 error ("argument of %<__builtin_args_info%> must be constant");
4630 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4632 if (wordnum < 0 || wordnum >= nwords)
4633 error ("argument of %<__builtin_args_info%> out of range");
4635 return GEN_INT (word_ptr[wordnum]);
4639 error ("missing argument in %<__builtin_args_info%>");
4644 /* Expand a call to __builtin_next_arg. */
4647 expand_builtin_next_arg (void)
4649 /* Checking arguments is already done in fold_builtin_next_arg
4650 that must be called before this function. */
4651 return expand_binop (ptr_mode, add_optab,
4652 crtl->args.internal_arg_pointer,
4653 crtl->args.arg_offset_rtx,
4654 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4657 /* Make it easier for the backends by protecting the valist argument
4658 from multiple evaluations. */
4661 stabilize_va_list (tree valist, int needs_lvalue)
4663 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4665 if (TREE_SIDE_EFFECTS (valist))
4666 valist = save_expr (valist);
4668 /* For this case, the backends will be expecting a pointer to
4669 TREE_TYPE (va_list_type_node), but it's possible we've
4670 actually been given an array (an actual va_list_type_node).
4672 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4674 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4675 valist = build_fold_addr_expr_with_type (valist, p1);
4684 if (! TREE_SIDE_EFFECTS (valist))
4687 pt = build_pointer_type (va_list_type_node);
4688 valist = fold_build1 (ADDR_EXPR, pt, valist);
4689 TREE_SIDE_EFFECTS (valist) = 1;
4692 if (TREE_SIDE_EFFECTS (valist))
4693 valist = save_expr (valist);
4694 valist = build_fold_indirect_ref (valist);
4700 /* The "standard" definition of va_list is void*. */
4703 std_build_builtin_va_list (void)
4705 return ptr_type_node;
4708 /* The "standard" implementation of va_start: just assign `nextarg' to
4712 std_expand_builtin_va_start (tree valist, rtx nextarg)
4714 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4715 convert_move (va_r, nextarg, 0);
4718 /* Expand EXP, a call to __builtin_va_start. */
4721 expand_builtin_va_start (tree exp)
4726 if (call_expr_nargs (exp) < 2)
4728 error ("too few arguments to function %<va_start%>");
4732 if (fold_builtin_next_arg (exp, true))
4735 nextarg = expand_builtin_next_arg ();
4736 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4738 if (targetm.expand_builtin_va_start)
4739 targetm.expand_builtin_va_start (valist, nextarg);
4741 std_expand_builtin_va_start (valist, nextarg);
4746 /* The "standard" implementation of va_arg: read the value from the
4747 current (padded) address and increment by the (padded) size. */
4750 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4752 tree addr, t, type_size, rounded_size, valist_tmp;
4753 unsigned HOST_WIDE_INT align, boundary;
4756 #ifdef ARGS_GROW_DOWNWARD
4757 /* All of the alignment and movement below is for args-grow-up machines.
4758 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4759 implement their own specialized gimplify_va_arg_expr routines. */
4763 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4765 type = build_pointer_type (type);
4767 align = PARM_BOUNDARY / BITS_PER_UNIT;
4768 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4770 /* Hoist the valist value into a temporary for the moment. */
4771 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4773 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4774 requires greater alignment, we must perform dynamic alignment. */
4775 if (boundary > align
4776 && !integer_zerop (TYPE_SIZE (type)))
4778 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4779 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4780 valist_tmp, size_int (boundary - 1)));
4781 gimplify_and_add (t, pre_p);
4783 t = fold_convert (sizetype, valist_tmp);
4784 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4785 fold_convert (TREE_TYPE (valist),
4786 fold_build2 (BIT_AND_EXPR, sizetype, t,
4787 size_int (-boundary))));
4788 gimplify_and_add (t, pre_p);
4793 /* If the actual alignment is less than the alignment of the type,
4794 adjust the type accordingly so that we don't assume strict alignment
4795 when deferencing the pointer. */
4796 boundary *= BITS_PER_UNIT;
4797 if (boundary < TYPE_ALIGN (type))
4799 type = build_variant_type_copy (type);
4800 TYPE_ALIGN (type) = boundary;
4803 /* Compute the rounded size of the type. */
4804 type_size = size_in_bytes (type);
4805 rounded_size = round_up (type_size, align);
4807 /* Reduce rounded_size so it's sharable with the postqueue. */
4808 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4812 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4814 /* Small args are padded downward. */
4815 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4816 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4817 size_binop (MINUS_EXPR, rounded_size, type_size));
4818 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4821 /* Compute new value for AP. */
4822 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4823 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4824 gimplify_and_add (t, pre_p);
4826 addr = fold_convert (build_pointer_type (type), addr);
4829 addr = build_va_arg_indirect_ref (addr);
4831 return build_va_arg_indirect_ref (addr);
4834 /* Build an indirect-ref expression over the given TREE, which represents a
4835 piece of a va_arg() expansion. */
4837 build_va_arg_indirect_ref (tree addr)
4839 addr = build_fold_indirect_ref (addr);
4841 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4847 /* Return a dummy expression of type TYPE in order to keep going after an
4851 dummy_object (tree type)
4853 tree t = build_int_cst (build_pointer_type (type), 0);
4854 return build1 (INDIRECT_REF, type, t);
4857 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4858 builtin function, but a very special sort of operator. */
4860 enum gimplify_status
4861 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4863 tree promoted_type, want_va_type, have_va_type;
4864 tree valist = TREE_OPERAND (*expr_p, 0);
4865 tree type = TREE_TYPE (*expr_p);
4868 /* Verify that valist is of the proper type. */
4869 want_va_type = va_list_type_node;
4870 have_va_type = TREE_TYPE (valist);
4872 if (have_va_type == error_mark_node)
4875 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4877 /* If va_list is an array type, the argument may have decayed
4878 to a pointer type, e.g. by being passed to another function.
4879 In that case, unwrap both types so that we can compare the
4880 underlying records. */
4881 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4882 || POINTER_TYPE_P (have_va_type))
4884 want_va_type = TREE_TYPE (want_va_type);
4885 have_va_type = TREE_TYPE (have_va_type);
4889 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4891 error ("first argument to %<va_arg%> not of type %<va_list%>");
4895 /* Generate a diagnostic for requesting data of a type that cannot
4896 be passed through `...' due to type promotion at the call site. */
4897 else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4900 static bool gave_help;
4902 /* Unfortunately, this is merely undefined, rather than a constraint
4903 violation, so we cannot make this an error. If this call is never
4904 executed, the program is still strictly conforming. */
4905 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4906 type, promoted_type);
4910 inform ("(so you should pass %qT not %qT to %<va_arg%>)",
4911 promoted_type, type);
4914 /* We can, however, treat "undefined" any way we please.
4915 Call abort to encourage the user to fix the program. */
4916 inform ("if this code is reached, the program will abort");
4917 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4918 append_to_statement_list (t, pre_p);
4920 /* This is dead code, but go ahead and finish so that the
4921 mode of the result comes out right. */
4922 *expr_p = dummy_object (type);
4927 /* Make it easier for the backends by protecting the valist argument
4928 from multiple evaluations. */
4929 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4931 /* For this case, the backends will be expecting a pointer to
4932 TREE_TYPE (va_list_type_node), but it's possible we've
4933 actually been given an array (an actual va_list_type_node).
4935 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4937 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4938 valist = build_fold_addr_expr_with_type (valist, p1);
4940 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4943 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4945 if (!targetm.gimplify_va_arg_expr)
4946 /* FIXME:Once most targets are converted we should merely
4947 assert this is non-null. */
4950 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4955 /* Expand EXP, a call to __builtin_va_end. */
4958 expand_builtin_va_end (tree exp)
4960 tree valist = CALL_EXPR_ARG (exp, 0);
4962 /* Evaluate for side effects, if needed. I hate macros that don't
4964 if (TREE_SIDE_EFFECTS (valist))
4965 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4970 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4971 builtin rather than just as an assignment in stdarg.h because of the
4972 nastiness of array-type va_list types. */
4975 expand_builtin_va_copy (tree exp)
4979 dst = CALL_EXPR_ARG (exp, 0);
4980 src = CALL_EXPR_ARG (exp, 1);
4982 dst = stabilize_va_list (dst, 1);
4983 src = stabilize_va_list (src, 0);
4985 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4987 t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4988 TREE_SIDE_EFFECTS (t) = 1;
4989 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4993 rtx dstb, srcb, size;
4995 /* Evaluate to pointers. */
4996 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4997 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4998 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4999 VOIDmode, EXPAND_NORMAL);
5001 dstb = convert_memory_address (Pmode, dstb);
5002 srcb = convert_memory_address (Pmode, srcb);
5004 /* "Dereference" to BLKmode memories. */
5005 dstb = gen_rtx_MEM (BLKmode, dstb);
5006 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5007 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
5008 srcb = gen_rtx_MEM (BLKmode, srcb);
5009 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5010 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
5013 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5019 /* Expand a call to one of the builtin functions __builtin_frame_address or
5020 __builtin_return_address. */
5023 expand_builtin_frame_address (tree fndecl, tree exp)
5025 /* The argument must be a nonnegative integer constant.
5026 It counts the number of frames to scan up the stack.
5027 The value is the return address saved in that frame. */
5028 if (call_expr_nargs (exp) == 0)
5029 /* Warning about missing arg was already issued. */
5031 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5033 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5034 error ("invalid argument to %<__builtin_frame_address%>");
5036 error ("invalid argument to %<__builtin_return_address%>");
5042 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5043 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5045 /* Some ports cannot access arbitrary stack frames. */
5048 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5049 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5051 warning (0, "unsupported argument to %<__builtin_return_address%>");
5055 /* For __builtin_frame_address, return what we've got. */
5056 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5060 && ! CONSTANT_P (tem))
5061 tem = copy_to_mode_reg (Pmode, tem);
5066 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5067 we failed and the caller should emit a normal call, otherwise try to get
5068 the result in TARGET, if convenient. */
5071 expand_builtin_alloca (tree exp, rtx target)
5076 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5077 should always expand to function calls. These can be intercepted
5082 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5085 /* Compute the argument. */
5086 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5088 /* Allocate the desired space. */
5089 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5090 result = convert_memory_address (ptr_mode, result);
5095 /* Expand a call to a bswap builtin with argument ARG0. MODE
5096 is the mode to expand with. */
5099 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5101 enum machine_mode mode;
5105 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5108 arg = CALL_EXPR_ARG (exp, 0);
5109 mode = TYPE_MODE (TREE_TYPE (arg));
5110 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5112 target = expand_unop (mode, bswap_optab, op0, target, 1);
5114 gcc_assert (target);
5116 return convert_to_mode (mode, target, 0);
5119 /* Expand a call to a unary builtin in EXP.
5120 Return NULL_RTX if a normal call should be emitted rather than expanding the
5121 function in-line. If convenient, the result should be placed in TARGET.
5122 SUBTARGET may be used as the target for computing one of EXP's operands. */
5125 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5126 rtx subtarget, optab op_optab)
5130 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5133 /* Compute the argument. */
5134 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5135 VOIDmode, EXPAND_NORMAL);
5136 /* Compute op, into TARGET if possible.
5137 Set TARGET to wherever the result comes back. */
5138 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5139 op_optab, op0, target, 1);
5140 gcc_assert (target);
5142 return convert_to_mode (target_mode, target, 0);
5145 /* If the string passed to fputs is a constant and is one character
5146 long, we attempt to transform this call into __builtin_fputc(). */
5149 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5151 /* Verify the arguments in the original call. */
5152 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5154 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5155 CALL_EXPR_ARG (exp, 1),
5156 (target == const0_rtx),
5157 unlocked, NULL_TREE);
5159 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5164 /* Expand a call to __builtin_expect. We just return our argument
5165 as the builtin_expect semantic should've been already executed by
5166 tree branch prediction pass. */
5169 expand_builtin_expect (tree exp, rtx target)
5173 if (call_expr_nargs (exp) < 2)
5175 arg = CALL_EXPR_ARG (exp, 0);
5176 c = CALL_EXPR_ARG (exp, 1);
5178 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5179 /* When guessing was done, the hints should be already stripped away. */
5180 gcc_assert (!flag_guess_branch_prob
5181 || optimize == 0 || errorcount || sorrycount);
5186 expand_builtin_trap (void)
5190 emit_insn (gen_trap ());
5193 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5197 /* Expand EXP, a call to fabs, fabsf or fabsl.
5198 Return NULL_RTX if a normal call should be emitted rather than expanding
5199 the function inline. If convenient, the result should be placed
5200 in TARGET. SUBTARGET may be used as the target for computing
5204 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5206 enum machine_mode mode;
5210 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5213 arg = CALL_EXPR_ARG (exp, 0);
5214 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5215 mode = TYPE_MODE (TREE_TYPE (arg));
5216 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5217 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5220 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5221 Return NULL is a normal call should be emitted rather than expanding the
5222 function inline. If convenient, the result should be placed in TARGET.
5223 SUBTARGET may be used as the target for computing the operand. */
5226 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5231 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5234 arg = CALL_EXPR_ARG (exp, 0);
5235 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5237 arg = CALL_EXPR_ARG (exp, 1);
5238 op1 = expand_normal (arg);
5240 return expand_copysign (op0, op1, target);
5243 /* Create a new constant string literal and return a char* pointer to it.
5244 The STRING_CST value is the LEN characters at STR. */
5246 build_string_literal (int len, const char *str)
5248 tree t, elem, index, type;
5250 t = build_string (len, str);
5251 elem = build_type_variant (char_type_node, 1, 0);
5252 index = build_index_type (size_int (len - 1));
5253 type = build_array_type (elem, index);
5254 TREE_TYPE (t) = type;
5255 TREE_CONSTANT (t) = 1;
5256 TREE_READONLY (t) = 1;
5257 TREE_STATIC (t) = 1;
5259 type = build_pointer_type (elem);
5260 t = build1 (ADDR_EXPR, type,
5261 build4 (ARRAY_REF, elem,
5262 t, integer_zero_node, NULL_TREE, NULL_TREE));
5266 /* Expand EXP, a call to printf or printf_unlocked.
5267 Return NULL_RTX if a normal call should be emitted rather than transforming
5268 the function inline. If convenient, the result should be placed in
5269 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5272 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5275 /* If we're using an unlocked function, assume the other unlocked
5276 functions exist explicitly. */
5277 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5278 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5279 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5280 : implicit_built_in_decls[BUILT_IN_PUTS];
5281 const char *fmt_str;
5284 int nargs = call_expr_nargs (exp);
5286 /* If the return value is used, don't do the transformation. */
5287 if (target != const0_rtx)
5290 /* Verify the required arguments in the original call. */
5293 fmt = CALL_EXPR_ARG (exp, 0);
5294 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5297 /* Check whether the format is a literal string constant. */
5298 fmt_str = c_getstr (fmt);
5299 if (fmt_str == NULL)
5302 if (!init_target_chars ())
5305 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5306 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5309 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5312 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5314 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5315 else if (strcmp (fmt_str, target_percent_c) == 0)
5318 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5321 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5325 /* We can't handle anything else with % args or %% ... yet. */
5326 if (strchr (fmt_str, target_percent))
5332 /* If the format specifier was "", printf does nothing. */
5333 if (fmt_str[0] == '\0')
5335 /* If the format specifier has length of 1, call putchar. */
5336 if (fmt_str[1] == '\0')
5338 /* Given printf("c"), (where c is any one character,)
5339 convert "c"[0] to an int and pass that to the replacement
5341 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5343 fn = build_call_expr (fn_putchar, 1, arg);
5347 /* If the format specifier was "string\n", call puts("string"). */
5348 size_t len = strlen (fmt_str);
5349 if ((unsigned char)fmt_str[len - 1] == target_newline)
5351 /* Create a NUL-terminated string that's one char shorter
5352 than the original, stripping off the trailing '\n'. */
5353 char *newstr = alloca (len);
5354 memcpy (newstr, fmt_str, len - 1);
5355 newstr[len - 1] = 0;
5356 arg = build_string_literal (len, newstr);
5358 fn = build_call_expr (fn_puts, 1, arg);
5361 /* We'd like to arrange to call fputs(string,stdout) here,
5362 but we need stdout and don't have a way to get it yet. */
5369 if (TREE_CODE (fn) == CALL_EXPR)
5370 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5371 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5374 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5375 Return NULL_RTX if a normal call should be emitted rather than transforming
5376 the function inline. If convenient, the result should be placed in
5377 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5380 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5383 /* If we're using an unlocked function, assume the other unlocked
5384 functions exist explicitly. */
5385 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5386 : implicit_built_in_decls[BUILT_IN_FPUTC];
5387 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5388 : implicit_built_in_decls[BUILT_IN_FPUTS];
5389 const char *fmt_str;
5392 int nargs = call_expr_nargs (exp);
5394 /* If the return value is used, don't do the transformation. */
5395 if (target != const0_rtx)
5398 /* Verify the required arguments in the original call. */
5401 fp = CALL_EXPR_ARG (exp, 0);
5402 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5404 fmt = CALL_EXPR_ARG (exp, 1);
5405 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5408 /* Check whether the format is a literal string constant. */
5409 fmt_str = c_getstr (fmt);
5410 if (fmt_str == NULL)
5413 if (!init_target_chars ())
5416 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5417 if (strcmp (fmt_str, target_percent_s) == 0)
5420 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5422 arg = CALL_EXPR_ARG (exp, 2);
5424 fn = build_call_expr (fn_fputs, 2, arg, fp);
5426 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5427 else if (strcmp (fmt_str, target_percent_c) == 0)
5430 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5432 arg = CALL_EXPR_ARG (exp, 2);
5434 fn = build_call_expr (fn_fputc, 2, arg, fp);
5438 /* We can't handle anything else with % args or %% ... yet. */
5439 if (strchr (fmt_str, target_percent))
5445 /* If the format specifier was "", fprintf does nothing. */
5446 if (fmt_str[0] == '\0')
5448 /* Evaluate and ignore FILE* argument for side-effects. */
5449 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5453 /* When "string" doesn't contain %, replace all cases of
5454 fprintf(stream,string) with fputs(string,stream). The fputs
5455 builtin will take care of special cases like length == 1. */
5457 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5462 if (TREE_CODE (fn) == CALL_EXPR)
5463 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5464 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5467 /* Expand a call EXP to sprintf. Return NULL_RTX if
5468 a normal call should be emitted rather than expanding the function
5469 inline. If convenient, the result should be placed in TARGET with
5473 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5476 const char *fmt_str;
5477 int nargs = call_expr_nargs (exp);
5479 /* Verify the required arguments in the original call. */
5482 dest = CALL_EXPR_ARG (exp, 0);
5483 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5485 fmt = CALL_EXPR_ARG (exp, 0);
5486 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5489 /* Check whether the format is a literal string constant. */
5490 fmt_str = c_getstr (fmt);
5491 if (fmt_str == NULL)
5494 if (!init_target_chars ())
5497 /* If the format doesn't contain % args or %%, use strcpy. */
5498 if (strchr (fmt_str, target_percent) == 0)
5500 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5503 if ((nargs > 2) || ! fn)
5505 expand_expr (build_call_expr (fn, 2, dest, fmt),
5506 const0_rtx, VOIDmode, EXPAND_NORMAL);
5507 if (target == const0_rtx)
5509 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5510 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5512 /* If the format is "%s", use strcpy if the result isn't used. */
5513 else if (strcmp (fmt_str, target_percent_s) == 0)
5516 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5522 arg = CALL_EXPR_ARG (exp, 2);
5523 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5526 if (target != const0_rtx)
5528 len = c_strlen (arg, 1);
5529 if (! len || TREE_CODE (len) != INTEGER_CST)
5535 expand_expr (build_call_expr (fn, 2, dest, arg),
5536 const0_rtx, VOIDmode, EXPAND_NORMAL);
5538 if (target == const0_rtx)
5540 return expand_expr (len, target, mode, EXPAND_NORMAL);
5546 /* Expand a call to either the entry or exit function profiler. */
5549 expand_builtin_profile_func (bool exitp)
5553 this = DECL_RTL (current_function_decl);
5554 gcc_assert (MEM_P (this));
5555 this = XEXP (this, 0);
5558 which = profile_function_exit_libfunc;
5560 which = profile_function_entry_libfunc;
5562 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5563 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5570 /* Expand a call to __builtin___clear_cache. */
5573 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5575 #ifndef HAVE_clear_cache
5576 #ifdef CLEAR_INSN_CACHE
5577 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5578 does something. Just do the default expansion to a call to
5582 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5583 does nothing. There is no need to call it. Do nothing. */
5585 #endif /* CLEAR_INSN_CACHE */
5587 /* We have a "clear_cache" insn, and it will handle everything. */
5589 rtx begin_rtx, end_rtx;
5590 enum insn_code icode;
5592 /* We must not expand to a library call. If we did, any
5593 fallback library function in libgcc that might contain a call to
5594 __builtin___clear_cache() would recurse infinitely. */
5595 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5597 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5601 if (HAVE_clear_cache)
5603 icode = CODE_FOR_clear_cache;
5605 begin = CALL_EXPR_ARG (exp, 0);
5606 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5607 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5608 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5609 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5611 end = CALL_EXPR_ARG (exp, 1);
5612 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5613 end_rtx = convert_memory_address (Pmode, end_rtx);
5614 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5615 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5617 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5620 #endif /* HAVE_clear_cache */
5623 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5626 round_trampoline_addr (rtx tramp)
5628 rtx temp, addend, mask;
5630 /* If we don't need too much alignment, we'll have been guaranteed
5631 proper alignment by get_trampoline_type. */
5632 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5635 /* Round address up to desired boundary. */
5636 temp = gen_reg_rtx (Pmode);
5637 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5638 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5640 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5641 temp, 0, OPTAB_LIB_WIDEN);
5642 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5643 temp, 0, OPTAB_LIB_WIDEN);
5649 expand_builtin_init_trampoline (tree exp)
5651 tree t_tramp, t_func, t_chain;
5652 rtx r_tramp, r_func, r_chain;
5653 #ifdef TRAMPOLINE_TEMPLATE
5657 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5658 POINTER_TYPE, VOID_TYPE))
5661 t_tramp = CALL_EXPR_ARG (exp, 0);
5662 t_func = CALL_EXPR_ARG (exp, 1);
5663 t_chain = CALL_EXPR_ARG (exp, 2);
5665 r_tramp = expand_normal (t_tramp);
5666 r_func = expand_normal (t_func);
5667 r_chain = expand_normal (t_chain);
5669 /* Generate insns to initialize the trampoline. */
5670 r_tramp = round_trampoline_addr (r_tramp);
5671 #ifdef TRAMPOLINE_TEMPLATE
5672 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5673 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5674 emit_block_move (blktramp, assemble_trampoline_template (),
5675 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5677 trampolines_created = 1;
5678 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5684 expand_builtin_adjust_trampoline (tree exp)
5688 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5691 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5692 tramp = round_trampoline_addr (tramp);
5693 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5694 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5700 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5701 function. The function first checks whether the back end provides
5702 an insn to implement signbit for the respective mode. If not, it
5703 checks whether the floating point format of the value is such that
5704 the sign bit can be extracted. If that is not the case, the
5705 function returns NULL_RTX to indicate that a normal call should be
5706 emitted rather than expanding the function in-line. EXP is the
5707 expression that is a call to the builtin function; if convenient,
5708 the result should be placed in TARGET. */
5710 expand_builtin_signbit (tree exp, rtx target)
5712 const struct real_format *fmt;
5713 enum machine_mode fmode, imode, rmode;
5714 HOST_WIDE_INT hi, lo;
5717 enum insn_code icode;
5720 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5723 arg = CALL_EXPR_ARG (exp, 0);
5724 fmode = TYPE_MODE (TREE_TYPE (arg));
5725 rmode = TYPE_MODE (TREE_TYPE (exp));
5726 fmt = REAL_MODE_FORMAT (fmode);
5728 arg = builtin_save_expr (arg);
5730 /* Expand the argument yielding a RTX expression. */
5731 temp = expand_normal (arg);
5733 /* Check if the back end provides an insn that handles signbit for the
5735 icode = signbit_optab->handlers [(int) fmode].insn_code;
5736 if (icode != CODE_FOR_nothing)
5738 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5739 emit_unop_insn (icode, target, temp, UNKNOWN);
5743 /* For floating point formats without a sign bit, implement signbit
5745 bitpos = fmt->signbit_ro;
5748 /* But we can't do this if the format supports signed zero. */
5749 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5752 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5753 build_real (TREE_TYPE (arg), dconst0));
5754 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5757 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5759 imode = int_mode_for_mode (fmode);
5760 if (imode == BLKmode)
5762 temp = gen_lowpart (imode, temp);
5767 /* Handle targets with different FP word orders. */
5768 if (FLOAT_WORDS_BIG_ENDIAN)
5769 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5771 word = bitpos / BITS_PER_WORD;
5772 temp = operand_subword_force (temp, word, fmode);
5773 bitpos = bitpos % BITS_PER_WORD;
5776 /* Force the intermediate word_mode (or narrower) result into a
5777 register. This avoids attempting to create paradoxical SUBREGs
5778 of floating point modes below. */
5779 temp = force_reg (imode, temp);
5781 /* If the bitpos is within the "result mode" lowpart, the operation
5782 can be implement with a single bitwise AND. Otherwise, we need
5783 a right shift and an AND. */
5785 if (bitpos < GET_MODE_BITSIZE (rmode))
5787 if (bitpos < HOST_BITS_PER_WIDE_INT)
5790 lo = (HOST_WIDE_INT) 1 << bitpos;
5794 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5799 temp = gen_lowpart (rmode, temp);
5800 temp = expand_binop (rmode, and_optab, temp,
5801 immed_double_const (lo, hi, rmode),
5802 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5806 /* Perform a logical right shift to place the signbit in the least
5807 significant bit, then truncate the result to the desired mode
5808 and mask just this bit. */
5809 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5810 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5811 temp = gen_lowpart (rmode, temp);
5812 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5813 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5819 /* Expand fork or exec calls. TARGET is the desired target of the
5820 call. EXP is the call. FN is the
5821 identificator of the actual function. IGNORE is nonzero if the
5822 value is to be ignored. */
5825 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5830 /* If we are not profiling, just call the function. */
5831 if (!profile_arc_flag)
5834 /* Otherwise call the wrapper. This should be equivalent for the rest of
5835 compiler, so the code does not diverge, and the wrapper may run the
5836 code necessary for keeping the profiling sane. */
5838 switch (DECL_FUNCTION_CODE (fn))
5841 id = get_identifier ("__gcov_fork");
5844 case BUILT_IN_EXECL:
5845 id = get_identifier ("__gcov_execl");
5848 case BUILT_IN_EXECV:
5849 id = get_identifier ("__gcov_execv");
5852 case BUILT_IN_EXECLP:
5853 id = get_identifier ("__gcov_execlp");
5856 case BUILT_IN_EXECLE:
5857 id = get_identifier ("__gcov_execle");
5860 case BUILT_IN_EXECVP:
5861 id = get_identifier ("__gcov_execvp");
5864 case BUILT_IN_EXECVE:
5865 id = get_identifier ("__gcov_execve");
5872 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5873 DECL_EXTERNAL (decl) = 1;
5874 TREE_PUBLIC (decl) = 1;
5875 DECL_ARTIFICIAL (decl) = 1;
5876 TREE_NOTHROW (decl) = 1;
5877 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5878 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5879 call = rewrite_call_expr (exp, 0, decl, 0);
5880 return expand_call (call, target, ignore);
5885 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5886 the pointer in these functions is void*, the tree optimizers may remove
5887 casts. The mode computed in expand_builtin isn't reliable either, due
5888 to __sync_bool_compare_and_swap.
5890 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5891 group of builtins. This gives us log2 of the mode size. */
5893 static inline enum machine_mode
5894 get_builtin_sync_mode (int fcode_diff)
5896 /* The size is not negotiable, so ask not to get BLKmode in return
5897 if the target indicates that a smaller size would be better. */
5898 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5901 /* Expand the memory expression LOC and return the appropriate memory operand
5902 for the builtin_sync operations. */
5905 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5909 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5911 /* Note that we explicitly do not want any alias information for this
5912 memory, so that we kill all other live memories. Otherwise we don't
5913 satisfy the full barrier semantics of the intrinsic. */
5914 mem = validize_mem (gen_rtx_MEM (mode, addr));
5916 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5917 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5918 MEM_VOLATILE_P (mem) = 1;
5923 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5924 EXP is the CALL_EXPR. CODE is the rtx code
5925 that corresponds to the arithmetic or logical operation from the name;
5926 an exception here is that NOT actually means NAND. TARGET is an optional
5927 place for us to store the results; AFTER is true if this is the
5928 fetch_and_xxx form. IGNORE is true if we don't actually care about
5929 the result of the operation at all. */
5932 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5933 enum rtx_code code, bool after,
5934 rtx target, bool ignore)
5937 enum machine_mode old_mode;
5939 /* Expand the operands. */
5940 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5942 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5943 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5944 of CONST_INTs, where we know the old_mode only from the call argument. */
5945 old_mode = GET_MODE (val);
5946 if (old_mode == VOIDmode)
5947 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5948 val = convert_modes (mode, old_mode, val, 1);
5951 return expand_sync_operation (mem, val, code);
5953 return expand_sync_fetch_operation (mem, val, code, after, target);
5956 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5957 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5958 true if this is the boolean form. TARGET is a place for us to store the
5959 results; this is NOT optional if IS_BOOL is true. */
5962 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5963 bool is_bool, rtx target)
5965 rtx old_val, new_val, mem;
5966 enum machine_mode old_mode;
5968 /* Expand the operands. */
5969 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5972 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5973 mode, EXPAND_NORMAL);
5974 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5975 of CONST_INTs, where we know the old_mode only from the call argument. */
5976 old_mode = GET_MODE (old_val);
5977 if (old_mode == VOIDmode)
5978 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5979 old_val = convert_modes (mode, old_mode, old_val, 1);
5981 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5982 mode, EXPAND_NORMAL);
5983 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5984 of CONST_INTs, where we know the old_mode only from the call argument. */
5985 old_mode = GET_MODE (new_val);
5986 if (old_mode == VOIDmode)
5987 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5988 new_val = convert_modes (mode, old_mode, new_val, 1);
5991 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5993 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5996 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5997 general form is actually an atomic exchange, and some targets only
5998 support a reduced form with the second argument being a constant 1.
5999 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6003 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6007 enum machine_mode old_mode;
6009 /* Expand the operands. */
6010 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6011 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6012 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6013 of CONST_INTs, where we know the old_mode only from the call argument. */
6014 old_mode = GET_MODE (val);
6015 if (old_mode == VOIDmode)
6016 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6017 val = convert_modes (mode, old_mode, val, 1);
6019 return expand_sync_lock_test_and_set (mem, val, target);
6022 /* Expand the __sync_synchronize intrinsic. */
6025 expand_builtin_synchronize (void)
6029 #ifdef HAVE_memory_barrier
6030 if (HAVE_memory_barrier)
6032 emit_insn (gen_memory_barrier ());
6037 /* If no explicit memory barrier instruction is available, create an
6038 empty asm stmt with a memory clobber. */
6039 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6040 tree_cons (NULL, build_string (6, "memory"), NULL));
6041 ASM_VOLATILE_P (x) = 1;
6042 expand_asm_expr (x);
6045 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6048 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6050 enum insn_code icode;
6052 rtx val = const0_rtx;
6054 /* Expand the operands. */
6055 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6057 /* If there is an explicit operation in the md file, use it. */
6058 icode = sync_lock_release[mode];
6059 if (icode != CODE_FOR_nothing)
6061 if (!insn_data[icode].operand[1].predicate (val, mode))
6062 val = force_reg (mode, val);
6064 insn = GEN_FCN (icode) (mem, val);
6072 /* Otherwise we can implement this operation by emitting a barrier
6073 followed by a store of zero. */
6074 expand_builtin_synchronize ();
6075 emit_move_insn (mem, val);
6078 /* Expand an expression EXP that calls a built-in function,
6079 with result going to TARGET if that's convenient
6080 (and in mode MODE if that's convenient).
6081 SUBTARGET may be used as the target for computing one of EXP's operands.
6082 IGNORE is nonzero if the value is to be ignored. */
6085 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6088 tree fndecl = get_callee_fndecl (exp);
6089 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6090 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6092 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6093 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6095 /* When not optimizing, generate calls to library functions for a certain
6098 && !called_as_built_in (fndecl)
6099 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6100 && fcode != BUILT_IN_ALLOCA)
6101 return expand_call (exp, target, ignore);
6103 /* The built-in function expanders test for target == const0_rtx
6104 to determine whether the function's result will be ignored. */
6106 target = const0_rtx;
6108 /* If the result of a pure or const built-in function is ignored, and
6109 none of its arguments are volatile, we can avoid expanding the
6110 built-in call and just evaluate the arguments for side-effects. */
6111 if (target == const0_rtx
6112 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6114 bool volatilep = false;
6116 call_expr_arg_iterator iter;
6118 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6119 if (TREE_THIS_VOLATILE (arg))
6127 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6128 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6135 CASE_FLT_FN (BUILT_IN_FABS):
6136 target = expand_builtin_fabs (exp, target, subtarget);
6141 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6142 target = expand_builtin_copysign (exp, target, subtarget);
6147 /* Just do a normal library call if we were unable to fold
6149 CASE_FLT_FN (BUILT_IN_CABS):
6152 CASE_FLT_FN (BUILT_IN_EXP):
6153 CASE_FLT_FN (BUILT_IN_EXP10):
6154 CASE_FLT_FN (BUILT_IN_POW10):
6155 CASE_FLT_FN (BUILT_IN_EXP2):
6156 CASE_FLT_FN (BUILT_IN_EXPM1):
6157 CASE_FLT_FN (BUILT_IN_LOGB):
6158 CASE_FLT_FN (BUILT_IN_LOG):
6159 CASE_FLT_FN (BUILT_IN_LOG10):
6160 CASE_FLT_FN (BUILT_IN_LOG2):
6161 CASE_FLT_FN (BUILT_IN_LOG1P):
6162 CASE_FLT_FN (BUILT_IN_TAN):
6163 CASE_FLT_FN (BUILT_IN_ASIN):
6164 CASE_FLT_FN (BUILT_IN_ACOS):
6165 CASE_FLT_FN (BUILT_IN_ATAN):
6166 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6167 because of possible accuracy problems. */
6168 if (! flag_unsafe_math_optimizations)
6170 CASE_FLT_FN (BUILT_IN_SQRT):
6171 CASE_FLT_FN (BUILT_IN_FLOOR):
6172 CASE_FLT_FN (BUILT_IN_CEIL):
6173 CASE_FLT_FN (BUILT_IN_TRUNC):
6174 CASE_FLT_FN (BUILT_IN_ROUND):
6175 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6176 CASE_FLT_FN (BUILT_IN_RINT):
6177 target = expand_builtin_mathfn (exp, target, subtarget);
6182 CASE_FLT_FN (BUILT_IN_ILOGB):
6183 if (! flag_unsafe_math_optimizations)
6185 CASE_FLT_FN (BUILT_IN_ISINF):
6186 CASE_FLT_FN (BUILT_IN_FINITE):
6187 case BUILT_IN_ISFINITE:
6188 case BUILT_IN_ISNORMAL:
6189 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6194 CASE_FLT_FN (BUILT_IN_LCEIL):
6195 CASE_FLT_FN (BUILT_IN_LLCEIL):
6196 CASE_FLT_FN (BUILT_IN_LFLOOR):
6197 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6198 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6203 CASE_FLT_FN (BUILT_IN_LRINT):
6204 CASE_FLT_FN (BUILT_IN_LLRINT):
6205 CASE_FLT_FN (BUILT_IN_LROUND):
6206 CASE_FLT_FN (BUILT_IN_LLROUND):
6207 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6212 CASE_FLT_FN (BUILT_IN_POW):
6213 target = expand_builtin_pow (exp, target, subtarget);
6218 CASE_FLT_FN (BUILT_IN_POWI):
6219 target = expand_builtin_powi (exp, target, subtarget);
6224 CASE_FLT_FN (BUILT_IN_ATAN2):
6225 CASE_FLT_FN (BUILT_IN_LDEXP):
6226 CASE_FLT_FN (BUILT_IN_SCALB):
6227 CASE_FLT_FN (BUILT_IN_SCALBN):
6228 CASE_FLT_FN (BUILT_IN_SCALBLN):
6229 if (! flag_unsafe_math_optimizations)
6232 CASE_FLT_FN (BUILT_IN_FMOD):
6233 CASE_FLT_FN (BUILT_IN_REMAINDER):
6234 CASE_FLT_FN (BUILT_IN_DREM):
6235 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6240 CASE_FLT_FN (BUILT_IN_CEXPI):
6241 target = expand_builtin_cexpi (exp, target, subtarget);
6242 gcc_assert (target);
6245 CASE_FLT_FN (BUILT_IN_SIN):
6246 CASE_FLT_FN (BUILT_IN_COS):
6247 if (! flag_unsafe_math_optimizations)
6249 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6254 CASE_FLT_FN (BUILT_IN_SINCOS):
6255 if (! flag_unsafe_math_optimizations)
6257 target = expand_builtin_sincos (exp);
6262 case BUILT_IN_APPLY_ARGS:
6263 return expand_builtin_apply_args ();
6265 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6266 FUNCTION with a copy of the parameters described by
6267 ARGUMENTS, and ARGSIZE. It returns a block of memory
6268 allocated on the stack into which is stored all the registers
6269 that might possibly be used for returning the result of a
6270 function. ARGUMENTS is the value returned by
6271 __builtin_apply_args. ARGSIZE is the number of bytes of
6272 arguments that must be copied. ??? How should this value be
6273 computed? We'll also need a safe worst case value for varargs
6275 case BUILT_IN_APPLY:
6276 if (!validate_arglist (exp, POINTER_TYPE,
6277 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6278 && !validate_arglist (exp, REFERENCE_TYPE,
6279 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6285 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6286 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6287 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6289 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6292 /* __builtin_return (RESULT) causes the function to return the
6293 value described by RESULT. RESULT is address of the block of
6294 memory returned by __builtin_apply. */
6295 case BUILT_IN_RETURN:
6296 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6297 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6300 case BUILT_IN_SAVEREGS:
6301 return expand_builtin_saveregs ();
6303 case BUILT_IN_ARGS_INFO:
6304 return expand_builtin_args_info (exp);
6306 case BUILT_IN_VA_ARG_PACK:
6307 /* All valid uses of __builtin_va_arg_pack () are removed during
6309 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6312 case BUILT_IN_VA_ARG_PACK_LEN:
6313 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6315 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6318 /* Return the address of the first anonymous stack arg. */
6319 case BUILT_IN_NEXT_ARG:
6320 if (fold_builtin_next_arg (exp, false))
6322 return expand_builtin_next_arg ();
6324 case BUILT_IN_CLEAR_CACHE:
6325 target = expand_builtin___clear_cache (exp);
6330 case BUILT_IN_CLASSIFY_TYPE:
6331 return expand_builtin_classify_type (exp);
6333 case BUILT_IN_CONSTANT_P:
6336 case BUILT_IN_FRAME_ADDRESS:
6337 case BUILT_IN_RETURN_ADDRESS:
6338 return expand_builtin_frame_address (fndecl, exp);
6340 /* Returns the address of the area where the structure is returned.
6342 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6343 if (call_expr_nargs (exp) != 0
6344 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6345 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6348 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6350 case BUILT_IN_ALLOCA:
6351 target = expand_builtin_alloca (exp, target);
6356 case BUILT_IN_STACK_SAVE:
6357 return expand_stack_save ();
6359 case BUILT_IN_STACK_RESTORE:
6360 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6363 case BUILT_IN_BSWAP32:
6364 case BUILT_IN_BSWAP64:
6365 target = expand_builtin_bswap (exp, target, subtarget);
6371 CASE_INT_FN (BUILT_IN_FFS):
6372 case BUILT_IN_FFSIMAX:
6373 target = expand_builtin_unop (target_mode, exp, target,
6374 subtarget, ffs_optab);
6379 CASE_INT_FN (BUILT_IN_CLZ):
6380 case BUILT_IN_CLZIMAX:
6381 target = expand_builtin_unop (target_mode, exp, target,
6382 subtarget, clz_optab);
6387 CASE_INT_FN (BUILT_IN_CTZ):
6388 case BUILT_IN_CTZIMAX:
6389 target = expand_builtin_unop (target_mode, exp, target,
6390 subtarget, ctz_optab);
6395 CASE_INT_FN (BUILT_IN_POPCOUNT):
6396 case BUILT_IN_POPCOUNTIMAX:
6397 target = expand_builtin_unop (target_mode, exp, target,
6398 subtarget, popcount_optab);
6403 CASE_INT_FN (BUILT_IN_PARITY):
6404 case BUILT_IN_PARITYIMAX:
6405 target = expand_builtin_unop (target_mode, exp, target,
6406 subtarget, parity_optab);
6411 case BUILT_IN_STRLEN:
6412 target = expand_builtin_strlen (exp, target, target_mode);
6417 case BUILT_IN_STRCPY:
6418 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6423 case BUILT_IN_STRNCPY:
6424 target = expand_builtin_strncpy (exp, target, mode);
6429 case BUILT_IN_STPCPY:
6430 target = expand_builtin_stpcpy (exp, target, mode);
6435 case BUILT_IN_STRCAT:
6436 target = expand_builtin_strcat (fndecl, exp, target, mode);
6441 case BUILT_IN_STRNCAT:
6442 target = expand_builtin_strncat (exp, target, mode);
6447 case BUILT_IN_STRSPN:
6448 target = expand_builtin_strspn (exp, target, mode);
6453 case BUILT_IN_STRCSPN:
6454 target = expand_builtin_strcspn (exp, target, mode);
6459 case BUILT_IN_STRSTR:
6460 target = expand_builtin_strstr (exp, target, mode);
6465 case BUILT_IN_STRPBRK:
6466 target = expand_builtin_strpbrk (exp, target, mode);
6471 case BUILT_IN_INDEX:
6472 case BUILT_IN_STRCHR:
6473 target = expand_builtin_strchr (exp, target, mode);
6478 case BUILT_IN_RINDEX:
6479 case BUILT_IN_STRRCHR:
6480 target = expand_builtin_strrchr (exp, target, mode);
6485 case BUILT_IN_MEMCPY:
6486 target = expand_builtin_memcpy (exp, target, mode);
6491 case BUILT_IN_MEMPCPY:
6492 target = expand_builtin_mempcpy (exp, target, mode);
6497 case BUILT_IN_MEMMOVE:
6498 target = expand_builtin_memmove (exp, target, mode, ignore);
6503 case BUILT_IN_BCOPY:
6504 target = expand_builtin_bcopy (exp, ignore);
6509 case BUILT_IN_MEMSET:
6510 target = expand_builtin_memset (exp, target, mode);
6515 case BUILT_IN_BZERO:
6516 target = expand_builtin_bzero (exp);
6521 case BUILT_IN_STRCMP:
6522 target = expand_builtin_strcmp (exp, target, mode);
6527 case BUILT_IN_STRNCMP:
6528 target = expand_builtin_strncmp (exp, target, mode);
6533 case BUILT_IN_MEMCHR:
6534 target = expand_builtin_memchr (exp, target, mode);
6540 case BUILT_IN_MEMCMP:
6541 target = expand_builtin_memcmp (exp, target, mode);
6546 case BUILT_IN_SETJMP:
6547 /* This should have been lowered to the builtins below. */
6550 case BUILT_IN_SETJMP_SETUP:
6551 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6552 and the receiver label. */
6553 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6555 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6556 VOIDmode, EXPAND_NORMAL);
6557 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6558 rtx label_r = label_rtx (label);
6560 /* This is copied from the handling of non-local gotos. */
6561 expand_builtin_setjmp_setup (buf_addr, label_r);
6562 nonlocal_goto_handler_labels
6563 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6564 nonlocal_goto_handler_labels);
6565 /* ??? Do not let expand_label treat us as such since we would
6566 not want to be both on the list of non-local labels and on
6567 the list of forced labels. */
6568 FORCED_LABEL (label) = 0;
6573 case BUILT_IN_SETJMP_DISPATCHER:
6574 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6575 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6577 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6578 rtx label_r = label_rtx (label);
6580 /* Remove the dispatcher label from the list of non-local labels
6581 since the receiver labels have been added to it above. */
6582 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6587 case BUILT_IN_SETJMP_RECEIVER:
6588 /* __builtin_setjmp_receiver is passed the receiver label. */
6589 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6591 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6592 rtx label_r = label_rtx (label);
6594 expand_builtin_setjmp_receiver (label_r);
6599 /* __builtin_longjmp is passed a pointer to an array of five words.
6600 It's similar to the C library longjmp function but works with
6601 __builtin_setjmp above. */
6602 case BUILT_IN_LONGJMP:
6603 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6605 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6606 VOIDmode, EXPAND_NORMAL);
6607 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6609 if (value != const1_rtx)
6611 error ("%<__builtin_longjmp%> second argument must be 1");
6615 expand_builtin_longjmp (buf_addr, value);
6620 case BUILT_IN_NONLOCAL_GOTO:
6621 target = expand_builtin_nonlocal_goto (exp);
6626 /* This updates the setjmp buffer that is its argument with the value
6627 of the current stack pointer. */
6628 case BUILT_IN_UPDATE_SETJMP_BUF:
6629 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6632 = expand_normal (CALL_EXPR_ARG (exp, 0));
6634 expand_builtin_update_setjmp_buf (buf_addr);
6640 expand_builtin_trap ();
6643 case BUILT_IN_PRINTF:
6644 target = expand_builtin_printf (exp, target, mode, false);
6649 case BUILT_IN_PRINTF_UNLOCKED:
6650 target = expand_builtin_printf (exp, target, mode, true);
6655 case BUILT_IN_FPUTS:
6656 target = expand_builtin_fputs (exp, target, false);
6660 case BUILT_IN_FPUTS_UNLOCKED:
6661 target = expand_builtin_fputs (exp, target, true);
6666 case BUILT_IN_FPRINTF:
6667 target = expand_builtin_fprintf (exp, target, mode, false);
6672 case BUILT_IN_FPRINTF_UNLOCKED:
6673 target = expand_builtin_fprintf (exp, target, mode, true);
6678 case BUILT_IN_SPRINTF:
6679 target = expand_builtin_sprintf (exp, target, mode);
6684 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6685 case BUILT_IN_SIGNBITD32:
6686 case BUILT_IN_SIGNBITD64:
6687 case BUILT_IN_SIGNBITD128:
6688 target = expand_builtin_signbit (exp, target);
6693 /* Various hooks for the DWARF 2 __throw routine. */
6694 case BUILT_IN_UNWIND_INIT:
6695 expand_builtin_unwind_init ();
6697 case BUILT_IN_DWARF_CFA:
6698 return virtual_cfa_rtx;
6699 #ifdef DWARF2_UNWIND_INFO
6700 case BUILT_IN_DWARF_SP_COLUMN:
6701 return expand_builtin_dwarf_sp_column ();
6702 case BUILT_IN_INIT_DWARF_REG_SIZES:
6703 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6706 case BUILT_IN_FROB_RETURN_ADDR:
6707 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6708 case BUILT_IN_EXTRACT_RETURN_ADDR:
6709 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6710 case BUILT_IN_EH_RETURN:
6711 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6712 CALL_EXPR_ARG (exp, 1));
6714 #ifdef EH_RETURN_DATA_REGNO
6715 case BUILT_IN_EH_RETURN_DATA_REGNO:
6716 return expand_builtin_eh_return_data_regno (exp);
6718 case BUILT_IN_EXTEND_POINTER:
6719 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6721 case BUILT_IN_VA_START:
6722 return expand_builtin_va_start (exp);
6723 case BUILT_IN_VA_END:
6724 return expand_builtin_va_end (exp);
6725 case BUILT_IN_VA_COPY:
6726 return expand_builtin_va_copy (exp);
6727 case BUILT_IN_EXPECT:
6728 return expand_builtin_expect (exp, target);
6729 case BUILT_IN_PREFETCH:
6730 expand_builtin_prefetch (exp);
6733 case BUILT_IN_PROFILE_FUNC_ENTER:
6734 return expand_builtin_profile_func (false);
6735 case BUILT_IN_PROFILE_FUNC_EXIT:
6736 return expand_builtin_profile_func (true);
6738 case BUILT_IN_INIT_TRAMPOLINE:
6739 return expand_builtin_init_trampoline (exp);
6740 case BUILT_IN_ADJUST_TRAMPOLINE:
6741 return expand_builtin_adjust_trampoline (exp);
6744 case BUILT_IN_EXECL:
6745 case BUILT_IN_EXECV:
6746 case BUILT_IN_EXECLP:
6747 case BUILT_IN_EXECLE:
6748 case BUILT_IN_EXECVP:
6749 case BUILT_IN_EXECVE:
6750 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6755 case BUILT_IN_FETCH_AND_ADD_1:
6756 case BUILT_IN_FETCH_AND_ADD_2:
6757 case BUILT_IN_FETCH_AND_ADD_4:
6758 case BUILT_IN_FETCH_AND_ADD_8:
6759 case BUILT_IN_FETCH_AND_ADD_16:
6760 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6761 target = expand_builtin_sync_operation (mode, exp, PLUS,
6762 false, target, ignore);
6767 case BUILT_IN_FETCH_AND_SUB_1:
6768 case BUILT_IN_FETCH_AND_SUB_2:
6769 case BUILT_IN_FETCH_AND_SUB_4:
6770 case BUILT_IN_FETCH_AND_SUB_8:
6771 case BUILT_IN_FETCH_AND_SUB_16:
6772 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6773 target = expand_builtin_sync_operation (mode, exp, MINUS,
6774 false, target, ignore);
6779 case BUILT_IN_FETCH_AND_OR_1:
6780 case BUILT_IN_FETCH_AND_OR_2:
6781 case BUILT_IN_FETCH_AND_OR_4:
6782 case BUILT_IN_FETCH_AND_OR_8:
6783 case BUILT_IN_FETCH_AND_OR_16:
6784 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6785 target = expand_builtin_sync_operation (mode, exp, IOR,
6786 false, target, ignore);
6791 case BUILT_IN_FETCH_AND_AND_1:
6792 case BUILT_IN_FETCH_AND_AND_2:
6793 case BUILT_IN_FETCH_AND_AND_4:
6794 case BUILT_IN_FETCH_AND_AND_8:
6795 case BUILT_IN_FETCH_AND_AND_16:
6796 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6797 target = expand_builtin_sync_operation (mode, exp, AND,
6798 false, target, ignore);
6803 case BUILT_IN_FETCH_AND_XOR_1:
6804 case BUILT_IN_FETCH_AND_XOR_2:
6805 case BUILT_IN_FETCH_AND_XOR_4:
6806 case BUILT_IN_FETCH_AND_XOR_8:
6807 case BUILT_IN_FETCH_AND_XOR_16:
6808 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6809 target = expand_builtin_sync_operation (mode, exp, XOR,
6810 false, target, ignore);
6815 case BUILT_IN_FETCH_AND_NAND_1:
6816 case BUILT_IN_FETCH_AND_NAND_2:
6817 case BUILT_IN_FETCH_AND_NAND_4:
6818 case BUILT_IN_FETCH_AND_NAND_8:
6819 case BUILT_IN_FETCH_AND_NAND_16:
6820 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6821 target = expand_builtin_sync_operation (mode, exp, NOT,
6822 false, target, ignore);
6827 case BUILT_IN_ADD_AND_FETCH_1:
6828 case BUILT_IN_ADD_AND_FETCH_2:
6829 case BUILT_IN_ADD_AND_FETCH_4:
6830 case BUILT_IN_ADD_AND_FETCH_8:
6831 case BUILT_IN_ADD_AND_FETCH_16:
6832 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6833 target = expand_builtin_sync_operation (mode, exp, PLUS,
6834 true, target, ignore);
6839 case BUILT_IN_SUB_AND_FETCH_1:
6840 case BUILT_IN_SUB_AND_FETCH_2:
6841 case BUILT_IN_SUB_AND_FETCH_4:
6842 case BUILT_IN_SUB_AND_FETCH_8:
6843 case BUILT_IN_SUB_AND_FETCH_16:
6844 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6845 target = expand_builtin_sync_operation (mode, exp, MINUS,
6846 true, target, ignore);
6851 case BUILT_IN_OR_AND_FETCH_1:
6852 case BUILT_IN_OR_AND_FETCH_2:
6853 case BUILT_IN_OR_AND_FETCH_4:
6854 case BUILT_IN_OR_AND_FETCH_8:
6855 case BUILT_IN_OR_AND_FETCH_16:
6856 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6857 target = expand_builtin_sync_operation (mode, exp, IOR,
6858 true, target, ignore);
6863 case BUILT_IN_AND_AND_FETCH_1:
6864 case BUILT_IN_AND_AND_FETCH_2:
6865 case BUILT_IN_AND_AND_FETCH_4:
6866 case BUILT_IN_AND_AND_FETCH_8:
6867 case BUILT_IN_AND_AND_FETCH_16:
6868 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6869 target = expand_builtin_sync_operation (mode, exp, AND,
6870 true, target, ignore);
6875 case BUILT_IN_XOR_AND_FETCH_1:
6876 case BUILT_IN_XOR_AND_FETCH_2:
6877 case BUILT_IN_XOR_AND_FETCH_4:
6878 case BUILT_IN_XOR_AND_FETCH_8:
6879 case BUILT_IN_XOR_AND_FETCH_16:
6880 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6881 target = expand_builtin_sync_operation (mode, exp, XOR,
6882 true, target, ignore);
6887 case BUILT_IN_NAND_AND_FETCH_1:
6888 case BUILT_IN_NAND_AND_FETCH_2:
6889 case BUILT_IN_NAND_AND_FETCH_4:
6890 case BUILT_IN_NAND_AND_FETCH_8:
6891 case BUILT_IN_NAND_AND_FETCH_16:
6892 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6893 target = expand_builtin_sync_operation (mode, exp, NOT,
6894 true, target, ignore);
6899 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6900 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6901 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6902 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6903 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6904 if (mode == VOIDmode)
6905 mode = TYPE_MODE (boolean_type_node);
6906 if (!target || !register_operand (target, mode))
6907 target = gen_reg_rtx (mode);
6909 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6910 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6915 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6916 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6917 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6918 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6919 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6920 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6921 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6926 case BUILT_IN_LOCK_TEST_AND_SET_1:
6927 case BUILT_IN_LOCK_TEST_AND_SET_2:
6928 case BUILT_IN_LOCK_TEST_AND_SET_4:
6929 case BUILT_IN_LOCK_TEST_AND_SET_8:
6930 case BUILT_IN_LOCK_TEST_AND_SET_16:
6931 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6932 target = expand_builtin_lock_test_and_set (mode, exp, target);
6937 case BUILT_IN_LOCK_RELEASE_1:
6938 case BUILT_IN_LOCK_RELEASE_2:
6939 case BUILT_IN_LOCK_RELEASE_4:
6940 case BUILT_IN_LOCK_RELEASE_8:
6941 case BUILT_IN_LOCK_RELEASE_16:
6942 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6943 expand_builtin_lock_release (mode, exp);
6946 case BUILT_IN_SYNCHRONIZE:
6947 expand_builtin_synchronize ();
6950 case BUILT_IN_OBJECT_SIZE:
6951 return expand_builtin_object_size (exp);
6953 case BUILT_IN_MEMCPY_CHK:
6954 case BUILT_IN_MEMPCPY_CHK:
6955 case BUILT_IN_MEMMOVE_CHK:
6956 case BUILT_IN_MEMSET_CHK:
6957 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6962 case BUILT_IN_STRCPY_CHK:
6963 case BUILT_IN_STPCPY_CHK:
6964 case BUILT_IN_STRNCPY_CHK:
6965 case BUILT_IN_STRCAT_CHK:
6966 case BUILT_IN_STRNCAT_CHK:
6967 case BUILT_IN_SNPRINTF_CHK:
6968 case BUILT_IN_VSNPRINTF_CHK:
6969 maybe_emit_chk_warning (exp, fcode);
6972 case BUILT_IN_SPRINTF_CHK:
6973 case BUILT_IN_VSPRINTF_CHK:
6974 maybe_emit_sprintf_chk_warning (exp, fcode);
6977 default: /* just do library call, if unknown builtin */
6981 /* The switch statement above can drop through to cause the function
6982 to be called normally. */
6983 return expand_call (exp, target, ignore);
6986 /* Determine whether a tree node represents a call to a built-in
6987 function. If the tree T is a call to a built-in function with
6988 the right number of arguments of the appropriate types, return
6989 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6990 Otherwise the return value is END_BUILTINS. */
6992 enum built_in_function
6993 builtin_mathfn_code (const_tree t)
6995 const_tree fndecl, arg, parmlist;
6996 const_tree argtype, parmtype;
6997 const_call_expr_arg_iterator iter;
6999 if (TREE_CODE (t) != CALL_EXPR
7000 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7001 return END_BUILTINS;
7003 fndecl = get_callee_fndecl (t);
7004 if (fndecl == NULL_TREE
7005 || TREE_CODE (fndecl) != FUNCTION_DECL
7006 || ! DECL_BUILT_IN (fndecl)
7007 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7008 return END_BUILTINS;
7010 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7011 init_const_call_expr_arg_iterator (t, &iter);
7012 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7014 /* If a function doesn't take a variable number of arguments,
7015 the last element in the list will have type `void'. */
7016 parmtype = TREE_VALUE (parmlist);
7017 if (VOID_TYPE_P (parmtype))
7019 if (more_const_call_expr_args_p (&iter))
7020 return END_BUILTINS;
7021 return DECL_FUNCTION_CODE (fndecl);
7024 if (! more_const_call_expr_args_p (&iter))
7025 return END_BUILTINS;
7027 arg = next_const_call_expr_arg (&iter);
7028 argtype = TREE_TYPE (arg);
7030 if (SCALAR_FLOAT_TYPE_P (parmtype))
7032 if (! SCALAR_FLOAT_TYPE_P (argtype))
7033 return END_BUILTINS;
7035 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7037 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7038 return END_BUILTINS;
7040 else if (POINTER_TYPE_P (parmtype))
7042 if (! POINTER_TYPE_P (argtype))
7043 return END_BUILTINS;
7045 else if (INTEGRAL_TYPE_P (parmtype))
7047 if (! INTEGRAL_TYPE_P (argtype))
7048 return END_BUILTINS;
7051 return END_BUILTINS;
7054 /* Variable-length argument list. */
7055 return DECL_FUNCTION_CODE (fndecl);
7058 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7059 evaluate to a constant. */
7062 fold_builtin_constant_p (tree arg)
7064 /* We return 1 for a numeric type that's known to be a constant
7065 value at compile-time or for an aggregate type that's a
7066 literal constant. */
7069 /* If we know this is a constant, emit the constant of one. */
7070 if (CONSTANT_CLASS_P (arg)
7071 || (TREE_CODE (arg) == CONSTRUCTOR
7072 && TREE_CONSTANT (arg)))
7073 return integer_one_node;
7074 if (TREE_CODE (arg) == ADDR_EXPR)
7076 tree op = TREE_OPERAND (arg, 0);
7077 if (TREE_CODE (op) == STRING_CST
7078 || (TREE_CODE (op) == ARRAY_REF
7079 && integer_zerop (TREE_OPERAND (op, 1))
7080 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7081 return integer_one_node;
7084 /* If this expression has side effects, show we don't know it to be a
7085 constant. Likewise if it's a pointer or aggregate type since in
7086 those case we only want literals, since those are only optimized
7087 when generating RTL, not later.
7088 And finally, if we are compiling an initializer, not code, we
7089 need to return a definite result now; there's not going to be any
7090 more optimization done. */
7091 if (TREE_SIDE_EFFECTS (arg)
7092 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7093 || POINTER_TYPE_P (TREE_TYPE (arg))
7095 || folding_initializer)
7096 return integer_zero_node;
7101 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7102 return it as a truthvalue. */
7105 build_builtin_expect_predicate (tree pred, tree expected)
7107 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7109 fn = built_in_decls[BUILT_IN_EXPECT];
7110 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7111 ret_type = TREE_TYPE (TREE_TYPE (fn));
7112 pred_type = TREE_VALUE (arg_types);
7113 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7115 pred = fold_convert (pred_type, pred);
7116 expected = fold_convert (expected_type, expected);
7117 call_expr = build_call_expr (fn, 2, pred, expected);
7119 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7120 build_int_cst (ret_type, 0));
7123 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7124 NULL_TREE if no simplification is possible. */
7127 fold_builtin_expect (tree arg0, tree arg1)
7130 enum tree_code code;
7132 /* If this is a builtin_expect within a builtin_expect keep the
7133 inner one. See through a comparison against a constant. It
7134 might have been added to create a thruthvalue. */
7136 if (COMPARISON_CLASS_P (inner)
7137 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7138 inner = TREE_OPERAND (inner, 0);
7140 if (TREE_CODE (inner) == CALL_EXPR
7141 && (fndecl = get_callee_fndecl (inner))
7142 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7143 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7146 /* Distribute the expected value over short-circuiting operators.
7147 See through the cast from truthvalue_type_node to long. */
7149 while (TREE_CODE (inner) == NOP_EXPR
7150 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7151 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7152 inner = TREE_OPERAND (inner, 0);
7154 code = TREE_CODE (inner);
7155 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7157 tree op0 = TREE_OPERAND (inner, 0);
7158 tree op1 = TREE_OPERAND (inner, 1);
7160 op0 = build_builtin_expect_predicate (op0, arg1);
7161 op1 = build_builtin_expect_predicate (op1, arg1);
7162 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7164 return fold_convert (TREE_TYPE (arg0), inner);
7167 /* If the argument isn't invariant then there's nothing else we can do. */
7168 if (!TREE_CONSTANT (arg0))
7171 /* If we expect that a comparison against the argument will fold to
7172 a constant return the constant. In practice, this means a true
7173 constant or the address of a non-weak symbol. */
7176 if (TREE_CODE (inner) == ADDR_EXPR)
7180 inner = TREE_OPERAND (inner, 0);
7182 while (TREE_CODE (inner) == COMPONENT_REF
7183 || TREE_CODE (inner) == ARRAY_REF);
7184 if (DECL_P (inner) && DECL_WEAK (inner))
7188 /* Otherwise, ARG0 already has the proper type for the return value. */
7192 /* Fold a call to __builtin_classify_type with argument ARG. */
7195 fold_builtin_classify_type (tree arg)
7198 return build_int_cst (NULL_TREE, no_type_class);
7200 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7203 /* Fold a call to __builtin_strlen with argument ARG. */
7206 fold_builtin_strlen (tree arg)
7208 if (!validate_arg (arg, POINTER_TYPE))
7212 tree len = c_strlen (arg, 0);
7216 /* Convert from the internal "sizetype" type to "size_t". */
7218 len = fold_convert (size_type_node, len);
7226 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7229 fold_builtin_inf (tree type, int warn)
7231 REAL_VALUE_TYPE real;
7233 /* __builtin_inff is intended to be usable to define INFINITY on all
7234 targets. If an infinity is not available, INFINITY expands "to a
7235 positive constant of type float that overflows at translation
7236 time", footnote "In this case, using INFINITY will violate the
7237 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7238 Thus we pedwarn to ensure this constraint violation is
7240 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7241 pedwarn ("target format does not support infinity");
7244 return build_real (type, real);
7247 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7250 fold_builtin_nan (tree arg, tree type, int quiet)
7252 REAL_VALUE_TYPE real;
7255 if (!validate_arg (arg, POINTER_TYPE))
7257 str = c_getstr (arg);
7261 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7264 return build_real (type, real);
7267 /* Return true if the floating point expression T has an integer value.
7268 We also allow +Inf, -Inf and NaN to be considered integer values. */
7271 integer_valued_real_p (tree t)
7273 switch (TREE_CODE (t))
7280 return integer_valued_real_p (TREE_OPERAND (t, 0));
7285 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
7292 return integer_valued_real_p (TREE_OPERAND (t, 0))
7293 && integer_valued_real_p (TREE_OPERAND (t, 1));
7296 return integer_valued_real_p (TREE_OPERAND (t, 1))
7297 && integer_valued_real_p (TREE_OPERAND (t, 2));
7300 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7304 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7305 if (TREE_CODE (type) == INTEGER_TYPE)
7307 if (TREE_CODE (type) == REAL_TYPE)
7308 return integer_valued_real_p (TREE_OPERAND (t, 0));
7313 switch (builtin_mathfn_code (t))
7315 CASE_FLT_FN (BUILT_IN_CEIL):
7316 CASE_FLT_FN (BUILT_IN_FLOOR):
7317 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7318 CASE_FLT_FN (BUILT_IN_RINT):
7319 CASE_FLT_FN (BUILT_IN_ROUND):
7320 CASE_FLT_FN (BUILT_IN_TRUNC):
7323 CASE_FLT_FN (BUILT_IN_FMIN):
7324 CASE_FLT_FN (BUILT_IN_FMAX):
7325 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7326 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7339 /* FNDECL is assumed to be a builtin where truncation can be propagated
7340 across (for instance floor((double)f) == (double)floorf (f).
7341 Do the transformation for a call with argument ARG. */
7344 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7346 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7348 if (!validate_arg (arg, REAL_TYPE))
7351 /* Integer rounding functions are idempotent. */
7352 if (fcode == builtin_mathfn_code (arg))
7355 /* If argument is already integer valued, and we don't need to worry
7356 about setting errno, there's no need to perform rounding. */
7357 if (! flag_errno_math && integer_valued_real_p (arg))
7362 tree arg0 = strip_float_extensions (arg);
7363 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7364 tree newtype = TREE_TYPE (arg0);
7367 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7368 && (decl = mathfn_built_in (newtype, fcode)))
7369 return fold_convert (ftype,
7370 build_call_expr (decl, 1,
7371 fold_convert (newtype, arg0)));
7376 /* FNDECL is assumed to be builtin which can narrow the FP type of
7377 the argument, for instance lround((double)f) -> lroundf (f).
7378 Do the transformation for a call with argument ARG. */
7381 fold_fixed_mathfn (tree fndecl, tree arg)
7383 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7385 if (!validate_arg (arg, REAL_TYPE))
7388 /* If argument is already integer valued, and we don't need to worry
7389 about setting errno, there's no need to perform rounding. */
7390 if (! flag_errno_math && integer_valued_real_p (arg))
7391 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7395 tree ftype = TREE_TYPE (arg);
7396 tree arg0 = strip_float_extensions (arg);
7397 tree newtype = TREE_TYPE (arg0);
7400 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7401 && (decl = mathfn_built_in (newtype, fcode)))
7402 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7405 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7406 sizeof (long long) == sizeof (long). */
7407 if (TYPE_PRECISION (long_long_integer_type_node)
7408 == TYPE_PRECISION (long_integer_type_node))
7410 tree newfn = NULL_TREE;
7413 CASE_FLT_FN (BUILT_IN_LLCEIL):
7414 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7417 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7418 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7421 CASE_FLT_FN (BUILT_IN_LLROUND):
7422 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7425 CASE_FLT_FN (BUILT_IN_LLRINT):
7426 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7435 tree newcall = build_call_expr(newfn, 1, arg);
7436 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7443 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7444 return type. Return NULL_TREE if no simplification can be made. */
7447 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7451 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7452 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7455 /* Calculate the result when the argument is a constant. */
7456 if (TREE_CODE (arg) == COMPLEX_CST
7457 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7461 if (TREE_CODE (arg) == COMPLEX_EXPR)
7463 tree real = TREE_OPERAND (arg, 0);
7464 tree imag = TREE_OPERAND (arg, 1);
7466 /* If either part is zero, cabs is fabs of the other. */
7467 if (real_zerop (real))
7468 return fold_build1 (ABS_EXPR, type, imag);
7469 if (real_zerop (imag))
7470 return fold_build1 (ABS_EXPR, type, real);
7472 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7473 if (flag_unsafe_math_optimizations
7474 && operand_equal_p (real, imag, OEP_PURE_SAME))
7476 const REAL_VALUE_TYPE sqrt2_trunc
7477 = real_value_truncate (TYPE_MODE (type),
7478 *get_real_const (rv_sqrt2));
7480 return fold_build2 (MULT_EXPR, type,
7481 fold_build1 (ABS_EXPR, type, real),
7482 build_real (type, sqrt2_trunc));
7486 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7487 if (TREE_CODE (arg) == NEGATE_EXPR
7488 || TREE_CODE (arg) == CONJ_EXPR)
7489 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7491 /* Don't do this when optimizing for size. */
7492 if (flag_unsafe_math_optimizations
7493 && optimize && !optimize_size)
7495 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7497 if (sqrtfn != NULL_TREE)
7499 tree rpart, ipart, result;
7501 arg = builtin_save_expr (arg);
7503 rpart = fold_build1 (REALPART_EXPR, type, arg);
7504 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7506 rpart = builtin_save_expr (rpart);
7507 ipart = builtin_save_expr (ipart);
7509 result = fold_build2 (PLUS_EXPR, type,
7510 fold_build2 (MULT_EXPR, type,
7512 fold_build2 (MULT_EXPR, type,
7515 return build_call_expr (sqrtfn, 1, result);
7522 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7523 Return NULL_TREE if no simplification can be made. */
7526 fold_builtin_sqrt (tree arg, tree type)
7529 enum built_in_function fcode;
7532 if (!validate_arg (arg, REAL_TYPE))
7535 /* Calculate the result when the argument is a constant. */
7536 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7539 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7540 fcode = builtin_mathfn_code (arg);
7541 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7543 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7544 arg = fold_build2 (MULT_EXPR, type,
7545 CALL_EXPR_ARG (arg, 0),
7546 build_real (type, dconsthalf));
7547 return build_call_expr (expfn, 1, arg);
7550 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7551 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7553 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7557 tree arg0 = CALL_EXPR_ARG (arg, 0);
7559 /* The inner root was either sqrt or cbrt. */
7560 REAL_VALUE_TYPE dconstroot =
7561 BUILTIN_SQRT_P (fcode) ? dconsthalf : *get_real_const (rv_third);
7563 /* Adjust for the outer root. */
7564 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7565 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7566 tree_root = build_real (type, dconstroot);
7567 return build_call_expr (powfn, 2, arg0, tree_root);
7571 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7572 if (flag_unsafe_math_optimizations
7573 && (fcode == BUILT_IN_POW
7574 || fcode == BUILT_IN_POWF
7575 || fcode == BUILT_IN_POWL))
7577 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7578 tree arg0 = CALL_EXPR_ARG (arg, 0);
7579 tree arg1 = CALL_EXPR_ARG (arg, 1);
7581 if (!tree_expr_nonnegative_p (arg0))
7582 arg0 = build1 (ABS_EXPR, type, arg0);
7583 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7584 build_real (type, dconsthalf));
7585 return build_call_expr (powfn, 2, arg0, narg1);
7591 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7592 Return NULL_TREE if no simplification can be made. */
7595 fold_builtin_cbrt (tree arg, tree type)
7597 const enum built_in_function fcode = builtin_mathfn_code (arg);
7600 if (!validate_arg (arg, REAL_TYPE))
7603 /* Calculate the result when the argument is a constant. */
7604 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7607 if (flag_unsafe_math_optimizations)
7609 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7610 if (BUILTIN_EXPONENT_P (fcode))
7612 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7613 const REAL_VALUE_TYPE third_trunc =
7614 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_third));
7615 arg = fold_build2 (MULT_EXPR, type,
7616 CALL_EXPR_ARG (arg, 0),
7617 build_real (type, third_trunc));
7618 return build_call_expr (expfn, 1, arg);
7621 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7622 if (BUILTIN_SQRT_P (fcode))
7624 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7628 tree arg0 = CALL_EXPR_ARG (arg, 0);
7630 REAL_VALUE_TYPE dconstroot = *get_real_const (rv_third);
7632 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7633 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7634 tree_root = build_real (type, dconstroot);
7635 return build_call_expr (powfn, 2, arg0, tree_root);
7639 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7640 if (BUILTIN_CBRT_P (fcode))
7642 tree arg0 = CALL_EXPR_ARG (arg, 0);
7643 if (tree_expr_nonnegative_p (arg0))
7645 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7650 REAL_VALUE_TYPE dconstroot;
7652 real_arithmetic (&dconstroot, MULT_EXPR,
7653 get_real_const (rv_third),
7654 get_real_const (rv_third));
7655 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7656 tree_root = build_real (type, dconstroot);
7657 return build_call_expr (powfn, 2, arg0, tree_root);
7662 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7663 if (fcode == BUILT_IN_POW
7664 || fcode == BUILT_IN_POWF
7665 || fcode == BUILT_IN_POWL)
7667 tree arg00 = CALL_EXPR_ARG (arg, 0);
7668 tree arg01 = CALL_EXPR_ARG (arg, 1);
7669 if (tree_expr_nonnegative_p (arg00))
7671 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7672 const REAL_VALUE_TYPE dconstroot
7673 = real_value_truncate (TYPE_MODE (type),
7674 *get_real_const (rv_third));
7675 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7676 build_real (type, dconstroot));
7677 return build_call_expr (powfn, 2, arg00, narg01);
7684 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7685 TYPE is the type of the return value. Return NULL_TREE if no
7686 simplification can be made. */
7689 fold_builtin_cos (tree arg, tree type, tree fndecl)
7693 if (!validate_arg (arg, REAL_TYPE))
7696 /* Calculate the result when the argument is a constant. */
7697 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7700 /* Optimize cos(-x) into cos (x). */
7701 if ((narg = fold_strip_sign_ops (arg)))
7702 return build_call_expr (fndecl, 1, narg);
7707 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7708 Return NULL_TREE if no simplification can be made. */
7711 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7713 if (validate_arg (arg, REAL_TYPE))
7717 /* Calculate the result when the argument is a constant. */
7718 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7721 /* Optimize cosh(-x) into cosh (x). */
7722 if ((narg = fold_strip_sign_ops (arg)))
7723 return build_call_expr (fndecl, 1, narg);
7729 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7730 Return NULL_TREE if no simplification can be made. */
7733 fold_builtin_tan (tree arg, tree type)
7735 enum built_in_function fcode;
7738 if (!validate_arg (arg, REAL_TYPE))
7741 /* Calculate the result when the argument is a constant. */
7742 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7745 /* Optimize tan(atan(x)) = x. */
7746 fcode = builtin_mathfn_code (arg);
7747 if (flag_unsafe_math_optimizations
7748 && (fcode == BUILT_IN_ATAN
7749 || fcode == BUILT_IN_ATANF
7750 || fcode == BUILT_IN_ATANL))
7751 return CALL_EXPR_ARG (arg, 0);
7756 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7757 NULL_TREE if no simplification can be made. */
7760 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7765 if (!validate_arg (arg0, REAL_TYPE)
7766 || !validate_arg (arg1, POINTER_TYPE)
7767 || !validate_arg (arg2, POINTER_TYPE))
7770 type = TREE_TYPE (arg0);
7772 /* Calculate the result when the argument is a constant. */
7773 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7776 /* Canonicalize sincos to cexpi. */
7777 if (!TARGET_C99_FUNCTIONS)
7779 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7783 call = build_call_expr (fn, 1, arg0);
7784 call = builtin_save_expr (call);
7786 return build2 (COMPOUND_EXPR, type,
7787 build2 (MODIFY_EXPR, void_type_node,
7788 build_fold_indirect_ref (arg1),
7789 build1 (IMAGPART_EXPR, type, call)),
7790 build2 (MODIFY_EXPR, void_type_node,
7791 build_fold_indirect_ref (arg2),
7792 build1 (REALPART_EXPR, type, call)));
7795 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7796 NULL_TREE if no simplification can be made. */
7799 fold_builtin_cexp (tree arg0, tree type)
7802 tree realp, imagp, ifn;
7804 if (!validate_arg (arg0, COMPLEX_TYPE))
7807 rtype = TREE_TYPE (TREE_TYPE (arg0));
7809 /* In case we can figure out the real part of arg0 and it is constant zero
7811 if (!TARGET_C99_FUNCTIONS)
7813 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7817 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7818 && real_zerop (realp))
7820 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7821 return build_call_expr (ifn, 1, narg);
7824 /* In case we can easily decompose real and imaginary parts split cexp
7825 to exp (r) * cexpi (i). */
7826 if (flag_unsafe_math_optimizations
7829 tree rfn, rcall, icall;
7831 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7835 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7839 icall = build_call_expr (ifn, 1, imagp);
7840 icall = builtin_save_expr (icall);
7841 rcall = build_call_expr (rfn, 1, realp);
7842 rcall = builtin_save_expr (rcall);
7843 return fold_build2 (COMPLEX_EXPR, type,
7844 fold_build2 (MULT_EXPR, rtype,
7846 fold_build1 (REALPART_EXPR, rtype, icall)),
7847 fold_build2 (MULT_EXPR, rtype,
7849 fold_build1 (IMAGPART_EXPR, rtype, icall)));
7855 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7856 Return NULL_TREE if no simplification can be made. */
7859 fold_builtin_trunc (tree fndecl, tree arg)
7861 if (!validate_arg (arg, REAL_TYPE))
7864 /* Optimize trunc of constant value. */
7865 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7867 REAL_VALUE_TYPE r, x;
7868 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7870 x = TREE_REAL_CST (arg);
7871 real_trunc (&r, TYPE_MODE (type), &x);
7872 return build_real (type, r);
7875 return fold_trunc_transparent_mathfn (fndecl, arg);
7878 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7879 Return NULL_TREE if no simplification can be made. */
7882 fold_builtin_floor (tree fndecl, tree arg)
7884 if (!validate_arg (arg, REAL_TYPE))
7887 /* Optimize floor of constant value. */
7888 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7892 x = TREE_REAL_CST (arg);
7893 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7895 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7898 real_floor (&r, TYPE_MODE (type), &x);
7899 return build_real (type, r);
7903 /* Fold floor (x) where x is nonnegative to trunc (x). */
7904 if (tree_expr_nonnegative_p (arg))
7906 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7908 return build_call_expr (truncfn, 1, arg);
7911 return fold_trunc_transparent_mathfn (fndecl, arg);
7914 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7915 Return NULL_TREE if no simplification can be made. */
7918 fold_builtin_ceil (tree fndecl, tree arg)
7920 if (!validate_arg (arg, REAL_TYPE))
7923 /* Optimize ceil of constant value. */
7924 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7928 x = TREE_REAL_CST (arg);
7929 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7931 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7934 real_ceil (&r, TYPE_MODE (type), &x);
7935 return build_real (type, r);
7939 return fold_trunc_transparent_mathfn (fndecl, arg);
7942 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7943 Return NULL_TREE if no simplification can be made. */
7946 fold_builtin_round (tree fndecl, tree arg)
7948 if (!validate_arg (arg, REAL_TYPE))
7951 /* Optimize round of constant value. */
7952 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7956 x = TREE_REAL_CST (arg);
7957 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7959 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7962 real_round (&r, TYPE_MODE (type), &x);
7963 return build_real (type, r);
7967 return fold_trunc_transparent_mathfn (fndecl, arg);
7970 /* Fold function call to builtin lround, lroundf or lroundl (or the
7971 corresponding long long versions) and other rounding functions. ARG
7972 is the argument to the call. Return NULL_TREE if no simplification
7976 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7978 if (!validate_arg (arg, REAL_TYPE))
7981 /* Optimize lround of constant value. */
7982 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7984 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7986 if (real_isfinite (&x))
7988 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7989 tree ftype = TREE_TYPE (arg);
7990 unsigned HOST_WIDE_INT lo2;
7991 HOST_WIDE_INT hi, lo;
7994 switch (DECL_FUNCTION_CODE (fndecl))
7996 CASE_FLT_FN (BUILT_IN_LFLOOR):
7997 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7998 real_floor (&r, TYPE_MODE (ftype), &x);
8001 CASE_FLT_FN (BUILT_IN_LCEIL):
8002 CASE_FLT_FN (BUILT_IN_LLCEIL):
8003 real_ceil (&r, TYPE_MODE (ftype), &x);
8006 CASE_FLT_FN (BUILT_IN_LROUND):
8007 CASE_FLT_FN (BUILT_IN_LLROUND):
8008 real_round (&r, TYPE_MODE (ftype), &x);
8015 REAL_VALUE_TO_INT (&lo, &hi, r);
8016 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8017 return build_int_cst_wide (itype, lo2, hi);
8021 switch (DECL_FUNCTION_CODE (fndecl))
8023 CASE_FLT_FN (BUILT_IN_LFLOOR):
8024 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8025 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8026 if (tree_expr_nonnegative_p (arg))
8027 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8033 return fold_fixed_mathfn (fndecl, arg);
8036 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8037 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8038 the argument to the call. Return NULL_TREE if no simplification can
8042 fold_builtin_bitop (tree fndecl, tree arg)
8044 if (!validate_arg (arg, INTEGER_TYPE))
8047 /* Optimize for constant argument. */
8048 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8050 HOST_WIDE_INT hi, width, result;
8051 unsigned HOST_WIDE_INT lo;
8054 type = TREE_TYPE (arg);
8055 width = TYPE_PRECISION (type);
8056 lo = TREE_INT_CST_LOW (arg);
8058 /* Clear all the bits that are beyond the type's precision. */
8059 if (width > HOST_BITS_PER_WIDE_INT)
8061 hi = TREE_INT_CST_HIGH (arg);
8062 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8063 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8068 if (width < HOST_BITS_PER_WIDE_INT)
8069 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8072 switch (DECL_FUNCTION_CODE (fndecl))
8074 CASE_INT_FN (BUILT_IN_FFS):
8076 result = exact_log2 (lo & -lo) + 1;
8078 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8083 CASE_INT_FN (BUILT_IN_CLZ):
8085 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8087 result = width - floor_log2 (lo) - 1;
8088 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8092 CASE_INT_FN (BUILT_IN_CTZ):
8094 result = exact_log2 (lo & -lo);
8096 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8097 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8101 CASE_INT_FN (BUILT_IN_POPCOUNT):
8104 result++, lo &= lo - 1;
8106 result++, hi &= hi - 1;
8109 CASE_INT_FN (BUILT_IN_PARITY):
8112 result++, lo &= lo - 1;
8114 result++, hi &= hi - 1;
8122 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8128 /* Fold function call to builtin_bswap and the long and long long
8129 variants. Return NULL_TREE if no simplification can be made. */
8131 fold_builtin_bswap (tree fndecl, tree arg)
8133 if (! validate_arg (arg, INTEGER_TYPE))
8136 /* Optimize constant value. */
8137 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8139 HOST_WIDE_INT hi, width, r_hi = 0;
8140 unsigned HOST_WIDE_INT lo, r_lo = 0;
8143 type = TREE_TYPE (arg);
8144 width = TYPE_PRECISION (type);
8145 lo = TREE_INT_CST_LOW (arg);
8146 hi = TREE_INT_CST_HIGH (arg);
8148 switch (DECL_FUNCTION_CODE (fndecl))
8150 case BUILT_IN_BSWAP32:
8151 case BUILT_IN_BSWAP64:
8155 for (s = 0; s < width; s += 8)
8157 int d = width - s - 8;
8158 unsigned HOST_WIDE_INT byte;
8160 if (s < HOST_BITS_PER_WIDE_INT)
8161 byte = (lo >> s) & 0xff;
8163 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8165 if (d < HOST_BITS_PER_WIDE_INT)
8168 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8178 if (width < HOST_BITS_PER_WIDE_INT)
8179 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8181 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8187 /* Return true if EXPR is the real constant contained in VALUE. */
8190 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8194 return ((TREE_CODE (expr) == REAL_CST
8195 && !TREE_OVERFLOW (expr)
8196 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8197 || (TREE_CODE (expr) == COMPLEX_CST
8198 && real_dconstp (TREE_REALPART (expr), value)
8199 && real_zerop (TREE_IMAGPART (expr))));
8202 /* A subroutine of fold_builtin to fold the various logarithmic
8203 functions. Return NULL_TREE if no simplification can me made.
8204 FUNC is the corresponding MPFR logarithm function. */
8207 fold_builtin_logarithm (tree fndecl, tree arg,
8208 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8210 if (validate_arg (arg, REAL_TYPE))
8212 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8214 const enum built_in_function fcode = builtin_mathfn_code (arg);
8216 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8217 instead we'll look for 'e' truncated to MODE. So only do
8218 this if flag_unsafe_math_optimizations is set. */
8219 if (flag_unsafe_math_optimizations && func == mpfr_log)
8221 const REAL_VALUE_TYPE e_truncated =
8222 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_e));
8223 if (real_dconstp (arg, &e_truncated))
8224 return build_real (type, dconst1);
8227 /* Calculate the result when the argument is a constant. */
8228 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8231 /* Special case, optimize logN(expN(x)) = x. */
8232 if (flag_unsafe_math_optimizations
8233 && ((func == mpfr_log
8234 && (fcode == BUILT_IN_EXP
8235 || fcode == BUILT_IN_EXPF
8236 || fcode == BUILT_IN_EXPL))
8237 || (func == mpfr_log2
8238 && (fcode == BUILT_IN_EXP2
8239 || fcode == BUILT_IN_EXP2F
8240 || fcode == BUILT_IN_EXP2L))
8241 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8242 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8244 /* Optimize logN(func()) for various exponential functions. We
8245 want to determine the value "x" and the power "exponent" in
8246 order to transform logN(x**exponent) into exponent*logN(x). */
8247 if (flag_unsafe_math_optimizations)
8249 tree exponent = 0, x = 0;
8253 CASE_FLT_FN (BUILT_IN_EXP):
8254 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8255 x = build_real (type,
8256 real_value_truncate (TYPE_MODE (type),
8257 *get_real_const (rv_e)));
8258 exponent = CALL_EXPR_ARG (arg, 0);
8260 CASE_FLT_FN (BUILT_IN_EXP2):
8261 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8262 x = build_real (type, dconst2);
8263 exponent = CALL_EXPR_ARG (arg, 0);
8265 CASE_FLT_FN (BUILT_IN_EXP10):
8266 CASE_FLT_FN (BUILT_IN_POW10):
8267 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8269 REAL_VALUE_TYPE dconst10;
8270 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8271 x = build_real (type, dconst10);
8273 exponent = CALL_EXPR_ARG (arg, 0);
8275 CASE_FLT_FN (BUILT_IN_SQRT):
8276 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8277 x = CALL_EXPR_ARG (arg, 0);
8278 exponent = build_real (type, dconsthalf);
8280 CASE_FLT_FN (BUILT_IN_CBRT):
8281 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8282 x = CALL_EXPR_ARG (arg, 0);
8283 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8284 *get_real_const (rv_third)));
8286 CASE_FLT_FN (BUILT_IN_POW):
8287 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8288 x = CALL_EXPR_ARG (arg, 0);
8289 exponent = CALL_EXPR_ARG (arg, 1);
8295 /* Now perform the optimization. */
8298 tree logfn = build_call_expr (fndecl, 1, x);
8299 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8307 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8308 NULL_TREE if no simplification can be made. */
8311 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8313 tree res, narg0, narg1;
8315 if (!validate_arg (arg0, REAL_TYPE)
8316 || !validate_arg (arg1, REAL_TYPE))
8319 /* Calculate the result when the argument is a constant. */
8320 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8323 /* If either argument to hypot has a negate or abs, strip that off.
8324 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8325 narg0 = fold_strip_sign_ops (arg0);
8326 narg1 = fold_strip_sign_ops (arg1);
8329 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8330 narg1 ? narg1 : arg1);
8333 /* If either argument is zero, hypot is fabs of the other. */
8334 if (real_zerop (arg0))
8335 return fold_build1 (ABS_EXPR, type, arg1);
8336 else if (real_zerop (arg1))
8337 return fold_build1 (ABS_EXPR, type, arg0);
8339 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8340 if (flag_unsafe_math_optimizations
8341 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8343 const REAL_VALUE_TYPE sqrt2_trunc
8344 = real_value_truncate (TYPE_MODE (type), *get_real_const (rv_sqrt2));
8345 return fold_build2 (MULT_EXPR, type,
8346 fold_build1 (ABS_EXPR, type, arg0),
8347 build_real (type, sqrt2_trunc));
8354 /* Fold a builtin function call to pow, powf, or powl. Return
8355 NULL_TREE if no simplification can be made. */
8357 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8361 if (!validate_arg (arg0, REAL_TYPE)
8362 || !validate_arg (arg1, REAL_TYPE))
8365 /* Calculate the result when the argument is a constant. */
8366 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8369 /* Optimize pow(1.0,y) = 1.0. */
8370 if (real_onep (arg0))
8371 return omit_one_operand (type, build_real (type, dconst1), arg1);
8373 if (TREE_CODE (arg1) == REAL_CST
8374 && !TREE_OVERFLOW (arg1))
8376 REAL_VALUE_TYPE cint;
8380 c = TREE_REAL_CST (arg1);
8382 /* Optimize pow(x,0.0) = 1.0. */
8383 if (REAL_VALUES_EQUAL (c, dconst0))
8384 return omit_one_operand (type, build_real (type, dconst1),
8387 /* Optimize pow(x,1.0) = x. */
8388 if (REAL_VALUES_EQUAL (c, dconst1))
8391 /* Optimize pow(x,-1.0) = 1.0/x. */
8392 if (REAL_VALUES_EQUAL (c, dconstm1))
8393 return fold_build2 (RDIV_EXPR, type,
8394 build_real (type, dconst1), arg0);
8396 /* Optimize pow(x,0.5) = sqrt(x). */
8397 if (flag_unsafe_math_optimizations
8398 && REAL_VALUES_EQUAL (c, dconsthalf))
8400 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8402 if (sqrtfn != NULL_TREE)
8403 return build_call_expr (sqrtfn, 1, arg0);
8406 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8407 if (flag_unsafe_math_optimizations)
8409 const REAL_VALUE_TYPE dconstroot
8410 = real_value_truncate (TYPE_MODE (type),
8411 *get_real_const (rv_third));
8413 if (REAL_VALUES_EQUAL (c, dconstroot))
8415 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8416 if (cbrtfn != NULL_TREE)
8417 return build_call_expr (cbrtfn, 1, arg0);
8421 /* Check for an integer exponent. */
8422 n = real_to_integer (&c);
8423 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8424 if (real_identical (&c, &cint))
8426 /* Attempt to evaluate pow at compile-time. */
8427 if (TREE_CODE (arg0) == REAL_CST
8428 && !TREE_OVERFLOW (arg0))
8433 x = TREE_REAL_CST (arg0);
8434 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8435 if (flag_unsafe_math_optimizations || !inexact)
8436 return build_real (type, x);
8439 /* Strip sign ops from even integer powers. */
8440 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8442 tree narg0 = fold_strip_sign_ops (arg0);
8444 return build_call_expr (fndecl, 2, narg0, arg1);
8449 if (flag_unsafe_math_optimizations)
8451 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8453 /* Optimize pow(expN(x),y) = expN(x*y). */
8454 if (BUILTIN_EXPONENT_P (fcode))
8456 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8457 tree arg = CALL_EXPR_ARG (arg0, 0);
8458 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8459 return build_call_expr (expfn, 1, arg);
8462 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8463 if (BUILTIN_SQRT_P (fcode))
8465 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8466 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8467 build_real (type, dconsthalf));
8468 return build_call_expr (fndecl, 2, narg0, narg1);
8471 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8472 if (BUILTIN_CBRT_P (fcode))
8474 tree arg = CALL_EXPR_ARG (arg0, 0);
8475 if (tree_expr_nonnegative_p (arg))
8477 const REAL_VALUE_TYPE dconstroot
8478 = real_value_truncate (TYPE_MODE (type),
8479 *get_real_const (rv_third));
8480 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8481 build_real (type, dconstroot));
8482 return build_call_expr (fndecl, 2, arg, narg1);
8486 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8487 if (fcode == BUILT_IN_POW
8488 || fcode == BUILT_IN_POWF
8489 || fcode == BUILT_IN_POWL)
8491 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8492 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8493 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8494 return build_call_expr (fndecl, 2, arg00, narg1);
8501 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8502 Return NULL_TREE if no simplification can be made. */
8504 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8505 tree arg0, tree arg1, tree type)
8507 if (!validate_arg (arg0, REAL_TYPE)
8508 || !validate_arg (arg1, INTEGER_TYPE))
8511 /* Optimize pow(1.0,y) = 1.0. */
8512 if (real_onep (arg0))
8513 return omit_one_operand (type, build_real (type, dconst1), arg1);
8515 if (host_integerp (arg1, 0))
8517 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8519 /* Evaluate powi at compile-time. */
8520 if (TREE_CODE (arg0) == REAL_CST
8521 && !TREE_OVERFLOW (arg0))
8524 x = TREE_REAL_CST (arg0);
8525 real_powi (&x, TYPE_MODE (type), &x, c);
8526 return build_real (type, x);
8529 /* Optimize pow(x,0) = 1.0. */
8531 return omit_one_operand (type, build_real (type, dconst1),
8534 /* Optimize pow(x,1) = x. */
8538 /* Optimize pow(x,-1) = 1.0/x. */
8540 return fold_build2 (RDIV_EXPR, type,
8541 build_real (type, dconst1), arg0);
8547 /* A subroutine of fold_builtin to fold the various exponent
8548 functions. Return NULL_TREE if no simplification can be made.
8549 FUNC is the corresponding MPFR exponent function. */
8552 fold_builtin_exponent (tree fndecl, tree arg,
8553 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8555 if (validate_arg (arg, REAL_TYPE))
8557 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8560 /* Calculate the result when the argument is a constant. */
8561 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8564 /* Optimize expN(logN(x)) = x. */
8565 if (flag_unsafe_math_optimizations)
8567 const enum built_in_function fcode = builtin_mathfn_code (arg);
8569 if ((func == mpfr_exp
8570 && (fcode == BUILT_IN_LOG
8571 || fcode == BUILT_IN_LOGF
8572 || fcode == BUILT_IN_LOGL))
8573 || (func == mpfr_exp2
8574 && (fcode == BUILT_IN_LOG2
8575 || fcode == BUILT_IN_LOG2F
8576 || fcode == BUILT_IN_LOG2L))
8577 || (func == mpfr_exp10
8578 && (fcode == BUILT_IN_LOG10
8579 || fcode == BUILT_IN_LOG10F
8580 || fcode == BUILT_IN_LOG10L)))
8581 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8588 /* Return true if VAR is a VAR_DECL or a component thereof. */
8591 var_decl_component_p (tree var)
8594 while (handled_component_p (inner))
8595 inner = TREE_OPERAND (inner, 0);
8596 return SSA_VAR_P (inner);
8599 /* Fold function call to builtin memset. Return
8600 NULL_TREE if no simplification can be made. */
8603 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8606 unsigned HOST_WIDE_INT length, cval;
8608 if (! validate_arg (dest, POINTER_TYPE)
8609 || ! validate_arg (c, INTEGER_TYPE)
8610 || ! validate_arg (len, INTEGER_TYPE))
8613 if (! host_integerp (len, 1))
8616 /* If the LEN parameter is zero, return DEST. */
8617 if (integer_zerop (len))
8618 return omit_one_operand (type, dest, c);
8620 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8625 if (TREE_CODE (var) != ADDR_EXPR)
8628 var = TREE_OPERAND (var, 0);
8629 if (TREE_THIS_VOLATILE (var))
8632 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8633 && !POINTER_TYPE_P (TREE_TYPE (var)))
8636 if (! var_decl_component_p (var))
8639 length = tree_low_cst (len, 1);
8640 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8641 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8645 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8648 if (integer_zerop (c))
8652 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8655 cval = tree_low_cst (c, 1);
8659 cval |= (cval << 31) << 1;
8662 ret = build_int_cst_type (TREE_TYPE (var), cval);
8663 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8667 return omit_one_operand (type, dest, ret);
8670 /* Fold function call to builtin memset. Return
8671 NULL_TREE if no simplification can be made. */
8674 fold_builtin_bzero (tree dest, tree size, bool ignore)
8676 if (! validate_arg (dest, POINTER_TYPE)
8677 || ! validate_arg (size, INTEGER_TYPE))
8683 /* New argument list transforming bzero(ptr x, int y) to
8684 memset(ptr x, int 0, size_t y). This is done this way
8685 so that if it isn't expanded inline, we fallback to
8686 calling bzero instead of memset. */
8688 return fold_builtin_memset (dest, integer_zero_node,
8689 fold_convert (sizetype, size),
8690 void_type_node, ignore);
8693 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8694 NULL_TREE if no simplification can be made.
8695 If ENDP is 0, return DEST (like memcpy).
8696 If ENDP is 1, return DEST+LEN (like mempcpy).
8697 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8698 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8702 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8704 tree destvar, srcvar, expr;
8706 if (! validate_arg (dest, POINTER_TYPE)
8707 || ! validate_arg (src, POINTER_TYPE)
8708 || ! validate_arg (len, INTEGER_TYPE))
8711 /* If the LEN parameter is zero, return DEST. */
8712 if (integer_zerop (len))
8713 return omit_one_operand (type, dest, src);
8715 /* If SRC and DEST are the same (and not volatile), return
8716 DEST{,+LEN,+LEN-1}. */
8717 if (operand_equal_p (src, dest, 0))
8721 tree srctype, desttype;
8724 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8725 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8727 /* Both DEST and SRC must be pointer types.
8728 ??? This is what old code did. Is the testing for pointer types
8731 If either SRC is readonly or length is 1, we can use memcpy. */
8732 if (dest_align && src_align
8733 && (readonly_data_expr (src)
8734 || (host_integerp (len, 1)
8735 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8736 tree_low_cst (len, 1)))))
8738 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8741 return build_call_expr (fn, 3, dest, src, len);
8746 if (!host_integerp (len, 0))
8749 This logic lose for arguments like (type *)malloc (sizeof (type)),
8750 since we strip the casts of up to VOID return value from malloc.
8751 Perhaps we ought to inherit type from non-VOID argument here? */
8754 srctype = TREE_TYPE (TREE_TYPE (src));
8755 desttype = TREE_TYPE (TREE_TYPE (dest));
8756 if (!srctype || !desttype
8757 || !TYPE_SIZE_UNIT (srctype)
8758 || !TYPE_SIZE_UNIT (desttype)
8759 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8760 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8761 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8762 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8765 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8766 < (int) TYPE_ALIGN (desttype)
8767 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8768 < (int) TYPE_ALIGN (srctype)))
8772 dest = builtin_save_expr (dest);
8774 srcvar = build_fold_indirect_ref (src);
8775 if (TREE_THIS_VOLATILE (srcvar))
8777 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8779 /* With memcpy, it is possible to bypass aliasing rules, so without
8780 this check i. e. execute/20060930-2.c would be misoptimized, because
8781 it use conflicting alias set to hold argument for the memcpy call.
8782 This check is probably unnecesary with -fno-strict-aliasing.
8783 Similarly for destvar. See also PR29286. */
8784 if (!var_decl_component_p (srcvar)
8785 /* Accept: memcpy (*char_var, "test", 1); that simplify
8787 || is_gimple_min_invariant (srcvar)
8788 || readonly_data_expr (src))
8791 destvar = build_fold_indirect_ref (dest);
8792 if (TREE_THIS_VOLATILE (destvar))
8794 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8796 if (!var_decl_component_p (destvar))
8799 if (srctype == desttype
8800 || (gimple_in_ssa_p (cfun)
8801 && useless_type_conversion_p (desttype, srctype)))
8803 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8804 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8805 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8806 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8807 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8809 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8810 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8816 if (endp == 0 || endp == 3)
8817 return omit_one_operand (type, dest, expr);
8823 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8826 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8827 dest = fold_convert (type, dest);
8829 dest = omit_one_operand (type, dest, expr);
8833 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8834 If LEN is not NULL, it represents the length of the string to be
8835 copied. Return NULL_TREE if no simplification can be made. */
8838 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8842 if (!validate_arg (dest, POINTER_TYPE)
8843 || !validate_arg (src, POINTER_TYPE))
8846 /* If SRC and DEST are the same (and not volatile), return DEST. */
8847 if (operand_equal_p (src, dest, 0))
8848 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8853 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8859 len = c_strlen (src, 1);
8860 if (! len || TREE_SIDE_EFFECTS (len))
8864 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8865 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8866 build_call_expr (fn, 3, dest, src, len));
8869 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8870 If SLEN is not NULL, it represents the length of the source string.
8871 Return NULL_TREE if no simplification can be made. */
8874 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8878 if (!validate_arg (dest, POINTER_TYPE)
8879 || !validate_arg (src, POINTER_TYPE)
8880 || !validate_arg (len, INTEGER_TYPE))
8883 /* If the LEN parameter is zero, return DEST. */
8884 if (integer_zerop (len))
8885 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8887 /* We can't compare slen with len as constants below if len is not a
8889 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8893 slen = c_strlen (src, 1);
8895 /* Now, we must be passed a constant src ptr parameter. */
8896 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8899 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8901 /* We do not support simplification of this case, though we do
8902 support it when expanding trees into RTL. */
8903 /* FIXME: generate a call to __builtin_memset. */
8904 if (tree_int_cst_lt (slen, len))
8907 /* OK transform into builtin memcpy. */
8908 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8911 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8912 build_call_expr (fn, 3, dest, src, len));
8915 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8916 arguments to the call, and TYPE is its return type.
8917 Return NULL_TREE if no simplification can be made. */
8920 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8922 if (!validate_arg (arg1, POINTER_TYPE)
8923 || !validate_arg (arg2, INTEGER_TYPE)
8924 || !validate_arg (len, INTEGER_TYPE))
8930 if (TREE_CODE (arg2) != INTEGER_CST
8931 || !host_integerp (len, 1))
8934 p1 = c_getstr (arg1);
8935 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8941 if (target_char_cast (arg2, &c))
8944 r = memchr (p1, c, tree_low_cst (len, 1));
8947 return build_int_cst (TREE_TYPE (arg1), 0);
8949 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8951 return fold_convert (type, tem);
8957 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8958 Return NULL_TREE if no simplification can be made. */
8961 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8963 const char *p1, *p2;
8965 if (!validate_arg (arg1, POINTER_TYPE)
8966 || !validate_arg (arg2, POINTER_TYPE)
8967 || !validate_arg (len, INTEGER_TYPE))
8970 /* If the LEN parameter is zero, return zero. */
8971 if (integer_zerop (len))
8972 return omit_two_operands (integer_type_node, integer_zero_node,
8975 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8976 if (operand_equal_p (arg1, arg2, 0))
8977 return omit_one_operand (integer_type_node, integer_zero_node, len);
8979 p1 = c_getstr (arg1);
8980 p2 = c_getstr (arg2);
8982 /* If all arguments are constant, and the value of len is not greater
8983 than the lengths of arg1 and arg2, evaluate at compile-time. */
8984 if (host_integerp (len, 1) && p1 && p2
8985 && compare_tree_int (len, strlen (p1) + 1) <= 0
8986 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8988 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8991 return integer_one_node;
8993 return integer_minus_one_node;
8995 return integer_zero_node;
8998 /* If len parameter is one, return an expression corresponding to
8999 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9000 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9002 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9003 tree cst_uchar_ptr_node
9004 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9006 tree ind1 = fold_convert (integer_type_node,
9007 build1 (INDIRECT_REF, cst_uchar_node,
9008 fold_convert (cst_uchar_ptr_node,
9010 tree ind2 = fold_convert (integer_type_node,
9011 build1 (INDIRECT_REF, cst_uchar_node,
9012 fold_convert (cst_uchar_ptr_node,
9014 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9020 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9021 Return NULL_TREE if no simplification can be made. */
9024 fold_builtin_strcmp (tree arg1, tree arg2)
9026 const char *p1, *p2;
9028 if (!validate_arg (arg1, POINTER_TYPE)
9029 || !validate_arg (arg2, POINTER_TYPE))
9032 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9033 if (operand_equal_p (arg1, arg2, 0))
9034 return integer_zero_node;
9036 p1 = c_getstr (arg1);
9037 p2 = c_getstr (arg2);
9041 const int i = strcmp (p1, p2);
9043 return integer_minus_one_node;
9045 return integer_one_node;
9047 return integer_zero_node;
9050 /* If the second arg is "", return *(const unsigned char*)arg1. */
9051 if (p2 && *p2 == '\0')
9053 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9054 tree cst_uchar_ptr_node
9055 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9057 return fold_convert (integer_type_node,
9058 build1 (INDIRECT_REF, cst_uchar_node,
9059 fold_convert (cst_uchar_ptr_node,
9063 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9064 if (p1 && *p1 == '\0')
9066 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9067 tree cst_uchar_ptr_node
9068 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9070 tree temp = fold_convert (integer_type_node,
9071 build1 (INDIRECT_REF, cst_uchar_node,
9072 fold_convert (cst_uchar_ptr_node,
9074 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9080 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9081 Return NULL_TREE if no simplification can be made. */
9084 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9086 const char *p1, *p2;
9088 if (!validate_arg (arg1, POINTER_TYPE)
9089 || !validate_arg (arg2, POINTER_TYPE)
9090 || !validate_arg (len, INTEGER_TYPE))
9093 /* If the LEN parameter is zero, return zero. */
9094 if (integer_zerop (len))
9095 return omit_two_operands (integer_type_node, integer_zero_node,
9098 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9099 if (operand_equal_p (arg1, arg2, 0))
9100 return omit_one_operand (integer_type_node, integer_zero_node, len);
9102 p1 = c_getstr (arg1);
9103 p2 = c_getstr (arg2);
9105 if (host_integerp (len, 1) && p1 && p2)
9107 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9109 return integer_one_node;
9111 return integer_minus_one_node;
9113 return integer_zero_node;
9116 /* If the second arg is "", and the length is greater than zero,
9117 return *(const unsigned char*)arg1. */
9118 if (p2 && *p2 == '\0'
9119 && TREE_CODE (len) == INTEGER_CST
9120 && tree_int_cst_sgn (len) == 1)
9122 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9123 tree cst_uchar_ptr_node
9124 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9126 return fold_convert (integer_type_node,
9127 build1 (INDIRECT_REF, cst_uchar_node,
9128 fold_convert (cst_uchar_ptr_node,
9132 /* If the first arg is "", and the length is greater than zero,
9133 return -*(const unsigned char*)arg2. */
9134 if (p1 && *p1 == '\0'
9135 && TREE_CODE (len) == INTEGER_CST
9136 && tree_int_cst_sgn (len) == 1)
9138 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9139 tree cst_uchar_ptr_node
9140 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9142 tree temp = fold_convert (integer_type_node,
9143 build1 (INDIRECT_REF, cst_uchar_node,
9144 fold_convert (cst_uchar_ptr_node,
9146 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9149 /* If len parameter is one, return an expression corresponding to
9150 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9151 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9153 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9154 tree cst_uchar_ptr_node
9155 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9157 tree ind1 = fold_convert (integer_type_node,
9158 build1 (INDIRECT_REF, cst_uchar_node,
9159 fold_convert (cst_uchar_ptr_node,
9161 tree ind2 = fold_convert (integer_type_node,
9162 build1 (INDIRECT_REF, cst_uchar_node,
9163 fold_convert (cst_uchar_ptr_node,
9165 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9171 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9172 ARG. Return NULL_TREE if no simplification can be made. */
9175 fold_builtin_signbit (tree arg, tree type)
9179 if (!validate_arg (arg, REAL_TYPE))
9182 /* If ARG is a compile-time constant, determine the result. */
9183 if (TREE_CODE (arg) == REAL_CST
9184 && !TREE_OVERFLOW (arg))
9188 c = TREE_REAL_CST (arg);
9189 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9190 return fold_convert (type, temp);
9193 /* If ARG is non-negative, the result is always zero. */
9194 if (tree_expr_nonnegative_p (arg))
9195 return omit_one_operand (type, integer_zero_node, arg);
9197 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9198 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9199 return fold_build2 (LT_EXPR, type, arg,
9200 build_real (TREE_TYPE (arg), dconst0));
9205 /* Fold function call to builtin copysign, copysignf or copysignl with
9206 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9210 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9214 if (!validate_arg (arg1, REAL_TYPE)
9215 || !validate_arg (arg2, REAL_TYPE))
9218 /* copysign(X,X) is X. */
9219 if (operand_equal_p (arg1, arg2, 0))
9220 return fold_convert (type, arg1);
9222 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9223 if (TREE_CODE (arg1) == REAL_CST
9224 && TREE_CODE (arg2) == REAL_CST
9225 && !TREE_OVERFLOW (arg1)
9226 && !TREE_OVERFLOW (arg2))
9228 REAL_VALUE_TYPE c1, c2;
9230 c1 = TREE_REAL_CST (arg1);
9231 c2 = TREE_REAL_CST (arg2);
9232 /* c1.sign := c2.sign. */
9233 real_copysign (&c1, &c2);
9234 return build_real (type, c1);
9237 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9238 Remember to evaluate Y for side-effects. */
9239 if (tree_expr_nonnegative_p (arg2))
9240 return omit_one_operand (type,
9241 fold_build1 (ABS_EXPR, type, arg1),
9244 /* Strip sign changing operations for the first argument. */
9245 tem = fold_strip_sign_ops (arg1);
9247 return build_call_expr (fndecl, 2, tem, arg2);
9252 /* Fold a call to builtin isascii with argument ARG. */
9255 fold_builtin_isascii (tree arg)
9257 if (!validate_arg (arg, INTEGER_TYPE))
9261 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9262 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9263 build_int_cst (NULL_TREE,
9264 ~ (unsigned HOST_WIDE_INT) 0x7f));
9265 return fold_build2 (EQ_EXPR, integer_type_node,
9266 arg, integer_zero_node);
9270 /* Fold a call to builtin toascii with argument ARG. */
9273 fold_builtin_toascii (tree arg)
9275 if (!validate_arg (arg, INTEGER_TYPE))
9278 /* Transform toascii(c) -> (c & 0x7f). */
9279 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9280 build_int_cst (NULL_TREE, 0x7f));
9283 /* Fold a call to builtin isdigit with argument ARG. */
9286 fold_builtin_isdigit (tree arg)
9288 if (!validate_arg (arg, INTEGER_TYPE))
9292 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9293 /* According to the C standard, isdigit is unaffected by locale.
9294 However, it definitely is affected by the target character set. */
9295 unsigned HOST_WIDE_INT target_digit0
9296 = lang_hooks.to_target_charset ('0');
9298 if (target_digit0 == 0)
9301 arg = fold_convert (unsigned_type_node, arg);
9302 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9303 build_int_cst (unsigned_type_node, target_digit0));
9304 return fold_build2 (LE_EXPR, integer_type_node, arg,
9305 build_int_cst (unsigned_type_node, 9));
9309 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9312 fold_builtin_fabs (tree arg, tree type)
9314 if (!validate_arg (arg, REAL_TYPE))
9317 arg = fold_convert (type, arg);
9318 if (TREE_CODE (arg) == REAL_CST)
9319 return fold_abs_const (arg, type);
9320 return fold_build1 (ABS_EXPR, type, arg);
9323 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9326 fold_builtin_abs (tree arg, tree type)
9328 if (!validate_arg (arg, INTEGER_TYPE))
9331 arg = fold_convert (type, arg);
9332 if (TREE_CODE (arg) == INTEGER_CST)
9333 return fold_abs_const (arg, type);
9334 return fold_build1 (ABS_EXPR, type, arg);
9337 /* Fold a call to builtin fmin or fmax. */
9340 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9342 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9344 /* Calculate the result when the argument is a constant. */
9345 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9350 /* If either argument is NaN, return the other one. Avoid the
9351 transformation if we get (and honor) a signalling NaN. Using
9352 omit_one_operand() ensures we create a non-lvalue. */
9353 if (TREE_CODE (arg0) == REAL_CST
9354 && real_isnan (&TREE_REAL_CST (arg0))
9355 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9356 || ! TREE_REAL_CST (arg0).signalling))
9357 return omit_one_operand (type, arg1, arg0);
9358 if (TREE_CODE (arg1) == REAL_CST
9359 && real_isnan (&TREE_REAL_CST (arg1))
9360 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9361 || ! TREE_REAL_CST (arg1).signalling))
9362 return omit_one_operand (type, arg0, arg1);
9364 /* Transform fmin/fmax(x,x) -> x. */
9365 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9366 return omit_one_operand (type, arg0, arg1);
9368 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9369 functions to return the numeric arg if the other one is NaN.
9370 These tree codes don't honor that, so only transform if
9371 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9372 handled, so we don't have to worry about it either. */
9373 if (flag_finite_math_only)
9374 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9375 fold_convert (type, arg0),
9376 fold_convert (type, arg1));
9381 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9384 fold_builtin_carg (tree arg, tree type)
9386 if (validate_arg (arg, COMPLEX_TYPE))
9388 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9392 tree new_arg = builtin_save_expr (arg);
9393 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9394 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9395 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9402 /* Fold a call to builtin logb/ilogb. */
9405 fold_builtin_logb (tree arg, tree rettype)
9407 if (! validate_arg (arg, REAL_TYPE))
9412 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9414 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9420 /* If arg is Inf or NaN and we're logb, return it. */
9421 if (TREE_CODE (rettype) == REAL_TYPE)
9422 return fold_convert (rettype, arg);
9423 /* Fall through... */
9425 /* Zero may set errno and/or raise an exception for logb, also
9426 for ilogb we don't know FP_ILOGB0. */
9429 /* For normal numbers, proceed iff radix == 2. In GCC,
9430 normalized significands are in the range [0.5, 1.0). We
9431 want the exponent as if they were [1.0, 2.0) so get the
9432 exponent and subtract 1. */
9433 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9434 return fold_convert (rettype, build_int_cst (NULL_TREE,
9435 REAL_EXP (value)-1));
9443 /* Fold a call to builtin significand, if radix == 2. */
9446 fold_builtin_significand (tree arg, tree rettype)
9448 if (! validate_arg (arg, REAL_TYPE))
9453 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9455 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9462 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9463 return fold_convert (rettype, arg);
9465 /* For normal numbers, proceed iff radix == 2. */
9466 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9468 REAL_VALUE_TYPE result = *value;
9469 /* In GCC, normalized significands are in the range [0.5,
9470 1.0). We want them to be [1.0, 2.0) so set the
9472 SET_REAL_EXP (&result, 1);
9473 return build_real (rettype, result);
9482 /* Fold a call to builtin frexp, we can assume the base is 2. */
9485 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9487 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9492 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9495 arg1 = build_fold_indirect_ref (arg1);
9497 /* Proceed if a valid pointer type was passed in. */
9498 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9500 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9506 /* For +-0, return (*exp = 0, +-0). */
9507 exp = integer_zero_node;
9512 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9513 return omit_one_operand (rettype, arg0, arg1);
9516 /* Since the frexp function always expects base 2, and in
9517 GCC normalized significands are already in the range
9518 [0.5, 1.0), we have exactly what frexp wants. */
9519 REAL_VALUE_TYPE frac_rvt = *value;
9520 SET_REAL_EXP (&frac_rvt, 0);
9521 frac = build_real (rettype, frac_rvt);
9522 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9529 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9530 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9531 TREE_SIDE_EFFECTS (arg1) = 1;
9532 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9538 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9539 then we can assume the base is two. If it's false, then we have to
9540 check the mode of the TYPE parameter in certain cases. */
9543 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9545 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9550 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9551 if (real_zerop (arg0) || integer_zerop (arg1)
9552 || (TREE_CODE (arg0) == REAL_CST
9553 && !real_isfinite (&TREE_REAL_CST (arg0))))
9554 return omit_one_operand (type, arg0, arg1);
9556 /* If both arguments are constant, then try to evaluate it. */
9557 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9558 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9559 && host_integerp (arg1, 0))
9561 /* Bound the maximum adjustment to twice the range of the
9562 mode's valid exponents. Use abs to ensure the range is
9563 positive as a sanity check. */
9564 const long max_exp_adj = 2 *
9565 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9566 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9568 /* Get the user-requested adjustment. */
9569 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9571 /* The requested adjustment must be inside this range. This
9572 is a preliminary cap to avoid things like overflow, we
9573 may still fail to compute the result for other reasons. */
9574 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9576 REAL_VALUE_TYPE initial_result;
9578 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9580 /* Ensure we didn't overflow. */
9581 if (! real_isinf (&initial_result))
9583 const REAL_VALUE_TYPE trunc_result
9584 = real_value_truncate (TYPE_MODE (type), initial_result);
9586 /* Only proceed if the target mode can hold the
9588 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9589 return build_real (type, trunc_result);
9598 /* Fold a call to builtin modf. */
9601 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9603 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9608 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9611 arg1 = build_fold_indirect_ref (arg1);
9613 /* Proceed if a valid pointer type was passed in. */
9614 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9616 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9617 REAL_VALUE_TYPE trunc, frac;
9623 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9624 trunc = frac = *value;
9627 /* For +-Inf, return (*arg1 = arg0, +-0). */
9629 frac.sign = value->sign;
9633 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9634 real_trunc (&trunc, VOIDmode, value);
9635 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9636 /* If the original number was negative and already
9637 integral, then the fractional part is -0.0. */
9638 if (value->sign && frac.cl == rvc_zero)
9639 frac.sign = value->sign;
9643 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9644 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9645 build_real (rettype, trunc));
9646 TREE_SIDE_EFFECTS (arg1) = 1;
9647 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9648 build_real (rettype, frac));
9654 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9655 ARG is the argument for the call. */
9658 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9660 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9663 if (!validate_arg (arg, REAL_TYPE))
9666 switch (builtin_index)
9668 case BUILT_IN_ISINF:
9669 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9670 return omit_one_operand (type, integer_zero_node, arg);
9672 if (TREE_CODE (arg) == REAL_CST)
9674 r = TREE_REAL_CST (arg);
9675 if (real_isinf (&r))
9676 return real_compare (GT_EXPR, &r, &dconst0)
9677 ? integer_one_node : integer_minus_one_node;
9679 return integer_zero_node;
9684 case BUILT_IN_ISINF_SIGN:
9686 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9687 /* In a boolean context, GCC will fold the inner COND_EXPR to
9688 1. So e.g. "if (isinf_sign(x))" would be folded to just
9689 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9690 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9691 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9692 tree tmp = NULL_TREE;
9694 arg = builtin_save_expr (arg);
9696 if (signbit_fn && isinf_fn)
9698 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
9699 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
9701 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
9702 signbit_call, integer_zero_node);
9703 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
9704 isinf_call, integer_zero_node);
9706 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
9707 integer_minus_one_node, integer_one_node);
9708 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
9715 case BUILT_IN_ISFINITE:
9716 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9717 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9718 return omit_one_operand (type, integer_one_node, arg);
9720 if (TREE_CODE (arg) == REAL_CST)
9722 r = TREE_REAL_CST (arg);
9723 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9728 case BUILT_IN_ISNAN:
9729 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9730 return omit_one_operand (type, integer_zero_node, arg);
9732 if (TREE_CODE (arg) == REAL_CST)
9734 r = TREE_REAL_CST (arg);
9735 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9738 arg = builtin_save_expr (arg);
9739 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9746 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9747 This builtin will generate code to return the appropriate floating
9748 point classification depending on the value of the floating point
9749 number passed in. The possible return values must be supplied as
9750 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9751 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9752 one floating point argument which is "type generic". */
9755 fold_builtin_fpclassify (tree exp)
9757 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9758 arg, type, res, tmp;
9759 enum machine_mode mode;
9763 /* Verify the required arguments in the original call. */
9764 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9765 INTEGER_TYPE, INTEGER_TYPE,
9766 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9769 fp_nan = CALL_EXPR_ARG (exp, 0);
9770 fp_infinite = CALL_EXPR_ARG (exp, 1);
9771 fp_normal = CALL_EXPR_ARG (exp, 2);
9772 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9773 fp_zero = CALL_EXPR_ARG (exp, 4);
9774 arg = CALL_EXPR_ARG (exp, 5);
9775 type = TREE_TYPE (arg);
9776 mode = TYPE_MODE (type);
9777 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
9781 (fabs(x) == Inf ? FP_INFINITE :
9782 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9783 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9785 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9786 build_real (type, dconst0));
9787 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
9789 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9790 real_from_string (&r, buf);
9791 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
9792 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
9794 if (HONOR_INFINITIES (mode))
9797 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9798 build_real (type, r));
9799 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
9802 if (HONOR_NANS (mode))
9804 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
9805 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
9811 /* Fold a call to an unordered comparison function such as
9812 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9813 being called and ARG0 and ARG1 are the arguments for the call.
9814 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9815 the opposite of the desired result. UNORDERED_CODE is used
9816 for modes that can hold NaNs and ORDERED_CODE is used for
9820 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9821 enum tree_code unordered_code,
9822 enum tree_code ordered_code)
9824 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9825 enum tree_code code;
9827 enum tree_code code0, code1;
9828 tree cmp_type = NULL_TREE;
9830 type0 = TREE_TYPE (arg0);
9831 type1 = TREE_TYPE (arg1);
9833 code0 = TREE_CODE (type0);
9834 code1 = TREE_CODE (type1);
9836 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9837 /* Choose the wider of two real types. */
9838 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9840 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9842 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9845 arg0 = fold_convert (cmp_type, arg0);
9846 arg1 = fold_convert (cmp_type, arg1);
9848 if (unordered_code == UNORDERED_EXPR)
9850 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9851 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9852 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9855 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9857 return fold_build1 (TRUTH_NOT_EXPR, type,
9858 fold_build2 (code, type, arg0, arg1));
9861 /* Fold a call to built-in function FNDECL with 0 arguments.
9862 IGNORE is true if the result of the function call is ignored. This
9863 function returns NULL_TREE if no simplification was possible. */
9866 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9868 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9869 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9872 CASE_FLT_FN (BUILT_IN_INF):
9873 case BUILT_IN_INFD32:
9874 case BUILT_IN_INFD64:
9875 case BUILT_IN_INFD128:
9876 return fold_builtin_inf (type, true);
9878 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9879 return fold_builtin_inf (type, false);
9881 case BUILT_IN_CLASSIFY_TYPE:
9882 return fold_builtin_classify_type (NULL_TREE);
9890 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9891 IGNORE is true if the result of the function call is ignored. This
9892 function returns NULL_TREE if no simplification was possible. */
9895 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9897 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9898 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9902 case BUILT_IN_CONSTANT_P:
9904 tree val = fold_builtin_constant_p (arg0);
9906 /* Gimplification will pull the CALL_EXPR for the builtin out of
9907 an if condition. When not optimizing, we'll not CSE it back.
9908 To avoid link error types of regressions, return false now. */
9909 if (!val && !optimize)
9910 val = integer_zero_node;
9915 case BUILT_IN_CLASSIFY_TYPE:
9916 return fold_builtin_classify_type (arg0);
9918 case BUILT_IN_STRLEN:
9919 return fold_builtin_strlen (arg0);
9921 CASE_FLT_FN (BUILT_IN_FABS):
9922 return fold_builtin_fabs (arg0, type);
9926 case BUILT_IN_LLABS:
9927 case BUILT_IN_IMAXABS:
9928 return fold_builtin_abs (arg0, type);
9930 CASE_FLT_FN (BUILT_IN_CONJ):
9931 if (validate_arg (arg0, COMPLEX_TYPE))
9932 return fold_build1 (CONJ_EXPR, type, arg0);
9935 CASE_FLT_FN (BUILT_IN_CREAL):
9936 if (validate_arg (arg0, COMPLEX_TYPE))
9937 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
9940 CASE_FLT_FN (BUILT_IN_CIMAG):
9941 if (validate_arg (arg0, COMPLEX_TYPE))
9942 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9945 CASE_FLT_FN (BUILT_IN_CCOS):
9946 CASE_FLT_FN (BUILT_IN_CCOSH):
9947 /* These functions are "even", i.e. f(x) == f(-x). */
9948 if (validate_arg (arg0, COMPLEX_TYPE))
9950 tree narg = fold_strip_sign_ops (arg0);
9952 return build_call_expr (fndecl, 1, narg);
9956 CASE_FLT_FN (BUILT_IN_CABS):
9957 return fold_builtin_cabs (arg0, type, fndecl);
9959 CASE_FLT_FN (BUILT_IN_CARG):
9960 return fold_builtin_carg (arg0, type);
9962 CASE_FLT_FN (BUILT_IN_SQRT):
9963 return fold_builtin_sqrt (arg0, type);
9965 CASE_FLT_FN (BUILT_IN_CBRT):
9966 return fold_builtin_cbrt (arg0, type);
9968 CASE_FLT_FN (BUILT_IN_ASIN):
9969 if (validate_arg (arg0, REAL_TYPE))
9970 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9971 &dconstm1, &dconst1, true);
9974 CASE_FLT_FN (BUILT_IN_ACOS):
9975 if (validate_arg (arg0, REAL_TYPE))
9976 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9977 &dconstm1, &dconst1, true);
9980 CASE_FLT_FN (BUILT_IN_ATAN):
9981 if (validate_arg (arg0, REAL_TYPE))
9982 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9985 CASE_FLT_FN (BUILT_IN_ASINH):
9986 if (validate_arg (arg0, REAL_TYPE))
9987 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9990 CASE_FLT_FN (BUILT_IN_ACOSH):
9991 if (validate_arg (arg0, REAL_TYPE))
9992 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9993 &dconst1, NULL, true);
9996 CASE_FLT_FN (BUILT_IN_ATANH):
9997 if (validate_arg (arg0, REAL_TYPE))
9998 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9999 &dconstm1, &dconst1, false);
10002 CASE_FLT_FN (BUILT_IN_SIN):
10003 if (validate_arg (arg0, REAL_TYPE))
10004 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10007 CASE_FLT_FN (BUILT_IN_COS):
10008 return fold_builtin_cos (arg0, type, fndecl);
10011 CASE_FLT_FN (BUILT_IN_TAN):
10012 return fold_builtin_tan (arg0, type);
10014 CASE_FLT_FN (BUILT_IN_CEXP):
10015 return fold_builtin_cexp (arg0, type);
10017 CASE_FLT_FN (BUILT_IN_CEXPI):
10018 if (validate_arg (arg0, REAL_TYPE))
10019 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10022 CASE_FLT_FN (BUILT_IN_SINH):
10023 if (validate_arg (arg0, REAL_TYPE))
10024 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10027 CASE_FLT_FN (BUILT_IN_COSH):
10028 return fold_builtin_cosh (arg0, type, fndecl);
10030 CASE_FLT_FN (BUILT_IN_TANH):
10031 if (validate_arg (arg0, REAL_TYPE))
10032 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10035 CASE_FLT_FN (BUILT_IN_ERF):
10036 if (validate_arg (arg0, REAL_TYPE))
10037 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10040 CASE_FLT_FN (BUILT_IN_ERFC):
10041 if (validate_arg (arg0, REAL_TYPE))
10042 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10045 CASE_FLT_FN (BUILT_IN_TGAMMA):
10046 if (validate_arg (arg0, REAL_TYPE))
10047 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10050 CASE_FLT_FN (BUILT_IN_EXP):
10051 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10053 CASE_FLT_FN (BUILT_IN_EXP2):
10054 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10056 CASE_FLT_FN (BUILT_IN_EXP10):
10057 CASE_FLT_FN (BUILT_IN_POW10):
10058 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10060 CASE_FLT_FN (BUILT_IN_EXPM1):
10061 if (validate_arg (arg0, REAL_TYPE))
10062 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10065 CASE_FLT_FN (BUILT_IN_LOG):
10066 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10068 CASE_FLT_FN (BUILT_IN_LOG2):
10069 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10071 CASE_FLT_FN (BUILT_IN_LOG10):
10072 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10074 CASE_FLT_FN (BUILT_IN_LOG1P):
10075 if (validate_arg (arg0, REAL_TYPE))
10076 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10077 &dconstm1, NULL, false);
10080 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10081 CASE_FLT_FN (BUILT_IN_J0):
10082 if (validate_arg (arg0, REAL_TYPE))
10083 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10087 CASE_FLT_FN (BUILT_IN_J1):
10088 if (validate_arg (arg0, REAL_TYPE))
10089 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10093 CASE_FLT_FN (BUILT_IN_Y0):
10094 if (validate_arg (arg0, REAL_TYPE))
10095 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10096 &dconst0, NULL, false);
10099 CASE_FLT_FN (BUILT_IN_Y1):
10100 if (validate_arg (arg0, REAL_TYPE))
10101 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10102 &dconst0, NULL, false);
10106 CASE_FLT_FN (BUILT_IN_NAN):
10107 case BUILT_IN_NAND32:
10108 case BUILT_IN_NAND64:
10109 case BUILT_IN_NAND128:
10110 return fold_builtin_nan (arg0, type, true);
10112 CASE_FLT_FN (BUILT_IN_NANS):
10113 return fold_builtin_nan (arg0, type, false);
10115 CASE_FLT_FN (BUILT_IN_FLOOR):
10116 return fold_builtin_floor (fndecl, arg0);
10118 CASE_FLT_FN (BUILT_IN_CEIL):
10119 return fold_builtin_ceil (fndecl, arg0);
10121 CASE_FLT_FN (BUILT_IN_TRUNC):
10122 return fold_builtin_trunc (fndecl, arg0);
10124 CASE_FLT_FN (BUILT_IN_ROUND):
10125 return fold_builtin_round (fndecl, arg0);
10127 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10128 CASE_FLT_FN (BUILT_IN_RINT):
10129 return fold_trunc_transparent_mathfn (fndecl, arg0);
10131 CASE_FLT_FN (BUILT_IN_LCEIL):
10132 CASE_FLT_FN (BUILT_IN_LLCEIL):
10133 CASE_FLT_FN (BUILT_IN_LFLOOR):
10134 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10135 CASE_FLT_FN (BUILT_IN_LROUND):
10136 CASE_FLT_FN (BUILT_IN_LLROUND):
10137 return fold_builtin_int_roundingfn (fndecl, arg0);
10139 CASE_FLT_FN (BUILT_IN_LRINT):
10140 CASE_FLT_FN (BUILT_IN_LLRINT):
10141 return fold_fixed_mathfn (fndecl, arg0);
10143 case BUILT_IN_BSWAP32:
10144 case BUILT_IN_BSWAP64:
10145 return fold_builtin_bswap (fndecl, arg0);
10147 CASE_INT_FN (BUILT_IN_FFS):
10148 CASE_INT_FN (BUILT_IN_CLZ):
10149 CASE_INT_FN (BUILT_IN_CTZ):
10150 CASE_INT_FN (BUILT_IN_POPCOUNT):
10151 CASE_INT_FN (BUILT_IN_PARITY):
10152 return fold_builtin_bitop (fndecl, arg0);
10154 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10155 return fold_builtin_signbit (arg0, type);
10157 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10158 return fold_builtin_significand (arg0, type);
10160 CASE_FLT_FN (BUILT_IN_ILOGB):
10161 CASE_FLT_FN (BUILT_IN_LOGB):
10162 return fold_builtin_logb (arg0, type);
10164 case BUILT_IN_ISASCII:
10165 return fold_builtin_isascii (arg0);
10167 case BUILT_IN_TOASCII:
10168 return fold_builtin_toascii (arg0);
10170 case BUILT_IN_ISDIGIT:
10171 return fold_builtin_isdigit (arg0);
10173 CASE_FLT_FN (BUILT_IN_FINITE):
10174 case BUILT_IN_FINITED32:
10175 case BUILT_IN_FINITED64:
10176 case BUILT_IN_FINITED128:
10177 case BUILT_IN_ISFINITE:
10178 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10180 CASE_FLT_FN (BUILT_IN_ISINF):
10181 case BUILT_IN_ISINFD32:
10182 case BUILT_IN_ISINFD64:
10183 case BUILT_IN_ISINFD128:
10184 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10186 case BUILT_IN_ISINF_SIGN:
10187 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10189 CASE_FLT_FN (BUILT_IN_ISNAN):
10190 case BUILT_IN_ISNAND32:
10191 case BUILT_IN_ISNAND64:
10192 case BUILT_IN_ISNAND128:
10193 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10195 case BUILT_IN_PRINTF:
10196 case BUILT_IN_PRINTF_UNLOCKED:
10197 case BUILT_IN_VPRINTF:
10198 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10208 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10209 IGNORE is true if the result of the function call is ignored. This
10210 function returns NULL_TREE if no simplification was possible. */
10213 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10215 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10216 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10220 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10221 CASE_FLT_FN (BUILT_IN_JN):
10222 if (validate_arg (arg0, INTEGER_TYPE)
10223 && validate_arg (arg1, REAL_TYPE))
10224 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10227 CASE_FLT_FN (BUILT_IN_YN):
10228 if (validate_arg (arg0, INTEGER_TYPE)
10229 && validate_arg (arg1, REAL_TYPE))
10230 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10234 CASE_FLT_FN (BUILT_IN_DREM):
10235 CASE_FLT_FN (BUILT_IN_REMAINDER):
10236 if (validate_arg (arg0, REAL_TYPE)
10237 && validate_arg(arg1, REAL_TYPE))
10238 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10241 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10242 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10243 if (validate_arg (arg0, REAL_TYPE)
10244 && validate_arg(arg1, POINTER_TYPE))
10245 return do_mpfr_lgamma_r (arg0, arg1, type);
10249 CASE_FLT_FN (BUILT_IN_ATAN2):
10250 if (validate_arg (arg0, REAL_TYPE)
10251 && validate_arg(arg1, REAL_TYPE))
10252 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10255 CASE_FLT_FN (BUILT_IN_FDIM):
10256 if (validate_arg (arg0, REAL_TYPE)
10257 && validate_arg(arg1, REAL_TYPE))
10258 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10261 CASE_FLT_FN (BUILT_IN_HYPOT):
10262 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10264 CASE_FLT_FN (BUILT_IN_LDEXP):
10265 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10266 CASE_FLT_FN (BUILT_IN_SCALBN):
10267 CASE_FLT_FN (BUILT_IN_SCALBLN):
10268 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10270 CASE_FLT_FN (BUILT_IN_FREXP):
10271 return fold_builtin_frexp (arg0, arg1, type);
10273 CASE_FLT_FN (BUILT_IN_MODF):
10274 return fold_builtin_modf (arg0, arg1, type);
10276 case BUILT_IN_BZERO:
10277 return fold_builtin_bzero (arg0, arg1, ignore);
10279 case BUILT_IN_FPUTS:
10280 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10282 case BUILT_IN_FPUTS_UNLOCKED:
10283 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10285 case BUILT_IN_STRSTR:
10286 return fold_builtin_strstr (arg0, arg1, type);
10288 case BUILT_IN_STRCAT:
10289 return fold_builtin_strcat (arg0, arg1);
10291 case BUILT_IN_STRSPN:
10292 return fold_builtin_strspn (arg0, arg1);
10294 case BUILT_IN_STRCSPN:
10295 return fold_builtin_strcspn (arg0, arg1);
10297 case BUILT_IN_STRCHR:
10298 case BUILT_IN_INDEX:
10299 return fold_builtin_strchr (arg0, arg1, type);
10301 case BUILT_IN_STRRCHR:
10302 case BUILT_IN_RINDEX:
10303 return fold_builtin_strrchr (arg0, arg1, type);
10305 case BUILT_IN_STRCPY:
10306 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10308 case BUILT_IN_STRCMP:
10309 return fold_builtin_strcmp (arg0, arg1);
10311 case BUILT_IN_STRPBRK:
10312 return fold_builtin_strpbrk (arg0, arg1, type);
10314 case BUILT_IN_EXPECT:
10315 return fold_builtin_expect (arg0, arg1);
10317 CASE_FLT_FN (BUILT_IN_POW):
10318 return fold_builtin_pow (fndecl, arg0, arg1, type);
10320 CASE_FLT_FN (BUILT_IN_POWI):
10321 return fold_builtin_powi (fndecl, arg0, arg1, type);
10323 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10324 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10326 CASE_FLT_FN (BUILT_IN_FMIN):
10327 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10329 CASE_FLT_FN (BUILT_IN_FMAX):
10330 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10332 case BUILT_IN_ISGREATER:
10333 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10334 case BUILT_IN_ISGREATEREQUAL:
10335 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10336 case BUILT_IN_ISLESS:
10337 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10338 case BUILT_IN_ISLESSEQUAL:
10339 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10340 case BUILT_IN_ISLESSGREATER:
10341 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10342 case BUILT_IN_ISUNORDERED:
10343 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10346 /* We do the folding for va_start in the expander. */
10347 case BUILT_IN_VA_START:
10350 case BUILT_IN_SPRINTF:
10351 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10353 case BUILT_IN_OBJECT_SIZE:
10354 return fold_builtin_object_size (arg0, arg1);
10356 case BUILT_IN_PRINTF:
10357 case BUILT_IN_PRINTF_UNLOCKED:
10358 case BUILT_IN_VPRINTF:
10359 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10361 case BUILT_IN_PRINTF_CHK:
10362 case BUILT_IN_VPRINTF_CHK:
10363 if (!validate_arg (arg0, INTEGER_TYPE)
10364 || TREE_SIDE_EFFECTS (arg0))
10367 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10370 case BUILT_IN_FPRINTF:
10371 case BUILT_IN_FPRINTF_UNLOCKED:
10372 case BUILT_IN_VFPRINTF:
10373 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10382 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10383 and ARG2. IGNORE is true if the result of the function call is ignored.
10384 This function returns NULL_TREE if no simplification was possible. */
10387 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10389 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10390 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10394 CASE_FLT_FN (BUILT_IN_SINCOS):
10395 return fold_builtin_sincos (arg0, arg1, arg2);
10397 CASE_FLT_FN (BUILT_IN_FMA):
10398 if (validate_arg (arg0, REAL_TYPE)
10399 && validate_arg(arg1, REAL_TYPE)
10400 && validate_arg(arg2, REAL_TYPE))
10401 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10404 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10405 CASE_FLT_FN (BUILT_IN_REMQUO):
10406 if (validate_arg (arg0, REAL_TYPE)
10407 && validate_arg(arg1, REAL_TYPE)
10408 && validate_arg(arg2, POINTER_TYPE))
10409 return do_mpfr_remquo (arg0, arg1, arg2);
10413 case BUILT_IN_MEMSET:
10414 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10416 case BUILT_IN_BCOPY:
10417 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10419 case BUILT_IN_MEMCPY:
10420 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10422 case BUILT_IN_MEMPCPY:
10423 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10425 case BUILT_IN_MEMMOVE:
10426 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10428 case BUILT_IN_STRNCAT:
10429 return fold_builtin_strncat (arg0, arg1, arg2);
10431 case BUILT_IN_STRNCPY:
10432 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10434 case BUILT_IN_STRNCMP:
10435 return fold_builtin_strncmp (arg0, arg1, arg2);
10437 case BUILT_IN_MEMCHR:
10438 return fold_builtin_memchr (arg0, arg1, arg2, type);
10440 case BUILT_IN_BCMP:
10441 case BUILT_IN_MEMCMP:
10442 return fold_builtin_memcmp (arg0, arg1, arg2);;
10444 case BUILT_IN_SPRINTF:
10445 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10447 case BUILT_IN_STRCPY_CHK:
10448 case BUILT_IN_STPCPY_CHK:
10449 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10452 case BUILT_IN_STRCAT_CHK:
10453 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10455 case BUILT_IN_PRINTF_CHK:
10456 case BUILT_IN_VPRINTF_CHK:
10457 if (!validate_arg (arg0, INTEGER_TYPE)
10458 || TREE_SIDE_EFFECTS (arg0))
10461 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10464 case BUILT_IN_FPRINTF:
10465 case BUILT_IN_FPRINTF_UNLOCKED:
10466 case BUILT_IN_VFPRINTF:
10467 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10469 case BUILT_IN_FPRINTF_CHK:
10470 case BUILT_IN_VFPRINTF_CHK:
10471 if (!validate_arg (arg1, INTEGER_TYPE)
10472 || TREE_SIDE_EFFECTS (arg1))
10475 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10484 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10485 ARG2, and ARG3. IGNORE is true if the result of the function call is
10486 ignored. This function returns NULL_TREE if no simplification was
10490 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10493 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10497 case BUILT_IN_MEMCPY_CHK:
10498 case BUILT_IN_MEMPCPY_CHK:
10499 case BUILT_IN_MEMMOVE_CHK:
10500 case BUILT_IN_MEMSET_CHK:
10501 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10503 DECL_FUNCTION_CODE (fndecl));
10505 case BUILT_IN_STRNCPY_CHK:
10506 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10508 case BUILT_IN_STRNCAT_CHK:
10509 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10511 case BUILT_IN_FPRINTF_CHK:
10512 case BUILT_IN_VFPRINTF_CHK:
10513 if (!validate_arg (arg1, INTEGER_TYPE)
10514 || TREE_SIDE_EFFECTS (arg1))
10517 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10527 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10528 arguments, where NARGS <= 4. IGNORE is true if the result of the
10529 function call is ignored. This function returns NULL_TREE if no
10530 simplification was possible. Note that this only folds builtins with
10531 fixed argument patterns. Foldings that do varargs-to-varargs
10532 transformations, or that match calls with more than 4 arguments,
10533 need to be handled with fold_builtin_varargs instead. */
10535 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10538 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10540 tree ret = NULL_TREE;
10545 ret = fold_builtin_0 (fndecl, ignore);
10548 ret = fold_builtin_1 (fndecl, args[0], ignore);
10551 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10554 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10557 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10565 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10566 TREE_NO_WARNING (ret) = 1;
10572 /* Builtins with folding operations that operate on "..." arguments
10573 need special handling; we need to store the arguments in a convenient
10574 data structure before attempting any folding. Fortunately there are
10575 only a few builtins that fall into this category. FNDECL is the
10576 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10577 result of the function call is ignored. */
10580 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10582 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10583 tree ret = NULL_TREE;
10587 case BUILT_IN_SPRINTF_CHK:
10588 case BUILT_IN_VSPRINTF_CHK:
10589 ret = fold_builtin_sprintf_chk (exp, fcode);
10592 case BUILT_IN_SNPRINTF_CHK:
10593 case BUILT_IN_VSNPRINTF_CHK:
10594 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10597 case BUILT_IN_FPCLASSIFY:
10598 ret = fold_builtin_fpclassify (exp);
10606 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10607 TREE_NO_WARNING (ret) = 1;
10613 /* A wrapper function for builtin folding that prevents warnings for
10614 "statement without effect" and the like, caused by removing the
10615 call node earlier than the warning is generated. */
10618 fold_call_expr (tree exp, bool ignore)
10620 tree ret = NULL_TREE;
10621 tree fndecl = get_callee_fndecl (exp);
10623 && TREE_CODE (fndecl) == FUNCTION_DECL
10624 && DECL_BUILT_IN (fndecl)
10625 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10626 yet. Defer folding until we see all the arguments
10627 (after inlining). */
10628 && !CALL_EXPR_VA_ARG_PACK (exp))
10630 int nargs = call_expr_nargs (exp);
10632 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10633 instead last argument is __builtin_va_arg_pack (). Defer folding
10634 even in that case, until arguments are finalized. */
10635 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10637 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10639 && TREE_CODE (fndecl2) == FUNCTION_DECL
10640 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10641 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10645 /* FIXME: Don't use a list in this interface. */
10646 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10647 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10650 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10652 tree *args = CALL_EXPR_ARGP (exp);
10653 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10656 ret = fold_builtin_varargs (fndecl, exp, ignore);
10659 /* Propagate location information from original call to
10660 expansion of builtin. Otherwise things like
10661 maybe_emit_chk_warning, that operate on the expansion
10662 of a builtin, will use the wrong location information. */
10663 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10665 tree realret = ret;
10666 if (TREE_CODE (ret) == NOP_EXPR)
10667 realret = TREE_OPERAND (ret, 0);
10668 if (CAN_HAVE_LOCATION_P (realret)
10669 && !EXPR_HAS_LOCATION (realret))
10670 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10680 /* Conveniently construct a function call expression. FNDECL names the
10681 function to be called and ARGLIST is a TREE_LIST of arguments. */
10684 build_function_call_expr (tree fndecl, tree arglist)
10686 tree fntype = TREE_TYPE (fndecl);
10687 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10688 int n = list_length (arglist);
10689 tree *argarray = (tree *) alloca (n * sizeof (tree));
10692 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10693 argarray[i] = TREE_VALUE (arglist);
10694 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10697 /* Conveniently construct a function call expression. FNDECL names the
10698 function to be called, N is the number of arguments, and the "..."
10699 parameters are the argument expressions. */
10702 build_call_expr (tree fndecl, int n, ...)
10705 tree fntype = TREE_TYPE (fndecl);
10706 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10707 tree *argarray = (tree *) alloca (n * sizeof (tree));
10711 for (i = 0; i < n; i++)
10712 argarray[i] = va_arg (ap, tree);
10714 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10717 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10718 N arguments are passed in the array ARGARRAY. */
10721 fold_builtin_call_array (tree type,
10726 tree ret = NULL_TREE;
10730 if (TREE_CODE (fn) == ADDR_EXPR)
10732 tree fndecl = TREE_OPERAND (fn, 0);
10733 if (TREE_CODE (fndecl) == FUNCTION_DECL
10734 && DECL_BUILT_IN (fndecl))
10736 /* If last argument is __builtin_va_arg_pack (), arguments to this
10737 function are not finalized yet. Defer folding until they are. */
10738 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10740 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10742 && TREE_CODE (fndecl2) == FUNCTION_DECL
10743 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10744 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10745 return build_call_array (type, fn, n, argarray);
10747 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10749 tree arglist = NULL_TREE;
10750 for (i = n - 1; i >= 0; i--)
10751 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10752 ret = targetm.fold_builtin (fndecl, arglist, false);
10756 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10758 /* First try the transformations that don't require consing up
10760 ret = fold_builtin_n (fndecl, argarray, n, false);
10765 /* If we got this far, we need to build an exp. */
10766 exp = build_call_array (type, fn, n, argarray);
10767 ret = fold_builtin_varargs (fndecl, exp, false);
10768 return ret ? ret : exp;
10772 return build_call_array (type, fn, n, argarray);
10775 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10776 along with N new arguments specified as the "..." parameters. SKIP
10777 is the number of arguments in EXP to be omitted. This function is used
10778 to do varargs-to-varargs transformations. */
10781 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10783 int oldnargs = call_expr_nargs (exp);
10784 int nargs = oldnargs - skip + n;
10785 tree fntype = TREE_TYPE (fndecl);
10786 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10794 buffer = alloca (nargs * sizeof (tree));
10796 for (i = 0; i < n; i++)
10797 buffer[i] = va_arg (ap, tree);
10799 for (j = skip; j < oldnargs; j++, i++)
10800 buffer[i] = CALL_EXPR_ARG (exp, j);
10803 buffer = CALL_EXPR_ARGP (exp) + skip;
10805 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10808 /* Validate a single argument ARG against a tree code CODE representing
10812 validate_arg (const_tree arg, enum tree_code code)
10816 else if (code == POINTER_TYPE)
10817 return POINTER_TYPE_P (TREE_TYPE (arg));
10818 else if (code == INTEGER_TYPE)
10819 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10820 return code == TREE_CODE (TREE_TYPE (arg));
10823 /* This function validates the types of a function call argument list
10824 against a specified list of tree_codes. If the last specifier is a 0,
10825 that represents an ellipses, otherwise the last specifier must be a
10829 validate_arglist (const_tree callexpr, ...)
10831 enum tree_code code;
10834 const_call_expr_arg_iterator iter;
10837 va_start (ap, callexpr);
10838 init_const_call_expr_arg_iterator (callexpr, &iter);
10842 code = va_arg (ap, enum tree_code);
10846 /* This signifies an ellipses, any further arguments are all ok. */
10850 /* This signifies an endlink, if no arguments remain, return
10851 true, otherwise return false. */
10852 res = !more_const_call_expr_args_p (&iter);
10855 /* If no parameters remain or the parameter's code does not
10856 match the specified code, return false. Otherwise continue
10857 checking any remaining arguments. */
10858 arg = next_const_call_expr_arg (&iter);
10859 if (!validate_arg (arg, code))
10866 /* We need gotos here since we can only have one VA_CLOSE in a
10874 /* Default target-specific builtin expander that does nothing. */
10877 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10878 rtx target ATTRIBUTE_UNUSED,
10879 rtx subtarget ATTRIBUTE_UNUSED,
10880 enum machine_mode mode ATTRIBUTE_UNUSED,
10881 int ignore ATTRIBUTE_UNUSED)
10886 /* Returns true is EXP represents data that would potentially reside
10887 in a readonly section. */
10890 readonly_data_expr (tree exp)
10894 if (TREE_CODE (exp) != ADDR_EXPR)
10897 exp = get_base_address (TREE_OPERAND (exp, 0));
10901 /* Make sure we call decl_readonly_section only for trees it
10902 can handle (since it returns true for everything it doesn't
10904 if (TREE_CODE (exp) == STRING_CST
10905 || TREE_CODE (exp) == CONSTRUCTOR
10906 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10907 return decl_readonly_section (exp, 0);
10912 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10913 to the call, and TYPE is its return type.
10915 Return NULL_TREE if no simplification was possible, otherwise return the
10916 simplified form of the call as a tree.
10918 The simplified form may be a constant or other expression which
10919 computes the same value, but in a more efficient manner (including
10920 calls to other builtin functions).
10922 The call may contain arguments which need to be evaluated, but
10923 which are not useful to determine the result of the call. In
10924 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10925 COMPOUND_EXPR will be an argument which must be evaluated.
10926 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10927 COMPOUND_EXPR in the chain will contain the tree for the simplified
10928 form of the builtin function call. */
10931 fold_builtin_strstr (tree s1, tree s2, tree type)
10933 if (!validate_arg (s1, POINTER_TYPE)
10934 || !validate_arg (s2, POINTER_TYPE))
10939 const char *p1, *p2;
10941 p2 = c_getstr (s2);
10945 p1 = c_getstr (s1);
10948 const char *r = strstr (p1, p2);
10952 return build_int_cst (TREE_TYPE (s1), 0);
10954 /* Return an offset into the constant string argument. */
10955 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10956 s1, size_int (r - p1));
10957 return fold_convert (type, tem);
10960 /* The argument is const char *, and the result is char *, so we need
10961 a type conversion here to avoid a warning. */
10963 return fold_convert (type, s1);
10968 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10972 /* New argument list transforming strstr(s1, s2) to
10973 strchr(s1, s2[0]). */
10974 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10978 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10979 the call, and TYPE is its return type.
10981 Return NULL_TREE if no simplification was possible, otherwise return the
10982 simplified form of the call as a tree.
10984 The simplified form may be a constant or other expression which
10985 computes the same value, but in a more efficient manner (including
10986 calls to other builtin functions).
10988 The call may contain arguments which need to be evaluated, but
10989 which are not useful to determine the result of the call. In
10990 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10991 COMPOUND_EXPR will be an argument which must be evaluated.
10992 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10993 COMPOUND_EXPR in the chain will contain the tree for the simplified
10994 form of the builtin function call. */
10997 fold_builtin_strchr (tree s1, tree s2, tree type)
10999 if (!validate_arg (s1, POINTER_TYPE)
11000 || !validate_arg (s2, INTEGER_TYPE))
11006 if (TREE_CODE (s2) != INTEGER_CST)
11009 p1 = c_getstr (s1);
11016 if (target_char_cast (s2, &c))
11019 r = strchr (p1, c);
11022 return build_int_cst (TREE_TYPE (s1), 0);
11024 /* Return an offset into the constant string argument. */
11025 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11026 s1, size_int (r - p1));
11027 return fold_convert (type, tem);
11033 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11034 the call, and TYPE is its return type.
11036 Return NULL_TREE if no simplification was possible, otherwise return the
11037 simplified form of the call as a tree.
11039 The simplified form may be a constant or other expression which
11040 computes the same value, but in a more efficient manner (including
11041 calls to other builtin functions).
11043 The call may contain arguments which need to be evaluated, but
11044 which are not useful to determine the result of the call. In
11045 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11046 COMPOUND_EXPR will be an argument which must be evaluated.
11047 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11048 COMPOUND_EXPR in the chain will contain the tree for the simplified
11049 form of the builtin function call. */
11052 fold_builtin_strrchr (tree s1, tree s2, tree type)
11054 if (!validate_arg (s1, POINTER_TYPE)
11055 || !validate_arg (s2, INTEGER_TYPE))
11062 if (TREE_CODE (s2) != INTEGER_CST)
11065 p1 = c_getstr (s1);
11072 if (target_char_cast (s2, &c))
11075 r = strrchr (p1, c);
11078 return build_int_cst (TREE_TYPE (s1), 0);
11080 /* Return an offset into the constant string argument. */
11081 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11082 s1, size_int (r - p1));
11083 return fold_convert (type, tem);
11086 if (! integer_zerop (s2))
11089 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11093 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11094 return build_call_expr (fn, 2, s1, s2);
11098 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11099 to the call, and TYPE is its return type.
11101 Return NULL_TREE if no simplification was possible, otherwise return the
11102 simplified form of the call as a tree.
11104 The simplified form may be a constant or other expression which
11105 computes the same value, but in a more efficient manner (including
11106 calls to other builtin functions).
11108 The call may contain arguments which need to be evaluated, but
11109 which are not useful to determine the result of the call. In
11110 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11111 COMPOUND_EXPR will be an argument which must be evaluated.
11112 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11113 COMPOUND_EXPR in the chain will contain the tree for the simplified
11114 form of the builtin function call. */
11117 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11119 if (!validate_arg (s1, POINTER_TYPE)
11120 || !validate_arg (s2, POINTER_TYPE))
11125 const char *p1, *p2;
11127 p2 = c_getstr (s2);
11131 p1 = c_getstr (s1);
11134 const char *r = strpbrk (p1, p2);
11138 return build_int_cst (TREE_TYPE (s1), 0);
11140 /* Return an offset into the constant string argument. */
11141 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11142 s1, size_int (r - p1));
11143 return fold_convert (type, tem);
11147 /* strpbrk(x, "") == NULL.
11148 Evaluate and ignore s1 in case it had side-effects. */
11149 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11152 return NULL_TREE; /* Really call strpbrk. */
11154 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11158 /* New argument list transforming strpbrk(s1, s2) to
11159 strchr(s1, s2[0]). */
11160 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11164 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11167 Return NULL_TREE if no simplification was possible, otherwise return the
11168 simplified form of the call as a tree.
11170 The simplified form may be a constant or other expression which
11171 computes the same value, but in a more efficient manner (including
11172 calls to other builtin functions).
11174 The call may contain arguments which need to be evaluated, but
11175 which are not useful to determine the result of the call. In
11176 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11177 COMPOUND_EXPR will be an argument which must be evaluated.
11178 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11179 COMPOUND_EXPR in the chain will contain the tree for the simplified
11180 form of the builtin function call. */
11183 fold_builtin_strcat (tree dst, tree src)
11185 if (!validate_arg (dst, POINTER_TYPE)
11186 || !validate_arg (src, POINTER_TYPE))
11190 const char *p = c_getstr (src);
11192 /* If the string length is zero, return the dst parameter. */
11193 if (p && *p == '\0')
11200 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11201 arguments to the call.
11203 Return NULL_TREE if no simplification was possible, otherwise return the
11204 simplified form of the call as a tree.
11206 The simplified form may be a constant or other expression which
11207 computes the same value, but in a more efficient manner (including
11208 calls to other builtin functions).
11210 The call may contain arguments which need to be evaluated, but
11211 which are not useful to determine the result of the call. In
11212 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11213 COMPOUND_EXPR will be an argument which must be evaluated.
11214 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11215 COMPOUND_EXPR in the chain will contain the tree for the simplified
11216 form of the builtin function call. */
11219 fold_builtin_strncat (tree dst, tree src, tree len)
11221 if (!validate_arg (dst, POINTER_TYPE)
11222 || !validate_arg (src, POINTER_TYPE)
11223 || !validate_arg (len, INTEGER_TYPE))
11227 const char *p = c_getstr (src);
11229 /* If the requested length is zero, or the src parameter string
11230 length is zero, return the dst parameter. */
11231 if (integer_zerop (len) || (p && *p == '\0'))
11232 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11234 /* If the requested len is greater than or equal to the string
11235 length, call strcat. */
11236 if (TREE_CODE (len) == INTEGER_CST && p
11237 && compare_tree_int (len, strlen (p)) >= 0)
11239 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11241 /* If the replacement _DECL isn't initialized, don't do the
11246 return build_call_expr (fn, 2, dst, src);
11252 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11255 Return NULL_TREE if no simplification was possible, otherwise return the
11256 simplified form of the call as a tree.
11258 The simplified form may be a constant or other expression which
11259 computes the same value, but in a more efficient manner (including
11260 calls to other builtin functions).
11262 The call may contain arguments which need to be evaluated, but
11263 which are not useful to determine the result of the call. In
11264 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11265 COMPOUND_EXPR will be an argument which must be evaluated.
11266 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11267 COMPOUND_EXPR in the chain will contain the tree for the simplified
11268 form of the builtin function call. */
11271 fold_builtin_strspn (tree s1, tree s2)
11273 if (!validate_arg (s1, POINTER_TYPE)
11274 || !validate_arg (s2, POINTER_TYPE))
11278 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11280 /* If both arguments are constants, evaluate at compile-time. */
11283 const size_t r = strspn (p1, p2);
11284 return size_int (r);
11287 /* If either argument is "", return NULL_TREE. */
11288 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11289 /* Evaluate and ignore both arguments in case either one has
11291 return omit_two_operands (integer_type_node, integer_zero_node,
11297 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11300 Return NULL_TREE if no simplification was possible, otherwise return the
11301 simplified form of the call as a tree.
11303 The simplified form may be a constant or other expression which
11304 computes the same value, but in a more efficient manner (including
11305 calls to other builtin functions).
11307 The call may contain arguments which need to be evaluated, but
11308 which are not useful to determine the result of the call. In
11309 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11310 COMPOUND_EXPR will be an argument which must be evaluated.
11311 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11312 COMPOUND_EXPR in the chain will contain the tree for the simplified
11313 form of the builtin function call. */
11316 fold_builtin_strcspn (tree s1, tree s2)
11318 if (!validate_arg (s1, POINTER_TYPE)
11319 || !validate_arg (s2, POINTER_TYPE))
11323 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11325 /* If both arguments are constants, evaluate at compile-time. */
11328 const size_t r = strcspn (p1, p2);
11329 return size_int (r);
11332 /* If the first argument is "", return NULL_TREE. */
11333 if (p1 && *p1 == '\0')
11335 /* Evaluate and ignore argument s2 in case it has
11337 return omit_one_operand (integer_type_node,
11338 integer_zero_node, s2);
11341 /* If the second argument is "", return __builtin_strlen(s1). */
11342 if (p2 && *p2 == '\0')
11344 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11346 /* If the replacement _DECL isn't initialized, don't do the
11351 return build_call_expr (fn, 1, s1);
11357 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11358 to the call. IGNORE is true if the value returned
11359 by the builtin will be ignored. UNLOCKED is true is true if this
11360 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11361 the known length of the string. Return NULL_TREE if no simplification
11365 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11367 /* If we're using an unlocked function, assume the other unlocked
11368 functions exist explicitly. */
11369 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11370 : implicit_built_in_decls[BUILT_IN_FPUTC];
11371 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11372 : implicit_built_in_decls[BUILT_IN_FWRITE];
11374 /* If the return value is used, don't do the transformation. */
11378 /* Verify the arguments in the original call. */
11379 if (!validate_arg (arg0, POINTER_TYPE)
11380 || !validate_arg (arg1, POINTER_TYPE))
11384 len = c_strlen (arg0, 0);
11386 /* Get the length of the string passed to fputs. If the length
11387 can't be determined, punt. */
11389 || TREE_CODE (len) != INTEGER_CST)
11392 switch (compare_tree_int (len, 1))
11394 case -1: /* length is 0, delete the call entirely . */
11395 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11397 case 0: /* length is 1, call fputc. */
11399 const char *p = c_getstr (arg0);
11404 return build_call_expr (fn_fputc, 2,
11405 build_int_cst (NULL_TREE, p[0]), arg1);
11411 case 1: /* length is greater than 1, call fwrite. */
11413 /* If optimizing for size keep fputs. */
11416 /* New argument list transforming fputs(string, stream) to
11417 fwrite(string, 1, len, stream). */
11419 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11424 gcc_unreachable ();
11429 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11430 produced. False otherwise. This is done so that we don't output the error
11431 or warning twice or three times. */
11433 fold_builtin_next_arg (tree exp, bool va_start_p)
11435 tree fntype = TREE_TYPE (current_function_decl);
11436 int nargs = call_expr_nargs (exp);
11439 if (TYPE_ARG_TYPES (fntype) == 0
11440 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11441 == void_type_node))
11443 error ("%<va_start%> used in function with fixed args");
11449 if (va_start_p && (nargs != 2))
11451 error ("wrong number of arguments to function %<va_start%>");
11454 arg = CALL_EXPR_ARG (exp, 1);
11456 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11457 when we checked the arguments and if needed issued a warning. */
11462 /* Evidently an out of date version of <stdarg.h>; can't validate
11463 va_start's second argument, but can still work as intended. */
11464 warning (0, "%<__builtin_next_arg%> called without an argument");
11467 else if (nargs > 1)
11469 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11472 arg = CALL_EXPR_ARG (exp, 0);
11475 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11476 or __builtin_next_arg (0) the first time we see it, after checking
11477 the arguments and if needed issuing a warning. */
11478 if (!integer_zerop (arg))
11480 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11482 /* Strip off all nops for the sake of the comparison. This
11483 is not quite the same as STRIP_NOPS. It does more.
11484 We must also strip off INDIRECT_EXPR for C++ reference
11486 while (CONVERT_EXPR_P (arg)
11487 || TREE_CODE (arg) == INDIRECT_REF)
11488 arg = TREE_OPERAND (arg, 0);
11489 if (arg != last_parm)
11491 /* FIXME: Sometimes with the tree optimizers we can get the
11492 not the last argument even though the user used the last
11493 argument. We just warn and set the arg to be the last
11494 argument so that we will get wrong-code because of
11496 warning (0, "second parameter of %<va_start%> not last named argument");
11498 /* We want to verify the second parameter just once before the tree
11499 optimizers are run and then avoid keeping it in the tree,
11500 as otherwise we could warn even for correct code like:
11501 void foo (int i, ...)
11502 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11504 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11506 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11512 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11513 ORIG may be null if this is a 2-argument call. We don't attempt to
11514 simplify calls with more than 3 arguments.
11516 Return NULL_TREE if no simplification was possible, otherwise return the
11517 simplified form of the call as a tree. If IGNORED is true, it means that
11518 the caller does not use the returned value of the function. */
11521 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11524 const char *fmt_str = NULL;
11526 /* Verify the required arguments in the original call. We deal with two
11527 types of sprintf() calls: 'sprintf (str, fmt)' and
11528 'sprintf (dest, "%s", orig)'. */
11529 if (!validate_arg (dest, POINTER_TYPE)
11530 || !validate_arg (fmt, POINTER_TYPE))
11532 if (orig && !validate_arg (orig, POINTER_TYPE))
11535 /* Check whether the format is a literal string constant. */
11536 fmt_str = c_getstr (fmt);
11537 if (fmt_str == NULL)
11541 retval = NULL_TREE;
11543 if (!init_target_chars ())
11546 /* If the format doesn't contain % args or %%, use strcpy. */
11547 if (strchr (fmt_str, target_percent) == NULL)
11549 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11554 /* Don't optimize sprintf (buf, "abc", ptr++). */
11558 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11559 'format' is known to contain no % formats. */
11560 call = build_call_expr (fn, 2, dest, fmt);
11562 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11565 /* If the format is "%s", use strcpy if the result isn't used. */
11566 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11569 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11574 /* Don't crash on sprintf (str1, "%s"). */
11578 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11581 retval = c_strlen (orig, 1);
11582 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11585 call = build_call_expr (fn, 2, dest, orig);
11588 if (call && retval)
11590 retval = fold_convert
11591 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11593 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11599 /* Expand a call EXP to __builtin_object_size. */
11602 expand_builtin_object_size (tree exp)
11605 int object_size_type;
11606 tree fndecl = get_callee_fndecl (exp);
11608 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11610 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11612 expand_builtin_trap ();
11616 ost = CALL_EXPR_ARG (exp, 1);
11619 if (TREE_CODE (ost) != INTEGER_CST
11620 || tree_int_cst_sgn (ost) < 0
11621 || compare_tree_int (ost, 3) > 0)
11623 error ("%Klast argument of %D is not integer constant between 0 and 3",
11625 expand_builtin_trap ();
11629 object_size_type = tree_low_cst (ost, 0);
11631 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11634 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11635 FCODE is the BUILT_IN_* to use.
11636 Return NULL_RTX if we failed; the caller should emit a normal call,
11637 otherwise try to get the result in TARGET, if convenient (and in
11638 mode MODE if that's convenient). */
11641 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11642 enum built_in_function fcode)
11644 tree dest, src, len, size;
11646 if (!validate_arglist (exp,
11648 fcode == BUILT_IN_MEMSET_CHK
11649 ? INTEGER_TYPE : POINTER_TYPE,
11650 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11653 dest = CALL_EXPR_ARG (exp, 0);
11654 src = CALL_EXPR_ARG (exp, 1);
11655 len = CALL_EXPR_ARG (exp, 2);
11656 size = CALL_EXPR_ARG (exp, 3);
11658 if (! host_integerp (size, 1))
11661 if (host_integerp (len, 1) || integer_all_onesp (size))
11665 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11667 warning (0, "%Kcall to %D will always overflow destination buffer",
11668 exp, get_callee_fndecl (exp));
11673 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11674 mem{cpy,pcpy,move,set} is available. */
11677 case BUILT_IN_MEMCPY_CHK:
11678 fn = built_in_decls[BUILT_IN_MEMCPY];
11680 case BUILT_IN_MEMPCPY_CHK:
11681 fn = built_in_decls[BUILT_IN_MEMPCPY];
11683 case BUILT_IN_MEMMOVE_CHK:
11684 fn = built_in_decls[BUILT_IN_MEMMOVE];
11686 case BUILT_IN_MEMSET_CHK:
11687 fn = built_in_decls[BUILT_IN_MEMSET];
11696 fn = build_call_expr (fn, 3, dest, src, len);
11697 STRIP_TYPE_NOPS (fn);
11698 while (TREE_CODE (fn) == COMPOUND_EXPR)
11700 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11702 fn = TREE_OPERAND (fn, 1);
11704 if (TREE_CODE (fn) == CALL_EXPR)
11705 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11706 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11708 else if (fcode == BUILT_IN_MEMSET_CHK)
11712 unsigned int dest_align
11713 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11715 /* If DEST is not a pointer type, call the normal function. */
11716 if (dest_align == 0)
11719 /* If SRC and DEST are the same (and not volatile), do nothing. */
11720 if (operand_equal_p (src, dest, 0))
11724 if (fcode != BUILT_IN_MEMPCPY_CHK)
11726 /* Evaluate and ignore LEN in case it has side-effects. */
11727 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11728 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11731 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11732 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11735 /* __memmove_chk special case. */
11736 if (fcode == BUILT_IN_MEMMOVE_CHK)
11738 unsigned int src_align
11739 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11741 if (src_align == 0)
11744 /* If src is categorized for a readonly section we can use
11745 normal __memcpy_chk. */
11746 if (readonly_data_expr (src))
11748 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11751 fn = build_call_expr (fn, 4, dest, src, len, size);
11752 STRIP_TYPE_NOPS (fn);
11753 while (TREE_CODE (fn) == COMPOUND_EXPR)
11755 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11757 fn = TREE_OPERAND (fn, 1);
11759 if (TREE_CODE (fn) == CALL_EXPR)
11760 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11761 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11768 /* Emit warning if a buffer overflow is detected at compile time. */
11771 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11778 case BUILT_IN_STRCPY_CHK:
11779 case BUILT_IN_STPCPY_CHK:
11780 /* For __strcat_chk the warning will be emitted only if overflowing
11781 by at least strlen (dest) + 1 bytes. */
11782 case BUILT_IN_STRCAT_CHK:
11783 len = CALL_EXPR_ARG (exp, 1);
11784 size = CALL_EXPR_ARG (exp, 2);
11787 case BUILT_IN_STRNCAT_CHK:
11788 case BUILT_IN_STRNCPY_CHK:
11789 len = CALL_EXPR_ARG (exp, 2);
11790 size = CALL_EXPR_ARG (exp, 3);
11792 case BUILT_IN_SNPRINTF_CHK:
11793 case BUILT_IN_VSNPRINTF_CHK:
11794 len = CALL_EXPR_ARG (exp, 1);
11795 size = CALL_EXPR_ARG (exp, 3);
11798 gcc_unreachable ();
11804 if (! host_integerp (size, 1) || integer_all_onesp (size))
11809 len = c_strlen (len, 1);
11810 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11813 else if (fcode == BUILT_IN_STRNCAT_CHK)
11815 tree src = CALL_EXPR_ARG (exp, 1);
11816 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11818 src = c_strlen (src, 1);
11819 if (! src || ! host_integerp (src, 1))
11821 warning (0, "%Kcall to %D might overflow destination buffer",
11822 exp, get_callee_fndecl (exp));
11825 else if (tree_int_cst_lt (src, size))
11828 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11831 warning (0, "%Kcall to %D will always overflow destination buffer",
11832 exp, get_callee_fndecl (exp));
11835 /* Emit warning if a buffer overflow is detected at compile time
11836 in __sprintf_chk/__vsprintf_chk calls. */
11839 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11841 tree dest, size, len, fmt, flag;
11842 const char *fmt_str;
11843 int nargs = call_expr_nargs (exp);
11845 /* Verify the required arguments in the original call. */
11849 dest = CALL_EXPR_ARG (exp, 0);
11850 flag = CALL_EXPR_ARG (exp, 1);
11851 size = CALL_EXPR_ARG (exp, 2);
11852 fmt = CALL_EXPR_ARG (exp, 3);
11854 if (! host_integerp (size, 1) || integer_all_onesp (size))
11857 /* Check whether the format is a literal string constant. */
11858 fmt_str = c_getstr (fmt);
11859 if (fmt_str == NULL)
11862 if (!init_target_chars ())
11865 /* If the format doesn't contain % args or %%, we know its size. */
11866 if (strchr (fmt_str, target_percent) == 0)
11867 len = build_int_cstu (size_type_node, strlen (fmt_str));
11868 /* If the format is "%s" and first ... argument is a string literal,
11870 else if (fcode == BUILT_IN_SPRINTF_CHK
11871 && strcmp (fmt_str, target_percent_s) == 0)
11877 arg = CALL_EXPR_ARG (exp, 4);
11878 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11881 len = c_strlen (arg, 1);
11882 if (!len || ! host_integerp (len, 1))
11888 if (! tree_int_cst_lt (len, size))
11890 warning (0, "%Kcall to %D will always overflow destination buffer",
11891 exp, get_callee_fndecl (exp));
11895 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11899 fold_builtin_object_size (tree ptr, tree ost)
11901 tree ret = NULL_TREE;
11902 int object_size_type;
11904 if (!validate_arg (ptr, POINTER_TYPE)
11905 || !validate_arg (ost, INTEGER_TYPE))
11910 if (TREE_CODE (ost) != INTEGER_CST
11911 || tree_int_cst_sgn (ost) < 0
11912 || compare_tree_int (ost, 3) > 0)
11915 object_size_type = tree_low_cst (ost, 0);
11917 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11918 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11919 and (size_t) 0 for types 2 and 3. */
11920 if (TREE_SIDE_EFFECTS (ptr))
11921 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11923 if (TREE_CODE (ptr) == ADDR_EXPR)
11924 ret = build_int_cstu (size_type_node,
11925 compute_builtin_object_size (ptr, object_size_type));
11927 else if (TREE_CODE (ptr) == SSA_NAME)
11929 unsigned HOST_WIDE_INT bytes;
11931 /* If object size is not known yet, delay folding until
11932 later. Maybe subsequent passes will help determining
11934 bytes = compute_builtin_object_size (ptr, object_size_type);
11935 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11937 ret = build_int_cstu (size_type_node, bytes);
11942 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11943 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11944 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11951 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11952 DEST, SRC, LEN, and SIZE are the arguments to the call.
11953 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11954 code of the builtin. If MAXLEN is not NULL, it is maximum length
11955 passed as third argument. */
11958 fold_builtin_memory_chk (tree fndecl,
11959 tree dest, tree src, tree len, tree size,
11960 tree maxlen, bool ignore,
11961 enum built_in_function fcode)
11965 if (!validate_arg (dest, POINTER_TYPE)
11966 || !validate_arg (src,
11967 (fcode == BUILT_IN_MEMSET_CHK
11968 ? INTEGER_TYPE : POINTER_TYPE))
11969 || !validate_arg (len, INTEGER_TYPE)
11970 || !validate_arg (size, INTEGER_TYPE))
11973 /* If SRC and DEST are the same (and not volatile), return DEST
11974 (resp. DEST+LEN for __mempcpy_chk). */
11975 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11977 if (fcode != BUILT_IN_MEMPCPY_CHK)
11978 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11981 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11982 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
11986 if (! host_integerp (size, 1))
11989 if (! integer_all_onesp (size))
11991 if (! host_integerp (len, 1))
11993 /* If LEN is not constant, try MAXLEN too.
11994 For MAXLEN only allow optimizing into non-_ocs function
11995 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11996 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11998 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12000 /* (void) __mempcpy_chk () can be optimized into
12001 (void) __memcpy_chk (). */
12002 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12006 return build_call_expr (fn, 4, dest, src, len, size);
12014 if (tree_int_cst_lt (size, maxlen))
12019 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12020 mem{cpy,pcpy,move,set} is available. */
12023 case BUILT_IN_MEMCPY_CHK:
12024 fn = built_in_decls[BUILT_IN_MEMCPY];
12026 case BUILT_IN_MEMPCPY_CHK:
12027 fn = built_in_decls[BUILT_IN_MEMPCPY];
12029 case BUILT_IN_MEMMOVE_CHK:
12030 fn = built_in_decls[BUILT_IN_MEMMOVE];
12032 case BUILT_IN_MEMSET_CHK:
12033 fn = built_in_decls[BUILT_IN_MEMSET];
12042 return build_call_expr (fn, 3, dest, src, len);
12045 /* Fold a call to the __st[rp]cpy_chk builtin.
12046 DEST, SRC, and SIZE are the arguments to the call.
12047 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12048 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12049 strings passed as second argument. */
12052 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12053 tree maxlen, bool ignore,
12054 enum built_in_function fcode)
12058 if (!validate_arg (dest, POINTER_TYPE)
12059 || !validate_arg (src, POINTER_TYPE)
12060 || !validate_arg (size, INTEGER_TYPE))
12063 /* If SRC and DEST are the same (and not volatile), return DEST. */
12064 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12065 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12067 if (! host_integerp (size, 1))
12070 if (! integer_all_onesp (size))
12072 len = c_strlen (src, 1);
12073 if (! len || ! host_integerp (len, 1))
12075 /* If LEN is not constant, try MAXLEN too.
12076 For MAXLEN only allow optimizing into non-_ocs function
12077 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12078 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12080 if (fcode == BUILT_IN_STPCPY_CHK)
12085 /* If return value of __stpcpy_chk is ignored,
12086 optimize into __strcpy_chk. */
12087 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12091 return build_call_expr (fn, 3, dest, src, size);
12094 if (! len || TREE_SIDE_EFFECTS (len))
12097 /* If c_strlen returned something, but not a constant,
12098 transform __strcpy_chk into __memcpy_chk. */
12099 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12103 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12104 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12105 build_call_expr (fn, 4,
12106 dest, src, len, size));
12112 if (! tree_int_cst_lt (maxlen, size))
12116 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12117 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12118 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12122 return build_call_expr (fn, 2, dest, src);
12125 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12126 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12127 length passed as third argument. */
12130 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12135 if (!validate_arg (dest, POINTER_TYPE)
12136 || !validate_arg (src, POINTER_TYPE)
12137 || !validate_arg (len, INTEGER_TYPE)
12138 || !validate_arg (size, INTEGER_TYPE))
12141 if (! host_integerp (size, 1))
12144 if (! integer_all_onesp (size))
12146 if (! host_integerp (len, 1))
12148 /* If LEN is not constant, try MAXLEN too.
12149 For MAXLEN only allow optimizing into non-_ocs function
12150 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12151 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12157 if (tree_int_cst_lt (size, maxlen))
12161 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12162 fn = built_in_decls[BUILT_IN_STRNCPY];
12166 return build_call_expr (fn, 3, dest, src, len);
12169 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12170 are the arguments to the call. */
12173 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12178 if (!validate_arg (dest, POINTER_TYPE)
12179 || !validate_arg (src, POINTER_TYPE)
12180 || !validate_arg (size, INTEGER_TYPE))
12183 p = c_getstr (src);
12184 /* If the SRC parameter is "", return DEST. */
12185 if (p && *p == '\0')
12186 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12188 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12191 /* If __builtin_strcat_chk is used, assume strcat is available. */
12192 fn = built_in_decls[BUILT_IN_STRCAT];
12196 return build_call_expr (fn, 2, dest, src);
12199 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12203 fold_builtin_strncat_chk (tree fndecl,
12204 tree dest, tree src, tree len, tree size)
12209 if (!validate_arg (dest, POINTER_TYPE)
12210 || !validate_arg (src, POINTER_TYPE)
12211 || !validate_arg (size, INTEGER_TYPE)
12212 || !validate_arg (size, INTEGER_TYPE))
12215 p = c_getstr (src);
12216 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12217 if (p && *p == '\0')
12218 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12219 else if (integer_zerop (len))
12220 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12222 if (! host_integerp (size, 1))
12225 if (! integer_all_onesp (size))
12227 tree src_len = c_strlen (src, 1);
12229 && host_integerp (src_len, 1)
12230 && host_integerp (len, 1)
12231 && ! tree_int_cst_lt (len, src_len))
12233 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12234 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12238 return build_call_expr (fn, 3, dest, src, size);
12243 /* If __builtin_strncat_chk is used, assume strncat is available. */
12244 fn = built_in_decls[BUILT_IN_STRNCAT];
12248 return build_call_expr (fn, 3, dest, src, len);
12251 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12252 a normal call should be emitted rather than expanding the function
12253 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12256 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12258 tree dest, size, len, fn, fmt, flag;
12259 const char *fmt_str;
12260 int nargs = call_expr_nargs (exp);
12262 /* Verify the required arguments in the original call. */
12265 dest = CALL_EXPR_ARG (exp, 0);
12266 if (!validate_arg (dest, POINTER_TYPE))
12268 flag = CALL_EXPR_ARG (exp, 1);
12269 if (!validate_arg (flag, INTEGER_TYPE))
12271 size = CALL_EXPR_ARG (exp, 2);
12272 if (!validate_arg (size, INTEGER_TYPE))
12274 fmt = CALL_EXPR_ARG (exp, 3);
12275 if (!validate_arg (fmt, POINTER_TYPE))
12278 if (! host_integerp (size, 1))
12283 if (!init_target_chars ())
12286 /* Check whether the format is a literal string constant. */
12287 fmt_str = c_getstr (fmt);
12288 if (fmt_str != NULL)
12290 /* If the format doesn't contain % args or %%, we know the size. */
12291 if (strchr (fmt_str, target_percent) == 0)
12293 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12294 len = build_int_cstu (size_type_node, strlen (fmt_str));
12296 /* If the format is "%s" and first ... argument is a string literal,
12297 we know the size too. */
12298 else if (fcode == BUILT_IN_SPRINTF_CHK
12299 && strcmp (fmt_str, target_percent_s) == 0)
12305 arg = CALL_EXPR_ARG (exp, 4);
12306 if (validate_arg (arg, POINTER_TYPE))
12308 len = c_strlen (arg, 1);
12309 if (! len || ! host_integerp (len, 1))
12316 if (! integer_all_onesp (size))
12318 if (! len || ! tree_int_cst_lt (len, size))
12322 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12323 or if format doesn't contain % chars or is "%s". */
12324 if (! integer_zerop (flag))
12326 if (fmt_str == NULL)
12328 if (strchr (fmt_str, target_percent) != NULL
12329 && strcmp (fmt_str, target_percent_s))
12333 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12334 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12335 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12339 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12342 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12343 a normal call should be emitted rather than expanding the function
12344 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12345 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12346 passed as second argument. */
12349 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12350 enum built_in_function fcode)
12352 tree dest, size, len, fn, fmt, flag;
12353 const char *fmt_str;
12355 /* Verify the required arguments in the original call. */
12356 if (call_expr_nargs (exp) < 5)
12358 dest = CALL_EXPR_ARG (exp, 0);
12359 if (!validate_arg (dest, POINTER_TYPE))
12361 len = CALL_EXPR_ARG (exp, 1);
12362 if (!validate_arg (len, INTEGER_TYPE))
12364 flag = CALL_EXPR_ARG (exp, 2);
12365 if (!validate_arg (flag, INTEGER_TYPE))
12367 size = CALL_EXPR_ARG (exp, 3);
12368 if (!validate_arg (size, INTEGER_TYPE))
12370 fmt = CALL_EXPR_ARG (exp, 4);
12371 if (!validate_arg (fmt, POINTER_TYPE))
12374 if (! host_integerp (size, 1))
12377 if (! integer_all_onesp (size))
12379 if (! host_integerp (len, 1))
12381 /* If LEN is not constant, try MAXLEN too.
12382 For MAXLEN only allow optimizing into non-_ocs function
12383 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12384 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12390 if (tree_int_cst_lt (size, maxlen))
12394 if (!init_target_chars ())
12397 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12398 or if format doesn't contain % chars or is "%s". */
12399 if (! integer_zerop (flag))
12401 fmt_str = c_getstr (fmt);
12402 if (fmt_str == NULL)
12404 if (strchr (fmt_str, target_percent) != NULL
12405 && strcmp (fmt_str, target_percent_s))
12409 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12411 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12412 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12416 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12419 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12420 FMT and ARG are the arguments to the call; we don't fold cases with
12421 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12423 Return NULL_TREE if no simplification was possible, otherwise return the
12424 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12425 code of the function to be simplified. */
12428 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12429 enum built_in_function fcode)
12431 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12432 const char *fmt_str = NULL;
12434 /* If the return value is used, don't do the transformation. */
12438 /* Verify the required arguments in the original call. */
12439 if (!validate_arg (fmt, POINTER_TYPE))
12442 /* Check whether the format is a literal string constant. */
12443 fmt_str = c_getstr (fmt);
12444 if (fmt_str == NULL)
12447 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12449 /* If we're using an unlocked function, assume the other
12450 unlocked functions exist explicitly. */
12451 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12452 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12456 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12457 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12460 if (!init_target_chars ())
12463 if (strcmp (fmt_str, target_percent_s) == 0
12464 || strchr (fmt_str, target_percent) == NULL)
12468 if (strcmp (fmt_str, target_percent_s) == 0)
12470 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12473 if (!arg || !validate_arg (arg, POINTER_TYPE))
12476 str = c_getstr (arg);
12482 /* The format specifier doesn't contain any '%' characters. */
12483 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12489 /* If the string was "", printf does nothing. */
12490 if (str[0] == '\0')
12491 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12493 /* If the string has length of 1, call putchar. */
12494 if (str[1] == '\0')
12496 /* Given printf("c"), (where c is any one character,)
12497 convert "c"[0] to an int and pass that to the replacement
12499 newarg = build_int_cst (NULL_TREE, str[0]);
12501 call = build_call_expr (fn_putchar, 1, newarg);
12505 /* If the string was "string\n", call puts("string"). */
12506 size_t len = strlen (str);
12507 if ((unsigned char)str[len - 1] == target_newline)
12509 /* Create a NUL-terminated string that's one char shorter
12510 than the original, stripping off the trailing '\n'. */
12511 char *newstr = alloca (len);
12512 memcpy (newstr, str, len - 1);
12513 newstr[len - 1] = 0;
12515 newarg = build_string_literal (len, newstr);
12517 call = build_call_expr (fn_puts, 1, newarg);
12520 /* We'd like to arrange to call fputs(string,stdout) here,
12521 but we need stdout and don't have a way to get it yet. */
12526 /* The other optimizations can be done only on the non-va_list variants. */
12527 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12530 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12531 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12533 if (!arg || !validate_arg (arg, POINTER_TYPE))
12536 call = build_call_expr (fn_puts, 1, arg);
12539 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12540 else if (strcmp (fmt_str, target_percent_c) == 0)
12542 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12545 call = build_call_expr (fn_putchar, 1, arg);
12551 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12554 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12555 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12556 more than 3 arguments, and ARG may be null in the 2-argument case.
12558 Return NULL_TREE if no simplification was possible, otherwise return the
12559 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12560 code of the function to be simplified. */
12563 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12564 enum built_in_function fcode)
12566 tree fn_fputc, fn_fputs, call = NULL_TREE;
12567 const char *fmt_str = NULL;
12569 /* If the return value is used, don't do the transformation. */
12573 /* Verify the required arguments in the original call. */
12574 if (!validate_arg (fp, POINTER_TYPE))
12576 if (!validate_arg (fmt, POINTER_TYPE))
12579 /* Check whether the format is a literal string constant. */
12580 fmt_str = c_getstr (fmt);
12581 if (fmt_str == NULL)
12584 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12586 /* If we're using an unlocked function, assume the other
12587 unlocked functions exist explicitly. */
12588 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12589 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12593 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12594 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12597 if (!init_target_chars ())
12600 /* If the format doesn't contain % args or %%, use strcpy. */
12601 if (strchr (fmt_str, target_percent) == NULL)
12603 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12607 /* If the format specifier was "", fprintf does nothing. */
12608 if (fmt_str[0] == '\0')
12610 /* If FP has side-effects, just wait until gimplification is
12612 if (TREE_SIDE_EFFECTS (fp))
12615 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12618 /* When "string" doesn't contain %, replace all cases of
12619 fprintf (fp, string) with fputs (string, fp). The fputs
12620 builtin will take care of special cases like length == 1. */
12622 call = build_call_expr (fn_fputs, 2, fmt, fp);
12625 /* The other optimizations can be done only on the non-va_list variants. */
12626 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12629 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12630 else if (strcmp (fmt_str, target_percent_s) == 0)
12632 if (!arg || !validate_arg (arg, POINTER_TYPE))
12635 call = build_call_expr (fn_fputs, 2, arg, fp);
12638 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12639 else if (strcmp (fmt_str, target_percent_c) == 0)
12641 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12644 call = build_call_expr (fn_fputc, 2, arg, fp);
12649 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12652 /* Initialize format string characters in the target charset. */
12655 init_target_chars (void)
12660 target_newline = lang_hooks.to_target_charset ('\n');
12661 target_percent = lang_hooks.to_target_charset ('%');
12662 target_c = lang_hooks.to_target_charset ('c');
12663 target_s = lang_hooks.to_target_charset ('s');
12664 if (target_newline == 0 || target_percent == 0 || target_c == 0
12668 target_percent_c[0] = target_percent;
12669 target_percent_c[1] = target_c;
12670 target_percent_c[2] = '\0';
12672 target_percent_s[0] = target_percent;
12673 target_percent_s[1] = target_s;
12674 target_percent_s[2] = '\0';
12676 target_percent_s_newline[0] = target_percent;
12677 target_percent_s_newline[1] = target_s;
12678 target_percent_s_newline[2] = target_newline;
12679 target_percent_s_newline[3] = '\0';
12686 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12687 and no overflow/underflow occurred. INEXACT is true if M was not
12688 exactly calculated. TYPE is the tree type for the result. This
12689 function assumes that you cleared the MPFR flags and then
12690 calculated M to see if anything subsequently set a flag prior to
12691 entering this function. Return NULL_TREE if any checks fail. */
12694 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12696 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12697 overflow/underflow occurred. If -frounding-math, proceed iff the
12698 result of calling FUNC was exact. */
12699 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12700 && (!flag_rounding_math || !inexact))
12702 REAL_VALUE_TYPE rr;
12704 real_from_mpfr (&rr, m, type, GMP_RNDN);
12705 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12706 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12707 but the mpft_t is not, then we underflowed in the
12709 if (real_isfinite (&rr)
12710 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12712 REAL_VALUE_TYPE rmode;
12714 real_convert (&rmode, TYPE_MODE (type), &rr);
12715 /* Proceed iff the specified mode can hold the value. */
12716 if (real_identical (&rmode, &rr))
12717 return build_real (type, rmode);
12723 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12724 FUNC on it and return the resulting value as a tree with type TYPE.
12725 If MIN and/or MAX are not NULL, then the supplied ARG must be
12726 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12727 acceptable values, otherwise they are not. The mpfr precision is
12728 set to the precision of TYPE. We assume that function FUNC returns
12729 zero if the result could be calculated exactly within the requested
12733 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12734 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12737 tree result = NULL_TREE;
12741 /* To proceed, MPFR must exactly represent the target floating point
12742 format, which only happens when the target base equals two. */
12743 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12744 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12746 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12748 if (real_isfinite (ra)
12749 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12750 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12752 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12756 mpfr_init2 (m, prec);
12757 mpfr_from_real (m, ra, GMP_RNDN);
12758 mpfr_clear_flags ();
12759 inexact = func (m, m, GMP_RNDN);
12760 result = do_mpfr_ckconv (m, type, inexact);
12768 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12769 FUNC on it and return the resulting value as a tree with type TYPE.
12770 The mpfr precision is set to the precision of TYPE. We assume that
12771 function FUNC returns zero if the result could be calculated
12772 exactly within the requested precision. */
12775 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12776 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12778 tree result = NULL_TREE;
12783 /* To proceed, MPFR must exactly represent the target floating point
12784 format, which only happens when the target base equals two. */
12785 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12786 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12787 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12789 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12790 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12792 if (real_isfinite (ra1) && real_isfinite (ra2))
12794 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12798 mpfr_inits2 (prec, m1, m2, NULL);
12799 mpfr_from_real (m1, ra1, GMP_RNDN);
12800 mpfr_from_real (m2, ra2, GMP_RNDN);
12801 mpfr_clear_flags ();
12802 inexact = func (m1, m1, m2, GMP_RNDN);
12803 result = do_mpfr_ckconv (m1, type, inexact);
12804 mpfr_clears (m1, m2, NULL);
12811 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12812 FUNC on it and return the resulting value as a tree with type TYPE.
12813 The mpfr precision is set to the precision of TYPE. We assume that
12814 function FUNC returns zero if the result could be calculated
12815 exactly within the requested precision. */
12818 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12819 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12821 tree result = NULL_TREE;
12827 /* To proceed, MPFR must exactly represent the target floating point
12828 format, which only happens when the target base equals two. */
12829 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12830 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12831 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12832 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12834 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12835 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12836 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12838 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12840 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12844 mpfr_inits2 (prec, m1, m2, m3, NULL);
12845 mpfr_from_real (m1, ra1, GMP_RNDN);
12846 mpfr_from_real (m2, ra2, GMP_RNDN);
12847 mpfr_from_real (m3, ra3, GMP_RNDN);
12848 mpfr_clear_flags ();
12849 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12850 result = do_mpfr_ckconv (m1, type, inexact);
12851 mpfr_clears (m1, m2, m3, NULL);
12858 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12859 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12860 If ARG_SINP and ARG_COSP are NULL then the result is returned
12861 as a complex value.
12862 The type is taken from the type of ARG and is used for setting the
12863 precision of the calculation and results. */
12866 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12868 tree const type = TREE_TYPE (arg);
12869 tree result = NULL_TREE;
12873 /* To proceed, MPFR must exactly represent the target floating point
12874 format, which only happens when the target base equals two. */
12875 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12876 && TREE_CODE (arg) == REAL_CST
12877 && !TREE_OVERFLOW (arg))
12879 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12881 if (real_isfinite (ra))
12883 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12884 tree result_s, result_c;
12888 mpfr_inits2 (prec, m, ms, mc, NULL);
12889 mpfr_from_real (m, ra, GMP_RNDN);
12890 mpfr_clear_flags ();
12891 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12892 result_s = do_mpfr_ckconv (ms, type, inexact);
12893 result_c = do_mpfr_ckconv (mc, type, inexact);
12894 mpfr_clears (m, ms, mc, NULL);
12895 if (result_s && result_c)
12897 /* If we are to return in a complex value do so. */
12898 if (!arg_sinp && !arg_cosp)
12899 return build_complex (build_complex_type (type),
12900 result_c, result_s);
12902 /* Dereference the sin/cos pointer arguments. */
12903 arg_sinp = build_fold_indirect_ref (arg_sinp);
12904 arg_cosp = build_fold_indirect_ref (arg_cosp);
12905 /* Proceed if valid pointer type were passed in. */
12906 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12907 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12909 /* Set the values. */
12910 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12912 TREE_SIDE_EFFECTS (result_s) = 1;
12913 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12915 TREE_SIDE_EFFECTS (result_c) = 1;
12916 /* Combine the assignments into a compound expr. */
12917 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12918 result_s, result_c));
12926 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
12927 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12928 two-argument mpfr order N Bessel function FUNC on them and return
12929 the resulting value as a tree with type TYPE. The mpfr precision
12930 is set to the precision of TYPE. We assume that function FUNC
12931 returns zero if the result could be calculated exactly within the
12932 requested precision. */
12934 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12935 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12936 const REAL_VALUE_TYPE *min, bool inclusive)
12938 tree result = NULL_TREE;
12943 /* To proceed, MPFR must exactly represent the target floating point
12944 format, which only happens when the target base equals two. */
12945 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12946 && host_integerp (arg1, 0)
12947 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12949 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12950 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12953 && real_isfinite (ra)
12954 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12956 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12960 mpfr_init2 (m, prec);
12961 mpfr_from_real (m, ra, GMP_RNDN);
12962 mpfr_clear_flags ();
12963 inexact = func (m, n, m, GMP_RNDN);
12964 result = do_mpfr_ckconv (m, type, inexact);
12972 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12973 the pointer *(ARG_QUO) and return the result. The type is taken
12974 from the type of ARG0 and is used for setting the precision of the
12975 calculation and results. */
12978 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12980 tree const type = TREE_TYPE (arg0);
12981 tree result = NULL_TREE;
12986 /* To proceed, MPFR must exactly represent the target floating point
12987 format, which only happens when the target base equals two. */
12988 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12989 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12990 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12992 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12993 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12995 if (real_isfinite (ra0) && real_isfinite (ra1))
12997 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
13002 mpfr_inits2 (prec, m0, m1, NULL);
13003 mpfr_from_real (m0, ra0, GMP_RNDN);
13004 mpfr_from_real (m1, ra1, GMP_RNDN);
13005 mpfr_clear_flags ();
13006 mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
13007 /* Remquo is independent of the rounding mode, so pass
13008 inexact=0 to do_mpfr_ckconv(). */
13009 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13010 mpfr_clears (m0, m1, NULL);
13013 /* MPFR calculates quo in the host's long so it may
13014 return more bits in quo than the target int can hold
13015 if sizeof(host long) > sizeof(target int). This can
13016 happen even for native compilers in LP64 mode. In
13017 these cases, modulo the quo value with the largest
13018 number that the target int can hold while leaving one
13019 bit for the sign. */
13020 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13021 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13023 /* Dereference the quo pointer argument. */
13024 arg_quo = build_fold_indirect_ref (arg_quo);
13025 /* Proceed iff a valid pointer type was passed in. */
13026 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13028 /* Set the value. */
13029 tree result_quo = fold_build2 (MODIFY_EXPR,
13030 TREE_TYPE (arg_quo), arg_quo,
13031 build_int_cst (NULL, integer_quo));
13032 TREE_SIDE_EFFECTS (result_quo) = 1;
13033 /* Combine the quo assignment with the rem. */
13034 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13035 result_quo, result_rem));
13043 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13044 resulting value as a tree with type TYPE. The mpfr precision is
13045 set to the precision of TYPE. We assume that this mpfr function
13046 returns zero if the result could be calculated exactly within the
13047 requested precision. In addition, the integer pointer represented
13048 by ARG_SG will be dereferenced and set to the appropriate signgam
13052 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13054 tree result = NULL_TREE;
13058 /* To proceed, MPFR must exactly represent the target floating point
13059 format, which only happens when the target base equals two. Also
13060 verify ARG is a constant and that ARG_SG is an int pointer. */
13061 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13062 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13063 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13064 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13066 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13068 /* In addition to NaN and Inf, the argument cannot be zero or a
13069 negative integer. */
13070 if (real_isfinite (ra)
13071 && ra->cl != rvc_zero
13072 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13074 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
13079 mpfr_init2 (m, prec);
13080 mpfr_from_real (m, ra, GMP_RNDN);
13081 mpfr_clear_flags ();
13082 inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
13083 result_lg = do_mpfr_ckconv (m, type, inexact);
13089 /* Dereference the arg_sg pointer argument. */
13090 arg_sg = build_fold_indirect_ref (arg_sg);
13091 /* Assign the signgam value into *arg_sg. */
13092 result_sg = fold_build2 (MODIFY_EXPR,
13093 TREE_TYPE (arg_sg), arg_sg,
13094 build_int_cst (NULL, sg));
13095 TREE_SIDE_EFFECTS (result_sg) = 1;
13096 /* Combine the signgam assignment with the lgamma result. */
13097 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13098 result_sg, result_lg));