1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
30 #include "tree-gimple.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names[(int) END_BUILTINS] =
65 #include "builtins.def"
69 /* Setup an array of _DECL trees, make sure each element is
70 initialized to NULL_TREE. */
71 tree built_in_decls[(int) END_BUILTINS];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 It may be NULL_TREE when this is invalid (for instance runtime is not
74 required to implement the function call in all cases). */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
129 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
132 static rtx expand_builtin_bzero (tree);
133 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_alloca (tree, rtx);
139 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static rtx expand_builtin_fputs (tree, rtx, bool);
142 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
143 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_expect (tree, tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (tree);
151 static tree fold_builtin_inf (tree, int);
152 static tree fold_builtin_nan (tree, tree, int);
153 static tree rewrite_call_expr (tree, int, tree, int, ...);
154 static bool validate_arg (const_tree, enum tree_code code);
155 static bool integer_valued_real_p (tree);
156 static tree fold_trunc_transparent_mathfn (tree, tree);
157 static bool readonly_data_expr (tree);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_signbit (tree, rtx);
160 static tree fold_builtin_sqrt (tree, tree);
161 static tree fold_builtin_cbrt (tree, tree);
162 static tree fold_builtin_pow (tree, tree, tree, tree);
163 static tree fold_builtin_powi (tree, tree, tree, tree);
164 static tree fold_builtin_cos (tree, tree, tree);
165 static tree fold_builtin_cosh (tree, tree, tree);
166 static tree fold_builtin_tan (tree, tree);
167 static tree fold_builtin_trunc (tree, tree);
168 static tree fold_builtin_floor (tree, tree);
169 static tree fold_builtin_ceil (tree, tree);
170 static tree fold_builtin_round (tree, tree);
171 static tree fold_builtin_int_roundingfn (tree, tree);
172 static tree fold_builtin_bitop (tree, tree);
173 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
174 static tree fold_builtin_strchr (tree, tree, tree);
175 static tree fold_builtin_memchr (tree, tree, tree, tree);
176 static tree fold_builtin_memcmp (tree, tree, tree);
177 static tree fold_builtin_strcmp (tree, tree);
178 static tree fold_builtin_strncmp (tree, tree, tree);
179 static tree fold_builtin_signbit (tree, tree);
180 static tree fold_builtin_copysign (tree, tree, tree, tree);
181 static tree fold_builtin_isascii (tree);
182 static tree fold_builtin_toascii (tree);
183 static tree fold_builtin_isdigit (tree);
184 static tree fold_builtin_fabs (tree, tree);
185 static tree fold_builtin_abs (tree, tree);
186 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
188 static tree fold_builtin_n (tree, tree *, int, bool);
189 static tree fold_builtin_0 (tree, bool);
190 static tree fold_builtin_1 (tree, tree, bool);
191 static tree fold_builtin_2 (tree, tree, tree, bool);
192 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
193 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
194 static tree fold_builtin_varargs (tree, tree, bool);
196 static tree fold_builtin_strpbrk (tree, tree, tree);
197 static tree fold_builtin_strstr (tree, tree, tree);
198 static tree fold_builtin_strrchr (tree, tree, tree);
199 static tree fold_builtin_strcat (tree, tree);
200 static tree fold_builtin_strncat (tree, tree, tree);
201 static tree fold_builtin_strspn (tree, tree);
202 static tree fold_builtin_strcspn (tree, tree);
203 static tree fold_builtin_sprintf (tree, tree, tree, int);
205 static rtx expand_builtin_object_size (tree);
206 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
207 enum built_in_function);
208 static void maybe_emit_chk_warning (tree, enum built_in_function);
209 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
210 static tree fold_builtin_object_size (tree, tree);
211 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
212 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
213 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
214 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
215 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
216 enum built_in_function);
217 static bool init_target_chars (void);
219 static unsigned HOST_WIDE_INT target_newline;
220 static unsigned HOST_WIDE_INT target_percent;
221 static unsigned HOST_WIDE_INT target_c;
222 static unsigned HOST_WIDE_INT target_s;
223 static char target_percent_c[3];
224 static char target_percent_s[3];
225 static char target_percent_s_newline[4];
226 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_arg2 (tree, tree, tree,
229 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
230 static tree do_mpfr_arg3 (tree, tree, tree, tree,
231 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
232 static tree do_mpfr_sincos (tree, tree, tree);
233 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
234 static tree do_mpfr_bessel_n (tree, tree, tree,
235 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
236 const REAL_VALUE_TYPE *, bool);
237 static tree do_mpfr_remquo (tree, tree, tree);
238 static tree do_mpfr_lgamma_r (tree, tree, tree);
241 /* Return true if NODE should be considered for inline expansion regardless
242 of the optimization level. This means whenever a function is invoked with
243 its "internal" name, which normally contains the prefix "__builtin". */
245 static bool called_as_built_in (tree node)
247 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
248 if (strncmp (name, "__builtin_", 10) == 0)
250 if (strncmp (name, "__sync_", 7) == 0)
255 /* Return the alignment in bits of EXP, a pointer valued expression.
256 But don't return more than MAX_ALIGN no matter what.
257 The alignment returned is, by default, the alignment of the thing that
258 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
260 Otherwise, look at the expression to see if we can do better, i.e., if the
261 expression is actually pointing at an object whose alignment is tighter. */
264 get_pointer_alignment (tree exp, unsigned int max_align)
266 unsigned int align, inner;
268 /* We rely on TER to compute accurate alignment information. */
269 if (!(optimize && flag_tree_ter))
272 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
275 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
276 align = MIN (align, max_align);
280 switch (TREE_CODE (exp))
283 exp = TREE_OPERAND (exp, 0);
284 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
287 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
288 align = MIN (inner, max_align);
291 case POINTER_PLUS_EXPR:
292 /* If sum of pointer + int, restrict our maximum alignment to that
293 imposed by the integer. If not, we can't do any better than
295 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
298 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
299 & (max_align / BITS_PER_UNIT - 1))
303 exp = TREE_OPERAND (exp, 0);
307 /* See what we are pointing at and look at its alignment. */
308 exp = TREE_OPERAND (exp, 0);
310 if (handled_component_p (exp))
312 HOST_WIDE_INT bitsize, bitpos;
314 enum machine_mode mode;
315 int unsignedp, volatilep;
317 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
318 &mode, &unsignedp, &volatilep, true);
320 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
321 if (offset && TREE_CODE (offset) == PLUS_EXPR
322 && host_integerp (TREE_OPERAND (offset, 1), 1))
324 /* Any overflow in calculating offset_bits won't change
327 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
331 inner = MIN (inner, (offset_bits & -offset_bits));
332 offset = TREE_OPERAND (offset, 0);
334 if (offset && TREE_CODE (offset) == MULT_EXPR
335 && host_integerp (TREE_OPERAND (offset, 1), 1))
337 /* Any overflow in calculating offset_factor won't change
339 unsigned offset_factor
340 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
344 inner = MIN (inner, (offset_factor & -offset_factor));
347 inner = MIN (inner, BITS_PER_UNIT);
350 align = MIN (inner, DECL_ALIGN (exp));
351 #ifdef CONSTANT_ALIGNMENT
352 else if (CONSTANT_CLASS_P (exp))
353 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
355 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
356 || TREE_CODE (exp) == INDIRECT_REF)
357 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
359 align = MIN (align, inner);
360 return MIN (align, max_align);
368 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
369 way, because it could contain a zero byte in the middle.
370 TREE_STRING_LENGTH is the size of the character array, not the string.
372 ONLY_VALUE should be nonzero if the result is not going to be emitted
373 into the instruction stream and zero if it is going to be expanded.
374 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
375 is returned, otherwise NULL, since
376 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
377 evaluate the side-effects.
379 The value returned is of type `ssizetype'.
381 Unfortunately, string_constant can't access the values of const char
382 arrays with initializers, so neither can we do so here. */
385 c_strlen (tree src, int only_value)
388 HOST_WIDE_INT offset;
393 if (TREE_CODE (src) == COND_EXPR
394 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
398 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
399 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
400 if (tree_int_cst_equal (len1, len2))
404 if (TREE_CODE (src) == COMPOUND_EXPR
405 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
406 return c_strlen (TREE_OPERAND (src, 1), only_value);
408 src = string_constant (src, &offset_node);
412 max = TREE_STRING_LENGTH (src) - 1;
413 ptr = TREE_STRING_POINTER (src);
415 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
417 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
418 compute the offset to the following null if we don't know where to
419 start searching for it. */
422 for (i = 0; i < max; i++)
426 /* We don't know the starting offset, but we do know that the string
427 has no internal zero bytes. We can assume that the offset falls
428 within the bounds of the string; otherwise, the programmer deserves
429 what he gets. Subtract the offset from the length of the string,
430 and return that. This would perhaps not be valid if we were dealing
431 with named arrays in addition to literal string constants. */
433 return size_diffop (size_int (max), offset_node);
436 /* We have a known offset into the string. Start searching there for
437 a null character if we can represent it as a single HOST_WIDE_INT. */
438 if (offset_node == 0)
440 else if (! host_integerp (offset_node, 0))
443 offset = tree_low_cst (offset_node, 0);
445 /* If the offset is known to be out of bounds, warn, and call strlen at
447 if (offset < 0 || offset > max)
449 /* Suppress multiple warnings for propagated constant strings. */
450 if (! TREE_NO_WARNING (src))
452 warning (0, "offset outside bounds of constant string");
453 TREE_NO_WARNING (src) = 1;
458 /* Use strlen to search for the first zero byte. Since any strings
459 constructed with build_string will have nulls appended, we win even
460 if we get handed something like (char[4])"abcd".
462 Since OFFSET is our starting index into the string, no further
463 calculation is needed. */
464 return ssize_int (strlen (ptr + offset));
467 /* Return a char pointer for a C string if it is a string constant
468 or sum of string constant and integer constant. */
475 src = string_constant (src, &offset_node);
479 if (offset_node == 0)
480 return TREE_STRING_POINTER (src);
481 else if (!host_integerp (offset_node, 1)
482 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
485 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
488 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
489 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
492 c_readstr (const char *str, enum machine_mode mode)
498 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
503 for (i = 0; i < GET_MODE_SIZE (mode); i++)
506 if (WORDS_BIG_ENDIAN)
507 j = GET_MODE_SIZE (mode) - i - 1;
508 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
509 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
510 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
512 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
515 ch = (unsigned char) str[i];
516 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
518 return immed_double_const (c[0], c[1], mode);
521 /* Cast a target constant CST to target CHAR and if that value fits into
522 host char type, return zero and put that value into variable pointed to by
526 target_char_cast (tree cst, char *p)
528 unsigned HOST_WIDE_INT val, hostval;
530 if (!host_integerp (cst, 1)
531 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
534 val = tree_low_cst (cst, 1);
535 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
536 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
539 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
540 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
549 /* Similar to save_expr, but assumes that arbitrary code is not executed
550 in between the multiple evaluations. In particular, we assume that a
551 non-addressable local variable will not be modified. */
554 builtin_save_expr (tree exp)
556 if (TREE_ADDRESSABLE (exp) == 0
557 && (TREE_CODE (exp) == PARM_DECL
558 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
561 return save_expr (exp);
564 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
565 times to get the address of either a higher stack frame, or a return
566 address located within it (depending on FNDECL_CODE). */
569 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
573 #ifdef INITIAL_FRAME_ADDRESS_RTX
574 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
578 /* For a zero count with __builtin_return_address, we don't care what
579 frame address we return, because target-specific definitions will
580 override us. Therefore frame pointer elimination is OK, and using
581 the soft frame pointer is OK.
583 For a nonzero count, or a zero count with __builtin_frame_address,
584 we require a stable offset from the current frame pointer to the
585 previous one, so we must use the hard frame pointer, and
586 we must disable frame pointer elimination. */
587 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
588 tem = frame_pointer_rtx;
591 tem = hard_frame_pointer_rtx;
593 /* Tell reload not to eliminate the frame pointer. */
594 crtl->accesses_prior_frames = 1;
598 /* Some machines need special handling before we can access
599 arbitrary frames. For example, on the SPARC, we must first flush
600 all register windows to the stack. */
601 #ifdef SETUP_FRAME_ADDRESSES
603 SETUP_FRAME_ADDRESSES ();
606 /* On the SPARC, the return address is not in the frame, it is in a
607 register. There is no way to access it off of the current frame
608 pointer, but it can be accessed off the previous frame pointer by
609 reading the value from the register window save area. */
610 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
611 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
615 /* Scan back COUNT frames to the specified frame. */
616 for (i = 0; i < count; i++)
618 /* Assume the dynamic chain pointer is in the word that the
619 frame address points to, unless otherwise specified. */
620 #ifdef DYNAMIC_CHAIN_ADDRESS
621 tem = DYNAMIC_CHAIN_ADDRESS (tem);
623 tem = memory_address (Pmode, tem);
624 tem = gen_frame_mem (Pmode, tem);
625 tem = copy_to_reg (tem);
628 /* For __builtin_frame_address, return what we've got. But, on
629 the SPARC for example, we may have to add a bias. */
630 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
631 #ifdef FRAME_ADDR_RTX
632 return FRAME_ADDR_RTX (tem);
637 /* For __builtin_return_address, get the return address from that frame. */
638 #ifdef RETURN_ADDR_RTX
639 tem = RETURN_ADDR_RTX (count, tem);
641 tem = memory_address (Pmode,
642 plus_constant (tem, GET_MODE_SIZE (Pmode)));
643 tem = gen_frame_mem (Pmode, tem);
648 /* Alias set used for setjmp buffer. */
649 static alias_set_type setjmp_alias_set = -1;
651 /* Construct the leading half of a __builtin_setjmp call. Control will
652 return to RECEIVER_LABEL. This is also called directly by the SJLJ
653 exception handling code. */
656 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
658 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
662 if (setjmp_alias_set == -1)
663 setjmp_alias_set = new_alias_set ();
665 buf_addr = convert_memory_address (Pmode, buf_addr);
667 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
669 /* We store the frame pointer and the address of receiver_label in
670 the buffer and use the rest of it for the stack save area, which
671 is machine-dependent. */
673 mem = gen_rtx_MEM (Pmode, buf_addr);
674 set_mem_alias_set (mem, setjmp_alias_set);
675 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
677 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
678 set_mem_alias_set (mem, setjmp_alias_set);
680 emit_move_insn (validize_mem (mem),
681 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
683 stack_save = gen_rtx_MEM (sa_mode,
684 plus_constant (buf_addr,
685 2 * GET_MODE_SIZE (Pmode)));
686 set_mem_alias_set (stack_save, setjmp_alias_set);
687 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
689 /* If there is further processing to do, do it. */
690 #ifdef HAVE_builtin_setjmp_setup
691 if (HAVE_builtin_setjmp_setup)
692 emit_insn (gen_builtin_setjmp_setup (buf_addr));
695 /* Tell optimize_save_area_alloca that extra work is going to
696 need to go on during alloca. */
697 cfun->calls_setjmp = 1;
699 /* We have a nonlocal label. */
700 cfun->has_nonlocal_label = 1;
703 /* Construct the trailing part of a __builtin_setjmp call. This is
704 also called directly by the SJLJ exception handling code. */
707 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
709 /* Clobber the FP when we get here, so we have to make sure it's
710 marked as used by this function. */
711 emit_use (hard_frame_pointer_rtx);
713 /* Mark the static chain as clobbered here so life information
714 doesn't get messed up for it. */
715 emit_clobber (static_chain_rtx);
717 /* Now put in the code to restore the frame pointer, and argument
718 pointer, if needed. */
719 #ifdef HAVE_nonlocal_goto
720 if (! HAVE_nonlocal_goto)
723 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
724 /* This might change the hard frame pointer in ways that aren't
725 apparent to early optimization passes, so force a clobber. */
726 emit_clobber (hard_frame_pointer_rtx);
729 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
730 if (fixed_regs[ARG_POINTER_REGNUM])
732 #ifdef ELIMINABLE_REGS
734 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
736 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
737 if (elim_regs[i].from == ARG_POINTER_REGNUM
738 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
741 if (i == ARRAY_SIZE (elim_regs))
744 /* Now restore our arg pointer from the address at which it
745 was saved in our stack frame. */
746 emit_move_insn (virtual_incoming_args_rtx,
747 copy_to_reg (get_arg_pointer_save_area ()));
752 #ifdef HAVE_builtin_setjmp_receiver
753 if (HAVE_builtin_setjmp_receiver)
754 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
757 #ifdef HAVE_nonlocal_goto_receiver
758 if (HAVE_nonlocal_goto_receiver)
759 emit_insn (gen_nonlocal_goto_receiver ());
764 /* We must not allow the code we just generated to be reordered by
765 scheduling. Specifically, the update of the frame pointer must
766 happen immediately, not later. */
767 emit_insn (gen_blockage ());
770 /* __builtin_longjmp is passed a pointer to an array of five words (not
771 all will be used on all machines). It operates similarly to the C
772 library function of the same name, but is more efficient. Much of
773 the code below is copied from the handling of non-local gotos. */
776 expand_builtin_longjmp (rtx buf_addr, rtx value)
778 rtx fp, lab, stack, insn, last;
779 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
781 if (setjmp_alias_set == -1)
782 setjmp_alias_set = new_alias_set ();
784 buf_addr = convert_memory_address (Pmode, buf_addr);
786 buf_addr = force_reg (Pmode, buf_addr);
788 /* We used to store value in static_chain_rtx, but that fails if pointers
789 are smaller than integers. We instead require that the user must pass
790 a second argument of 1, because that is what builtin_setjmp will
791 return. This also makes EH slightly more efficient, since we are no
792 longer copying around a value that we don't care about. */
793 gcc_assert (value == const1_rtx);
795 last = get_last_insn ();
796 #ifdef HAVE_builtin_longjmp
797 if (HAVE_builtin_longjmp)
798 emit_insn (gen_builtin_longjmp (buf_addr));
802 fp = gen_rtx_MEM (Pmode, buf_addr);
803 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
804 GET_MODE_SIZE (Pmode)));
806 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
807 2 * GET_MODE_SIZE (Pmode)));
808 set_mem_alias_set (fp, setjmp_alias_set);
809 set_mem_alias_set (lab, setjmp_alias_set);
810 set_mem_alias_set (stack, setjmp_alias_set);
812 /* Pick up FP, label, and SP from the block and jump. This code is
813 from expand_goto in stmt.c; see there for detailed comments. */
814 #ifdef HAVE_nonlocal_goto
815 if (HAVE_nonlocal_goto)
816 /* We have to pass a value to the nonlocal_goto pattern that will
817 get copied into the static_chain pointer, but it does not matter
818 what that value is, because builtin_setjmp does not use it. */
819 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
823 lab = copy_to_reg (lab);
825 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
826 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
828 emit_move_insn (hard_frame_pointer_rtx, fp);
829 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
831 emit_use (hard_frame_pointer_rtx);
832 emit_use (stack_pointer_rtx);
833 emit_indirect_jump (lab);
837 /* Search backwards and mark the jump insn as a non-local goto.
838 Note that this precludes the use of __builtin_longjmp to a
839 __builtin_setjmp target in the same function. However, we've
840 already cautioned the user that these functions are for
841 internal exception handling use only. */
842 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
844 gcc_assert (insn != last);
848 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
851 else if (CALL_P (insn))
856 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
857 and the address of the save area. */
860 expand_builtin_nonlocal_goto (tree exp)
862 tree t_label, t_save_area;
863 rtx r_label, r_save_area, r_fp, r_sp, insn;
865 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
868 t_label = CALL_EXPR_ARG (exp, 0);
869 t_save_area = CALL_EXPR_ARG (exp, 1);
871 r_label = expand_normal (t_label);
872 r_label = convert_memory_address (Pmode, r_label);
873 r_save_area = expand_normal (t_save_area);
874 r_save_area = convert_memory_address (Pmode, r_save_area);
875 /* Copy the address of the save location to a register just in case it was based
876 on the frame pointer. */
877 r_save_area = copy_to_reg (r_save_area);
878 r_fp = gen_rtx_MEM (Pmode, r_save_area);
879 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
880 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
882 crtl->has_nonlocal_goto = 1;
884 #ifdef HAVE_nonlocal_goto
885 /* ??? We no longer need to pass the static chain value, afaik. */
886 if (HAVE_nonlocal_goto)
887 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
891 r_label = copy_to_reg (r_label);
893 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
894 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
896 /* Restore frame pointer for containing function.
897 This sets the actual hard register used for the frame pointer
898 to the location of the function's incoming static chain info.
899 The non-local goto handler will then adjust it to contain the
900 proper value and reload the argument pointer, if needed. */
901 emit_move_insn (hard_frame_pointer_rtx, r_fp);
902 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
904 /* USE of hard_frame_pointer_rtx added for consistency;
905 not clear if really needed. */
906 emit_use (hard_frame_pointer_rtx);
907 emit_use (stack_pointer_rtx);
909 /* If the architecture is using a GP register, we must
910 conservatively assume that the target function makes use of it.
911 The prologue of functions with nonlocal gotos must therefore
912 initialize the GP register to the appropriate value, and we
913 must then make sure that this value is live at the point
914 of the jump. (Note that this doesn't necessarily apply
915 to targets with a nonlocal_goto pattern; they are free
916 to implement it in their own way. Note also that this is
917 a no-op if the GP register is a global invariant.) */
918 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
919 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
920 emit_use (pic_offset_table_rtx);
922 emit_indirect_jump (r_label);
925 /* Search backwards to the jump insn and mark it as a
927 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
931 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
934 else if (CALL_P (insn))
941 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
942 (not all will be used on all machines) that was passed to __builtin_setjmp.
943 It updates the stack pointer in that block to correspond to the current
947 expand_builtin_update_setjmp_buf (rtx buf_addr)
949 enum machine_mode sa_mode = Pmode;
953 #ifdef HAVE_save_stack_nonlocal
954 if (HAVE_save_stack_nonlocal)
955 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
957 #ifdef STACK_SAVEAREA_MODE
958 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
962 = gen_rtx_MEM (sa_mode,
965 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
969 emit_insn (gen_setjmp ());
972 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
975 /* Expand a call to __builtin_prefetch. For a target that does not support
976 data prefetch, evaluate the memory address argument in case it has side
980 expand_builtin_prefetch (tree exp)
982 tree arg0, arg1, arg2;
986 if (!validate_arglist (exp, POINTER_TYPE, 0))
989 arg0 = CALL_EXPR_ARG (exp, 0);
991 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
992 zero (read) and argument 2 (locality) defaults to 3 (high degree of
994 nargs = call_expr_nargs (exp);
996 arg1 = CALL_EXPR_ARG (exp, 1);
998 arg1 = integer_zero_node;
1000 arg2 = CALL_EXPR_ARG (exp, 2);
1002 arg2 = build_int_cst (NULL_TREE, 3);
1004 /* Argument 0 is an address. */
1005 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1007 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1008 if (TREE_CODE (arg1) != INTEGER_CST)
1010 error ("second argument to %<__builtin_prefetch%> must be a constant");
1011 arg1 = integer_zero_node;
1013 op1 = expand_normal (arg1);
1014 /* Argument 1 must be either zero or one. */
1015 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1017 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1022 /* Argument 2 (locality) must be a compile-time constant int. */
1023 if (TREE_CODE (arg2) != INTEGER_CST)
1025 error ("third argument to %<__builtin_prefetch%> must be a constant");
1026 arg2 = integer_zero_node;
1028 op2 = expand_normal (arg2);
1029 /* Argument 2 must be 0, 1, 2, or 3. */
1030 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1032 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1036 #ifdef HAVE_prefetch
1039 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1041 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1042 || (GET_MODE (op0) != Pmode))
1044 op0 = convert_memory_address (Pmode, op0);
1045 op0 = force_reg (Pmode, op0);
1047 emit_insn (gen_prefetch (op0, op1, op2));
1051 /* Don't do anything with direct references to volatile memory, but
1052 generate code to handle other side effects. */
1053 if (!MEM_P (op0) && side_effects_p (op0))
1057 /* Get a MEM rtx for expression EXP which is the address of an operand
1058 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1059 the maximum length of the block of memory that might be accessed or
1063 get_memory_rtx (tree exp, tree len)
1065 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1066 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1068 /* Get an expression we can use to find the attributes to assign to MEM.
1069 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1070 we can. First remove any nops. */
1071 while (CONVERT_EXPR_P (exp)
1072 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1073 exp = TREE_OPERAND (exp, 0);
1075 if (TREE_CODE (exp) == ADDR_EXPR)
1076 exp = TREE_OPERAND (exp, 0);
1077 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1078 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1082 /* Honor attributes derived from exp, except for the alias set
1083 (as builtin stringops may alias with anything) and the size
1084 (as stringops may access multiple array elements). */
1087 set_mem_attributes (mem, exp, 0);
1089 /* Allow the string and memory builtins to overflow from one
1090 field into another, see http://gcc.gnu.org/PR23561.
1091 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1092 memory accessed by the string or memory builtin will fit
1093 within the field. */
1094 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1096 tree mem_expr = MEM_EXPR (mem);
1097 HOST_WIDE_INT offset = -1, length = -1;
1100 while (TREE_CODE (inner) == ARRAY_REF
1101 || CONVERT_EXPR_P (inner)
1102 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1103 || TREE_CODE (inner) == SAVE_EXPR)
1104 inner = TREE_OPERAND (inner, 0);
1106 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1108 if (MEM_OFFSET (mem)
1109 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1110 offset = INTVAL (MEM_OFFSET (mem));
1112 if (offset >= 0 && len && host_integerp (len, 0))
1113 length = tree_low_cst (len, 0);
1115 while (TREE_CODE (inner) == COMPONENT_REF)
1117 tree field = TREE_OPERAND (inner, 1);
1118 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1119 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1121 /* Bitfields are generally not byte-addressable. */
1122 gcc_assert (!DECL_BIT_FIELD (field)
1123 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1124 % BITS_PER_UNIT) == 0
1125 && host_integerp (DECL_SIZE (field), 0)
1126 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1127 % BITS_PER_UNIT) == 0));
1129 /* If we can prove that the memory starting at XEXP (mem, 0) and
1130 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1131 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1132 fields without DECL_SIZE_UNIT like flexible array members. */
1134 && DECL_SIZE_UNIT (field)
1135 && host_integerp (DECL_SIZE_UNIT (field), 0))
1138 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1141 && offset + length <= size)
1146 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1147 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1148 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1156 mem_expr = TREE_OPERAND (mem_expr, 0);
1157 inner = TREE_OPERAND (inner, 0);
1160 if (mem_expr == NULL)
1162 if (mem_expr != MEM_EXPR (mem))
1164 set_mem_expr (mem, mem_expr);
1165 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1168 set_mem_alias_set (mem, 0);
1169 set_mem_size (mem, NULL_RTX);
1175 /* Built-in functions to perform an untyped call and return. */
1177 /* For each register that may be used for calling a function, this
1178 gives a mode used to copy the register's value. VOIDmode indicates
1179 the register is not used for calling a function. If the machine
1180 has register windows, this gives only the outbound registers.
1181 INCOMING_REGNO gives the corresponding inbound register. */
1182 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1184 /* For each register that may be used for returning values, this gives
1185 a mode used to copy the register's value. VOIDmode indicates the
1186 register is not used for returning values. If the machine has
1187 register windows, this gives only the outbound registers.
1188 INCOMING_REGNO gives the corresponding inbound register. */
1189 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1191 /* For each register that may be used for calling a function, this
1192 gives the offset of that register into the block returned by
1193 __builtin_apply_args. 0 indicates that the register is not
1194 used for calling a function. */
1195 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1197 /* Return the size required for the block returned by __builtin_apply_args,
1198 and initialize apply_args_mode. */
1201 apply_args_size (void)
1203 static int size = -1;
1206 enum machine_mode mode;
1208 /* The values computed by this function never change. */
1211 /* The first value is the incoming arg-pointer. */
1212 size = GET_MODE_SIZE (Pmode);
1214 /* The second value is the structure value address unless this is
1215 passed as an "invisible" first argument. */
1216 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1217 size += GET_MODE_SIZE (Pmode);
1219 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1220 if (FUNCTION_ARG_REGNO_P (regno))
1222 mode = reg_raw_mode[regno];
1224 gcc_assert (mode != VOIDmode);
1226 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1227 if (size % align != 0)
1228 size = CEIL (size, align) * align;
1229 apply_args_reg_offset[regno] = size;
1230 size += GET_MODE_SIZE (mode);
1231 apply_args_mode[regno] = mode;
1235 apply_args_mode[regno] = VOIDmode;
1236 apply_args_reg_offset[regno] = 0;
1242 /* Return the size required for the block returned by __builtin_apply,
1243 and initialize apply_result_mode. */
1246 apply_result_size (void)
1248 static int size = -1;
1250 enum machine_mode mode;
1252 /* The values computed by this function never change. */
1257 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1258 if (FUNCTION_VALUE_REGNO_P (regno))
1260 mode = reg_raw_mode[regno];
1262 gcc_assert (mode != VOIDmode);
1264 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1265 if (size % align != 0)
1266 size = CEIL (size, align) * align;
1267 size += GET_MODE_SIZE (mode);
1268 apply_result_mode[regno] = mode;
1271 apply_result_mode[regno] = VOIDmode;
1273 /* Allow targets that use untyped_call and untyped_return to override
1274 the size so that machine-specific information can be stored here. */
1275 #ifdef APPLY_RESULT_SIZE
1276 size = APPLY_RESULT_SIZE;
1282 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1283 /* Create a vector describing the result block RESULT. If SAVEP is true,
1284 the result block is used to save the values; otherwise it is used to
1285 restore the values. */
1288 result_vector (int savep, rtx result)
1290 int regno, size, align, nelts;
1291 enum machine_mode mode;
1293 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1296 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1297 if ((mode = apply_result_mode[regno]) != VOIDmode)
1299 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1300 if (size % align != 0)
1301 size = CEIL (size, align) * align;
1302 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1303 mem = adjust_address (result, mode, size);
1304 savevec[nelts++] = (savep
1305 ? gen_rtx_SET (VOIDmode, mem, reg)
1306 : gen_rtx_SET (VOIDmode, reg, mem));
1307 size += GET_MODE_SIZE (mode);
1309 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1311 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1313 /* Save the state required to perform an untyped call with the same
1314 arguments as were passed to the current function. */
1317 expand_builtin_apply_args_1 (void)
1320 int size, align, regno;
1321 enum machine_mode mode;
1322 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1324 /* Create a block where the arg-pointer, structure value address,
1325 and argument registers can be saved. */
1326 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1328 /* Walk past the arg-pointer and structure value address. */
1329 size = GET_MODE_SIZE (Pmode);
1330 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1331 size += GET_MODE_SIZE (Pmode);
1333 /* Save each register used in calling a function to the block. */
1334 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1335 if ((mode = apply_args_mode[regno]) != VOIDmode)
1337 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1338 if (size % align != 0)
1339 size = CEIL (size, align) * align;
1341 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1343 emit_move_insn (adjust_address (registers, mode, size), tem);
1344 size += GET_MODE_SIZE (mode);
1347 /* Save the arg pointer to the block. */
1348 tem = copy_to_reg (virtual_incoming_args_rtx);
1349 #ifdef STACK_GROWS_DOWNWARD
1350 /* We need the pointer as the caller actually passed them to us, not
1351 as we might have pretended they were passed. Make sure it's a valid
1352 operand, as emit_move_insn isn't expected to handle a PLUS. */
1354 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1357 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1359 size = GET_MODE_SIZE (Pmode);
1361 /* Save the structure value address unless this is passed as an
1362 "invisible" first argument. */
1363 if (struct_incoming_value)
1365 emit_move_insn (adjust_address (registers, Pmode, size),
1366 copy_to_reg (struct_incoming_value));
1367 size += GET_MODE_SIZE (Pmode);
1370 /* Return the address of the block. */
1371 return copy_addr_to_reg (XEXP (registers, 0));
1374 /* __builtin_apply_args returns block of memory allocated on
1375 the stack into which is stored the arg pointer, structure
1376 value address, static chain, and all the registers that might
1377 possibly be used in performing a function call. The code is
1378 moved to the start of the function so the incoming values are
1382 expand_builtin_apply_args (void)
1384 /* Don't do __builtin_apply_args more than once in a function.
1385 Save the result of the first call and reuse it. */
1386 if (apply_args_value != 0)
1387 return apply_args_value;
1389 /* When this function is called, it means that registers must be
1390 saved on entry to this function. So we migrate the
1391 call to the first insn of this function. */
1396 temp = expand_builtin_apply_args_1 ();
1400 apply_args_value = temp;
1402 /* Put the insns after the NOTE that starts the function.
1403 If this is inside a start_sequence, make the outer-level insn
1404 chain current, so the code is placed at the start of the
1406 push_topmost_sequence ();
1407 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1408 pop_topmost_sequence ();
1413 /* Perform an untyped call and save the state required to perform an
1414 untyped return of whatever value was returned by the given function. */
1417 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1419 int size, align, regno;
1420 enum machine_mode mode;
1421 rtx incoming_args, result, reg, dest, src, call_insn;
1422 rtx old_stack_level = 0;
1423 rtx call_fusage = 0;
1424 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1426 arguments = convert_memory_address (Pmode, arguments);
1428 /* Create a block where the return registers can be saved. */
1429 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1431 /* Fetch the arg pointer from the ARGUMENTS block. */
1432 incoming_args = gen_reg_rtx (Pmode);
1433 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1434 #ifndef STACK_GROWS_DOWNWARD
1435 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1436 incoming_args, 0, OPTAB_LIB_WIDEN);
1439 /* Push a new argument block and copy the arguments. Do not allow
1440 the (potential) memcpy call below to interfere with our stack
1442 do_pending_stack_adjust ();
1445 /* Save the stack with nonlocal if available. */
1446 #ifdef HAVE_save_stack_nonlocal
1447 if (HAVE_save_stack_nonlocal)
1448 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1451 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1453 /* Allocate a block of memory onto the stack and copy the memory
1454 arguments to the outgoing arguments address. */
1455 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1456 dest = virtual_outgoing_args_rtx;
1457 #ifndef STACK_GROWS_DOWNWARD
1458 if (GET_CODE (argsize) == CONST_INT)
1459 dest = plus_constant (dest, -INTVAL (argsize));
1461 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1463 dest = gen_rtx_MEM (BLKmode, dest);
1464 set_mem_align (dest, PARM_BOUNDARY);
1465 src = gen_rtx_MEM (BLKmode, incoming_args);
1466 set_mem_align (src, PARM_BOUNDARY);
1467 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1469 /* Refer to the argument block. */
1471 arguments = gen_rtx_MEM (BLKmode, arguments);
1472 set_mem_align (arguments, PARM_BOUNDARY);
1474 /* Walk past the arg-pointer and structure value address. */
1475 size = GET_MODE_SIZE (Pmode);
1477 size += GET_MODE_SIZE (Pmode);
1479 /* Restore each of the registers previously saved. Make USE insns
1480 for each of these registers for use in making the call. */
1481 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1482 if ((mode = apply_args_mode[regno]) != VOIDmode)
1484 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1485 if (size % align != 0)
1486 size = CEIL (size, align) * align;
1487 reg = gen_rtx_REG (mode, regno);
1488 emit_move_insn (reg, adjust_address (arguments, mode, size));
1489 use_reg (&call_fusage, reg);
1490 size += GET_MODE_SIZE (mode);
1493 /* Restore the structure value address unless this is passed as an
1494 "invisible" first argument. */
1495 size = GET_MODE_SIZE (Pmode);
1498 rtx value = gen_reg_rtx (Pmode);
1499 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1500 emit_move_insn (struct_value, value);
1501 if (REG_P (struct_value))
1502 use_reg (&call_fusage, struct_value);
1503 size += GET_MODE_SIZE (Pmode);
1506 /* All arguments and registers used for the call are set up by now! */
1507 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1509 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1510 and we don't want to load it into a register as an optimization,
1511 because prepare_call_address already did it if it should be done. */
1512 if (GET_CODE (function) != SYMBOL_REF)
1513 function = memory_address (FUNCTION_MODE, function);
1515 /* Generate the actual call instruction and save the return value. */
1516 #ifdef HAVE_untyped_call
1517 if (HAVE_untyped_call)
1518 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1519 result, result_vector (1, result)));
1522 #ifdef HAVE_call_value
1523 if (HAVE_call_value)
1527 /* Locate the unique return register. It is not possible to
1528 express a call that sets more than one return register using
1529 call_value; use untyped_call for that. In fact, untyped_call
1530 only needs to save the return registers in the given block. */
1531 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1532 if ((mode = apply_result_mode[regno]) != VOIDmode)
1534 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1536 valreg = gen_rtx_REG (mode, regno);
1539 emit_call_insn (GEN_CALL_VALUE (valreg,
1540 gen_rtx_MEM (FUNCTION_MODE, function),
1541 const0_rtx, NULL_RTX, const0_rtx));
1543 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1549 /* Find the CALL insn we just emitted, and attach the register usage
1551 call_insn = last_call_insn ();
1552 add_function_usage_to (call_insn, call_fusage);
1554 /* Restore the stack. */
1555 #ifdef HAVE_save_stack_nonlocal
1556 if (HAVE_save_stack_nonlocal)
1557 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1560 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1564 /* Return the address of the result block. */
1565 result = copy_addr_to_reg (XEXP (result, 0));
1566 return convert_memory_address (ptr_mode, result);
1569 /* Perform an untyped return. */
1572 expand_builtin_return (rtx result)
1574 int size, align, regno;
1575 enum machine_mode mode;
1577 rtx call_fusage = 0;
1579 result = convert_memory_address (Pmode, result);
1581 apply_result_size ();
1582 result = gen_rtx_MEM (BLKmode, result);
1584 #ifdef HAVE_untyped_return
1585 if (HAVE_untyped_return)
1587 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1593 /* Restore the return value and note that each value is used. */
1595 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1596 if ((mode = apply_result_mode[regno]) != VOIDmode)
1598 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1599 if (size % align != 0)
1600 size = CEIL (size, align) * align;
1601 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1602 emit_move_insn (reg, adjust_address (result, mode, size));
1604 push_to_sequence (call_fusage);
1606 call_fusage = get_insns ();
1608 size += GET_MODE_SIZE (mode);
1611 /* Put the USE insns before the return. */
1612 emit_insn (call_fusage);
1614 /* Return whatever values was restored by jumping directly to the end
1616 expand_naked_return ();
1619 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1621 static enum type_class
1622 type_to_class (tree type)
1624 switch (TREE_CODE (type))
1626 case VOID_TYPE: return void_type_class;
1627 case INTEGER_TYPE: return integer_type_class;
1628 case ENUMERAL_TYPE: return enumeral_type_class;
1629 case BOOLEAN_TYPE: return boolean_type_class;
1630 case POINTER_TYPE: return pointer_type_class;
1631 case REFERENCE_TYPE: return reference_type_class;
1632 case OFFSET_TYPE: return offset_type_class;
1633 case REAL_TYPE: return real_type_class;
1634 case COMPLEX_TYPE: return complex_type_class;
1635 case FUNCTION_TYPE: return function_type_class;
1636 case METHOD_TYPE: return method_type_class;
1637 case RECORD_TYPE: return record_type_class;
1639 case QUAL_UNION_TYPE: return union_type_class;
1640 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1641 ? string_type_class : array_type_class);
1642 case LANG_TYPE: return lang_type_class;
1643 default: return no_type_class;
1647 /* Expand a call EXP to __builtin_classify_type. */
1650 expand_builtin_classify_type (tree exp)
1652 if (call_expr_nargs (exp))
1653 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1654 return GEN_INT (no_type_class);
1657 /* This helper macro, meant to be used in mathfn_built_in below,
1658 determines which among a set of three builtin math functions is
1659 appropriate for a given type mode. The `F' and `L' cases are
1660 automatically generated from the `double' case. */
1661 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1662 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1663 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1664 fcodel = BUILT_IN_MATHFN##L ; break;
1665 /* Similar to above, but appends _R after any F/L suffix. */
1666 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1667 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1668 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1669 fcodel = BUILT_IN_MATHFN##L_R ; break;
1671 /* Return mathematic function equivalent to FN but operating directly
1672 on TYPE, if available. If IMPLICIT is true find the function in
1673 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1674 can't do the conversion, return zero. */
1677 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1679 tree const *const fn_arr
1680 = implicit ? implicit_built_in_decls : built_in_decls;
1681 enum built_in_function fcode, fcodef, fcodel;
1685 CASE_MATHFN (BUILT_IN_ACOS)
1686 CASE_MATHFN (BUILT_IN_ACOSH)
1687 CASE_MATHFN (BUILT_IN_ASIN)
1688 CASE_MATHFN (BUILT_IN_ASINH)
1689 CASE_MATHFN (BUILT_IN_ATAN)
1690 CASE_MATHFN (BUILT_IN_ATAN2)
1691 CASE_MATHFN (BUILT_IN_ATANH)
1692 CASE_MATHFN (BUILT_IN_CBRT)
1693 CASE_MATHFN (BUILT_IN_CEIL)
1694 CASE_MATHFN (BUILT_IN_CEXPI)
1695 CASE_MATHFN (BUILT_IN_COPYSIGN)
1696 CASE_MATHFN (BUILT_IN_COS)
1697 CASE_MATHFN (BUILT_IN_COSH)
1698 CASE_MATHFN (BUILT_IN_DREM)
1699 CASE_MATHFN (BUILT_IN_ERF)
1700 CASE_MATHFN (BUILT_IN_ERFC)
1701 CASE_MATHFN (BUILT_IN_EXP)
1702 CASE_MATHFN (BUILT_IN_EXP10)
1703 CASE_MATHFN (BUILT_IN_EXP2)
1704 CASE_MATHFN (BUILT_IN_EXPM1)
1705 CASE_MATHFN (BUILT_IN_FABS)
1706 CASE_MATHFN (BUILT_IN_FDIM)
1707 CASE_MATHFN (BUILT_IN_FLOOR)
1708 CASE_MATHFN (BUILT_IN_FMA)
1709 CASE_MATHFN (BUILT_IN_FMAX)
1710 CASE_MATHFN (BUILT_IN_FMIN)
1711 CASE_MATHFN (BUILT_IN_FMOD)
1712 CASE_MATHFN (BUILT_IN_FREXP)
1713 CASE_MATHFN (BUILT_IN_GAMMA)
1714 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1715 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1716 CASE_MATHFN (BUILT_IN_HYPOT)
1717 CASE_MATHFN (BUILT_IN_ILOGB)
1718 CASE_MATHFN (BUILT_IN_INF)
1719 CASE_MATHFN (BUILT_IN_ISINF)
1720 CASE_MATHFN (BUILT_IN_J0)
1721 CASE_MATHFN (BUILT_IN_J1)
1722 CASE_MATHFN (BUILT_IN_JN)
1723 CASE_MATHFN (BUILT_IN_LCEIL)
1724 CASE_MATHFN (BUILT_IN_LDEXP)
1725 CASE_MATHFN (BUILT_IN_LFLOOR)
1726 CASE_MATHFN (BUILT_IN_LGAMMA)
1727 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1728 CASE_MATHFN (BUILT_IN_LLCEIL)
1729 CASE_MATHFN (BUILT_IN_LLFLOOR)
1730 CASE_MATHFN (BUILT_IN_LLRINT)
1731 CASE_MATHFN (BUILT_IN_LLROUND)
1732 CASE_MATHFN (BUILT_IN_LOG)
1733 CASE_MATHFN (BUILT_IN_LOG10)
1734 CASE_MATHFN (BUILT_IN_LOG1P)
1735 CASE_MATHFN (BUILT_IN_LOG2)
1736 CASE_MATHFN (BUILT_IN_LOGB)
1737 CASE_MATHFN (BUILT_IN_LRINT)
1738 CASE_MATHFN (BUILT_IN_LROUND)
1739 CASE_MATHFN (BUILT_IN_MODF)
1740 CASE_MATHFN (BUILT_IN_NAN)
1741 CASE_MATHFN (BUILT_IN_NANS)
1742 CASE_MATHFN (BUILT_IN_NEARBYINT)
1743 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1744 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1745 CASE_MATHFN (BUILT_IN_POW)
1746 CASE_MATHFN (BUILT_IN_POWI)
1747 CASE_MATHFN (BUILT_IN_POW10)
1748 CASE_MATHFN (BUILT_IN_REMAINDER)
1749 CASE_MATHFN (BUILT_IN_REMQUO)
1750 CASE_MATHFN (BUILT_IN_RINT)
1751 CASE_MATHFN (BUILT_IN_ROUND)
1752 CASE_MATHFN (BUILT_IN_SCALB)
1753 CASE_MATHFN (BUILT_IN_SCALBLN)
1754 CASE_MATHFN (BUILT_IN_SCALBN)
1755 CASE_MATHFN (BUILT_IN_SIGNBIT)
1756 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1757 CASE_MATHFN (BUILT_IN_SIN)
1758 CASE_MATHFN (BUILT_IN_SINCOS)
1759 CASE_MATHFN (BUILT_IN_SINH)
1760 CASE_MATHFN (BUILT_IN_SQRT)
1761 CASE_MATHFN (BUILT_IN_TAN)
1762 CASE_MATHFN (BUILT_IN_TANH)
1763 CASE_MATHFN (BUILT_IN_TGAMMA)
1764 CASE_MATHFN (BUILT_IN_TRUNC)
1765 CASE_MATHFN (BUILT_IN_Y0)
1766 CASE_MATHFN (BUILT_IN_Y1)
1767 CASE_MATHFN (BUILT_IN_YN)
1773 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1774 return fn_arr[fcode];
1775 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1776 return fn_arr[fcodef];
1777 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1778 return fn_arr[fcodel];
1783 /* Like mathfn_built_in_1(), but always use the implicit array. */
1786 mathfn_built_in (tree type, enum built_in_function fn)
1788 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1791 /* If errno must be maintained, expand the RTL to check if the result,
1792 TARGET, of a built-in function call, EXP, is NaN, and if so set
1796 expand_errno_check (tree exp, rtx target)
1798 rtx lab = gen_label_rtx ();
1800 /* Test the result; if it is NaN, set errno=EDOM because
1801 the argument was not in the domain. */
1802 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1806 /* If this built-in doesn't throw an exception, set errno directly. */
1807 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1809 #ifdef GEN_ERRNO_RTX
1810 rtx errno_rtx = GEN_ERRNO_RTX;
1813 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1815 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1821 /* Make sure the library call isn't expanded as a tail call. */
1822 CALL_EXPR_TAILCALL (exp) = 0;
1824 /* We can't set errno=EDOM directly; let the library call do it.
1825 Pop the arguments right away in case the call gets deleted. */
1827 expand_call (exp, target, 0);
1832 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1833 Return NULL_RTX if a normal call should be emitted rather than expanding
1834 the function in-line. EXP is the expression that is a call to the builtin
1835 function; if convenient, the result should be placed in TARGET.
1836 SUBTARGET may be used as the target for computing one of EXP's operands. */
1839 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1841 optab builtin_optab;
1842 rtx op0, insns, before_call;
1843 tree fndecl = get_callee_fndecl (exp);
1844 enum machine_mode mode;
1845 bool errno_set = false;
1848 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1851 arg = CALL_EXPR_ARG (exp, 0);
1853 switch (DECL_FUNCTION_CODE (fndecl))
1855 CASE_FLT_FN (BUILT_IN_SQRT):
1856 errno_set = ! tree_expr_nonnegative_p (arg);
1857 builtin_optab = sqrt_optab;
1859 CASE_FLT_FN (BUILT_IN_EXP):
1860 errno_set = true; builtin_optab = exp_optab; break;
1861 CASE_FLT_FN (BUILT_IN_EXP10):
1862 CASE_FLT_FN (BUILT_IN_POW10):
1863 errno_set = true; builtin_optab = exp10_optab; break;
1864 CASE_FLT_FN (BUILT_IN_EXP2):
1865 errno_set = true; builtin_optab = exp2_optab; break;
1866 CASE_FLT_FN (BUILT_IN_EXPM1):
1867 errno_set = true; builtin_optab = expm1_optab; break;
1868 CASE_FLT_FN (BUILT_IN_LOGB):
1869 errno_set = true; builtin_optab = logb_optab; break;
1870 CASE_FLT_FN (BUILT_IN_LOG):
1871 errno_set = true; builtin_optab = log_optab; break;
1872 CASE_FLT_FN (BUILT_IN_LOG10):
1873 errno_set = true; builtin_optab = log10_optab; break;
1874 CASE_FLT_FN (BUILT_IN_LOG2):
1875 errno_set = true; builtin_optab = log2_optab; break;
1876 CASE_FLT_FN (BUILT_IN_LOG1P):
1877 errno_set = true; builtin_optab = log1p_optab; break;
1878 CASE_FLT_FN (BUILT_IN_ASIN):
1879 builtin_optab = asin_optab; break;
1880 CASE_FLT_FN (BUILT_IN_ACOS):
1881 builtin_optab = acos_optab; break;
1882 CASE_FLT_FN (BUILT_IN_TAN):
1883 builtin_optab = tan_optab; break;
1884 CASE_FLT_FN (BUILT_IN_ATAN):
1885 builtin_optab = atan_optab; break;
1886 CASE_FLT_FN (BUILT_IN_FLOOR):
1887 builtin_optab = floor_optab; break;
1888 CASE_FLT_FN (BUILT_IN_CEIL):
1889 builtin_optab = ceil_optab; break;
1890 CASE_FLT_FN (BUILT_IN_TRUNC):
1891 builtin_optab = btrunc_optab; break;
1892 CASE_FLT_FN (BUILT_IN_ROUND):
1893 builtin_optab = round_optab; break;
1894 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1895 builtin_optab = nearbyint_optab;
1896 if (flag_trapping_math)
1898 /* Else fallthrough and expand as rint. */
1899 CASE_FLT_FN (BUILT_IN_RINT):
1900 builtin_optab = rint_optab; break;
1905 /* Make a suitable register to place result in. */
1906 mode = TYPE_MODE (TREE_TYPE (exp));
1908 if (! flag_errno_math || ! HONOR_NANS (mode))
1911 /* Before working hard, check whether the instruction is available. */
1912 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1914 target = gen_reg_rtx (mode);
1916 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1917 need to expand the argument again. This way, we will not perform
1918 side-effects more the once. */
1919 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1921 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1925 /* Compute into TARGET.
1926 Set TARGET to wherever the result comes back. */
1927 target = expand_unop (mode, builtin_optab, op0, target, 0);
1932 expand_errno_check (exp, target);
1934 /* Output the entire sequence. */
1935 insns = get_insns ();
1941 /* If we were unable to expand via the builtin, stop the sequence
1942 (without outputting the insns) and call to the library function
1943 with the stabilized argument list. */
1947 before_call = get_last_insn ();
1949 return expand_call (exp, target, target == const0_rtx);
1952 /* Expand a call to the builtin binary math functions (pow and atan2).
1953 Return NULL_RTX if a normal call should be emitted rather than expanding the
1954 function in-line. EXP is the expression that is a call to the builtin
1955 function; if convenient, the result should be placed in TARGET.
1956 SUBTARGET may be used as the target for computing one of EXP's
1960 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1962 optab builtin_optab;
1963 rtx op0, op1, insns;
1964 int op1_type = REAL_TYPE;
1965 tree fndecl = get_callee_fndecl (exp);
1967 enum machine_mode mode;
1968 bool errno_set = true;
1970 switch (DECL_FUNCTION_CODE (fndecl))
1972 CASE_FLT_FN (BUILT_IN_SCALBN):
1973 CASE_FLT_FN (BUILT_IN_SCALBLN):
1974 CASE_FLT_FN (BUILT_IN_LDEXP):
1975 op1_type = INTEGER_TYPE;
1980 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
1983 arg0 = CALL_EXPR_ARG (exp, 0);
1984 arg1 = CALL_EXPR_ARG (exp, 1);
1986 switch (DECL_FUNCTION_CODE (fndecl))
1988 CASE_FLT_FN (BUILT_IN_POW):
1989 builtin_optab = pow_optab; break;
1990 CASE_FLT_FN (BUILT_IN_ATAN2):
1991 builtin_optab = atan2_optab; break;
1992 CASE_FLT_FN (BUILT_IN_SCALB):
1993 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
1995 builtin_optab = scalb_optab; break;
1996 CASE_FLT_FN (BUILT_IN_SCALBN):
1997 CASE_FLT_FN (BUILT_IN_SCALBLN):
1998 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2000 /* Fall through... */
2001 CASE_FLT_FN (BUILT_IN_LDEXP):
2002 builtin_optab = ldexp_optab; break;
2003 CASE_FLT_FN (BUILT_IN_FMOD):
2004 builtin_optab = fmod_optab; break;
2005 CASE_FLT_FN (BUILT_IN_REMAINDER):
2006 CASE_FLT_FN (BUILT_IN_DREM):
2007 builtin_optab = remainder_optab; break;
2012 /* Make a suitable register to place result in. */
2013 mode = TYPE_MODE (TREE_TYPE (exp));
2015 /* Before working hard, check whether the instruction is available. */
2016 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2019 target = gen_reg_rtx (mode);
2021 if (! flag_errno_math || ! HONOR_NANS (mode))
2024 /* Always stabilize the argument list. */
2025 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2026 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2028 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2029 op1 = expand_normal (arg1);
2033 /* Compute into TARGET.
2034 Set TARGET to wherever the result comes back. */
2035 target = expand_binop (mode, builtin_optab, op0, op1,
2036 target, 0, OPTAB_DIRECT);
2038 /* If we were unable to expand via the builtin, stop the sequence
2039 (without outputting the insns) and call to the library function
2040 with the stabilized argument list. */
2044 return expand_call (exp, target, target == const0_rtx);
2048 expand_errno_check (exp, target);
2050 /* Output the entire sequence. */
2051 insns = get_insns ();
2058 /* Expand a call to the builtin sin and cos math functions.
2059 Return NULL_RTX if a normal call should be emitted rather than expanding the
2060 function in-line. EXP is the expression that is a call to the builtin
2061 function; if convenient, the result should be placed in TARGET.
2062 SUBTARGET may be used as the target for computing one of EXP's
2066 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2068 optab builtin_optab;
2070 tree fndecl = get_callee_fndecl (exp);
2071 enum machine_mode mode;
2074 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2077 arg = CALL_EXPR_ARG (exp, 0);
2079 switch (DECL_FUNCTION_CODE (fndecl))
2081 CASE_FLT_FN (BUILT_IN_SIN):
2082 CASE_FLT_FN (BUILT_IN_COS):
2083 builtin_optab = sincos_optab; break;
2088 /* Make a suitable register to place result in. */
2089 mode = TYPE_MODE (TREE_TYPE (exp));
2091 /* Check if sincos insn is available, otherwise fallback
2092 to sin or cos insn. */
2093 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2094 switch (DECL_FUNCTION_CODE (fndecl))
2096 CASE_FLT_FN (BUILT_IN_SIN):
2097 builtin_optab = sin_optab; break;
2098 CASE_FLT_FN (BUILT_IN_COS):
2099 builtin_optab = cos_optab; break;
2104 /* Before working hard, check whether the instruction is available. */
2105 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2107 target = gen_reg_rtx (mode);
2109 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2110 need to expand the argument again. This way, we will not perform
2111 side-effects more the once. */
2112 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2114 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2118 /* Compute into TARGET.
2119 Set TARGET to wherever the result comes back. */
2120 if (builtin_optab == sincos_optab)
2124 switch (DECL_FUNCTION_CODE (fndecl))
2126 CASE_FLT_FN (BUILT_IN_SIN):
2127 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2129 CASE_FLT_FN (BUILT_IN_COS):
2130 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2135 gcc_assert (result);
2139 target = expand_unop (mode, builtin_optab, op0, target, 0);
2144 /* Output the entire sequence. */
2145 insns = get_insns ();
2151 /* If we were unable to expand via the builtin, stop the sequence
2152 (without outputting the insns) and call to the library function
2153 with the stabilized argument list. */
2157 target = expand_call (exp, target, target == const0_rtx);
2162 /* Expand a call to one of the builtin math functions that operate on
2163 floating point argument and output an integer result (ilogb, isinf,
2165 Return 0 if a normal call should be emitted rather than expanding the
2166 function in-line. EXP is the expression that is a call to the builtin
2167 function; if convenient, the result should be placed in TARGET.
2168 SUBTARGET may be used as the target for computing one of EXP's operands. */
2171 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2173 optab builtin_optab = 0;
2174 enum insn_code icode = CODE_FOR_nothing;
2176 tree fndecl = get_callee_fndecl (exp);
2177 enum machine_mode mode;
2178 bool errno_set = false;
2181 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2184 arg = CALL_EXPR_ARG (exp, 0);
2186 switch (DECL_FUNCTION_CODE (fndecl))
2188 CASE_FLT_FN (BUILT_IN_ILOGB):
2189 errno_set = true; builtin_optab = ilogb_optab; break;
2190 CASE_FLT_FN (BUILT_IN_ISINF):
2191 builtin_optab = isinf_optab; break;
2192 case BUILT_IN_ISNORMAL:
2193 case BUILT_IN_ISFINITE:
2194 CASE_FLT_FN (BUILT_IN_FINITE):
2195 /* These builtins have no optabs (yet). */
2201 /* There's no easy way to detect the case we need to set EDOM. */
2202 if (flag_errno_math && errno_set)
2205 /* Optab mode depends on the mode of the input argument. */
2206 mode = TYPE_MODE (TREE_TYPE (arg));
2209 icode = optab_handler (builtin_optab, mode)->insn_code;
2211 /* Before working hard, check whether the instruction is available. */
2212 if (icode != CODE_FOR_nothing)
2214 /* Make a suitable register to place result in. */
2216 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2217 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2219 gcc_assert (insn_data[icode].operand[0].predicate
2220 (target, GET_MODE (target)));
2222 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2223 need to expand the argument again. This way, we will not perform
2224 side-effects more the once. */
2225 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2227 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2229 if (mode != GET_MODE (op0))
2230 op0 = convert_to_mode (mode, op0, 0);
2232 /* Compute into TARGET.
2233 Set TARGET to wherever the result comes back. */
2234 emit_unop_insn (icode, target, op0, UNKNOWN);
2238 /* If there is no optab, try generic code. */
2239 switch (DECL_FUNCTION_CODE (fndecl))
2243 CASE_FLT_FN (BUILT_IN_ISINF):
2245 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2246 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2247 tree const type = TREE_TYPE (arg);
2251 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2252 real_from_string (&r, buf);
2253 result = build_call_expr (isgr_fn, 2,
2254 fold_build1 (ABS_EXPR, type, arg),
2255 build_real (type, r));
2256 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2258 CASE_FLT_FN (BUILT_IN_FINITE):
2259 case BUILT_IN_ISFINITE:
2261 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2262 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2263 tree const type = TREE_TYPE (arg);
2267 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2268 real_from_string (&r, buf);
2269 result = build_call_expr (isle_fn, 2,
2270 fold_build1 (ABS_EXPR, type, arg),
2271 build_real (type, r));
2272 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2274 case BUILT_IN_ISNORMAL:
2276 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2277 islessequal(fabs(x),DBL_MAX). */
2278 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2279 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2280 tree const type = TREE_TYPE (arg);
2281 REAL_VALUE_TYPE rmax, rmin;
2284 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2285 real_from_string (&rmax, buf);
2286 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2287 real_from_string (&rmin, buf);
2288 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2289 result = build_call_expr (isle_fn, 2, arg,
2290 build_real (type, rmax));
2291 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2292 build_call_expr (isge_fn, 2, arg,
2293 build_real (type, rmin)));
2294 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2300 target = expand_call (exp, target, target == const0_rtx);
2305 /* Expand a call to the builtin sincos math function.
2306 Return NULL_RTX if a normal call should be emitted rather than expanding the
2307 function in-line. EXP is the expression that is a call to the builtin
2311 expand_builtin_sincos (tree exp)
2313 rtx op0, op1, op2, target1, target2;
2314 enum machine_mode mode;
2315 tree arg, sinp, cosp;
2318 if (!validate_arglist (exp, REAL_TYPE,
2319 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2322 arg = CALL_EXPR_ARG (exp, 0);
2323 sinp = CALL_EXPR_ARG (exp, 1);
2324 cosp = CALL_EXPR_ARG (exp, 2);
2326 /* Make a suitable register to place result in. */
2327 mode = TYPE_MODE (TREE_TYPE (arg));
2329 /* Check if sincos insn is available, otherwise emit the call. */
2330 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2333 target1 = gen_reg_rtx (mode);
2334 target2 = gen_reg_rtx (mode);
2336 op0 = expand_normal (arg);
2337 op1 = expand_normal (build_fold_indirect_ref (sinp));
2338 op2 = expand_normal (build_fold_indirect_ref (cosp));
2340 /* Compute into target1 and target2.
2341 Set TARGET to wherever the result comes back. */
2342 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2343 gcc_assert (result);
2345 /* Move target1 and target2 to the memory locations indicated
2347 emit_move_insn (op1, target1);
2348 emit_move_insn (op2, target2);
2353 /* Expand a call to the internal cexpi builtin to the sincos math function.
2354 EXP is the expression that is a call to the builtin function; if convenient,
2355 the result should be placed in TARGET. SUBTARGET may be used as the target
2356 for computing one of EXP's operands. */
2359 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2361 tree fndecl = get_callee_fndecl (exp);
2363 enum machine_mode mode;
2366 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2369 arg = CALL_EXPR_ARG (exp, 0);
2370 type = TREE_TYPE (arg);
2371 mode = TYPE_MODE (TREE_TYPE (arg));
2373 /* Try expanding via a sincos optab, fall back to emitting a libcall
2374 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2375 is only generated from sincos, cexp or if we have either of them. */
2376 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2378 op1 = gen_reg_rtx (mode);
2379 op2 = gen_reg_rtx (mode);
2381 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2383 /* Compute into op1 and op2. */
2384 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2386 else if (TARGET_HAS_SINCOS)
2388 tree call, fn = NULL_TREE;
2392 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2393 fn = built_in_decls[BUILT_IN_SINCOSF];
2394 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2395 fn = built_in_decls[BUILT_IN_SINCOS];
2396 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2397 fn = built_in_decls[BUILT_IN_SINCOSL];
2401 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2402 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2403 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2404 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2405 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2406 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2408 /* Make sure not to fold the sincos call again. */
2409 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2410 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2411 call, 3, arg, top1, top2));
2415 tree call, fn = NULL_TREE, narg;
2416 tree ctype = build_complex_type (type);
2418 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2419 fn = built_in_decls[BUILT_IN_CEXPF];
2420 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2421 fn = built_in_decls[BUILT_IN_CEXP];
2422 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2423 fn = built_in_decls[BUILT_IN_CEXPL];
2427 /* If we don't have a decl for cexp create one. This is the
2428 friendliest fallback if the user calls __builtin_cexpi
2429 without full target C99 function support. */
2430 if (fn == NULL_TREE)
2433 const char *name = NULL;
2435 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2437 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2439 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2442 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2443 fn = build_fn_decl (name, fntype);
2446 narg = fold_build2 (COMPLEX_EXPR, ctype,
2447 build_real (type, dconst0), arg);
2449 /* Make sure not to fold the cexp call again. */
2450 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2451 return expand_expr (build_call_nary (ctype, call, 1, narg),
2452 target, VOIDmode, EXPAND_NORMAL);
2455 /* Now build the proper return type. */
2456 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2457 make_tree (TREE_TYPE (arg), op2),
2458 make_tree (TREE_TYPE (arg), op1)),
2459 target, VOIDmode, EXPAND_NORMAL);
2462 /* Expand a call to one of the builtin rounding functions gcc defines
2463 as an extension (lfloor and lceil). As these are gcc extensions we
2464 do not need to worry about setting errno to EDOM.
2465 If expanding via optab fails, lower expression to (int)(floor(x)).
2466 EXP is the expression that is a call to the builtin function;
2467 if convenient, the result should be placed in TARGET. */
2470 expand_builtin_int_roundingfn (tree exp, rtx target)
2472 convert_optab builtin_optab;
2473 rtx op0, insns, tmp;
2474 tree fndecl = get_callee_fndecl (exp);
2475 enum built_in_function fallback_fn;
2476 tree fallback_fndecl;
2477 enum machine_mode mode;
2480 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2483 arg = CALL_EXPR_ARG (exp, 0);
2485 switch (DECL_FUNCTION_CODE (fndecl))
2487 CASE_FLT_FN (BUILT_IN_LCEIL):
2488 CASE_FLT_FN (BUILT_IN_LLCEIL):
2489 builtin_optab = lceil_optab;
2490 fallback_fn = BUILT_IN_CEIL;
2493 CASE_FLT_FN (BUILT_IN_LFLOOR):
2494 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2495 builtin_optab = lfloor_optab;
2496 fallback_fn = BUILT_IN_FLOOR;
2503 /* Make a suitable register to place result in. */
2504 mode = TYPE_MODE (TREE_TYPE (exp));
2506 target = gen_reg_rtx (mode);
2508 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2509 need to expand the argument again. This way, we will not perform
2510 side-effects more the once. */
2511 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2513 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2517 /* Compute into TARGET. */
2518 if (expand_sfix_optab (target, op0, builtin_optab))
2520 /* Output the entire sequence. */
2521 insns = get_insns ();
2527 /* If we were unable to expand via the builtin, stop the sequence
2528 (without outputting the insns). */
2531 /* Fall back to floating point rounding optab. */
2532 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2534 /* For non-C99 targets we may end up without a fallback fndecl here
2535 if the user called __builtin_lfloor directly. In this case emit
2536 a call to the floor/ceil variants nevertheless. This should result
2537 in the best user experience for not full C99 targets. */
2538 if (fallback_fndecl == NULL_TREE)
2541 const char *name = NULL;
2543 switch (DECL_FUNCTION_CODE (fndecl))
2545 case BUILT_IN_LCEIL:
2546 case BUILT_IN_LLCEIL:
2549 case BUILT_IN_LCEILF:
2550 case BUILT_IN_LLCEILF:
2553 case BUILT_IN_LCEILL:
2554 case BUILT_IN_LLCEILL:
2557 case BUILT_IN_LFLOOR:
2558 case BUILT_IN_LLFLOOR:
2561 case BUILT_IN_LFLOORF:
2562 case BUILT_IN_LLFLOORF:
2565 case BUILT_IN_LFLOORL:
2566 case BUILT_IN_LLFLOORL:
2573 fntype = build_function_type_list (TREE_TYPE (arg),
2574 TREE_TYPE (arg), NULL_TREE);
2575 fallback_fndecl = build_fn_decl (name, fntype);
2578 exp = build_call_expr (fallback_fndecl, 1, arg);
2580 tmp = expand_normal (exp);
2582 /* Truncate the result of floating point optab to integer
2583 via expand_fix (). */
2584 target = gen_reg_rtx (mode);
2585 expand_fix (target, tmp, 0);
2590 /* Expand a call to one of the builtin math functions doing integer
2592 Return 0 if a normal call should be emitted rather than expanding the
2593 function in-line. EXP is the expression that is a call to the builtin
2594 function; if convenient, the result should be placed in TARGET. */
2597 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2599 convert_optab builtin_optab;
2601 tree fndecl = get_callee_fndecl (exp);
2603 enum machine_mode mode;
2605 /* There's no easy way to detect the case we need to set EDOM. */
2606 if (flag_errno_math)
2609 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2612 arg = CALL_EXPR_ARG (exp, 0);
2614 switch (DECL_FUNCTION_CODE (fndecl))
2616 CASE_FLT_FN (BUILT_IN_LRINT):
2617 CASE_FLT_FN (BUILT_IN_LLRINT):
2618 builtin_optab = lrint_optab; break;
2619 CASE_FLT_FN (BUILT_IN_LROUND):
2620 CASE_FLT_FN (BUILT_IN_LLROUND):
2621 builtin_optab = lround_optab; break;
2626 /* Make a suitable register to place result in. */
2627 mode = TYPE_MODE (TREE_TYPE (exp));
2629 target = gen_reg_rtx (mode);
2631 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2632 need to expand the argument again. This way, we will not perform
2633 side-effects more the once. */
2634 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2636 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2640 if (expand_sfix_optab (target, op0, builtin_optab))
2642 /* Output the entire sequence. */
2643 insns = get_insns ();
2649 /* If we were unable to expand via the builtin, stop the sequence
2650 (without outputting the insns) and call to the library function
2651 with the stabilized argument list. */
2654 target = expand_call (exp, target, target == const0_rtx);
2659 /* To evaluate powi(x,n), the floating point value x raised to the
2660 constant integer exponent n, we use a hybrid algorithm that
2661 combines the "window method" with look-up tables. For an
2662 introduction to exponentiation algorithms and "addition chains",
2663 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2664 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2665 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2666 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2668 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2669 multiplications to inline before calling the system library's pow
2670 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2671 so this default never requires calling pow, powf or powl. */
2673 #ifndef POWI_MAX_MULTS
2674 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2677 /* The size of the "optimal power tree" lookup table. All
2678 exponents less than this value are simply looked up in the
2679 powi_table below. This threshold is also used to size the
2680 cache of pseudo registers that hold intermediate results. */
2681 #define POWI_TABLE_SIZE 256
2683 /* The size, in bits of the window, used in the "window method"
2684 exponentiation algorithm. This is equivalent to a radix of
2685 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2686 #define POWI_WINDOW_SIZE 3
2688 /* The following table is an efficient representation of an
2689 "optimal power tree". For each value, i, the corresponding
2690 value, j, in the table states than an optimal evaluation
2691 sequence for calculating pow(x,i) can be found by evaluating
2692 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2693 100 integers is given in Knuth's "Seminumerical algorithms". */
2695 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2697 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2698 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2699 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2700 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2701 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2702 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2703 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2704 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2705 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2706 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2707 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2708 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2709 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2710 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2711 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2712 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2713 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2714 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2715 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2716 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2717 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2718 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2719 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2720 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2721 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2722 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2723 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2724 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2725 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2726 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2727 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2728 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2732 /* Return the number of multiplications required to calculate
2733 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2734 subroutine of powi_cost. CACHE is an array indicating
2735 which exponents have already been calculated. */
2738 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2740 /* If we've already calculated this exponent, then this evaluation
2741 doesn't require any additional multiplications. */
2746 return powi_lookup_cost (n - powi_table[n], cache)
2747 + powi_lookup_cost (powi_table[n], cache) + 1;
2750 /* Return the number of multiplications required to calculate
2751 powi(x,n) for an arbitrary x, given the exponent N. This
2752 function needs to be kept in sync with expand_powi below. */
2755 powi_cost (HOST_WIDE_INT n)
2757 bool cache[POWI_TABLE_SIZE];
2758 unsigned HOST_WIDE_INT digit;
2759 unsigned HOST_WIDE_INT val;
2765 /* Ignore the reciprocal when calculating the cost. */
2766 val = (n < 0) ? -n : n;
2768 /* Initialize the exponent cache. */
2769 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2774 while (val >= POWI_TABLE_SIZE)
2778 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2779 result += powi_lookup_cost (digit, cache)
2780 + POWI_WINDOW_SIZE + 1;
2781 val >>= POWI_WINDOW_SIZE;
2790 return result + powi_lookup_cost (val, cache);
2793 /* Recursive subroutine of expand_powi. This function takes the array,
2794 CACHE, of already calculated exponents and an exponent N and returns
2795 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2798 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2800 unsigned HOST_WIDE_INT digit;
2804 if (n < POWI_TABLE_SIZE)
2809 target = gen_reg_rtx (mode);
2812 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2813 op1 = expand_powi_1 (mode, powi_table[n], cache);
2817 target = gen_reg_rtx (mode);
2818 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2819 op0 = expand_powi_1 (mode, n - digit, cache);
2820 op1 = expand_powi_1 (mode, digit, cache);
2824 target = gen_reg_rtx (mode);
2825 op0 = expand_powi_1 (mode, n >> 1, cache);
2829 result = expand_mult (mode, op0, op1, target, 0);
2830 if (result != target)
2831 emit_move_insn (target, result);
2835 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2836 floating point operand in mode MODE, and N is the exponent. This
2837 function needs to be kept in sync with powi_cost above. */
2840 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2842 unsigned HOST_WIDE_INT val;
2843 rtx cache[POWI_TABLE_SIZE];
2847 return CONST1_RTX (mode);
2849 val = (n < 0) ? -n : n;
2851 memset (cache, 0, sizeof (cache));
2854 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2856 /* If the original exponent was negative, reciprocate the result. */
2858 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2859 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2864 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2865 a normal call should be emitted rather than expanding the function
2866 in-line. EXP is the expression that is a call to the builtin
2867 function; if convenient, the result should be placed in TARGET. */
2870 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2874 tree type = TREE_TYPE (exp);
2875 REAL_VALUE_TYPE cint, c, c2;
2878 enum machine_mode mode = TYPE_MODE (type);
2880 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2883 arg0 = CALL_EXPR_ARG (exp, 0);
2884 arg1 = CALL_EXPR_ARG (exp, 1);
2886 if (TREE_CODE (arg1) != REAL_CST
2887 || TREE_OVERFLOW (arg1))
2888 return expand_builtin_mathfn_2 (exp, target, subtarget);
2890 /* Handle constant exponents. */
2892 /* For integer valued exponents we can expand to an optimal multiplication
2893 sequence using expand_powi. */
2894 c = TREE_REAL_CST (arg1);
2895 n = real_to_integer (&c);
2896 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2897 if (real_identical (&c, &cint)
2898 && ((n >= -1 && n <= 2)
2899 || (flag_unsafe_math_optimizations
2901 && powi_cost (n) <= POWI_MAX_MULTS)))
2903 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2906 op = force_reg (mode, op);
2907 op = expand_powi (op, mode, n);
2912 narg0 = builtin_save_expr (arg0);
2914 /* If the exponent is not integer valued, check if it is half of an integer.
2915 In this case we can expand to sqrt (x) * x**(n/2). */
2916 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2917 if (fn != NULL_TREE)
2919 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2920 n = real_to_integer (&c2);
2921 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2922 if (real_identical (&c2, &cint)
2923 && ((flag_unsafe_math_optimizations
2925 && powi_cost (n/2) <= POWI_MAX_MULTS)
2928 tree call_expr = build_call_expr (fn, 1, narg0);
2929 /* Use expand_expr in case the newly built call expression
2930 was folded to a non-call. */
2931 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2934 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2935 op2 = force_reg (mode, op2);
2936 op2 = expand_powi (op2, mode, abs (n / 2));
2937 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2938 0, OPTAB_LIB_WIDEN);
2939 /* If the original exponent was negative, reciprocate the
2942 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2943 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2949 /* Try if the exponent is a third of an integer. In this case
2950 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2951 different from pow (x, 1./3.) due to rounding and behavior
2952 with negative x we need to constrain this transformation to
2953 unsafe math and positive x or finite math. */
2954 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2956 && flag_unsafe_math_optimizations
2957 && (tree_expr_nonnegative_p (arg0)
2958 || !HONOR_NANS (mode)))
2960 REAL_VALUE_TYPE dconst3;
2961 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
2962 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2963 real_round (&c2, mode, &c2);
2964 n = real_to_integer (&c2);
2965 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2966 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
2967 real_convert (&c2, mode, &c2);
2968 if (real_identical (&c2, &c)
2970 && powi_cost (n/3) <= POWI_MAX_MULTS)
2973 tree call_expr = build_call_expr (fn, 1,narg0);
2974 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2975 if (abs (n) % 3 == 2)
2976 op = expand_simple_binop (mode, MULT, op, op, op,
2977 0, OPTAB_LIB_WIDEN);
2980 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2981 op2 = force_reg (mode, op2);
2982 op2 = expand_powi (op2, mode, abs (n / 3));
2983 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2984 0, OPTAB_LIB_WIDEN);
2985 /* If the original exponent was negative, reciprocate the
2988 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2989 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2995 /* Fall back to optab expansion. */
2996 return expand_builtin_mathfn_2 (exp, target, subtarget);
2999 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3000 a normal call should be emitted rather than expanding the function
3001 in-line. EXP is the expression that is a call to the builtin
3002 function; if convenient, the result should be placed in TARGET. */
3005 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3009 enum machine_mode mode;
3010 enum machine_mode mode2;
3012 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3015 arg0 = CALL_EXPR_ARG (exp, 0);
3016 arg1 = CALL_EXPR_ARG (exp, 1);
3017 mode = TYPE_MODE (TREE_TYPE (exp));
3019 /* Handle constant power. */
3021 if (TREE_CODE (arg1) == INTEGER_CST
3022 && !TREE_OVERFLOW (arg1))
3024 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3026 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3027 Otherwise, check the number of multiplications required. */
3028 if ((TREE_INT_CST_HIGH (arg1) == 0
3029 || TREE_INT_CST_HIGH (arg1) == -1)
3030 && ((n >= -1 && n <= 2)
3032 && powi_cost (n) <= POWI_MAX_MULTS)))
3034 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3035 op0 = force_reg (mode, op0);
3036 return expand_powi (op0, mode, n);
3040 /* Emit a libcall to libgcc. */
3042 /* Mode of the 2nd argument must match that of an int. */
3043 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3045 if (target == NULL_RTX)
3046 target = gen_reg_rtx (mode);
3048 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3049 if (GET_MODE (op0) != mode)
3050 op0 = convert_to_mode (mode, op0, 0);
3051 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3052 if (GET_MODE (op1) != mode2)
3053 op1 = convert_to_mode (mode2, op1, 0);
3055 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3056 target, LCT_CONST, mode, 2,
3057 op0, mode, op1, mode2);
3062 /* Expand expression EXP which is a call to the strlen builtin. Return
3063 NULL_RTX if we failed the caller should emit a normal call, otherwise
3064 try to get the result in TARGET, if convenient. */
3067 expand_builtin_strlen (tree exp, rtx target,
3068 enum machine_mode target_mode)
3070 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3076 tree src = CALL_EXPR_ARG (exp, 0);
3077 rtx result, src_reg, char_rtx, before_strlen;
3078 enum machine_mode insn_mode = target_mode, char_mode;
3079 enum insn_code icode = CODE_FOR_nothing;
3082 /* If the length can be computed at compile-time, return it. */
3083 len = c_strlen (src, 0);
3085 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3087 /* If the length can be computed at compile-time and is constant
3088 integer, but there are side-effects in src, evaluate
3089 src for side-effects, then return len.
3090 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3091 can be optimized into: i++; x = 3; */
3092 len = c_strlen (src, 1);
3093 if (len && TREE_CODE (len) == INTEGER_CST)
3095 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3096 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3099 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3101 /* If SRC is not a pointer type, don't do this operation inline. */
3105 /* Bail out if we can't compute strlen in the right mode. */
3106 while (insn_mode != VOIDmode)
3108 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3109 if (icode != CODE_FOR_nothing)
3112 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3114 if (insn_mode == VOIDmode)
3117 /* Make a place to write the result of the instruction. */
3121 && GET_MODE (result) == insn_mode
3122 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3123 result = gen_reg_rtx (insn_mode);
3125 /* Make a place to hold the source address. We will not expand
3126 the actual source until we are sure that the expansion will
3127 not fail -- there are trees that cannot be expanded twice. */
3128 src_reg = gen_reg_rtx (Pmode);
3130 /* Mark the beginning of the strlen sequence so we can emit the
3131 source operand later. */
3132 before_strlen = get_last_insn ();
3134 char_rtx = const0_rtx;
3135 char_mode = insn_data[(int) icode].operand[2].mode;
3136 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3138 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3140 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3141 char_rtx, GEN_INT (align));
3146 /* Now that we are assured of success, expand the source. */
3148 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3150 emit_move_insn (src_reg, pat);
3155 emit_insn_after (pat, before_strlen);
3157 emit_insn_before (pat, get_insns ());
3159 /* Return the value in the proper mode for this function. */
3160 if (GET_MODE (result) == target_mode)
3162 else if (target != 0)
3163 convert_move (target, result, 0);
3165 target = convert_to_mode (target_mode, result, 0);
3171 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3172 caller should emit a normal call, otherwise try to get the result
3173 in TARGET, if convenient (and in mode MODE if that's convenient). */
3176 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3178 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3180 tree type = TREE_TYPE (exp);
3181 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3182 CALL_EXPR_ARG (exp, 1), type);
3184 return expand_expr (result, target, mode, EXPAND_NORMAL);
3189 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3190 caller should emit a normal call, otherwise try to get the result
3191 in TARGET, if convenient (and in mode MODE if that's convenient). */
3194 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3196 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3198 tree type = TREE_TYPE (exp);
3199 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3200 CALL_EXPR_ARG (exp, 1), type);
3202 return expand_expr (result, target, mode, EXPAND_NORMAL);
3204 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3209 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3210 caller should emit a normal call, otherwise try to get the result
3211 in TARGET, if convenient (and in mode MODE if that's convenient). */
3214 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3216 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3218 tree type = TREE_TYPE (exp);
3219 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3220 CALL_EXPR_ARG (exp, 1), type);
3222 return expand_expr (result, target, mode, EXPAND_NORMAL);
3227 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3228 caller should emit a normal call, otherwise try to get the result
3229 in TARGET, if convenient (and in mode MODE if that's convenient). */
3232 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3234 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3236 tree type = TREE_TYPE (exp);
3237 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3238 CALL_EXPR_ARG (exp, 1), type);
3240 return expand_expr (result, target, mode, EXPAND_NORMAL);
3245 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3246 bytes from constant string DATA + OFFSET and return it as target
3250 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3251 enum machine_mode mode)
3253 const char *str = (const char *) data;
3255 gcc_assert (offset >= 0
3256 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3257 <= strlen (str) + 1));
3259 return c_readstr (str + offset, mode);
3262 /* Expand a call EXP to the memcpy builtin.
3263 Return NULL_RTX if we failed, the caller should emit a normal call,
3264 otherwise try to get the result in TARGET, if convenient (and in
3265 mode MODE if that's convenient). */
3268 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3270 tree fndecl = get_callee_fndecl (exp);
3272 if (!validate_arglist (exp,
3273 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3277 tree dest = CALL_EXPR_ARG (exp, 0);
3278 tree src = CALL_EXPR_ARG (exp, 1);
3279 tree len = CALL_EXPR_ARG (exp, 2);
3280 const char *src_str;
3281 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3282 unsigned int dest_align
3283 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3284 rtx dest_mem, src_mem, dest_addr, len_rtx;
3285 tree result = fold_builtin_memory_op (dest, src, len,
3286 TREE_TYPE (TREE_TYPE (fndecl)),
3288 HOST_WIDE_INT expected_size = -1;
3289 unsigned int expected_align = 0;
3293 while (TREE_CODE (result) == COMPOUND_EXPR)
3295 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3297 result = TREE_OPERAND (result, 1);
3299 return expand_expr (result, target, mode, EXPAND_NORMAL);
3302 /* If DEST is not a pointer type, call the normal function. */
3303 if (dest_align == 0)
3306 /* If either SRC is not a pointer type, don't do this
3307 operation in-line. */
3311 stringop_block_profile (exp, &expected_align, &expected_size);
3312 if (expected_align < dest_align)
3313 expected_align = dest_align;
3314 dest_mem = get_memory_rtx (dest, len);
3315 set_mem_align (dest_mem, dest_align);
3316 len_rtx = expand_normal (len);
3317 src_str = c_getstr (src);
3319 /* If SRC is a string constant and block move would be done
3320 by pieces, we can avoid loading the string from memory
3321 and only stored the computed constants. */
3323 && GET_CODE (len_rtx) == CONST_INT
3324 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3325 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3326 CONST_CAST (char *, src_str),
3329 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3330 builtin_memcpy_read_str,
3331 CONST_CAST (char *, src_str),
3332 dest_align, false, 0);
3333 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3334 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3338 src_mem = get_memory_rtx (src, len);
3339 set_mem_align (src_mem, src_align);
3341 /* Copy word part most expediently. */
3342 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3343 CALL_EXPR_TAILCALL (exp)
3344 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3345 expected_align, expected_size);
3349 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3350 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3356 /* Expand a call EXP to the mempcpy builtin.
3357 Return NULL_RTX if we failed; the caller should emit a normal call,
3358 otherwise try to get the result in TARGET, if convenient (and in
3359 mode MODE if that's convenient). If ENDP is 0 return the
3360 destination pointer, if ENDP is 1 return the end pointer ala
3361 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3365 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3367 if (!validate_arglist (exp,
3368 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3372 tree dest = CALL_EXPR_ARG (exp, 0);
3373 tree src = CALL_EXPR_ARG (exp, 1);
3374 tree len = CALL_EXPR_ARG (exp, 2);
3375 return expand_builtin_mempcpy_args (dest, src, len,
3377 target, mode, /*endp=*/ 1);
3381 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3382 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3383 so that this can also be called without constructing an actual CALL_EXPR.
3384 TYPE is the return type of the call. The other arguments and return value
3385 are the same as for expand_builtin_mempcpy. */
3388 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3389 rtx target, enum machine_mode mode, int endp)
3391 /* If return value is ignored, transform mempcpy into memcpy. */
3392 if (target == const0_rtx)
3394 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3399 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3400 target, mode, EXPAND_NORMAL);
3404 const char *src_str;
3405 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3406 unsigned int dest_align
3407 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3408 rtx dest_mem, src_mem, len_rtx;
3409 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3413 while (TREE_CODE (result) == COMPOUND_EXPR)
3415 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3417 result = TREE_OPERAND (result, 1);
3419 return expand_expr (result, target, mode, EXPAND_NORMAL);
3422 /* If either SRC or DEST is not a pointer type, don't do this
3423 operation in-line. */
3424 if (dest_align == 0 || src_align == 0)
3427 /* If LEN is not constant, call the normal function. */
3428 if (! host_integerp (len, 1))
3431 len_rtx = expand_normal (len);
3432 src_str = c_getstr (src);
3434 /* If SRC is a string constant and block move would be done
3435 by pieces, we can avoid loading the string from memory
3436 and only stored the computed constants. */
3438 && GET_CODE (len_rtx) == CONST_INT
3439 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3440 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3441 CONST_CAST (char *, src_str),
3444 dest_mem = get_memory_rtx (dest, len);
3445 set_mem_align (dest_mem, dest_align);
3446 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3447 builtin_memcpy_read_str,
3448 CONST_CAST (char *, src_str),
3449 dest_align, false, endp);
3450 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3451 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3455 if (GET_CODE (len_rtx) == CONST_INT
3456 && can_move_by_pieces (INTVAL (len_rtx),
3457 MIN (dest_align, src_align)))
3459 dest_mem = get_memory_rtx (dest, len);
3460 set_mem_align (dest_mem, dest_align);
3461 src_mem = get_memory_rtx (src, len);
3462 set_mem_align (src_mem, src_align);
3463 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3464 MIN (dest_align, src_align), endp);
3465 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3466 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3474 /* Expand expression EXP, which is a call to the memmove builtin. Return
3475 NULL_RTX if we failed; the caller should emit a normal call. */
3478 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3480 if (!validate_arglist (exp,
3481 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3485 tree dest = CALL_EXPR_ARG (exp, 0);
3486 tree src = CALL_EXPR_ARG (exp, 1);
3487 tree len = CALL_EXPR_ARG (exp, 2);
3488 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3489 target, mode, ignore);
3493 /* Helper function to do the actual work for expand_builtin_memmove. The
3494 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3495 so that this can also be called without constructing an actual CALL_EXPR.
3496 TYPE is the return type of the call. The other arguments and return value
3497 are the same as for expand_builtin_memmove. */
3500 expand_builtin_memmove_args (tree dest, tree src, tree len,
3501 tree type, rtx target, enum machine_mode mode,
3504 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3508 STRIP_TYPE_NOPS (result);
3509 while (TREE_CODE (result) == COMPOUND_EXPR)
3511 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3513 result = TREE_OPERAND (result, 1);
3515 return expand_expr (result, target, mode, EXPAND_NORMAL);
3518 /* Otherwise, call the normal function. */
3522 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3523 NULL_RTX if we failed the caller should emit a normal call. */
3526 expand_builtin_bcopy (tree exp, int ignore)
3528 tree type = TREE_TYPE (exp);
3529 tree src, dest, size;
3531 if (!validate_arglist (exp,
3532 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3535 src = CALL_EXPR_ARG (exp, 0);
3536 dest = CALL_EXPR_ARG (exp, 1);
3537 size = CALL_EXPR_ARG (exp, 2);
3539 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3540 This is done this way so that if it isn't expanded inline, we fall
3541 back to calling bcopy instead of memmove. */
3542 return expand_builtin_memmove_args (dest, src,
3543 fold_convert (sizetype, size),
3544 type, const0_rtx, VOIDmode,
3549 # define HAVE_movstr 0
3550 # define CODE_FOR_movstr CODE_FOR_nothing
3553 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3554 we failed, the caller should emit a normal call, otherwise try to
3555 get the result in TARGET, if convenient. If ENDP is 0 return the
3556 destination pointer, if ENDP is 1 return the end pointer ala
3557 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3561 expand_movstr (tree dest, tree src, rtx target, int endp)
3567 const struct insn_data * data;
3572 dest_mem = get_memory_rtx (dest, NULL);
3573 src_mem = get_memory_rtx (src, NULL);
3576 target = force_reg (Pmode, XEXP (dest_mem, 0));
3577 dest_mem = replace_equiv_address (dest_mem, target);
3578 end = gen_reg_rtx (Pmode);
3582 if (target == 0 || target == const0_rtx)
3584 end = gen_reg_rtx (Pmode);
3592 data = insn_data + CODE_FOR_movstr;
3594 if (data->operand[0].mode != VOIDmode)
3595 end = gen_lowpart (data->operand[0].mode, end);
3597 insn = data->genfun (end, dest_mem, src_mem);
3603 /* movstr is supposed to set end to the address of the NUL
3604 terminator. If the caller requested a mempcpy-like return value,
3606 if (endp == 1 && target != const0_rtx)
3608 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3609 emit_move_insn (target, force_operand (tem, NULL_RTX));
3615 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3616 NULL_RTX if we failed the caller should emit a normal call, otherwise
3617 try to get the result in TARGET, if convenient (and in mode MODE if that's
3621 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3623 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3625 tree dest = CALL_EXPR_ARG (exp, 0);
3626 tree src = CALL_EXPR_ARG (exp, 1);
3627 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3632 /* Helper function to do the actual work for expand_builtin_strcpy. The
3633 arguments to the builtin_strcpy call DEST and SRC are broken out
3634 so that this can also be called without constructing an actual CALL_EXPR.
3635 The other arguments and return value are the same as for
3636 expand_builtin_strcpy. */
3639 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3640 rtx target, enum machine_mode mode)
3642 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3644 return expand_expr (result, target, mode, EXPAND_NORMAL);
3645 return expand_movstr (dest, src, target, /*endp=*/0);
3649 /* Expand a call EXP to the stpcpy builtin.
3650 Return NULL_RTX if we failed the caller should emit a normal call,
3651 otherwise try to get the result in TARGET, if convenient (and in
3652 mode MODE if that's convenient). */
3655 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3659 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3662 dst = CALL_EXPR_ARG (exp, 0);
3663 src = CALL_EXPR_ARG (exp, 1);
3665 /* If return value is ignored, transform stpcpy into strcpy. */
3666 if (target == const0_rtx)
3668 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3672 return expand_expr (build_call_expr (fn, 2, dst, src),
3673 target, mode, EXPAND_NORMAL);
3680 /* Ensure we get an actual string whose length can be evaluated at
3681 compile-time, not an expression containing a string. This is
3682 because the latter will potentially produce pessimized code
3683 when used to produce the return value. */
3684 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3685 return expand_movstr (dst, src, target, /*endp=*/2);
3687 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3688 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3689 target, mode, /*endp=*/2);
3694 if (TREE_CODE (len) == INTEGER_CST)
3696 rtx len_rtx = expand_normal (len);
3698 if (GET_CODE (len_rtx) == CONST_INT)
3700 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3701 dst, src, target, mode);
3707 if (mode != VOIDmode)
3708 target = gen_reg_rtx (mode);
3710 target = gen_reg_rtx (GET_MODE (ret));
3712 if (GET_MODE (target) != GET_MODE (ret))
3713 ret = gen_lowpart (GET_MODE (target), ret);
3715 ret = plus_constant (ret, INTVAL (len_rtx));
3716 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3724 return expand_movstr (dst, src, target, /*endp=*/2);
3728 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3729 bytes from constant string DATA + OFFSET and return it as target
3733 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3734 enum machine_mode mode)
3736 const char *str = (const char *) data;
3738 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3741 return c_readstr (str + offset, mode);
3744 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3745 NULL_RTX if we failed the caller should emit a normal call. */
3748 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3750 tree fndecl = get_callee_fndecl (exp);
3752 if (validate_arglist (exp,
3753 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3755 tree dest = CALL_EXPR_ARG (exp, 0);
3756 tree src = CALL_EXPR_ARG (exp, 1);
3757 tree len = CALL_EXPR_ARG (exp, 2);
3758 tree slen = c_strlen (src, 1);
3759 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3763 while (TREE_CODE (result) == COMPOUND_EXPR)
3765 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3767 result = TREE_OPERAND (result, 1);
3769 return expand_expr (result, target, mode, EXPAND_NORMAL);
3772 /* We must be passed a constant len and src parameter. */
3773 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3776 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3778 /* We're required to pad with trailing zeros if the requested
3779 len is greater than strlen(s2)+1. In that case try to
3780 use store_by_pieces, if it fails, punt. */
3781 if (tree_int_cst_lt (slen, len))
3783 unsigned int dest_align
3784 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3785 const char *p = c_getstr (src);
3788 if (!p || dest_align == 0 || !host_integerp (len, 1)
3789 || !can_store_by_pieces (tree_low_cst (len, 1),
3790 builtin_strncpy_read_str,
3791 CONST_CAST (char *, p),
3795 dest_mem = get_memory_rtx (dest, len);
3796 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3797 builtin_strncpy_read_str,
3798 CONST_CAST (char *, p), dest_align, false, 0);
3799 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3800 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3807 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3808 bytes from constant string DATA + OFFSET and return it as target
3812 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3813 enum machine_mode mode)
3815 const char *c = (const char *) data;
3816 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3818 memset (p, *c, GET_MODE_SIZE (mode));
3820 return c_readstr (p, mode);
3823 /* Callback routine for store_by_pieces. Return the RTL of a register
3824 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3825 char value given in the RTL register data. For example, if mode is
3826 4 bytes wide, return the RTL for 0x01010101*data. */
3829 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3830 enum machine_mode mode)
3836 size = GET_MODE_SIZE (mode);
3840 p = XALLOCAVEC (char, size);
3841 memset (p, 1, size);
3842 coeff = c_readstr (p, mode);
3844 target = convert_to_mode (mode, (rtx) data, 1);
3845 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3846 return force_reg (mode, target);
3849 /* Expand expression EXP, which is a call to the memset builtin. Return
3850 NULL_RTX if we failed the caller should emit a normal call, otherwise
3851 try to get the result in TARGET, if convenient (and in mode MODE if that's
3855 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3857 if (!validate_arglist (exp,
3858 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3862 tree dest = CALL_EXPR_ARG (exp, 0);
3863 tree val = CALL_EXPR_ARG (exp, 1);
3864 tree len = CALL_EXPR_ARG (exp, 2);
3865 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3869 /* Helper function to do the actual work for expand_builtin_memset. The
3870 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3871 so that this can also be called without constructing an actual CALL_EXPR.
3872 The other arguments and return value are the same as for
3873 expand_builtin_memset. */
3876 expand_builtin_memset_args (tree dest, tree val, tree len,
3877 rtx target, enum machine_mode mode, tree orig_exp)
3880 enum built_in_function fcode;
3882 unsigned int dest_align;
3883 rtx dest_mem, dest_addr, len_rtx;
3884 HOST_WIDE_INT expected_size = -1;
3885 unsigned int expected_align = 0;
3887 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3889 /* If DEST is not a pointer type, don't do this operation in-line. */
3890 if (dest_align == 0)
3893 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3894 if (expected_align < dest_align)
3895 expected_align = dest_align;
3897 /* If the LEN parameter is zero, return DEST. */
3898 if (integer_zerop (len))
3900 /* Evaluate and ignore VAL in case it has side-effects. */
3901 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3902 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3905 /* Stabilize the arguments in case we fail. */
3906 dest = builtin_save_expr (dest);
3907 val = builtin_save_expr (val);
3908 len = builtin_save_expr (len);
3910 len_rtx = expand_normal (len);
3911 dest_mem = get_memory_rtx (dest, len);
3913 if (TREE_CODE (val) != INTEGER_CST)
3917 val_rtx = expand_normal (val);
3918 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3921 /* Assume that we can memset by pieces if we can store
3922 * the coefficients by pieces (in the required modes).
3923 * We can't pass builtin_memset_gen_str as that emits RTL. */
3925 if (host_integerp (len, 1)
3926 && can_store_by_pieces (tree_low_cst (len, 1),
3927 builtin_memset_read_str, &c, dest_align,
3930 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3932 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3933 builtin_memset_gen_str, val_rtx, dest_align,
3936 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3937 dest_align, expected_align,
3941 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3942 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3946 if (target_char_cast (val, &c))
3951 if (host_integerp (len, 1)
3952 && can_store_by_pieces (tree_low_cst (len, 1),
3953 builtin_memset_read_str, &c, dest_align,
3955 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3956 builtin_memset_read_str, &c, dest_align, true, 0);
3957 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3958 dest_align, expected_align,
3962 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3963 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3967 set_mem_align (dest_mem, dest_align);
3968 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3969 CALL_EXPR_TAILCALL (orig_exp)
3970 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3971 expected_align, expected_size);
3975 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3976 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3982 fndecl = get_callee_fndecl (orig_exp);
3983 fcode = DECL_FUNCTION_CODE (fndecl);
3984 if (fcode == BUILT_IN_MEMSET)
3985 fn = build_call_expr (fndecl, 3, dest, val, len);
3986 else if (fcode == BUILT_IN_BZERO)
3987 fn = build_call_expr (fndecl, 2, dest, len);
3990 if (TREE_CODE (fn) == CALL_EXPR)
3991 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3992 return expand_call (fn, target, target == const0_rtx);
3995 /* Expand expression EXP, which is a call to the bzero builtin. Return
3996 NULL_RTX if we failed the caller should emit a normal call. */
3999 expand_builtin_bzero (tree exp)
4003 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4006 dest = CALL_EXPR_ARG (exp, 0);
4007 size = CALL_EXPR_ARG (exp, 1);
4009 /* New argument list transforming bzero(ptr x, int y) to
4010 memset(ptr x, int 0, size_t y). This is done this way
4011 so that if it isn't expanded inline, we fallback to
4012 calling bzero instead of memset. */
4014 return expand_builtin_memset_args (dest, integer_zero_node,
4015 fold_convert (sizetype, size),
4016 const0_rtx, VOIDmode, exp);
4019 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4020 caller should emit a normal call, otherwise try to get the result
4021 in TARGET, if convenient (and in mode MODE if that's convenient). */
4024 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4026 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4027 INTEGER_TYPE, VOID_TYPE))
4029 tree type = TREE_TYPE (exp);
4030 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4031 CALL_EXPR_ARG (exp, 1),
4032 CALL_EXPR_ARG (exp, 2), type);
4034 return expand_expr (result, target, mode, EXPAND_NORMAL);
4039 /* Expand expression EXP, which is a call to the memcmp built-in function.
4040 Return NULL_RTX if we failed and the
4041 caller should emit a normal call, otherwise try to get the result in
4042 TARGET, if convenient (and in mode MODE, if that's convenient). */
4045 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4047 if (!validate_arglist (exp,
4048 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4052 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4053 CALL_EXPR_ARG (exp, 1),
4054 CALL_EXPR_ARG (exp, 2));
4056 return expand_expr (result, target, mode, EXPAND_NORMAL);
4059 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4061 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4064 tree arg1 = CALL_EXPR_ARG (exp, 0);
4065 tree arg2 = CALL_EXPR_ARG (exp, 1);
4066 tree len = CALL_EXPR_ARG (exp, 2);
4069 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4071 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4072 enum machine_mode insn_mode;
4074 #ifdef HAVE_cmpmemsi
4076 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4079 #ifdef HAVE_cmpstrnsi
4081 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4086 /* If we don't have POINTER_TYPE, call the function. */
4087 if (arg1_align == 0 || arg2_align == 0)
4090 /* Make a place to write the result of the instruction. */
4093 && REG_P (result) && GET_MODE (result) == insn_mode
4094 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4095 result = gen_reg_rtx (insn_mode);
4097 arg1_rtx = get_memory_rtx (arg1, len);
4098 arg2_rtx = get_memory_rtx (arg2, len);
4099 arg3_rtx = expand_normal (len);
4101 /* Set MEM_SIZE as appropriate. */
4102 if (GET_CODE (arg3_rtx) == CONST_INT)
4104 set_mem_size (arg1_rtx, arg3_rtx);
4105 set_mem_size (arg2_rtx, arg3_rtx);
4108 #ifdef HAVE_cmpmemsi
4110 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4111 GEN_INT (MIN (arg1_align, arg2_align)));
4114 #ifdef HAVE_cmpstrnsi
4116 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4117 GEN_INT (MIN (arg1_align, arg2_align)));
4125 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4126 TYPE_MODE (integer_type_node), 3,
4127 XEXP (arg1_rtx, 0), Pmode,
4128 XEXP (arg2_rtx, 0), Pmode,
4129 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4130 TYPE_UNSIGNED (sizetype)),
4131 TYPE_MODE (sizetype));
4133 /* Return the value in the proper mode for this function. */
4134 mode = TYPE_MODE (TREE_TYPE (exp));
4135 if (GET_MODE (result) == mode)
4137 else if (target != 0)
4139 convert_move (target, result, 0);
4143 return convert_to_mode (mode, result, 0);
4150 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4151 if we failed the caller should emit a normal call, otherwise try to get
4152 the result in TARGET, if convenient. */
4155 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4157 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4161 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4162 CALL_EXPR_ARG (exp, 1));
4164 return expand_expr (result, target, mode, EXPAND_NORMAL);
4167 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4168 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4169 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4171 rtx arg1_rtx, arg2_rtx;
4172 rtx result, insn = NULL_RTX;
4174 tree arg1 = CALL_EXPR_ARG (exp, 0);
4175 tree arg2 = CALL_EXPR_ARG (exp, 1);
4178 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4180 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4182 /* If we don't have POINTER_TYPE, call the function. */
4183 if (arg1_align == 0 || arg2_align == 0)
4186 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4187 arg1 = builtin_save_expr (arg1);
4188 arg2 = builtin_save_expr (arg2);
4190 arg1_rtx = get_memory_rtx (arg1, NULL);
4191 arg2_rtx = get_memory_rtx (arg2, NULL);
4193 #ifdef HAVE_cmpstrsi
4194 /* Try to call cmpstrsi. */
4197 enum machine_mode insn_mode
4198 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4200 /* Make a place to write the result of the instruction. */
4203 && REG_P (result) && GET_MODE (result) == insn_mode
4204 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4205 result = gen_reg_rtx (insn_mode);
4207 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4208 GEN_INT (MIN (arg1_align, arg2_align)));
4211 #ifdef HAVE_cmpstrnsi
4212 /* Try to determine at least one length and call cmpstrnsi. */
4213 if (!insn && HAVE_cmpstrnsi)
4218 enum machine_mode insn_mode
4219 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4220 tree len1 = c_strlen (arg1, 1);
4221 tree len2 = c_strlen (arg2, 1);
4224 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4226 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4228 /* If we don't have a constant length for the first, use the length
4229 of the second, if we know it. We don't require a constant for
4230 this case; some cost analysis could be done if both are available
4231 but neither is constant. For now, assume they're equally cheap,
4232 unless one has side effects. If both strings have constant lengths,
4239 else if (TREE_SIDE_EFFECTS (len1))
4241 else if (TREE_SIDE_EFFECTS (len2))
4243 else if (TREE_CODE (len1) != INTEGER_CST)
4245 else if (TREE_CODE (len2) != INTEGER_CST)
4247 else if (tree_int_cst_lt (len1, len2))
4252 /* If both arguments have side effects, we cannot optimize. */
4253 if (!len || TREE_SIDE_EFFECTS (len))
4256 arg3_rtx = expand_normal (len);
4258 /* Make a place to write the result of the instruction. */
4261 && REG_P (result) && GET_MODE (result) == insn_mode
4262 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4263 result = gen_reg_rtx (insn_mode);
4265 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4266 GEN_INT (MIN (arg1_align, arg2_align)));
4274 /* Return the value in the proper mode for this function. */
4275 mode = TYPE_MODE (TREE_TYPE (exp));
4276 if (GET_MODE (result) == mode)
4279 return convert_to_mode (mode, result, 0);
4280 convert_move (target, result, 0);
4284 /* Expand the library call ourselves using a stabilized argument
4285 list to avoid re-evaluating the function's arguments twice. */
4286 #ifdef HAVE_cmpstrnsi
4289 fndecl = get_callee_fndecl (exp);
4290 fn = build_call_expr (fndecl, 2, arg1, arg2);
4291 if (TREE_CODE (fn) == CALL_EXPR)
4292 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4293 return expand_call (fn, target, target == const0_rtx);
4299 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4300 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4301 the result in TARGET, if convenient. */
4304 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4306 if (!validate_arglist (exp,
4307 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4311 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4312 CALL_EXPR_ARG (exp, 1),
4313 CALL_EXPR_ARG (exp, 2));
4315 return expand_expr (result, target, mode, EXPAND_NORMAL);
4318 /* If c_strlen can determine an expression for one of the string
4319 lengths, and it doesn't have side effects, then emit cmpstrnsi
4320 using length MIN(strlen(string)+1, arg3). */
4321 #ifdef HAVE_cmpstrnsi
4324 tree len, len1, len2;
4325 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4328 tree arg1 = CALL_EXPR_ARG (exp, 0);
4329 tree arg2 = CALL_EXPR_ARG (exp, 1);
4330 tree arg3 = CALL_EXPR_ARG (exp, 2);
4333 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4335 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4336 enum machine_mode insn_mode
4337 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4339 len1 = c_strlen (arg1, 1);
4340 len2 = c_strlen (arg2, 1);
4343 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4345 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4347 /* If we don't have a constant length for the first, use the length
4348 of the second, if we know it. We don't require a constant for
4349 this case; some cost analysis could be done if both are available
4350 but neither is constant. For now, assume they're equally cheap,
4351 unless one has side effects. If both strings have constant lengths,
4358 else if (TREE_SIDE_EFFECTS (len1))
4360 else if (TREE_SIDE_EFFECTS (len2))
4362 else if (TREE_CODE (len1) != INTEGER_CST)
4364 else if (TREE_CODE (len2) != INTEGER_CST)
4366 else if (tree_int_cst_lt (len1, len2))
4371 /* If both arguments have side effects, we cannot optimize. */
4372 if (!len || TREE_SIDE_EFFECTS (len))
4375 /* The actual new length parameter is MIN(len,arg3). */
4376 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4377 fold_convert (TREE_TYPE (len), arg3));
4379 /* If we don't have POINTER_TYPE, call the function. */
4380 if (arg1_align == 0 || arg2_align == 0)
4383 /* Make a place to write the result of the instruction. */
4386 && REG_P (result) && GET_MODE (result) == insn_mode
4387 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4388 result = gen_reg_rtx (insn_mode);
4390 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4391 arg1 = builtin_save_expr (arg1);
4392 arg2 = builtin_save_expr (arg2);
4393 len = builtin_save_expr (len);
4395 arg1_rtx = get_memory_rtx (arg1, len);
4396 arg2_rtx = get_memory_rtx (arg2, len);
4397 arg3_rtx = expand_normal (len);
4398 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4399 GEN_INT (MIN (arg1_align, arg2_align)));
4404 /* Return the value in the proper mode for this function. */
4405 mode = TYPE_MODE (TREE_TYPE (exp));
4406 if (GET_MODE (result) == mode)
4409 return convert_to_mode (mode, result, 0);
4410 convert_move (target, result, 0);
4414 /* Expand the library call ourselves using a stabilized argument
4415 list to avoid re-evaluating the function's arguments twice. */
4416 fndecl = get_callee_fndecl (exp);
4417 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4418 if (TREE_CODE (fn) == CALL_EXPR)
4419 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4420 return expand_call (fn, target, target == const0_rtx);
4426 /* Expand expression EXP, which is a call to the strcat builtin.
4427 Return NULL_RTX if we failed the caller should emit a normal call,
4428 otherwise try to get the result in TARGET, if convenient. */
4431 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4433 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4437 tree dst = CALL_EXPR_ARG (exp, 0);
4438 tree src = CALL_EXPR_ARG (exp, 1);
4439 const char *p = c_getstr (src);
4441 /* If the string length is zero, return the dst parameter. */
4442 if (p && *p == '\0')
4443 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4447 /* See if we can store by pieces into (dst + strlen(dst)). */
4448 tree newsrc, newdst,
4449 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4452 /* Stabilize the argument list. */
4453 newsrc = builtin_save_expr (src);
4454 dst = builtin_save_expr (dst);
4458 /* Create strlen (dst). */
4459 newdst = build_call_expr (strlen_fn, 1, dst);
4460 /* Create (dst p+ strlen (dst)). */
4462 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4463 newdst = builtin_save_expr (newdst);
4465 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4467 end_sequence (); /* Stop sequence. */
4471 /* Output the entire sequence. */
4472 insns = get_insns ();
4476 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4483 /* Expand expression EXP, which is a call to the strncat builtin.
4484 Return NULL_RTX if we failed the caller should emit a normal call,
4485 otherwise try to get the result in TARGET, if convenient. */
4488 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4490 if (validate_arglist (exp,
4491 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4493 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4494 CALL_EXPR_ARG (exp, 1),
4495 CALL_EXPR_ARG (exp, 2));
4497 return expand_expr (result, target, mode, EXPAND_NORMAL);
4502 /* Expand expression EXP, which is a call to the strspn builtin.
4503 Return NULL_RTX if we failed the caller should emit a normal call,
4504 otherwise try to get the result in TARGET, if convenient. */
4507 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4509 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4511 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4512 CALL_EXPR_ARG (exp, 1));
4514 return expand_expr (result, target, mode, EXPAND_NORMAL);
4519 /* Expand expression EXP, which is a call to the strcspn builtin.
4520 Return NULL_RTX if we failed the caller should emit a normal call,
4521 otherwise try to get the result in TARGET, if convenient. */
4524 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4526 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4528 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4529 CALL_EXPR_ARG (exp, 1));
4531 return expand_expr (result, target, mode, EXPAND_NORMAL);
4536 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4537 if that's convenient. */
4540 expand_builtin_saveregs (void)
4544 /* Don't do __builtin_saveregs more than once in a function.
4545 Save the result of the first call and reuse it. */
4546 if (saveregs_value != 0)
4547 return saveregs_value;
4549 /* When this function is called, it means that registers must be
4550 saved on entry to this function. So we migrate the call to the
4551 first insn of this function. */
4555 /* Do whatever the machine needs done in this case. */
4556 val = targetm.calls.expand_builtin_saveregs ();
4561 saveregs_value = val;
4563 /* Put the insns after the NOTE that starts the function. If this
4564 is inside a start_sequence, make the outer-level insn chain current, so
4565 the code is placed at the start of the function. */
4566 push_topmost_sequence ();
4567 emit_insn_after (seq, entry_of_function ());
4568 pop_topmost_sequence ();
4573 /* __builtin_args_info (N) returns word N of the arg space info
4574 for the current function. The number and meanings of words
4575 is controlled by the definition of CUMULATIVE_ARGS. */
4578 expand_builtin_args_info (tree exp)
4580 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4581 int *word_ptr = (int *) &crtl->args.info;
4583 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4585 if (call_expr_nargs (exp) != 0)
4587 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4588 error ("argument of %<__builtin_args_info%> must be constant");
4591 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4593 if (wordnum < 0 || wordnum >= nwords)
4594 error ("argument of %<__builtin_args_info%> out of range");
4596 return GEN_INT (word_ptr[wordnum]);
4600 error ("missing argument in %<__builtin_args_info%>");
4605 /* Expand a call to __builtin_next_arg. */
4608 expand_builtin_next_arg (void)
4610 /* Checking arguments is already done in fold_builtin_next_arg
4611 that must be called before this function. */
4612 return expand_binop (ptr_mode, add_optab,
4613 crtl->args.internal_arg_pointer,
4614 crtl->args.arg_offset_rtx,
4615 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4618 /* Make it easier for the backends by protecting the valist argument
4619 from multiple evaluations. */
4622 stabilize_va_list (tree valist, int needs_lvalue)
4624 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4626 gcc_assert (vatype != NULL_TREE);
4628 if (TREE_CODE (vatype) == ARRAY_TYPE)
4630 if (TREE_SIDE_EFFECTS (valist))
4631 valist = save_expr (valist);
4633 /* For this case, the backends will be expecting a pointer to
4634 vatype, but it's possible we've actually been given an array
4635 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4637 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4639 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4640 valist = build_fold_addr_expr_with_type (valist, p1);
4649 if (! TREE_SIDE_EFFECTS (valist))
4652 pt = build_pointer_type (vatype);
4653 valist = fold_build1 (ADDR_EXPR, pt, valist);
4654 TREE_SIDE_EFFECTS (valist) = 1;
4657 if (TREE_SIDE_EFFECTS (valist))
4658 valist = save_expr (valist);
4659 valist = build_fold_indirect_ref (valist);
4665 /* The "standard" definition of va_list is void*. */
4668 std_build_builtin_va_list (void)
4670 return ptr_type_node;
4673 /* The "standard" abi va_list is va_list_type_node. */
4676 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4678 return va_list_type_node;
4681 /* The "standard" type of va_list is va_list_type_node. */
4684 std_canonical_va_list_type (tree type)
4688 if (INDIRECT_REF_P (type))
4689 type = TREE_TYPE (type);
4690 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4691 type = TREE_TYPE (type);
4692 wtype = va_list_type_node;
4694 /* Treat structure va_list types. */
4695 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4696 htype = TREE_TYPE (htype);
4697 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4699 /* If va_list is an array type, the argument may have decayed
4700 to a pointer type, e.g. by being passed to another function.
4701 In that case, unwrap both types so that we can compare the
4702 underlying records. */
4703 if (TREE_CODE (htype) == ARRAY_TYPE
4704 || POINTER_TYPE_P (htype))
4706 wtype = TREE_TYPE (wtype);
4707 htype = TREE_TYPE (htype);
4710 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4711 return va_list_type_node;
4716 /* The "standard" implementation of va_start: just assign `nextarg' to
4720 std_expand_builtin_va_start (tree valist, rtx nextarg)
4722 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4723 convert_move (va_r, nextarg, 0);
4726 /* Expand EXP, a call to __builtin_va_start. */
4729 expand_builtin_va_start (tree exp)
4734 if (call_expr_nargs (exp) < 2)
4736 error ("too few arguments to function %<va_start%>");
4740 if (fold_builtin_next_arg (exp, true))
4743 nextarg = expand_builtin_next_arg ();
4744 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4746 if (targetm.expand_builtin_va_start)
4747 targetm.expand_builtin_va_start (valist, nextarg);
4749 std_expand_builtin_va_start (valist, nextarg);
4754 /* The "standard" implementation of va_arg: read the value from the
4755 current (padded) address and increment by the (padded) size. */
4758 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4760 tree addr, t, type_size, rounded_size, valist_tmp;
4761 unsigned HOST_WIDE_INT align, boundary;
4764 #ifdef ARGS_GROW_DOWNWARD
4765 /* All of the alignment and movement below is for args-grow-up machines.
4766 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4767 implement their own specialized gimplify_va_arg_expr routines. */
4771 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4773 type = build_pointer_type (type);
4775 align = PARM_BOUNDARY / BITS_PER_UNIT;
4776 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4778 /* When we align parameter on stack for caller, if the parameter
4779 alignment is beyond PREFERRED_STACK_BOUNDARY, it will be
4780 aligned at PREFERRED_STACK_BOUNDARY. We will match callee
4781 here with caller. */
4782 if (boundary > PREFERRED_STACK_BOUNDARY)
4783 boundary = PREFERRED_STACK_BOUNDARY;
4785 boundary /= BITS_PER_UNIT;
4787 /* Hoist the valist value into a temporary for the moment. */
4788 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4790 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4791 requires greater alignment, we must perform dynamic alignment. */
4792 if (boundary > align
4793 && !integer_zerop (TYPE_SIZE (type)))
4795 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4796 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4797 valist_tmp, size_int (boundary - 1)));
4798 gimplify_and_add (t, pre_p);
4800 t = fold_convert (sizetype, valist_tmp);
4801 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4802 fold_convert (TREE_TYPE (valist),
4803 fold_build2 (BIT_AND_EXPR, sizetype, t,
4804 size_int (-boundary))));
4805 gimplify_and_add (t, pre_p);
4810 /* If the actual alignment is less than the alignment of the type,
4811 adjust the type accordingly so that we don't assume strict alignment
4812 when dereferencing the pointer. */
4813 boundary *= BITS_PER_UNIT;
4814 if (boundary < TYPE_ALIGN (type))
4816 type = build_variant_type_copy (type);
4817 TYPE_ALIGN (type) = boundary;
4820 /* Compute the rounded size of the type. */
4821 type_size = size_in_bytes (type);
4822 rounded_size = round_up (type_size, align);
4824 /* Reduce rounded_size so it's sharable with the postqueue. */
4825 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4829 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4831 /* Small args are padded downward. */
4832 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4833 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4834 size_binop (MINUS_EXPR, rounded_size, type_size));
4835 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4838 /* Compute new value for AP. */
4839 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4840 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4841 gimplify_and_add (t, pre_p);
4843 addr = fold_convert (build_pointer_type (type), addr);
4846 addr = build_va_arg_indirect_ref (addr);
4848 return build_va_arg_indirect_ref (addr);
4851 /* Build an indirect-ref expression over the given TREE, which represents a
4852 piece of a va_arg() expansion. */
4854 build_va_arg_indirect_ref (tree addr)
4856 addr = build_fold_indirect_ref (addr);
4858 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4864 /* Return a dummy expression of type TYPE in order to keep going after an
4868 dummy_object (tree type)
4870 tree t = build_int_cst (build_pointer_type (type), 0);
4871 return build1 (INDIRECT_REF, type, t);
4874 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4875 builtin function, but a very special sort of operator. */
4877 enum gimplify_status
4878 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4880 tree promoted_type, have_va_type;
4881 tree valist = TREE_OPERAND (*expr_p, 0);
4882 tree type = TREE_TYPE (*expr_p);
4885 /* Verify that valist is of the proper type. */
4886 have_va_type = TREE_TYPE (valist);
4887 if (have_va_type == error_mark_node)
4889 have_va_type = targetm.canonical_va_list_type (have_va_type);
4891 if (have_va_type == NULL_TREE)
4893 error ("first argument to %<va_arg%> not of type %<va_list%>");
4897 /* Generate a diagnostic for requesting data of a type that cannot
4898 be passed through `...' due to type promotion at the call site. */
4899 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4902 static bool gave_help;
4904 /* Unfortunately, this is merely undefined, rather than a constraint
4905 violation, so we cannot make this an error. If this call is never
4906 executed, the program is still strictly conforming. */
4907 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4908 type, promoted_type);
4912 inform ("(so you should pass %qT not %qT to %<va_arg%>)",
4913 promoted_type, type);
4916 /* We can, however, treat "undefined" any way we please.
4917 Call abort to encourage the user to fix the program. */
4918 inform ("if this code is reached, the program will abort");
4919 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4920 append_to_statement_list (t, pre_p);
4922 /* This is dead code, but go ahead and finish so that the
4923 mode of the result comes out right. */
4924 *expr_p = dummy_object (type);
4929 /* Make it easier for the backends by protecting the valist argument
4930 from multiple evaluations. */
4931 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4933 /* For this case, the backends will be expecting a pointer to
4934 TREE_TYPE (abi), but it's possible we've
4935 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4937 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4939 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4940 valist = build_fold_addr_expr_with_type (valist, p1);
4942 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4945 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4947 if (!targetm.gimplify_va_arg_expr)
4948 /* FIXME:Once most targets are converted we should merely
4949 assert this is non-null. */
4952 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4957 /* Expand EXP, a call to __builtin_va_end. */
4960 expand_builtin_va_end (tree exp)
4962 tree valist = CALL_EXPR_ARG (exp, 0);
4964 /* Evaluate for side effects, if needed. I hate macros that don't
4966 if (TREE_SIDE_EFFECTS (valist))
4967 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4972 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4973 builtin rather than just as an assignment in stdarg.h because of the
4974 nastiness of array-type va_list types. */
4977 expand_builtin_va_copy (tree exp)
4981 dst = CALL_EXPR_ARG (exp, 0);
4982 src = CALL_EXPR_ARG (exp, 1);
4984 dst = stabilize_va_list (dst, 1);
4985 src = stabilize_va_list (src, 0);
4987 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4989 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4991 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4992 TREE_SIDE_EFFECTS (t) = 1;
4993 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4997 rtx dstb, srcb, size;
4999 /* Evaluate to pointers. */
5000 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5001 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5002 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5003 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5005 dstb = convert_memory_address (Pmode, dstb);
5006 srcb = convert_memory_address (Pmode, srcb);
5008 /* "Dereference" to BLKmode memories. */
5009 dstb = gen_rtx_MEM (BLKmode, dstb);
5010 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5011 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5012 srcb = gen_rtx_MEM (BLKmode, srcb);
5013 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5014 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5017 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5023 /* Expand a call to one of the builtin functions __builtin_frame_address or
5024 __builtin_return_address. */
5027 expand_builtin_frame_address (tree fndecl, tree exp)
5029 /* The argument must be a nonnegative integer constant.
5030 It counts the number of frames to scan up the stack.
5031 The value is the return address saved in that frame. */
5032 if (call_expr_nargs (exp) == 0)
5033 /* Warning about missing arg was already issued. */
5035 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5037 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5038 error ("invalid argument to %<__builtin_frame_address%>");
5040 error ("invalid argument to %<__builtin_return_address%>");
5046 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5047 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5049 /* Some ports cannot access arbitrary stack frames. */
5052 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5053 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5055 warning (0, "unsupported argument to %<__builtin_return_address%>");
5059 /* For __builtin_frame_address, return what we've got. */
5060 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5064 && ! CONSTANT_P (tem))
5065 tem = copy_to_mode_reg (Pmode, tem);
5070 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5071 we failed and the caller should emit a normal call, otherwise try to get
5072 the result in TARGET, if convenient. */
5075 expand_builtin_alloca (tree exp, rtx target)
5080 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5081 should always expand to function calls. These can be intercepted
5086 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5089 /* Compute the argument. */
5090 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5092 /* Allocate the desired space. */
5093 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5094 result = convert_memory_address (ptr_mode, result);
5099 /* Expand a call to a bswap builtin with argument ARG0. MODE
5100 is the mode to expand with. */
5103 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5105 enum machine_mode mode;
5109 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5112 arg = CALL_EXPR_ARG (exp, 0);
5113 mode = TYPE_MODE (TREE_TYPE (arg));
5114 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5116 target = expand_unop (mode, bswap_optab, op0, target, 1);
5118 gcc_assert (target);
5120 return convert_to_mode (mode, target, 0);
5123 /* Expand a call to a unary builtin in EXP.
5124 Return NULL_RTX if a normal call should be emitted rather than expanding the
5125 function in-line. If convenient, the result should be placed in TARGET.
5126 SUBTARGET may be used as the target for computing one of EXP's operands. */
5129 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5130 rtx subtarget, optab op_optab)
5134 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5137 /* Compute the argument. */
5138 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5139 VOIDmode, EXPAND_NORMAL);
5140 /* Compute op, into TARGET if possible.
5141 Set TARGET to wherever the result comes back. */
5142 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5143 op_optab, op0, target, 1);
5144 gcc_assert (target);
5146 return convert_to_mode (target_mode, target, 0);
5149 /* If the string passed to fputs is a constant and is one character
5150 long, we attempt to transform this call into __builtin_fputc(). */
5153 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5155 /* Verify the arguments in the original call. */
5156 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5158 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5159 CALL_EXPR_ARG (exp, 1),
5160 (target == const0_rtx),
5161 unlocked, NULL_TREE);
5163 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5168 /* Expand a call to __builtin_expect. We just return our argument
5169 as the builtin_expect semantic should've been already executed by
5170 tree branch prediction pass. */
5173 expand_builtin_expect (tree exp, rtx target)
5177 if (call_expr_nargs (exp) < 2)
5179 arg = CALL_EXPR_ARG (exp, 0);
5180 c = CALL_EXPR_ARG (exp, 1);
5182 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5183 /* When guessing was done, the hints should be already stripped away. */
5184 gcc_assert (!flag_guess_branch_prob
5185 || optimize == 0 || errorcount || sorrycount);
5190 expand_builtin_trap (void)
5194 emit_insn (gen_trap ());
5197 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5201 /* Expand EXP, a call to fabs, fabsf or fabsl.
5202 Return NULL_RTX if a normal call should be emitted rather than expanding
5203 the function inline. If convenient, the result should be placed
5204 in TARGET. SUBTARGET may be used as the target for computing
5208 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5210 enum machine_mode mode;
5214 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5217 arg = CALL_EXPR_ARG (exp, 0);
5218 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5219 mode = TYPE_MODE (TREE_TYPE (arg));
5220 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5221 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5224 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5225 Return NULL is a normal call should be emitted rather than expanding the
5226 function inline. If convenient, the result should be placed in TARGET.
5227 SUBTARGET may be used as the target for computing the operand. */
5230 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5235 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5238 arg = CALL_EXPR_ARG (exp, 0);
5239 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5241 arg = CALL_EXPR_ARG (exp, 1);
5242 op1 = expand_normal (arg);
5244 return expand_copysign (op0, op1, target);
5247 /* Create a new constant string literal and return a char* pointer to it.
5248 The STRING_CST value is the LEN characters at STR. */
5250 build_string_literal (int len, const char *str)
5252 tree t, elem, index, type;
5254 t = build_string (len, str);
5255 elem = build_type_variant (char_type_node, 1, 0);
5256 index = build_index_type (size_int (len - 1));
5257 type = build_array_type (elem, index);
5258 TREE_TYPE (t) = type;
5259 TREE_CONSTANT (t) = 1;
5260 TREE_READONLY (t) = 1;
5261 TREE_STATIC (t) = 1;
5263 type = build_pointer_type (elem);
5264 t = build1 (ADDR_EXPR, type,
5265 build4 (ARRAY_REF, elem,
5266 t, integer_zero_node, NULL_TREE, NULL_TREE));
5270 /* Expand EXP, a call to printf or printf_unlocked.
5271 Return NULL_RTX if a normal call should be emitted rather than transforming
5272 the function inline. If convenient, the result should be placed in
5273 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5276 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5279 /* If we're using an unlocked function, assume the other unlocked
5280 functions exist explicitly. */
5281 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5282 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5283 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5284 : implicit_built_in_decls[BUILT_IN_PUTS];
5285 const char *fmt_str;
5288 int nargs = call_expr_nargs (exp);
5290 /* If the return value is used, don't do the transformation. */
5291 if (target != const0_rtx)
5294 /* Verify the required arguments in the original call. */
5297 fmt = CALL_EXPR_ARG (exp, 0);
5298 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5301 /* Check whether the format is a literal string constant. */
5302 fmt_str = c_getstr (fmt);
5303 if (fmt_str == NULL)
5306 if (!init_target_chars ())
5309 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5310 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5313 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5316 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5318 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5319 else if (strcmp (fmt_str, target_percent_c) == 0)
5322 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5325 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5329 /* We can't handle anything else with % args or %% ... yet. */
5330 if (strchr (fmt_str, target_percent))
5336 /* If the format specifier was "", printf does nothing. */
5337 if (fmt_str[0] == '\0')
5339 /* If the format specifier has length of 1, call putchar. */
5340 if (fmt_str[1] == '\0')
5342 /* Given printf("c"), (where c is any one character,)
5343 convert "c"[0] to an int and pass that to the replacement
5345 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5347 fn = build_call_expr (fn_putchar, 1, arg);
5351 /* If the format specifier was "string\n", call puts("string"). */
5352 size_t len = strlen (fmt_str);
5353 if ((unsigned char)fmt_str[len - 1] == target_newline)
5355 /* Create a NUL-terminated string that's one char shorter
5356 than the original, stripping off the trailing '\n'. */
5357 char *newstr = XALLOCAVEC (char, len);
5358 memcpy (newstr, fmt_str, len - 1);
5359 newstr[len - 1] = 0;
5360 arg = build_string_literal (len, newstr);
5362 fn = build_call_expr (fn_puts, 1, arg);
5365 /* We'd like to arrange to call fputs(string,stdout) here,
5366 but we need stdout and don't have a way to get it yet. */
5373 if (TREE_CODE (fn) == CALL_EXPR)
5374 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5375 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5378 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5379 Return NULL_RTX if a normal call should be emitted rather than transforming
5380 the function inline. If convenient, the result should be placed in
5381 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5384 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5387 /* If we're using an unlocked function, assume the other unlocked
5388 functions exist explicitly. */
5389 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5390 : implicit_built_in_decls[BUILT_IN_FPUTC];
5391 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5392 : implicit_built_in_decls[BUILT_IN_FPUTS];
5393 const char *fmt_str;
5396 int nargs = call_expr_nargs (exp);
5398 /* If the return value is used, don't do the transformation. */
5399 if (target != const0_rtx)
5402 /* Verify the required arguments in the original call. */
5405 fp = CALL_EXPR_ARG (exp, 0);
5406 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5408 fmt = CALL_EXPR_ARG (exp, 1);
5409 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5412 /* Check whether the format is a literal string constant. */
5413 fmt_str = c_getstr (fmt);
5414 if (fmt_str == NULL)
5417 if (!init_target_chars ())
5420 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5421 if (strcmp (fmt_str, target_percent_s) == 0)
5424 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5426 arg = CALL_EXPR_ARG (exp, 2);
5428 fn = build_call_expr (fn_fputs, 2, arg, fp);
5430 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5431 else if (strcmp (fmt_str, target_percent_c) == 0)
5434 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5436 arg = CALL_EXPR_ARG (exp, 2);
5438 fn = build_call_expr (fn_fputc, 2, arg, fp);
5442 /* We can't handle anything else with % args or %% ... yet. */
5443 if (strchr (fmt_str, target_percent))
5449 /* If the format specifier was "", fprintf does nothing. */
5450 if (fmt_str[0] == '\0')
5452 /* Evaluate and ignore FILE* argument for side-effects. */
5453 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5457 /* When "string" doesn't contain %, replace all cases of
5458 fprintf(stream,string) with fputs(string,stream). The fputs
5459 builtin will take care of special cases like length == 1. */
5461 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5466 if (TREE_CODE (fn) == CALL_EXPR)
5467 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5468 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5471 /* Expand a call EXP to sprintf. Return NULL_RTX if
5472 a normal call should be emitted rather than expanding the function
5473 inline. If convenient, the result should be placed in TARGET with
5477 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5480 const char *fmt_str;
5481 int nargs = call_expr_nargs (exp);
5483 /* Verify the required arguments in the original call. */
5486 dest = CALL_EXPR_ARG (exp, 0);
5487 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5489 fmt = CALL_EXPR_ARG (exp, 0);
5490 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5493 /* Check whether the format is a literal string constant. */
5494 fmt_str = c_getstr (fmt);
5495 if (fmt_str == NULL)
5498 if (!init_target_chars ())
5501 /* If the format doesn't contain % args or %%, use strcpy. */
5502 if (strchr (fmt_str, target_percent) == 0)
5504 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5507 if ((nargs > 2) || ! fn)
5509 expand_expr (build_call_expr (fn, 2, dest, fmt),
5510 const0_rtx, VOIDmode, EXPAND_NORMAL);
5511 if (target == const0_rtx)
5513 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5514 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5516 /* If the format is "%s", use strcpy if the result isn't used. */
5517 else if (strcmp (fmt_str, target_percent_s) == 0)
5520 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5526 arg = CALL_EXPR_ARG (exp, 2);
5527 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5530 if (target != const0_rtx)
5532 len = c_strlen (arg, 1);
5533 if (! len || TREE_CODE (len) != INTEGER_CST)
5539 expand_expr (build_call_expr (fn, 2, dest, arg),
5540 const0_rtx, VOIDmode, EXPAND_NORMAL);
5542 if (target == const0_rtx)
5544 return expand_expr (len, target, mode, EXPAND_NORMAL);
5550 /* Expand a call to either the entry or exit function profiler. */
5553 expand_builtin_profile_func (bool exitp)
5557 this = DECL_RTL (current_function_decl);
5558 gcc_assert (MEM_P (this));
5559 this = XEXP (this, 0);
5562 which = profile_function_exit_libfunc;
5564 which = profile_function_entry_libfunc;
5566 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5567 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5574 /* Expand a call to __builtin___clear_cache. */
5577 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5579 #ifndef HAVE_clear_cache
5580 #ifdef CLEAR_INSN_CACHE
5581 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5582 does something. Just do the default expansion to a call to
5586 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5587 does nothing. There is no need to call it. Do nothing. */
5589 #endif /* CLEAR_INSN_CACHE */
5591 /* We have a "clear_cache" insn, and it will handle everything. */
5593 rtx begin_rtx, end_rtx;
5594 enum insn_code icode;
5596 /* We must not expand to a library call. If we did, any
5597 fallback library function in libgcc that might contain a call to
5598 __builtin___clear_cache() would recurse infinitely. */
5599 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5601 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5605 if (HAVE_clear_cache)
5607 icode = CODE_FOR_clear_cache;
5609 begin = CALL_EXPR_ARG (exp, 0);
5610 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5611 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5612 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5613 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5615 end = CALL_EXPR_ARG (exp, 1);
5616 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5617 end_rtx = convert_memory_address (Pmode, end_rtx);
5618 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5619 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5621 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5624 #endif /* HAVE_clear_cache */
5627 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5630 round_trampoline_addr (rtx tramp)
5632 rtx temp, addend, mask;
5634 /* If we don't need too much alignment, we'll have been guaranteed
5635 proper alignment by get_trampoline_type. */
5636 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5639 /* Round address up to desired boundary. */
5640 temp = gen_reg_rtx (Pmode);
5641 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5642 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5644 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5645 temp, 0, OPTAB_LIB_WIDEN);
5646 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5647 temp, 0, OPTAB_LIB_WIDEN);
5653 expand_builtin_init_trampoline (tree exp)
5655 tree t_tramp, t_func, t_chain;
5656 rtx r_tramp, r_func, r_chain;
5657 #ifdef TRAMPOLINE_TEMPLATE
5661 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5662 POINTER_TYPE, VOID_TYPE))
5665 t_tramp = CALL_EXPR_ARG (exp, 0);
5666 t_func = CALL_EXPR_ARG (exp, 1);
5667 t_chain = CALL_EXPR_ARG (exp, 2);
5669 r_tramp = expand_normal (t_tramp);
5670 r_func = expand_normal (t_func);
5671 r_chain = expand_normal (t_chain);
5673 /* Generate insns to initialize the trampoline. */
5674 r_tramp = round_trampoline_addr (r_tramp);
5675 #ifdef TRAMPOLINE_TEMPLATE
5676 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5677 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5678 emit_block_move (blktramp, assemble_trampoline_template (),
5679 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5681 trampolines_created = 1;
5682 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5688 expand_builtin_adjust_trampoline (tree exp)
5692 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5695 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5696 tramp = round_trampoline_addr (tramp);
5697 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5698 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5704 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5705 function. The function first checks whether the back end provides
5706 an insn to implement signbit for the respective mode. If not, it
5707 checks whether the floating point format of the value is such that
5708 the sign bit can be extracted. If that is not the case, the
5709 function returns NULL_RTX to indicate that a normal call should be
5710 emitted rather than expanding the function in-line. EXP is the
5711 expression that is a call to the builtin function; if convenient,
5712 the result should be placed in TARGET. */
5714 expand_builtin_signbit (tree exp, rtx target)
5716 const struct real_format *fmt;
5717 enum machine_mode fmode, imode, rmode;
5718 HOST_WIDE_INT hi, lo;
5721 enum insn_code icode;
5724 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5727 arg = CALL_EXPR_ARG (exp, 0);
5728 fmode = TYPE_MODE (TREE_TYPE (arg));
5729 rmode = TYPE_MODE (TREE_TYPE (exp));
5730 fmt = REAL_MODE_FORMAT (fmode);
5732 arg = builtin_save_expr (arg);
5734 /* Expand the argument yielding a RTX expression. */
5735 temp = expand_normal (arg);
5737 /* Check if the back end provides an insn that handles signbit for the
5739 icode = signbit_optab->handlers [(int) fmode].insn_code;
5740 if (icode != CODE_FOR_nothing)
5742 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5743 emit_unop_insn (icode, target, temp, UNKNOWN);
5747 /* For floating point formats without a sign bit, implement signbit
5749 bitpos = fmt->signbit_ro;
5752 /* But we can't do this if the format supports signed zero. */
5753 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5756 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5757 build_real (TREE_TYPE (arg), dconst0));
5758 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5761 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5763 imode = int_mode_for_mode (fmode);
5764 if (imode == BLKmode)
5766 temp = gen_lowpart (imode, temp);
5771 /* Handle targets with different FP word orders. */
5772 if (FLOAT_WORDS_BIG_ENDIAN)
5773 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5775 word = bitpos / BITS_PER_WORD;
5776 temp = operand_subword_force (temp, word, fmode);
5777 bitpos = bitpos % BITS_PER_WORD;
5780 /* Force the intermediate word_mode (or narrower) result into a
5781 register. This avoids attempting to create paradoxical SUBREGs
5782 of floating point modes below. */
5783 temp = force_reg (imode, temp);
5785 /* If the bitpos is within the "result mode" lowpart, the operation
5786 can be implement with a single bitwise AND. Otherwise, we need
5787 a right shift and an AND. */
5789 if (bitpos < GET_MODE_BITSIZE (rmode))
5791 if (bitpos < HOST_BITS_PER_WIDE_INT)
5794 lo = (HOST_WIDE_INT) 1 << bitpos;
5798 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5802 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5803 temp = gen_lowpart (rmode, temp);
5804 temp = expand_binop (rmode, and_optab, temp,
5805 immed_double_const (lo, hi, rmode),
5806 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5810 /* Perform a logical right shift to place the signbit in the least
5811 significant bit, then truncate the result to the desired mode
5812 and mask just this bit. */
5813 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5814 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5815 temp = gen_lowpart (rmode, temp);
5816 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5817 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5823 /* Expand fork or exec calls. TARGET is the desired target of the
5824 call. EXP is the call. FN is the
5825 identificator of the actual function. IGNORE is nonzero if the
5826 value is to be ignored. */
5829 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5834 /* If we are not profiling, just call the function. */
5835 if (!profile_arc_flag)
5838 /* Otherwise call the wrapper. This should be equivalent for the rest of
5839 compiler, so the code does not diverge, and the wrapper may run the
5840 code necessary for keeping the profiling sane. */
5842 switch (DECL_FUNCTION_CODE (fn))
5845 id = get_identifier ("__gcov_fork");
5848 case BUILT_IN_EXECL:
5849 id = get_identifier ("__gcov_execl");
5852 case BUILT_IN_EXECV:
5853 id = get_identifier ("__gcov_execv");
5856 case BUILT_IN_EXECLP:
5857 id = get_identifier ("__gcov_execlp");
5860 case BUILT_IN_EXECLE:
5861 id = get_identifier ("__gcov_execle");
5864 case BUILT_IN_EXECVP:
5865 id = get_identifier ("__gcov_execvp");
5868 case BUILT_IN_EXECVE:
5869 id = get_identifier ("__gcov_execve");
5876 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5877 DECL_EXTERNAL (decl) = 1;
5878 TREE_PUBLIC (decl) = 1;
5879 DECL_ARTIFICIAL (decl) = 1;
5880 TREE_NOTHROW (decl) = 1;
5881 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5882 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5883 call = rewrite_call_expr (exp, 0, decl, 0);
5884 return expand_call (call, target, ignore);
5889 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5890 the pointer in these functions is void*, the tree optimizers may remove
5891 casts. The mode computed in expand_builtin isn't reliable either, due
5892 to __sync_bool_compare_and_swap.
5894 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5895 group of builtins. This gives us log2 of the mode size. */
5897 static inline enum machine_mode
5898 get_builtin_sync_mode (int fcode_diff)
5900 /* The size is not negotiable, so ask not to get BLKmode in return
5901 if the target indicates that a smaller size would be better. */
5902 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5905 /* Expand the memory expression LOC and return the appropriate memory operand
5906 for the builtin_sync operations. */
5909 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5913 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5915 /* Note that we explicitly do not want any alias information for this
5916 memory, so that we kill all other live memories. Otherwise we don't
5917 satisfy the full barrier semantics of the intrinsic. */
5918 mem = validize_mem (gen_rtx_MEM (mode, addr));
5920 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5921 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5922 MEM_VOLATILE_P (mem) = 1;
5927 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5928 EXP is the CALL_EXPR. CODE is the rtx code
5929 that corresponds to the arithmetic or logical operation from the name;
5930 an exception here is that NOT actually means NAND. TARGET is an optional
5931 place for us to store the results; AFTER is true if this is the
5932 fetch_and_xxx form. IGNORE is true if we don't actually care about
5933 the result of the operation at all. */
5936 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5937 enum rtx_code code, bool after,
5938 rtx target, bool ignore)
5941 enum machine_mode old_mode;
5943 /* Expand the operands. */
5944 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5946 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5947 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5948 of CONST_INTs, where we know the old_mode only from the call argument. */
5949 old_mode = GET_MODE (val);
5950 if (old_mode == VOIDmode)
5951 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5952 val = convert_modes (mode, old_mode, val, 1);
5955 return expand_sync_operation (mem, val, code);
5957 return expand_sync_fetch_operation (mem, val, code, after, target);
5960 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5961 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5962 true if this is the boolean form. TARGET is a place for us to store the
5963 results; this is NOT optional if IS_BOOL is true. */
5966 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5967 bool is_bool, rtx target)
5969 rtx old_val, new_val, mem;
5970 enum machine_mode old_mode;
5972 /* Expand the operands. */
5973 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5976 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5977 mode, EXPAND_NORMAL);
5978 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5979 of CONST_INTs, where we know the old_mode only from the call argument. */
5980 old_mode = GET_MODE (old_val);
5981 if (old_mode == VOIDmode)
5982 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5983 old_val = convert_modes (mode, old_mode, old_val, 1);
5985 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5986 mode, EXPAND_NORMAL);
5987 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5988 of CONST_INTs, where we know the old_mode only from the call argument. */
5989 old_mode = GET_MODE (new_val);
5990 if (old_mode == VOIDmode)
5991 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5992 new_val = convert_modes (mode, old_mode, new_val, 1);
5995 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5997 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6000 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6001 general form is actually an atomic exchange, and some targets only
6002 support a reduced form with the second argument being a constant 1.
6003 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6007 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6011 enum machine_mode old_mode;
6013 /* Expand the operands. */
6014 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6015 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6016 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6017 of CONST_INTs, where we know the old_mode only from the call argument. */
6018 old_mode = GET_MODE (val);
6019 if (old_mode == VOIDmode)
6020 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6021 val = convert_modes (mode, old_mode, val, 1);
6023 return expand_sync_lock_test_and_set (mem, val, target);
6026 /* Expand the __sync_synchronize intrinsic. */
6029 expand_builtin_synchronize (void)
6033 #ifdef HAVE_memory_barrier
6034 if (HAVE_memory_barrier)
6036 emit_insn (gen_memory_barrier ());
6041 if (synchronize_libfunc != NULL_RTX)
6043 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6047 /* If no explicit memory barrier instruction is available, create an
6048 empty asm stmt with a memory clobber. */
6049 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6050 tree_cons (NULL, build_string (6, "memory"), NULL));
6051 ASM_VOLATILE_P (x) = 1;
6052 expand_asm_expr (x);
6055 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6058 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6060 enum insn_code icode;
6062 rtx val = const0_rtx;
6064 /* Expand the operands. */
6065 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6067 /* If there is an explicit operation in the md file, use it. */
6068 icode = sync_lock_release[mode];
6069 if (icode != CODE_FOR_nothing)
6071 if (!insn_data[icode].operand[1].predicate (val, mode))
6072 val = force_reg (mode, val);
6074 insn = GEN_FCN (icode) (mem, val);
6082 /* Otherwise we can implement this operation by emitting a barrier
6083 followed by a store of zero. */
6084 expand_builtin_synchronize ();
6085 emit_move_insn (mem, val);
6088 /* Expand an expression EXP that calls a built-in function,
6089 with result going to TARGET if that's convenient
6090 (and in mode MODE if that's convenient).
6091 SUBTARGET may be used as the target for computing one of EXP's operands.
6092 IGNORE is nonzero if the value is to be ignored. */
6095 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6098 tree fndecl = get_callee_fndecl (exp);
6099 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6100 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6102 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6103 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6105 /* When not optimizing, generate calls to library functions for a certain
6108 && !called_as_built_in (fndecl)
6109 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6110 && fcode != BUILT_IN_ALLOCA)
6111 return expand_call (exp, target, ignore);
6113 /* The built-in function expanders test for target == const0_rtx
6114 to determine whether the function's result will be ignored. */
6116 target = const0_rtx;
6118 /* If the result of a pure or const built-in function is ignored, and
6119 none of its arguments are volatile, we can avoid expanding the
6120 built-in call and just evaluate the arguments for side-effects. */
6121 if (target == const0_rtx
6122 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6124 bool volatilep = false;
6126 call_expr_arg_iterator iter;
6128 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6129 if (TREE_THIS_VOLATILE (arg))
6137 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6138 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6145 CASE_FLT_FN (BUILT_IN_FABS):
6146 target = expand_builtin_fabs (exp, target, subtarget);
6151 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6152 target = expand_builtin_copysign (exp, target, subtarget);
6157 /* Just do a normal library call if we were unable to fold
6159 CASE_FLT_FN (BUILT_IN_CABS):
6162 CASE_FLT_FN (BUILT_IN_EXP):
6163 CASE_FLT_FN (BUILT_IN_EXP10):
6164 CASE_FLT_FN (BUILT_IN_POW10):
6165 CASE_FLT_FN (BUILT_IN_EXP2):
6166 CASE_FLT_FN (BUILT_IN_EXPM1):
6167 CASE_FLT_FN (BUILT_IN_LOGB):
6168 CASE_FLT_FN (BUILT_IN_LOG):
6169 CASE_FLT_FN (BUILT_IN_LOG10):
6170 CASE_FLT_FN (BUILT_IN_LOG2):
6171 CASE_FLT_FN (BUILT_IN_LOG1P):
6172 CASE_FLT_FN (BUILT_IN_TAN):
6173 CASE_FLT_FN (BUILT_IN_ASIN):
6174 CASE_FLT_FN (BUILT_IN_ACOS):
6175 CASE_FLT_FN (BUILT_IN_ATAN):
6176 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6177 because of possible accuracy problems. */
6178 if (! flag_unsafe_math_optimizations)
6180 CASE_FLT_FN (BUILT_IN_SQRT):
6181 CASE_FLT_FN (BUILT_IN_FLOOR):
6182 CASE_FLT_FN (BUILT_IN_CEIL):
6183 CASE_FLT_FN (BUILT_IN_TRUNC):
6184 CASE_FLT_FN (BUILT_IN_ROUND):
6185 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6186 CASE_FLT_FN (BUILT_IN_RINT):
6187 target = expand_builtin_mathfn (exp, target, subtarget);
6192 CASE_FLT_FN (BUILT_IN_ILOGB):
6193 if (! flag_unsafe_math_optimizations)
6195 CASE_FLT_FN (BUILT_IN_ISINF):
6196 CASE_FLT_FN (BUILT_IN_FINITE):
6197 case BUILT_IN_ISFINITE:
6198 case BUILT_IN_ISNORMAL:
6199 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6204 CASE_FLT_FN (BUILT_IN_LCEIL):
6205 CASE_FLT_FN (BUILT_IN_LLCEIL):
6206 CASE_FLT_FN (BUILT_IN_LFLOOR):
6207 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6208 target = expand_builtin_int_roundingfn (exp, target);
6213 CASE_FLT_FN (BUILT_IN_LRINT):
6214 CASE_FLT_FN (BUILT_IN_LLRINT):
6215 CASE_FLT_FN (BUILT_IN_LROUND):
6216 CASE_FLT_FN (BUILT_IN_LLROUND):
6217 target = expand_builtin_int_roundingfn_2 (exp, target);
6222 CASE_FLT_FN (BUILT_IN_POW):
6223 target = expand_builtin_pow (exp, target, subtarget);
6228 CASE_FLT_FN (BUILT_IN_POWI):
6229 target = expand_builtin_powi (exp, target, subtarget);
6234 CASE_FLT_FN (BUILT_IN_ATAN2):
6235 CASE_FLT_FN (BUILT_IN_LDEXP):
6236 CASE_FLT_FN (BUILT_IN_SCALB):
6237 CASE_FLT_FN (BUILT_IN_SCALBN):
6238 CASE_FLT_FN (BUILT_IN_SCALBLN):
6239 if (! flag_unsafe_math_optimizations)
6242 CASE_FLT_FN (BUILT_IN_FMOD):
6243 CASE_FLT_FN (BUILT_IN_REMAINDER):
6244 CASE_FLT_FN (BUILT_IN_DREM):
6245 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6250 CASE_FLT_FN (BUILT_IN_CEXPI):
6251 target = expand_builtin_cexpi (exp, target, subtarget);
6252 gcc_assert (target);
6255 CASE_FLT_FN (BUILT_IN_SIN):
6256 CASE_FLT_FN (BUILT_IN_COS):
6257 if (! flag_unsafe_math_optimizations)
6259 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6264 CASE_FLT_FN (BUILT_IN_SINCOS):
6265 if (! flag_unsafe_math_optimizations)
6267 target = expand_builtin_sincos (exp);
6272 case BUILT_IN_APPLY_ARGS:
6273 return expand_builtin_apply_args ();
6275 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6276 FUNCTION with a copy of the parameters described by
6277 ARGUMENTS, and ARGSIZE. It returns a block of memory
6278 allocated on the stack into which is stored all the registers
6279 that might possibly be used for returning the result of a
6280 function. ARGUMENTS is the value returned by
6281 __builtin_apply_args. ARGSIZE is the number of bytes of
6282 arguments that must be copied. ??? How should this value be
6283 computed? We'll also need a safe worst case value for varargs
6285 case BUILT_IN_APPLY:
6286 if (!validate_arglist (exp, POINTER_TYPE,
6287 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6288 && !validate_arglist (exp, REFERENCE_TYPE,
6289 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6295 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6296 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6297 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6299 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6302 /* __builtin_return (RESULT) causes the function to return the
6303 value described by RESULT. RESULT is address of the block of
6304 memory returned by __builtin_apply. */
6305 case BUILT_IN_RETURN:
6306 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6307 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6310 case BUILT_IN_SAVEREGS:
6311 return expand_builtin_saveregs ();
6313 case BUILT_IN_ARGS_INFO:
6314 return expand_builtin_args_info (exp);
6316 case BUILT_IN_VA_ARG_PACK:
6317 /* All valid uses of __builtin_va_arg_pack () are removed during
6319 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6322 case BUILT_IN_VA_ARG_PACK_LEN:
6323 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6325 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6328 /* Return the address of the first anonymous stack arg. */
6329 case BUILT_IN_NEXT_ARG:
6330 if (fold_builtin_next_arg (exp, false))
6332 return expand_builtin_next_arg ();
6334 case BUILT_IN_CLEAR_CACHE:
6335 target = expand_builtin___clear_cache (exp);
6340 case BUILT_IN_CLASSIFY_TYPE:
6341 return expand_builtin_classify_type (exp);
6343 case BUILT_IN_CONSTANT_P:
6346 case BUILT_IN_FRAME_ADDRESS:
6347 case BUILT_IN_RETURN_ADDRESS:
6348 return expand_builtin_frame_address (fndecl, exp);
6350 /* Returns the address of the area where the structure is returned.
6352 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6353 if (call_expr_nargs (exp) != 0
6354 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6355 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6358 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6360 case BUILT_IN_ALLOCA:
6361 target = expand_builtin_alloca (exp, target);
6366 case BUILT_IN_STACK_SAVE:
6367 return expand_stack_save ();
6369 case BUILT_IN_STACK_RESTORE:
6370 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6373 case BUILT_IN_BSWAP32:
6374 case BUILT_IN_BSWAP64:
6375 target = expand_builtin_bswap (exp, target, subtarget);
6381 CASE_INT_FN (BUILT_IN_FFS):
6382 case BUILT_IN_FFSIMAX:
6383 target = expand_builtin_unop (target_mode, exp, target,
6384 subtarget, ffs_optab);
6389 CASE_INT_FN (BUILT_IN_CLZ):
6390 case BUILT_IN_CLZIMAX:
6391 target = expand_builtin_unop (target_mode, exp, target,
6392 subtarget, clz_optab);
6397 CASE_INT_FN (BUILT_IN_CTZ):
6398 case BUILT_IN_CTZIMAX:
6399 target = expand_builtin_unop (target_mode, exp, target,
6400 subtarget, ctz_optab);
6405 CASE_INT_FN (BUILT_IN_POPCOUNT):
6406 case BUILT_IN_POPCOUNTIMAX:
6407 target = expand_builtin_unop (target_mode, exp, target,
6408 subtarget, popcount_optab);
6413 CASE_INT_FN (BUILT_IN_PARITY):
6414 case BUILT_IN_PARITYIMAX:
6415 target = expand_builtin_unop (target_mode, exp, target,
6416 subtarget, parity_optab);
6421 case BUILT_IN_STRLEN:
6422 target = expand_builtin_strlen (exp, target, target_mode);
6427 case BUILT_IN_STRCPY:
6428 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6433 case BUILT_IN_STRNCPY:
6434 target = expand_builtin_strncpy (exp, target, mode);
6439 case BUILT_IN_STPCPY:
6440 target = expand_builtin_stpcpy (exp, target, mode);
6445 case BUILT_IN_STRCAT:
6446 target = expand_builtin_strcat (fndecl, exp, target, mode);
6451 case BUILT_IN_STRNCAT:
6452 target = expand_builtin_strncat (exp, target, mode);
6457 case BUILT_IN_STRSPN:
6458 target = expand_builtin_strspn (exp, target, mode);
6463 case BUILT_IN_STRCSPN:
6464 target = expand_builtin_strcspn (exp, target, mode);
6469 case BUILT_IN_STRSTR:
6470 target = expand_builtin_strstr (exp, target, mode);
6475 case BUILT_IN_STRPBRK:
6476 target = expand_builtin_strpbrk (exp, target, mode);
6481 case BUILT_IN_INDEX:
6482 case BUILT_IN_STRCHR:
6483 target = expand_builtin_strchr (exp, target, mode);
6488 case BUILT_IN_RINDEX:
6489 case BUILT_IN_STRRCHR:
6490 target = expand_builtin_strrchr (exp, target, mode);
6495 case BUILT_IN_MEMCPY:
6496 target = expand_builtin_memcpy (exp, target, mode);
6501 case BUILT_IN_MEMPCPY:
6502 target = expand_builtin_mempcpy (exp, target, mode);
6507 case BUILT_IN_MEMMOVE:
6508 target = expand_builtin_memmove (exp, target, mode, ignore);
6513 case BUILT_IN_BCOPY:
6514 target = expand_builtin_bcopy (exp, ignore);
6519 case BUILT_IN_MEMSET:
6520 target = expand_builtin_memset (exp, target, mode);
6525 case BUILT_IN_BZERO:
6526 target = expand_builtin_bzero (exp);
6531 case BUILT_IN_STRCMP:
6532 target = expand_builtin_strcmp (exp, target, mode);
6537 case BUILT_IN_STRNCMP:
6538 target = expand_builtin_strncmp (exp, target, mode);
6543 case BUILT_IN_MEMCHR:
6544 target = expand_builtin_memchr (exp, target, mode);
6550 case BUILT_IN_MEMCMP:
6551 target = expand_builtin_memcmp (exp, target, mode);
6556 case BUILT_IN_SETJMP:
6557 /* This should have been lowered to the builtins below. */
6560 case BUILT_IN_SETJMP_SETUP:
6561 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6562 and the receiver label. */
6563 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6565 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6566 VOIDmode, EXPAND_NORMAL);
6567 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6568 rtx label_r = label_rtx (label);
6570 /* This is copied from the handling of non-local gotos. */
6571 expand_builtin_setjmp_setup (buf_addr, label_r);
6572 nonlocal_goto_handler_labels
6573 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6574 nonlocal_goto_handler_labels);
6575 /* ??? Do not let expand_label treat us as such since we would
6576 not want to be both on the list of non-local labels and on
6577 the list of forced labels. */
6578 FORCED_LABEL (label) = 0;
6583 case BUILT_IN_SETJMP_DISPATCHER:
6584 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6585 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6587 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6588 rtx label_r = label_rtx (label);
6590 /* Remove the dispatcher label from the list of non-local labels
6591 since the receiver labels have been added to it above. */
6592 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6597 case BUILT_IN_SETJMP_RECEIVER:
6598 /* __builtin_setjmp_receiver is passed the receiver label. */
6599 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6601 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6602 rtx label_r = label_rtx (label);
6604 expand_builtin_setjmp_receiver (label_r);
6609 /* __builtin_longjmp is passed a pointer to an array of five words.
6610 It's similar to the C library longjmp function but works with
6611 __builtin_setjmp above. */
6612 case BUILT_IN_LONGJMP:
6613 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6615 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6616 VOIDmode, EXPAND_NORMAL);
6617 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6619 if (value != const1_rtx)
6621 error ("%<__builtin_longjmp%> second argument must be 1");
6625 expand_builtin_longjmp (buf_addr, value);
6630 case BUILT_IN_NONLOCAL_GOTO:
6631 target = expand_builtin_nonlocal_goto (exp);
6636 /* This updates the setjmp buffer that is its argument with the value
6637 of the current stack pointer. */
6638 case BUILT_IN_UPDATE_SETJMP_BUF:
6639 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6642 = expand_normal (CALL_EXPR_ARG (exp, 0));
6644 expand_builtin_update_setjmp_buf (buf_addr);
6650 expand_builtin_trap ();
6653 case BUILT_IN_PRINTF:
6654 target = expand_builtin_printf (exp, target, mode, false);
6659 case BUILT_IN_PRINTF_UNLOCKED:
6660 target = expand_builtin_printf (exp, target, mode, true);
6665 case BUILT_IN_FPUTS:
6666 target = expand_builtin_fputs (exp, target, false);
6670 case BUILT_IN_FPUTS_UNLOCKED:
6671 target = expand_builtin_fputs (exp, target, true);
6676 case BUILT_IN_FPRINTF:
6677 target = expand_builtin_fprintf (exp, target, mode, false);
6682 case BUILT_IN_FPRINTF_UNLOCKED:
6683 target = expand_builtin_fprintf (exp, target, mode, true);
6688 case BUILT_IN_SPRINTF:
6689 target = expand_builtin_sprintf (exp, target, mode);
6694 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6695 case BUILT_IN_SIGNBITD32:
6696 case BUILT_IN_SIGNBITD64:
6697 case BUILT_IN_SIGNBITD128:
6698 target = expand_builtin_signbit (exp, target);
6703 /* Various hooks for the DWARF 2 __throw routine. */
6704 case BUILT_IN_UNWIND_INIT:
6705 expand_builtin_unwind_init ();
6707 case BUILT_IN_DWARF_CFA:
6708 return virtual_cfa_rtx;
6709 #ifdef DWARF2_UNWIND_INFO
6710 case BUILT_IN_DWARF_SP_COLUMN:
6711 return expand_builtin_dwarf_sp_column ();
6712 case BUILT_IN_INIT_DWARF_REG_SIZES:
6713 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6716 case BUILT_IN_FROB_RETURN_ADDR:
6717 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6718 case BUILT_IN_EXTRACT_RETURN_ADDR:
6719 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6720 case BUILT_IN_EH_RETURN:
6721 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6722 CALL_EXPR_ARG (exp, 1));
6724 #ifdef EH_RETURN_DATA_REGNO
6725 case BUILT_IN_EH_RETURN_DATA_REGNO:
6726 return expand_builtin_eh_return_data_regno (exp);
6728 case BUILT_IN_EXTEND_POINTER:
6729 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6731 case BUILT_IN_VA_START:
6732 return expand_builtin_va_start (exp);
6733 case BUILT_IN_VA_END:
6734 return expand_builtin_va_end (exp);
6735 case BUILT_IN_VA_COPY:
6736 return expand_builtin_va_copy (exp);
6737 case BUILT_IN_EXPECT:
6738 return expand_builtin_expect (exp, target);
6739 case BUILT_IN_PREFETCH:
6740 expand_builtin_prefetch (exp);
6743 case BUILT_IN_PROFILE_FUNC_ENTER:
6744 return expand_builtin_profile_func (false);
6745 case BUILT_IN_PROFILE_FUNC_EXIT:
6746 return expand_builtin_profile_func (true);
6748 case BUILT_IN_INIT_TRAMPOLINE:
6749 return expand_builtin_init_trampoline (exp);
6750 case BUILT_IN_ADJUST_TRAMPOLINE:
6751 return expand_builtin_adjust_trampoline (exp);
6754 case BUILT_IN_EXECL:
6755 case BUILT_IN_EXECV:
6756 case BUILT_IN_EXECLP:
6757 case BUILT_IN_EXECLE:
6758 case BUILT_IN_EXECVP:
6759 case BUILT_IN_EXECVE:
6760 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6765 case BUILT_IN_FETCH_AND_ADD_1:
6766 case BUILT_IN_FETCH_AND_ADD_2:
6767 case BUILT_IN_FETCH_AND_ADD_4:
6768 case BUILT_IN_FETCH_AND_ADD_8:
6769 case BUILT_IN_FETCH_AND_ADD_16:
6770 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6771 target = expand_builtin_sync_operation (mode, exp, PLUS,
6772 false, target, ignore);
6777 case BUILT_IN_FETCH_AND_SUB_1:
6778 case BUILT_IN_FETCH_AND_SUB_2:
6779 case BUILT_IN_FETCH_AND_SUB_4:
6780 case BUILT_IN_FETCH_AND_SUB_8:
6781 case BUILT_IN_FETCH_AND_SUB_16:
6782 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6783 target = expand_builtin_sync_operation (mode, exp, MINUS,
6784 false, target, ignore);
6789 case BUILT_IN_FETCH_AND_OR_1:
6790 case BUILT_IN_FETCH_AND_OR_2:
6791 case BUILT_IN_FETCH_AND_OR_4:
6792 case BUILT_IN_FETCH_AND_OR_8:
6793 case BUILT_IN_FETCH_AND_OR_16:
6794 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6795 target = expand_builtin_sync_operation (mode, exp, IOR,
6796 false, target, ignore);
6801 case BUILT_IN_FETCH_AND_AND_1:
6802 case BUILT_IN_FETCH_AND_AND_2:
6803 case BUILT_IN_FETCH_AND_AND_4:
6804 case BUILT_IN_FETCH_AND_AND_8:
6805 case BUILT_IN_FETCH_AND_AND_16:
6806 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6807 target = expand_builtin_sync_operation (mode, exp, AND,
6808 false, target, ignore);
6813 case BUILT_IN_FETCH_AND_XOR_1:
6814 case BUILT_IN_FETCH_AND_XOR_2:
6815 case BUILT_IN_FETCH_AND_XOR_4:
6816 case BUILT_IN_FETCH_AND_XOR_8:
6817 case BUILT_IN_FETCH_AND_XOR_16:
6818 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6819 target = expand_builtin_sync_operation (mode, exp, XOR,
6820 false, target, ignore);
6825 case BUILT_IN_FETCH_AND_NAND_1:
6826 case BUILT_IN_FETCH_AND_NAND_2:
6827 case BUILT_IN_FETCH_AND_NAND_4:
6828 case BUILT_IN_FETCH_AND_NAND_8:
6829 case BUILT_IN_FETCH_AND_NAND_16:
6830 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6831 target = expand_builtin_sync_operation (mode, exp, NOT,
6832 false, target, ignore);
6837 case BUILT_IN_ADD_AND_FETCH_1:
6838 case BUILT_IN_ADD_AND_FETCH_2:
6839 case BUILT_IN_ADD_AND_FETCH_4:
6840 case BUILT_IN_ADD_AND_FETCH_8:
6841 case BUILT_IN_ADD_AND_FETCH_16:
6842 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6843 target = expand_builtin_sync_operation (mode, exp, PLUS,
6844 true, target, ignore);
6849 case BUILT_IN_SUB_AND_FETCH_1:
6850 case BUILT_IN_SUB_AND_FETCH_2:
6851 case BUILT_IN_SUB_AND_FETCH_4:
6852 case BUILT_IN_SUB_AND_FETCH_8:
6853 case BUILT_IN_SUB_AND_FETCH_16:
6854 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6855 target = expand_builtin_sync_operation (mode, exp, MINUS,
6856 true, target, ignore);
6861 case BUILT_IN_OR_AND_FETCH_1:
6862 case BUILT_IN_OR_AND_FETCH_2:
6863 case BUILT_IN_OR_AND_FETCH_4:
6864 case BUILT_IN_OR_AND_FETCH_8:
6865 case BUILT_IN_OR_AND_FETCH_16:
6866 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6867 target = expand_builtin_sync_operation (mode, exp, IOR,
6868 true, target, ignore);
6873 case BUILT_IN_AND_AND_FETCH_1:
6874 case BUILT_IN_AND_AND_FETCH_2:
6875 case BUILT_IN_AND_AND_FETCH_4:
6876 case BUILT_IN_AND_AND_FETCH_8:
6877 case BUILT_IN_AND_AND_FETCH_16:
6878 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6879 target = expand_builtin_sync_operation (mode, exp, AND,
6880 true, target, ignore);
6885 case BUILT_IN_XOR_AND_FETCH_1:
6886 case BUILT_IN_XOR_AND_FETCH_2:
6887 case BUILT_IN_XOR_AND_FETCH_4:
6888 case BUILT_IN_XOR_AND_FETCH_8:
6889 case BUILT_IN_XOR_AND_FETCH_16:
6890 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6891 target = expand_builtin_sync_operation (mode, exp, XOR,
6892 true, target, ignore);
6897 case BUILT_IN_NAND_AND_FETCH_1:
6898 case BUILT_IN_NAND_AND_FETCH_2:
6899 case BUILT_IN_NAND_AND_FETCH_4:
6900 case BUILT_IN_NAND_AND_FETCH_8:
6901 case BUILT_IN_NAND_AND_FETCH_16:
6902 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6903 target = expand_builtin_sync_operation (mode, exp, NOT,
6904 true, target, ignore);
6909 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6910 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6911 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6912 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6913 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6914 if (mode == VOIDmode)
6915 mode = TYPE_MODE (boolean_type_node);
6916 if (!target || !register_operand (target, mode))
6917 target = gen_reg_rtx (mode);
6919 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6920 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6925 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6926 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6927 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6928 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6929 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6930 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6931 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6936 case BUILT_IN_LOCK_TEST_AND_SET_1:
6937 case BUILT_IN_LOCK_TEST_AND_SET_2:
6938 case BUILT_IN_LOCK_TEST_AND_SET_4:
6939 case BUILT_IN_LOCK_TEST_AND_SET_8:
6940 case BUILT_IN_LOCK_TEST_AND_SET_16:
6941 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6942 target = expand_builtin_lock_test_and_set (mode, exp, target);
6947 case BUILT_IN_LOCK_RELEASE_1:
6948 case BUILT_IN_LOCK_RELEASE_2:
6949 case BUILT_IN_LOCK_RELEASE_4:
6950 case BUILT_IN_LOCK_RELEASE_8:
6951 case BUILT_IN_LOCK_RELEASE_16:
6952 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6953 expand_builtin_lock_release (mode, exp);
6956 case BUILT_IN_SYNCHRONIZE:
6957 expand_builtin_synchronize ();
6960 case BUILT_IN_OBJECT_SIZE:
6961 return expand_builtin_object_size (exp);
6963 case BUILT_IN_MEMCPY_CHK:
6964 case BUILT_IN_MEMPCPY_CHK:
6965 case BUILT_IN_MEMMOVE_CHK:
6966 case BUILT_IN_MEMSET_CHK:
6967 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6972 case BUILT_IN_STRCPY_CHK:
6973 case BUILT_IN_STPCPY_CHK:
6974 case BUILT_IN_STRNCPY_CHK:
6975 case BUILT_IN_STRCAT_CHK:
6976 case BUILT_IN_STRNCAT_CHK:
6977 case BUILT_IN_SNPRINTF_CHK:
6978 case BUILT_IN_VSNPRINTF_CHK:
6979 maybe_emit_chk_warning (exp, fcode);
6982 case BUILT_IN_SPRINTF_CHK:
6983 case BUILT_IN_VSPRINTF_CHK:
6984 maybe_emit_sprintf_chk_warning (exp, fcode);
6987 default: /* just do library call, if unknown builtin */
6991 /* The switch statement above can drop through to cause the function
6992 to be called normally. */
6993 return expand_call (exp, target, ignore);
6996 /* Determine whether a tree node represents a call to a built-in
6997 function. If the tree T is a call to a built-in function with
6998 the right number of arguments of the appropriate types, return
6999 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7000 Otherwise the return value is END_BUILTINS. */
7002 enum built_in_function
7003 builtin_mathfn_code (const_tree t)
7005 const_tree fndecl, arg, parmlist;
7006 const_tree argtype, parmtype;
7007 const_call_expr_arg_iterator iter;
7009 if (TREE_CODE (t) != CALL_EXPR
7010 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7011 return END_BUILTINS;
7013 fndecl = get_callee_fndecl (t);
7014 if (fndecl == NULL_TREE
7015 || TREE_CODE (fndecl) != FUNCTION_DECL
7016 || ! DECL_BUILT_IN (fndecl)
7017 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7018 return END_BUILTINS;
7020 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7021 init_const_call_expr_arg_iterator (t, &iter);
7022 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7024 /* If a function doesn't take a variable number of arguments,
7025 the last element in the list will have type `void'. */
7026 parmtype = TREE_VALUE (parmlist);
7027 if (VOID_TYPE_P (parmtype))
7029 if (more_const_call_expr_args_p (&iter))
7030 return END_BUILTINS;
7031 return DECL_FUNCTION_CODE (fndecl);
7034 if (! more_const_call_expr_args_p (&iter))
7035 return END_BUILTINS;
7037 arg = next_const_call_expr_arg (&iter);
7038 argtype = TREE_TYPE (arg);
7040 if (SCALAR_FLOAT_TYPE_P (parmtype))
7042 if (! SCALAR_FLOAT_TYPE_P (argtype))
7043 return END_BUILTINS;
7045 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7047 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7048 return END_BUILTINS;
7050 else if (POINTER_TYPE_P (parmtype))
7052 if (! POINTER_TYPE_P (argtype))
7053 return END_BUILTINS;
7055 else if (INTEGRAL_TYPE_P (parmtype))
7057 if (! INTEGRAL_TYPE_P (argtype))
7058 return END_BUILTINS;
7061 return END_BUILTINS;
7064 /* Variable-length argument list. */
7065 return DECL_FUNCTION_CODE (fndecl);
7068 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7069 evaluate to a constant. */
7072 fold_builtin_constant_p (tree arg)
7074 /* We return 1 for a numeric type that's known to be a constant
7075 value at compile-time or for an aggregate type that's a
7076 literal constant. */
7079 /* If we know this is a constant, emit the constant of one. */
7080 if (CONSTANT_CLASS_P (arg)
7081 || (TREE_CODE (arg) == CONSTRUCTOR
7082 && TREE_CONSTANT (arg)))
7083 return integer_one_node;
7084 if (TREE_CODE (arg) == ADDR_EXPR)
7086 tree op = TREE_OPERAND (arg, 0);
7087 if (TREE_CODE (op) == STRING_CST
7088 || (TREE_CODE (op) == ARRAY_REF
7089 && integer_zerop (TREE_OPERAND (op, 1))
7090 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7091 return integer_one_node;
7094 /* If this expression has side effects, show we don't know it to be a
7095 constant. Likewise if it's a pointer or aggregate type since in
7096 those case we only want literals, since those are only optimized
7097 when generating RTL, not later.
7098 And finally, if we are compiling an initializer, not code, we
7099 need to return a definite result now; there's not going to be any
7100 more optimization done. */
7101 if (TREE_SIDE_EFFECTS (arg)
7102 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7103 || POINTER_TYPE_P (TREE_TYPE (arg))
7105 || folding_initializer)
7106 return integer_zero_node;
7111 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7112 return it as a truthvalue. */
7115 build_builtin_expect_predicate (tree pred, tree expected)
7117 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7119 fn = built_in_decls[BUILT_IN_EXPECT];
7120 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7121 ret_type = TREE_TYPE (TREE_TYPE (fn));
7122 pred_type = TREE_VALUE (arg_types);
7123 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7125 pred = fold_convert (pred_type, pred);
7126 expected = fold_convert (expected_type, expected);
7127 call_expr = build_call_expr (fn, 2, pred, expected);
7129 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7130 build_int_cst (ret_type, 0));
7133 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7134 NULL_TREE if no simplification is possible. */
7137 fold_builtin_expect (tree arg0, tree arg1)
7140 enum tree_code code;
7142 /* If this is a builtin_expect within a builtin_expect keep the
7143 inner one. See through a comparison against a constant. It
7144 might have been added to create a thruthvalue. */
7146 if (COMPARISON_CLASS_P (inner)
7147 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7148 inner = TREE_OPERAND (inner, 0);
7150 if (TREE_CODE (inner) == CALL_EXPR
7151 && (fndecl = get_callee_fndecl (inner))
7152 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7153 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7156 /* Distribute the expected value over short-circuiting operators.
7157 See through the cast from truthvalue_type_node to long. */
7159 while (TREE_CODE (inner) == NOP_EXPR
7160 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7161 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7162 inner = TREE_OPERAND (inner, 0);
7164 code = TREE_CODE (inner);
7165 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7167 tree op0 = TREE_OPERAND (inner, 0);
7168 tree op1 = TREE_OPERAND (inner, 1);
7170 op0 = build_builtin_expect_predicate (op0, arg1);
7171 op1 = build_builtin_expect_predicate (op1, arg1);
7172 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7174 return fold_convert (TREE_TYPE (arg0), inner);
7177 /* If the argument isn't invariant then there's nothing else we can do. */
7178 if (!TREE_CONSTANT (arg0))
7181 /* If we expect that a comparison against the argument will fold to
7182 a constant return the constant. In practice, this means a true
7183 constant or the address of a non-weak symbol. */
7186 if (TREE_CODE (inner) == ADDR_EXPR)
7190 inner = TREE_OPERAND (inner, 0);
7192 while (TREE_CODE (inner) == COMPONENT_REF
7193 || TREE_CODE (inner) == ARRAY_REF);
7194 if (DECL_P (inner) && DECL_WEAK (inner))
7198 /* Otherwise, ARG0 already has the proper type for the return value. */
7202 /* Fold a call to __builtin_classify_type with argument ARG. */
7205 fold_builtin_classify_type (tree arg)
7208 return build_int_cst (NULL_TREE, no_type_class);
7210 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7213 /* Fold a call to __builtin_strlen with argument ARG. */
7216 fold_builtin_strlen (tree arg)
7218 if (!validate_arg (arg, POINTER_TYPE))
7222 tree len = c_strlen (arg, 0);
7226 /* Convert from the internal "sizetype" type to "size_t". */
7228 len = fold_convert (size_type_node, len);
7236 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7239 fold_builtin_inf (tree type, int warn)
7241 REAL_VALUE_TYPE real;
7243 /* __builtin_inff is intended to be usable to define INFINITY on all
7244 targets. If an infinity is not available, INFINITY expands "to a
7245 positive constant of type float that overflows at translation
7246 time", footnote "In this case, using INFINITY will violate the
7247 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7248 Thus we pedwarn to ensure this constraint violation is
7250 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7251 pedwarn (0, "target format does not support infinity");
7254 return build_real (type, real);
7257 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7260 fold_builtin_nan (tree arg, tree type, int quiet)
7262 REAL_VALUE_TYPE real;
7265 if (!validate_arg (arg, POINTER_TYPE))
7267 str = c_getstr (arg);
7271 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7274 return build_real (type, real);
7277 /* Return true if the floating point expression T has an integer value.
7278 We also allow +Inf, -Inf and NaN to be considered integer values. */
7281 integer_valued_real_p (tree t)
7283 switch (TREE_CODE (t))
7290 return integer_valued_real_p (TREE_OPERAND (t, 0));
7295 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
7302 return integer_valued_real_p (TREE_OPERAND (t, 0))
7303 && integer_valued_real_p (TREE_OPERAND (t, 1));
7306 return integer_valued_real_p (TREE_OPERAND (t, 1))
7307 && integer_valued_real_p (TREE_OPERAND (t, 2));
7310 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7314 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7315 if (TREE_CODE (type) == INTEGER_TYPE)
7317 if (TREE_CODE (type) == REAL_TYPE)
7318 return integer_valued_real_p (TREE_OPERAND (t, 0));
7323 switch (builtin_mathfn_code (t))
7325 CASE_FLT_FN (BUILT_IN_CEIL):
7326 CASE_FLT_FN (BUILT_IN_FLOOR):
7327 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7328 CASE_FLT_FN (BUILT_IN_RINT):
7329 CASE_FLT_FN (BUILT_IN_ROUND):
7330 CASE_FLT_FN (BUILT_IN_TRUNC):
7333 CASE_FLT_FN (BUILT_IN_FMIN):
7334 CASE_FLT_FN (BUILT_IN_FMAX):
7335 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7336 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7349 /* FNDECL is assumed to be a builtin where truncation can be propagated
7350 across (for instance floor((double)f) == (double)floorf (f).
7351 Do the transformation for a call with argument ARG. */
7354 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7356 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7358 if (!validate_arg (arg, REAL_TYPE))
7361 /* Integer rounding functions are idempotent. */
7362 if (fcode == builtin_mathfn_code (arg))
7365 /* If argument is already integer valued, and we don't need to worry
7366 about setting errno, there's no need to perform rounding. */
7367 if (! flag_errno_math && integer_valued_real_p (arg))
7372 tree arg0 = strip_float_extensions (arg);
7373 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7374 tree newtype = TREE_TYPE (arg0);
7377 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7378 && (decl = mathfn_built_in (newtype, fcode)))
7379 return fold_convert (ftype,
7380 build_call_expr (decl, 1,
7381 fold_convert (newtype, arg0)));
7386 /* FNDECL is assumed to be builtin which can narrow the FP type of
7387 the argument, for instance lround((double)f) -> lroundf (f).
7388 Do the transformation for a call with argument ARG. */
7391 fold_fixed_mathfn (tree fndecl, tree arg)
7393 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7395 if (!validate_arg (arg, REAL_TYPE))
7398 /* If argument is already integer valued, and we don't need to worry
7399 about setting errno, there's no need to perform rounding. */
7400 if (! flag_errno_math && integer_valued_real_p (arg))
7401 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7405 tree ftype = TREE_TYPE (arg);
7406 tree arg0 = strip_float_extensions (arg);
7407 tree newtype = TREE_TYPE (arg0);
7410 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7411 && (decl = mathfn_built_in (newtype, fcode)))
7412 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7415 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7416 sizeof (long long) == sizeof (long). */
7417 if (TYPE_PRECISION (long_long_integer_type_node)
7418 == TYPE_PRECISION (long_integer_type_node))
7420 tree newfn = NULL_TREE;
7423 CASE_FLT_FN (BUILT_IN_LLCEIL):
7424 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7427 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7428 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7431 CASE_FLT_FN (BUILT_IN_LLROUND):
7432 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7435 CASE_FLT_FN (BUILT_IN_LLRINT):
7436 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7445 tree newcall = build_call_expr(newfn, 1, arg);
7446 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7453 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7454 return type. Return NULL_TREE if no simplification can be made. */
7457 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7461 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7462 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7465 /* Calculate the result when the argument is a constant. */
7466 if (TREE_CODE (arg) == COMPLEX_CST
7467 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7471 if (TREE_CODE (arg) == COMPLEX_EXPR)
7473 tree real = TREE_OPERAND (arg, 0);
7474 tree imag = TREE_OPERAND (arg, 1);
7476 /* If either part is zero, cabs is fabs of the other. */
7477 if (real_zerop (real))
7478 return fold_build1 (ABS_EXPR, type, imag);
7479 if (real_zerop (imag))
7480 return fold_build1 (ABS_EXPR, type, real);
7482 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7483 if (flag_unsafe_math_optimizations
7484 && operand_equal_p (real, imag, OEP_PURE_SAME))
7486 const REAL_VALUE_TYPE sqrt2_trunc
7487 = real_value_truncate (TYPE_MODE (type),
7488 *get_real_const (rv_sqrt2));
7490 return fold_build2 (MULT_EXPR, type,
7491 fold_build1 (ABS_EXPR, type, real),
7492 build_real (type, sqrt2_trunc));
7496 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7497 if (TREE_CODE (arg) == NEGATE_EXPR
7498 || TREE_CODE (arg) == CONJ_EXPR)
7499 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7501 /* Don't do this when optimizing for size. */
7502 if (flag_unsafe_math_optimizations
7503 && optimize && !optimize_size)
7505 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7507 if (sqrtfn != NULL_TREE)
7509 tree rpart, ipart, result;
7511 arg = builtin_save_expr (arg);
7513 rpart = fold_build1 (REALPART_EXPR, type, arg);
7514 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7516 rpart = builtin_save_expr (rpart);
7517 ipart = builtin_save_expr (ipart);
7519 result = fold_build2 (PLUS_EXPR, type,
7520 fold_build2 (MULT_EXPR, type,
7522 fold_build2 (MULT_EXPR, type,
7525 return build_call_expr (sqrtfn, 1, result);
7532 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7533 Return NULL_TREE if no simplification can be made. */
7536 fold_builtin_sqrt (tree arg, tree type)
7539 enum built_in_function fcode;
7542 if (!validate_arg (arg, REAL_TYPE))
7545 /* Calculate the result when the argument is a constant. */
7546 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7549 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7550 fcode = builtin_mathfn_code (arg);
7551 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7553 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7554 arg = fold_build2 (MULT_EXPR, type,
7555 CALL_EXPR_ARG (arg, 0),
7556 build_real (type, dconsthalf));
7557 return build_call_expr (expfn, 1, arg);
7560 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7561 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7563 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7567 tree arg0 = CALL_EXPR_ARG (arg, 0);
7569 /* The inner root was either sqrt or cbrt. */
7570 REAL_VALUE_TYPE dconstroot =
7571 BUILTIN_SQRT_P (fcode) ? dconsthalf : *get_real_const (rv_third);
7573 /* Adjust for the outer root. */
7574 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7575 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7576 tree_root = build_real (type, dconstroot);
7577 return build_call_expr (powfn, 2, arg0, tree_root);
7581 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7582 if (flag_unsafe_math_optimizations
7583 && (fcode == BUILT_IN_POW
7584 || fcode == BUILT_IN_POWF
7585 || fcode == BUILT_IN_POWL))
7587 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7588 tree arg0 = CALL_EXPR_ARG (arg, 0);
7589 tree arg1 = CALL_EXPR_ARG (arg, 1);
7591 if (!tree_expr_nonnegative_p (arg0))
7592 arg0 = build1 (ABS_EXPR, type, arg0);
7593 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7594 build_real (type, dconsthalf));
7595 return build_call_expr (powfn, 2, arg0, narg1);
7601 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7602 Return NULL_TREE if no simplification can be made. */
7605 fold_builtin_cbrt (tree arg, tree type)
7607 const enum built_in_function fcode = builtin_mathfn_code (arg);
7610 if (!validate_arg (arg, REAL_TYPE))
7613 /* Calculate the result when the argument is a constant. */
7614 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7617 if (flag_unsafe_math_optimizations)
7619 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7620 if (BUILTIN_EXPONENT_P (fcode))
7622 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7623 const REAL_VALUE_TYPE third_trunc =
7624 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_third));
7625 arg = fold_build2 (MULT_EXPR, type,
7626 CALL_EXPR_ARG (arg, 0),
7627 build_real (type, third_trunc));
7628 return build_call_expr (expfn, 1, arg);
7631 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7632 if (BUILTIN_SQRT_P (fcode))
7634 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7638 tree arg0 = CALL_EXPR_ARG (arg, 0);
7640 REAL_VALUE_TYPE dconstroot = *get_real_const (rv_third);
7642 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7643 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7644 tree_root = build_real (type, dconstroot);
7645 return build_call_expr (powfn, 2, arg0, tree_root);
7649 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7650 if (BUILTIN_CBRT_P (fcode))
7652 tree arg0 = CALL_EXPR_ARG (arg, 0);
7653 if (tree_expr_nonnegative_p (arg0))
7655 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7660 REAL_VALUE_TYPE dconstroot;
7662 real_arithmetic (&dconstroot, MULT_EXPR,
7663 get_real_const (rv_third),
7664 get_real_const (rv_third));
7665 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7666 tree_root = build_real (type, dconstroot);
7667 return build_call_expr (powfn, 2, arg0, tree_root);
7672 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7673 if (fcode == BUILT_IN_POW
7674 || fcode == BUILT_IN_POWF
7675 || fcode == BUILT_IN_POWL)
7677 tree arg00 = CALL_EXPR_ARG (arg, 0);
7678 tree arg01 = CALL_EXPR_ARG (arg, 1);
7679 if (tree_expr_nonnegative_p (arg00))
7681 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7682 const REAL_VALUE_TYPE dconstroot
7683 = real_value_truncate (TYPE_MODE (type),
7684 *get_real_const (rv_third));
7685 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7686 build_real (type, dconstroot));
7687 return build_call_expr (powfn, 2, arg00, narg01);
7694 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7695 TYPE is the type of the return value. Return NULL_TREE if no
7696 simplification can be made. */
7699 fold_builtin_cos (tree arg, tree type, tree fndecl)
7703 if (!validate_arg (arg, REAL_TYPE))
7706 /* Calculate the result when the argument is a constant. */
7707 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7710 /* Optimize cos(-x) into cos (x). */
7711 if ((narg = fold_strip_sign_ops (arg)))
7712 return build_call_expr (fndecl, 1, narg);
7717 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7718 Return NULL_TREE if no simplification can be made. */
7721 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7723 if (validate_arg (arg, REAL_TYPE))
7727 /* Calculate the result when the argument is a constant. */
7728 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7731 /* Optimize cosh(-x) into cosh (x). */
7732 if ((narg = fold_strip_sign_ops (arg)))
7733 return build_call_expr (fndecl, 1, narg);
7739 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7740 Return NULL_TREE if no simplification can be made. */
7743 fold_builtin_tan (tree arg, tree type)
7745 enum built_in_function fcode;
7748 if (!validate_arg (arg, REAL_TYPE))
7751 /* Calculate the result when the argument is a constant. */
7752 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7755 /* Optimize tan(atan(x)) = x. */
7756 fcode = builtin_mathfn_code (arg);
7757 if (flag_unsafe_math_optimizations
7758 && (fcode == BUILT_IN_ATAN
7759 || fcode == BUILT_IN_ATANF
7760 || fcode == BUILT_IN_ATANL))
7761 return CALL_EXPR_ARG (arg, 0);
7766 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7767 NULL_TREE if no simplification can be made. */
7770 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7775 if (!validate_arg (arg0, REAL_TYPE)
7776 || !validate_arg (arg1, POINTER_TYPE)
7777 || !validate_arg (arg2, POINTER_TYPE))
7780 type = TREE_TYPE (arg0);
7782 /* Calculate the result when the argument is a constant. */
7783 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7786 /* Canonicalize sincos to cexpi. */
7787 if (!TARGET_C99_FUNCTIONS)
7789 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7793 call = build_call_expr (fn, 1, arg0);
7794 call = builtin_save_expr (call);
7796 return build2 (COMPOUND_EXPR, type,
7797 build2 (MODIFY_EXPR, void_type_node,
7798 build_fold_indirect_ref (arg1),
7799 build1 (IMAGPART_EXPR, type, call)),
7800 build2 (MODIFY_EXPR, void_type_node,
7801 build_fold_indirect_ref (arg2),
7802 build1 (REALPART_EXPR, type, call)));
7805 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7806 NULL_TREE if no simplification can be made. */
7809 fold_builtin_cexp (tree arg0, tree type)
7812 tree realp, imagp, ifn;
7814 if (!validate_arg (arg0, COMPLEX_TYPE))
7817 rtype = TREE_TYPE (TREE_TYPE (arg0));
7819 /* In case we can figure out the real part of arg0 and it is constant zero
7821 if (!TARGET_C99_FUNCTIONS)
7823 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7827 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7828 && real_zerop (realp))
7830 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7831 return build_call_expr (ifn, 1, narg);
7834 /* In case we can easily decompose real and imaginary parts split cexp
7835 to exp (r) * cexpi (i). */
7836 if (flag_unsafe_math_optimizations
7839 tree rfn, rcall, icall;
7841 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7845 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7849 icall = build_call_expr (ifn, 1, imagp);
7850 icall = builtin_save_expr (icall);
7851 rcall = build_call_expr (rfn, 1, realp);
7852 rcall = builtin_save_expr (rcall);
7853 return fold_build2 (COMPLEX_EXPR, type,
7854 fold_build2 (MULT_EXPR, rtype,
7856 fold_build1 (REALPART_EXPR, rtype, icall)),
7857 fold_build2 (MULT_EXPR, rtype,
7859 fold_build1 (IMAGPART_EXPR, rtype, icall)));
7865 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7866 Return NULL_TREE if no simplification can be made. */
7869 fold_builtin_trunc (tree fndecl, tree arg)
7871 if (!validate_arg (arg, REAL_TYPE))
7874 /* Optimize trunc of constant value. */
7875 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7877 REAL_VALUE_TYPE r, x;
7878 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7880 x = TREE_REAL_CST (arg);
7881 real_trunc (&r, TYPE_MODE (type), &x);
7882 return build_real (type, r);
7885 return fold_trunc_transparent_mathfn (fndecl, arg);
7888 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7889 Return NULL_TREE if no simplification can be made. */
7892 fold_builtin_floor (tree fndecl, tree arg)
7894 if (!validate_arg (arg, REAL_TYPE))
7897 /* Optimize floor of constant value. */
7898 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7902 x = TREE_REAL_CST (arg);
7903 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7905 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7908 real_floor (&r, TYPE_MODE (type), &x);
7909 return build_real (type, r);
7913 /* Fold floor (x) where x is nonnegative to trunc (x). */
7914 if (tree_expr_nonnegative_p (arg))
7916 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7918 return build_call_expr (truncfn, 1, arg);
7921 return fold_trunc_transparent_mathfn (fndecl, arg);
7924 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7925 Return NULL_TREE if no simplification can be made. */
7928 fold_builtin_ceil (tree fndecl, tree arg)
7930 if (!validate_arg (arg, REAL_TYPE))
7933 /* Optimize ceil of constant value. */
7934 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7938 x = TREE_REAL_CST (arg);
7939 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7941 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7944 real_ceil (&r, TYPE_MODE (type), &x);
7945 return build_real (type, r);
7949 return fold_trunc_transparent_mathfn (fndecl, arg);
7952 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7953 Return NULL_TREE if no simplification can be made. */
7956 fold_builtin_round (tree fndecl, tree arg)
7958 if (!validate_arg (arg, REAL_TYPE))
7961 /* Optimize round of constant value. */
7962 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7966 x = TREE_REAL_CST (arg);
7967 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7969 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7972 real_round (&r, TYPE_MODE (type), &x);
7973 return build_real (type, r);
7977 return fold_trunc_transparent_mathfn (fndecl, arg);
7980 /* Fold function call to builtin lround, lroundf or lroundl (or the
7981 corresponding long long versions) and other rounding functions. ARG
7982 is the argument to the call. Return NULL_TREE if no simplification
7986 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7988 if (!validate_arg (arg, REAL_TYPE))
7991 /* Optimize lround of constant value. */
7992 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7994 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7996 if (real_isfinite (&x))
7998 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7999 tree ftype = TREE_TYPE (arg);
8000 unsigned HOST_WIDE_INT lo2;
8001 HOST_WIDE_INT hi, lo;
8004 switch (DECL_FUNCTION_CODE (fndecl))
8006 CASE_FLT_FN (BUILT_IN_LFLOOR):
8007 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8008 real_floor (&r, TYPE_MODE (ftype), &x);
8011 CASE_FLT_FN (BUILT_IN_LCEIL):
8012 CASE_FLT_FN (BUILT_IN_LLCEIL):
8013 real_ceil (&r, TYPE_MODE (ftype), &x);
8016 CASE_FLT_FN (BUILT_IN_LROUND):
8017 CASE_FLT_FN (BUILT_IN_LLROUND):
8018 real_round (&r, TYPE_MODE (ftype), &x);
8025 REAL_VALUE_TO_INT (&lo, &hi, r);
8026 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8027 return build_int_cst_wide (itype, lo2, hi);
8031 switch (DECL_FUNCTION_CODE (fndecl))
8033 CASE_FLT_FN (BUILT_IN_LFLOOR):
8034 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8035 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8036 if (tree_expr_nonnegative_p (arg))
8037 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8043 return fold_fixed_mathfn (fndecl, arg);
8046 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8047 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8048 the argument to the call. Return NULL_TREE if no simplification can
8052 fold_builtin_bitop (tree fndecl, tree arg)
8054 if (!validate_arg (arg, INTEGER_TYPE))
8057 /* Optimize for constant argument. */
8058 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8060 HOST_WIDE_INT hi, width, result;
8061 unsigned HOST_WIDE_INT lo;
8064 type = TREE_TYPE (arg);
8065 width = TYPE_PRECISION (type);
8066 lo = TREE_INT_CST_LOW (arg);
8068 /* Clear all the bits that are beyond the type's precision. */
8069 if (width > HOST_BITS_PER_WIDE_INT)
8071 hi = TREE_INT_CST_HIGH (arg);
8072 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8073 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8078 if (width < HOST_BITS_PER_WIDE_INT)
8079 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8082 switch (DECL_FUNCTION_CODE (fndecl))
8084 CASE_INT_FN (BUILT_IN_FFS):
8086 result = exact_log2 (lo & -lo) + 1;
8088 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8093 CASE_INT_FN (BUILT_IN_CLZ):
8095 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8097 result = width - floor_log2 (lo) - 1;
8098 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8102 CASE_INT_FN (BUILT_IN_CTZ):
8104 result = exact_log2 (lo & -lo);
8106 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8107 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8111 CASE_INT_FN (BUILT_IN_POPCOUNT):
8114 result++, lo &= lo - 1;
8116 result++, hi &= hi - 1;
8119 CASE_INT_FN (BUILT_IN_PARITY):
8122 result++, lo &= lo - 1;
8124 result++, hi &= hi - 1;
8132 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8138 /* Fold function call to builtin_bswap and the long and long long
8139 variants. Return NULL_TREE if no simplification can be made. */
8141 fold_builtin_bswap (tree fndecl, tree arg)
8143 if (! validate_arg (arg, INTEGER_TYPE))
8146 /* Optimize constant value. */
8147 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8149 HOST_WIDE_INT hi, width, r_hi = 0;
8150 unsigned HOST_WIDE_INT lo, r_lo = 0;
8153 type = TREE_TYPE (arg);
8154 width = TYPE_PRECISION (type);
8155 lo = TREE_INT_CST_LOW (arg);
8156 hi = TREE_INT_CST_HIGH (arg);
8158 switch (DECL_FUNCTION_CODE (fndecl))
8160 case BUILT_IN_BSWAP32:
8161 case BUILT_IN_BSWAP64:
8165 for (s = 0; s < width; s += 8)
8167 int d = width - s - 8;
8168 unsigned HOST_WIDE_INT byte;
8170 if (s < HOST_BITS_PER_WIDE_INT)
8171 byte = (lo >> s) & 0xff;
8173 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8175 if (d < HOST_BITS_PER_WIDE_INT)
8178 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8188 if (width < HOST_BITS_PER_WIDE_INT)
8189 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8191 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8197 /* Return true if EXPR is the real constant contained in VALUE. */
8200 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8204 return ((TREE_CODE (expr) == REAL_CST
8205 && !TREE_OVERFLOW (expr)
8206 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8207 || (TREE_CODE (expr) == COMPLEX_CST
8208 && real_dconstp (TREE_REALPART (expr), value)
8209 && real_zerop (TREE_IMAGPART (expr))));
8212 /* A subroutine of fold_builtin to fold the various logarithmic
8213 functions. Return NULL_TREE if no simplification can me made.
8214 FUNC is the corresponding MPFR logarithm function. */
8217 fold_builtin_logarithm (tree fndecl, tree arg,
8218 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8220 if (validate_arg (arg, REAL_TYPE))
8222 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8224 const enum built_in_function fcode = builtin_mathfn_code (arg);
8226 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8227 instead we'll look for 'e' truncated to MODE. So only do
8228 this if flag_unsafe_math_optimizations is set. */
8229 if (flag_unsafe_math_optimizations && func == mpfr_log)
8231 const REAL_VALUE_TYPE e_truncated =
8232 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_e));
8233 if (real_dconstp (arg, &e_truncated))
8234 return build_real (type, dconst1);
8237 /* Calculate the result when the argument is a constant. */
8238 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8241 /* Special case, optimize logN(expN(x)) = x. */
8242 if (flag_unsafe_math_optimizations
8243 && ((func == mpfr_log
8244 && (fcode == BUILT_IN_EXP
8245 || fcode == BUILT_IN_EXPF
8246 || fcode == BUILT_IN_EXPL))
8247 || (func == mpfr_log2
8248 && (fcode == BUILT_IN_EXP2
8249 || fcode == BUILT_IN_EXP2F
8250 || fcode == BUILT_IN_EXP2L))
8251 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8252 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8254 /* Optimize logN(func()) for various exponential functions. We
8255 want to determine the value "x" and the power "exponent" in
8256 order to transform logN(x**exponent) into exponent*logN(x). */
8257 if (flag_unsafe_math_optimizations)
8259 tree exponent = 0, x = 0;
8263 CASE_FLT_FN (BUILT_IN_EXP):
8264 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8265 x = build_real (type,
8266 real_value_truncate (TYPE_MODE (type),
8267 *get_real_const (rv_e)));
8268 exponent = CALL_EXPR_ARG (arg, 0);
8270 CASE_FLT_FN (BUILT_IN_EXP2):
8271 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8272 x = build_real (type, dconst2);
8273 exponent = CALL_EXPR_ARG (arg, 0);
8275 CASE_FLT_FN (BUILT_IN_EXP10):
8276 CASE_FLT_FN (BUILT_IN_POW10):
8277 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8279 REAL_VALUE_TYPE dconst10;
8280 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8281 x = build_real (type, dconst10);
8283 exponent = CALL_EXPR_ARG (arg, 0);
8285 CASE_FLT_FN (BUILT_IN_SQRT):
8286 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8287 x = CALL_EXPR_ARG (arg, 0);
8288 exponent = build_real (type, dconsthalf);
8290 CASE_FLT_FN (BUILT_IN_CBRT):
8291 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8292 x = CALL_EXPR_ARG (arg, 0);
8293 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8294 *get_real_const (rv_third)));
8296 CASE_FLT_FN (BUILT_IN_POW):
8297 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8298 x = CALL_EXPR_ARG (arg, 0);
8299 exponent = CALL_EXPR_ARG (arg, 1);
8305 /* Now perform the optimization. */
8308 tree logfn = build_call_expr (fndecl, 1, x);
8309 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8317 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8318 NULL_TREE if no simplification can be made. */
8321 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8323 tree res, narg0, narg1;
8325 if (!validate_arg (arg0, REAL_TYPE)
8326 || !validate_arg (arg1, REAL_TYPE))
8329 /* Calculate the result when the argument is a constant. */
8330 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8333 /* If either argument to hypot has a negate or abs, strip that off.
8334 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8335 narg0 = fold_strip_sign_ops (arg0);
8336 narg1 = fold_strip_sign_ops (arg1);
8339 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8340 narg1 ? narg1 : arg1);
8343 /* If either argument is zero, hypot is fabs of the other. */
8344 if (real_zerop (arg0))
8345 return fold_build1 (ABS_EXPR, type, arg1);
8346 else if (real_zerop (arg1))
8347 return fold_build1 (ABS_EXPR, type, arg0);
8349 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8350 if (flag_unsafe_math_optimizations
8351 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8353 const REAL_VALUE_TYPE sqrt2_trunc
8354 = real_value_truncate (TYPE_MODE (type), *get_real_const (rv_sqrt2));
8355 return fold_build2 (MULT_EXPR, type,
8356 fold_build1 (ABS_EXPR, type, arg0),
8357 build_real (type, sqrt2_trunc));
8364 /* Fold a builtin function call to pow, powf, or powl. Return
8365 NULL_TREE if no simplification can be made. */
8367 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8371 if (!validate_arg (arg0, REAL_TYPE)
8372 || !validate_arg (arg1, REAL_TYPE))
8375 /* Calculate the result when the argument is a constant. */
8376 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8379 /* Optimize pow(1.0,y) = 1.0. */
8380 if (real_onep (arg0))
8381 return omit_one_operand (type, build_real (type, dconst1), arg1);
8383 if (TREE_CODE (arg1) == REAL_CST
8384 && !TREE_OVERFLOW (arg1))
8386 REAL_VALUE_TYPE cint;
8390 c = TREE_REAL_CST (arg1);
8392 /* Optimize pow(x,0.0) = 1.0. */
8393 if (REAL_VALUES_EQUAL (c, dconst0))
8394 return omit_one_operand (type, build_real (type, dconst1),
8397 /* Optimize pow(x,1.0) = x. */
8398 if (REAL_VALUES_EQUAL (c, dconst1))
8401 /* Optimize pow(x,-1.0) = 1.0/x. */
8402 if (REAL_VALUES_EQUAL (c, dconstm1))
8403 return fold_build2 (RDIV_EXPR, type,
8404 build_real (type, dconst1), arg0);
8406 /* Optimize pow(x,0.5) = sqrt(x). */
8407 if (flag_unsafe_math_optimizations
8408 && REAL_VALUES_EQUAL (c, dconsthalf))
8410 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8412 if (sqrtfn != NULL_TREE)
8413 return build_call_expr (sqrtfn, 1, arg0);
8416 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8417 if (flag_unsafe_math_optimizations)
8419 const REAL_VALUE_TYPE dconstroot
8420 = real_value_truncate (TYPE_MODE (type),
8421 *get_real_const (rv_third));
8423 if (REAL_VALUES_EQUAL (c, dconstroot))
8425 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8426 if (cbrtfn != NULL_TREE)
8427 return build_call_expr (cbrtfn, 1, arg0);
8431 /* Check for an integer exponent. */
8432 n = real_to_integer (&c);
8433 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8434 if (real_identical (&c, &cint))
8436 /* Attempt to evaluate pow at compile-time. */
8437 if (TREE_CODE (arg0) == REAL_CST
8438 && !TREE_OVERFLOW (arg0))
8443 x = TREE_REAL_CST (arg0);
8444 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8445 if (flag_unsafe_math_optimizations || !inexact)
8446 return build_real (type, x);
8449 /* Strip sign ops from even integer powers. */
8450 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8452 tree narg0 = fold_strip_sign_ops (arg0);
8454 return build_call_expr (fndecl, 2, narg0, arg1);
8459 if (flag_unsafe_math_optimizations)
8461 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8463 /* Optimize pow(expN(x),y) = expN(x*y). */
8464 if (BUILTIN_EXPONENT_P (fcode))
8466 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8467 tree arg = CALL_EXPR_ARG (arg0, 0);
8468 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8469 return build_call_expr (expfn, 1, arg);
8472 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8473 if (BUILTIN_SQRT_P (fcode))
8475 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8476 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8477 build_real (type, dconsthalf));
8478 return build_call_expr (fndecl, 2, narg0, narg1);
8481 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8482 if (BUILTIN_CBRT_P (fcode))
8484 tree arg = CALL_EXPR_ARG (arg0, 0);
8485 if (tree_expr_nonnegative_p (arg))
8487 const REAL_VALUE_TYPE dconstroot
8488 = real_value_truncate (TYPE_MODE (type),
8489 *get_real_const (rv_third));
8490 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8491 build_real (type, dconstroot));
8492 return build_call_expr (fndecl, 2, arg, narg1);
8496 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8497 if (fcode == BUILT_IN_POW
8498 || fcode == BUILT_IN_POWF
8499 || fcode == BUILT_IN_POWL)
8501 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8502 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8503 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8504 return build_call_expr (fndecl, 2, arg00, narg1);
8511 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8512 Return NULL_TREE if no simplification can be made. */
8514 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8515 tree arg0, tree arg1, tree type)
8517 if (!validate_arg (arg0, REAL_TYPE)
8518 || !validate_arg (arg1, INTEGER_TYPE))
8521 /* Optimize pow(1.0,y) = 1.0. */
8522 if (real_onep (arg0))
8523 return omit_one_operand (type, build_real (type, dconst1), arg1);
8525 if (host_integerp (arg1, 0))
8527 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8529 /* Evaluate powi at compile-time. */
8530 if (TREE_CODE (arg0) == REAL_CST
8531 && !TREE_OVERFLOW (arg0))
8534 x = TREE_REAL_CST (arg0);
8535 real_powi (&x, TYPE_MODE (type), &x, c);
8536 return build_real (type, x);
8539 /* Optimize pow(x,0) = 1.0. */
8541 return omit_one_operand (type, build_real (type, dconst1),
8544 /* Optimize pow(x,1) = x. */
8548 /* Optimize pow(x,-1) = 1.0/x. */
8550 return fold_build2 (RDIV_EXPR, type,
8551 build_real (type, dconst1), arg0);
8557 /* A subroutine of fold_builtin to fold the various exponent
8558 functions. Return NULL_TREE if no simplification can be made.
8559 FUNC is the corresponding MPFR exponent function. */
8562 fold_builtin_exponent (tree fndecl, tree arg,
8563 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8565 if (validate_arg (arg, REAL_TYPE))
8567 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8570 /* Calculate the result when the argument is a constant. */
8571 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8574 /* Optimize expN(logN(x)) = x. */
8575 if (flag_unsafe_math_optimizations)
8577 const enum built_in_function fcode = builtin_mathfn_code (arg);
8579 if ((func == mpfr_exp
8580 && (fcode == BUILT_IN_LOG
8581 || fcode == BUILT_IN_LOGF
8582 || fcode == BUILT_IN_LOGL))
8583 || (func == mpfr_exp2
8584 && (fcode == BUILT_IN_LOG2
8585 || fcode == BUILT_IN_LOG2F
8586 || fcode == BUILT_IN_LOG2L))
8587 || (func == mpfr_exp10
8588 && (fcode == BUILT_IN_LOG10
8589 || fcode == BUILT_IN_LOG10F
8590 || fcode == BUILT_IN_LOG10L)))
8591 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8598 /* Return true if VAR is a VAR_DECL or a component thereof. */
8601 var_decl_component_p (tree var)
8604 while (handled_component_p (inner))
8605 inner = TREE_OPERAND (inner, 0);
8606 return SSA_VAR_P (inner);
8609 /* Fold function call to builtin memset. Return
8610 NULL_TREE if no simplification can be made. */
8613 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8616 unsigned HOST_WIDE_INT length, cval;
8618 if (! validate_arg (dest, POINTER_TYPE)
8619 || ! validate_arg (c, INTEGER_TYPE)
8620 || ! validate_arg (len, INTEGER_TYPE))
8623 if (! host_integerp (len, 1))
8626 /* If the LEN parameter is zero, return DEST. */
8627 if (integer_zerop (len))
8628 return omit_one_operand (type, dest, c);
8630 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8635 if (TREE_CODE (var) != ADDR_EXPR)
8638 var = TREE_OPERAND (var, 0);
8639 if (TREE_THIS_VOLATILE (var))
8642 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8643 && !POINTER_TYPE_P (TREE_TYPE (var)))
8646 if (! var_decl_component_p (var))
8649 length = tree_low_cst (len, 1);
8650 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8651 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8655 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8658 if (integer_zerop (c))
8662 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8665 cval = tree_low_cst (c, 1);
8669 cval |= (cval << 31) << 1;
8672 ret = build_int_cst_type (TREE_TYPE (var), cval);
8673 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8677 return omit_one_operand (type, dest, ret);
8680 /* Fold function call to builtin memset. Return
8681 NULL_TREE if no simplification can be made. */
8684 fold_builtin_bzero (tree dest, tree size, bool ignore)
8686 if (! validate_arg (dest, POINTER_TYPE)
8687 || ! validate_arg (size, INTEGER_TYPE))
8693 /* New argument list transforming bzero(ptr x, int y) to
8694 memset(ptr x, int 0, size_t y). This is done this way
8695 so that if it isn't expanded inline, we fallback to
8696 calling bzero instead of memset. */
8698 return fold_builtin_memset (dest, integer_zero_node,
8699 fold_convert (sizetype, size),
8700 void_type_node, ignore);
8703 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8704 NULL_TREE if no simplification can be made.
8705 If ENDP is 0, return DEST (like memcpy).
8706 If ENDP is 1, return DEST+LEN (like mempcpy).
8707 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8708 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8712 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8714 tree destvar, srcvar, expr;
8716 if (! validate_arg (dest, POINTER_TYPE)
8717 || ! validate_arg (src, POINTER_TYPE)
8718 || ! validate_arg (len, INTEGER_TYPE))
8721 /* If the LEN parameter is zero, return DEST. */
8722 if (integer_zerop (len))
8723 return omit_one_operand (type, dest, src);
8725 /* If SRC and DEST are the same (and not volatile), return
8726 DEST{,+LEN,+LEN-1}. */
8727 if (operand_equal_p (src, dest, 0))
8731 tree srctype, desttype;
8734 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8735 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8737 /* Both DEST and SRC must be pointer types.
8738 ??? This is what old code did. Is the testing for pointer types
8741 If either SRC is readonly or length is 1, we can use memcpy. */
8742 if (dest_align && src_align
8743 && (readonly_data_expr (src)
8744 || (host_integerp (len, 1)
8745 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8746 tree_low_cst (len, 1)))))
8748 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8751 return build_call_expr (fn, 3, dest, src, len);
8756 if (!host_integerp (len, 0))
8759 This logic lose for arguments like (type *)malloc (sizeof (type)),
8760 since we strip the casts of up to VOID return value from malloc.
8761 Perhaps we ought to inherit type from non-VOID argument here? */
8764 srctype = TREE_TYPE (TREE_TYPE (src));
8765 desttype = TREE_TYPE (TREE_TYPE (dest));
8766 if (!srctype || !desttype
8767 || !TYPE_SIZE_UNIT (srctype)
8768 || !TYPE_SIZE_UNIT (desttype)
8769 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8770 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8771 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8772 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8775 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8776 < (int) TYPE_ALIGN (desttype)
8777 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8778 < (int) TYPE_ALIGN (srctype)))
8782 dest = builtin_save_expr (dest);
8784 srcvar = build_fold_indirect_ref (src);
8785 if (TREE_THIS_VOLATILE (srcvar))
8787 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8789 /* With memcpy, it is possible to bypass aliasing rules, so without
8790 this check i.e. execute/20060930-2.c would be misoptimized, because
8791 it use conflicting alias set to hold argument for the memcpy call.
8792 This check is probably unnecessary with -fno-strict-aliasing.
8793 Similarly for destvar. See also PR29286. */
8794 if (!var_decl_component_p (srcvar)
8795 /* Accept: memcpy (*char_var, "test", 1); that simplify
8797 || is_gimple_min_invariant (srcvar)
8798 || readonly_data_expr (src))
8801 destvar = build_fold_indirect_ref (dest);
8802 if (TREE_THIS_VOLATILE (destvar))
8804 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8806 if (!var_decl_component_p (destvar))
8809 if (srctype == desttype
8810 || (gimple_in_ssa_p (cfun)
8811 && useless_type_conversion_p (desttype, srctype)))
8813 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8814 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8815 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8816 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8817 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8819 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8820 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8826 if (endp == 0 || endp == 3)
8827 return omit_one_operand (type, dest, expr);
8833 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8836 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8837 dest = fold_convert (type, dest);
8839 dest = omit_one_operand (type, dest, expr);
8843 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8844 If LEN is not NULL, it represents the length of the string to be
8845 copied. Return NULL_TREE if no simplification can be made. */
8848 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8852 if (!validate_arg (dest, POINTER_TYPE)
8853 || !validate_arg (src, POINTER_TYPE))
8856 /* If SRC and DEST are the same (and not volatile), return DEST. */
8857 if (operand_equal_p (src, dest, 0))
8858 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8863 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8869 len = c_strlen (src, 1);
8870 if (! len || TREE_SIDE_EFFECTS (len))
8874 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8875 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8876 build_call_expr (fn, 3, dest, src, len));
8879 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8880 If SLEN is not NULL, it represents the length of the source string.
8881 Return NULL_TREE if no simplification can be made. */
8884 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8888 if (!validate_arg (dest, POINTER_TYPE)
8889 || !validate_arg (src, POINTER_TYPE)
8890 || !validate_arg (len, INTEGER_TYPE))
8893 /* If the LEN parameter is zero, return DEST. */
8894 if (integer_zerop (len))
8895 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8897 /* We can't compare slen with len as constants below if len is not a
8899 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8903 slen = c_strlen (src, 1);
8905 /* Now, we must be passed a constant src ptr parameter. */
8906 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8909 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8911 /* We do not support simplification of this case, though we do
8912 support it when expanding trees into RTL. */
8913 /* FIXME: generate a call to __builtin_memset. */
8914 if (tree_int_cst_lt (slen, len))
8917 /* OK transform into builtin memcpy. */
8918 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8921 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8922 build_call_expr (fn, 3, dest, src, len));
8925 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8926 arguments to the call, and TYPE is its return type.
8927 Return NULL_TREE if no simplification can be made. */
8930 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8932 if (!validate_arg (arg1, POINTER_TYPE)
8933 || !validate_arg (arg2, INTEGER_TYPE)
8934 || !validate_arg (len, INTEGER_TYPE))
8940 if (TREE_CODE (arg2) != INTEGER_CST
8941 || !host_integerp (len, 1))
8944 p1 = c_getstr (arg1);
8945 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8951 if (target_char_cast (arg2, &c))
8954 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8957 return build_int_cst (TREE_TYPE (arg1), 0);
8959 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8961 return fold_convert (type, tem);
8967 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8968 Return NULL_TREE if no simplification can be made. */
8971 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8973 const char *p1, *p2;
8975 if (!validate_arg (arg1, POINTER_TYPE)
8976 || !validate_arg (arg2, POINTER_TYPE)
8977 || !validate_arg (len, INTEGER_TYPE))
8980 /* If the LEN parameter is zero, return zero. */
8981 if (integer_zerop (len))
8982 return omit_two_operands (integer_type_node, integer_zero_node,
8985 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8986 if (operand_equal_p (arg1, arg2, 0))
8987 return omit_one_operand (integer_type_node, integer_zero_node, len);
8989 p1 = c_getstr (arg1);
8990 p2 = c_getstr (arg2);
8992 /* If all arguments are constant, and the value of len is not greater
8993 than the lengths of arg1 and arg2, evaluate at compile-time. */
8994 if (host_integerp (len, 1) && p1 && p2
8995 && compare_tree_int (len, strlen (p1) + 1) <= 0
8996 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8998 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9001 return integer_one_node;
9003 return integer_minus_one_node;
9005 return integer_zero_node;
9008 /* If len parameter is one, return an expression corresponding to
9009 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9010 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9012 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9013 tree cst_uchar_ptr_node
9014 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9016 tree ind1 = fold_convert (integer_type_node,
9017 build1 (INDIRECT_REF, cst_uchar_node,
9018 fold_convert (cst_uchar_ptr_node,
9020 tree ind2 = fold_convert (integer_type_node,
9021 build1 (INDIRECT_REF, cst_uchar_node,
9022 fold_convert (cst_uchar_ptr_node,
9024 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9030 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9031 Return NULL_TREE if no simplification can be made. */
9034 fold_builtin_strcmp (tree arg1, tree arg2)
9036 const char *p1, *p2;
9038 if (!validate_arg (arg1, POINTER_TYPE)
9039 || !validate_arg (arg2, POINTER_TYPE))
9042 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9043 if (operand_equal_p (arg1, arg2, 0))
9044 return integer_zero_node;
9046 p1 = c_getstr (arg1);
9047 p2 = c_getstr (arg2);
9051 const int i = strcmp (p1, p2);
9053 return integer_minus_one_node;
9055 return integer_one_node;
9057 return integer_zero_node;
9060 /* If the second arg is "", return *(const unsigned char*)arg1. */
9061 if (p2 && *p2 == '\0')
9063 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9064 tree cst_uchar_ptr_node
9065 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9067 return fold_convert (integer_type_node,
9068 build1 (INDIRECT_REF, cst_uchar_node,
9069 fold_convert (cst_uchar_ptr_node,
9073 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9074 if (p1 && *p1 == '\0')
9076 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9077 tree cst_uchar_ptr_node
9078 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9080 tree temp = fold_convert (integer_type_node,
9081 build1 (INDIRECT_REF, cst_uchar_node,
9082 fold_convert (cst_uchar_ptr_node,
9084 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9090 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9091 Return NULL_TREE if no simplification can be made. */
9094 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9096 const char *p1, *p2;
9098 if (!validate_arg (arg1, POINTER_TYPE)
9099 || !validate_arg (arg2, POINTER_TYPE)
9100 || !validate_arg (len, INTEGER_TYPE))
9103 /* If the LEN parameter is zero, return zero. */
9104 if (integer_zerop (len))
9105 return omit_two_operands (integer_type_node, integer_zero_node,
9108 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9109 if (operand_equal_p (arg1, arg2, 0))
9110 return omit_one_operand (integer_type_node, integer_zero_node, len);
9112 p1 = c_getstr (arg1);
9113 p2 = c_getstr (arg2);
9115 if (host_integerp (len, 1) && p1 && p2)
9117 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9119 return integer_one_node;
9121 return integer_minus_one_node;
9123 return integer_zero_node;
9126 /* If the second arg is "", and the length is greater than zero,
9127 return *(const unsigned char*)arg1. */
9128 if (p2 && *p2 == '\0'
9129 && TREE_CODE (len) == INTEGER_CST
9130 && tree_int_cst_sgn (len) == 1)
9132 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9133 tree cst_uchar_ptr_node
9134 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9136 return fold_convert (integer_type_node,
9137 build1 (INDIRECT_REF, cst_uchar_node,
9138 fold_convert (cst_uchar_ptr_node,
9142 /* If the first arg is "", and the length is greater than zero,
9143 return -*(const unsigned char*)arg2. */
9144 if (p1 && *p1 == '\0'
9145 && TREE_CODE (len) == INTEGER_CST
9146 && tree_int_cst_sgn (len) == 1)
9148 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9149 tree cst_uchar_ptr_node
9150 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9152 tree temp = fold_convert (integer_type_node,
9153 build1 (INDIRECT_REF, cst_uchar_node,
9154 fold_convert (cst_uchar_ptr_node,
9156 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9159 /* If len parameter is one, return an expression corresponding to
9160 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9161 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9163 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9164 tree cst_uchar_ptr_node
9165 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9167 tree ind1 = fold_convert (integer_type_node,
9168 build1 (INDIRECT_REF, cst_uchar_node,
9169 fold_convert (cst_uchar_ptr_node,
9171 tree ind2 = fold_convert (integer_type_node,
9172 build1 (INDIRECT_REF, cst_uchar_node,
9173 fold_convert (cst_uchar_ptr_node,
9175 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9181 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9182 ARG. Return NULL_TREE if no simplification can be made. */
9185 fold_builtin_signbit (tree arg, tree type)
9189 if (!validate_arg (arg, REAL_TYPE))
9192 /* If ARG is a compile-time constant, determine the result. */
9193 if (TREE_CODE (arg) == REAL_CST
9194 && !TREE_OVERFLOW (arg))
9198 c = TREE_REAL_CST (arg);
9199 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9200 return fold_convert (type, temp);
9203 /* If ARG is non-negative, the result is always zero. */
9204 if (tree_expr_nonnegative_p (arg))
9205 return omit_one_operand (type, integer_zero_node, arg);
9207 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9208 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9209 return fold_build2 (LT_EXPR, type, arg,
9210 build_real (TREE_TYPE (arg), dconst0));
9215 /* Fold function call to builtin copysign, copysignf or copysignl with
9216 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9220 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9224 if (!validate_arg (arg1, REAL_TYPE)
9225 || !validate_arg (arg2, REAL_TYPE))
9228 /* copysign(X,X) is X. */
9229 if (operand_equal_p (arg1, arg2, 0))
9230 return fold_convert (type, arg1);
9232 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9233 if (TREE_CODE (arg1) == REAL_CST
9234 && TREE_CODE (arg2) == REAL_CST
9235 && !TREE_OVERFLOW (arg1)
9236 && !TREE_OVERFLOW (arg2))
9238 REAL_VALUE_TYPE c1, c2;
9240 c1 = TREE_REAL_CST (arg1);
9241 c2 = TREE_REAL_CST (arg2);
9242 /* c1.sign := c2.sign. */
9243 real_copysign (&c1, &c2);
9244 return build_real (type, c1);
9247 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9248 Remember to evaluate Y for side-effects. */
9249 if (tree_expr_nonnegative_p (arg2))
9250 return omit_one_operand (type,
9251 fold_build1 (ABS_EXPR, type, arg1),
9254 /* Strip sign changing operations for the first argument. */
9255 tem = fold_strip_sign_ops (arg1);
9257 return build_call_expr (fndecl, 2, tem, arg2);
9262 /* Fold a call to builtin isascii with argument ARG. */
9265 fold_builtin_isascii (tree arg)
9267 if (!validate_arg (arg, INTEGER_TYPE))
9271 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9272 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9273 build_int_cst (NULL_TREE,
9274 ~ (unsigned HOST_WIDE_INT) 0x7f));
9275 return fold_build2 (EQ_EXPR, integer_type_node,
9276 arg, integer_zero_node);
9280 /* Fold a call to builtin toascii with argument ARG. */
9283 fold_builtin_toascii (tree arg)
9285 if (!validate_arg (arg, INTEGER_TYPE))
9288 /* Transform toascii(c) -> (c & 0x7f). */
9289 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9290 build_int_cst (NULL_TREE, 0x7f));
9293 /* Fold a call to builtin isdigit with argument ARG. */
9296 fold_builtin_isdigit (tree arg)
9298 if (!validate_arg (arg, INTEGER_TYPE))
9302 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9303 /* According to the C standard, isdigit is unaffected by locale.
9304 However, it definitely is affected by the target character set. */
9305 unsigned HOST_WIDE_INT target_digit0
9306 = lang_hooks.to_target_charset ('0');
9308 if (target_digit0 == 0)
9311 arg = fold_convert (unsigned_type_node, arg);
9312 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9313 build_int_cst (unsigned_type_node, target_digit0));
9314 return fold_build2 (LE_EXPR, integer_type_node, arg,
9315 build_int_cst (unsigned_type_node, 9));
9319 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9322 fold_builtin_fabs (tree arg, tree type)
9324 if (!validate_arg (arg, REAL_TYPE))
9327 arg = fold_convert (type, arg);
9328 if (TREE_CODE (arg) == REAL_CST)
9329 return fold_abs_const (arg, type);
9330 return fold_build1 (ABS_EXPR, type, arg);
9333 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9336 fold_builtin_abs (tree arg, tree type)
9338 if (!validate_arg (arg, INTEGER_TYPE))
9341 arg = fold_convert (type, arg);
9342 if (TREE_CODE (arg) == INTEGER_CST)
9343 return fold_abs_const (arg, type);
9344 return fold_build1 (ABS_EXPR, type, arg);
9347 /* Fold a call to builtin fmin or fmax. */
9350 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9352 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9354 /* Calculate the result when the argument is a constant. */
9355 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9360 /* If either argument is NaN, return the other one. Avoid the
9361 transformation if we get (and honor) a signalling NaN. Using
9362 omit_one_operand() ensures we create a non-lvalue. */
9363 if (TREE_CODE (arg0) == REAL_CST
9364 && real_isnan (&TREE_REAL_CST (arg0))
9365 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9366 || ! TREE_REAL_CST (arg0).signalling))
9367 return omit_one_operand (type, arg1, arg0);
9368 if (TREE_CODE (arg1) == REAL_CST
9369 && real_isnan (&TREE_REAL_CST (arg1))
9370 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9371 || ! TREE_REAL_CST (arg1).signalling))
9372 return omit_one_operand (type, arg0, arg1);
9374 /* Transform fmin/fmax(x,x) -> x. */
9375 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9376 return omit_one_operand (type, arg0, arg1);
9378 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9379 functions to return the numeric arg if the other one is NaN.
9380 These tree codes don't honor that, so only transform if
9381 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9382 handled, so we don't have to worry about it either. */
9383 if (flag_finite_math_only)
9384 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9385 fold_convert (type, arg0),
9386 fold_convert (type, arg1));
9391 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9394 fold_builtin_carg (tree arg, tree type)
9396 if (validate_arg (arg, COMPLEX_TYPE))
9398 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9402 tree new_arg = builtin_save_expr (arg);
9403 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9404 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9405 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9412 /* Fold a call to builtin logb/ilogb. */
9415 fold_builtin_logb (tree arg, tree rettype)
9417 if (! validate_arg (arg, REAL_TYPE))
9422 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9424 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9430 /* If arg is Inf or NaN and we're logb, return it. */
9431 if (TREE_CODE (rettype) == REAL_TYPE)
9432 return fold_convert (rettype, arg);
9433 /* Fall through... */
9435 /* Zero may set errno and/or raise an exception for logb, also
9436 for ilogb we don't know FP_ILOGB0. */
9439 /* For normal numbers, proceed iff radix == 2. In GCC,
9440 normalized significands are in the range [0.5, 1.0). We
9441 want the exponent as if they were [1.0, 2.0) so get the
9442 exponent and subtract 1. */
9443 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9444 return fold_convert (rettype, build_int_cst (NULL_TREE,
9445 REAL_EXP (value)-1));
9453 /* Fold a call to builtin significand, if radix == 2. */
9456 fold_builtin_significand (tree arg, tree rettype)
9458 if (! validate_arg (arg, REAL_TYPE))
9463 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9465 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9472 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9473 return fold_convert (rettype, arg);
9475 /* For normal numbers, proceed iff radix == 2. */
9476 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9478 REAL_VALUE_TYPE result = *value;
9479 /* In GCC, normalized significands are in the range [0.5,
9480 1.0). We want them to be [1.0, 2.0) so set the
9482 SET_REAL_EXP (&result, 1);
9483 return build_real (rettype, result);
9492 /* Fold a call to builtin frexp, we can assume the base is 2. */
9495 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9497 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9502 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9505 arg1 = build_fold_indirect_ref (arg1);
9507 /* Proceed if a valid pointer type was passed in. */
9508 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9510 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9516 /* For +-0, return (*exp = 0, +-0). */
9517 exp = integer_zero_node;
9522 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9523 return omit_one_operand (rettype, arg0, arg1);
9526 /* Since the frexp function always expects base 2, and in
9527 GCC normalized significands are already in the range
9528 [0.5, 1.0), we have exactly what frexp wants. */
9529 REAL_VALUE_TYPE frac_rvt = *value;
9530 SET_REAL_EXP (&frac_rvt, 0);
9531 frac = build_real (rettype, frac_rvt);
9532 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9539 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9540 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9541 TREE_SIDE_EFFECTS (arg1) = 1;
9542 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9548 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9549 then we can assume the base is two. If it's false, then we have to
9550 check the mode of the TYPE parameter in certain cases. */
9553 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9555 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9560 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9561 if (real_zerop (arg0) || integer_zerop (arg1)
9562 || (TREE_CODE (arg0) == REAL_CST
9563 && !real_isfinite (&TREE_REAL_CST (arg0))))
9564 return omit_one_operand (type, arg0, arg1);
9566 /* If both arguments are constant, then try to evaluate it. */
9567 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9568 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9569 && host_integerp (arg1, 0))
9571 /* Bound the maximum adjustment to twice the range of the
9572 mode's valid exponents. Use abs to ensure the range is
9573 positive as a sanity check. */
9574 const long max_exp_adj = 2 *
9575 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9576 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9578 /* Get the user-requested adjustment. */
9579 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9581 /* The requested adjustment must be inside this range. This
9582 is a preliminary cap to avoid things like overflow, we
9583 may still fail to compute the result for other reasons. */
9584 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9586 REAL_VALUE_TYPE initial_result;
9588 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9590 /* Ensure we didn't overflow. */
9591 if (! real_isinf (&initial_result))
9593 const REAL_VALUE_TYPE trunc_result
9594 = real_value_truncate (TYPE_MODE (type), initial_result);
9596 /* Only proceed if the target mode can hold the
9598 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9599 return build_real (type, trunc_result);
9608 /* Fold a call to builtin modf. */
9611 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9613 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9618 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9621 arg1 = build_fold_indirect_ref (arg1);
9623 /* Proceed if a valid pointer type was passed in. */
9624 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9626 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9627 REAL_VALUE_TYPE trunc, frac;
9633 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9634 trunc = frac = *value;
9637 /* For +-Inf, return (*arg1 = arg0, +-0). */
9639 frac.sign = value->sign;
9643 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9644 real_trunc (&trunc, VOIDmode, value);
9645 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9646 /* If the original number was negative and already
9647 integral, then the fractional part is -0.0. */
9648 if (value->sign && frac.cl == rvc_zero)
9649 frac.sign = value->sign;
9653 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9654 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9655 build_real (rettype, trunc));
9656 TREE_SIDE_EFFECTS (arg1) = 1;
9657 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9658 build_real (rettype, frac));
9664 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9665 ARG is the argument for the call. */
9668 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9670 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9673 if (!validate_arg (arg, REAL_TYPE))
9676 switch (builtin_index)
9678 case BUILT_IN_ISINF:
9679 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9680 return omit_one_operand (type, integer_zero_node, arg);
9682 if (TREE_CODE (arg) == REAL_CST)
9684 r = TREE_REAL_CST (arg);
9685 if (real_isinf (&r))
9686 return real_compare (GT_EXPR, &r, &dconst0)
9687 ? integer_one_node : integer_minus_one_node;
9689 return integer_zero_node;
9694 case BUILT_IN_ISINF_SIGN:
9696 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9697 /* In a boolean context, GCC will fold the inner COND_EXPR to
9698 1. So e.g. "if (isinf_sign(x))" would be folded to just
9699 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9700 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9701 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9702 tree tmp = NULL_TREE;
9704 arg = builtin_save_expr (arg);
9706 if (signbit_fn && isinf_fn)
9708 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
9709 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
9711 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
9712 signbit_call, integer_zero_node);
9713 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
9714 isinf_call, integer_zero_node);
9716 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
9717 integer_minus_one_node, integer_one_node);
9718 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
9725 case BUILT_IN_ISFINITE:
9726 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9727 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9728 return omit_one_operand (type, integer_one_node, arg);
9730 if (TREE_CODE (arg) == REAL_CST)
9732 r = TREE_REAL_CST (arg);
9733 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9738 case BUILT_IN_ISNAN:
9739 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9740 return omit_one_operand (type, integer_zero_node, arg);
9742 if (TREE_CODE (arg) == REAL_CST)
9744 r = TREE_REAL_CST (arg);
9745 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9748 arg = builtin_save_expr (arg);
9749 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9756 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9757 This builtin will generate code to return the appropriate floating
9758 point classification depending on the value of the floating point
9759 number passed in. The possible return values must be supplied as
9760 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9761 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9762 one floating point argument which is "type generic". */
9765 fold_builtin_fpclassify (tree exp)
9767 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9768 arg, type, res, tmp;
9769 enum machine_mode mode;
9773 /* Verify the required arguments in the original call. */
9774 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9775 INTEGER_TYPE, INTEGER_TYPE,
9776 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9779 fp_nan = CALL_EXPR_ARG (exp, 0);
9780 fp_infinite = CALL_EXPR_ARG (exp, 1);
9781 fp_normal = CALL_EXPR_ARG (exp, 2);
9782 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9783 fp_zero = CALL_EXPR_ARG (exp, 4);
9784 arg = CALL_EXPR_ARG (exp, 5);
9785 type = TREE_TYPE (arg);
9786 mode = TYPE_MODE (type);
9787 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
9791 (fabs(x) == Inf ? FP_INFINITE :
9792 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9793 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9795 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9796 build_real (type, dconst0));
9797 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
9799 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9800 real_from_string (&r, buf);
9801 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
9802 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
9804 if (HONOR_INFINITIES (mode))
9807 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9808 build_real (type, r));
9809 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
9812 if (HONOR_NANS (mode))
9814 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
9815 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
9821 /* Fold a call to an unordered comparison function such as
9822 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9823 being called and ARG0 and ARG1 are the arguments for the call.
9824 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9825 the opposite of the desired result. UNORDERED_CODE is used
9826 for modes that can hold NaNs and ORDERED_CODE is used for
9830 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9831 enum tree_code unordered_code,
9832 enum tree_code ordered_code)
9834 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9835 enum tree_code code;
9837 enum tree_code code0, code1;
9838 tree cmp_type = NULL_TREE;
9840 type0 = TREE_TYPE (arg0);
9841 type1 = TREE_TYPE (arg1);
9843 code0 = TREE_CODE (type0);
9844 code1 = TREE_CODE (type1);
9846 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9847 /* Choose the wider of two real types. */
9848 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9850 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9852 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9855 arg0 = fold_convert (cmp_type, arg0);
9856 arg1 = fold_convert (cmp_type, arg1);
9858 if (unordered_code == UNORDERED_EXPR)
9860 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9861 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9862 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9865 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9867 return fold_build1 (TRUTH_NOT_EXPR, type,
9868 fold_build2 (code, type, arg0, arg1));
9871 /* Fold a call to built-in function FNDECL with 0 arguments.
9872 IGNORE is true if the result of the function call is ignored. This
9873 function returns NULL_TREE if no simplification was possible. */
9876 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9878 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9879 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9882 CASE_FLT_FN (BUILT_IN_INF):
9883 case BUILT_IN_INFD32:
9884 case BUILT_IN_INFD64:
9885 case BUILT_IN_INFD128:
9886 return fold_builtin_inf (type, true);
9888 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9889 return fold_builtin_inf (type, false);
9891 case BUILT_IN_CLASSIFY_TYPE:
9892 return fold_builtin_classify_type (NULL_TREE);
9900 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9901 IGNORE is true if the result of the function call is ignored. This
9902 function returns NULL_TREE if no simplification was possible. */
9905 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9907 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9908 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9912 case BUILT_IN_CONSTANT_P:
9914 tree val = fold_builtin_constant_p (arg0);
9916 /* Gimplification will pull the CALL_EXPR for the builtin out of
9917 an if condition. When not optimizing, we'll not CSE it back.
9918 To avoid link error types of regressions, return false now. */
9919 if (!val && !optimize)
9920 val = integer_zero_node;
9925 case BUILT_IN_CLASSIFY_TYPE:
9926 return fold_builtin_classify_type (arg0);
9928 case BUILT_IN_STRLEN:
9929 return fold_builtin_strlen (arg0);
9931 CASE_FLT_FN (BUILT_IN_FABS):
9932 return fold_builtin_fabs (arg0, type);
9936 case BUILT_IN_LLABS:
9937 case BUILT_IN_IMAXABS:
9938 return fold_builtin_abs (arg0, type);
9940 CASE_FLT_FN (BUILT_IN_CONJ):
9941 if (validate_arg (arg0, COMPLEX_TYPE))
9942 return fold_build1 (CONJ_EXPR, type, arg0);
9945 CASE_FLT_FN (BUILT_IN_CREAL):
9946 if (validate_arg (arg0, COMPLEX_TYPE))
9947 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
9950 CASE_FLT_FN (BUILT_IN_CIMAG):
9951 if (validate_arg (arg0, COMPLEX_TYPE))
9952 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9955 CASE_FLT_FN (BUILT_IN_CCOS):
9956 CASE_FLT_FN (BUILT_IN_CCOSH):
9957 /* These functions are "even", i.e. f(x) == f(-x). */
9958 if (validate_arg (arg0, COMPLEX_TYPE))
9960 tree narg = fold_strip_sign_ops (arg0);
9962 return build_call_expr (fndecl, 1, narg);
9966 CASE_FLT_FN (BUILT_IN_CABS):
9967 return fold_builtin_cabs (arg0, type, fndecl);
9969 CASE_FLT_FN (BUILT_IN_CARG):
9970 return fold_builtin_carg (arg0, type);
9972 CASE_FLT_FN (BUILT_IN_SQRT):
9973 return fold_builtin_sqrt (arg0, type);
9975 CASE_FLT_FN (BUILT_IN_CBRT):
9976 return fold_builtin_cbrt (arg0, type);
9978 CASE_FLT_FN (BUILT_IN_ASIN):
9979 if (validate_arg (arg0, REAL_TYPE))
9980 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9981 &dconstm1, &dconst1, true);
9984 CASE_FLT_FN (BUILT_IN_ACOS):
9985 if (validate_arg (arg0, REAL_TYPE))
9986 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9987 &dconstm1, &dconst1, true);
9990 CASE_FLT_FN (BUILT_IN_ATAN):
9991 if (validate_arg (arg0, REAL_TYPE))
9992 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9995 CASE_FLT_FN (BUILT_IN_ASINH):
9996 if (validate_arg (arg0, REAL_TYPE))
9997 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10000 CASE_FLT_FN (BUILT_IN_ACOSH):
10001 if (validate_arg (arg0, REAL_TYPE))
10002 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10003 &dconst1, NULL, true);
10006 CASE_FLT_FN (BUILT_IN_ATANH):
10007 if (validate_arg (arg0, REAL_TYPE))
10008 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10009 &dconstm1, &dconst1, false);
10012 CASE_FLT_FN (BUILT_IN_SIN):
10013 if (validate_arg (arg0, REAL_TYPE))
10014 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10017 CASE_FLT_FN (BUILT_IN_COS):
10018 return fold_builtin_cos (arg0, type, fndecl);
10021 CASE_FLT_FN (BUILT_IN_TAN):
10022 return fold_builtin_tan (arg0, type);
10024 CASE_FLT_FN (BUILT_IN_CEXP):
10025 return fold_builtin_cexp (arg0, type);
10027 CASE_FLT_FN (BUILT_IN_CEXPI):
10028 if (validate_arg (arg0, REAL_TYPE))
10029 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10032 CASE_FLT_FN (BUILT_IN_SINH):
10033 if (validate_arg (arg0, REAL_TYPE))
10034 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10037 CASE_FLT_FN (BUILT_IN_COSH):
10038 return fold_builtin_cosh (arg0, type, fndecl);
10040 CASE_FLT_FN (BUILT_IN_TANH):
10041 if (validate_arg (arg0, REAL_TYPE))
10042 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10045 CASE_FLT_FN (BUILT_IN_ERF):
10046 if (validate_arg (arg0, REAL_TYPE))
10047 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10050 CASE_FLT_FN (BUILT_IN_ERFC):
10051 if (validate_arg (arg0, REAL_TYPE))
10052 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10055 CASE_FLT_FN (BUILT_IN_TGAMMA):
10056 if (validate_arg (arg0, REAL_TYPE))
10057 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10060 CASE_FLT_FN (BUILT_IN_EXP):
10061 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10063 CASE_FLT_FN (BUILT_IN_EXP2):
10064 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10066 CASE_FLT_FN (BUILT_IN_EXP10):
10067 CASE_FLT_FN (BUILT_IN_POW10):
10068 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10070 CASE_FLT_FN (BUILT_IN_EXPM1):
10071 if (validate_arg (arg0, REAL_TYPE))
10072 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10075 CASE_FLT_FN (BUILT_IN_LOG):
10076 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10078 CASE_FLT_FN (BUILT_IN_LOG2):
10079 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10081 CASE_FLT_FN (BUILT_IN_LOG10):
10082 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10084 CASE_FLT_FN (BUILT_IN_LOG1P):
10085 if (validate_arg (arg0, REAL_TYPE))
10086 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10087 &dconstm1, NULL, false);
10090 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10091 CASE_FLT_FN (BUILT_IN_J0):
10092 if (validate_arg (arg0, REAL_TYPE))
10093 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10097 CASE_FLT_FN (BUILT_IN_J1):
10098 if (validate_arg (arg0, REAL_TYPE))
10099 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10103 CASE_FLT_FN (BUILT_IN_Y0):
10104 if (validate_arg (arg0, REAL_TYPE))
10105 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10106 &dconst0, NULL, false);
10109 CASE_FLT_FN (BUILT_IN_Y1):
10110 if (validate_arg (arg0, REAL_TYPE))
10111 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10112 &dconst0, NULL, false);
10116 CASE_FLT_FN (BUILT_IN_NAN):
10117 case BUILT_IN_NAND32:
10118 case BUILT_IN_NAND64:
10119 case BUILT_IN_NAND128:
10120 return fold_builtin_nan (arg0, type, true);
10122 CASE_FLT_FN (BUILT_IN_NANS):
10123 return fold_builtin_nan (arg0, type, false);
10125 CASE_FLT_FN (BUILT_IN_FLOOR):
10126 return fold_builtin_floor (fndecl, arg0);
10128 CASE_FLT_FN (BUILT_IN_CEIL):
10129 return fold_builtin_ceil (fndecl, arg0);
10131 CASE_FLT_FN (BUILT_IN_TRUNC):
10132 return fold_builtin_trunc (fndecl, arg0);
10134 CASE_FLT_FN (BUILT_IN_ROUND):
10135 return fold_builtin_round (fndecl, arg0);
10137 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10138 CASE_FLT_FN (BUILT_IN_RINT):
10139 return fold_trunc_transparent_mathfn (fndecl, arg0);
10141 CASE_FLT_FN (BUILT_IN_LCEIL):
10142 CASE_FLT_FN (BUILT_IN_LLCEIL):
10143 CASE_FLT_FN (BUILT_IN_LFLOOR):
10144 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10145 CASE_FLT_FN (BUILT_IN_LROUND):
10146 CASE_FLT_FN (BUILT_IN_LLROUND):
10147 return fold_builtin_int_roundingfn (fndecl, arg0);
10149 CASE_FLT_FN (BUILT_IN_LRINT):
10150 CASE_FLT_FN (BUILT_IN_LLRINT):
10151 return fold_fixed_mathfn (fndecl, arg0);
10153 case BUILT_IN_BSWAP32:
10154 case BUILT_IN_BSWAP64:
10155 return fold_builtin_bswap (fndecl, arg0);
10157 CASE_INT_FN (BUILT_IN_FFS):
10158 CASE_INT_FN (BUILT_IN_CLZ):
10159 CASE_INT_FN (BUILT_IN_CTZ):
10160 CASE_INT_FN (BUILT_IN_POPCOUNT):
10161 CASE_INT_FN (BUILT_IN_PARITY):
10162 return fold_builtin_bitop (fndecl, arg0);
10164 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10165 return fold_builtin_signbit (arg0, type);
10167 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10168 return fold_builtin_significand (arg0, type);
10170 CASE_FLT_FN (BUILT_IN_ILOGB):
10171 CASE_FLT_FN (BUILT_IN_LOGB):
10172 return fold_builtin_logb (arg0, type);
10174 case BUILT_IN_ISASCII:
10175 return fold_builtin_isascii (arg0);
10177 case BUILT_IN_TOASCII:
10178 return fold_builtin_toascii (arg0);
10180 case BUILT_IN_ISDIGIT:
10181 return fold_builtin_isdigit (arg0);
10183 CASE_FLT_FN (BUILT_IN_FINITE):
10184 case BUILT_IN_FINITED32:
10185 case BUILT_IN_FINITED64:
10186 case BUILT_IN_FINITED128:
10187 case BUILT_IN_ISFINITE:
10188 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10190 CASE_FLT_FN (BUILT_IN_ISINF):
10191 case BUILT_IN_ISINFD32:
10192 case BUILT_IN_ISINFD64:
10193 case BUILT_IN_ISINFD128:
10194 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10196 case BUILT_IN_ISINF_SIGN:
10197 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10199 CASE_FLT_FN (BUILT_IN_ISNAN):
10200 case BUILT_IN_ISNAND32:
10201 case BUILT_IN_ISNAND64:
10202 case BUILT_IN_ISNAND128:
10203 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10205 case BUILT_IN_PRINTF:
10206 case BUILT_IN_PRINTF_UNLOCKED:
10207 case BUILT_IN_VPRINTF:
10208 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10218 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10219 IGNORE is true if the result of the function call is ignored. This
10220 function returns NULL_TREE if no simplification was possible. */
10223 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10225 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10226 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10230 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10231 CASE_FLT_FN (BUILT_IN_JN):
10232 if (validate_arg (arg0, INTEGER_TYPE)
10233 && validate_arg (arg1, REAL_TYPE))
10234 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10237 CASE_FLT_FN (BUILT_IN_YN):
10238 if (validate_arg (arg0, INTEGER_TYPE)
10239 && validate_arg (arg1, REAL_TYPE))
10240 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10244 CASE_FLT_FN (BUILT_IN_DREM):
10245 CASE_FLT_FN (BUILT_IN_REMAINDER):
10246 if (validate_arg (arg0, REAL_TYPE)
10247 && validate_arg(arg1, REAL_TYPE))
10248 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10251 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10252 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10253 if (validate_arg (arg0, REAL_TYPE)
10254 && validate_arg(arg1, POINTER_TYPE))
10255 return do_mpfr_lgamma_r (arg0, arg1, type);
10259 CASE_FLT_FN (BUILT_IN_ATAN2):
10260 if (validate_arg (arg0, REAL_TYPE)
10261 && validate_arg(arg1, REAL_TYPE))
10262 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10265 CASE_FLT_FN (BUILT_IN_FDIM):
10266 if (validate_arg (arg0, REAL_TYPE)
10267 && validate_arg(arg1, REAL_TYPE))
10268 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10271 CASE_FLT_FN (BUILT_IN_HYPOT):
10272 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10274 CASE_FLT_FN (BUILT_IN_LDEXP):
10275 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10276 CASE_FLT_FN (BUILT_IN_SCALBN):
10277 CASE_FLT_FN (BUILT_IN_SCALBLN):
10278 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10280 CASE_FLT_FN (BUILT_IN_FREXP):
10281 return fold_builtin_frexp (arg0, arg1, type);
10283 CASE_FLT_FN (BUILT_IN_MODF):
10284 return fold_builtin_modf (arg0, arg1, type);
10286 case BUILT_IN_BZERO:
10287 return fold_builtin_bzero (arg0, arg1, ignore);
10289 case BUILT_IN_FPUTS:
10290 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10292 case BUILT_IN_FPUTS_UNLOCKED:
10293 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10295 case BUILT_IN_STRSTR:
10296 return fold_builtin_strstr (arg0, arg1, type);
10298 case BUILT_IN_STRCAT:
10299 return fold_builtin_strcat (arg0, arg1);
10301 case BUILT_IN_STRSPN:
10302 return fold_builtin_strspn (arg0, arg1);
10304 case BUILT_IN_STRCSPN:
10305 return fold_builtin_strcspn (arg0, arg1);
10307 case BUILT_IN_STRCHR:
10308 case BUILT_IN_INDEX:
10309 return fold_builtin_strchr (arg0, arg1, type);
10311 case BUILT_IN_STRRCHR:
10312 case BUILT_IN_RINDEX:
10313 return fold_builtin_strrchr (arg0, arg1, type);
10315 case BUILT_IN_STRCPY:
10316 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10318 case BUILT_IN_STRCMP:
10319 return fold_builtin_strcmp (arg0, arg1);
10321 case BUILT_IN_STRPBRK:
10322 return fold_builtin_strpbrk (arg0, arg1, type);
10324 case BUILT_IN_EXPECT:
10325 return fold_builtin_expect (arg0, arg1);
10327 CASE_FLT_FN (BUILT_IN_POW):
10328 return fold_builtin_pow (fndecl, arg0, arg1, type);
10330 CASE_FLT_FN (BUILT_IN_POWI):
10331 return fold_builtin_powi (fndecl, arg0, arg1, type);
10333 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10334 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10336 CASE_FLT_FN (BUILT_IN_FMIN):
10337 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10339 CASE_FLT_FN (BUILT_IN_FMAX):
10340 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10342 case BUILT_IN_ISGREATER:
10343 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10344 case BUILT_IN_ISGREATEREQUAL:
10345 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10346 case BUILT_IN_ISLESS:
10347 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10348 case BUILT_IN_ISLESSEQUAL:
10349 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10350 case BUILT_IN_ISLESSGREATER:
10351 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10352 case BUILT_IN_ISUNORDERED:
10353 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10356 /* We do the folding for va_start in the expander. */
10357 case BUILT_IN_VA_START:
10360 case BUILT_IN_SPRINTF:
10361 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10363 case BUILT_IN_OBJECT_SIZE:
10364 return fold_builtin_object_size (arg0, arg1);
10366 case BUILT_IN_PRINTF:
10367 case BUILT_IN_PRINTF_UNLOCKED:
10368 case BUILT_IN_VPRINTF:
10369 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10371 case BUILT_IN_PRINTF_CHK:
10372 case BUILT_IN_VPRINTF_CHK:
10373 if (!validate_arg (arg0, INTEGER_TYPE)
10374 || TREE_SIDE_EFFECTS (arg0))
10377 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10380 case BUILT_IN_FPRINTF:
10381 case BUILT_IN_FPRINTF_UNLOCKED:
10382 case BUILT_IN_VFPRINTF:
10383 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10392 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10393 and ARG2. IGNORE is true if the result of the function call is ignored.
10394 This function returns NULL_TREE if no simplification was possible. */
10397 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10399 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10400 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10404 CASE_FLT_FN (BUILT_IN_SINCOS):
10405 return fold_builtin_sincos (arg0, arg1, arg2);
10407 CASE_FLT_FN (BUILT_IN_FMA):
10408 if (validate_arg (arg0, REAL_TYPE)
10409 && validate_arg(arg1, REAL_TYPE)
10410 && validate_arg(arg2, REAL_TYPE))
10411 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10414 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10415 CASE_FLT_FN (BUILT_IN_REMQUO):
10416 if (validate_arg (arg0, REAL_TYPE)
10417 && validate_arg(arg1, REAL_TYPE)
10418 && validate_arg(arg2, POINTER_TYPE))
10419 return do_mpfr_remquo (arg0, arg1, arg2);
10423 case BUILT_IN_MEMSET:
10424 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10426 case BUILT_IN_BCOPY:
10427 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10429 case BUILT_IN_MEMCPY:
10430 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10432 case BUILT_IN_MEMPCPY:
10433 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10435 case BUILT_IN_MEMMOVE:
10436 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10438 case BUILT_IN_STRNCAT:
10439 return fold_builtin_strncat (arg0, arg1, arg2);
10441 case BUILT_IN_STRNCPY:
10442 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10444 case BUILT_IN_STRNCMP:
10445 return fold_builtin_strncmp (arg0, arg1, arg2);
10447 case BUILT_IN_MEMCHR:
10448 return fold_builtin_memchr (arg0, arg1, arg2, type);
10450 case BUILT_IN_BCMP:
10451 case BUILT_IN_MEMCMP:
10452 return fold_builtin_memcmp (arg0, arg1, arg2);;
10454 case BUILT_IN_SPRINTF:
10455 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10457 case BUILT_IN_STRCPY_CHK:
10458 case BUILT_IN_STPCPY_CHK:
10459 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10462 case BUILT_IN_STRCAT_CHK:
10463 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10465 case BUILT_IN_PRINTF_CHK:
10466 case BUILT_IN_VPRINTF_CHK:
10467 if (!validate_arg (arg0, INTEGER_TYPE)
10468 || TREE_SIDE_EFFECTS (arg0))
10471 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10474 case BUILT_IN_FPRINTF:
10475 case BUILT_IN_FPRINTF_UNLOCKED:
10476 case BUILT_IN_VFPRINTF:
10477 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10479 case BUILT_IN_FPRINTF_CHK:
10480 case BUILT_IN_VFPRINTF_CHK:
10481 if (!validate_arg (arg1, INTEGER_TYPE)
10482 || TREE_SIDE_EFFECTS (arg1))
10485 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10494 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10495 ARG2, and ARG3. IGNORE is true if the result of the function call is
10496 ignored. This function returns NULL_TREE if no simplification was
10500 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10503 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10507 case BUILT_IN_MEMCPY_CHK:
10508 case BUILT_IN_MEMPCPY_CHK:
10509 case BUILT_IN_MEMMOVE_CHK:
10510 case BUILT_IN_MEMSET_CHK:
10511 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10513 DECL_FUNCTION_CODE (fndecl));
10515 case BUILT_IN_STRNCPY_CHK:
10516 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10518 case BUILT_IN_STRNCAT_CHK:
10519 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10521 case BUILT_IN_FPRINTF_CHK:
10522 case BUILT_IN_VFPRINTF_CHK:
10523 if (!validate_arg (arg1, INTEGER_TYPE)
10524 || TREE_SIDE_EFFECTS (arg1))
10527 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10537 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10538 arguments, where NARGS <= 4. IGNORE is true if the result of the
10539 function call is ignored. This function returns NULL_TREE if no
10540 simplification was possible. Note that this only folds builtins with
10541 fixed argument patterns. Foldings that do varargs-to-varargs
10542 transformations, or that match calls with more than 4 arguments,
10543 need to be handled with fold_builtin_varargs instead. */
10545 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10548 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10550 tree ret = NULL_TREE;
10555 ret = fold_builtin_0 (fndecl, ignore);
10558 ret = fold_builtin_1 (fndecl, args[0], ignore);
10561 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10564 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10567 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10575 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10576 TREE_NO_WARNING (ret) = 1;
10582 /* Builtins with folding operations that operate on "..." arguments
10583 need special handling; we need to store the arguments in a convenient
10584 data structure before attempting any folding. Fortunately there are
10585 only a few builtins that fall into this category. FNDECL is the
10586 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10587 result of the function call is ignored. */
10590 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10592 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10593 tree ret = NULL_TREE;
10597 case BUILT_IN_SPRINTF_CHK:
10598 case BUILT_IN_VSPRINTF_CHK:
10599 ret = fold_builtin_sprintf_chk (exp, fcode);
10602 case BUILT_IN_SNPRINTF_CHK:
10603 case BUILT_IN_VSNPRINTF_CHK:
10604 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10607 case BUILT_IN_FPCLASSIFY:
10608 ret = fold_builtin_fpclassify (exp);
10616 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10617 TREE_NO_WARNING (ret) = 1;
10623 /* A wrapper function for builtin folding that prevents warnings for
10624 "statement without effect" and the like, caused by removing the
10625 call node earlier than the warning is generated. */
10628 fold_call_expr (tree exp, bool ignore)
10630 tree ret = NULL_TREE;
10631 tree fndecl = get_callee_fndecl (exp);
10633 && TREE_CODE (fndecl) == FUNCTION_DECL
10634 && DECL_BUILT_IN (fndecl)
10635 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10636 yet. Defer folding until we see all the arguments
10637 (after inlining). */
10638 && !CALL_EXPR_VA_ARG_PACK (exp))
10640 int nargs = call_expr_nargs (exp);
10642 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10643 instead last argument is __builtin_va_arg_pack (). Defer folding
10644 even in that case, until arguments are finalized. */
10645 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10647 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10649 && TREE_CODE (fndecl2) == FUNCTION_DECL
10650 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10651 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10655 /* FIXME: Don't use a list in this interface. */
10656 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10657 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10660 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10662 tree *args = CALL_EXPR_ARGP (exp);
10663 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10666 ret = fold_builtin_varargs (fndecl, exp, ignore);
10669 /* Propagate location information from original call to
10670 expansion of builtin. Otherwise things like
10671 maybe_emit_chk_warning, that operate on the expansion
10672 of a builtin, will use the wrong location information. */
10673 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10675 tree realret = ret;
10676 if (TREE_CODE (ret) == NOP_EXPR)
10677 realret = TREE_OPERAND (ret, 0);
10678 if (CAN_HAVE_LOCATION_P (realret)
10679 && !EXPR_HAS_LOCATION (realret))
10680 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10690 /* Conveniently construct a function call expression. FNDECL names the
10691 function to be called and ARGLIST is a TREE_LIST of arguments. */
10694 build_function_call_expr (tree fndecl, tree arglist)
10696 tree fntype = TREE_TYPE (fndecl);
10697 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10698 int n = list_length (arglist);
10699 tree *argarray = (tree *) alloca (n * sizeof (tree));
10702 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10703 argarray[i] = TREE_VALUE (arglist);
10704 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10707 /* Conveniently construct a function call expression. FNDECL names the
10708 function to be called, N is the number of arguments, and the "..."
10709 parameters are the argument expressions. */
10712 build_call_expr (tree fndecl, int n, ...)
10715 tree fntype = TREE_TYPE (fndecl);
10716 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10717 tree *argarray = (tree *) alloca (n * sizeof (tree));
10721 for (i = 0; i < n; i++)
10722 argarray[i] = va_arg (ap, tree);
10724 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10727 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10728 N arguments are passed in the array ARGARRAY. */
10731 fold_builtin_call_array (tree type,
10736 tree ret = NULL_TREE;
10740 if (TREE_CODE (fn) == ADDR_EXPR)
10742 tree fndecl = TREE_OPERAND (fn, 0);
10743 if (TREE_CODE (fndecl) == FUNCTION_DECL
10744 && DECL_BUILT_IN (fndecl))
10746 /* If last argument is __builtin_va_arg_pack (), arguments to this
10747 function are not finalized yet. Defer folding until they are. */
10748 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10750 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10752 && TREE_CODE (fndecl2) == FUNCTION_DECL
10753 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10754 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10755 return build_call_array (type, fn, n, argarray);
10757 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10759 tree arglist = NULL_TREE;
10760 for (i = n - 1; i >= 0; i--)
10761 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10762 ret = targetm.fold_builtin (fndecl, arglist, false);
10766 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10768 /* First try the transformations that don't require consing up
10770 ret = fold_builtin_n (fndecl, argarray, n, false);
10775 /* If we got this far, we need to build an exp. */
10776 exp = build_call_array (type, fn, n, argarray);
10777 ret = fold_builtin_varargs (fndecl, exp, false);
10778 return ret ? ret : exp;
10782 return build_call_array (type, fn, n, argarray);
10785 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10786 along with N new arguments specified as the "..." parameters. SKIP
10787 is the number of arguments in EXP to be omitted. This function is used
10788 to do varargs-to-varargs transformations. */
10791 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10793 int oldnargs = call_expr_nargs (exp);
10794 int nargs = oldnargs - skip + n;
10795 tree fntype = TREE_TYPE (fndecl);
10796 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10804 buffer = XALLOCAVEC (tree, nargs);
10806 for (i = 0; i < n; i++)
10807 buffer[i] = va_arg (ap, tree);
10809 for (j = skip; j < oldnargs; j++, i++)
10810 buffer[i] = CALL_EXPR_ARG (exp, j);
10813 buffer = CALL_EXPR_ARGP (exp) + skip;
10815 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10818 /* Validate a single argument ARG against a tree code CODE representing
10822 validate_arg (const_tree arg, enum tree_code code)
10826 else if (code == POINTER_TYPE)
10827 return POINTER_TYPE_P (TREE_TYPE (arg));
10828 else if (code == INTEGER_TYPE)
10829 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10830 return code == TREE_CODE (TREE_TYPE (arg));
10833 /* This function validates the types of a function call argument list
10834 against a specified list of tree_codes. If the last specifier is a 0,
10835 that represents an ellipses, otherwise the last specifier must be a
10839 validate_arglist (const_tree callexpr, ...)
10841 enum tree_code code;
10844 const_call_expr_arg_iterator iter;
10847 va_start (ap, callexpr);
10848 init_const_call_expr_arg_iterator (callexpr, &iter);
10852 code = va_arg (ap, enum tree_code);
10856 /* This signifies an ellipses, any further arguments are all ok. */
10860 /* This signifies an endlink, if no arguments remain, return
10861 true, otherwise return false. */
10862 res = !more_const_call_expr_args_p (&iter);
10865 /* If no parameters remain or the parameter's code does not
10866 match the specified code, return false. Otherwise continue
10867 checking any remaining arguments. */
10868 arg = next_const_call_expr_arg (&iter);
10869 if (!validate_arg (arg, code))
10876 /* We need gotos here since we can only have one VA_CLOSE in a
10884 /* Default target-specific builtin expander that does nothing. */
10887 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10888 rtx target ATTRIBUTE_UNUSED,
10889 rtx subtarget ATTRIBUTE_UNUSED,
10890 enum machine_mode mode ATTRIBUTE_UNUSED,
10891 int ignore ATTRIBUTE_UNUSED)
10896 /* Returns true is EXP represents data that would potentially reside
10897 in a readonly section. */
10900 readonly_data_expr (tree exp)
10904 if (TREE_CODE (exp) != ADDR_EXPR)
10907 exp = get_base_address (TREE_OPERAND (exp, 0));
10911 /* Make sure we call decl_readonly_section only for trees it
10912 can handle (since it returns true for everything it doesn't
10914 if (TREE_CODE (exp) == STRING_CST
10915 || TREE_CODE (exp) == CONSTRUCTOR
10916 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10917 return decl_readonly_section (exp, 0);
10922 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10923 to the call, and TYPE is its return type.
10925 Return NULL_TREE if no simplification was possible, otherwise return the
10926 simplified form of the call as a tree.
10928 The simplified form may be a constant or other expression which
10929 computes the same value, but in a more efficient manner (including
10930 calls to other builtin functions).
10932 The call may contain arguments which need to be evaluated, but
10933 which are not useful to determine the result of the call. In
10934 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10935 COMPOUND_EXPR will be an argument which must be evaluated.
10936 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10937 COMPOUND_EXPR in the chain will contain the tree for the simplified
10938 form of the builtin function call. */
10941 fold_builtin_strstr (tree s1, tree s2, tree type)
10943 if (!validate_arg (s1, POINTER_TYPE)
10944 || !validate_arg (s2, POINTER_TYPE))
10949 const char *p1, *p2;
10951 p2 = c_getstr (s2);
10955 p1 = c_getstr (s1);
10958 const char *r = strstr (p1, p2);
10962 return build_int_cst (TREE_TYPE (s1), 0);
10964 /* Return an offset into the constant string argument. */
10965 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10966 s1, size_int (r - p1));
10967 return fold_convert (type, tem);
10970 /* The argument is const char *, and the result is char *, so we need
10971 a type conversion here to avoid a warning. */
10973 return fold_convert (type, s1);
10978 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10982 /* New argument list transforming strstr(s1, s2) to
10983 strchr(s1, s2[0]). */
10984 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10988 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10989 the call, and TYPE is its return type.
10991 Return NULL_TREE if no simplification was possible, otherwise return the
10992 simplified form of the call as a tree.
10994 The simplified form may be a constant or other expression which
10995 computes the same value, but in a more efficient manner (including
10996 calls to other builtin functions).
10998 The call may contain arguments which need to be evaluated, but
10999 which are not useful to determine the result of the call. In
11000 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11001 COMPOUND_EXPR will be an argument which must be evaluated.
11002 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11003 COMPOUND_EXPR in the chain will contain the tree for the simplified
11004 form of the builtin function call. */
11007 fold_builtin_strchr (tree s1, tree s2, tree type)
11009 if (!validate_arg (s1, POINTER_TYPE)
11010 || !validate_arg (s2, INTEGER_TYPE))
11016 if (TREE_CODE (s2) != INTEGER_CST)
11019 p1 = c_getstr (s1);
11026 if (target_char_cast (s2, &c))
11029 r = strchr (p1, c);
11032 return build_int_cst (TREE_TYPE (s1), 0);
11034 /* Return an offset into the constant string argument. */
11035 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11036 s1, size_int (r - p1));
11037 return fold_convert (type, tem);
11043 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11044 the call, and TYPE is its return type.
11046 Return NULL_TREE if no simplification was possible, otherwise return the
11047 simplified form of the call as a tree.
11049 The simplified form may be a constant or other expression which
11050 computes the same value, but in a more efficient manner (including
11051 calls to other builtin functions).
11053 The call may contain arguments which need to be evaluated, but
11054 which are not useful to determine the result of the call. In
11055 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11056 COMPOUND_EXPR will be an argument which must be evaluated.
11057 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11058 COMPOUND_EXPR in the chain will contain the tree for the simplified
11059 form of the builtin function call. */
11062 fold_builtin_strrchr (tree s1, tree s2, tree type)
11064 if (!validate_arg (s1, POINTER_TYPE)
11065 || !validate_arg (s2, INTEGER_TYPE))
11072 if (TREE_CODE (s2) != INTEGER_CST)
11075 p1 = c_getstr (s1);
11082 if (target_char_cast (s2, &c))
11085 r = strrchr (p1, c);
11088 return build_int_cst (TREE_TYPE (s1), 0);
11090 /* Return an offset into the constant string argument. */
11091 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11092 s1, size_int (r - p1));
11093 return fold_convert (type, tem);
11096 if (! integer_zerop (s2))
11099 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11103 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11104 return build_call_expr (fn, 2, s1, s2);
11108 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11109 to the call, and TYPE is its return type.
11111 Return NULL_TREE if no simplification was possible, otherwise return the
11112 simplified form of the call as a tree.
11114 The simplified form may be a constant or other expression which
11115 computes the same value, but in a more efficient manner (including
11116 calls to other builtin functions).
11118 The call may contain arguments which need to be evaluated, but
11119 which are not useful to determine the result of the call. In
11120 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11121 COMPOUND_EXPR will be an argument which must be evaluated.
11122 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11123 COMPOUND_EXPR in the chain will contain the tree for the simplified
11124 form of the builtin function call. */
11127 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11129 if (!validate_arg (s1, POINTER_TYPE)
11130 || !validate_arg (s2, POINTER_TYPE))
11135 const char *p1, *p2;
11137 p2 = c_getstr (s2);
11141 p1 = c_getstr (s1);
11144 const char *r = strpbrk (p1, p2);
11148 return build_int_cst (TREE_TYPE (s1), 0);
11150 /* Return an offset into the constant string argument. */
11151 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11152 s1, size_int (r - p1));
11153 return fold_convert (type, tem);
11157 /* strpbrk(x, "") == NULL.
11158 Evaluate and ignore s1 in case it had side-effects. */
11159 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11162 return NULL_TREE; /* Really call strpbrk. */
11164 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11168 /* New argument list transforming strpbrk(s1, s2) to
11169 strchr(s1, s2[0]). */
11170 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11174 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11177 Return NULL_TREE if no simplification was possible, otherwise return the
11178 simplified form of the call as a tree.
11180 The simplified form may be a constant or other expression which
11181 computes the same value, but in a more efficient manner (including
11182 calls to other builtin functions).
11184 The call may contain arguments which need to be evaluated, but
11185 which are not useful to determine the result of the call. In
11186 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11187 COMPOUND_EXPR will be an argument which must be evaluated.
11188 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11189 COMPOUND_EXPR in the chain will contain the tree for the simplified
11190 form of the builtin function call. */
11193 fold_builtin_strcat (tree dst, tree src)
11195 if (!validate_arg (dst, POINTER_TYPE)
11196 || !validate_arg (src, POINTER_TYPE))
11200 const char *p = c_getstr (src);
11202 /* If the string length is zero, return the dst parameter. */
11203 if (p && *p == '\0')
11210 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11211 arguments to the call.
11213 Return NULL_TREE if no simplification was possible, otherwise return the
11214 simplified form of the call as a tree.
11216 The simplified form may be a constant or other expression which
11217 computes the same value, but in a more efficient manner (including
11218 calls to other builtin functions).
11220 The call may contain arguments which need to be evaluated, but
11221 which are not useful to determine the result of the call. In
11222 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11223 COMPOUND_EXPR will be an argument which must be evaluated.
11224 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11225 COMPOUND_EXPR in the chain will contain the tree for the simplified
11226 form of the builtin function call. */
11229 fold_builtin_strncat (tree dst, tree src, tree len)
11231 if (!validate_arg (dst, POINTER_TYPE)
11232 || !validate_arg (src, POINTER_TYPE)
11233 || !validate_arg (len, INTEGER_TYPE))
11237 const char *p = c_getstr (src);
11239 /* If the requested length is zero, or the src parameter string
11240 length is zero, return the dst parameter. */
11241 if (integer_zerop (len) || (p && *p == '\0'))
11242 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11244 /* If the requested len is greater than or equal to the string
11245 length, call strcat. */
11246 if (TREE_CODE (len) == INTEGER_CST && p
11247 && compare_tree_int (len, strlen (p)) >= 0)
11249 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11251 /* If the replacement _DECL isn't initialized, don't do the
11256 return build_call_expr (fn, 2, dst, src);
11262 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11265 Return NULL_TREE if no simplification was possible, otherwise return the
11266 simplified form of the call as a tree.
11268 The simplified form may be a constant or other expression which
11269 computes the same value, but in a more efficient manner (including
11270 calls to other builtin functions).
11272 The call may contain arguments which need to be evaluated, but
11273 which are not useful to determine the result of the call. In
11274 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11275 COMPOUND_EXPR will be an argument which must be evaluated.
11276 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11277 COMPOUND_EXPR in the chain will contain the tree for the simplified
11278 form of the builtin function call. */
11281 fold_builtin_strspn (tree s1, tree s2)
11283 if (!validate_arg (s1, POINTER_TYPE)
11284 || !validate_arg (s2, POINTER_TYPE))
11288 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11290 /* If both arguments are constants, evaluate at compile-time. */
11293 const size_t r = strspn (p1, p2);
11294 return size_int (r);
11297 /* If either argument is "", return NULL_TREE. */
11298 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11299 /* Evaluate and ignore both arguments in case either one has
11301 return omit_two_operands (integer_type_node, integer_zero_node,
11307 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11310 Return NULL_TREE if no simplification was possible, otherwise return the
11311 simplified form of the call as a tree.
11313 The simplified form may be a constant or other expression which
11314 computes the same value, but in a more efficient manner (including
11315 calls to other builtin functions).
11317 The call may contain arguments which need to be evaluated, but
11318 which are not useful to determine the result of the call. In
11319 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11320 COMPOUND_EXPR will be an argument which must be evaluated.
11321 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11322 COMPOUND_EXPR in the chain will contain the tree for the simplified
11323 form of the builtin function call. */
11326 fold_builtin_strcspn (tree s1, tree s2)
11328 if (!validate_arg (s1, POINTER_TYPE)
11329 || !validate_arg (s2, POINTER_TYPE))
11333 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11335 /* If both arguments are constants, evaluate at compile-time. */
11338 const size_t r = strcspn (p1, p2);
11339 return size_int (r);
11342 /* If the first argument is "", return NULL_TREE. */
11343 if (p1 && *p1 == '\0')
11345 /* Evaluate and ignore argument s2 in case it has
11347 return omit_one_operand (integer_type_node,
11348 integer_zero_node, s2);
11351 /* If the second argument is "", return __builtin_strlen(s1). */
11352 if (p2 && *p2 == '\0')
11354 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11356 /* If the replacement _DECL isn't initialized, don't do the
11361 return build_call_expr (fn, 1, s1);
11367 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11368 to the call. IGNORE is true if the value returned
11369 by the builtin will be ignored. UNLOCKED is true is true if this
11370 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11371 the known length of the string. Return NULL_TREE if no simplification
11375 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11377 /* If we're using an unlocked function, assume the other unlocked
11378 functions exist explicitly. */
11379 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11380 : implicit_built_in_decls[BUILT_IN_FPUTC];
11381 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11382 : implicit_built_in_decls[BUILT_IN_FWRITE];
11384 /* If the return value is used, don't do the transformation. */
11388 /* Verify the arguments in the original call. */
11389 if (!validate_arg (arg0, POINTER_TYPE)
11390 || !validate_arg (arg1, POINTER_TYPE))
11394 len = c_strlen (arg0, 0);
11396 /* Get the length of the string passed to fputs. If the length
11397 can't be determined, punt. */
11399 || TREE_CODE (len) != INTEGER_CST)
11402 switch (compare_tree_int (len, 1))
11404 case -1: /* length is 0, delete the call entirely . */
11405 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11407 case 0: /* length is 1, call fputc. */
11409 const char *p = c_getstr (arg0);
11414 return build_call_expr (fn_fputc, 2,
11415 build_int_cst (NULL_TREE, p[0]), arg1);
11421 case 1: /* length is greater than 1, call fwrite. */
11423 /* If optimizing for size keep fputs. */
11426 /* New argument list transforming fputs(string, stream) to
11427 fwrite(string, 1, len, stream). */
11429 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11434 gcc_unreachable ();
11439 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11440 produced. False otherwise. This is done so that we don't output the error
11441 or warning twice or three times. */
11443 fold_builtin_next_arg (tree exp, bool va_start_p)
11445 tree fntype = TREE_TYPE (current_function_decl);
11446 int nargs = call_expr_nargs (exp);
11449 if (TYPE_ARG_TYPES (fntype) == 0
11450 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11451 == void_type_node))
11453 error ("%<va_start%> used in function with fixed args");
11459 if (va_start_p && (nargs != 2))
11461 error ("wrong number of arguments to function %<va_start%>");
11464 arg = CALL_EXPR_ARG (exp, 1);
11466 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11467 when we checked the arguments and if needed issued a warning. */
11472 /* Evidently an out of date version of <stdarg.h>; can't validate
11473 va_start's second argument, but can still work as intended. */
11474 warning (0, "%<__builtin_next_arg%> called without an argument");
11477 else if (nargs > 1)
11479 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11482 arg = CALL_EXPR_ARG (exp, 0);
11485 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11486 or __builtin_next_arg (0) the first time we see it, after checking
11487 the arguments and if needed issuing a warning. */
11488 if (!integer_zerop (arg))
11490 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11492 /* Strip off all nops for the sake of the comparison. This
11493 is not quite the same as STRIP_NOPS. It does more.
11494 We must also strip off INDIRECT_EXPR for C++ reference
11496 while (CONVERT_EXPR_P (arg)
11497 || TREE_CODE (arg) == INDIRECT_REF)
11498 arg = TREE_OPERAND (arg, 0);
11499 if (arg != last_parm)
11501 /* FIXME: Sometimes with the tree optimizers we can get the
11502 not the last argument even though the user used the last
11503 argument. We just warn and set the arg to be the last
11504 argument so that we will get wrong-code because of
11506 warning (0, "second parameter of %<va_start%> not last named argument");
11508 /* We want to verify the second parameter just once before the tree
11509 optimizers are run and then avoid keeping it in the tree,
11510 as otherwise we could warn even for correct code like:
11511 void foo (int i, ...)
11512 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11514 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11516 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11522 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11523 ORIG may be null if this is a 2-argument call. We don't attempt to
11524 simplify calls with more than 3 arguments.
11526 Return NULL_TREE if no simplification was possible, otherwise return the
11527 simplified form of the call as a tree. If IGNORED is true, it means that
11528 the caller does not use the returned value of the function. */
11531 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11534 const char *fmt_str = NULL;
11536 /* Verify the required arguments in the original call. We deal with two
11537 types of sprintf() calls: 'sprintf (str, fmt)' and
11538 'sprintf (dest, "%s", orig)'. */
11539 if (!validate_arg (dest, POINTER_TYPE)
11540 || !validate_arg (fmt, POINTER_TYPE))
11542 if (orig && !validate_arg (orig, POINTER_TYPE))
11545 /* Check whether the format is a literal string constant. */
11546 fmt_str = c_getstr (fmt);
11547 if (fmt_str == NULL)
11551 retval = NULL_TREE;
11553 if (!init_target_chars ())
11556 /* If the format doesn't contain % args or %%, use strcpy. */
11557 if (strchr (fmt_str, target_percent) == NULL)
11559 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11564 /* Don't optimize sprintf (buf, "abc", ptr++). */
11568 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11569 'format' is known to contain no % formats. */
11570 call = build_call_expr (fn, 2, dest, fmt);
11572 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11575 /* If the format is "%s", use strcpy if the result isn't used. */
11576 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11579 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11584 /* Don't crash on sprintf (str1, "%s"). */
11588 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11591 retval = c_strlen (orig, 1);
11592 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11595 call = build_call_expr (fn, 2, dest, orig);
11598 if (call && retval)
11600 retval = fold_convert
11601 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11603 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11609 /* Expand a call EXP to __builtin_object_size. */
11612 expand_builtin_object_size (tree exp)
11615 int object_size_type;
11616 tree fndecl = get_callee_fndecl (exp);
11618 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11620 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11622 expand_builtin_trap ();
11626 ost = CALL_EXPR_ARG (exp, 1);
11629 if (TREE_CODE (ost) != INTEGER_CST
11630 || tree_int_cst_sgn (ost) < 0
11631 || compare_tree_int (ost, 3) > 0)
11633 error ("%Klast argument of %D is not integer constant between 0 and 3",
11635 expand_builtin_trap ();
11639 object_size_type = tree_low_cst (ost, 0);
11641 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11644 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11645 FCODE is the BUILT_IN_* to use.
11646 Return NULL_RTX if we failed; the caller should emit a normal call,
11647 otherwise try to get the result in TARGET, if convenient (and in
11648 mode MODE if that's convenient). */
11651 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11652 enum built_in_function fcode)
11654 tree dest, src, len, size;
11656 if (!validate_arglist (exp,
11658 fcode == BUILT_IN_MEMSET_CHK
11659 ? INTEGER_TYPE : POINTER_TYPE,
11660 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11663 dest = CALL_EXPR_ARG (exp, 0);
11664 src = CALL_EXPR_ARG (exp, 1);
11665 len = CALL_EXPR_ARG (exp, 2);
11666 size = CALL_EXPR_ARG (exp, 3);
11668 if (! host_integerp (size, 1))
11671 if (host_integerp (len, 1) || integer_all_onesp (size))
11675 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11677 warning (0, "%Kcall to %D will always overflow destination buffer",
11678 exp, get_callee_fndecl (exp));
11683 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11684 mem{cpy,pcpy,move,set} is available. */
11687 case BUILT_IN_MEMCPY_CHK:
11688 fn = built_in_decls[BUILT_IN_MEMCPY];
11690 case BUILT_IN_MEMPCPY_CHK:
11691 fn = built_in_decls[BUILT_IN_MEMPCPY];
11693 case BUILT_IN_MEMMOVE_CHK:
11694 fn = built_in_decls[BUILT_IN_MEMMOVE];
11696 case BUILT_IN_MEMSET_CHK:
11697 fn = built_in_decls[BUILT_IN_MEMSET];
11706 fn = build_call_expr (fn, 3, dest, src, len);
11707 STRIP_TYPE_NOPS (fn);
11708 while (TREE_CODE (fn) == COMPOUND_EXPR)
11710 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11712 fn = TREE_OPERAND (fn, 1);
11714 if (TREE_CODE (fn) == CALL_EXPR)
11715 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11716 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11718 else if (fcode == BUILT_IN_MEMSET_CHK)
11722 unsigned int dest_align
11723 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11725 /* If DEST is not a pointer type, call the normal function. */
11726 if (dest_align == 0)
11729 /* If SRC and DEST are the same (and not volatile), do nothing. */
11730 if (operand_equal_p (src, dest, 0))
11734 if (fcode != BUILT_IN_MEMPCPY_CHK)
11736 /* Evaluate and ignore LEN in case it has side-effects. */
11737 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11738 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11741 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11742 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11745 /* __memmove_chk special case. */
11746 if (fcode == BUILT_IN_MEMMOVE_CHK)
11748 unsigned int src_align
11749 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11751 if (src_align == 0)
11754 /* If src is categorized for a readonly section we can use
11755 normal __memcpy_chk. */
11756 if (readonly_data_expr (src))
11758 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11761 fn = build_call_expr (fn, 4, dest, src, len, size);
11762 STRIP_TYPE_NOPS (fn);
11763 while (TREE_CODE (fn) == COMPOUND_EXPR)
11765 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11767 fn = TREE_OPERAND (fn, 1);
11769 if (TREE_CODE (fn) == CALL_EXPR)
11770 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11771 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11778 /* Emit warning if a buffer overflow is detected at compile time. */
11781 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11788 case BUILT_IN_STRCPY_CHK:
11789 case BUILT_IN_STPCPY_CHK:
11790 /* For __strcat_chk the warning will be emitted only if overflowing
11791 by at least strlen (dest) + 1 bytes. */
11792 case BUILT_IN_STRCAT_CHK:
11793 len = CALL_EXPR_ARG (exp, 1);
11794 size = CALL_EXPR_ARG (exp, 2);
11797 case BUILT_IN_STRNCAT_CHK:
11798 case BUILT_IN_STRNCPY_CHK:
11799 len = CALL_EXPR_ARG (exp, 2);
11800 size = CALL_EXPR_ARG (exp, 3);
11802 case BUILT_IN_SNPRINTF_CHK:
11803 case BUILT_IN_VSNPRINTF_CHK:
11804 len = CALL_EXPR_ARG (exp, 1);
11805 size = CALL_EXPR_ARG (exp, 3);
11808 gcc_unreachable ();
11814 if (! host_integerp (size, 1) || integer_all_onesp (size))
11819 len = c_strlen (len, 1);
11820 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11823 else if (fcode == BUILT_IN_STRNCAT_CHK)
11825 tree src = CALL_EXPR_ARG (exp, 1);
11826 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11828 src = c_strlen (src, 1);
11829 if (! src || ! host_integerp (src, 1))
11831 warning (0, "%Kcall to %D might overflow destination buffer",
11832 exp, get_callee_fndecl (exp));
11835 else if (tree_int_cst_lt (src, size))
11838 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11841 warning (0, "%Kcall to %D will always overflow destination buffer",
11842 exp, get_callee_fndecl (exp));
11845 /* Emit warning if a buffer overflow is detected at compile time
11846 in __sprintf_chk/__vsprintf_chk calls. */
11849 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11851 tree dest, size, len, fmt, flag;
11852 const char *fmt_str;
11853 int nargs = call_expr_nargs (exp);
11855 /* Verify the required arguments in the original call. */
11859 dest = CALL_EXPR_ARG (exp, 0);
11860 flag = CALL_EXPR_ARG (exp, 1);
11861 size = CALL_EXPR_ARG (exp, 2);
11862 fmt = CALL_EXPR_ARG (exp, 3);
11864 if (! host_integerp (size, 1) || integer_all_onesp (size))
11867 /* Check whether the format is a literal string constant. */
11868 fmt_str = c_getstr (fmt);
11869 if (fmt_str == NULL)
11872 if (!init_target_chars ())
11875 /* If the format doesn't contain % args or %%, we know its size. */
11876 if (strchr (fmt_str, target_percent) == 0)
11877 len = build_int_cstu (size_type_node, strlen (fmt_str));
11878 /* If the format is "%s" and first ... argument is a string literal,
11880 else if (fcode == BUILT_IN_SPRINTF_CHK
11881 && strcmp (fmt_str, target_percent_s) == 0)
11887 arg = CALL_EXPR_ARG (exp, 4);
11888 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11891 len = c_strlen (arg, 1);
11892 if (!len || ! host_integerp (len, 1))
11898 if (! tree_int_cst_lt (len, size))
11900 warning (0, "%Kcall to %D will always overflow destination buffer",
11901 exp, get_callee_fndecl (exp));
11905 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11909 fold_builtin_object_size (tree ptr, tree ost)
11911 tree ret = NULL_TREE;
11912 int object_size_type;
11914 if (!validate_arg (ptr, POINTER_TYPE)
11915 || !validate_arg (ost, INTEGER_TYPE))
11920 if (TREE_CODE (ost) != INTEGER_CST
11921 || tree_int_cst_sgn (ost) < 0
11922 || compare_tree_int (ost, 3) > 0)
11925 object_size_type = tree_low_cst (ost, 0);
11927 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11928 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11929 and (size_t) 0 for types 2 and 3. */
11930 if (TREE_SIDE_EFFECTS (ptr))
11931 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11933 if (TREE_CODE (ptr) == ADDR_EXPR)
11934 ret = build_int_cstu (size_type_node,
11935 compute_builtin_object_size (ptr, object_size_type));
11937 else if (TREE_CODE (ptr) == SSA_NAME)
11939 unsigned HOST_WIDE_INT bytes;
11941 /* If object size is not known yet, delay folding until
11942 later. Maybe subsequent passes will help determining
11944 bytes = compute_builtin_object_size (ptr, object_size_type);
11945 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11947 ret = build_int_cstu (size_type_node, bytes);
11952 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11953 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11954 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11961 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11962 DEST, SRC, LEN, and SIZE are the arguments to the call.
11963 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11964 code of the builtin. If MAXLEN is not NULL, it is maximum length
11965 passed as third argument. */
11968 fold_builtin_memory_chk (tree fndecl,
11969 tree dest, tree src, tree len, tree size,
11970 tree maxlen, bool ignore,
11971 enum built_in_function fcode)
11975 if (!validate_arg (dest, POINTER_TYPE)
11976 || !validate_arg (src,
11977 (fcode == BUILT_IN_MEMSET_CHK
11978 ? INTEGER_TYPE : POINTER_TYPE))
11979 || !validate_arg (len, INTEGER_TYPE)
11980 || !validate_arg (size, INTEGER_TYPE))
11983 /* If SRC and DEST are the same (and not volatile), return DEST
11984 (resp. DEST+LEN for __mempcpy_chk). */
11985 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11987 if (fcode != BUILT_IN_MEMPCPY_CHK)
11988 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11991 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11992 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
11996 if (! host_integerp (size, 1))
11999 if (! integer_all_onesp (size))
12001 if (! host_integerp (len, 1))
12003 /* If LEN is not constant, try MAXLEN too.
12004 For MAXLEN only allow optimizing into non-_ocs function
12005 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12006 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12008 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12010 /* (void) __mempcpy_chk () can be optimized into
12011 (void) __memcpy_chk (). */
12012 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12016 return build_call_expr (fn, 4, dest, src, len, size);
12024 if (tree_int_cst_lt (size, maxlen))
12029 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12030 mem{cpy,pcpy,move,set} is available. */
12033 case BUILT_IN_MEMCPY_CHK:
12034 fn = built_in_decls[BUILT_IN_MEMCPY];
12036 case BUILT_IN_MEMPCPY_CHK:
12037 fn = built_in_decls[BUILT_IN_MEMPCPY];
12039 case BUILT_IN_MEMMOVE_CHK:
12040 fn = built_in_decls[BUILT_IN_MEMMOVE];
12042 case BUILT_IN_MEMSET_CHK:
12043 fn = built_in_decls[BUILT_IN_MEMSET];
12052 return build_call_expr (fn, 3, dest, src, len);
12055 /* Fold a call to the __st[rp]cpy_chk builtin.
12056 DEST, SRC, and SIZE are the arguments to the call.
12057 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12058 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12059 strings passed as second argument. */
12062 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12063 tree maxlen, bool ignore,
12064 enum built_in_function fcode)
12068 if (!validate_arg (dest, POINTER_TYPE)
12069 || !validate_arg (src, POINTER_TYPE)
12070 || !validate_arg (size, INTEGER_TYPE))
12073 /* If SRC and DEST are the same (and not volatile), return DEST. */
12074 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12075 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12077 if (! host_integerp (size, 1))
12080 if (! integer_all_onesp (size))
12082 len = c_strlen (src, 1);
12083 if (! len || ! host_integerp (len, 1))
12085 /* If LEN is not constant, try MAXLEN too.
12086 For MAXLEN only allow optimizing into non-_ocs function
12087 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12088 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12090 if (fcode == BUILT_IN_STPCPY_CHK)
12095 /* If return value of __stpcpy_chk is ignored,
12096 optimize into __strcpy_chk. */
12097 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12101 return build_call_expr (fn, 3, dest, src, size);
12104 if (! len || TREE_SIDE_EFFECTS (len))
12107 /* If c_strlen returned something, but not a constant,
12108 transform __strcpy_chk into __memcpy_chk. */
12109 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12113 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12114 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12115 build_call_expr (fn, 4,
12116 dest, src, len, size));
12122 if (! tree_int_cst_lt (maxlen, size))
12126 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12127 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12128 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12132 return build_call_expr (fn, 2, dest, src);
12135 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12136 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12137 length passed as third argument. */
12140 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12145 if (!validate_arg (dest, POINTER_TYPE)
12146 || !validate_arg (src, POINTER_TYPE)
12147 || !validate_arg (len, INTEGER_TYPE)
12148 || !validate_arg (size, INTEGER_TYPE))
12151 if (! host_integerp (size, 1))
12154 if (! integer_all_onesp (size))
12156 if (! host_integerp (len, 1))
12158 /* If LEN is not constant, try MAXLEN too.
12159 For MAXLEN only allow optimizing into non-_ocs function
12160 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12161 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12167 if (tree_int_cst_lt (size, maxlen))
12171 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12172 fn = built_in_decls[BUILT_IN_STRNCPY];
12176 return build_call_expr (fn, 3, dest, src, len);
12179 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12180 are the arguments to the call. */
12183 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12188 if (!validate_arg (dest, POINTER_TYPE)
12189 || !validate_arg (src, POINTER_TYPE)
12190 || !validate_arg (size, INTEGER_TYPE))
12193 p = c_getstr (src);
12194 /* If the SRC parameter is "", return DEST. */
12195 if (p && *p == '\0')
12196 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12198 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12201 /* If __builtin_strcat_chk is used, assume strcat is available. */
12202 fn = built_in_decls[BUILT_IN_STRCAT];
12206 return build_call_expr (fn, 2, dest, src);
12209 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12213 fold_builtin_strncat_chk (tree fndecl,
12214 tree dest, tree src, tree len, tree size)
12219 if (!validate_arg (dest, POINTER_TYPE)
12220 || !validate_arg (src, POINTER_TYPE)
12221 || !validate_arg (size, INTEGER_TYPE)
12222 || !validate_arg (size, INTEGER_TYPE))
12225 p = c_getstr (src);
12226 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12227 if (p && *p == '\0')
12228 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12229 else if (integer_zerop (len))
12230 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12232 if (! host_integerp (size, 1))
12235 if (! integer_all_onesp (size))
12237 tree src_len = c_strlen (src, 1);
12239 && host_integerp (src_len, 1)
12240 && host_integerp (len, 1)
12241 && ! tree_int_cst_lt (len, src_len))
12243 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12244 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12248 return build_call_expr (fn, 3, dest, src, size);
12253 /* If __builtin_strncat_chk is used, assume strncat is available. */
12254 fn = built_in_decls[BUILT_IN_STRNCAT];
12258 return build_call_expr (fn, 3, dest, src, len);
12261 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12262 a normal call should be emitted rather than expanding the function
12263 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12266 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12268 tree dest, size, len, fn, fmt, flag;
12269 const char *fmt_str;
12270 int nargs = call_expr_nargs (exp);
12272 /* Verify the required arguments in the original call. */
12275 dest = CALL_EXPR_ARG (exp, 0);
12276 if (!validate_arg (dest, POINTER_TYPE))
12278 flag = CALL_EXPR_ARG (exp, 1);
12279 if (!validate_arg (flag, INTEGER_TYPE))
12281 size = CALL_EXPR_ARG (exp, 2);
12282 if (!validate_arg (size, INTEGER_TYPE))
12284 fmt = CALL_EXPR_ARG (exp, 3);
12285 if (!validate_arg (fmt, POINTER_TYPE))
12288 if (! host_integerp (size, 1))
12293 if (!init_target_chars ())
12296 /* Check whether the format is a literal string constant. */
12297 fmt_str = c_getstr (fmt);
12298 if (fmt_str != NULL)
12300 /* If the format doesn't contain % args or %%, we know the size. */
12301 if (strchr (fmt_str, target_percent) == 0)
12303 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12304 len = build_int_cstu (size_type_node, strlen (fmt_str));
12306 /* If the format is "%s" and first ... argument is a string literal,
12307 we know the size too. */
12308 else if (fcode == BUILT_IN_SPRINTF_CHK
12309 && strcmp (fmt_str, target_percent_s) == 0)
12315 arg = CALL_EXPR_ARG (exp, 4);
12316 if (validate_arg (arg, POINTER_TYPE))
12318 len = c_strlen (arg, 1);
12319 if (! len || ! host_integerp (len, 1))
12326 if (! integer_all_onesp (size))
12328 if (! len || ! tree_int_cst_lt (len, size))
12332 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12333 or if format doesn't contain % chars or is "%s". */
12334 if (! integer_zerop (flag))
12336 if (fmt_str == NULL)
12338 if (strchr (fmt_str, target_percent) != NULL
12339 && strcmp (fmt_str, target_percent_s))
12343 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12344 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12345 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12349 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12352 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12353 a normal call should be emitted rather than expanding the function
12354 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12355 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12356 passed as second argument. */
12359 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12360 enum built_in_function fcode)
12362 tree dest, size, len, fn, fmt, flag;
12363 const char *fmt_str;
12365 /* Verify the required arguments in the original call. */
12366 if (call_expr_nargs (exp) < 5)
12368 dest = CALL_EXPR_ARG (exp, 0);
12369 if (!validate_arg (dest, POINTER_TYPE))
12371 len = CALL_EXPR_ARG (exp, 1);
12372 if (!validate_arg (len, INTEGER_TYPE))
12374 flag = CALL_EXPR_ARG (exp, 2);
12375 if (!validate_arg (flag, INTEGER_TYPE))
12377 size = CALL_EXPR_ARG (exp, 3);
12378 if (!validate_arg (size, INTEGER_TYPE))
12380 fmt = CALL_EXPR_ARG (exp, 4);
12381 if (!validate_arg (fmt, POINTER_TYPE))
12384 if (! host_integerp (size, 1))
12387 if (! integer_all_onesp (size))
12389 if (! host_integerp (len, 1))
12391 /* If LEN is not constant, try MAXLEN too.
12392 For MAXLEN only allow optimizing into non-_ocs function
12393 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12394 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12400 if (tree_int_cst_lt (size, maxlen))
12404 if (!init_target_chars ())
12407 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12408 or if format doesn't contain % chars or is "%s". */
12409 if (! integer_zerop (flag))
12411 fmt_str = c_getstr (fmt);
12412 if (fmt_str == NULL)
12414 if (strchr (fmt_str, target_percent) != NULL
12415 && strcmp (fmt_str, target_percent_s))
12419 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12421 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12422 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12426 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12429 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12430 FMT and ARG are the arguments to the call; we don't fold cases with
12431 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12433 Return NULL_TREE if no simplification was possible, otherwise return the
12434 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12435 code of the function to be simplified. */
12438 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12439 enum built_in_function fcode)
12441 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12442 const char *fmt_str = NULL;
12444 /* If the return value is used, don't do the transformation. */
12448 /* Verify the required arguments in the original call. */
12449 if (!validate_arg (fmt, POINTER_TYPE))
12452 /* Check whether the format is a literal string constant. */
12453 fmt_str = c_getstr (fmt);
12454 if (fmt_str == NULL)
12457 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12459 /* If we're using an unlocked function, assume the other
12460 unlocked functions exist explicitly. */
12461 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12462 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12466 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12467 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12470 if (!init_target_chars ())
12473 if (strcmp (fmt_str, target_percent_s) == 0
12474 || strchr (fmt_str, target_percent) == NULL)
12478 if (strcmp (fmt_str, target_percent_s) == 0)
12480 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12483 if (!arg || !validate_arg (arg, POINTER_TYPE))
12486 str = c_getstr (arg);
12492 /* The format specifier doesn't contain any '%' characters. */
12493 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12499 /* If the string was "", printf does nothing. */
12500 if (str[0] == '\0')
12501 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12503 /* If the string has length of 1, call putchar. */
12504 if (str[1] == '\0')
12506 /* Given printf("c"), (where c is any one character,)
12507 convert "c"[0] to an int and pass that to the replacement
12509 newarg = build_int_cst (NULL_TREE, str[0]);
12511 call = build_call_expr (fn_putchar, 1, newarg);
12515 /* If the string was "string\n", call puts("string"). */
12516 size_t len = strlen (str);
12517 if ((unsigned char)str[len - 1] == target_newline)
12519 /* Create a NUL-terminated string that's one char shorter
12520 than the original, stripping off the trailing '\n'. */
12521 char *newstr = XALLOCAVEC (char, len);
12522 memcpy (newstr, str, len - 1);
12523 newstr[len - 1] = 0;
12525 newarg = build_string_literal (len, newstr);
12527 call = build_call_expr (fn_puts, 1, newarg);
12530 /* We'd like to arrange to call fputs(string,stdout) here,
12531 but we need stdout and don't have a way to get it yet. */
12536 /* The other optimizations can be done only on the non-va_list variants. */
12537 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12540 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12541 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12543 if (!arg || !validate_arg (arg, POINTER_TYPE))
12546 call = build_call_expr (fn_puts, 1, arg);
12549 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12550 else if (strcmp (fmt_str, target_percent_c) == 0)
12552 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12555 call = build_call_expr (fn_putchar, 1, arg);
12561 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12564 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12565 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12566 more than 3 arguments, and ARG may be null in the 2-argument case.
12568 Return NULL_TREE if no simplification was possible, otherwise return the
12569 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12570 code of the function to be simplified. */
12573 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12574 enum built_in_function fcode)
12576 tree fn_fputc, fn_fputs, call = NULL_TREE;
12577 const char *fmt_str = NULL;
12579 /* If the return value is used, don't do the transformation. */
12583 /* Verify the required arguments in the original call. */
12584 if (!validate_arg (fp, POINTER_TYPE))
12586 if (!validate_arg (fmt, POINTER_TYPE))
12589 /* Check whether the format is a literal string constant. */
12590 fmt_str = c_getstr (fmt);
12591 if (fmt_str == NULL)
12594 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12596 /* If we're using an unlocked function, assume the other
12597 unlocked functions exist explicitly. */
12598 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12599 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12603 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12604 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12607 if (!init_target_chars ())
12610 /* If the format doesn't contain % args or %%, use strcpy. */
12611 if (strchr (fmt_str, target_percent) == NULL)
12613 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12617 /* If the format specifier was "", fprintf does nothing. */
12618 if (fmt_str[0] == '\0')
12620 /* If FP has side-effects, just wait until gimplification is
12622 if (TREE_SIDE_EFFECTS (fp))
12625 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12628 /* When "string" doesn't contain %, replace all cases of
12629 fprintf (fp, string) with fputs (string, fp). The fputs
12630 builtin will take care of special cases like length == 1. */
12632 call = build_call_expr (fn_fputs, 2, fmt, fp);
12635 /* The other optimizations can be done only on the non-va_list variants. */
12636 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12639 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12640 else if (strcmp (fmt_str, target_percent_s) == 0)
12642 if (!arg || !validate_arg (arg, POINTER_TYPE))
12645 call = build_call_expr (fn_fputs, 2, arg, fp);
12648 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12649 else if (strcmp (fmt_str, target_percent_c) == 0)
12651 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12654 call = build_call_expr (fn_fputc, 2, arg, fp);
12659 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12662 /* Initialize format string characters in the target charset. */
12665 init_target_chars (void)
12670 target_newline = lang_hooks.to_target_charset ('\n');
12671 target_percent = lang_hooks.to_target_charset ('%');
12672 target_c = lang_hooks.to_target_charset ('c');
12673 target_s = lang_hooks.to_target_charset ('s');
12674 if (target_newline == 0 || target_percent == 0 || target_c == 0
12678 target_percent_c[0] = target_percent;
12679 target_percent_c[1] = target_c;
12680 target_percent_c[2] = '\0';
12682 target_percent_s[0] = target_percent;
12683 target_percent_s[1] = target_s;
12684 target_percent_s[2] = '\0';
12686 target_percent_s_newline[0] = target_percent;
12687 target_percent_s_newline[1] = target_s;
12688 target_percent_s_newline[2] = target_newline;
12689 target_percent_s_newline[3] = '\0';
12696 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12697 and no overflow/underflow occurred. INEXACT is true if M was not
12698 exactly calculated. TYPE is the tree type for the result. This
12699 function assumes that you cleared the MPFR flags and then
12700 calculated M to see if anything subsequently set a flag prior to
12701 entering this function. Return NULL_TREE if any checks fail. */
12704 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12706 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12707 overflow/underflow occurred. If -frounding-math, proceed iff the
12708 result of calling FUNC was exact. */
12709 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12710 && (!flag_rounding_math || !inexact))
12712 REAL_VALUE_TYPE rr;
12714 real_from_mpfr (&rr, m, type, GMP_RNDN);
12715 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12716 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12717 but the mpft_t is not, then we underflowed in the
12719 if (real_isfinite (&rr)
12720 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12722 REAL_VALUE_TYPE rmode;
12724 real_convert (&rmode, TYPE_MODE (type), &rr);
12725 /* Proceed iff the specified mode can hold the value. */
12726 if (real_identical (&rmode, &rr))
12727 return build_real (type, rmode);
12733 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12734 FUNC on it and return the resulting value as a tree with type TYPE.
12735 If MIN and/or MAX are not NULL, then the supplied ARG must be
12736 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12737 acceptable values, otherwise they are not. The mpfr precision is
12738 set to the precision of TYPE. We assume that function FUNC returns
12739 zero if the result could be calculated exactly within the requested
12743 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12744 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12747 tree result = NULL_TREE;
12751 /* To proceed, MPFR must exactly represent the target floating point
12752 format, which only happens when the target base equals two. */
12753 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12754 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12756 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12758 if (real_isfinite (ra)
12759 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12760 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12762 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12766 mpfr_init2 (m, prec);
12767 mpfr_from_real (m, ra, GMP_RNDN);
12768 mpfr_clear_flags ();
12769 inexact = func (m, m, GMP_RNDN);
12770 result = do_mpfr_ckconv (m, type, inexact);
12778 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12779 FUNC on it and return the resulting value as a tree with type TYPE.
12780 The mpfr precision is set to the precision of TYPE. We assume that
12781 function FUNC returns zero if the result could be calculated
12782 exactly within the requested precision. */
12785 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12786 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12788 tree result = NULL_TREE;
12793 /* To proceed, MPFR must exactly represent the target floating point
12794 format, which only happens when the target base equals two. */
12795 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12796 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12797 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12799 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12800 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12802 if (real_isfinite (ra1) && real_isfinite (ra2))
12804 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12808 mpfr_inits2 (prec, m1, m2, NULL);
12809 mpfr_from_real (m1, ra1, GMP_RNDN);
12810 mpfr_from_real (m2, ra2, GMP_RNDN);
12811 mpfr_clear_flags ();
12812 inexact = func (m1, m1, m2, GMP_RNDN);
12813 result = do_mpfr_ckconv (m1, type, inexact);
12814 mpfr_clears (m1, m2, NULL);
12821 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12822 FUNC on it and return the resulting value as a tree with type TYPE.
12823 The mpfr precision is set to the precision of TYPE. We assume that
12824 function FUNC returns zero if the result could be calculated
12825 exactly within the requested precision. */
12828 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12829 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12831 tree result = NULL_TREE;
12837 /* To proceed, MPFR must exactly represent the target floating point
12838 format, which only happens when the target base equals two. */
12839 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12840 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12841 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12842 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12844 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12845 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12846 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12848 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12850 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12854 mpfr_inits2 (prec, m1, m2, m3, NULL);
12855 mpfr_from_real (m1, ra1, GMP_RNDN);
12856 mpfr_from_real (m2, ra2, GMP_RNDN);
12857 mpfr_from_real (m3, ra3, GMP_RNDN);
12858 mpfr_clear_flags ();
12859 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12860 result = do_mpfr_ckconv (m1, type, inexact);
12861 mpfr_clears (m1, m2, m3, NULL);
12868 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12869 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12870 If ARG_SINP and ARG_COSP are NULL then the result is returned
12871 as a complex value.
12872 The type is taken from the type of ARG and is used for setting the
12873 precision of the calculation and results. */
12876 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12878 tree const type = TREE_TYPE (arg);
12879 tree result = NULL_TREE;
12883 /* To proceed, MPFR must exactly represent the target floating point
12884 format, which only happens when the target base equals two. */
12885 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12886 && TREE_CODE (arg) == REAL_CST
12887 && !TREE_OVERFLOW (arg))
12889 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12891 if (real_isfinite (ra))
12893 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12894 tree result_s, result_c;
12898 mpfr_inits2 (prec, m, ms, mc, NULL);
12899 mpfr_from_real (m, ra, GMP_RNDN);
12900 mpfr_clear_flags ();
12901 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12902 result_s = do_mpfr_ckconv (ms, type, inexact);
12903 result_c = do_mpfr_ckconv (mc, type, inexact);
12904 mpfr_clears (m, ms, mc, NULL);
12905 if (result_s && result_c)
12907 /* If we are to return in a complex value do so. */
12908 if (!arg_sinp && !arg_cosp)
12909 return build_complex (build_complex_type (type),
12910 result_c, result_s);
12912 /* Dereference the sin/cos pointer arguments. */
12913 arg_sinp = build_fold_indirect_ref (arg_sinp);
12914 arg_cosp = build_fold_indirect_ref (arg_cosp);
12915 /* Proceed if valid pointer type were passed in. */
12916 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12917 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12919 /* Set the values. */
12920 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12922 TREE_SIDE_EFFECTS (result_s) = 1;
12923 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12925 TREE_SIDE_EFFECTS (result_c) = 1;
12926 /* Combine the assignments into a compound expr. */
12927 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12928 result_s, result_c));
12936 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
12937 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12938 two-argument mpfr order N Bessel function FUNC on them and return
12939 the resulting value as a tree with type TYPE. The mpfr precision
12940 is set to the precision of TYPE. We assume that function FUNC
12941 returns zero if the result could be calculated exactly within the
12942 requested precision. */
12944 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12945 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12946 const REAL_VALUE_TYPE *min, bool inclusive)
12948 tree result = NULL_TREE;
12953 /* To proceed, MPFR must exactly represent the target floating point
12954 format, which only happens when the target base equals two. */
12955 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12956 && host_integerp (arg1, 0)
12957 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12959 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12960 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12963 && real_isfinite (ra)
12964 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12966 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12970 mpfr_init2 (m, prec);
12971 mpfr_from_real (m, ra, GMP_RNDN);
12972 mpfr_clear_flags ();
12973 inexact = func (m, n, m, GMP_RNDN);
12974 result = do_mpfr_ckconv (m, type, inexact);
12982 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12983 the pointer *(ARG_QUO) and return the result. The type is taken
12984 from the type of ARG0 and is used for setting the precision of the
12985 calculation and results. */
12988 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12990 tree const type = TREE_TYPE (arg0);
12991 tree result = NULL_TREE;
12996 /* To proceed, MPFR must exactly represent the target floating point
12997 format, which only happens when the target base equals two. */
12998 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12999 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13000 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13002 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13003 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13005 if (real_isfinite (ra0) && real_isfinite (ra1))
13007 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
13012 mpfr_inits2 (prec, m0, m1, NULL);
13013 mpfr_from_real (m0, ra0, GMP_RNDN);
13014 mpfr_from_real (m1, ra1, GMP_RNDN);
13015 mpfr_clear_flags ();
13016 mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
13017 /* Remquo is independent of the rounding mode, so pass
13018 inexact=0 to do_mpfr_ckconv(). */
13019 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13020 mpfr_clears (m0, m1, NULL);
13023 /* MPFR calculates quo in the host's long so it may
13024 return more bits in quo than the target int can hold
13025 if sizeof(host long) > sizeof(target int). This can
13026 happen even for native compilers in LP64 mode. In
13027 these cases, modulo the quo value with the largest
13028 number that the target int can hold while leaving one
13029 bit for the sign. */
13030 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13031 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13033 /* Dereference the quo pointer argument. */
13034 arg_quo = build_fold_indirect_ref (arg_quo);
13035 /* Proceed iff a valid pointer type was passed in. */
13036 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13038 /* Set the value. */
13039 tree result_quo = fold_build2 (MODIFY_EXPR,
13040 TREE_TYPE (arg_quo), arg_quo,
13041 build_int_cst (NULL, integer_quo));
13042 TREE_SIDE_EFFECTS (result_quo) = 1;
13043 /* Combine the quo assignment with the rem. */
13044 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13045 result_quo, result_rem));
13053 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13054 resulting value as a tree with type TYPE. The mpfr precision is
13055 set to the precision of TYPE. We assume that this mpfr function
13056 returns zero if the result could be calculated exactly within the
13057 requested precision. In addition, the integer pointer represented
13058 by ARG_SG will be dereferenced and set to the appropriate signgam
13062 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13064 tree result = NULL_TREE;
13068 /* To proceed, MPFR must exactly represent the target floating point
13069 format, which only happens when the target base equals two. Also
13070 verify ARG is a constant and that ARG_SG is an int pointer. */
13071 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13072 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13073 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13074 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13076 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13078 /* In addition to NaN and Inf, the argument cannot be zero or a
13079 negative integer. */
13080 if (real_isfinite (ra)
13081 && ra->cl != rvc_zero
13082 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13084 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
13089 mpfr_init2 (m, prec);
13090 mpfr_from_real (m, ra, GMP_RNDN);
13091 mpfr_clear_flags ();
13092 inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
13093 result_lg = do_mpfr_ckconv (m, type, inexact);
13099 /* Dereference the arg_sg pointer argument. */
13100 arg_sg = build_fold_indirect_ref (arg_sg);
13101 /* Assign the signgam value into *arg_sg. */
13102 result_sg = fold_build2 (MODIFY_EXPR,
13103 TREE_TYPE (arg_sg), arg_sg,
13104 build_int_cst (NULL, sg));
13105 TREE_SIDE_EFFECTS (result_sg) = 1;
13106 /* Combine the signgam assignment with the lgamma result. */
13107 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13108 result_sg, result_lg));