1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names[(int) END_BUILTINS] =
65 #include "builtins.def"
69 /* Setup an array of _DECL trees, make sure each element is
70 initialized to NULL_TREE. */
71 tree built_in_decls[(int) END_BUILTINS];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 It may be NULL_TREE when this is invalid (for instance runtime is not
74 required to implement the function call in all cases). */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
129 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
132 static rtx expand_builtin_bzero (tree);
133 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_alloca (tree, rtx);
139 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static rtx expand_builtin_fputs (tree, rtx, bool);
142 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
143 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_expect (tree, tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (tree);
151 static tree fold_builtin_inf (tree, int);
152 static tree fold_builtin_nan (tree, tree, int);
153 static tree rewrite_call_expr (tree, int, tree, int, ...);
154 static bool validate_arg (const_tree, enum tree_code code);
155 static bool integer_valued_real_p (tree);
156 static tree fold_trunc_transparent_mathfn (tree, tree);
157 static bool readonly_data_expr (tree);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_signbit (tree, rtx);
160 static tree fold_builtin_sqrt (tree, tree);
161 static tree fold_builtin_cbrt (tree, tree);
162 static tree fold_builtin_pow (tree, tree, tree, tree);
163 static tree fold_builtin_powi (tree, tree, tree, tree);
164 static tree fold_builtin_cos (tree, tree, tree);
165 static tree fold_builtin_cosh (tree, tree, tree);
166 static tree fold_builtin_tan (tree, tree);
167 static tree fold_builtin_trunc (tree, tree);
168 static tree fold_builtin_floor (tree, tree);
169 static tree fold_builtin_ceil (tree, tree);
170 static tree fold_builtin_round (tree, tree);
171 static tree fold_builtin_int_roundingfn (tree, tree);
172 static tree fold_builtin_bitop (tree, tree);
173 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
174 static tree fold_builtin_strchr (tree, tree, tree);
175 static tree fold_builtin_memchr (tree, tree, tree, tree);
176 static tree fold_builtin_memcmp (tree, tree, tree);
177 static tree fold_builtin_strcmp (tree, tree);
178 static tree fold_builtin_strncmp (tree, tree, tree);
179 static tree fold_builtin_signbit (tree, tree);
180 static tree fold_builtin_copysign (tree, tree, tree, tree);
181 static tree fold_builtin_isascii (tree);
182 static tree fold_builtin_toascii (tree);
183 static tree fold_builtin_isdigit (tree);
184 static tree fold_builtin_fabs (tree, tree);
185 static tree fold_builtin_abs (tree, tree);
186 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
188 static tree fold_builtin_n (tree, tree *, int, bool);
189 static tree fold_builtin_0 (tree, bool);
190 static tree fold_builtin_1 (tree, tree, bool);
191 static tree fold_builtin_2 (tree, tree, tree, bool);
192 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
193 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
194 static tree fold_builtin_varargs (tree, tree, bool);
196 static tree fold_builtin_strpbrk (tree, tree, tree);
197 static tree fold_builtin_strstr (tree, tree, tree);
198 static tree fold_builtin_strrchr (tree, tree, tree);
199 static tree fold_builtin_strcat (tree, tree);
200 static tree fold_builtin_strncat (tree, tree, tree);
201 static tree fold_builtin_strspn (tree, tree);
202 static tree fold_builtin_strcspn (tree, tree);
203 static tree fold_builtin_sprintf (tree, tree, tree, int);
205 static rtx expand_builtin_object_size (tree);
206 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
207 enum built_in_function);
208 static void maybe_emit_chk_warning (tree, enum built_in_function);
209 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
210 static tree fold_builtin_object_size (tree, tree);
211 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
212 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
213 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
214 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
215 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
216 enum built_in_function);
217 static bool init_target_chars (void);
219 static unsigned HOST_WIDE_INT target_newline;
220 static unsigned HOST_WIDE_INT target_percent;
221 static unsigned HOST_WIDE_INT target_c;
222 static unsigned HOST_WIDE_INT target_s;
223 static char target_percent_c[3];
224 static char target_percent_s[3];
225 static char target_percent_s_newline[4];
226 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_arg2 (tree, tree, tree,
229 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
230 static tree do_mpfr_arg3 (tree, tree, tree, tree,
231 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
232 static tree do_mpfr_sincos (tree, tree, tree);
233 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
234 static tree do_mpfr_bessel_n (tree, tree, tree,
235 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
236 const REAL_VALUE_TYPE *, bool);
237 static tree do_mpfr_remquo (tree, tree, tree);
238 static tree do_mpfr_lgamma_r (tree, tree, tree);
241 /* Return true if NODE should be considered for inline expansion regardless
242 of the optimization level. This means whenever a function is invoked with
243 its "internal" name, which normally contains the prefix "__builtin". */
245 static bool called_as_built_in (tree node)
247 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
248 if (strncmp (name, "__builtin_", 10) == 0)
250 if (strncmp (name, "__sync_", 7) == 0)
255 /* Return the alignment in bits of EXP, a pointer valued expression.
256 But don't return more than MAX_ALIGN no matter what.
257 The alignment returned is, by default, the alignment of the thing that
258 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
260 Otherwise, look at the expression to see if we can do better, i.e., if the
261 expression is actually pointing at an object whose alignment is tighter. */
264 get_pointer_alignment (tree exp, unsigned int max_align)
266 unsigned int align, inner;
268 /* We rely on TER to compute accurate alignment information. */
269 if (!(optimize && flag_tree_ter))
272 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
275 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
276 align = MIN (align, max_align);
280 switch (TREE_CODE (exp))
283 exp = TREE_OPERAND (exp, 0);
284 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
287 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
288 align = MIN (inner, max_align);
291 case POINTER_PLUS_EXPR:
292 /* If sum of pointer + int, restrict our maximum alignment to that
293 imposed by the integer. If not, we can't do any better than
295 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
298 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
299 & (max_align / BITS_PER_UNIT - 1))
303 exp = TREE_OPERAND (exp, 0);
307 /* See what we are pointing at and look at its alignment. */
308 exp = TREE_OPERAND (exp, 0);
310 if (handled_component_p (exp))
312 HOST_WIDE_INT bitsize, bitpos;
314 enum machine_mode mode;
315 int unsignedp, volatilep;
317 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
318 &mode, &unsignedp, &volatilep, true);
320 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
321 if (offset && TREE_CODE (offset) == PLUS_EXPR
322 && host_integerp (TREE_OPERAND (offset, 1), 1))
324 /* Any overflow in calculating offset_bits won't change
327 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
331 inner = MIN (inner, (offset_bits & -offset_bits));
332 offset = TREE_OPERAND (offset, 0);
334 if (offset && TREE_CODE (offset) == MULT_EXPR
335 && host_integerp (TREE_OPERAND (offset, 1), 1))
337 /* Any overflow in calculating offset_factor won't change
339 unsigned offset_factor
340 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
344 inner = MIN (inner, (offset_factor & -offset_factor));
347 inner = MIN (inner, BITS_PER_UNIT);
350 align = MIN (inner, DECL_ALIGN (exp));
351 #ifdef CONSTANT_ALIGNMENT
352 else if (CONSTANT_CLASS_P (exp))
353 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
355 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
356 || TREE_CODE (exp) == INDIRECT_REF)
357 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
359 align = MIN (align, inner);
360 return MIN (align, max_align);
368 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
369 way, because it could contain a zero byte in the middle.
370 TREE_STRING_LENGTH is the size of the character array, not the string.
372 ONLY_VALUE should be nonzero if the result is not going to be emitted
373 into the instruction stream and zero if it is going to be expanded.
374 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
375 is returned, otherwise NULL, since
376 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
377 evaluate the side-effects.
379 The value returned is of type `ssizetype'.
381 Unfortunately, string_constant can't access the values of const char
382 arrays with initializers, so neither can we do so here. */
385 c_strlen (tree src, int only_value)
388 HOST_WIDE_INT offset;
393 if (TREE_CODE (src) == COND_EXPR
394 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
398 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
399 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
400 if (tree_int_cst_equal (len1, len2))
404 if (TREE_CODE (src) == COMPOUND_EXPR
405 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
406 return c_strlen (TREE_OPERAND (src, 1), only_value);
408 src = string_constant (src, &offset_node);
412 max = TREE_STRING_LENGTH (src) - 1;
413 ptr = TREE_STRING_POINTER (src);
415 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
417 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
418 compute the offset to the following null if we don't know where to
419 start searching for it. */
422 for (i = 0; i < max; i++)
426 /* We don't know the starting offset, but we do know that the string
427 has no internal zero bytes. We can assume that the offset falls
428 within the bounds of the string; otherwise, the programmer deserves
429 what he gets. Subtract the offset from the length of the string,
430 and return that. This would perhaps not be valid if we were dealing
431 with named arrays in addition to literal string constants. */
433 return size_diffop (size_int (max), offset_node);
436 /* We have a known offset into the string. Start searching there for
437 a null character if we can represent it as a single HOST_WIDE_INT. */
438 if (offset_node == 0)
440 else if (! host_integerp (offset_node, 0))
443 offset = tree_low_cst (offset_node, 0);
445 /* If the offset is known to be out of bounds, warn, and call strlen at
447 if (offset < 0 || offset > max)
449 /* Suppress multiple warnings for propagated constant strings. */
450 if (! TREE_NO_WARNING (src))
452 warning (0, "offset outside bounds of constant string");
453 TREE_NO_WARNING (src) = 1;
458 /* Use strlen to search for the first zero byte. Since any strings
459 constructed with build_string will have nulls appended, we win even
460 if we get handed something like (char[4])"abcd".
462 Since OFFSET is our starting index into the string, no further
463 calculation is needed. */
464 return ssize_int (strlen (ptr + offset));
467 /* Return a char pointer for a C string if it is a string constant
468 or sum of string constant and integer constant. */
475 src = string_constant (src, &offset_node);
479 if (offset_node == 0)
480 return TREE_STRING_POINTER (src);
481 else if (!host_integerp (offset_node, 1)
482 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
485 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
488 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
489 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
492 c_readstr (const char *str, enum machine_mode mode)
498 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
503 for (i = 0; i < GET_MODE_SIZE (mode); i++)
506 if (WORDS_BIG_ENDIAN)
507 j = GET_MODE_SIZE (mode) - i - 1;
508 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
509 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
510 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
512 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
515 ch = (unsigned char) str[i];
516 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
518 return immed_double_const (c[0], c[1], mode);
521 /* Cast a target constant CST to target CHAR and if that value fits into
522 host char type, return zero and put that value into variable pointed to by
526 target_char_cast (tree cst, char *p)
528 unsigned HOST_WIDE_INT val, hostval;
530 if (!host_integerp (cst, 1)
531 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
534 val = tree_low_cst (cst, 1);
535 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
536 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
539 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
540 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
549 /* Similar to save_expr, but assumes that arbitrary code is not executed
550 in between the multiple evaluations. In particular, we assume that a
551 non-addressable local variable will not be modified. */
554 builtin_save_expr (tree exp)
556 if (TREE_ADDRESSABLE (exp) == 0
557 && (TREE_CODE (exp) == PARM_DECL
558 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
561 return save_expr (exp);
564 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
565 times to get the address of either a higher stack frame, or a return
566 address located within it (depending on FNDECL_CODE). */
569 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
573 #ifdef INITIAL_FRAME_ADDRESS_RTX
574 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
578 /* For a zero count with __builtin_return_address, we don't care what
579 frame address we return, because target-specific definitions will
580 override us. Therefore frame pointer elimination is OK, and using
581 the soft frame pointer is OK.
583 For a nonzero count, or a zero count with __builtin_frame_address,
584 we require a stable offset from the current frame pointer to the
585 previous one, so we must use the hard frame pointer, and
586 we must disable frame pointer elimination. */
587 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
588 tem = frame_pointer_rtx;
591 tem = hard_frame_pointer_rtx;
593 /* Tell reload not to eliminate the frame pointer. */
594 crtl->accesses_prior_frames = 1;
598 /* Some machines need special handling before we can access
599 arbitrary frames. For example, on the SPARC, we must first flush
600 all register windows to the stack. */
601 #ifdef SETUP_FRAME_ADDRESSES
603 SETUP_FRAME_ADDRESSES ();
606 /* On the SPARC, the return address is not in the frame, it is in a
607 register. There is no way to access it off of the current frame
608 pointer, but it can be accessed off the previous frame pointer by
609 reading the value from the register window save area. */
610 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
611 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
615 /* Scan back COUNT frames to the specified frame. */
616 for (i = 0; i < count; i++)
618 /* Assume the dynamic chain pointer is in the word that the
619 frame address points to, unless otherwise specified. */
620 #ifdef DYNAMIC_CHAIN_ADDRESS
621 tem = DYNAMIC_CHAIN_ADDRESS (tem);
623 tem = memory_address (Pmode, tem);
624 tem = gen_frame_mem (Pmode, tem);
625 tem = copy_to_reg (tem);
628 /* For __builtin_frame_address, return what we've got. But, on
629 the SPARC for example, we may have to add a bias. */
630 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
631 #ifdef FRAME_ADDR_RTX
632 return FRAME_ADDR_RTX (tem);
637 /* For __builtin_return_address, get the return address from that frame. */
638 #ifdef RETURN_ADDR_RTX
639 tem = RETURN_ADDR_RTX (count, tem);
641 tem = memory_address (Pmode,
642 plus_constant (tem, GET_MODE_SIZE (Pmode)));
643 tem = gen_frame_mem (Pmode, tem);
648 /* Alias set used for setjmp buffer. */
649 static alias_set_type setjmp_alias_set = -1;
651 /* Construct the leading half of a __builtin_setjmp call. Control will
652 return to RECEIVER_LABEL. This is also called directly by the SJLJ
653 exception handling code. */
656 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
658 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
662 if (setjmp_alias_set == -1)
663 setjmp_alias_set = new_alias_set ();
665 buf_addr = convert_memory_address (Pmode, buf_addr);
667 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
669 /* We store the frame pointer and the address of receiver_label in
670 the buffer and use the rest of it for the stack save area, which
671 is machine-dependent. */
673 mem = gen_rtx_MEM (Pmode, buf_addr);
674 set_mem_alias_set (mem, setjmp_alias_set);
675 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
677 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
678 set_mem_alias_set (mem, setjmp_alias_set);
680 emit_move_insn (validize_mem (mem),
681 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
683 stack_save = gen_rtx_MEM (sa_mode,
684 plus_constant (buf_addr,
685 2 * GET_MODE_SIZE (Pmode)));
686 set_mem_alias_set (stack_save, setjmp_alias_set);
687 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
689 /* If there is further processing to do, do it. */
690 #ifdef HAVE_builtin_setjmp_setup
691 if (HAVE_builtin_setjmp_setup)
692 emit_insn (gen_builtin_setjmp_setup (buf_addr));
695 /* Tell optimize_save_area_alloca that extra work is going to
696 need to go on during alloca. */
697 cfun->calls_setjmp = 1;
699 /* We have a nonlocal label. */
700 cfun->has_nonlocal_label = 1;
703 /* Construct the trailing part of a __builtin_setjmp call. This is
704 also called directly by the SJLJ exception handling code. */
707 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
709 /* Clobber the FP when we get here, so we have to make sure it's
710 marked as used by this function. */
711 emit_use (hard_frame_pointer_rtx);
713 /* Mark the static chain as clobbered here so life information
714 doesn't get messed up for it. */
715 emit_clobber (static_chain_rtx);
717 /* Now put in the code to restore the frame pointer, and argument
718 pointer, if needed. */
719 #ifdef HAVE_nonlocal_goto
720 if (! HAVE_nonlocal_goto)
723 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
724 /* This might change the hard frame pointer in ways that aren't
725 apparent to early optimization passes, so force a clobber. */
726 emit_clobber (hard_frame_pointer_rtx);
729 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
730 if (fixed_regs[ARG_POINTER_REGNUM])
732 #ifdef ELIMINABLE_REGS
734 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
736 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
737 if (elim_regs[i].from == ARG_POINTER_REGNUM
738 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
741 if (i == ARRAY_SIZE (elim_regs))
744 /* Now restore our arg pointer from the address at which it
745 was saved in our stack frame. */
746 emit_move_insn (crtl->args.internal_arg_pointer,
747 copy_to_reg (get_arg_pointer_save_area ()));
752 #ifdef HAVE_builtin_setjmp_receiver
753 if (HAVE_builtin_setjmp_receiver)
754 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
757 #ifdef HAVE_nonlocal_goto_receiver
758 if (HAVE_nonlocal_goto_receiver)
759 emit_insn (gen_nonlocal_goto_receiver ());
764 /* We must not allow the code we just generated to be reordered by
765 scheduling. Specifically, the update of the frame pointer must
766 happen immediately, not later. */
767 emit_insn (gen_blockage ());
770 /* __builtin_longjmp is passed a pointer to an array of five words (not
771 all will be used on all machines). It operates similarly to the C
772 library function of the same name, but is more efficient. Much of
773 the code below is copied from the handling of non-local gotos. */
776 expand_builtin_longjmp (rtx buf_addr, rtx value)
778 rtx fp, lab, stack, insn, last;
779 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
781 /* DRAP is needed for stack realign if longjmp is expanded to current
783 if (SUPPORTS_STACK_ALIGNMENT)
784 crtl->need_drap = true;
786 if (setjmp_alias_set == -1)
787 setjmp_alias_set = new_alias_set ();
789 buf_addr = convert_memory_address (Pmode, buf_addr);
791 buf_addr = force_reg (Pmode, buf_addr);
793 /* We used to store value in static_chain_rtx, but that fails if pointers
794 are smaller than integers. We instead require that the user must pass
795 a second argument of 1, because that is what builtin_setjmp will
796 return. This also makes EH slightly more efficient, since we are no
797 longer copying around a value that we don't care about. */
798 gcc_assert (value == const1_rtx);
800 last = get_last_insn ();
801 #ifdef HAVE_builtin_longjmp
802 if (HAVE_builtin_longjmp)
803 emit_insn (gen_builtin_longjmp (buf_addr));
807 fp = gen_rtx_MEM (Pmode, buf_addr);
808 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
809 GET_MODE_SIZE (Pmode)));
811 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
812 2 * GET_MODE_SIZE (Pmode)));
813 set_mem_alias_set (fp, setjmp_alias_set);
814 set_mem_alias_set (lab, setjmp_alias_set);
815 set_mem_alias_set (stack, setjmp_alias_set);
817 /* Pick up FP, label, and SP from the block and jump. This code is
818 from expand_goto in stmt.c; see there for detailed comments. */
819 #ifdef HAVE_nonlocal_goto
820 if (HAVE_nonlocal_goto)
821 /* We have to pass a value to the nonlocal_goto pattern that will
822 get copied into the static_chain pointer, but it does not matter
823 what that value is, because builtin_setjmp does not use it. */
824 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
828 lab = copy_to_reg (lab);
830 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
831 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
833 emit_move_insn (hard_frame_pointer_rtx, fp);
834 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
836 emit_use (hard_frame_pointer_rtx);
837 emit_use (stack_pointer_rtx);
838 emit_indirect_jump (lab);
842 /* Search backwards and mark the jump insn as a non-local goto.
843 Note that this precludes the use of __builtin_longjmp to a
844 __builtin_setjmp target in the same function. However, we've
845 already cautioned the user that these functions are for
846 internal exception handling use only. */
847 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
849 gcc_assert (insn != last);
853 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
856 else if (CALL_P (insn))
861 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
862 and the address of the save area. */
865 expand_builtin_nonlocal_goto (tree exp)
867 tree t_label, t_save_area;
868 rtx r_label, r_save_area, r_fp, r_sp, insn;
870 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
873 t_label = CALL_EXPR_ARG (exp, 0);
874 t_save_area = CALL_EXPR_ARG (exp, 1);
876 r_label = expand_normal (t_label);
877 r_label = convert_memory_address (Pmode, r_label);
878 r_save_area = expand_normal (t_save_area);
879 r_save_area = convert_memory_address (Pmode, r_save_area);
880 /* Copy the address of the save location to a register just in case it was based
881 on the frame pointer. */
882 r_save_area = copy_to_reg (r_save_area);
883 r_fp = gen_rtx_MEM (Pmode, r_save_area);
884 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
885 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
887 crtl->has_nonlocal_goto = 1;
889 #ifdef HAVE_nonlocal_goto
890 /* ??? We no longer need to pass the static chain value, afaik. */
891 if (HAVE_nonlocal_goto)
892 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
896 r_label = copy_to_reg (r_label);
898 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
899 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
901 /* Restore frame pointer for containing function.
902 This sets the actual hard register used for the frame pointer
903 to the location of the function's incoming static chain info.
904 The non-local goto handler will then adjust it to contain the
905 proper value and reload the argument pointer, if needed. */
906 emit_move_insn (hard_frame_pointer_rtx, r_fp);
907 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
909 /* USE of hard_frame_pointer_rtx added for consistency;
910 not clear if really needed. */
911 emit_use (hard_frame_pointer_rtx);
912 emit_use (stack_pointer_rtx);
914 /* If the architecture is using a GP register, we must
915 conservatively assume that the target function makes use of it.
916 The prologue of functions with nonlocal gotos must therefore
917 initialize the GP register to the appropriate value, and we
918 must then make sure that this value is live at the point
919 of the jump. (Note that this doesn't necessarily apply
920 to targets with a nonlocal_goto pattern; they are free
921 to implement it in their own way. Note also that this is
922 a no-op if the GP register is a global invariant.) */
923 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
924 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
925 emit_use (pic_offset_table_rtx);
927 emit_indirect_jump (r_label);
930 /* Search backwards to the jump insn and mark it as a
932 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
936 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
939 else if (CALL_P (insn))
946 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
947 (not all will be used on all machines) that was passed to __builtin_setjmp.
948 It updates the stack pointer in that block to correspond to the current
952 expand_builtin_update_setjmp_buf (rtx buf_addr)
954 enum machine_mode sa_mode = Pmode;
958 #ifdef HAVE_save_stack_nonlocal
959 if (HAVE_save_stack_nonlocal)
960 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
962 #ifdef STACK_SAVEAREA_MODE
963 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
967 = gen_rtx_MEM (sa_mode,
970 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
974 emit_insn (gen_setjmp ());
977 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
980 /* Expand a call to __builtin_prefetch. For a target that does not support
981 data prefetch, evaluate the memory address argument in case it has side
985 expand_builtin_prefetch (tree exp)
987 tree arg0, arg1, arg2;
991 if (!validate_arglist (exp, POINTER_TYPE, 0))
994 arg0 = CALL_EXPR_ARG (exp, 0);
996 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
997 zero (read) and argument 2 (locality) defaults to 3 (high degree of
999 nargs = call_expr_nargs (exp);
1001 arg1 = CALL_EXPR_ARG (exp, 1);
1003 arg1 = integer_zero_node;
1005 arg2 = CALL_EXPR_ARG (exp, 2);
1007 arg2 = build_int_cst (NULL_TREE, 3);
1009 /* Argument 0 is an address. */
1010 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1012 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1013 if (TREE_CODE (arg1) != INTEGER_CST)
1015 error ("second argument to %<__builtin_prefetch%> must be a constant");
1016 arg1 = integer_zero_node;
1018 op1 = expand_normal (arg1);
1019 /* Argument 1 must be either zero or one. */
1020 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1022 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1027 /* Argument 2 (locality) must be a compile-time constant int. */
1028 if (TREE_CODE (arg2) != INTEGER_CST)
1030 error ("third argument to %<__builtin_prefetch%> must be a constant");
1031 arg2 = integer_zero_node;
1033 op2 = expand_normal (arg2);
1034 /* Argument 2 must be 0, 1, 2, or 3. */
1035 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1037 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1041 #ifdef HAVE_prefetch
1044 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1046 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1047 || (GET_MODE (op0) != Pmode))
1049 op0 = convert_memory_address (Pmode, op0);
1050 op0 = force_reg (Pmode, op0);
1052 emit_insn (gen_prefetch (op0, op1, op2));
1056 /* Don't do anything with direct references to volatile memory, but
1057 generate code to handle other side effects. */
1058 if (!MEM_P (op0) && side_effects_p (op0))
1062 /* Get a MEM rtx for expression EXP which is the address of an operand
1063 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1064 the maximum length of the block of memory that might be accessed or
1068 get_memory_rtx (tree exp, tree len)
1070 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1071 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1073 /* Get an expression we can use to find the attributes to assign to MEM.
1074 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1075 we can. First remove any nops. */
1076 while (CONVERT_EXPR_P (exp)
1077 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1078 exp = TREE_OPERAND (exp, 0);
1080 if (TREE_CODE (exp) == ADDR_EXPR)
1081 exp = TREE_OPERAND (exp, 0);
1082 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1083 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1087 /* Honor attributes derived from exp, except for the alias set
1088 (as builtin stringops may alias with anything) and the size
1089 (as stringops may access multiple array elements). */
1092 set_mem_attributes (mem, exp, 0);
1094 /* Allow the string and memory builtins to overflow from one
1095 field into another, see http://gcc.gnu.org/PR23561.
1096 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1097 memory accessed by the string or memory builtin will fit
1098 within the field. */
1099 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1101 tree mem_expr = MEM_EXPR (mem);
1102 HOST_WIDE_INT offset = -1, length = -1;
1105 while (TREE_CODE (inner) == ARRAY_REF
1106 || CONVERT_EXPR_P (inner)
1107 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1108 || TREE_CODE (inner) == SAVE_EXPR)
1109 inner = TREE_OPERAND (inner, 0);
1111 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1113 if (MEM_OFFSET (mem)
1114 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1115 offset = INTVAL (MEM_OFFSET (mem));
1117 if (offset >= 0 && len && host_integerp (len, 0))
1118 length = tree_low_cst (len, 0);
1120 while (TREE_CODE (inner) == COMPONENT_REF)
1122 tree field = TREE_OPERAND (inner, 1);
1123 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1124 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1126 /* Bitfields are generally not byte-addressable. */
1127 gcc_assert (!DECL_BIT_FIELD (field)
1128 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1129 % BITS_PER_UNIT) == 0
1130 && host_integerp (DECL_SIZE (field), 0)
1131 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1132 % BITS_PER_UNIT) == 0));
1134 /* If we can prove that the memory starting at XEXP (mem, 0) and
1135 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1136 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1137 fields without DECL_SIZE_UNIT like flexible array members. */
1139 && DECL_SIZE_UNIT (field)
1140 && host_integerp (DECL_SIZE_UNIT (field), 0))
1143 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1146 && offset + length <= size)
1151 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1152 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1153 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1161 mem_expr = TREE_OPERAND (mem_expr, 0);
1162 inner = TREE_OPERAND (inner, 0);
1165 if (mem_expr == NULL)
1167 if (mem_expr != MEM_EXPR (mem))
1169 set_mem_expr (mem, mem_expr);
1170 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1173 set_mem_alias_set (mem, 0);
1174 set_mem_size (mem, NULL_RTX);
1180 /* Built-in functions to perform an untyped call and return. */
1182 /* For each register that may be used for calling a function, this
1183 gives a mode used to copy the register's value. VOIDmode indicates
1184 the register is not used for calling a function. If the machine
1185 has register windows, this gives only the outbound registers.
1186 INCOMING_REGNO gives the corresponding inbound register. */
1187 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1189 /* For each register that may be used for returning values, this gives
1190 a mode used to copy the register's value. VOIDmode indicates the
1191 register is not used for returning values. If the machine has
1192 register windows, this gives only the outbound registers.
1193 INCOMING_REGNO gives the corresponding inbound register. */
1194 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1196 /* For each register that may be used for calling a function, this
1197 gives the offset of that register into the block returned by
1198 __builtin_apply_args. 0 indicates that the register is not
1199 used for calling a function. */
1200 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1202 /* Return the size required for the block returned by __builtin_apply_args,
1203 and initialize apply_args_mode. */
1206 apply_args_size (void)
1208 static int size = -1;
1211 enum machine_mode mode;
1213 /* The values computed by this function never change. */
1216 /* The first value is the incoming arg-pointer. */
1217 size = GET_MODE_SIZE (Pmode);
1219 /* The second value is the structure value address unless this is
1220 passed as an "invisible" first argument. */
1221 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1222 size += GET_MODE_SIZE (Pmode);
1224 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1225 if (FUNCTION_ARG_REGNO_P (regno))
1227 mode = reg_raw_mode[regno];
1229 gcc_assert (mode != VOIDmode);
1231 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1232 if (size % align != 0)
1233 size = CEIL (size, align) * align;
1234 apply_args_reg_offset[regno] = size;
1235 size += GET_MODE_SIZE (mode);
1236 apply_args_mode[regno] = mode;
1240 apply_args_mode[regno] = VOIDmode;
1241 apply_args_reg_offset[regno] = 0;
1247 /* Return the size required for the block returned by __builtin_apply,
1248 and initialize apply_result_mode. */
1251 apply_result_size (void)
1253 static int size = -1;
1255 enum machine_mode mode;
1257 /* The values computed by this function never change. */
1262 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1263 if (FUNCTION_VALUE_REGNO_P (regno))
1265 mode = reg_raw_mode[regno];
1267 gcc_assert (mode != VOIDmode);
1269 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1270 if (size % align != 0)
1271 size = CEIL (size, align) * align;
1272 size += GET_MODE_SIZE (mode);
1273 apply_result_mode[regno] = mode;
1276 apply_result_mode[regno] = VOIDmode;
1278 /* Allow targets that use untyped_call and untyped_return to override
1279 the size so that machine-specific information can be stored here. */
1280 #ifdef APPLY_RESULT_SIZE
1281 size = APPLY_RESULT_SIZE;
1287 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1288 /* Create a vector describing the result block RESULT. If SAVEP is true,
1289 the result block is used to save the values; otherwise it is used to
1290 restore the values. */
1293 result_vector (int savep, rtx result)
1295 int regno, size, align, nelts;
1296 enum machine_mode mode;
1298 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1301 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1302 if ((mode = apply_result_mode[regno]) != VOIDmode)
1304 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1305 if (size % align != 0)
1306 size = CEIL (size, align) * align;
1307 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1308 mem = adjust_address (result, mode, size);
1309 savevec[nelts++] = (savep
1310 ? gen_rtx_SET (VOIDmode, mem, reg)
1311 : gen_rtx_SET (VOIDmode, reg, mem));
1312 size += GET_MODE_SIZE (mode);
1314 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1316 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1318 /* Save the state required to perform an untyped call with the same
1319 arguments as were passed to the current function. */
1322 expand_builtin_apply_args_1 (void)
1325 int size, align, regno;
1326 enum machine_mode mode;
1327 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1329 /* Create a block where the arg-pointer, structure value address,
1330 and argument registers can be saved. */
1331 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1333 /* Walk past the arg-pointer and structure value address. */
1334 size = GET_MODE_SIZE (Pmode);
1335 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1336 size += GET_MODE_SIZE (Pmode);
1338 /* Save each register used in calling a function to the block. */
1339 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1340 if ((mode = apply_args_mode[regno]) != VOIDmode)
1342 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1343 if (size % align != 0)
1344 size = CEIL (size, align) * align;
1346 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1348 emit_move_insn (adjust_address (registers, mode, size), tem);
1349 size += GET_MODE_SIZE (mode);
1352 /* Save the arg pointer to the block. */
1353 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1354 #ifdef STACK_GROWS_DOWNWARD
1355 /* We need the pointer as the caller actually passed them to us, not
1356 as we might have pretended they were passed. Make sure it's a valid
1357 operand, as emit_move_insn isn't expected to handle a PLUS. */
1359 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1362 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1364 size = GET_MODE_SIZE (Pmode);
1366 /* Save the structure value address unless this is passed as an
1367 "invisible" first argument. */
1368 if (struct_incoming_value)
1370 emit_move_insn (adjust_address (registers, Pmode, size),
1371 copy_to_reg (struct_incoming_value));
1372 size += GET_MODE_SIZE (Pmode);
1375 /* Return the address of the block. */
1376 return copy_addr_to_reg (XEXP (registers, 0));
1379 /* __builtin_apply_args returns block of memory allocated on
1380 the stack into which is stored the arg pointer, structure
1381 value address, static chain, and all the registers that might
1382 possibly be used in performing a function call. The code is
1383 moved to the start of the function so the incoming values are
1387 expand_builtin_apply_args (void)
1389 /* Don't do __builtin_apply_args more than once in a function.
1390 Save the result of the first call and reuse it. */
1391 if (apply_args_value != 0)
1392 return apply_args_value;
1394 /* When this function is called, it means that registers must be
1395 saved on entry to this function. So we migrate the
1396 call to the first insn of this function. */
1401 temp = expand_builtin_apply_args_1 ();
1405 apply_args_value = temp;
1407 /* Put the insns after the NOTE that starts the function.
1408 If this is inside a start_sequence, make the outer-level insn
1409 chain current, so the code is placed at the start of the
1411 push_topmost_sequence ();
1412 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1413 pop_topmost_sequence ();
1418 /* Perform an untyped call and save the state required to perform an
1419 untyped return of whatever value was returned by the given function. */
1422 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1424 int size, align, regno;
1425 enum machine_mode mode;
1426 rtx incoming_args, result, reg, dest, src, call_insn;
1427 rtx old_stack_level = 0;
1428 rtx call_fusage = 0;
1429 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1431 arguments = convert_memory_address (Pmode, arguments);
1433 /* Create a block where the return registers can be saved. */
1434 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1436 /* Fetch the arg pointer from the ARGUMENTS block. */
1437 incoming_args = gen_reg_rtx (Pmode);
1438 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1439 #ifndef STACK_GROWS_DOWNWARD
1440 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1441 incoming_args, 0, OPTAB_LIB_WIDEN);
1444 /* Push a new argument block and copy the arguments. Do not allow
1445 the (potential) memcpy call below to interfere with our stack
1447 do_pending_stack_adjust ();
1450 /* Save the stack with nonlocal if available. */
1451 #ifdef HAVE_save_stack_nonlocal
1452 if (HAVE_save_stack_nonlocal)
1453 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1456 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1458 /* Allocate a block of memory onto the stack and copy the memory
1459 arguments to the outgoing arguments address. */
1460 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1462 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1463 may have already set current_function_calls_alloca to true.
1464 current_function_calls_alloca won't be set if argsize is zero,
1465 so we have to guarantee need_drap is true here. */
1466 if (SUPPORTS_STACK_ALIGNMENT)
1467 crtl->need_drap = true;
1469 dest = virtual_outgoing_args_rtx;
1470 #ifndef STACK_GROWS_DOWNWARD
1471 if (GET_CODE (argsize) == CONST_INT)
1472 dest = plus_constant (dest, -INTVAL (argsize));
1474 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1476 dest = gen_rtx_MEM (BLKmode, dest);
1477 set_mem_align (dest, PARM_BOUNDARY);
1478 src = gen_rtx_MEM (BLKmode, incoming_args);
1479 set_mem_align (src, PARM_BOUNDARY);
1480 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1482 /* Refer to the argument block. */
1484 arguments = gen_rtx_MEM (BLKmode, arguments);
1485 set_mem_align (arguments, PARM_BOUNDARY);
1487 /* Walk past the arg-pointer and structure value address. */
1488 size = GET_MODE_SIZE (Pmode);
1490 size += GET_MODE_SIZE (Pmode);
1492 /* Restore each of the registers previously saved. Make USE insns
1493 for each of these registers for use in making the call. */
1494 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1495 if ((mode = apply_args_mode[regno]) != VOIDmode)
1497 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1498 if (size % align != 0)
1499 size = CEIL (size, align) * align;
1500 reg = gen_rtx_REG (mode, regno);
1501 emit_move_insn (reg, adjust_address (arguments, mode, size));
1502 use_reg (&call_fusage, reg);
1503 size += GET_MODE_SIZE (mode);
1506 /* Restore the structure value address unless this is passed as an
1507 "invisible" first argument. */
1508 size = GET_MODE_SIZE (Pmode);
1511 rtx value = gen_reg_rtx (Pmode);
1512 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1513 emit_move_insn (struct_value, value);
1514 if (REG_P (struct_value))
1515 use_reg (&call_fusage, struct_value);
1516 size += GET_MODE_SIZE (Pmode);
1519 /* All arguments and registers used for the call are set up by now! */
1520 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1522 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1523 and we don't want to load it into a register as an optimization,
1524 because prepare_call_address already did it if it should be done. */
1525 if (GET_CODE (function) != SYMBOL_REF)
1526 function = memory_address (FUNCTION_MODE, function);
1528 /* Generate the actual call instruction and save the return value. */
1529 #ifdef HAVE_untyped_call
1530 if (HAVE_untyped_call)
1531 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1532 result, result_vector (1, result)));
1535 #ifdef HAVE_call_value
1536 if (HAVE_call_value)
1540 /* Locate the unique return register. It is not possible to
1541 express a call that sets more than one return register using
1542 call_value; use untyped_call for that. In fact, untyped_call
1543 only needs to save the return registers in the given block. */
1544 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1545 if ((mode = apply_result_mode[regno]) != VOIDmode)
1547 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1549 valreg = gen_rtx_REG (mode, regno);
1552 emit_call_insn (GEN_CALL_VALUE (valreg,
1553 gen_rtx_MEM (FUNCTION_MODE, function),
1554 const0_rtx, NULL_RTX, const0_rtx));
1556 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1562 /* Find the CALL insn we just emitted, and attach the register usage
1564 call_insn = last_call_insn ();
1565 add_function_usage_to (call_insn, call_fusage);
1567 /* Restore the stack. */
1568 #ifdef HAVE_save_stack_nonlocal
1569 if (HAVE_save_stack_nonlocal)
1570 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1573 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1577 /* Return the address of the result block. */
1578 result = copy_addr_to_reg (XEXP (result, 0));
1579 return convert_memory_address (ptr_mode, result);
1582 /* Perform an untyped return. */
1585 expand_builtin_return (rtx result)
1587 int size, align, regno;
1588 enum machine_mode mode;
1590 rtx call_fusage = 0;
1592 result = convert_memory_address (Pmode, result);
1594 apply_result_size ();
1595 result = gen_rtx_MEM (BLKmode, result);
1597 #ifdef HAVE_untyped_return
1598 if (HAVE_untyped_return)
1600 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1606 /* Restore the return value and note that each value is used. */
1608 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1609 if ((mode = apply_result_mode[regno]) != VOIDmode)
1611 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1612 if (size % align != 0)
1613 size = CEIL (size, align) * align;
1614 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1615 emit_move_insn (reg, adjust_address (result, mode, size));
1617 push_to_sequence (call_fusage);
1619 call_fusage = get_insns ();
1621 size += GET_MODE_SIZE (mode);
1624 /* Put the USE insns before the return. */
1625 emit_insn (call_fusage);
1627 /* Return whatever values was restored by jumping directly to the end
1629 expand_naked_return ();
1632 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1634 static enum type_class
1635 type_to_class (tree type)
1637 switch (TREE_CODE (type))
1639 case VOID_TYPE: return void_type_class;
1640 case INTEGER_TYPE: return integer_type_class;
1641 case ENUMERAL_TYPE: return enumeral_type_class;
1642 case BOOLEAN_TYPE: return boolean_type_class;
1643 case POINTER_TYPE: return pointer_type_class;
1644 case REFERENCE_TYPE: return reference_type_class;
1645 case OFFSET_TYPE: return offset_type_class;
1646 case REAL_TYPE: return real_type_class;
1647 case COMPLEX_TYPE: return complex_type_class;
1648 case FUNCTION_TYPE: return function_type_class;
1649 case METHOD_TYPE: return method_type_class;
1650 case RECORD_TYPE: return record_type_class;
1652 case QUAL_UNION_TYPE: return union_type_class;
1653 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1654 ? string_type_class : array_type_class);
1655 case LANG_TYPE: return lang_type_class;
1656 default: return no_type_class;
1660 /* Expand a call EXP to __builtin_classify_type. */
1663 expand_builtin_classify_type (tree exp)
1665 if (call_expr_nargs (exp))
1666 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1667 return GEN_INT (no_type_class);
1670 /* This helper macro, meant to be used in mathfn_built_in below,
1671 determines which among a set of three builtin math functions is
1672 appropriate for a given type mode. The `F' and `L' cases are
1673 automatically generated from the `double' case. */
1674 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1675 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1676 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1677 fcodel = BUILT_IN_MATHFN##L ; break;
1678 /* Similar to above, but appends _R after any F/L suffix. */
1679 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1680 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1681 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1682 fcodel = BUILT_IN_MATHFN##L_R ; break;
1684 /* Return mathematic function equivalent to FN but operating directly
1685 on TYPE, if available. If IMPLICIT is true find the function in
1686 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1687 can't do the conversion, return zero. */
1690 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1692 tree const *const fn_arr
1693 = implicit ? implicit_built_in_decls : built_in_decls;
1694 enum built_in_function fcode, fcodef, fcodel;
1698 CASE_MATHFN (BUILT_IN_ACOS)
1699 CASE_MATHFN (BUILT_IN_ACOSH)
1700 CASE_MATHFN (BUILT_IN_ASIN)
1701 CASE_MATHFN (BUILT_IN_ASINH)
1702 CASE_MATHFN (BUILT_IN_ATAN)
1703 CASE_MATHFN (BUILT_IN_ATAN2)
1704 CASE_MATHFN (BUILT_IN_ATANH)
1705 CASE_MATHFN (BUILT_IN_CBRT)
1706 CASE_MATHFN (BUILT_IN_CEIL)
1707 CASE_MATHFN (BUILT_IN_CEXPI)
1708 CASE_MATHFN (BUILT_IN_COPYSIGN)
1709 CASE_MATHFN (BUILT_IN_COS)
1710 CASE_MATHFN (BUILT_IN_COSH)
1711 CASE_MATHFN (BUILT_IN_DREM)
1712 CASE_MATHFN (BUILT_IN_ERF)
1713 CASE_MATHFN (BUILT_IN_ERFC)
1714 CASE_MATHFN (BUILT_IN_EXP)
1715 CASE_MATHFN (BUILT_IN_EXP10)
1716 CASE_MATHFN (BUILT_IN_EXP2)
1717 CASE_MATHFN (BUILT_IN_EXPM1)
1718 CASE_MATHFN (BUILT_IN_FABS)
1719 CASE_MATHFN (BUILT_IN_FDIM)
1720 CASE_MATHFN (BUILT_IN_FLOOR)
1721 CASE_MATHFN (BUILT_IN_FMA)
1722 CASE_MATHFN (BUILT_IN_FMAX)
1723 CASE_MATHFN (BUILT_IN_FMIN)
1724 CASE_MATHFN (BUILT_IN_FMOD)
1725 CASE_MATHFN (BUILT_IN_FREXP)
1726 CASE_MATHFN (BUILT_IN_GAMMA)
1727 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1728 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1729 CASE_MATHFN (BUILT_IN_HYPOT)
1730 CASE_MATHFN (BUILT_IN_ILOGB)
1731 CASE_MATHFN (BUILT_IN_INF)
1732 CASE_MATHFN (BUILT_IN_ISINF)
1733 CASE_MATHFN (BUILT_IN_J0)
1734 CASE_MATHFN (BUILT_IN_J1)
1735 CASE_MATHFN (BUILT_IN_JN)
1736 CASE_MATHFN (BUILT_IN_LCEIL)
1737 CASE_MATHFN (BUILT_IN_LDEXP)
1738 CASE_MATHFN (BUILT_IN_LFLOOR)
1739 CASE_MATHFN (BUILT_IN_LGAMMA)
1740 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1741 CASE_MATHFN (BUILT_IN_LLCEIL)
1742 CASE_MATHFN (BUILT_IN_LLFLOOR)
1743 CASE_MATHFN (BUILT_IN_LLRINT)
1744 CASE_MATHFN (BUILT_IN_LLROUND)
1745 CASE_MATHFN (BUILT_IN_LOG)
1746 CASE_MATHFN (BUILT_IN_LOG10)
1747 CASE_MATHFN (BUILT_IN_LOG1P)
1748 CASE_MATHFN (BUILT_IN_LOG2)
1749 CASE_MATHFN (BUILT_IN_LOGB)
1750 CASE_MATHFN (BUILT_IN_LRINT)
1751 CASE_MATHFN (BUILT_IN_LROUND)
1752 CASE_MATHFN (BUILT_IN_MODF)
1753 CASE_MATHFN (BUILT_IN_NAN)
1754 CASE_MATHFN (BUILT_IN_NANS)
1755 CASE_MATHFN (BUILT_IN_NEARBYINT)
1756 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1757 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1758 CASE_MATHFN (BUILT_IN_POW)
1759 CASE_MATHFN (BUILT_IN_POWI)
1760 CASE_MATHFN (BUILT_IN_POW10)
1761 CASE_MATHFN (BUILT_IN_REMAINDER)
1762 CASE_MATHFN (BUILT_IN_REMQUO)
1763 CASE_MATHFN (BUILT_IN_RINT)
1764 CASE_MATHFN (BUILT_IN_ROUND)
1765 CASE_MATHFN (BUILT_IN_SCALB)
1766 CASE_MATHFN (BUILT_IN_SCALBLN)
1767 CASE_MATHFN (BUILT_IN_SCALBN)
1768 CASE_MATHFN (BUILT_IN_SIGNBIT)
1769 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1770 CASE_MATHFN (BUILT_IN_SIN)
1771 CASE_MATHFN (BUILT_IN_SINCOS)
1772 CASE_MATHFN (BUILT_IN_SINH)
1773 CASE_MATHFN (BUILT_IN_SQRT)
1774 CASE_MATHFN (BUILT_IN_TAN)
1775 CASE_MATHFN (BUILT_IN_TANH)
1776 CASE_MATHFN (BUILT_IN_TGAMMA)
1777 CASE_MATHFN (BUILT_IN_TRUNC)
1778 CASE_MATHFN (BUILT_IN_Y0)
1779 CASE_MATHFN (BUILT_IN_Y1)
1780 CASE_MATHFN (BUILT_IN_YN)
1786 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1787 return fn_arr[fcode];
1788 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1789 return fn_arr[fcodef];
1790 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1791 return fn_arr[fcodel];
1796 /* Like mathfn_built_in_1(), but always use the implicit array. */
1799 mathfn_built_in (tree type, enum built_in_function fn)
1801 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1804 /* If errno must be maintained, expand the RTL to check if the result,
1805 TARGET, of a built-in function call, EXP, is NaN, and if so set
1809 expand_errno_check (tree exp, rtx target)
1811 rtx lab = gen_label_rtx ();
1813 /* Test the result; if it is NaN, set errno=EDOM because
1814 the argument was not in the domain. */
1815 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1819 /* If this built-in doesn't throw an exception, set errno directly. */
1820 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1822 #ifdef GEN_ERRNO_RTX
1823 rtx errno_rtx = GEN_ERRNO_RTX;
1826 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1828 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1834 /* Make sure the library call isn't expanded as a tail call. */
1835 CALL_EXPR_TAILCALL (exp) = 0;
1837 /* We can't set errno=EDOM directly; let the library call do it.
1838 Pop the arguments right away in case the call gets deleted. */
1840 expand_call (exp, target, 0);
1845 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1846 Return NULL_RTX if a normal call should be emitted rather than expanding
1847 the function in-line. EXP is the expression that is a call to the builtin
1848 function; if convenient, the result should be placed in TARGET.
1849 SUBTARGET may be used as the target for computing one of EXP's operands. */
1852 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1854 optab builtin_optab;
1855 rtx op0, insns, before_call;
1856 tree fndecl = get_callee_fndecl (exp);
1857 enum machine_mode mode;
1858 bool errno_set = false;
1861 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1864 arg = CALL_EXPR_ARG (exp, 0);
1866 switch (DECL_FUNCTION_CODE (fndecl))
1868 CASE_FLT_FN (BUILT_IN_SQRT):
1869 errno_set = ! tree_expr_nonnegative_p (arg);
1870 builtin_optab = sqrt_optab;
1872 CASE_FLT_FN (BUILT_IN_EXP):
1873 errno_set = true; builtin_optab = exp_optab; break;
1874 CASE_FLT_FN (BUILT_IN_EXP10):
1875 CASE_FLT_FN (BUILT_IN_POW10):
1876 errno_set = true; builtin_optab = exp10_optab; break;
1877 CASE_FLT_FN (BUILT_IN_EXP2):
1878 errno_set = true; builtin_optab = exp2_optab; break;
1879 CASE_FLT_FN (BUILT_IN_EXPM1):
1880 errno_set = true; builtin_optab = expm1_optab; break;
1881 CASE_FLT_FN (BUILT_IN_LOGB):
1882 errno_set = true; builtin_optab = logb_optab; break;
1883 CASE_FLT_FN (BUILT_IN_LOG):
1884 errno_set = true; builtin_optab = log_optab; break;
1885 CASE_FLT_FN (BUILT_IN_LOG10):
1886 errno_set = true; builtin_optab = log10_optab; break;
1887 CASE_FLT_FN (BUILT_IN_LOG2):
1888 errno_set = true; builtin_optab = log2_optab; break;
1889 CASE_FLT_FN (BUILT_IN_LOG1P):
1890 errno_set = true; builtin_optab = log1p_optab; break;
1891 CASE_FLT_FN (BUILT_IN_ASIN):
1892 builtin_optab = asin_optab; break;
1893 CASE_FLT_FN (BUILT_IN_ACOS):
1894 builtin_optab = acos_optab; break;
1895 CASE_FLT_FN (BUILT_IN_TAN):
1896 builtin_optab = tan_optab; break;
1897 CASE_FLT_FN (BUILT_IN_ATAN):
1898 builtin_optab = atan_optab; break;
1899 CASE_FLT_FN (BUILT_IN_FLOOR):
1900 builtin_optab = floor_optab; break;
1901 CASE_FLT_FN (BUILT_IN_CEIL):
1902 builtin_optab = ceil_optab; break;
1903 CASE_FLT_FN (BUILT_IN_TRUNC):
1904 builtin_optab = btrunc_optab; break;
1905 CASE_FLT_FN (BUILT_IN_ROUND):
1906 builtin_optab = round_optab; break;
1907 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1908 builtin_optab = nearbyint_optab;
1909 if (flag_trapping_math)
1911 /* Else fallthrough and expand as rint. */
1912 CASE_FLT_FN (BUILT_IN_RINT):
1913 builtin_optab = rint_optab; break;
1918 /* Make a suitable register to place result in. */
1919 mode = TYPE_MODE (TREE_TYPE (exp));
1921 if (! flag_errno_math || ! HONOR_NANS (mode))
1924 /* Before working hard, check whether the instruction is available. */
1925 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1927 target = gen_reg_rtx (mode);
1929 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1930 need to expand the argument again. This way, we will not perform
1931 side-effects more the once. */
1932 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1934 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1938 /* Compute into TARGET.
1939 Set TARGET to wherever the result comes back. */
1940 target = expand_unop (mode, builtin_optab, op0, target, 0);
1945 expand_errno_check (exp, target);
1947 /* Output the entire sequence. */
1948 insns = get_insns ();
1954 /* If we were unable to expand via the builtin, stop the sequence
1955 (without outputting the insns) and call to the library function
1956 with the stabilized argument list. */
1960 before_call = get_last_insn ();
1962 return expand_call (exp, target, target == const0_rtx);
1965 /* Expand a call to the builtin binary math functions (pow and atan2).
1966 Return NULL_RTX if a normal call should be emitted rather than expanding the
1967 function in-line. EXP is the expression that is a call to the builtin
1968 function; if convenient, the result should be placed in TARGET.
1969 SUBTARGET may be used as the target for computing one of EXP's
1973 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1975 optab builtin_optab;
1976 rtx op0, op1, insns;
1977 int op1_type = REAL_TYPE;
1978 tree fndecl = get_callee_fndecl (exp);
1980 enum machine_mode mode;
1981 bool errno_set = true;
1983 switch (DECL_FUNCTION_CODE (fndecl))
1985 CASE_FLT_FN (BUILT_IN_SCALBN):
1986 CASE_FLT_FN (BUILT_IN_SCALBLN):
1987 CASE_FLT_FN (BUILT_IN_LDEXP):
1988 op1_type = INTEGER_TYPE;
1993 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
1996 arg0 = CALL_EXPR_ARG (exp, 0);
1997 arg1 = CALL_EXPR_ARG (exp, 1);
1999 switch (DECL_FUNCTION_CODE (fndecl))
2001 CASE_FLT_FN (BUILT_IN_POW):
2002 builtin_optab = pow_optab; break;
2003 CASE_FLT_FN (BUILT_IN_ATAN2):
2004 builtin_optab = atan2_optab; break;
2005 CASE_FLT_FN (BUILT_IN_SCALB):
2006 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2008 builtin_optab = scalb_optab; break;
2009 CASE_FLT_FN (BUILT_IN_SCALBN):
2010 CASE_FLT_FN (BUILT_IN_SCALBLN):
2011 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2013 /* Fall through... */
2014 CASE_FLT_FN (BUILT_IN_LDEXP):
2015 builtin_optab = ldexp_optab; break;
2016 CASE_FLT_FN (BUILT_IN_FMOD):
2017 builtin_optab = fmod_optab; break;
2018 CASE_FLT_FN (BUILT_IN_REMAINDER):
2019 CASE_FLT_FN (BUILT_IN_DREM):
2020 builtin_optab = remainder_optab; break;
2025 /* Make a suitable register to place result in. */
2026 mode = TYPE_MODE (TREE_TYPE (exp));
2028 /* Before working hard, check whether the instruction is available. */
2029 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2032 target = gen_reg_rtx (mode);
2034 if (! flag_errno_math || ! HONOR_NANS (mode))
2037 /* Always stabilize the argument list. */
2038 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2039 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2041 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2042 op1 = expand_normal (arg1);
2046 /* Compute into TARGET.
2047 Set TARGET to wherever the result comes back. */
2048 target = expand_binop (mode, builtin_optab, op0, op1,
2049 target, 0, OPTAB_DIRECT);
2051 /* If we were unable to expand via the builtin, stop the sequence
2052 (without outputting the insns) and call to the library function
2053 with the stabilized argument list. */
2057 return expand_call (exp, target, target == const0_rtx);
2061 expand_errno_check (exp, target);
2063 /* Output the entire sequence. */
2064 insns = get_insns ();
2071 /* Expand a call to the builtin sin and cos math functions.
2072 Return NULL_RTX if a normal call should be emitted rather than expanding the
2073 function in-line. EXP is the expression that is a call to the builtin
2074 function; if convenient, the result should be placed in TARGET.
2075 SUBTARGET may be used as the target for computing one of EXP's
2079 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2081 optab builtin_optab;
2083 tree fndecl = get_callee_fndecl (exp);
2084 enum machine_mode mode;
2087 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2090 arg = CALL_EXPR_ARG (exp, 0);
2092 switch (DECL_FUNCTION_CODE (fndecl))
2094 CASE_FLT_FN (BUILT_IN_SIN):
2095 CASE_FLT_FN (BUILT_IN_COS):
2096 builtin_optab = sincos_optab; break;
2101 /* Make a suitable register to place result in. */
2102 mode = TYPE_MODE (TREE_TYPE (exp));
2104 /* Check if sincos insn is available, otherwise fallback
2105 to sin or cos insn. */
2106 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2107 switch (DECL_FUNCTION_CODE (fndecl))
2109 CASE_FLT_FN (BUILT_IN_SIN):
2110 builtin_optab = sin_optab; break;
2111 CASE_FLT_FN (BUILT_IN_COS):
2112 builtin_optab = cos_optab; break;
2117 /* Before working hard, check whether the instruction is available. */
2118 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2120 target = gen_reg_rtx (mode);
2122 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2123 need to expand the argument again. This way, we will not perform
2124 side-effects more the once. */
2125 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2127 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2131 /* Compute into TARGET.
2132 Set TARGET to wherever the result comes back. */
2133 if (builtin_optab == sincos_optab)
2137 switch (DECL_FUNCTION_CODE (fndecl))
2139 CASE_FLT_FN (BUILT_IN_SIN):
2140 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2142 CASE_FLT_FN (BUILT_IN_COS):
2143 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2148 gcc_assert (result);
2152 target = expand_unop (mode, builtin_optab, op0, target, 0);
2157 /* Output the entire sequence. */
2158 insns = get_insns ();
2164 /* If we were unable to expand via the builtin, stop the sequence
2165 (without outputting the insns) and call to the library function
2166 with the stabilized argument list. */
2170 target = expand_call (exp, target, target == const0_rtx);
2175 /* Expand a call to one of the builtin math functions that operate on
2176 floating point argument and output an integer result (ilogb, isinf,
2178 Return 0 if a normal call should be emitted rather than expanding the
2179 function in-line. EXP is the expression that is a call to the builtin
2180 function; if convenient, the result should be placed in TARGET.
2181 SUBTARGET may be used as the target for computing one of EXP's operands. */
2184 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2186 optab builtin_optab = 0;
2187 enum insn_code icode = CODE_FOR_nothing;
2189 tree fndecl = get_callee_fndecl (exp);
2190 enum machine_mode mode;
2191 bool errno_set = false;
2194 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2197 arg = CALL_EXPR_ARG (exp, 0);
2199 switch (DECL_FUNCTION_CODE (fndecl))
2201 CASE_FLT_FN (BUILT_IN_ILOGB):
2202 errno_set = true; builtin_optab = ilogb_optab; break;
2203 CASE_FLT_FN (BUILT_IN_ISINF):
2204 builtin_optab = isinf_optab; break;
2205 case BUILT_IN_ISNORMAL:
2206 case BUILT_IN_ISFINITE:
2207 CASE_FLT_FN (BUILT_IN_FINITE):
2208 /* These builtins have no optabs (yet). */
2214 /* There's no easy way to detect the case we need to set EDOM. */
2215 if (flag_errno_math && errno_set)
2218 /* Optab mode depends on the mode of the input argument. */
2219 mode = TYPE_MODE (TREE_TYPE (arg));
2222 icode = optab_handler (builtin_optab, mode)->insn_code;
2224 /* Before working hard, check whether the instruction is available. */
2225 if (icode != CODE_FOR_nothing)
2227 /* Make a suitable register to place result in. */
2229 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2230 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2232 gcc_assert (insn_data[icode].operand[0].predicate
2233 (target, GET_MODE (target)));
2235 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2236 need to expand the argument again. This way, we will not perform
2237 side-effects more the once. */
2238 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2240 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2242 if (mode != GET_MODE (op0))
2243 op0 = convert_to_mode (mode, op0, 0);
2245 /* Compute into TARGET.
2246 Set TARGET to wherever the result comes back. */
2247 emit_unop_insn (icode, target, op0, UNKNOWN);
2251 /* If there is no optab, try generic code. */
2252 switch (DECL_FUNCTION_CODE (fndecl))
2256 CASE_FLT_FN (BUILT_IN_ISINF):
2258 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2259 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2260 tree const type = TREE_TYPE (arg);
2264 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2265 real_from_string (&r, buf);
2266 result = build_call_expr (isgr_fn, 2,
2267 fold_build1 (ABS_EXPR, type, arg),
2268 build_real (type, r));
2269 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2271 CASE_FLT_FN (BUILT_IN_FINITE):
2272 case BUILT_IN_ISFINITE:
2274 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2275 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2276 tree const type = TREE_TYPE (arg);
2280 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2281 real_from_string (&r, buf);
2282 result = build_call_expr (isle_fn, 2,
2283 fold_build1 (ABS_EXPR, type, arg),
2284 build_real (type, r));
2285 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2287 case BUILT_IN_ISNORMAL:
2289 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2290 islessequal(fabs(x),DBL_MAX). */
2291 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2292 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2293 tree const type = TREE_TYPE (arg);
2294 REAL_VALUE_TYPE rmax, rmin;
2297 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2298 real_from_string (&rmax, buf);
2299 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2300 real_from_string (&rmin, buf);
2301 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2302 result = build_call_expr (isle_fn, 2, arg,
2303 build_real (type, rmax));
2304 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2305 build_call_expr (isge_fn, 2, arg,
2306 build_real (type, rmin)));
2307 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2313 target = expand_call (exp, target, target == const0_rtx);
2318 /* Expand a call to the builtin sincos math function.
2319 Return NULL_RTX if a normal call should be emitted rather than expanding the
2320 function in-line. EXP is the expression that is a call to the builtin
2324 expand_builtin_sincos (tree exp)
2326 rtx op0, op1, op2, target1, target2;
2327 enum machine_mode mode;
2328 tree arg, sinp, cosp;
2331 if (!validate_arglist (exp, REAL_TYPE,
2332 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2335 arg = CALL_EXPR_ARG (exp, 0);
2336 sinp = CALL_EXPR_ARG (exp, 1);
2337 cosp = CALL_EXPR_ARG (exp, 2);
2339 /* Make a suitable register to place result in. */
2340 mode = TYPE_MODE (TREE_TYPE (arg));
2342 /* Check if sincos insn is available, otherwise emit the call. */
2343 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2346 target1 = gen_reg_rtx (mode);
2347 target2 = gen_reg_rtx (mode);
2349 op0 = expand_normal (arg);
2350 op1 = expand_normal (build_fold_indirect_ref (sinp));
2351 op2 = expand_normal (build_fold_indirect_ref (cosp));
2353 /* Compute into target1 and target2.
2354 Set TARGET to wherever the result comes back. */
2355 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2356 gcc_assert (result);
2358 /* Move target1 and target2 to the memory locations indicated
2360 emit_move_insn (op1, target1);
2361 emit_move_insn (op2, target2);
2366 /* Expand a call to the internal cexpi builtin to the sincos math function.
2367 EXP is the expression that is a call to the builtin function; if convenient,
2368 the result should be placed in TARGET. SUBTARGET may be used as the target
2369 for computing one of EXP's operands. */
2372 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2374 tree fndecl = get_callee_fndecl (exp);
2376 enum machine_mode mode;
2379 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2382 arg = CALL_EXPR_ARG (exp, 0);
2383 type = TREE_TYPE (arg);
2384 mode = TYPE_MODE (TREE_TYPE (arg));
2386 /* Try expanding via a sincos optab, fall back to emitting a libcall
2387 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2388 is only generated from sincos, cexp or if we have either of them. */
2389 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2391 op1 = gen_reg_rtx (mode);
2392 op2 = gen_reg_rtx (mode);
2394 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2396 /* Compute into op1 and op2. */
2397 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2399 else if (TARGET_HAS_SINCOS)
2401 tree call, fn = NULL_TREE;
2405 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2406 fn = built_in_decls[BUILT_IN_SINCOSF];
2407 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2408 fn = built_in_decls[BUILT_IN_SINCOS];
2409 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2410 fn = built_in_decls[BUILT_IN_SINCOSL];
2414 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2415 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2416 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2417 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2418 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2419 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2421 /* Make sure not to fold the sincos call again. */
2422 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2423 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2424 call, 3, arg, top1, top2));
2428 tree call, fn = NULL_TREE, narg;
2429 tree ctype = build_complex_type (type);
2431 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2432 fn = built_in_decls[BUILT_IN_CEXPF];
2433 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2434 fn = built_in_decls[BUILT_IN_CEXP];
2435 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2436 fn = built_in_decls[BUILT_IN_CEXPL];
2440 /* If we don't have a decl for cexp create one. This is the
2441 friendliest fallback if the user calls __builtin_cexpi
2442 without full target C99 function support. */
2443 if (fn == NULL_TREE)
2446 const char *name = NULL;
2448 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2450 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2452 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2455 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2456 fn = build_fn_decl (name, fntype);
2459 narg = fold_build2 (COMPLEX_EXPR, ctype,
2460 build_real (type, dconst0), arg);
2462 /* Make sure not to fold the cexp call again. */
2463 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2464 return expand_expr (build_call_nary (ctype, call, 1, narg),
2465 target, VOIDmode, EXPAND_NORMAL);
2468 /* Now build the proper return type. */
2469 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2470 make_tree (TREE_TYPE (arg), op2),
2471 make_tree (TREE_TYPE (arg), op1)),
2472 target, VOIDmode, EXPAND_NORMAL);
2475 /* Expand a call to one of the builtin rounding functions gcc defines
2476 as an extension (lfloor and lceil). As these are gcc extensions we
2477 do not need to worry about setting errno to EDOM.
2478 If expanding via optab fails, lower expression to (int)(floor(x)).
2479 EXP is the expression that is a call to the builtin function;
2480 if convenient, the result should be placed in TARGET. */
2483 expand_builtin_int_roundingfn (tree exp, rtx target)
2485 convert_optab builtin_optab;
2486 rtx op0, insns, tmp;
2487 tree fndecl = get_callee_fndecl (exp);
2488 enum built_in_function fallback_fn;
2489 tree fallback_fndecl;
2490 enum machine_mode mode;
2493 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2496 arg = CALL_EXPR_ARG (exp, 0);
2498 switch (DECL_FUNCTION_CODE (fndecl))
2500 CASE_FLT_FN (BUILT_IN_LCEIL):
2501 CASE_FLT_FN (BUILT_IN_LLCEIL):
2502 builtin_optab = lceil_optab;
2503 fallback_fn = BUILT_IN_CEIL;
2506 CASE_FLT_FN (BUILT_IN_LFLOOR):
2507 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2508 builtin_optab = lfloor_optab;
2509 fallback_fn = BUILT_IN_FLOOR;
2516 /* Make a suitable register to place result in. */
2517 mode = TYPE_MODE (TREE_TYPE (exp));
2519 target = gen_reg_rtx (mode);
2521 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2522 need to expand the argument again. This way, we will not perform
2523 side-effects more the once. */
2524 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2526 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2530 /* Compute into TARGET. */
2531 if (expand_sfix_optab (target, op0, builtin_optab))
2533 /* Output the entire sequence. */
2534 insns = get_insns ();
2540 /* If we were unable to expand via the builtin, stop the sequence
2541 (without outputting the insns). */
2544 /* Fall back to floating point rounding optab. */
2545 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2547 /* For non-C99 targets we may end up without a fallback fndecl here
2548 if the user called __builtin_lfloor directly. In this case emit
2549 a call to the floor/ceil variants nevertheless. This should result
2550 in the best user experience for not full C99 targets. */
2551 if (fallback_fndecl == NULL_TREE)
2554 const char *name = NULL;
2556 switch (DECL_FUNCTION_CODE (fndecl))
2558 case BUILT_IN_LCEIL:
2559 case BUILT_IN_LLCEIL:
2562 case BUILT_IN_LCEILF:
2563 case BUILT_IN_LLCEILF:
2566 case BUILT_IN_LCEILL:
2567 case BUILT_IN_LLCEILL:
2570 case BUILT_IN_LFLOOR:
2571 case BUILT_IN_LLFLOOR:
2574 case BUILT_IN_LFLOORF:
2575 case BUILT_IN_LLFLOORF:
2578 case BUILT_IN_LFLOORL:
2579 case BUILT_IN_LLFLOORL:
2586 fntype = build_function_type_list (TREE_TYPE (arg),
2587 TREE_TYPE (arg), NULL_TREE);
2588 fallback_fndecl = build_fn_decl (name, fntype);
2591 exp = build_call_expr (fallback_fndecl, 1, arg);
2593 tmp = expand_normal (exp);
2595 /* Truncate the result of floating point optab to integer
2596 via expand_fix (). */
2597 target = gen_reg_rtx (mode);
2598 expand_fix (target, tmp, 0);
2603 /* Expand a call to one of the builtin math functions doing integer
2605 Return 0 if a normal call should be emitted rather than expanding the
2606 function in-line. EXP is the expression that is a call to the builtin
2607 function; if convenient, the result should be placed in TARGET. */
2610 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2612 convert_optab builtin_optab;
2614 tree fndecl = get_callee_fndecl (exp);
2616 enum machine_mode mode;
2618 /* There's no easy way to detect the case we need to set EDOM. */
2619 if (flag_errno_math)
2622 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2625 arg = CALL_EXPR_ARG (exp, 0);
2627 switch (DECL_FUNCTION_CODE (fndecl))
2629 CASE_FLT_FN (BUILT_IN_LRINT):
2630 CASE_FLT_FN (BUILT_IN_LLRINT):
2631 builtin_optab = lrint_optab; break;
2632 CASE_FLT_FN (BUILT_IN_LROUND):
2633 CASE_FLT_FN (BUILT_IN_LLROUND):
2634 builtin_optab = lround_optab; break;
2639 /* Make a suitable register to place result in. */
2640 mode = TYPE_MODE (TREE_TYPE (exp));
2642 target = gen_reg_rtx (mode);
2644 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2645 need to expand the argument again. This way, we will not perform
2646 side-effects more the once. */
2647 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2649 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2653 if (expand_sfix_optab (target, op0, builtin_optab))
2655 /* Output the entire sequence. */
2656 insns = get_insns ();
2662 /* If we were unable to expand via the builtin, stop the sequence
2663 (without outputting the insns) and call to the library function
2664 with the stabilized argument list. */
2667 target = expand_call (exp, target, target == const0_rtx);
2672 /* To evaluate powi(x,n), the floating point value x raised to the
2673 constant integer exponent n, we use a hybrid algorithm that
2674 combines the "window method" with look-up tables. For an
2675 introduction to exponentiation algorithms and "addition chains",
2676 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2677 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2678 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2679 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2681 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2682 multiplications to inline before calling the system library's pow
2683 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2684 so this default never requires calling pow, powf or powl. */
2686 #ifndef POWI_MAX_MULTS
2687 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2690 /* The size of the "optimal power tree" lookup table. All
2691 exponents less than this value are simply looked up in the
2692 powi_table below. This threshold is also used to size the
2693 cache of pseudo registers that hold intermediate results. */
2694 #define POWI_TABLE_SIZE 256
2696 /* The size, in bits of the window, used in the "window method"
2697 exponentiation algorithm. This is equivalent to a radix of
2698 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2699 #define POWI_WINDOW_SIZE 3
2701 /* The following table is an efficient representation of an
2702 "optimal power tree". For each value, i, the corresponding
2703 value, j, in the table states than an optimal evaluation
2704 sequence for calculating pow(x,i) can be found by evaluating
2705 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2706 100 integers is given in Knuth's "Seminumerical algorithms". */
2708 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2710 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2711 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2712 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2713 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2714 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2715 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2716 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2717 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2718 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2719 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2720 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2721 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2722 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2723 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2724 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2725 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2726 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2727 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2728 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2729 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2730 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2731 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2732 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2733 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2734 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2735 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2736 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2737 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2738 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2739 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2740 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2741 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2745 /* Return the number of multiplications required to calculate
2746 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2747 subroutine of powi_cost. CACHE is an array indicating
2748 which exponents have already been calculated. */
2751 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2753 /* If we've already calculated this exponent, then this evaluation
2754 doesn't require any additional multiplications. */
2759 return powi_lookup_cost (n - powi_table[n], cache)
2760 + powi_lookup_cost (powi_table[n], cache) + 1;
2763 /* Return the number of multiplications required to calculate
2764 powi(x,n) for an arbitrary x, given the exponent N. This
2765 function needs to be kept in sync with expand_powi below. */
2768 powi_cost (HOST_WIDE_INT n)
2770 bool cache[POWI_TABLE_SIZE];
2771 unsigned HOST_WIDE_INT digit;
2772 unsigned HOST_WIDE_INT val;
2778 /* Ignore the reciprocal when calculating the cost. */
2779 val = (n < 0) ? -n : n;
2781 /* Initialize the exponent cache. */
2782 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2787 while (val >= POWI_TABLE_SIZE)
2791 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2792 result += powi_lookup_cost (digit, cache)
2793 + POWI_WINDOW_SIZE + 1;
2794 val >>= POWI_WINDOW_SIZE;
2803 return result + powi_lookup_cost (val, cache);
2806 /* Recursive subroutine of expand_powi. This function takes the array,
2807 CACHE, of already calculated exponents and an exponent N and returns
2808 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2811 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2813 unsigned HOST_WIDE_INT digit;
2817 if (n < POWI_TABLE_SIZE)
2822 target = gen_reg_rtx (mode);
2825 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2826 op1 = expand_powi_1 (mode, powi_table[n], cache);
2830 target = gen_reg_rtx (mode);
2831 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2832 op0 = expand_powi_1 (mode, n - digit, cache);
2833 op1 = expand_powi_1 (mode, digit, cache);
2837 target = gen_reg_rtx (mode);
2838 op0 = expand_powi_1 (mode, n >> 1, cache);
2842 result = expand_mult (mode, op0, op1, target, 0);
2843 if (result != target)
2844 emit_move_insn (target, result);
2848 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2849 floating point operand in mode MODE, and N is the exponent. This
2850 function needs to be kept in sync with powi_cost above. */
2853 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2855 unsigned HOST_WIDE_INT val;
2856 rtx cache[POWI_TABLE_SIZE];
2860 return CONST1_RTX (mode);
2862 val = (n < 0) ? -n : n;
2864 memset (cache, 0, sizeof (cache));
2867 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2869 /* If the original exponent was negative, reciprocate the result. */
2871 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2872 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2877 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2878 a normal call should be emitted rather than expanding the function
2879 in-line. EXP is the expression that is a call to the builtin
2880 function; if convenient, the result should be placed in TARGET. */
2883 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2887 tree type = TREE_TYPE (exp);
2888 REAL_VALUE_TYPE cint, c, c2;
2891 enum machine_mode mode = TYPE_MODE (type);
2893 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2896 arg0 = CALL_EXPR_ARG (exp, 0);
2897 arg1 = CALL_EXPR_ARG (exp, 1);
2899 if (TREE_CODE (arg1) != REAL_CST
2900 || TREE_OVERFLOW (arg1))
2901 return expand_builtin_mathfn_2 (exp, target, subtarget);
2903 /* Handle constant exponents. */
2905 /* For integer valued exponents we can expand to an optimal multiplication
2906 sequence using expand_powi. */
2907 c = TREE_REAL_CST (arg1);
2908 n = real_to_integer (&c);
2909 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2910 if (real_identical (&c, &cint)
2911 && ((n >= -1 && n <= 2)
2912 || (flag_unsafe_math_optimizations
2914 && powi_cost (n) <= POWI_MAX_MULTS)))
2916 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2919 op = force_reg (mode, op);
2920 op = expand_powi (op, mode, n);
2925 narg0 = builtin_save_expr (arg0);
2927 /* If the exponent is not integer valued, check if it is half of an integer.
2928 In this case we can expand to sqrt (x) * x**(n/2). */
2929 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2930 if (fn != NULL_TREE)
2932 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2933 n = real_to_integer (&c2);
2934 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2935 if (real_identical (&c2, &cint)
2936 && ((flag_unsafe_math_optimizations
2938 && powi_cost (n/2) <= POWI_MAX_MULTS)
2941 tree call_expr = build_call_expr (fn, 1, narg0);
2942 /* Use expand_expr in case the newly built call expression
2943 was folded to a non-call. */
2944 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2947 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2948 op2 = force_reg (mode, op2);
2949 op2 = expand_powi (op2, mode, abs (n / 2));
2950 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2951 0, OPTAB_LIB_WIDEN);
2952 /* If the original exponent was negative, reciprocate the
2955 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2956 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2962 /* Try if the exponent is a third of an integer. In this case
2963 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2964 different from pow (x, 1./3.) due to rounding and behavior
2965 with negative x we need to constrain this transformation to
2966 unsafe math and positive x or finite math. */
2967 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2969 && flag_unsafe_math_optimizations
2970 && (tree_expr_nonnegative_p (arg0)
2971 || !HONOR_NANS (mode)))
2973 REAL_VALUE_TYPE dconst3;
2974 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
2975 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2976 real_round (&c2, mode, &c2);
2977 n = real_to_integer (&c2);
2978 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2979 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
2980 real_convert (&c2, mode, &c2);
2981 if (real_identical (&c2, &c)
2983 && powi_cost (n/3) <= POWI_MAX_MULTS)
2986 tree call_expr = build_call_expr (fn, 1,narg0);
2987 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2988 if (abs (n) % 3 == 2)
2989 op = expand_simple_binop (mode, MULT, op, op, op,
2990 0, OPTAB_LIB_WIDEN);
2993 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2994 op2 = force_reg (mode, op2);
2995 op2 = expand_powi (op2, mode, abs (n / 3));
2996 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2997 0, OPTAB_LIB_WIDEN);
2998 /* If the original exponent was negative, reciprocate the
3001 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3002 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3008 /* Fall back to optab expansion. */
3009 return expand_builtin_mathfn_2 (exp, target, subtarget);
3012 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3013 a normal call should be emitted rather than expanding the function
3014 in-line. EXP is the expression that is a call to the builtin
3015 function; if convenient, the result should be placed in TARGET. */
3018 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3022 enum machine_mode mode;
3023 enum machine_mode mode2;
3025 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3028 arg0 = CALL_EXPR_ARG (exp, 0);
3029 arg1 = CALL_EXPR_ARG (exp, 1);
3030 mode = TYPE_MODE (TREE_TYPE (exp));
3032 /* Handle constant power. */
3034 if (TREE_CODE (arg1) == INTEGER_CST
3035 && !TREE_OVERFLOW (arg1))
3037 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3039 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3040 Otherwise, check the number of multiplications required. */
3041 if ((TREE_INT_CST_HIGH (arg1) == 0
3042 || TREE_INT_CST_HIGH (arg1) == -1)
3043 && ((n >= -1 && n <= 2)
3045 && powi_cost (n) <= POWI_MAX_MULTS)))
3047 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3048 op0 = force_reg (mode, op0);
3049 return expand_powi (op0, mode, n);
3053 /* Emit a libcall to libgcc. */
3055 /* Mode of the 2nd argument must match that of an int. */
3056 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3058 if (target == NULL_RTX)
3059 target = gen_reg_rtx (mode);
3061 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3062 if (GET_MODE (op0) != mode)
3063 op0 = convert_to_mode (mode, op0, 0);
3064 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3065 if (GET_MODE (op1) != mode2)
3066 op1 = convert_to_mode (mode2, op1, 0);
3068 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3069 target, LCT_CONST, mode, 2,
3070 op0, mode, op1, mode2);
3075 /* Expand expression EXP which is a call to the strlen builtin. Return
3076 NULL_RTX if we failed the caller should emit a normal call, otherwise
3077 try to get the result in TARGET, if convenient. */
3080 expand_builtin_strlen (tree exp, rtx target,
3081 enum machine_mode target_mode)
3083 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3089 tree src = CALL_EXPR_ARG (exp, 0);
3090 rtx result, src_reg, char_rtx, before_strlen;
3091 enum machine_mode insn_mode = target_mode, char_mode;
3092 enum insn_code icode = CODE_FOR_nothing;
3095 /* If the length can be computed at compile-time, return it. */
3096 len = c_strlen (src, 0);
3098 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3100 /* If the length can be computed at compile-time and is constant
3101 integer, but there are side-effects in src, evaluate
3102 src for side-effects, then return len.
3103 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3104 can be optimized into: i++; x = 3; */
3105 len = c_strlen (src, 1);
3106 if (len && TREE_CODE (len) == INTEGER_CST)
3108 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3109 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3112 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3114 /* If SRC is not a pointer type, don't do this operation inline. */
3118 /* Bail out if we can't compute strlen in the right mode. */
3119 while (insn_mode != VOIDmode)
3121 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3122 if (icode != CODE_FOR_nothing)
3125 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3127 if (insn_mode == VOIDmode)
3130 /* Make a place to write the result of the instruction. */
3134 && GET_MODE (result) == insn_mode
3135 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3136 result = gen_reg_rtx (insn_mode);
3138 /* Make a place to hold the source address. We will not expand
3139 the actual source until we are sure that the expansion will
3140 not fail -- there are trees that cannot be expanded twice. */
3141 src_reg = gen_reg_rtx (Pmode);
3143 /* Mark the beginning of the strlen sequence so we can emit the
3144 source operand later. */
3145 before_strlen = get_last_insn ();
3147 char_rtx = const0_rtx;
3148 char_mode = insn_data[(int) icode].operand[2].mode;
3149 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3151 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3153 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3154 char_rtx, GEN_INT (align));
3159 /* Now that we are assured of success, expand the source. */
3161 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3163 emit_move_insn (src_reg, pat);
3168 emit_insn_after (pat, before_strlen);
3170 emit_insn_before (pat, get_insns ());
3172 /* Return the value in the proper mode for this function. */
3173 if (GET_MODE (result) == target_mode)
3175 else if (target != 0)
3176 convert_move (target, result, 0);
3178 target = convert_to_mode (target_mode, result, 0);
3184 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3185 caller should emit a normal call, otherwise try to get the result
3186 in TARGET, if convenient (and in mode MODE if that's convenient). */
3189 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3191 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3193 tree type = TREE_TYPE (exp);
3194 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3195 CALL_EXPR_ARG (exp, 1), type);
3197 return expand_expr (result, target, mode, EXPAND_NORMAL);
3202 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3203 caller should emit a normal call, otherwise try to get the result
3204 in TARGET, if convenient (and in mode MODE if that's convenient). */
3207 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3209 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3211 tree type = TREE_TYPE (exp);
3212 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3213 CALL_EXPR_ARG (exp, 1), type);
3215 return expand_expr (result, target, mode, EXPAND_NORMAL);
3217 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3222 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3223 caller should emit a normal call, otherwise try to get the result
3224 in TARGET, if convenient (and in mode MODE if that's convenient). */
3227 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3229 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3231 tree type = TREE_TYPE (exp);
3232 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3233 CALL_EXPR_ARG (exp, 1), type);
3235 return expand_expr (result, target, mode, EXPAND_NORMAL);
3240 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3241 caller should emit a normal call, otherwise try to get the result
3242 in TARGET, if convenient (and in mode MODE if that's convenient). */
3245 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3247 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3249 tree type = TREE_TYPE (exp);
3250 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3251 CALL_EXPR_ARG (exp, 1), type);
3253 return expand_expr (result, target, mode, EXPAND_NORMAL);
3258 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3259 bytes from constant string DATA + OFFSET and return it as target
3263 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3264 enum machine_mode mode)
3266 const char *str = (const char *) data;
3268 gcc_assert (offset >= 0
3269 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3270 <= strlen (str) + 1));
3272 return c_readstr (str + offset, mode);
3275 /* Expand a call EXP to the memcpy builtin.
3276 Return NULL_RTX if we failed, the caller should emit a normal call,
3277 otherwise try to get the result in TARGET, if convenient (and in
3278 mode MODE if that's convenient). */
3281 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3283 tree fndecl = get_callee_fndecl (exp);
3285 if (!validate_arglist (exp,
3286 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3290 tree dest = CALL_EXPR_ARG (exp, 0);
3291 tree src = CALL_EXPR_ARG (exp, 1);
3292 tree len = CALL_EXPR_ARG (exp, 2);
3293 const char *src_str;
3294 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3295 unsigned int dest_align
3296 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3297 rtx dest_mem, src_mem, dest_addr, len_rtx;
3298 tree result = fold_builtin_memory_op (dest, src, len,
3299 TREE_TYPE (TREE_TYPE (fndecl)),
3301 HOST_WIDE_INT expected_size = -1;
3302 unsigned int expected_align = 0;
3303 tree_ann_common_t ann;
3307 while (TREE_CODE (result) == COMPOUND_EXPR)
3309 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3311 result = TREE_OPERAND (result, 1);
3313 return expand_expr (result, target, mode, EXPAND_NORMAL);
3316 /* If DEST is not a pointer type, call the normal function. */
3317 if (dest_align == 0)
3320 /* If either SRC is not a pointer type, don't do this
3321 operation in-line. */
3325 ann = tree_common_ann (exp);
3327 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3329 if (expected_align < dest_align)
3330 expected_align = dest_align;
3331 dest_mem = get_memory_rtx (dest, len);
3332 set_mem_align (dest_mem, dest_align);
3333 len_rtx = expand_normal (len);
3334 src_str = c_getstr (src);
3336 /* If SRC is a string constant and block move would be done
3337 by pieces, we can avoid loading the string from memory
3338 and only stored the computed constants. */
3340 && GET_CODE (len_rtx) == CONST_INT
3341 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3342 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3343 CONST_CAST (char *, src_str),
3346 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3347 builtin_memcpy_read_str,
3348 CONST_CAST (char *, src_str),
3349 dest_align, false, 0);
3350 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3351 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3355 src_mem = get_memory_rtx (src, len);
3356 set_mem_align (src_mem, src_align);
3358 /* Copy word part most expediently. */
3359 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3360 CALL_EXPR_TAILCALL (exp)
3361 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3362 expected_align, expected_size);
3366 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3367 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3373 /* Expand a call EXP to the mempcpy builtin.
3374 Return NULL_RTX if we failed; the caller should emit a normal call,
3375 otherwise try to get the result in TARGET, if convenient (and in
3376 mode MODE if that's convenient). If ENDP is 0 return the
3377 destination pointer, if ENDP is 1 return the end pointer ala
3378 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3382 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3384 if (!validate_arglist (exp,
3385 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3389 tree dest = CALL_EXPR_ARG (exp, 0);
3390 tree src = CALL_EXPR_ARG (exp, 1);
3391 tree len = CALL_EXPR_ARG (exp, 2);
3392 return expand_builtin_mempcpy_args (dest, src, len,
3394 target, mode, /*endp=*/ 1);
3398 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3399 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3400 so that this can also be called without constructing an actual CALL_EXPR.
3401 TYPE is the return type of the call. The other arguments and return value
3402 are the same as for expand_builtin_mempcpy. */
3405 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3406 rtx target, enum machine_mode mode, int endp)
3408 /* If return value is ignored, transform mempcpy into memcpy. */
3409 if (target == const0_rtx)
3411 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3416 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3417 target, mode, EXPAND_NORMAL);
3421 const char *src_str;
3422 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3423 unsigned int dest_align
3424 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3425 rtx dest_mem, src_mem, len_rtx;
3426 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3430 while (TREE_CODE (result) == COMPOUND_EXPR)
3432 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3434 result = TREE_OPERAND (result, 1);
3436 return expand_expr (result, target, mode, EXPAND_NORMAL);
3439 /* If either SRC or DEST is not a pointer type, don't do this
3440 operation in-line. */
3441 if (dest_align == 0 || src_align == 0)
3444 /* If LEN is not constant, call the normal function. */
3445 if (! host_integerp (len, 1))
3448 len_rtx = expand_normal (len);
3449 src_str = c_getstr (src);
3451 /* If SRC is a string constant and block move would be done
3452 by pieces, we can avoid loading the string from memory
3453 and only stored the computed constants. */
3455 && GET_CODE (len_rtx) == CONST_INT
3456 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3457 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3458 CONST_CAST (char *, src_str),
3461 dest_mem = get_memory_rtx (dest, len);
3462 set_mem_align (dest_mem, dest_align);
3463 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3464 builtin_memcpy_read_str,
3465 CONST_CAST (char *, src_str),
3466 dest_align, false, endp);
3467 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3468 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3472 if (GET_CODE (len_rtx) == CONST_INT
3473 && can_move_by_pieces (INTVAL (len_rtx),
3474 MIN (dest_align, src_align)))
3476 dest_mem = get_memory_rtx (dest, len);
3477 set_mem_align (dest_mem, dest_align);
3478 src_mem = get_memory_rtx (src, len);
3479 set_mem_align (src_mem, src_align);
3480 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3481 MIN (dest_align, src_align), endp);
3482 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3483 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3491 /* Expand expression EXP, which is a call to the memmove builtin. Return
3492 NULL_RTX if we failed; the caller should emit a normal call. */
3495 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3497 if (!validate_arglist (exp,
3498 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3502 tree dest = CALL_EXPR_ARG (exp, 0);
3503 tree src = CALL_EXPR_ARG (exp, 1);
3504 tree len = CALL_EXPR_ARG (exp, 2);
3505 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3506 target, mode, ignore);
3510 /* Helper function to do the actual work for expand_builtin_memmove. The
3511 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3512 so that this can also be called without constructing an actual CALL_EXPR.
3513 TYPE is the return type of the call. The other arguments and return value
3514 are the same as for expand_builtin_memmove. */
3517 expand_builtin_memmove_args (tree dest, tree src, tree len,
3518 tree type, rtx target, enum machine_mode mode,
3521 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3525 STRIP_TYPE_NOPS (result);
3526 while (TREE_CODE (result) == COMPOUND_EXPR)
3528 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3530 result = TREE_OPERAND (result, 1);
3532 return expand_expr (result, target, mode, EXPAND_NORMAL);
3535 /* Otherwise, call the normal function. */
3539 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3540 NULL_RTX if we failed the caller should emit a normal call. */
3543 expand_builtin_bcopy (tree exp, int ignore)
3545 tree type = TREE_TYPE (exp);
3546 tree src, dest, size;
3548 if (!validate_arglist (exp,
3549 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3552 src = CALL_EXPR_ARG (exp, 0);
3553 dest = CALL_EXPR_ARG (exp, 1);
3554 size = CALL_EXPR_ARG (exp, 2);
3556 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3557 This is done this way so that if it isn't expanded inline, we fall
3558 back to calling bcopy instead of memmove. */
3559 return expand_builtin_memmove_args (dest, src,
3560 fold_convert (sizetype, size),
3561 type, const0_rtx, VOIDmode,
3566 # define HAVE_movstr 0
3567 # define CODE_FOR_movstr CODE_FOR_nothing
3570 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3571 we failed, the caller should emit a normal call, otherwise try to
3572 get the result in TARGET, if convenient. If ENDP is 0 return the
3573 destination pointer, if ENDP is 1 return the end pointer ala
3574 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3578 expand_movstr (tree dest, tree src, rtx target, int endp)
3584 const struct insn_data * data;
3589 dest_mem = get_memory_rtx (dest, NULL);
3590 src_mem = get_memory_rtx (src, NULL);
3593 target = force_reg (Pmode, XEXP (dest_mem, 0));
3594 dest_mem = replace_equiv_address (dest_mem, target);
3595 end = gen_reg_rtx (Pmode);
3599 if (target == 0 || target == const0_rtx)
3601 end = gen_reg_rtx (Pmode);
3609 data = insn_data + CODE_FOR_movstr;
3611 if (data->operand[0].mode != VOIDmode)
3612 end = gen_lowpart (data->operand[0].mode, end);
3614 insn = data->genfun (end, dest_mem, src_mem);
3620 /* movstr is supposed to set end to the address of the NUL
3621 terminator. If the caller requested a mempcpy-like return value,
3623 if (endp == 1 && target != const0_rtx)
3625 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3626 emit_move_insn (target, force_operand (tem, NULL_RTX));
3632 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3633 NULL_RTX if we failed the caller should emit a normal call, otherwise
3634 try to get the result in TARGET, if convenient (and in mode MODE if that's
3638 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3640 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3642 tree dest = CALL_EXPR_ARG (exp, 0);
3643 tree src = CALL_EXPR_ARG (exp, 1);
3644 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3649 /* Helper function to do the actual work for expand_builtin_strcpy. The
3650 arguments to the builtin_strcpy call DEST and SRC are broken out
3651 so that this can also be called without constructing an actual CALL_EXPR.
3652 The other arguments and return value are the same as for
3653 expand_builtin_strcpy. */
3656 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3657 rtx target, enum machine_mode mode)
3659 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3661 return expand_expr (result, target, mode, EXPAND_NORMAL);
3662 return expand_movstr (dest, src, target, /*endp=*/0);
3666 /* Expand a call EXP to the stpcpy builtin.
3667 Return NULL_RTX if we failed the caller should emit a normal call,
3668 otherwise try to get the result in TARGET, if convenient (and in
3669 mode MODE if that's convenient). */
3672 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3676 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3679 dst = CALL_EXPR_ARG (exp, 0);
3680 src = CALL_EXPR_ARG (exp, 1);
3682 /* If return value is ignored, transform stpcpy into strcpy. */
3683 if (target == const0_rtx)
3685 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3689 return expand_expr (build_call_expr (fn, 2, dst, src),
3690 target, mode, EXPAND_NORMAL);
3697 /* Ensure we get an actual string whose length can be evaluated at
3698 compile-time, not an expression containing a string. This is
3699 because the latter will potentially produce pessimized code
3700 when used to produce the return value. */
3701 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3702 return expand_movstr (dst, src, target, /*endp=*/2);
3704 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3705 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3706 target, mode, /*endp=*/2);
3711 if (TREE_CODE (len) == INTEGER_CST)
3713 rtx len_rtx = expand_normal (len);
3715 if (GET_CODE (len_rtx) == CONST_INT)
3717 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3718 dst, src, target, mode);
3724 if (mode != VOIDmode)
3725 target = gen_reg_rtx (mode);
3727 target = gen_reg_rtx (GET_MODE (ret));
3729 if (GET_MODE (target) != GET_MODE (ret))
3730 ret = gen_lowpart (GET_MODE (target), ret);
3732 ret = plus_constant (ret, INTVAL (len_rtx));
3733 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3741 return expand_movstr (dst, src, target, /*endp=*/2);
3745 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3746 bytes from constant string DATA + OFFSET and return it as target
3750 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3751 enum machine_mode mode)
3753 const char *str = (const char *) data;
3755 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3758 return c_readstr (str + offset, mode);
3761 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3762 NULL_RTX if we failed the caller should emit a normal call. */
3765 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3767 tree fndecl = get_callee_fndecl (exp);
3769 if (validate_arglist (exp,
3770 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3772 tree dest = CALL_EXPR_ARG (exp, 0);
3773 tree src = CALL_EXPR_ARG (exp, 1);
3774 tree len = CALL_EXPR_ARG (exp, 2);
3775 tree slen = c_strlen (src, 1);
3776 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3780 while (TREE_CODE (result) == COMPOUND_EXPR)
3782 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3784 result = TREE_OPERAND (result, 1);
3786 return expand_expr (result, target, mode, EXPAND_NORMAL);
3789 /* We must be passed a constant len and src parameter. */
3790 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3793 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3795 /* We're required to pad with trailing zeros if the requested
3796 len is greater than strlen(s2)+1. In that case try to
3797 use store_by_pieces, if it fails, punt. */
3798 if (tree_int_cst_lt (slen, len))
3800 unsigned int dest_align
3801 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3802 const char *p = c_getstr (src);
3805 if (!p || dest_align == 0 || !host_integerp (len, 1)
3806 || !can_store_by_pieces (tree_low_cst (len, 1),
3807 builtin_strncpy_read_str,
3808 CONST_CAST (char *, p),
3812 dest_mem = get_memory_rtx (dest, len);
3813 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3814 builtin_strncpy_read_str,
3815 CONST_CAST (char *, p), dest_align, false, 0);
3816 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3817 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3824 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3825 bytes from constant string DATA + OFFSET and return it as target
3829 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3830 enum machine_mode mode)
3832 const char *c = (const char *) data;
3833 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3835 memset (p, *c, GET_MODE_SIZE (mode));
3837 return c_readstr (p, mode);
3840 /* Callback routine for store_by_pieces. Return the RTL of a register
3841 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3842 char value given in the RTL register data. For example, if mode is
3843 4 bytes wide, return the RTL for 0x01010101*data. */
3846 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3847 enum machine_mode mode)
3853 size = GET_MODE_SIZE (mode);
3857 p = XALLOCAVEC (char, size);
3858 memset (p, 1, size);
3859 coeff = c_readstr (p, mode);
3861 target = convert_to_mode (mode, (rtx) data, 1);
3862 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3863 return force_reg (mode, target);
3866 /* Expand expression EXP, which is a call to the memset builtin. Return
3867 NULL_RTX if we failed the caller should emit a normal call, otherwise
3868 try to get the result in TARGET, if convenient (and in mode MODE if that's
3872 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3874 if (!validate_arglist (exp,
3875 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3879 tree dest = CALL_EXPR_ARG (exp, 0);
3880 tree val = CALL_EXPR_ARG (exp, 1);
3881 tree len = CALL_EXPR_ARG (exp, 2);
3882 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3886 /* Helper function to do the actual work for expand_builtin_memset. The
3887 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3888 so that this can also be called without constructing an actual CALL_EXPR.
3889 The other arguments and return value are the same as for
3890 expand_builtin_memset. */
3893 expand_builtin_memset_args (tree dest, tree val, tree len,
3894 rtx target, enum machine_mode mode, tree orig_exp)
3897 enum built_in_function fcode;
3899 unsigned int dest_align;
3900 rtx dest_mem, dest_addr, len_rtx;
3901 HOST_WIDE_INT expected_size = -1;
3902 unsigned int expected_align = 0;
3903 tree_ann_common_t ann;
3905 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3907 /* If DEST is not a pointer type, don't do this operation in-line. */
3908 if (dest_align == 0)
3911 ann = tree_common_ann (orig_exp);
3913 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3915 if (expected_align < dest_align)
3916 expected_align = dest_align;
3918 /* If the LEN parameter is zero, return DEST. */
3919 if (integer_zerop (len))
3921 /* Evaluate and ignore VAL in case it has side-effects. */
3922 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3923 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3926 /* Stabilize the arguments in case we fail. */
3927 dest = builtin_save_expr (dest);
3928 val = builtin_save_expr (val);
3929 len = builtin_save_expr (len);
3931 len_rtx = expand_normal (len);
3932 dest_mem = get_memory_rtx (dest, len);
3934 if (TREE_CODE (val) != INTEGER_CST)
3938 val_rtx = expand_normal (val);
3939 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3942 /* Assume that we can memset by pieces if we can store
3943 * the coefficients by pieces (in the required modes).
3944 * We can't pass builtin_memset_gen_str as that emits RTL. */
3946 if (host_integerp (len, 1)
3947 && can_store_by_pieces (tree_low_cst (len, 1),
3948 builtin_memset_read_str, &c, dest_align,
3951 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3953 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3954 builtin_memset_gen_str, val_rtx, dest_align,
3957 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3958 dest_align, expected_align,
3962 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3963 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3967 if (target_char_cast (val, &c))
3972 if (host_integerp (len, 1)
3973 && can_store_by_pieces (tree_low_cst (len, 1),
3974 builtin_memset_read_str, &c, dest_align,
3976 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3977 builtin_memset_read_str, &c, dest_align, true, 0);
3978 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3979 dest_align, expected_align,
3983 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3984 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3988 set_mem_align (dest_mem, dest_align);
3989 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3990 CALL_EXPR_TAILCALL (orig_exp)
3991 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3992 expected_align, expected_size);
3996 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3997 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4003 fndecl = get_callee_fndecl (orig_exp);
4004 fcode = DECL_FUNCTION_CODE (fndecl);
4005 if (fcode == BUILT_IN_MEMSET)
4006 fn = build_call_expr (fndecl, 3, dest, val, len);
4007 else if (fcode == BUILT_IN_BZERO)
4008 fn = build_call_expr (fndecl, 2, dest, len);
4011 if (TREE_CODE (fn) == CALL_EXPR)
4012 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4013 return expand_call (fn, target, target == const0_rtx);
4016 /* Expand expression EXP, which is a call to the bzero builtin. Return
4017 NULL_RTX if we failed the caller should emit a normal call. */
4020 expand_builtin_bzero (tree exp)
4024 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4027 dest = CALL_EXPR_ARG (exp, 0);
4028 size = CALL_EXPR_ARG (exp, 1);
4030 /* New argument list transforming bzero(ptr x, int y) to
4031 memset(ptr x, int 0, size_t y). This is done this way
4032 so that if it isn't expanded inline, we fallback to
4033 calling bzero instead of memset. */
4035 return expand_builtin_memset_args (dest, integer_zero_node,
4036 fold_convert (sizetype, size),
4037 const0_rtx, VOIDmode, exp);
4040 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4041 caller should emit a normal call, otherwise try to get the result
4042 in TARGET, if convenient (and in mode MODE if that's convenient). */
4045 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4047 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4048 INTEGER_TYPE, VOID_TYPE))
4050 tree type = TREE_TYPE (exp);
4051 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4052 CALL_EXPR_ARG (exp, 1),
4053 CALL_EXPR_ARG (exp, 2), type);
4055 return expand_expr (result, target, mode, EXPAND_NORMAL);
4060 /* Expand expression EXP, which is a call to the memcmp built-in function.
4061 Return NULL_RTX if we failed and the
4062 caller should emit a normal call, otherwise try to get the result in
4063 TARGET, if convenient (and in mode MODE, if that's convenient). */
4066 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4068 if (!validate_arglist (exp,
4069 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4073 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4074 CALL_EXPR_ARG (exp, 1),
4075 CALL_EXPR_ARG (exp, 2));
4077 return expand_expr (result, target, mode, EXPAND_NORMAL);
4080 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4082 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4085 tree arg1 = CALL_EXPR_ARG (exp, 0);
4086 tree arg2 = CALL_EXPR_ARG (exp, 1);
4087 tree len = CALL_EXPR_ARG (exp, 2);
4090 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4092 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4093 enum machine_mode insn_mode;
4095 #ifdef HAVE_cmpmemsi
4097 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4100 #ifdef HAVE_cmpstrnsi
4102 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4107 /* If we don't have POINTER_TYPE, call the function. */
4108 if (arg1_align == 0 || arg2_align == 0)
4111 /* Make a place to write the result of the instruction. */
4114 && REG_P (result) && GET_MODE (result) == insn_mode
4115 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4116 result = gen_reg_rtx (insn_mode);
4118 arg1_rtx = get_memory_rtx (arg1, len);
4119 arg2_rtx = get_memory_rtx (arg2, len);
4120 arg3_rtx = expand_normal (len);
4122 /* Set MEM_SIZE as appropriate. */
4123 if (GET_CODE (arg3_rtx) == CONST_INT)
4125 set_mem_size (arg1_rtx, arg3_rtx);
4126 set_mem_size (arg2_rtx, arg3_rtx);
4129 #ifdef HAVE_cmpmemsi
4131 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4132 GEN_INT (MIN (arg1_align, arg2_align)));
4135 #ifdef HAVE_cmpstrnsi
4137 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4138 GEN_INT (MIN (arg1_align, arg2_align)));
4146 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4147 TYPE_MODE (integer_type_node), 3,
4148 XEXP (arg1_rtx, 0), Pmode,
4149 XEXP (arg2_rtx, 0), Pmode,
4150 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4151 TYPE_UNSIGNED (sizetype)),
4152 TYPE_MODE (sizetype));
4154 /* Return the value in the proper mode for this function. */
4155 mode = TYPE_MODE (TREE_TYPE (exp));
4156 if (GET_MODE (result) == mode)
4158 else if (target != 0)
4160 convert_move (target, result, 0);
4164 return convert_to_mode (mode, result, 0);
4171 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4172 if we failed the caller should emit a normal call, otherwise try to get
4173 the result in TARGET, if convenient. */
4176 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4178 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4182 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4183 CALL_EXPR_ARG (exp, 1));
4185 return expand_expr (result, target, mode, EXPAND_NORMAL);
4188 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4189 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4190 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4192 rtx arg1_rtx, arg2_rtx;
4193 rtx result, insn = NULL_RTX;
4195 tree arg1 = CALL_EXPR_ARG (exp, 0);
4196 tree arg2 = CALL_EXPR_ARG (exp, 1);
4199 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4201 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4203 /* If we don't have POINTER_TYPE, call the function. */
4204 if (arg1_align == 0 || arg2_align == 0)
4207 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4208 arg1 = builtin_save_expr (arg1);
4209 arg2 = builtin_save_expr (arg2);
4211 arg1_rtx = get_memory_rtx (arg1, NULL);
4212 arg2_rtx = get_memory_rtx (arg2, NULL);
4214 #ifdef HAVE_cmpstrsi
4215 /* Try to call cmpstrsi. */
4218 enum machine_mode insn_mode
4219 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4221 /* Make a place to write the result of the instruction. */
4224 && REG_P (result) && GET_MODE (result) == insn_mode
4225 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4226 result = gen_reg_rtx (insn_mode);
4228 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4229 GEN_INT (MIN (arg1_align, arg2_align)));
4232 #ifdef HAVE_cmpstrnsi
4233 /* Try to determine at least one length and call cmpstrnsi. */
4234 if (!insn && HAVE_cmpstrnsi)
4239 enum machine_mode insn_mode
4240 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4241 tree len1 = c_strlen (arg1, 1);
4242 tree len2 = c_strlen (arg2, 1);
4245 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4247 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4249 /* If we don't have a constant length for the first, use the length
4250 of the second, if we know it. We don't require a constant for
4251 this case; some cost analysis could be done if both are available
4252 but neither is constant. For now, assume they're equally cheap,
4253 unless one has side effects. If both strings have constant lengths,
4260 else if (TREE_SIDE_EFFECTS (len1))
4262 else if (TREE_SIDE_EFFECTS (len2))
4264 else if (TREE_CODE (len1) != INTEGER_CST)
4266 else if (TREE_CODE (len2) != INTEGER_CST)
4268 else if (tree_int_cst_lt (len1, len2))
4273 /* If both arguments have side effects, we cannot optimize. */
4274 if (!len || TREE_SIDE_EFFECTS (len))
4277 arg3_rtx = expand_normal (len);
4279 /* Make a place to write the result of the instruction. */
4282 && REG_P (result) && GET_MODE (result) == insn_mode
4283 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4284 result = gen_reg_rtx (insn_mode);
4286 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4287 GEN_INT (MIN (arg1_align, arg2_align)));
4295 /* Return the value in the proper mode for this function. */
4296 mode = TYPE_MODE (TREE_TYPE (exp));
4297 if (GET_MODE (result) == mode)
4300 return convert_to_mode (mode, result, 0);
4301 convert_move (target, result, 0);
4305 /* Expand the library call ourselves using a stabilized argument
4306 list to avoid re-evaluating the function's arguments twice. */
4307 #ifdef HAVE_cmpstrnsi
4310 fndecl = get_callee_fndecl (exp);
4311 fn = build_call_expr (fndecl, 2, arg1, arg2);
4312 if (TREE_CODE (fn) == CALL_EXPR)
4313 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4314 return expand_call (fn, target, target == const0_rtx);
4320 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4321 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4322 the result in TARGET, if convenient. */
4325 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4327 if (!validate_arglist (exp,
4328 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4332 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4333 CALL_EXPR_ARG (exp, 1),
4334 CALL_EXPR_ARG (exp, 2));
4336 return expand_expr (result, target, mode, EXPAND_NORMAL);
4339 /* If c_strlen can determine an expression for one of the string
4340 lengths, and it doesn't have side effects, then emit cmpstrnsi
4341 using length MIN(strlen(string)+1, arg3). */
4342 #ifdef HAVE_cmpstrnsi
4345 tree len, len1, len2;
4346 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4349 tree arg1 = CALL_EXPR_ARG (exp, 0);
4350 tree arg2 = CALL_EXPR_ARG (exp, 1);
4351 tree arg3 = CALL_EXPR_ARG (exp, 2);
4354 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4356 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4357 enum machine_mode insn_mode
4358 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4360 len1 = c_strlen (arg1, 1);
4361 len2 = c_strlen (arg2, 1);
4364 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4366 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4368 /* If we don't have a constant length for the first, use the length
4369 of the second, if we know it. We don't require a constant for
4370 this case; some cost analysis could be done if both are available
4371 but neither is constant. For now, assume they're equally cheap,
4372 unless one has side effects. If both strings have constant lengths,
4379 else if (TREE_SIDE_EFFECTS (len1))
4381 else if (TREE_SIDE_EFFECTS (len2))
4383 else if (TREE_CODE (len1) != INTEGER_CST)
4385 else if (TREE_CODE (len2) != INTEGER_CST)
4387 else if (tree_int_cst_lt (len1, len2))
4392 /* If both arguments have side effects, we cannot optimize. */
4393 if (!len || TREE_SIDE_EFFECTS (len))
4396 /* The actual new length parameter is MIN(len,arg3). */
4397 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4398 fold_convert (TREE_TYPE (len), arg3));
4400 /* If we don't have POINTER_TYPE, call the function. */
4401 if (arg1_align == 0 || arg2_align == 0)
4404 /* Make a place to write the result of the instruction. */
4407 && REG_P (result) && GET_MODE (result) == insn_mode
4408 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4409 result = gen_reg_rtx (insn_mode);
4411 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4412 arg1 = builtin_save_expr (arg1);
4413 arg2 = builtin_save_expr (arg2);
4414 len = builtin_save_expr (len);
4416 arg1_rtx = get_memory_rtx (arg1, len);
4417 arg2_rtx = get_memory_rtx (arg2, len);
4418 arg3_rtx = expand_normal (len);
4419 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4420 GEN_INT (MIN (arg1_align, arg2_align)));
4425 /* Return the value in the proper mode for this function. */
4426 mode = TYPE_MODE (TREE_TYPE (exp));
4427 if (GET_MODE (result) == mode)
4430 return convert_to_mode (mode, result, 0);
4431 convert_move (target, result, 0);
4435 /* Expand the library call ourselves using a stabilized argument
4436 list to avoid re-evaluating the function's arguments twice. */
4437 fndecl = get_callee_fndecl (exp);
4438 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4439 if (TREE_CODE (fn) == CALL_EXPR)
4440 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4441 return expand_call (fn, target, target == const0_rtx);
4447 /* Expand expression EXP, which is a call to the strcat builtin.
4448 Return NULL_RTX if we failed the caller should emit a normal call,
4449 otherwise try to get the result in TARGET, if convenient. */
4452 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4454 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4458 tree dst = CALL_EXPR_ARG (exp, 0);
4459 tree src = CALL_EXPR_ARG (exp, 1);
4460 const char *p = c_getstr (src);
4462 /* If the string length is zero, return the dst parameter. */
4463 if (p && *p == '\0')
4464 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4468 /* See if we can store by pieces into (dst + strlen(dst)). */
4469 tree newsrc, newdst,
4470 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4473 /* Stabilize the argument list. */
4474 newsrc = builtin_save_expr (src);
4475 dst = builtin_save_expr (dst);
4479 /* Create strlen (dst). */
4480 newdst = build_call_expr (strlen_fn, 1, dst);
4481 /* Create (dst p+ strlen (dst)). */
4483 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4484 newdst = builtin_save_expr (newdst);
4486 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4488 end_sequence (); /* Stop sequence. */
4492 /* Output the entire sequence. */
4493 insns = get_insns ();
4497 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4504 /* Expand expression EXP, which is a call to the strncat builtin.
4505 Return NULL_RTX if we failed the caller should emit a normal call,
4506 otherwise try to get the result in TARGET, if convenient. */
4509 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4511 if (validate_arglist (exp,
4512 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4514 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4515 CALL_EXPR_ARG (exp, 1),
4516 CALL_EXPR_ARG (exp, 2));
4518 return expand_expr (result, target, mode, EXPAND_NORMAL);
4523 /* Expand expression EXP, which is a call to the strspn builtin.
4524 Return NULL_RTX if we failed the caller should emit a normal call,
4525 otherwise try to get the result in TARGET, if convenient. */
4528 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4530 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4532 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4533 CALL_EXPR_ARG (exp, 1));
4535 return expand_expr (result, target, mode, EXPAND_NORMAL);
4540 /* Expand expression EXP, which is a call to the strcspn builtin.
4541 Return NULL_RTX if we failed the caller should emit a normal call,
4542 otherwise try to get the result in TARGET, if convenient. */
4545 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4547 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4549 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4550 CALL_EXPR_ARG (exp, 1));
4552 return expand_expr (result, target, mode, EXPAND_NORMAL);
4557 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4558 if that's convenient. */
4561 expand_builtin_saveregs (void)
4565 /* Don't do __builtin_saveregs more than once in a function.
4566 Save the result of the first call and reuse it. */
4567 if (saveregs_value != 0)
4568 return saveregs_value;
4570 /* When this function is called, it means that registers must be
4571 saved on entry to this function. So we migrate the call to the
4572 first insn of this function. */
4576 /* Do whatever the machine needs done in this case. */
4577 val = targetm.calls.expand_builtin_saveregs ();
4582 saveregs_value = val;
4584 /* Put the insns after the NOTE that starts the function. If this
4585 is inside a start_sequence, make the outer-level insn chain current, so
4586 the code is placed at the start of the function. */
4587 push_topmost_sequence ();
4588 emit_insn_after (seq, entry_of_function ());
4589 pop_topmost_sequence ();
4594 /* __builtin_args_info (N) returns word N of the arg space info
4595 for the current function. The number and meanings of words
4596 is controlled by the definition of CUMULATIVE_ARGS. */
4599 expand_builtin_args_info (tree exp)
4601 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4602 int *word_ptr = (int *) &crtl->args.info;
4604 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4606 if (call_expr_nargs (exp) != 0)
4608 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4609 error ("argument of %<__builtin_args_info%> must be constant");
4612 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4614 if (wordnum < 0 || wordnum >= nwords)
4615 error ("argument of %<__builtin_args_info%> out of range");
4617 return GEN_INT (word_ptr[wordnum]);
4621 error ("missing argument in %<__builtin_args_info%>");
4626 /* Expand a call to __builtin_next_arg. */
4629 expand_builtin_next_arg (void)
4631 /* Checking arguments is already done in fold_builtin_next_arg
4632 that must be called before this function. */
4633 return expand_binop (ptr_mode, add_optab,
4634 crtl->args.internal_arg_pointer,
4635 crtl->args.arg_offset_rtx,
4636 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4639 /* Make it easier for the backends by protecting the valist argument
4640 from multiple evaluations. */
4643 stabilize_va_list (tree valist, int needs_lvalue)
4645 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4647 gcc_assert (vatype != NULL_TREE);
4649 if (TREE_CODE (vatype) == ARRAY_TYPE)
4651 if (TREE_SIDE_EFFECTS (valist))
4652 valist = save_expr (valist);
4654 /* For this case, the backends will be expecting a pointer to
4655 vatype, but it's possible we've actually been given an array
4656 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4658 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4660 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4661 valist = build_fold_addr_expr_with_type (valist, p1);
4670 if (! TREE_SIDE_EFFECTS (valist))
4673 pt = build_pointer_type (vatype);
4674 valist = fold_build1 (ADDR_EXPR, pt, valist);
4675 TREE_SIDE_EFFECTS (valist) = 1;
4678 if (TREE_SIDE_EFFECTS (valist))
4679 valist = save_expr (valist);
4680 valist = build_fold_indirect_ref (valist);
4686 /* The "standard" definition of va_list is void*. */
4689 std_build_builtin_va_list (void)
4691 return ptr_type_node;
4694 /* The "standard" abi va_list is va_list_type_node. */
4697 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4699 return va_list_type_node;
4702 /* The "standard" type of va_list is va_list_type_node. */
4705 std_canonical_va_list_type (tree type)
4709 if (INDIRECT_REF_P (type))
4710 type = TREE_TYPE (type);
4711 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4712 type = TREE_TYPE (type);
4713 wtype = va_list_type_node;
4715 /* Treat structure va_list types. */
4716 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4717 htype = TREE_TYPE (htype);
4718 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4720 /* If va_list is an array type, the argument may have decayed
4721 to a pointer type, e.g. by being passed to another function.
4722 In that case, unwrap both types so that we can compare the
4723 underlying records. */
4724 if (TREE_CODE (htype) == ARRAY_TYPE
4725 || POINTER_TYPE_P (htype))
4727 wtype = TREE_TYPE (wtype);
4728 htype = TREE_TYPE (htype);
4731 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4732 return va_list_type_node;
4737 /* The "standard" implementation of va_start: just assign `nextarg' to
4741 std_expand_builtin_va_start (tree valist, rtx nextarg)
4743 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4744 convert_move (va_r, nextarg, 0);
4747 /* Expand EXP, a call to __builtin_va_start. */
4750 expand_builtin_va_start (tree exp)
4755 if (call_expr_nargs (exp) < 2)
4757 error ("too few arguments to function %<va_start%>");
4761 if (fold_builtin_next_arg (exp, true))
4764 nextarg = expand_builtin_next_arg ();
4765 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4767 if (targetm.expand_builtin_va_start)
4768 targetm.expand_builtin_va_start (valist, nextarg);
4770 std_expand_builtin_va_start (valist, nextarg);
4775 /* The "standard" implementation of va_arg: read the value from the
4776 current (padded) address and increment by the (padded) size. */
4779 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4782 tree addr, t, type_size, rounded_size, valist_tmp;
4783 unsigned HOST_WIDE_INT align, boundary;
4786 #ifdef ARGS_GROW_DOWNWARD
4787 /* All of the alignment and movement below is for args-grow-up machines.
4788 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4789 implement their own specialized gimplify_va_arg_expr routines. */
4793 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4795 type = build_pointer_type (type);
4797 align = PARM_BOUNDARY / BITS_PER_UNIT;
4798 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4800 /* When we align parameter on stack for caller, if the parameter
4801 alignment is beyond PREFERRED_STACK_BOUNDARY, it will be
4802 aligned at PREFERRED_STACK_BOUNDARY. We will match callee
4803 here with caller. */
4804 if (boundary > PREFERRED_STACK_BOUNDARY)
4805 boundary = PREFERRED_STACK_BOUNDARY;
4807 boundary /= BITS_PER_UNIT;
4809 /* Hoist the valist value into a temporary for the moment. */
4810 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4812 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4813 requires greater alignment, we must perform dynamic alignment. */
4814 if (boundary > align
4815 && !integer_zerop (TYPE_SIZE (type)))
4817 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4818 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4819 valist_tmp, size_int (boundary - 1)));
4820 gimplify_and_add (t, pre_p);
4822 t = fold_convert (sizetype, valist_tmp);
4823 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4824 fold_convert (TREE_TYPE (valist),
4825 fold_build2 (BIT_AND_EXPR, sizetype, t,
4826 size_int (-boundary))));
4827 gimplify_and_add (t, pre_p);
4832 /* If the actual alignment is less than the alignment of the type,
4833 adjust the type accordingly so that we don't assume strict alignment
4834 when dereferencing the pointer. */
4835 boundary *= BITS_PER_UNIT;
4836 if (boundary < TYPE_ALIGN (type))
4838 type = build_variant_type_copy (type);
4839 TYPE_ALIGN (type) = boundary;
4842 /* Compute the rounded size of the type. */
4843 type_size = size_in_bytes (type);
4844 rounded_size = round_up (type_size, align);
4846 /* Reduce rounded_size so it's sharable with the postqueue. */
4847 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4851 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4853 /* Small args are padded downward. */
4854 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4855 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4856 size_binop (MINUS_EXPR, rounded_size, type_size));
4857 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4860 /* Compute new value for AP. */
4861 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4862 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4863 gimplify_and_add (t, pre_p);
4865 addr = fold_convert (build_pointer_type (type), addr);
4868 addr = build_va_arg_indirect_ref (addr);
4870 return build_va_arg_indirect_ref (addr);
4873 /* Build an indirect-ref expression over the given TREE, which represents a
4874 piece of a va_arg() expansion. */
4876 build_va_arg_indirect_ref (tree addr)
4878 addr = build_fold_indirect_ref (addr);
4880 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4886 /* Return a dummy expression of type TYPE in order to keep going after an
4890 dummy_object (tree type)
4892 tree t = build_int_cst (build_pointer_type (type), 0);
4893 return build1 (INDIRECT_REF, type, t);
4896 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4897 builtin function, but a very special sort of operator. */
4899 enum gimplify_status
4900 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4902 tree promoted_type, have_va_type;
4903 tree valist = TREE_OPERAND (*expr_p, 0);
4904 tree type = TREE_TYPE (*expr_p);
4907 /* Verify that valist is of the proper type. */
4908 have_va_type = TREE_TYPE (valist);
4909 if (have_va_type == error_mark_node)
4911 have_va_type = targetm.canonical_va_list_type (have_va_type);
4913 if (have_va_type == NULL_TREE)
4915 error ("first argument to %<va_arg%> not of type %<va_list%>");
4919 /* Generate a diagnostic for requesting data of a type that cannot
4920 be passed through `...' due to type promotion at the call site. */
4921 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4924 static bool gave_help;
4926 /* Unfortunately, this is merely undefined, rather than a constraint
4927 violation, so we cannot make this an error. If this call is never
4928 executed, the program is still strictly conforming. */
4929 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4930 type, promoted_type);
4934 inform ("(so you should pass %qT not %qT to %<va_arg%>)",
4935 promoted_type, type);
4938 /* We can, however, treat "undefined" any way we please.
4939 Call abort to encourage the user to fix the program. */
4940 inform ("if this code is reached, the program will abort");
4941 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4942 gimplify_and_add (t, pre_p);
4944 /* This is dead code, but go ahead and finish so that the
4945 mode of the result comes out right. */
4946 *expr_p = dummy_object (type);
4951 /* Make it easier for the backends by protecting the valist argument
4952 from multiple evaluations. */
4953 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4955 /* For this case, the backends will be expecting a pointer to
4956 TREE_TYPE (abi), but it's possible we've
4957 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4959 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4961 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4962 valist = build_fold_addr_expr_with_type (valist, p1);
4965 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4968 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4970 if (!targetm.gimplify_va_arg_expr)
4971 /* FIXME: Once most targets are converted we should merely
4972 assert this is non-null. */
4975 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4980 /* Expand EXP, a call to __builtin_va_end. */
4983 expand_builtin_va_end (tree exp)
4985 tree valist = CALL_EXPR_ARG (exp, 0);
4987 /* Evaluate for side effects, if needed. I hate macros that don't
4989 if (TREE_SIDE_EFFECTS (valist))
4990 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4995 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4996 builtin rather than just as an assignment in stdarg.h because of the
4997 nastiness of array-type va_list types. */
5000 expand_builtin_va_copy (tree exp)
5004 dst = CALL_EXPR_ARG (exp, 0);
5005 src = CALL_EXPR_ARG (exp, 1);
5007 dst = stabilize_va_list (dst, 1);
5008 src = stabilize_va_list (src, 0);
5010 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5012 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5014 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5015 TREE_SIDE_EFFECTS (t) = 1;
5016 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5020 rtx dstb, srcb, size;
5022 /* Evaluate to pointers. */
5023 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5024 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5025 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5026 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5028 dstb = convert_memory_address (Pmode, dstb);
5029 srcb = convert_memory_address (Pmode, srcb);
5031 /* "Dereference" to BLKmode memories. */
5032 dstb = gen_rtx_MEM (BLKmode, dstb);
5033 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5034 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5035 srcb = gen_rtx_MEM (BLKmode, srcb);
5036 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5037 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5040 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5046 /* Expand a call to one of the builtin functions __builtin_frame_address or
5047 __builtin_return_address. */
5050 expand_builtin_frame_address (tree fndecl, tree exp)
5052 /* The argument must be a nonnegative integer constant.
5053 It counts the number of frames to scan up the stack.
5054 The value is the return address saved in that frame. */
5055 if (call_expr_nargs (exp) == 0)
5056 /* Warning about missing arg was already issued. */
5058 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5060 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5061 error ("invalid argument to %<__builtin_frame_address%>");
5063 error ("invalid argument to %<__builtin_return_address%>");
5069 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5070 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5072 /* Some ports cannot access arbitrary stack frames. */
5075 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5076 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5078 warning (0, "unsupported argument to %<__builtin_return_address%>");
5082 /* For __builtin_frame_address, return what we've got. */
5083 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5087 && ! CONSTANT_P (tem))
5088 tem = copy_to_mode_reg (Pmode, tem);
5093 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5094 we failed and the caller should emit a normal call, otherwise try to get
5095 the result in TARGET, if convenient. */
5098 expand_builtin_alloca (tree exp, rtx target)
5103 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5104 should always expand to function calls. These can be intercepted
5109 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5112 /* Compute the argument. */
5113 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5115 /* Allocate the desired space. */
5116 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5117 result = convert_memory_address (ptr_mode, result);
5122 /* Expand a call to a bswap builtin with argument ARG0. MODE
5123 is the mode to expand with. */
5126 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5128 enum machine_mode mode;
5132 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5135 arg = CALL_EXPR_ARG (exp, 0);
5136 mode = TYPE_MODE (TREE_TYPE (arg));
5137 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5139 target = expand_unop (mode, bswap_optab, op0, target, 1);
5141 gcc_assert (target);
5143 return convert_to_mode (mode, target, 0);
5146 /* Expand a call to a unary builtin in EXP.
5147 Return NULL_RTX if a normal call should be emitted rather than expanding the
5148 function in-line. If convenient, the result should be placed in TARGET.
5149 SUBTARGET may be used as the target for computing one of EXP's operands. */
5152 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5153 rtx subtarget, optab op_optab)
5157 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5160 /* Compute the argument. */
5161 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5162 VOIDmode, EXPAND_NORMAL);
5163 /* Compute op, into TARGET if possible.
5164 Set TARGET to wherever the result comes back. */
5165 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5166 op_optab, op0, target, 1);
5167 gcc_assert (target);
5169 return convert_to_mode (target_mode, target, 0);
5172 /* If the string passed to fputs is a constant and is one character
5173 long, we attempt to transform this call into __builtin_fputc(). */
5176 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5178 /* Verify the arguments in the original call. */
5179 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5181 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5182 CALL_EXPR_ARG (exp, 1),
5183 (target == const0_rtx),
5184 unlocked, NULL_TREE);
5186 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5191 /* Expand a call to __builtin_expect. We just return our argument
5192 as the builtin_expect semantic should've been already executed by
5193 tree branch prediction pass. */
5196 expand_builtin_expect (tree exp, rtx target)
5200 if (call_expr_nargs (exp) < 2)
5202 arg = CALL_EXPR_ARG (exp, 0);
5203 c = CALL_EXPR_ARG (exp, 1);
5205 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5206 /* When guessing was done, the hints should be already stripped away. */
5207 gcc_assert (!flag_guess_branch_prob
5208 || optimize == 0 || errorcount || sorrycount);
5213 expand_builtin_trap (void)
5217 emit_insn (gen_trap ());
5220 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5224 /* Expand EXP, a call to fabs, fabsf or fabsl.
5225 Return NULL_RTX if a normal call should be emitted rather than expanding
5226 the function inline. If convenient, the result should be placed
5227 in TARGET. SUBTARGET may be used as the target for computing
5231 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5233 enum machine_mode mode;
5237 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5240 arg = CALL_EXPR_ARG (exp, 0);
5241 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5242 mode = TYPE_MODE (TREE_TYPE (arg));
5243 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5244 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5247 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5248 Return NULL is a normal call should be emitted rather than expanding the
5249 function inline. If convenient, the result should be placed in TARGET.
5250 SUBTARGET may be used as the target for computing the operand. */
5253 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5258 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5261 arg = CALL_EXPR_ARG (exp, 0);
5262 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5264 arg = CALL_EXPR_ARG (exp, 1);
5265 op1 = expand_normal (arg);
5267 return expand_copysign (op0, op1, target);
5270 /* Create a new constant string literal and return a char* pointer to it.
5271 The STRING_CST value is the LEN characters at STR. */
5273 build_string_literal (int len, const char *str)
5275 tree t, elem, index, type;
5277 t = build_string (len, str);
5278 elem = build_type_variant (char_type_node, 1, 0);
5279 index = build_index_type (size_int (len - 1));
5280 type = build_array_type (elem, index);
5281 TREE_TYPE (t) = type;
5282 TREE_CONSTANT (t) = 1;
5283 TREE_READONLY (t) = 1;
5284 TREE_STATIC (t) = 1;
5286 type = build_pointer_type (elem);
5287 t = build1 (ADDR_EXPR, type,
5288 build4 (ARRAY_REF, elem,
5289 t, integer_zero_node, NULL_TREE, NULL_TREE));
5293 /* Expand EXP, a call to printf or printf_unlocked.
5294 Return NULL_RTX if a normal call should be emitted rather than transforming
5295 the function inline. If convenient, the result should be placed in
5296 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5299 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5302 /* If we're using an unlocked function, assume the other unlocked
5303 functions exist explicitly. */
5304 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5305 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5306 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5307 : implicit_built_in_decls[BUILT_IN_PUTS];
5308 const char *fmt_str;
5311 int nargs = call_expr_nargs (exp);
5313 /* If the return value is used, don't do the transformation. */
5314 if (target != const0_rtx)
5317 /* Verify the required arguments in the original call. */
5320 fmt = CALL_EXPR_ARG (exp, 0);
5321 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5324 /* Check whether the format is a literal string constant. */
5325 fmt_str = c_getstr (fmt);
5326 if (fmt_str == NULL)
5329 if (!init_target_chars ())
5332 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5333 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5336 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5339 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5341 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5342 else if (strcmp (fmt_str, target_percent_c) == 0)
5345 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5348 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5352 /* We can't handle anything else with % args or %% ... yet. */
5353 if (strchr (fmt_str, target_percent))
5359 /* If the format specifier was "", printf does nothing. */
5360 if (fmt_str[0] == '\0')
5362 /* If the format specifier has length of 1, call putchar. */
5363 if (fmt_str[1] == '\0')
5365 /* Given printf("c"), (where c is any one character,)
5366 convert "c"[0] to an int and pass that to the replacement
5368 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5370 fn = build_call_expr (fn_putchar, 1, arg);
5374 /* If the format specifier was "string\n", call puts("string"). */
5375 size_t len = strlen (fmt_str);
5376 if ((unsigned char)fmt_str[len - 1] == target_newline)
5378 /* Create a NUL-terminated string that's one char shorter
5379 than the original, stripping off the trailing '\n'. */
5380 char *newstr = XALLOCAVEC (char, len);
5381 memcpy (newstr, fmt_str, len - 1);
5382 newstr[len - 1] = 0;
5383 arg = build_string_literal (len, newstr);
5385 fn = build_call_expr (fn_puts, 1, arg);
5388 /* We'd like to arrange to call fputs(string,stdout) here,
5389 but we need stdout and don't have a way to get it yet. */
5396 if (TREE_CODE (fn) == CALL_EXPR)
5397 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5398 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5401 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5402 Return NULL_RTX if a normal call should be emitted rather than transforming
5403 the function inline. If convenient, the result should be placed in
5404 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5407 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5410 /* If we're using an unlocked function, assume the other unlocked
5411 functions exist explicitly. */
5412 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5413 : implicit_built_in_decls[BUILT_IN_FPUTC];
5414 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5415 : implicit_built_in_decls[BUILT_IN_FPUTS];
5416 const char *fmt_str;
5419 int nargs = call_expr_nargs (exp);
5421 /* If the return value is used, don't do the transformation. */
5422 if (target != const0_rtx)
5425 /* Verify the required arguments in the original call. */
5428 fp = CALL_EXPR_ARG (exp, 0);
5429 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5431 fmt = CALL_EXPR_ARG (exp, 1);
5432 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5435 /* Check whether the format is a literal string constant. */
5436 fmt_str = c_getstr (fmt);
5437 if (fmt_str == NULL)
5440 if (!init_target_chars ())
5443 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5444 if (strcmp (fmt_str, target_percent_s) == 0)
5447 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5449 arg = CALL_EXPR_ARG (exp, 2);
5451 fn = build_call_expr (fn_fputs, 2, arg, fp);
5453 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5454 else if (strcmp (fmt_str, target_percent_c) == 0)
5457 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5459 arg = CALL_EXPR_ARG (exp, 2);
5461 fn = build_call_expr (fn_fputc, 2, arg, fp);
5465 /* We can't handle anything else with % args or %% ... yet. */
5466 if (strchr (fmt_str, target_percent))
5472 /* If the format specifier was "", fprintf does nothing. */
5473 if (fmt_str[0] == '\0')
5475 /* Evaluate and ignore FILE* argument for side-effects. */
5476 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5480 /* When "string" doesn't contain %, replace all cases of
5481 fprintf(stream,string) with fputs(string,stream). The fputs
5482 builtin will take care of special cases like length == 1. */
5484 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5489 if (TREE_CODE (fn) == CALL_EXPR)
5490 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5491 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5494 /* Expand a call EXP to sprintf. Return NULL_RTX if
5495 a normal call should be emitted rather than expanding the function
5496 inline. If convenient, the result should be placed in TARGET with
5500 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5503 const char *fmt_str;
5504 int nargs = call_expr_nargs (exp);
5506 /* Verify the required arguments in the original call. */
5509 dest = CALL_EXPR_ARG (exp, 0);
5510 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5512 fmt = CALL_EXPR_ARG (exp, 0);
5513 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5516 /* Check whether the format is a literal string constant. */
5517 fmt_str = c_getstr (fmt);
5518 if (fmt_str == NULL)
5521 if (!init_target_chars ())
5524 /* If the format doesn't contain % args or %%, use strcpy. */
5525 if (strchr (fmt_str, target_percent) == 0)
5527 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5530 if ((nargs > 2) || ! fn)
5532 expand_expr (build_call_expr (fn, 2, dest, fmt),
5533 const0_rtx, VOIDmode, EXPAND_NORMAL);
5534 if (target == const0_rtx)
5536 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5537 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5539 /* If the format is "%s", use strcpy if the result isn't used. */
5540 else if (strcmp (fmt_str, target_percent_s) == 0)
5543 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5549 arg = CALL_EXPR_ARG (exp, 2);
5550 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5553 if (target != const0_rtx)
5555 len = c_strlen (arg, 1);
5556 if (! len || TREE_CODE (len) != INTEGER_CST)
5562 expand_expr (build_call_expr (fn, 2, dest, arg),
5563 const0_rtx, VOIDmode, EXPAND_NORMAL);
5565 if (target == const0_rtx)
5567 return expand_expr (len, target, mode, EXPAND_NORMAL);
5573 /* Expand a call to either the entry or exit function profiler. */
5576 expand_builtin_profile_func (bool exitp)
5580 this = DECL_RTL (current_function_decl);
5581 gcc_assert (MEM_P (this));
5582 this = XEXP (this, 0);
5585 which = profile_function_exit_libfunc;
5587 which = profile_function_entry_libfunc;
5589 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5590 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5597 /* Expand a call to __builtin___clear_cache. */
5600 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5602 #ifndef HAVE_clear_cache
5603 #ifdef CLEAR_INSN_CACHE
5604 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5605 does something. Just do the default expansion to a call to
5609 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5610 does nothing. There is no need to call it. Do nothing. */
5612 #endif /* CLEAR_INSN_CACHE */
5614 /* We have a "clear_cache" insn, and it will handle everything. */
5616 rtx begin_rtx, end_rtx;
5617 enum insn_code icode;
5619 /* We must not expand to a library call. If we did, any
5620 fallback library function in libgcc that might contain a call to
5621 __builtin___clear_cache() would recurse infinitely. */
5622 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5624 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5628 if (HAVE_clear_cache)
5630 icode = CODE_FOR_clear_cache;
5632 begin = CALL_EXPR_ARG (exp, 0);
5633 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5634 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5635 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5636 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5638 end = CALL_EXPR_ARG (exp, 1);
5639 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5640 end_rtx = convert_memory_address (Pmode, end_rtx);
5641 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5642 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5644 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5647 #endif /* HAVE_clear_cache */
5650 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5653 round_trampoline_addr (rtx tramp)
5655 rtx temp, addend, mask;
5657 /* If we don't need too much alignment, we'll have been guaranteed
5658 proper alignment by get_trampoline_type. */
5659 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5662 /* Round address up to desired boundary. */
5663 temp = gen_reg_rtx (Pmode);
5664 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5665 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5667 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5668 temp, 0, OPTAB_LIB_WIDEN);
5669 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5670 temp, 0, OPTAB_LIB_WIDEN);
5676 expand_builtin_init_trampoline (tree exp)
5678 tree t_tramp, t_func, t_chain;
5679 rtx r_tramp, r_func, r_chain;
5680 #ifdef TRAMPOLINE_TEMPLATE
5684 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5685 POINTER_TYPE, VOID_TYPE))
5688 t_tramp = CALL_EXPR_ARG (exp, 0);
5689 t_func = CALL_EXPR_ARG (exp, 1);
5690 t_chain = CALL_EXPR_ARG (exp, 2);
5692 r_tramp = expand_normal (t_tramp);
5693 r_func = expand_normal (t_func);
5694 r_chain = expand_normal (t_chain);
5696 /* Generate insns to initialize the trampoline. */
5697 r_tramp = round_trampoline_addr (r_tramp);
5698 #ifdef TRAMPOLINE_TEMPLATE
5699 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5700 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5701 emit_block_move (blktramp, assemble_trampoline_template (),
5702 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5704 trampolines_created = 1;
5705 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5711 expand_builtin_adjust_trampoline (tree exp)
5715 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5718 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5719 tramp = round_trampoline_addr (tramp);
5720 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5721 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5727 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5728 function. The function first checks whether the back end provides
5729 an insn to implement signbit for the respective mode. If not, it
5730 checks whether the floating point format of the value is such that
5731 the sign bit can be extracted. If that is not the case, the
5732 function returns NULL_RTX to indicate that a normal call should be
5733 emitted rather than expanding the function in-line. EXP is the
5734 expression that is a call to the builtin function; if convenient,
5735 the result should be placed in TARGET. */
5737 expand_builtin_signbit (tree exp, rtx target)
5739 const struct real_format *fmt;
5740 enum machine_mode fmode, imode, rmode;
5741 HOST_WIDE_INT hi, lo;
5744 enum insn_code icode;
5747 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5750 arg = CALL_EXPR_ARG (exp, 0);
5751 fmode = TYPE_MODE (TREE_TYPE (arg));
5752 rmode = TYPE_MODE (TREE_TYPE (exp));
5753 fmt = REAL_MODE_FORMAT (fmode);
5755 arg = builtin_save_expr (arg);
5757 /* Expand the argument yielding a RTX expression. */
5758 temp = expand_normal (arg);
5760 /* Check if the back end provides an insn that handles signbit for the
5762 icode = signbit_optab->handlers [(int) fmode].insn_code;
5763 if (icode != CODE_FOR_nothing)
5765 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5766 emit_unop_insn (icode, target, temp, UNKNOWN);
5770 /* For floating point formats without a sign bit, implement signbit
5772 bitpos = fmt->signbit_ro;
5775 /* But we can't do this if the format supports signed zero. */
5776 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5779 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5780 build_real (TREE_TYPE (arg), dconst0));
5781 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5784 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5786 imode = int_mode_for_mode (fmode);
5787 if (imode == BLKmode)
5789 temp = gen_lowpart (imode, temp);
5794 /* Handle targets with different FP word orders. */
5795 if (FLOAT_WORDS_BIG_ENDIAN)
5796 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5798 word = bitpos / BITS_PER_WORD;
5799 temp = operand_subword_force (temp, word, fmode);
5800 bitpos = bitpos % BITS_PER_WORD;
5803 /* Force the intermediate word_mode (or narrower) result into a
5804 register. This avoids attempting to create paradoxical SUBREGs
5805 of floating point modes below. */
5806 temp = force_reg (imode, temp);
5808 /* If the bitpos is within the "result mode" lowpart, the operation
5809 can be implement with a single bitwise AND. Otherwise, we need
5810 a right shift and an AND. */
5812 if (bitpos < GET_MODE_BITSIZE (rmode))
5814 if (bitpos < HOST_BITS_PER_WIDE_INT)
5817 lo = (HOST_WIDE_INT) 1 << bitpos;
5821 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5825 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5826 temp = gen_lowpart (rmode, temp);
5827 temp = expand_binop (rmode, and_optab, temp,
5828 immed_double_const (lo, hi, rmode),
5829 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5833 /* Perform a logical right shift to place the signbit in the least
5834 significant bit, then truncate the result to the desired mode
5835 and mask just this bit. */
5836 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5837 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5838 temp = gen_lowpart (rmode, temp);
5839 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5840 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5846 /* Expand fork or exec calls. TARGET is the desired target of the
5847 call. EXP is the call. FN is the
5848 identificator of the actual function. IGNORE is nonzero if the
5849 value is to be ignored. */
5852 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5857 /* If we are not profiling, just call the function. */
5858 if (!profile_arc_flag)
5861 /* Otherwise call the wrapper. This should be equivalent for the rest of
5862 compiler, so the code does not diverge, and the wrapper may run the
5863 code necessary for keeping the profiling sane. */
5865 switch (DECL_FUNCTION_CODE (fn))
5868 id = get_identifier ("__gcov_fork");
5871 case BUILT_IN_EXECL:
5872 id = get_identifier ("__gcov_execl");
5875 case BUILT_IN_EXECV:
5876 id = get_identifier ("__gcov_execv");
5879 case BUILT_IN_EXECLP:
5880 id = get_identifier ("__gcov_execlp");
5883 case BUILT_IN_EXECLE:
5884 id = get_identifier ("__gcov_execle");
5887 case BUILT_IN_EXECVP:
5888 id = get_identifier ("__gcov_execvp");
5891 case BUILT_IN_EXECVE:
5892 id = get_identifier ("__gcov_execve");
5899 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5900 DECL_EXTERNAL (decl) = 1;
5901 TREE_PUBLIC (decl) = 1;
5902 DECL_ARTIFICIAL (decl) = 1;
5903 TREE_NOTHROW (decl) = 1;
5904 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5905 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5906 call = rewrite_call_expr (exp, 0, decl, 0);
5907 return expand_call (call, target, ignore);
5912 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5913 the pointer in these functions is void*, the tree optimizers may remove
5914 casts. The mode computed in expand_builtin isn't reliable either, due
5915 to __sync_bool_compare_and_swap.
5917 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5918 group of builtins. This gives us log2 of the mode size. */
5920 static inline enum machine_mode
5921 get_builtin_sync_mode (int fcode_diff)
5923 /* The size is not negotiable, so ask not to get BLKmode in return
5924 if the target indicates that a smaller size would be better. */
5925 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5928 /* Expand the memory expression LOC and return the appropriate memory operand
5929 for the builtin_sync operations. */
5932 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5936 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5938 /* Note that we explicitly do not want any alias information for this
5939 memory, so that we kill all other live memories. Otherwise we don't
5940 satisfy the full barrier semantics of the intrinsic. */
5941 mem = validize_mem (gen_rtx_MEM (mode, addr));
5943 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5944 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5945 MEM_VOLATILE_P (mem) = 1;
5950 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5951 EXP is the CALL_EXPR. CODE is the rtx code
5952 that corresponds to the arithmetic or logical operation from the name;
5953 an exception here is that NOT actually means NAND. TARGET is an optional
5954 place for us to store the results; AFTER is true if this is the
5955 fetch_and_xxx form. IGNORE is true if we don't actually care about
5956 the result of the operation at all. */
5959 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5960 enum rtx_code code, bool after,
5961 rtx target, bool ignore)
5964 enum machine_mode old_mode;
5966 /* Expand the operands. */
5967 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5969 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5970 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5971 of CONST_INTs, where we know the old_mode only from the call argument. */
5972 old_mode = GET_MODE (val);
5973 if (old_mode == VOIDmode)
5974 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5975 val = convert_modes (mode, old_mode, val, 1);
5978 return expand_sync_operation (mem, val, code);
5980 return expand_sync_fetch_operation (mem, val, code, after, target);
5983 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5984 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5985 true if this is the boolean form. TARGET is a place for us to store the
5986 results; this is NOT optional if IS_BOOL is true. */
5989 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5990 bool is_bool, rtx target)
5992 rtx old_val, new_val, mem;
5993 enum machine_mode old_mode;
5995 /* Expand the operands. */
5996 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5999 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
6000 mode, EXPAND_NORMAL);
6001 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6002 of CONST_INTs, where we know the old_mode only from the call argument. */
6003 old_mode = GET_MODE (old_val);
6004 if (old_mode == VOIDmode)
6005 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6006 old_val = convert_modes (mode, old_mode, old_val, 1);
6008 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
6009 mode, EXPAND_NORMAL);
6010 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6011 of CONST_INTs, where we know the old_mode only from the call argument. */
6012 old_mode = GET_MODE (new_val);
6013 if (old_mode == VOIDmode)
6014 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
6015 new_val = convert_modes (mode, old_mode, new_val, 1);
6018 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
6020 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6023 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6024 general form is actually an atomic exchange, and some targets only
6025 support a reduced form with the second argument being a constant 1.
6026 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6030 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6034 enum machine_mode old_mode;
6036 /* Expand the operands. */
6037 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6038 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6039 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6040 of CONST_INTs, where we know the old_mode only from the call argument. */
6041 old_mode = GET_MODE (val);
6042 if (old_mode == VOIDmode)
6043 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6044 val = convert_modes (mode, old_mode, val, 1);
6046 return expand_sync_lock_test_and_set (mem, val, target);
6049 /* Expand the __sync_synchronize intrinsic. */
6052 expand_builtin_synchronize (void)
6056 #ifdef HAVE_memory_barrier
6057 if (HAVE_memory_barrier)
6059 emit_insn (gen_memory_barrier ());
6064 if (synchronize_libfunc != NULL_RTX)
6066 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6070 /* If no explicit memory barrier instruction is available, create an
6071 empty asm stmt with a memory clobber. */
6072 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6073 tree_cons (NULL, build_string (6, "memory"), NULL));
6074 ASM_VOLATILE_P (x) = 1;
6075 expand_asm_expr (x);
6078 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6081 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6083 enum insn_code icode;
6085 rtx val = const0_rtx;
6087 /* Expand the operands. */
6088 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6090 /* If there is an explicit operation in the md file, use it. */
6091 icode = sync_lock_release[mode];
6092 if (icode != CODE_FOR_nothing)
6094 if (!insn_data[icode].operand[1].predicate (val, mode))
6095 val = force_reg (mode, val);
6097 insn = GEN_FCN (icode) (mem, val);
6105 /* Otherwise we can implement this operation by emitting a barrier
6106 followed by a store of zero. */
6107 expand_builtin_synchronize ();
6108 emit_move_insn (mem, val);
6111 /* Expand an expression EXP that calls a built-in function,
6112 with result going to TARGET if that's convenient
6113 (and in mode MODE if that's convenient).
6114 SUBTARGET may be used as the target for computing one of EXP's operands.
6115 IGNORE is nonzero if the value is to be ignored. */
6118 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6121 tree fndecl = get_callee_fndecl (exp);
6122 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6123 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6125 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6126 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6128 /* When not optimizing, generate calls to library functions for a certain
6131 && !called_as_built_in (fndecl)
6132 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6133 && fcode != BUILT_IN_ALLOCA)
6134 return expand_call (exp, target, ignore);
6136 /* The built-in function expanders test for target == const0_rtx
6137 to determine whether the function's result will be ignored. */
6139 target = const0_rtx;
6141 /* If the result of a pure or const built-in function is ignored, and
6142 none of its arguments are volatile, we can avoid expanding the
6143 built-in call and just evaluate the arguments for side-effects. */
6144 if (target == const0_rtx
6145 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6147 bool volatilep = false;
6149 call_expr_arg_iterator iter;
6151 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6152 if (TREE_THIS_VOLATILE (arg))
6160 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6161 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6168 CASE_FLT_FN (BUILT_IN_FABS):
6169 target = expand_builtin_fabs (exp, target, subtarget);
6174 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6175 target = expand_builtin_copysign (exp, target, subtarget);
6180 /* Just do a normal library call if we were unable to fold
6182 CASE_FLT_FN (BUILT_IN_CABS):
6185 CASE_FLT_FN (BUILT_IN_EXP):
6186 CASE_FLT_FN (BUILT_IN_EXP10):
6187 CASE_FLT_FN (BUILT_IN_POW10):
6188 CASE_FLT_FN (BUILT_IN_EXP2):
6189 CASE_FLT_FN (BUILT_IN_EXPM1):
6190 CASE_FLT_FN (BUILT_IN_LOGB):
6191 CASE_FLT_FN (BUILT_IN_LOG):
6192 CASE_FLT_FN (BUILT_IN_LOG10):
6193 CASE_FLT_FN (BUILT_IN_LOG2):
6194 CASE_FLT_FN (BUILT_IN_LOG1P):
6195 CASE_FLT_FN (BUILT_IN_TAN):
6196 CASE_FLT_FN (BUILT_IN_ASIN):
6197 CASE_FLT_FN (BUILT_IN_ACOS):
6198 CASE_FLT_FN (BUILT_IN_ATAN):
6199 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6200 because of possible accuracy problems. */
6201 if (! flag_unsafe_math_optimizations)
6203 CASE_FLT_FN (BUILT_IN_SQRT):
6204 CASE_FLT_FN (BUILT_IN_FLOOR):
6205 CASE_FLT_FN (BUILT_IN_CEIL):
6206 CASE_FLT_FN (BUILT_IN_TRUNC):
6207 CASE_FLT_FN (BUILT_IN_ROUND):
6208 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6209 CASE_FLT_FN (BUILT_IN_RINT):
6210 target = expand_builtin_mathfn (exp, target, subtarget);
6215 CASE_FLT_FN (BUILT_IN_ILOGB):
6216 if (! flag_unsafe_math_optimizations)
6218 CASE_FLT_FN (BUILT_IN_ISINF):
6219 CASE_FLT_FN (BUILT_IN_FINITE):
6220 case BUILT_IN_ISFINITE:
6221 case BUILT_IN_ISNORMAL:
6222 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6227 CASE_FLT_FN (BUILT_IN_LCEIL):
6228 CASE_FLT_FN (BUILT_IN_LLCEIL):
6229 CASE_FLT_FN (BUILT_IN_LFLOOR):
6230 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6231 target = expand_builtin_int_roundingfn (exp, target);
6236 CASE_FLT_FN (BUILT_IN_LRINT):
6237 CASE_FLT_FN (BUILT_IN_LLRINT):
6238 CASE_FLT_FN (BUILT_IN_LROUND):
6239 CASE_FLT_FN (BUILT_IN_LLROUND):
6240 target = expand_builtin_int_roundingfn_2 (exp, target);
6245 CASE_FLT_FN (BUILT_IN_POW):
6246 target = expand_builtin_pow (exp, target, subtarget);
6251 CASE_FLT_FN (BUILT_IN_POWI):
6252 target = expand_builtin_powi (exp, target, subtarget);
6257 CASE_FLT_FN (BUILT_IN_ATAN2):
6258 CASE_FLT_FN (BUILT_IN_LDEXP):
6259 CASE_FLT_FN (BUILT_IN_SCALB):
6260 CASE_FLT_FN (BUILT_IN_SCALBN):
6261 CASE_FLT_FN (BUILT_IN_SCALBLN):
6262 if (! flag_unsafe_math_optimizations)
6265 CASE_FLT_FN (BUILT_IN_FMOD):
6266 CASE_FLT_FN (BUILT_IN_REMAINDER):
6267 CASE_FLT_FN (BUILT_IN_DREM):
6268 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6273 CASE_FLT_FN (BUILT_IN_CEXPI):
6274 target = expand_builtin_cexpi (exp, target, subtarget);
6275 gcc_assert (target);
6278 CASE_FLT_FN (BUILT_IN_SIN):
6279 CASE_FLT_FN (BUILT_IN_COS):
6280 if (! flag_unsafe_math_optimizations)
6282 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6287 CASE_FLT_FN (BUILT_IN_SINCOS):
6288 if (! flag_unsafe_math_optimizations)
6290 target = expand_builtin_sincos (exp);
6295 case BUILT_IN_APPLY_ARGS:
6296 return expand_builtin_apply_args ();
6298 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6299 FUNCTION with a copy of the parameters described by
6300 ARGUMENTS, and ARGSIZE. It returns a block of memory
6301 allocated on the stack into which is stored all the registers
6302 that might possibly be used for returning the result of a
6303 function. ARGUMENTS is the value returned by
6304 __builtin_apply_args. ARGSIZE is the number of bytes of
6305 arguments that must be copied. ??? How should this value be
6306 computed? We'll also need a safe worst case value for varargs
6308 case BUILT_IN_APPLY:
6309 if (!validate_arglist (exp, POINTER_TYPE,
6310 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6311 && !validate_arglist (exp, REFERENCE_TYPE,
6312 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6318 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6319 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6320 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6322 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6325 /* __builtin_return (RESULT) causes the function to return the
6326 value described by RESULT. RESULT is address of the block of
6327 memory returned by __builtin_apply. */
6328 case BUILT_IN_RETURN:
6329 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6330 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6333 case BUILT_IN_SAVEREGS:
6334 return expand_builtin_saveregs ();
6336 case BUILT_IN_ARGS_INFO:
6337 return expand_builtin_args_info (exp);
6339 case BUILT_IN_VA_ARG_PACK:
6340 /* All valid uses of __builtin_va_arg_pack () are removed during
6342 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6345 case BUILT_IN_VA_ARG_PACK_LEN:
6346 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6348 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6351 /* Return the address of the first anonymous stack arg. */
6352 case BUILT_IN_NEXT_ARG:
6353 if (fold_builtin_next_arg (exp, false))
6355 return expand_builtin_next_arg ();
6357 case BUILT_IN_CLEAR_CACHE:
6358 target = expand_builtin___clear_cache (exp);
6363 case BUILT_IN_CLASSIFY_TYPE:
6364 return expand_builtin_classify_type (exp);
6366 case BUILT_IN_CONSTANT_P:
6369 case BUILT_IN_FRAME_ADDRESS:
6370 case BUILT_IN_RETURN_ADDRESS:
6371 return expand_builtin_frame_address (fndecl, exp);
6373 /* Returns the address of the area where the structure is returned.
6375 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6376 if (call_expr_nargs (exp) != 0
6377 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6378 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6381 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6383 case BUILT_IN_ALLOCA:
6384 target = expand_builtin_alloca (exp, target);
6389 case BUILT_IN_STACK_SAVE:
6390 return expand_stack_save ();
6392 case BUILT_IN_STACK_RESTORE:
6393 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6396 case BUILT_IN_BSWAP32:
6397 case BUILT_IN_BSWAP64:
6398 target = expand_builtin_bswap (exp, target, subtarget);
6404 CASE_INT_FN (BUILT_IN_FFS):
6405 case BUILT_IN_FFSIMAX:
6406 target = expand_builtin_unop (target_mode, exp, target,
6407 subtarget, ffs_optab);
6412 CASE_INT_FN (BUILT_IN_CLZ):
6413 case BUILT_IN_CLZIMAX:
6414 target = expand_builtin_unop (target_mode, exp, target,
6415 subtarget, clz_optab);
6420 CASE_INT_FN (BUILT_IN_CTZ):
6421 case BUILT_IN_CTZIMAX:
6422 target = expand_builtin_unop (target_mode, exp, target,
6423 subtarget, ctz_optab);
6428 CASE_INT_FN (BUILT_IN_POPCOUNT):
6429 case BUILT_IN_POPCOUNTIMAX:
6430 target = expand_builtin_unop (target_mode, exp, target,
6431 subtarget, popcount_optab);
6436 CASE_INT_FN (BUILT_IN_PARITY):
6437 case BUILT_IN_PARITYIMAX:
6438 target = expand_builtin_unop (target_mode, exp, target,
6439 subtarget, parity_optab);
6444 case BUILT_IN_STRLEN:
6445 target = expand_builtin_strlen (exp, target, target_mode);
6450 case BUILT_IN_STRCPY:
6451 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6456 case BUILT_IN_STRNCPY:
6457 target = expand_builtin_strncpy (exp, target, mode);
6462 case BUILT_IN_STPCPY:
6463 target = expand_builtin_stpcpy (exp, target, mode);
6468 case BUILT_IN_STRCAT:
6469 target = expand_builtin_strcat (fndecl, exp, target, mode);
6474 case BUILT_IN_STRNCAT:
6475 target = expand_builtin_strncat (exp, target, mode);
6480 case BUILT_IN_STRSPN:
6481 target = expand_builtin_strspn (exp, target, mode);
6486 case BUILT_IN_STRCSPN:
6487 target = expand_builtin_strcspn (exp, target, mode);
6492 case BUILT_IN_STRSTR:
6493 target = expand_builtin_strstr (exp, target, mode);
6498 case BUILT_IN_STRPBRK:
6499 target = expand_builtin_strpbrk (exp, target, mode);
6504 case BUILT_IN_INDEX:
6505 case BUILT_IN_STRCHR:
6506 target = expand_builtin_strchr (exp, target, mode);
6511 case BUILT_IN_RINDEX:
6512 case BUILT_IN_STRRCHR:
6513 target = expand_builtin_strrchr (exp, target, mode);
6518 case BUILT_IN_MEMCPY:
6519 target = expand_builtin_memcpy (exp, target, mode);
6524 case BUILT_IN_MEMPCPY:
6525 target = expand_builtin_mempcpy (exp, target, mode);
6530 case BUILT_IN_MEMMOVE:
6531 target = expand_builtin_memmove (exp, target, mode, ignore);
6536 case BUILT_IN_BCOPY:
6537 target = expand_builtin_bcopy (exp, ignore);
6542 case BUILT_IN_MEMSET:
6543 target = expand_builtin_memset (exp, target, mode);
6548 case BUILT_IN_BZERO:
6549 target = expand_builtin_bzero (exp);
6554 case BUILT_IN_STRCMP:
6555 target = expand_builtin_strcmp (exp, target, mode);
6560 case BUILT_IN_STRNCMP:
6561 target = expand_builtin_strncmp (exp, target, mode);
6566 case BUILT_IN_MEMCHR:
6567 target = expand_builtin_memchr (exp, target, mode);
6573 case BUILT_IN_MEMCMP:
6574 target = expand_builtin_memcmp (exp, target, mode);
6579 case BUILT_IN_SETJMP:
6580 /* This should have been lowered to the builtins below. */
6583 case BUILT_IN_SETJMP_SETUP:
6584 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6585 and the receiver label. */
6586 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6588 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6589 VOIDmode, EXPAND_NORMAL);
6590 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6591 rtx label_r = label_rtx (label);
6593 /* This is copied from the handling of non-local gotos. */
6594 expand_builtin_setjmp_setup (buf_addr, label_r);
6595 nonlocal_goto_handler_labels
6596 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6597 nonlocal_goto_handler_labels);
6598 /* ??? Do not let expand_label treat us as such since we would
6599 not want to be both on the list of non-local labels and on
6600 the list of forced labels. */
6601 FORCED_LABEL (label) = 0;
6606 case BUILT_IN_SETJMP_DISPATCHER:
6607 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6608 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6610 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6611 rtx label_r = label_rtx (label);
6613 /* Remove the dispatcher label from the list of non-local labels
6614 since the receiver labels have been added to it above. */
6615 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6620 case BUILT_IN_SETJMP_RECEIVER:
6621 /* __builtin_setjmp_receiver is passed the receiver label. */
6622 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6624 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6625 rtx label_r = label_rtx (label);
6627 expand_builtin_setjmp_receiver (label_r);
6632 /* __builtin_longjmp is passed a pointer to an array of five words.
6633 It's similar to the C library longjmp function but works with
6634 __builtin_setjmp above. */
6635 case BUILT_IN_LONGJMP:
6636 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6638 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6639 VOIDmode, EXPAND_NORMAL);
6640 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6642 if (value != const1_rtx)
6644 error ("%<__builtin_longjmp%> second argument must be 1");
6648 expand_builtin_longjmp (buf_addr, value);
6653 case BUILT_IN_NONLOCAL_GOTO:
6654 target = expand_builtin_nonlocal_goto (exp);
6659 /* This updates the setjmp buffer that is its argument with the value
6660 of the current stack pointer. */
6661 case BUILT_IN_UPDATE_SETJMP_BUF:
6662 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6665 = expand_normal (CALL_EXPR_ARG (exp, 0));
6667 expand_builtin_update_setjmp_buf (buf_addr);
6673 expand_builtin_trap ();
6676 case BUILT_IN_PRINTF:
6677 target = expand_builtin_printf (exp, target, mode, false);
6682 case BUILT_IN_PRINTF_UNLOCKED:
6683 target = expand_builtin_printf (exp, target, mode, true);
6688 case BUILT_IN_FPUTS:
6689 target = expand_builtin_fputs (exp, target, false);
6693 case BUILT_IN_FPUTS_UNLOCKED:
6694 target = expand_builtin_fputs (exp, target, true);
6699 case BUILT_IN_FPRINTF:
6700 target = expand_builtin_fprintf (exp, target, mode, false);
6705 case BUILT_IN_FPRINTF_UNLOCKED:
6706 target = expand_builtin_fprintf (exp, target, mode, true);
6711 case BUILT_IN_SPRINTF:
6712 target = expand_builtin_sprintf (exp, target, mode);
6717 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6718 case BUILT_IN_SIGNBITD32:
6719 case BUILT_IN_SIGNBITD64:
6720 case BUILT_IN_SIGNBITD128:
6721 target = expand_builtin_signbit (exp, target);
6726 /* Various hooks for the DWARF 2 __throw routine. */
6727 case BUILT_IN_UNWIND_INIT:
6728 expand_builtin_unwind_init ();
6730 case BUILT_IN_DWARF_CFA:
6731 return virtual_cfa_rtx;
6732 #ifdef DWARF2_UNWIND_INFO
6733 case BUILT_IN_DWARF_SP_COLUMN:
6734 return expand_builtin_dwarf_sp_column ();
6735 case BUILT_IN_INIT_DWARF_REG_SIZES:
6736 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6739 case BUILT_IN_FROB_RETURN_ADDR:
6740 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6741 case BUILT_IN_EXTRACT_RETURN_ADDR:
6742 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6743 case BUILT_IN_EH_RETURN:
6744 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6745 CALL_EXPR_ARG (exp, 1));
6747 #ifdef EH_RETURN_DATA_REGNO
6748 case BUILT_IN_EH_RETURN_DATA_REGNO:
6749 return expand_builtin_eh_return_data_regno (exp);
6751 case BUILT_IN_EXTEND_POINTER:
6752 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6754 case BUILT_IN_VA_START:
6755 return expand_builtin_va_start (exp);
6756 case BUILT_IN_VA_END:
6757 return expand_builtin_va_end (exp);
6758 case BUILT_IN_VA_COPY:
6759 return expand_builtin_va_copy (exp);
6760 case BUILT_IN_EXPECT:
6761 return expand_builtin_expect (exp, target);
6762 case BUILT_IN_PREFETCH:
6763 expand_builtin_prefetch (exp);
6766 case BUILT_IN_PROFILE_FUNC_ENTER:
6767 return expand_builtin_profile_func (false);
6768 case BUILT_IN_PROFILE_FUNC_EXIT:
6769 return expand_builtin_profile_func (true);
6771 case BUILT_IN_INIT_TRAMPOLINE:
6772 return expand_builtin_init_trampoline (exp);
6773 case BUILT_IN_ADJUST_TRAMPOLINE:
6774 return expand_builtin_adjust_trampoline (exp);
6777 case BUILT_IN_EXECL:
6778 case BUILT_IN_EXECV:
6779 case BUILT_IN_EXECLP:
6780 case BUILT_IN_EXECLE:
6781 case BUILT_IN_EXECVP:
6782 case BUILT_IN_EXECVE:
6783 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6788 case BUILT_IN_FETCH_AND_ADD_1:
6789 case BUILT_IN_FETCH_AND_ADD_2:
6790 case BUILT_IN_FETCH_AND_ADD_4:
6791 case BUILT_IN_FETCH_AND_ADD_8:
6792 case BUILT_IN_FETCH_AND_ADD_16:
6793 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6794 target = expand_builtin_sync_operation (mode, exp, PLUS,
6795 false, target, ignore);
6800 case BUILT_IN_FETCH_AND_SUB_1:
6801 case BUILT_IN_FETCH_AND_SUB_2:
6802 case BUILT_IN_FETCH_AND_SUB_4:
6803 case BUILT_IN_FETCH_AND_SUB_8:
6804 case BUILT_IN_FETCH_AND_SUB_16:
6805 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6806 target = expand_builtin_sync_operation (mode, exp, MINUS,
6807 false, target, ignore);
6812 case BUILT_IN_FETCH_AND_OR_1:
6813 case BUILT_IN_FETCH_AND_OR_2:
6814 case BUILT_IN_FETCH_AND_OR_4:
6815 case BUILT_IN_FETCH_AND_OR_8:
6816 case BUILT_IN_FETCH_AND_OR_16:
6817 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6818 target = expand_builtin_sync_operation (mode, exp, IOR,
6819 false, target, ignore);
6824 case BUILT_IN_FETCH_AND_AND_1:
6825 case BUILT_IN_FETCH_AND_AND_2:
6826 case BUILT_IN_FETCH_AND_AND_4:
6827 case BUILT_IN_FETCH_AND_AND_8:
6828 case BUILT_IN_FETCH_AND_AND_16:
6829 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6830 target = expand_builtin_sync_operation (mode, exp, AND,
6831 false, target, ignore);
6836 case BUILT_IN_FETCH_AND_XOR_1:
6837 case BUILT_IN_FETCH_AND_XOR_2:
6838 case BUILT_IN_FETCH_AND_XOR_4:
6839 case BUILT_IN_FETCH_AND_XOR_8:
6840 case BUILT_IN_FETCH_AND_XOR_16:
6841 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6842 target = expand_builtin_sync_operation (mode, exp, XOR,
6843 false, target, ignore);
6848 case BUILT_IN_FETCH_AND_NAND_1:
6849 case BUILT_IN_FETCH_AND_NAND_2:
6850 case BUILT_IN_FETCH_AND_NAND_4:
6851 case BUILT_IN_FETCH_AND_NAND_8:
6852 case BUILT_IN_FETCH_AND_NAND_16:
6853 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6854 target = expand_builtin_sync_operation (mode, exp, NOT,
6855 false, target, ignore);
6860 case BUILT_IN_ADD_AND_FETCH_1:
6861 case BUILT_IN_ADD_AND_FETCH_2:
6862 case BUILT_IN_ADD_AND_FETCH_4:
6863 case BUILT_IN_ADD_AND_FETCH_8:
6864 case BUILT_IN_ADD_AND_FETCH_16:
6865 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6866 target = expand_builtin_sync_operation (mode, exp, PLUS,
6867 true, target, ignore);
6872 case BUILT_IN_SUB_AND_FETCH_1:
6873 case BUILT_IN_SUB_AND_FETCH_2:
6874 case BUILT_IN_SUB_AND_FETCH_4:
6875 case BUILT_IN_SUB_AND_FETCH_8:
6876 case BUILT_IN_SUB_AND_FETCH_16:
6877 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6878 target = expand_builtin_sync_operation (mode, exp, MINUS,
6879 true, target, ignore);
6884 case BUILT_IN_OR_AND_FETCH_1:
6885 case BUILT_IN_OR_AND_FETCH_2:
6886 case BUILT_IN_OR_AND_FETCH_4:
6887 case BUILT_IN_OR_AND_FETCH_8:
6888 case BUILT_IN_OR_AND_FETCH_16:
6889 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6890 target = expand_builtin_sync_operation (mode, exp, IOR,
6891 true, target, ignore);
6896 case BUILT_IN_AND_AND_FETCH_1:
6897 case BUILT_IN_AND_AND_FETCH_2:
6898 case BUILT_IN_AND_AND_FETCH_4:
6899 case BUILT_IN_AND_AND_FETCH_8:
6900 case BUILT_IN_AND_AND_FETCH_16:
6901 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6902 target = expand_builtin_sync_operation (mode, exp, AND,
6903 true, target, ignore);
6908 case BUILT_IN_XOR_AND_FETCH_1:
6909 case BUILT_IN_XOR_AND_FETCH_2:
6910 case BUILT_IN_XOR_AND_FETCH_4:
6911 case BUILT_IN_XOR_AND_FETCH_8:
6912 case BUILT_IN_XOR_AND_FETCH_16:
6913 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6914 target = expand_builtin_sync_operation (mode, exp, XOR,
6915 true, target, ignore);
6920 case BUILT_IN_NAND_AND_FETCH_1:
6921 case BUILT_IN_NAND_AND_FETCH_2:
6922 case BUILT_IN_NAND_AND_FETCH_4:
6923 case BUILT_IN_NAND_AND_FETCH_8:
6924 case BUILT_IN_NAND_AND_FETCH_16:
6925 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6926 target = expand_builtin_sync_operation (mode, exp, NOT,
6927 true, target, ignore);
6932 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6933 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6934 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6935 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6936 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6937 if (mode == VOIDmode)
6938 mode = TYPE_MODE (boolean_type_node);
6939 if (!target || !register_operand (target, mode))
6940 target = gen_reg_rtx (mode);
6942 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6943 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6948 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6949 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6950 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6951 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6952 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6953 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6954 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6959 case BUILT_IN_LOCK_TEST_AND_SET_1:
6960 case BUILT_IN_LOCK_TEST_AND_SET_2:
6961 case BUILT_IN_LOCK_TEST_AND_SET_4:
6962 case BUILT_IN_LOCK_TEST_AND_SET_8:
6963 case BUILT_IN_LOCK_TEST_AND_SET_16:
6964 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6965 target = expand_builtin_lock_test_and_set (mode, exp, target);
6970 case BUILT_IN_LOCK_RELEASE_1:
6971 case BUILT_IN_LOCK_RELEASE_2:
6972 case BUILT_IN_LOCK_RELEASE_4:
6973 case BUILT_IN_LOCK_RELEASE_8:
6974 case BUILT_IN_LOCK_RELEASE_16:
6975 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6976 expand_builtin_lock_release (mode, exp);
6979 case BUILT_IN_SYNCHRONIZE:
6980 expand_builtin_synchronize ();
6983 case BUILT_IN_OBJECT_SIZE:
6984 return expand_builtin_object_size (exp);
6986 case BUILT_IN_MEMCPY_CHK:
6987 case BUILT_IN_MEMPCPY_CHK:
6988 case BUILT_IN_MEMMOVE_CHK:
6989 case BUILT_IN_MEMSET_CHK:
6990 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6995 case BUILT_IN_STRCPY_CHK:
6996 case BUILT_IN_STPCPY_CHK:
6997 case BUILT_IN_STRNCPY_CHK:
6998 case BUILT_IN_STRCAT_CHK:
6999 case BUILT_IN_STRNCAT_CHK:
7000 case BUILT_IN_SNPRINTF_CHK:
7001 case BUILT_IN_VSNPRINTF_CHK:
7002 maybe_emit_chk_warning (exp, fcode);
7005 case BUILT_IN_SPRINTF_CHK:
7006 case BUILT_IN_VSPRINTF_CHK:
7007 maybe_emit_sprintf_chk_warning (exp, fcode);
7010 default: /* just do library call, if unknown builtin */
7014 /* The switch statement above can drop through to cause the function
7015 to be called normally. */
7016 return expand_call (exp, target, ignore);
7019 /* Determine whether a tree node represents a call to a built-in
7020 function. If the tree T is a call to a built-in function with
7021 the right number of arguments of the appropriate types, return
7022 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7023 Otherwise the return value is END_BUILTINS. */
7025 enum built_in_function
7026 builtin_mathfn_code (const_tree t)
7028 const_tree fndecl, arg, parmlist;
7029 const_tree argtype, parmtype;
7030 const_call_expr_arg_iterator iter;
7032 if (TREE_CODE (t) != CALL_EXPR
7033 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7034 return END_BUILTINS;
7036 fndecl = get_callee_fndecl (t);
7037 if (fndecl == NULL_TREE
7038 || TREE_CODE (fndecl) != FUNCTION_DECL
7039 || ! DECL_BUILT_IN (fndecl)
7040 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7041 return END_BUILTINS;
7043 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7044 init_const_call_expr_arg_iterator (t, &iter);
7045 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7047 /* If a function doesn't take a variable number of arguments,
7048 the last element in the list will have type `void'. */
7049 parmtype = TREE_VALUE (parmlist);
7050 if (VOID_TYPE_P (parmtype))
7052 if (more_const_call_expr_args_p (&iter))
7053 return END_BUILTINS;
7054 return DECL_FUNCTION_CODE (fndecl);
7057 if (! more_const_call_expr_args_p (&iter))
7058 return END_BUILTINS;
7060 arg = next_const_call_expr_arg (&iter);
7061 argtype = TREE_TYPE (arg);
7063 if (SCALAR_FLOAT_TYPE_P (parmtype))
7065 if (! SCALAR_FLOAT_TYPE_P (argtype))
7066 return END_BUILTINS;
7068 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7070 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7071 return END_BUILTINS;
7073 else if (POINTER_TYPE_P (parmtype))
7075 if (! POINTER_TYPE_P (argtype))
7076 return END_BUILTINS;
7078 else if (INTEGRAL_TYPE_P (parmtype))
7080 if (! INTEGRAL_TYPE_P (argtype))
7081 return END_BUILTINS;
7084 return END_BUILTINS;
7087 /* Variable-length argument list. */
7088 return DECL_FUNCTION_CODE (fndecl);
7091 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7092 evaluate to a constant. */
7095 fold_builtin_constant_p (tree arg)
7097 /* We return 1 for a numeric type that's known to be a constant
7098 value at compile-time or for an aggregate type that's a
7099 literal constant. */
7102 /* If we know this is a constant, emit the constant of one. */
7103 if (CONSTANT_CLASS_P (arg)
7104 || (TREE_CODE (arg) == CONSTRUCTOR
7105 && TREE_CONSTANT (arg)))
7106 return integer_one_node;
7107 if (TREE_CODE (arg) == ADDR_EXPR)
7109 tree op = TREE_OPERAND (arg, 0);
7110 if (TREE_CODE (op) == STRING_CST
7111 || (TREE_CODE (op) == ARRAY_REF
7112 && integer_zerop (TREE_OPERAND (op, 1))
7113 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7114 return integer_one_node;
7117 /* If this expression has side effects, show we don't know it to be a
7118 constant. Likewise if it's a pointer or aggregate type since in
7119 those case we only want literals, since those are only optimized
7120 when generating RTL, not later.
7121 And finally, if we are compiling an initializer, not code, we
7122 need to return a definite result now; there's not going to be any
7123 more optimization done. */
7124 if (TREE_SIDE_EFFECTS (arg)
7125 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7126 || POINTER_TYPE_P (TREE_TYPE (arg))
7128 || folding_initializer)
7129 return integer_zero_node;
7134 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7135 return it as a truthvalue. */
7138 build_builtin_expect_predicate (tree pred, tree expected)
7140 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7142 fn = built_in_decls[BUILT_IN_EXPECT];
7143 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7144 ret_type = TREE_TYPE (TREE_TYPE (fn));
7145 pred_type = TREE_VALUE (arg_types);
7146 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7148 pred = fold_convert (pred_type, pred);
7149 expected = fold_convert (expected_type, expected);
7150 call_expr = build_call_expr (fn, 2, pred, expected);
7152 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7153 build_int_cst (ret_type, 0));
7156 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7157 NULL_TREE if no simplification is possible. */
7160 fold_builtin_expect (tree arg0, tree arg1)
7163 enum tree_code code;
7165 /* If this is a builtin_expect within a builtin_expect keep the
7166 inner one. See through a comparison against a constant. It
7167 might have been added to create a thruthvalue. */
7169 if (COMPARISON_CLASS_P (inner)
7170 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7171 inner = TREE_OPERAND (inner, 0);
7173 if (TREE_CODE (inner) == CALL_EXPR
7174 && (fndecl = get_callee_fndecl (inner))
7175 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7176 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7179 /* Distribute the expected value over short-circuiting operators.
7180 See through the cast from truthvalue_type_node to long. */
7182 while (TREE_CODE (inner) == NOP_EXPR
7183 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7184 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7185 inner = TREE_OPERAND (inner, 0);
7187 code = TREE_CODE (inner);
7188 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7190 tree op0 = TREE_OPERAND (inner, 0);
7191 tree op1 = TREE_OPERAND (inner, 1);
7193 op0 = build_builtin_expect_predicate (op0, arg1);
7194 op1 = build_builtin_expect_predicate (op1, arg1);
7195 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7197 return fold_convert (TREE_TYPE (arg0), inner);
7200 /* If the argument isn't invariant then there's nothing else we can do. */
7201 if (!TREE_CONSTANT (arg0))
7204 /* If we expect that a comparison against the argument will fold to
7205 a constant return the constant. In practice, this means a true
7206 constant or the address of a non-weak symbol. */
7209 if (TREE_CODE (inner) == ADDR_EXPR)
7213 inner = TREE_OPERAND (inner, 0);
7215 while (TREE_CODE (inner) == COMPONENT_REF
7216 || TREE_CODE (inner) == ARRAY_REF);
7217 if (DECL_P (inner) && DECL_WEAK (inner))
7221 /* Otherwise, ARG0 already has the proper type for the return value. */
7225 /* Fold a call to __builtin_classify_type with argument ARG. */
7228 fold_builtin_classify_type (tree arg)
7231 return build_int_cst (NULL_TREE, no_type_class);
7233 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7236 /* Fold a call to __builtin_strlen with argument ARG. */
7239 fold_builtin_strlen (tree arg)
7241 if (!validate_arg (arg, POINTER_TYPE))
7245 tree len = c_strlen (arg, 0);
7249 /* Convert from the internal "sizetype" type to "size_t". */
7251 len = fold_convert (size_type_node, len);
7259 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7262 fold_builtin_inf (tree type, int warn)
7264 REAL_VALUE_TYPE real;
7266 /* __builtin_inff is intended to be usable to define INFINITY on all
7267 targets. If an infinity is not available, INFINITY expands "to a
7268 positive constant of type float that overflows at translation
7269 time", footnote "In this case, using INFINITY will violate the
7270 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7271 Thus we pedwarn to ensure this constraint violation is
7273 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7274 pedwarn (0, "target format does not support infinity");
7277 return build_real (type, real);
7280 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7283 fold_builtin_nan (tree arg, tree type, int quiet)
7285 REAL_VALUE_TYPE real;
7288 if (!validate_arg (arg, POINTER_TYPE))
7290 str = c_getstr (arg);
7294 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7297 return build_real (type, real);
7300 /* Return true if the floating point expression T has an integer value.
7301 We also allow +Inf, -Inf and NaN to be considered integer values. */
7304 integer_valued_real_p (tree t)
7306 switch (TREE_CODE (t))
7313 return integer_valued_real_p (TREE_OPERAND (t, 0));
7318 return integer_valued_real_p (TREE_OPERAND (t, 1));
7325 return integer_valued_real_p (TREE_OPERAND (t, 0))
7326 && integer_valued_real_p (TREE_OPERAND (t, 1));
7329 return integer_valued_real_p (TREE_OPERAND (t, 1))
7330 && integer_valued_real_p (TREE_OPERAND (t, 2));
7333 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7337 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7338 if (TREE_CODE (type) == INTEGER_TYPE)
7340 if (TREE_CODE (type) == REAL_TYPE)
7341 return integer_valued_real_p (TREE_OPERAND (t, 0));
7346 switch (builtin_mathfn_code (t))
7348 CASE_FLT_FN (BUILT_IN_CEIL):
7349 CASE_FLT_FN (BUILT_IN_FLOOR):
7350 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7351 CASE_FLT_FN (BUILT_IN_RINT):
7352 CASE_FLT_FN (BUILT_IN_ROUND):
7353 CASE_FLT_FN (BUILT_IN_TRUNC):
7356 CASE_FLT_FN (BUILT_IN_FMIN):
7357 CASE_FLT_FN (BUILT_IN_FMAX):
7358 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7359 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7372 /* FNDECL is assumed to be a builtin where truncation can be propagated
7373 across (for instance floor((double)f) == (double)floorf (f).
7374 Do the transformation for a call with argument ARG. */
7377 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7379 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7381 if (!validate_arg (arg, REAL_TYPE))
7384 /* Integer rounding functions are idempotent. */
7385 if (fcode == builtin_mathfn_code (arg))
7388 /* If argument is already integer valued, and we don't need to worry
7389 about setting errno, there's no need to perform rounding. */
7390 if (! flag_errno_math && integer_valued_real_p (arg))
7395 tree arg0 = strip_float_extensions (arg);
7396 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7397 tree newtype = TREE_TYPE (arg0);
7400 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7401 && (decl = mathfn_built_in (newtype, fcode)))
7402 return fold_convert (ftype,
7403 build_call_expr (decl, 1,
7404 fold_convert (newtype, arg0)));
7409 /* FNDECL is assumed to be builtin which can narrow the FP type of
7410 the argument, for instance lround((double)f) -> lroundf (f).
7411 Do the transformation for a call with argument ARG. */
7414 fold_fixed_mathfn (tree fndecl, tree arg)
7416 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7418 if (!validate_arg (arg, REAL_TYPE))
7421 /* If argument is already integer valued, and we don't need to worry
7422 about setting errno, there's no need to perform rounding. */
7423 if (! flag_errno_math && integer_valued_real_p (arg))
7424 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7428 tree ftype = TREE_TYPE (arg);
7429 tree arg0 = strip_float_extensions (arg);
7430 tree newtype = TREE_TYPE (arg0);
7433 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7434 && (decl = mathfn_built_in (newtype, fcode)))
7435 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7438 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7439 sizeof (long long) == sizeof (long). */
7440 if (TYPE_PRECISION (long_long_integer_type_node)
7441 == TYPE_PRECISION (long_integer_type_node))
7443 tree newfn = NULL_TREE;
7446 CASE_FLT_FN (BUILT_IN_LLCEIL):
7447 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7450 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7451 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7454 CASE_FLT_FN (BUILT_IN_LLROUND):
7455 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7458 CASE_FLT_FN (BUILT_IN_LLRINT):
7459 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7468 tree newcall = build_call_expr(newfn, 1, arg);
7469 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7476 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7477 return type. Return NULL_TREE if no simplification can be made. */
7480 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7484 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7485 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7488 /* Calculate the result when the argument is a constant. */
7489 if (TREE_CODE (arg) == COMPLEX_CST
7490 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7494 if (TREE_CODE (arg) == COMPLEX_EXPR)
7496 tree real = TREE_OPERAND (arg, 0);
7497 tree imag = TREE_OPERAND (arg, 1);
7499 /* If either part is zero, cabs is fabs of the other. */
7500 if (real_zerop (real))
7501 return fold_build1 (ABS_EXPR, type, imag);
7502 if (real_zerop (imag))
7503 return fold_build1 (ABS_EXPR, type, real);
7505 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7506 if (flag_unsafe_math_optimizations
7507 && operand_equal_p (real, imag, OEP_PURE_SAME))
7509 const REAL_VALUE_TYPE sqrt2_trunc
7510 = real_value_truncate (TYPE_MODE (type),
7511 *get_real_const (rv_sqrt2));
7513 return fold_build2 (MULT_EXPR, type,
7514 fold_build1 (ABS_EXPR, type, real),
7515 build_real (type, sqrt2_trunc));
7519 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7520 if (TREE_CODE (arg) == NEGATE_EXPR
7521 || TREE_CODE (arg) == CONJ_EXPR)
7522 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7524 /* Don't do this when optimizing for size. */
7525 if (flag_unsafe_math_optimizations
7526 && optimize && !optimize_size)
7528 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7530 if (sqrtfn != NULL_TREE)
7532 tree rpart, ipart, result;
7534 arg = builtin_save_expr (arg);
7536 rpart = fold_build1 (REALPART_EXPR, type, arg);
7537 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7539 rpart = builtin_save_expr (rpart);
7540 ipart = builtin_save_expr (ipart);
7542 result = fold_build2 (PLUS_EXPR, type,
7543 fold_build2 (MULT_EXPR, type,
7545 fold_build2 (MULT_EXPR, type,
7548 return build_call_expr (sqrtfn, 1, result);
7555 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7556 Return NULL_TREE if no simplification can be made. */
7559 fold_builtin_sqrt (tree arg, tree type)
7562 enum built_in_function fcode;
7565 if (!validate_arg (arg, REAL_TYPE))
7568 /* Calculate the result when the argument is a constant. */
7569 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7572 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7573 fcode = builtin_mathfn_code (arg);
7574 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7576 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7577 arg = fold_build2 (MULT_EXPR, type,
7578 CALL_EXPR_ARG (arg, 0),
7579 build_real (type, dconsthalf));
7580 return build_call_expr (expfn, 1, arg);
7583 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7584 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7586 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7590 tree arg0 = CALL_EXPR_ARG (arg, 0);
7592 /* The inner root was either sqrt or cbrt. */
7593 REAL_VALUE_TYPE dconstroot =
7594 BUILTIN_SQRT_P (fcode) ? dconsthalf : *get_real_const (rv_third);
7596 /* Adjust for the outer root. */
7597 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7598 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7599 tree_root = build_real (type, dconstroot);
7600 return build_call_expr (powfn, 2, arg0, tree_root);
7604 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7605 if (flag_unsafe_math_optimizations
7606 && (fcode == BUILT_IN_POW
7607 || fcode == BUILT_IN_POWF
7608 || fcode == BUILT_IN_POWL))
7610 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7611 tree arg0 = CALL_EXPR_ARG (arg, 0);
7612 tree arg1 = CALL_EXPR_ARG (arg, 1);
7614 if (!tree_expr_nonnegative_p (arg0))
7615 arg0 = build1 (ABS_EXPR, type, arg0);
7616 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7617 build_real (type, dconsthalf));
7618 return build_call_expr (powfn, 2, arg0, narg1);
7624 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7625 Return NULL_TREE if no simplification can be made. */
7628 fold_builtin_cbrt (tree arg, tree type)
7630 const enum built_in_function fcode = builtin_mathfn_code (arg);
7633 if (!validate_arg (arg, REAL_TYPE))
7636 /* Calculate the result when the argument is a constant. */
7637 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7640 if (flag_unsafe_math_optimizations)
7642 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7643 if (BUILTIN_EXPONENT_P (fcode))
7645 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7646 const REAL_VALUE_TYPE third_trunc =
7647 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_third));
7648 arg = fold_build2 (MULT_EXPR, type,
7649 CALL_EXPR_ARG (arg, 0),
7650 build_real (type, third_trunc));
7651 return build_call_expr (expfn, 1, arg);
7654 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7655 if (BUILTIN_SQRT_P (fcode))
7657 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7661 tree arg0 = CALL_EXPR_ARG (arg, 0);
7663 REAL_VALUE_TYPE dconstroot = *get_real_const (rv_third);
7665 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7666 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7667 tree_root = build_real (type, dconstroot);
7668 return build_call_expr (powfn, 2, arg0, tree_root);
7672 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7673 if (BUILTIN_CBRT_P (fcode))
7675 tree arg0 = CALL_EXPR_ARG (arg, 0);
7676 if (tree_expr_nonnegative_p (arg0))
7678 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7683 REAL_VALUE_TYPE dconstroot;
7685 real_arithmetic (&dconstroot, MULT_EXPR,
7686 get_real_const (rv_third),
7687 get_real_const (rv_third));
7688 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7689 tree_root = build_real (type, dconstroot);
7690 return build_call_expr (powfn, 2, arg0, tree_root);
7695 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7696 if (fcode == BUILT_IN_POW
7697 || fcode == BUILT_IN_POWF
7698 || fcode == BUILT_IN_POWL)
7700 tree arg00 = CALL_EXPR_ARG (arg, 0);
7701 tree arg01 = CALL_EXPR_ARG (arg, 1);
7702 if (tree_expr_nonnegative_p (arg00))
7704 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7705 const REAL_VALUE_TYPE dconstroot
7706 = real_value_truncate (TYPE_MODE (type),
7707 *get_real_const (rv_third));
7708 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7709 build_real (type, dconstroot));
7710 return build_call_expr (powfn, 2, arg00, narg01);
7717 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7718 TYPE is the type of the return value. Return NULL_TREE if no
7719 simplification can be made. */
7722 fold_builtin_cos (tree arg, tree type, tree fndecl)
7726 if (!validate_arg (arg, REAL_TYPE))
7729 /* Calculate the result when the argument is a constant. */
7730 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7733 /* Optimize cos(-x) into cos (x). */
7734 if ((narg = fold_strip_sign_ops (arg)))
7735 return build_call_expr (fndecl, 1, narg);
7740 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7741 Return NULL_TREE if no simplification can be made. */
7744 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7746 if (validate_arg (arg, REAL_TYPE))
7750 /* Calculate the result when the argument is a constant. */
7751 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7754 /* Optimize cosh(-x) into cosh (x). */
7755 if ((narg = fold_strip_sign_ops (arg)))
7756 return build_call_expr (fndecl, 1, narg);
7762 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7763 Return NULL_TREE if no simplification can be made. */
7766 fold_builtin_tan (tree arg, tree type)
7768 enum built_in_function fcode;
7771 if (!validate_arg (arg, REAL_TYPE))
7774 /* Calculate the result when the argument is a constant. */
7775 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7778 /* Optimize tan(atan(x)) = x. */
7779 fcode = builtin_mathfn_code (arg);
7780 if (flag_unsafe_math_optimizations
7781 && (fcode == BUILT_IN_ATAN
7782 || fcode == BUILT_IN_ATANF
7783 || fcode == BUILT_IN_ATANL))
7784 return CALL_EXPR_ARG (arg, 0);
7789 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7790 NULL_TREE if no simplification can be made. */
7793 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7798 if (!validate_arg (arg0, REAL_TYPE)
7799 || !validate_arg (arg1, POINTER_TYPE)
7800 || !validate_arg (arg2, POINTER_TYPE))
7803 type = TREE_TYPE (arg0);
7805 /* Calculate the result when the argument is a constant. */
7806 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7809 /* Canonicalize sincos to cexpi. */
7810 if (!TARGET_C99_FUNCTIONS)
7812 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7816 call = build_call_expr (fn, 1, arg0);
7817 call = builtin_save_expr (call);
7819 return build2 (COMPOUND_EXPR, type,
7820 build2 (MODIFY_EXPR, void_type_node,
7821 build_fold_indirect_ref (arg1),
7822 build1 (IMAGPART_EXPR, type, call)),
7823 build2 (MODIFY_EXPR, void_type_node,
7824 build_fold_indirect_ref (arg2),
7825 build1 (REALPART_EXPR, type, call)));
7828 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7829 NULL_TREE if no simplification can be made. */
7832 fold_builtin_cexp (tree arg0, tree type)
7835 tree realp, imagp, ifn;
7837 if (!validate_arg (arg0, COMPLEX_TYPE))
7840 rtype = TREE_TYPE (TREE_TYPE (arg0));
7842 /* In case we can figure out the real part of arg0 and it is constant zero
7844 if (!TARGET_C99_FUNCTIONS)
7846 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7850 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7851 && real_zerop (realp))
7853 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7854 return build_call_expr (ifn, 1, narg);
7857 /* In case we can easily decompose real and imaginary parts split cexp
7858 to exp (r) * cexpi (i). */
7859 if (flag_unsafe_math_optimizations
7862 tree rfn, rcall, icall;
7864 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7868 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7872 icall = build_call_expr (ifn, 1, imagp);
7873 icall = builtin_save_expr (icall);
7874 rcall = build_call_expr (rfn, 1, realp);
7875 rcall = builtin_save_expr (rcall);
7876 return fold_build2 (COMPLEX_EXPR, type,
7877 fold_build2 (MULT_EXPR, rtype,
7879 fold_build1 (REALPART_EXPR, rtype, icall)),
7880 fold_build2 (MULT_EXPR, rtype,
7882 fold_build1 (IMAGPART_EXPR, rtype, icall)));
7888 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7889 Return NULL_TREE if no simplification can be made. */
7892 fold_builtin_trunc (tree fndecl, tree arg)
7894 if (!validate_arg (arg, REAL_TYPE))
7897 /* Optimize trunc of constant value. */
7898 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7900 REAL_VALUE_TYPE r, x;
7901 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7903 x = TREE_REAL_CST (arg);
7904 real_trunc (&r, TYPE_MODE (type), &x);
7905 return build_real (type, r);
7908 return fold_trunc_transparent_mathfn (fndecl, arg);
7911 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7912 Return NULL_TREE if no simplification can be made. */
7915 fold_builtin_floor (tree fndecl, tree arg)
7917 if (!validate_arg (arg, REAL_TYPE))
7920 /* Optimize floor of constant value. */
7921 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7925 x = TREE_REAL_CST (arg);
7926 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7928 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7931 real_floor (&r, TYPE_MODE (type), &x);
7932 return build_real (type, r);
7936 /* Fold floor (x) where x is nonnegative to trunc (x). */
7937 if (tree_expr_nonnegative_p (arg))
7939 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7941 return build_call_expr (truncfn, 1, arg);
7944 return fold_trunc_transparent_mathfn (fndecl, arg);
7947 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7948 Return NULL_TREE if no simplification can be made. */
7951 fold_builtin_ceil (tree fndecl, tree arg)
7953 if (!validate_arg (arg, REAL_TYPE))
7956 /* Optimize ceil of constant value. */
7957 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7961 x = TREE_REAL_CST (arg);
7962 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7964 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7967 real_ceil (&r, TYPE_MODE (type), &x);
7968 return build_real (type, r);
7972 return fold_trunc_transparent_mathfn (fndecl, arg);
7975 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7976 Return NULL_TREE if no simplification can be made. */
7979 fold_builtin_round (tree fndecl, tree arg)
7981 if (!validate_arg (arg, REAL_TYPE))
7984 /* Optimize round of constant value. */
7985 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7989 x = TREE_REAL_CST (arg);
7990 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7992 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7995 real_round (&r, TYPE_MODE (type), &x);
7996 return build_real (type, r);
8000 return fold_trunc_transparent_mathfn (fndecl, arg);
8003 /* Fold function call to builtin lround, lroundf or lroundl (or the
8004 corresponding long long versions) and other rounding functions. ARG
8005 is the argument to the call. Return NULL_TREE if no simplification
8009 fold_builtin_int_roundingfn (tree fndecl, tree arg)
8011 if (!validate_arg (arg, REAL_TYPE))
8014 /* Optimize lround of constant value. */
8015 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8017 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8019 if (real_isfinite (&x))
8021 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8022 tree ftype = TREE_TYPE (arg);
8023 unsigned HOST_WIDE_INT lo2;
8024 HOST_WIDE_INT hi, lo;
8027 switch (DECL_FUNCTION_CODE (fndecl))
8029 CASE_FLT_FN (BUILT_IN_LFLOOR):
8030 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8031 real_floor (&r, TYPE_MODE (ftype), &x);
8034 CASE_FLT_FN (BUILT_IN_LCEIL):
8035 CASE_FLT_FN (BUILT_IN_LLCEIL):
8036 real_ceil (&r, TYPE_MODE (ftype), &x);
8039 CASE_FLT_FN (BUILT_IN_LROUND):
8040 CASE_FLT_FN (BUILT_IN_LLROUND):
8041 real_round (&r, TYPE_MODE (ftype), &x);
8048 REAL_VALUE_TO_INT (&lo, &hi, r);
8049 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8050 return build_int_cst_wide (itype, lo2, hi);
8054 switch (DECL_FUNCTION_CODE (fndecl))
8056 CASE_FLT_FN (BUILT_IN_LFLOOR):
8057 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8058 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8059 if (tree_expr_nonnegative_p (arg))
8060 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8066 return fold_fixed_mathfn (fndecl, arg);
8069 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8070 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8071 the argument to the call. Return NULL_TREE if no simplification can
8075 fold_builtin_bitop (tree fndecl, tree arg)
8077 if (!validate_arg (arg, INTEGER_TYPE))
8080 /* Optimize for constant argument. */
8081 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8083 HOST_WIDE_INT hi, width, result;
8084 unsigned HOST_WIDE_INT lo;
8087 type = TREE_TYPE (arg);
8088 width = TYPE_PRECISION (type);
8089 lo = TREE_INT_CST_LOW (arg);
8091 /* Clear all the bits that are beyond the type's precision. */
8092 if (width > HOST_BITS_PER_WIDE_INT)
8094 hi = TREE_INT_CST_HIGH (arg);
8095 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8096 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8101 if (width < HOST_BITS_PER_WIDE_INT)
8102 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8105 switch (DECL_FUNCTION_CODE (fndecl))
8107 CASE_INT_FN (BUILT_IN_FFS):
8109 result = exact_log2 (lo & -lo) + 1;
8111 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8116 CASE_INT_FN (BUILT_IN_CLZ):
8118 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8120 result = width - floor_log2 (lo) - 1;
8121 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8125 CASE_INT_FN (BUILT_IN_CTZ):
8127 result = exact_log2 (lo & -lo);
8129 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8130 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8134 CASE_INT_FN (BUILT_IN_POPCOUNT):
8137 result++, lo &= lo - 1;
8139 result++, hi &= hi - 1;
8142 CASE_INT_FN (BUILT_IN_PARITY):
8145 result++, lo &= lo - 1;
8147 result++, hi &= hi - 1;
8155 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8161 /* Fold function call to builtin_bswap and the long and long long
8162 variants. Return NULL_TREE if no simplification can be made. */
8164 fold_builtin_bswap (tree fndecl, tree arg)
8166 if (! validate_arg (arg, INTEGER_TYPE))
8169 /* Optimize constant value. */
8170 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8172 HOST_WIDE_INT hi, width, r_hi = 0;
8173 unsigned HOST_WIDE_INT lo, r_lo = 0;
8176 type = TREE_TYPE (arg);
8177 width = TYPE_PRECISION (type);
8178 lo = TREE_INT_CST_LOW (arg);
8179 hi = TREE_INT_CST_HIGH (arg);
8181 switch (DECL_FUNCTION_CODE (fndecl))
8183 case BUILT_IN_BSWAP32:
8184 case BUILT_IN_BSWAP64:
8188 for (s = 0; s < width; s += 8)
8190 int d = width - s - 8;
8191 unsigned HOST_WIDE_INT byte;
8193 if (s < HOST_BITS_PER_WIDE_INT)
8194 byte = (lo >> s) & 0xff;
8196 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8198 if (d < HOST_BITS_PER_WIDE_INT)
8201 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8211 if (width < HOST_BITS_PER_WIDE_INT)
8212 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8214 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8220 /* Return true if EXPR is the real constant contained in VALUE. */
8223 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8227 return ((TREE_CODE (expr) == REAL_CST
8228 && !TREE_OVERFLOW (expr)
8229 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8230 || (TREE_CODE (expr) == COMPLEX_CST
8231 && real_dconstp (TREE_REALPART (expr), value)
8232 && real_zerop (TREE_IMAGPART (expr))));
8235 /* A subroutine of fold_builtin to fold the various logarithmic
8236 functions. Return NULL_TREE if no simplification can me made.
8237 FUNC is the corresponding MPFR logarithm function. */
8240 fold_builtin_logarithm (tree fndecl, tree arg,
8241 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8243 if (validate_arg (arg, REAL_TYPE))
8245 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8247 const enum built_in_function fcode = builtin_mathfn_code (arg);
8249 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8250 instead we'll look for 'e' truncated to MODE. So only do
8251 this if flag_unsafe_math_optimizations is set. */
8252 if (flag_unsafe_math_optimizations && func == mpfr_log)
8254 const REAL_VALUE_TYPE e_truncated =
8255 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_e));
8256 if (real_dconstp (arg, &e_truncated))
8257 return build_real (type, dconst1);
8260 /* Calculate the result when the argument is a constant. */
8261 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8264 /* Special case, optimize logN(expN(x)) = x. */
8265 if (flag_unsafe_math_optimizations
8266 && ((func == mpfr_log
8267 && (fcode == BUILT_IN_EXP
8268 || fcode == BUILT_IN_EXPF
8269 || fcode == BUILT_IN_EXPL))
8270 || (func == mpfr_log2
8271 && (fcode == BUILT_IN_EXP2
8272 || fcode == BUILT_IN_EXP2F
8273 || fcode == BUILT_IN_EXP2L))
8274 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8275 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8277 /* Optimize logN(func()) for various exponential functions. We
8278 want to determine the value "x" and the power "exponent" in
8279 order to transform logN(x**exponent) into exponent*logN(x). */
8280 if (flag_unsafe_math_optimizations)
8282 tree exponent = 0, x = 0;
8286 CASE_FLT_FN (BUILT_IN_EXP):
8287 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8288 x = build_real (type,
8289 real_value_truncate (TYPE_MODE (type),
8290 *get_real_const (rv_e)));
8291 exponent = CALL_EXPR_ARG (arg, 0);
8293 CASE_FLT_FN (BUILT_IN_EXP2):
8294 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8295 x = build_real (type, dconst2);
8296 exponent = CALL_EXPR_ARG (arg, 0);
8298 CASE_FLT_FN (BUILT_IN_EXP10):
8299 CASE_FLT_FN (BUILT_IN_POW10):
8300 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8302 REAL_VALUE_TYPE dconst10;
8303 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8304 x = build_real (type, dconst10);
8306 exponent = CALL_EXPR_ARG (arg, 0);
8308 CASE_FLT_FN (BUILT_IN_SQRT):
8309 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8310 x = CALL_EXPR_ARG (arg, 0);
8311 exponent = build_real (type, dconsthalf);
8313 CASE_FLT_FN (BUILT_IN_CBRT):
8314 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8315 x = CALL_EXPR_ARG (arg, 0);
8316 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8317 *get_real_const (rv_third)));
8319 CASE_FLT_FN (BUILT_IN_POW):
8320 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8321 x = CALL_EXPR_ARG (arg, 0);
8322 exponent = CALL_EXPR_ARG (arg, 1);
8328 /* Now perform the optimization. */
8331 tree logfn = build_call_expr (fndecl, 1, x);
8332 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8340 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8341 NULL_TREE if no simplification can be made. */
8344 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8346 tree res, narg0, narg1;
8348 if (!validate_arg (arg0, REAL_TYPE)
8349 || !validate_arg (arg1, REAL_TYPE))
8352 /* Calculate the result when the argument is a constant. */
8353 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8356 /* If either argument to hypot has a negate or abs, strip that off.
8357 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8358 narg0 = fold_strip_sign_ops (arg0);
8359 narg1 = fold_strip_sign_ops (arg1);
8362 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8363 narg1 ? narg1 : arg1);
8366 /* If either argument is zero, hypot is fabs of the other. */
8367 if (real_zerop (arg0))
8368 return fold_build1 (ABS_EXPR, type, arg1);
8369 else if (real_zerop (arg1))
8370 return fold_build1 (ABS_EXPR, type, arg0);
8372 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8373 if (flag_unsafe_math_optimizations
8374 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8376 const REAL_VALUE_TYPE sqrt2_trunc
8377 = real_value_truncate (TYPE_MODE (type), *get_real_const (rv_sqrt2));
8378 return fold_build2 (MULT_EXPR, type,
8379 fold_build1 (ABS_EXPR, type, arg0),
8380 build_real (type, sqrt2_trunc));
8387 /* Fold a builtin function call to pow, powf, or powl. Return
8388 NULL_TREE if no simplification can be made. */
8390 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8394 if (!validate_arg (arg0, REAL_TYPE)
8395 || !validate_arg (arg1, REAL_TYPE))
8398 /* Calculate the result when the argument is a constant. */
8399 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8402 /* Optimize pow(1.0,y) = 1.0. */
8403 if (real_onep (arg0))
8404 return omit_one_operand (type, build_real (type, dconst1), arg1);
8406 if (TREE_CODE (arg1) == REAL_CST
8407 && !TREE_OVERFLOW (arg1))
8409 REAL_VALUE_TYPE cint;
8413 c = TREE_REAL_CST (arg1);
8415 /* Optimize pow(x,0.0) = 1.0. */
8416 if (REAL_VALUES_EQUAL (c, dconst0))
8417 return omit_one_operand (type, build_real (type, dconst1),
8420 /* Optimize pow(x,1.0) = x. */
8421 if (REAL_VALUES_EQUAL (c, dconst1))
8424 /* Optimize pow(x,-1.0) = 1.0/x. */
8425 if (REAL_VALUES_EQUAL (c, dconstm1))
8426 return fold_build2 (RDIV_EXPR, type,
8427 build_real (type, dconst1), arg0);
8429 /* Optimize pow(x,0.5) = sqrt(x). */
8430 if (flag_unsafe_math_optimizations
8431 && REAL_VALUES_EQUAL (c, dconsthalf))
8433 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8435 if (sqrtfn != NULL_TREE)
8436 return build_call_expr (sqrtfn, 1, arg0);
8439 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8440 if (flag_unsafe_math_optimizations)
8442 const REAL_VALUE_TYPE dconstroot
8443 = real_value_truncate (TYPE_MODE (type),
8444 *get_real_const (rv_third));
8446 if (REAL_VALUES_EQUAL (c, dconstroot))
8448 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8449 if (cbrtfn != NULL_TREE)
8450 return build_call_expr (cbrtfn, 1, arg0);
8454 /* Check for an integer exponent. */
8455 n = real_to_integer (&c);
8456 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8457 if (real_identical (&c, &cint))
8459 /* Attempt to evaluate pow at compile-time. */
8460 if (TREE_CODE (arg0) == REAL_CST
8461 && !TREE_OVERFLOW (arg0))
8466 x = TREE_REAL_CST (arg0);
8467 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8468 if (flag_unsafe_math_optimizations || !inexact)
8469 return build_real (type, x);
8472 /* Strip sign ops from even integer powers. */
8473 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8475 tree narg0 = fold_strip_sign_ops (arg0);
8477 return build_call_expr (fndecl, 2, narg0, arg1);
8482 if (flag_unsafe_math_optimizations)
8484 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8486 /* Optimize pow(expN(x),y) = expN(x*y). */
8487 if (BUILTIN_EXPONENT_P (fcode))
8489 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8490 tree arg = CALL_EXPR_ARG (arg0, 0);
8491 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8492 return build_call_expr (expfn, 1, arg);
8495 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8496 if (BUILTIN_SQRT_P (fcode))
8498 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8499 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8500 build_real (type, dconsthalf));
8501 return build_call_expr (fndecl, 2, narg0, narg1);
8504 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8505 if (BUILTIN_CBRT_P (fcode))
8507 tree arg = CALL_EXPR_ARG (arg0, 0);
8508 if (tree_expr_nonnegative_p (arg))
8510 const REAL_VALUE_TYPE dconstroot
8511 = real_value_truncate (TYPE_MODE (type),
8512 *get_real_const (rv_third));
8513 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8514 build_real (type, dconstroot));
8515 return build_call_expr (fndecl, 2, arg, narg1);
8519 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8520 if (fcode == BUILT_IN_POW
8521 || fcode == BUILT_IN_POWF
8522 || fcode == BUILT_IN_POWL)
8524 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8525 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8526 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8527 return build_call_expr (fndecl, 2, arg00, narg1);
8534 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8535 Return NULL_TREE if no simplification can be made. */
8537 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8538 tree arg0, tree arg1, tree type)
8540 if (!validate_arg (arg0, REAL_TYPE)
8541 || !validate_arg (arg1, INTEGER_TYPE))
8544 /* Optimize pow(1.0,y) = 1.0. */
8545 if (real_onep (arg0))
8546 return omit_one_operand (type, build_real (type, dconst1), arg1);
8548 if (host_integerp (arg1, 0))
8550 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8552 /* Evaluate powi at compile-time. */
8553 if (TREE_CODE (arg0) == REAL_CST
8554 && !TREE_OVERFLOW (arg0))
8557 x = TREE_REAL_CST (arg0);
8558 real_powi (&x, TYPE_MODE (type), &x, c);
8559 return build_real (type, x);
8562 /* Optimize pow(x,0) = 1.0. */
8564 return omit_one_operand (type, build_real (type, dconst1),
8567 /* Optimize pow(x,1) = x. */
8571 /* Optimize pow(x,-1) = 1.0/x. */
8573 return fold_build2 (RDIV_EXPR, type,
8574 build_real (type, dconst1), arg0);
8580 /* A subroutine of fold_builtin to fold the various exponent
8581 functions. Return NULL_TREE if no simplification can be made.
8582 FUNC is the corresponding MPFR exponent function. */
8585 fold_builtin_exponent (tree fndecl, tree arg,
8586 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8588 if (validate_arg (arg, REAL_TYPE))
8590 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8593 /* Calculate the result when the argument is a constant. */
8594 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8597 /* Optimize expN(logN(x)) = x. */
8598 if (flag_unsafe_math_optimizations)
8600 const enum built_in_function fcode = builtin_mathfn_code (arg);
8602 if ((func == mpfr_exp
8603 && (fcode == BUILT_IN_LOG
8604 || fcode == BUILT_IN_LOGF
8605 || fcode == BUILT_IN_LOGL))
8606 || (func == mpfr_exp2
8607 && (fcode == BUILT_IN_LOG2
8608 || fcode == BUILT_IN_LOG2F
8609 || fcode == BUILT_IN_LOG2L))
8610 || (func == mpfr_exp10
8611 && (fcode == BUILT_IN_LOG10
8612 || fcode == BUILT_IN_LOG10F
8613 || fcode == BUILT_IN_LOG10L)))
8614 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8621 /* Return true if VAR is a VAR_DECL or a component thereof. */
8624 var_decl_component_p (tree var)
8627 while (handled_component_p (inner))
8628 inner = TREE_OPERAND (inner, 0);
8629 return SSA_VAR_P (inner);
8632 /* Fold function call to builtin memset. Return
8633 NULL_TREE if no simplification can be made. */
8636 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8639 unsigned HOST_WIDE_INT length, cval;
8641 if (! validate_arg (dest, POINTER_TYPE)
8642 || ! validate_arg (c, INTEGER_TYPE)
8643 || ! validate_arg (len, INTEGER_TYPE))
8646 if (! host_integerp (len, 1))
8649 /* If the LEN parameter is zero, return DEST. */
8650 if (integer_zerop (len))
8651 return omit_one_operand (type, dest, c);
8653 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8658 if (TREE_CODE (var) != ADDR_EXPR)
8661 var = TREE_OPERAND (var, 0);
8662 if (TREE_THIS_VOLATILE (var))
8665 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8666 && !POINTER_TYPE_P (TREE_TYPE (var)))
8669 if (! var_decl_component_p (var))
8672 length = tree_low_cst (len, 1);
8673 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8674 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8678 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8681 if (integer_zerop (c))
8685 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8688 cval = tree_low_cst (c, 1);
8692 cval |= (cval << 31) << 1;
8695 ret = build_int_cst_type (TREE_TYPE (var), cval);
8696 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8700 return omit_one_operand (type, dest, ret);
8703 /* Fold function call to builtin memset. Return
8704 NULL_TREE if no simplification can be made. */
8707 fold_builtin_bzero (tree dest, tree size, bool ignore)
8709 if (! validate_arg (dest, POINTER_TYPE)
8710 || ! validate_arg (size, INTEGER_TYPE))
8716 /* New argument list transforming bzero(ptr x, int y) to
8717 memset(ptr x, int 0, size_t y). This is done this way
8718 so that if it isn't expanded inline, we fallback to
8719 calling bzero instead of memset. */
8721 return fold_builtin_memset (dest, integer_zero_node,
8722 fold_convert (sizetype, size),
8723 void_type_node, ignore);
8726 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8727 NULL_TREE if no simplification can be made.
8728 If ENDP is 0, return DEST (like memcpy).
8729 If ENDP is 1, return DEST+LEN (like mempcpy).
8730 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8731 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8735 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8737 tree destvar, srcvar, expr;
8739 if (! validate_arg (dest, POINTER_TYPE)
8740 || ! validate_arg (src, POINTER_TYPE)
8741 || ! validate_arg (len, INTEGER_TYPE))
8744 /* If the LEN parameter is zero, return DEST. */
8745 if (integer_zerop (len))
8746 return omit_one_operand (type, dest, src);
8748 /* If SRC and DEST are the same (and not volatile), return
8749 DEST{,+LEN,+LEN-1}. */
8750 if (operand_equal_p (src, dest, 0))
8754 tree srctype, desttype;
8757 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8758 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8760 /* Both DEST and SRC must be pointer types.
8761 ??? This is what old code did. Is the testing for pointer types
8764 If either SRC is readonly or length is 1, we can use memcpy. */
8765 if (dest_align && src_align
8766 && (readonly_data_expr (src)
8767 || (host_integerp (len, 1)
8768 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8769 tree_low_cst (len, 1)))))
8771 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8774 return build_call_expr (fn, 3, dest, src, len);
8779 if (!host_integerp (len, 0))
8782 This logic lose for arguments like (type *)malloc (sizeof (type)),
8783 since we strip the casts of up to VOID return value from malloc.
8784 Perhaps we ought to inherit type from non-VOID argument here? */
8787 srctype = TREE_TYPE (TREE_TYPE (src));
8788 desttype = TREE_TYPE (TREE_TYPE (dest));
8789 if (!srctype || !desttype
8790 || !TYPE_SIZE_UNIT (srctype)
8791 || !TYPE_SIZE_UNIT (desttype)
8792 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8793 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8794 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8795 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8798 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8799 < (int) TYPE_ALIGN (desttype)
8800 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8801 < (int) TYPE_ALIGN (srctype)))
8805 dest = builtin_save_expr (dest);
8807 srcvar = build_fold_indirect_ref (src);
8808 if (TREE_THIS_VOLATILE (srcvar))
8810 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8812 /* With memcpy, it is possible to bypass aliasing rules, so without
8813 this check i.e. execute/20060930-2.c would be misoptimized, because
8814 it use conflicting alias set to hold argument for the memcpy call.
8815 This check is probably unnecessary with -fno-strict-aliasing.
8816 Similarly for destvar. See also PR29286. */
8817 if (!var_decl_component_p (srcvar)
8818 /* Accept: memcpy (*char_var, "test", 1); that simplify
8820 || is_gimple_min_invariant (srcvar)
8821 || readonly_data_expr (src))
8824 destvar = build_fold_indirect_ref (dest);
8825 if (TREE_THIS_VOLATILE (destvar))
8827 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8829 if (!var_decl_component_p (destvar))
8832 if (srctype == desttype
8833 || (gimple_in_ssa_p (cfun)
8834 && useless_type_conversion_p (desttype, srctype)))
8836 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8837 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8838 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8839 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8840 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8842 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8843 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8849 if (endp == 0 || endp == 3)
8850 return omit_one_operand (type, dest, expr);
8856 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8859 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8860 dest = fold_convert (type, dest);
8862 dest = omit_one_operand (type, dest, expr);
8866 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8867 If LEN is not NULL, it represents the length of the string to be
8868 copied. Return NULL_TREE if no simplification can be made. */
8871 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8875 if (!validate_arg (dest, POINTER_TYPE)
8876 || !validate_arg (src, POINTER_TYPE))
8879 /* If SRC and DEST are the same (and not volatile), return DEST. */
8880 if (operand_equal_p (src, dest, 0))
8881 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8886 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8892 len = c_strlen (src, 1);
8893 if (! len || TREE_SIDE_EFFECTS (len))
8897 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8898 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8899 build_call_expr (fn, 3, dest, src, len));
8902 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8903 If SLEN is not NULL, it represents the length of the source string.
8904 Return NULL_TREE if no simplification can be made. */
8907 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8911 if (!validate_arg (dest, POINTER_TYPE)
8912 || !validate_arg (src, POINTER_TYPE)
8913 || !validate_arg (len, INTEGER_TYPE))
8916 /* If the LEN parameter is zero, return DEST. */
8917 if (integer_zerop (len))
8918 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8920 /* We can't compare slen with len as constants below if len is not a
8922 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8926 slen = c_strlen (src, 1);
8928 /* Now, we must be passed a constant src ptr parameter. */
8929 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8932 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8934 /* We do not support simplification of this case, though we do
8935 support it when expanding trees into RTL. */
8936 /* FIXME: generate a call to __builtin_memset. */
8937 if (tree_int_cst_lt (slen, len))
8940 /* OK transform into builtin memcpy. */
8941 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8944 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8945 build_call_expr (fn, 3, dest, src, len));
8948 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8949 arguments to the call, and TYPE is its return type.
8950 Return NULL_TREE if no simplification can be made. */
8953 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8955 if (!validate_arg (arg1, POINTER_TYPE)
8956 || !validate_arg (arg2, INTEGER_TYPE)
8957 || !validate_arg (len, INTEGER_TYPE))
8963 if (TREE_CODE (arg2) != INTEGER_CST
8964 || !host_integerp (len, 1))
8967 p1 = c_getstr (arg1);
8968 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8974 if (target_char_cast (arg2, &c))
8977 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8980 return build_int_cst (TREE_TYPE (arg1), 0);
8982 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8984 return fold_convert (type, tem);
8990 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8991 Return NULL_TREE if no simplification can be made. */
8994 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8996 const char *p1, *p2;
8998 if (!validate_arg (arg1, POINTER_TYPE)
8999 || !validate_arg (arg2, POINTER_TYPE)
9000 || !validate_arg (len, INTEGER_TYPE))
9003 /* If the LEN parameter is zero, return zero. */
9004 if (integer_zerop (len))
9005 return omit_two_operands (integer_type_node, integer_zero_node,
9008 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9009 if (operand_equal_p (arg1, arg2, 0))
9010 return omit_one_operand (integer_type_node, integer_zero_node, len);
9012 p1 = c_getstr (arg1);
9013 p2 = c_getstr (arg2);
9015 /* If all arguments are constant, and the value of len is not greater
9016 than the lengths of arg1 and arg2, evaluate at compile-time. */
9017 if (host_integerp (len, 1) && p1 && p2
9018 && compare_tree_int (len, strlen (p1) + 1) <= 0
9019 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9021 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9024 return integer_one_node;
9026 return integer_minus_one_node;
9028 return integer_zero_node;
9031 /* If len parameter is one, return an expression corresponding to
9032 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9033 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9035 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9036 tree cst_uchar_ptr_node
9037 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9039 tree ind1 = fold_convert (integer_type_node,
9040 build1 (INDIRECT_REF, cst_uchar_node,
9041 fold_convert (cst_uchar_ptr_node,
9043 tree ind2 = fold_convert (integer_type_node,
9044 build1 (INDIRECT_REF, cst_uchar_node,
9045 fold_convert (cst_uchar_ptr_node,
9047 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9053 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9054 Return NULL_TREE if no simplification can be made. */
9057 fold_builtin_strcmp (tree arg1, tree arg2)
9059 const char *p1, *p2;
9061 if (!validate_arg (arg1, POINTER_TYPE)
9062 || !validate_arg (arg2, POINTER_TYPE))
9065 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9066 if (operand_equal_p (arg1, arg2, 0))
9067 return integer_zero_node;
9069 p1 = c_getstr (arg1);
9070 p2 = c_getstr (arg2);
9074 const int i = strcmp (p1, p2);
9076 return integer_minus_one_node;
9078 return integer_one_node;
9080 return integer_zero_node;
9083 /* If the second arg is "", return *(const unsigned char*)arg1. */
9084 if (p2 && *p2 == '\0')
9086 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9087 tree cst_uchar_ptr_node
9088 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9090 return fold_convert (integer_type_node,
9091 build1 (INDIRECT_REF, cst_uchar_node,
9092 fold_convert (cst_uchar_ptr_node,
9096 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9097 if (p1 && *p1 == '\0')
9099 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9100 tree cst_uchar_ptr_node
9101 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9103 tree temp = fold_convert (integer_type_node,
9104 build1 (INDIRECT_REF, cst_uchar_node,
9105 fold_convert (cst_uchar_ptr_node,
9107 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9113 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9114 Return NULL_TREE if no simplification can be made. */
9117 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9119 const char *p1, *p2;
9121 if (!validate_arg (arg1, POINTER_TYPE)
9122 || !validate_arg (arg2, POINTER_TYPE)
9123 || !validate_arg (len, INTEGER_TYPE))
9126 /* If the LEN parameter is zero, return zero. */
9127 if (integer_zerop (len))
9128 return omit_two_operands (integer_type_node, integer_zero_node,
9131 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9132 if (operand_equal_p (arg1, arg2, 0))
9133 return omit_one_operand (integer_type_node, integer_zero_node, len);
9135 p1 = c_getstr (arg1);
9136 p2 = c_getstr (arg2);
9138 if (host_integerp (len, 1) && p1 && p2)
9140 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9142 return integer_one_node;
9144 return integer_minus_one_node;
9146 return integer_zero_node;
9149 /* If the second arg is "", and the length is greater than zero,
9150 return *(const unsigned char*)arg1. */
9151 if (p2 && *p2 == '\0'
9152 && TREE_CODE (len) == INTEGER_CST
9153 && tree_int_cst_sgn (len) == 1)
9155 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9156 tree cst_uchar_ptr_node
9157 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9159 return fold_convert (integer_type_node,
9160 build1 (INDIRECT_REF, cst_uchar_node,
9161 fold_convert (cst_uchar_ptr_node,
9165 /* If the first arg is "", and the length is greater than zero,
9166 return -*(const unsigned char*)arg2. */
9167 if (p1 && *p1 == '\0'
9168 && TREE_CODE (len) == INTEGER_CST
9169 && tree_int_cst_sgn (len) == 1)
9171 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9172 tree cst_uchar_ptr_node
9173 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9175 tree temp = fold_convert (integer_type_node,
9176 build1 (INDIRECT_REF, cst_uchar_node,
9177 fold_convert (cst_uchar_ptr_node,
9179 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9182 /* If len parameter is one, return an expression corresponding to
9183 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9184 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9186 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9187 tree cst_uchar_ptr_node
9188 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9190 tree ind1 = fold_convert (integer_type_node,
9191 build1 (INDIRECT_REF, cst_uchar_node,
9192 fold_convert (cst_uchar_ptr_node,
9194 tree ind2 = fold_convert (integer_type_node,
9195 build1 (INDIRECT_REF, cst_uchar_node,
9196 fold_convert (cst_uchar_ptr_node,
9198 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9204 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9205 ARG. Return NULL_TREE if no simplification can be made. */
9208 fold_builtin_signbit (tree arg, tree type)
9212 if (!validate_arg (arg, REAL_TYPE))
9215 /* If ARG is a compile-time constant, determine the result. */
9216 if (TREE_CODE (arg) == REAL_CST
9217 && !TREE_OVERFLOW (arg))
9221 c = TREE_REAL_CST (arg);
9222 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9223 return fold_convert (type, temp);
9226 /* If ARG is non-negative, the result is always zero. */
9227 if (tree_expr_nonnegative_p (arg))
9228 return omit_one_operand (type, integer_zero_node, arg);
9230 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9231 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9232 return fold_build2 (LT_EXPR, type, arg,
9233 build_real (TREE_TYPE (arg), dconst0));
9238 /* Fold function call to builtin copysign, copysignf or copysignl with
9239 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9243 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9247 if (!validate_arg (arg1, REAL_TYPE)
9248 || !validate_arg (arg2, REAL_TYPE))
9251 /* copysign(X,X) is X. */
9252 if (operand_equal_p (arg1, arg2, 0))
9253 return fold_convert (type, arg1);
9255 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9256 if (TREE_CODE (arg1) == REAL_CST
9257 && TREE_CODE (arg2) == REAL_CST
9258 && !TREE_OVERFLOW (arg1)
9259 && !TREE_OVERFLOW (arg2))
9261 REAL_VALUE_TYPE c1, c2;
9263 c1 = TREE_REAL_CST (arg1);
9264 c2 = TREE_REAL_CST (arg2);
9265 /* c1.sign := c2.sign. */
9266 real_copysign (&c1, &c2);
9267 return build_real (type, c1);
9270 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9271 Remember to evaluate Y for side-effects. */
9272 if (tree_expr_nonnegative_p (arg2))
9273 return omit_one_operand (type,
9274 fold_build1 (ABS_EXPR, type, arg1),
9277 /* Strip sign changing operations for the first argument. */
9278 tem = fold_strip_sign_ops (arg1);
9280 return build_call_expr (fndecl, 2, tem, arg2);
9285 /* Fold a call to builtin isascii with argument ARG. */
9288 fold_builtin_isascii (tree arg)
9290 if (!validate_arg (arg, INTEGER_TYPE))
9294 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9295 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9296 build_int_cst (NULL_TREE,
9297 ~ (unsigned HOST_WIDE_INT) 0x7f));
9298 return fold_build2 (EQ_EXPR, integer_type_node,
9299 arg, integer_zero_node);
9303 /* Fold a call to builtin toascii with argument ARG. */
9306 fold_builtin_toascii (tree arg)
9308 if (!validate_arg (arg, INTEGER_TYPE))
9311 /* Transform toascii(c) -> (c & 0x7f). */
9312 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9313 build_int_cst (NULL_TREE, 0x7f));
9316 /* Fold a call to builtin isdigit with argument ARG. */
9319 fold_builtin_isdigit (tree arg)
9321 if (!validate_arg (arg, INTEGER_TYPE))
9325 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9326 /* According to the C standard, isdigit is unaffected by locale.
9327 However, it definitely is affected by the target character set. */
9328 unsigned HOST_WIDE_INT target_digit0
9329 = lang_hooks.to_target_charset ('0');
9331 if (target_digit0 == 0)
9334 arg = fold_convert (unsigned_type_node, arg);
9335 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9336 build_int_cst (unsigned_type_node, target_digit0));
9337 return fold_build2 (LE_EXPR, integer_type_node, arg,
9338 build_int_cst (unsigned_type_node, 9));
9342 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9345 fold_builtin_fabs (tree arg, tree type)
9347 if (!validate_arg (arg, REAL_TYPE))
9350 arg = fold_convert (type, arg);
9351 if (TREE_CODE (arg) == REAL_CST)
9352 return fold_abs_const (arg, type);
9353 return fold_build1 (ABS_EXPR, type, arg);
9356 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9359 fold_builtin_abs (tree arg, tree type)
9361 if (!validate_arg (arg, INTEGER_TYPE))
9364 arg = fold_convert (type, arg);
9365 if (TREE_CODE (arg) == INTEGER_CST)
9366 return fold_abs_const (arg, type);
9367 return fold_build1 (ABS_EXPR, type, arg);
9370 /* Fold a call to builtin fmin or fmax. */
9373 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9375 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9377 /* Calculate the result when the argument is a constant. */
9378 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9383 /* If either argument is NaN, return the other one. Avoid the
9384 transformation if we get (and honor) a signalling NaN. Using
9385 omit_one_operand() ensures we create a non-lvalue. */
9386 if (TREE_CODE (arg0) == REAL_CST
9387 && real_isnan (&TREE_REAL_CST (arg0))
9388 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9389 || ! TREE_REAL_CST (arg0).signalling))
9390 return omit_one_operand (type, arg1, arg0);
9391 if (TREE_CODE (arg1) == REAL_CST
9392 && real_isnan (&TREE_REAL_CST (arg1))
9393 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9394 || ! TREE_REAL_CST (arg1).signalling))
9395 return omit_one_operand (type, arg0, arg1);
9397 /* Transform fmin/fmax(x,x) -> x. */
9398 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9399 return omit_one_operand (type, arg0, arg1);
9401 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9402 functions to return the numeric arg if the other one is NaN.
9403 These tree codes don't honor that, so only transform if
9404 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9405 handled, so we don't have to worry about it either. */
9406 if (flag_finite_math_only)
9407 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9408 fold_convert (type, arg0),
9409 fold_convert (type, arg1));
9414 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9417 fold_builtin_carg (tree arg, tree type)
9419 if (validate_arg (arg, COMPLEX_TYPE))
9421 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9425 tree new_arg = builtin_save_expr (arg);
9426 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9427 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9428 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9435 /* Fold a call to builtin logb/ilogb. */
9438 fold_builtin_logb (tree arg, tree rettype)
9440 if (! validate_arg (arg, REAL_TYPE))
9445 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9447 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9453 /* If arg is Inf or NaN and we're logb, return it. */
9454 if (TREE_CODE (rettype) == REAL_TYPE)
9455 return fold_convert (rettype, arg);
9456 /* Fall through... */
9458 /* Zero may set errno and/or raise an exception for logb, also
9459 for ilogb we don't know FP_ILOGB0. */
9462 /* For normal numbers, proceed iff radix == 2. In GCC,
9463 normalized significands are in the range [0.5, 1.0). We
9464 want the exponent as if they were [1.0, 2.0) so get the
9465 exponent and subtract 1. */
9466 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9467 return fold_convert (rettype, build_int_cst (NULL_TREE,
9468 REAL_EXP (value)-1));
9476 /* Fold a call to builtin significand, if radix == 2. */
9479 fold_builtin_significand (tree arg, tree rettype)
9481 if (! validate_arg (arg, REAL_TYPE))
9486 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9488 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9495 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9496 return fold_convert (rettype, arg);
9498 /* For normal numbers, proceed iff radix == 2. */
9499 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9501 REAL_VALUE_TYPE result = *value;
9502 /* In GCC, normalized significands are in the range [0.5,
9503 1.0). We want them to be [1.0, 2.0) so set the
9505 SET_REAL_EXP (&result, 1);
9506 return build_real (rettype, result);
9515 /* Fold a call to builtin frexp, we can assume the base is 2. */
9518 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9520 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9525 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9528 arg1 = build_fold_indirect_ref (arg1);
9530 /* Proceed if a valid pointer type was passed in. */
9531 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9533 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9539 /* For +-0, return (*exp = 0, +-0). */
9540 exp = integer_zero_node;
9545 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9546 return omit_one_operand (rettype, arg0, arg1);
9549 /* Since the frexp function always expects base 2, and in
9550 GCC normalized significands are already in the range
9551 [0.5, 1.0), we have exactly what frexp wants. */
9552 REAL_VALUE_TYPE frac_rvt = *value;
9553 SET_REAL_EXP (&frac_rvt, 0);
9554 frac = build_real (rettype, frac_rvt);
9555 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9562 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9563 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9564 TREE_SIDE_EFFECTS (arg1) = 1;
9565 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9571 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9572 then we can assume the base is two. If it's false, then we have to
9573 check the mode of the TYPE parameter in certain cases. */
9576 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9578 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9583 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9584 if (real_zerop (arg0) || integer_zerop (arg1)
9585 || (TREE_CODE (arg0) == REAL_CST
9586 && !real_isfinite (&TREE_REAL_CST (arg0))))
9587 return omit_one_operand (type, arg0, arg1);
9589 /* If both arguments are constant, then try to evaluate it. */
9590 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9591 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9592 && host_integerp (arg1, 0))
9594 /* Bound the maximum adjustment to twice the range of the
9595 mode's valid exponents. Use abs to ensure the range is
9596 positive as a sanity check. */
9597 const long max_exp_adj = 2 *
9598 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9599 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9601 /* Get the user-requested adjustment. */
9602 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9604 /* The requested adjustment must be inside this range. This
9605 is a preliminary cap to avoid things like overflow, we
9606 may still fail to compute the result for other reasons. */
9607 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9609 REAL_VALUE_TYPE initial_result;
9611 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9613 /* Ensure we didn't overflow. */
9614 if (! real_isinf (&initial_result))
9616 const REAL_VALUE_TYPE trunc_result
9617 = real_value_truncate (TYPE_MODE (type), initial_result);
9619 /* Only proceed if the target mode can hold the
9621 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9622 return build_real (type, trunc_result);
9631 /* Fold a call to builtin modf. */
9634 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9636 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9641 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9644 arg1 = build_fold_indirect_ref (arg1);
9646 /* Proceed if a valid pointer type was passed in. */
9647 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9649 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9650 REAL_VALUE_TYPE trunc, frac;
9656 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9657 trunc = frac = *value;
9660 /* For +-Inf, return (*arg1 = arg0, +-0). */
9662 frac.sign = value->sign;
9666 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9667 real_trunc (&trunc, VOIDmode, value);
9668 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9669 /* If the original number was negative and already
9670 integral, then the fractional part is -0.0. */
9671 if (value->sign && frac.cl == rvc_zero)
9672 frac.sign = value->sign;
9676 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9677 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9678 build_real (rettype, trunc));
9679 TREE_SIDE_EFFECTS (arg1) = 1;
9680 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9681 build_real (rettype, frac));
9687 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9688 ARG is the argument for the call. */
9691 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9693 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9696 if (!validate_arg (arg, REAL_TYPE))
9699 switch (builtin_index)
9701 case BUILT_IN_ISINF:
9702 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9703 return omit_one_operand (type, integer_zero_node, arg);
9705 if (TREE_CODE (arg) == REAL_CST)
9707 r = TREE_REAL_CST (arg);
9708 if (real_isinf (&r))
9709 return real_compare (GT_EXPR, &r, &dconst0)
9710 ? integer_one_node : integer_minus_one_node;
9712 return integer_zero_node;
9717 case BUILT_IN_ISINF_SIGN:
9719 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9720 /* In a boolean context, GCC will fold the inner COND_EXPR to
9721 1. So e.g. "if (isinf_sign(x))" would be folded to just
9722 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9723 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9724 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9725 tree tmp = NULL_TREE;
9727 arg = builtin_save_expr (arg);
9729 if (signbit_fn && isinf_fn)
9731 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
9732 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
9734 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
9735 signbit_call, integer_zero_node);
9736 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
9737 isinf_call, integer_zero_node);
9739 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
9740 integer_minus_one_node, integer_one_node);
9741 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
9748 case BUILT_IN_ISFINITE:
9749 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9750 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9751 return omit_one_operand (type, integer_one_node, arg);
9753 if (TREE_CODE (arg) == REAL_CST)
9755 r = TREE_REAL_CST (arg);
9756 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9761 case BUILT_IN_ISNAN:
9762 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9763 return omit_one_operand (type, integer_zero_node, arg);
9765 if (TREE_CODE (arg) == REAL_CST)
9767 r = TREE_REAL_CST (arg);
9768 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9771 arg = builtin_save_expr (arg);
9772 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9779 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9780 This builtin will generate code to return the appropriate floating
9781 point classification depending on the value of the floating point
9782 number passed in. The possible return values must be supplied as
9783 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9784 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9785 one floating point argument which is "type generic". */
9788 fold_builtin_fpclassify (tree exp)
9790 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9791 arg, type, res, tmp;
9792 enum machine_mode mode;
9796 /* Verify the required arguments in the original call. */
9797 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9798 INTEGER_TYPE, INTEGER_TYPE,
9799 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9802 fp_nan = CALL_EXPR_ARG (exp, 0);
9803 fp_infinite = CALL_EXPR_ARG (exp, 1);
9804 fp_normal = CALL_EXPR_ARG (exp, 2);
9805 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9806 fp_zero = CALL_EXPR_ARG (exp, 4);
9807 arg = CALL_EXPR_ARG (exp, 5);
9808 type = TREE_TYPE (arg);
9809 mode = TYPE_MODE (type);
9810 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
9814 (fabs(x) == Inf ? FP_INFINITE :
9815 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9816 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9818 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9819 build_real (type, dconst0));
9820 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
9822 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9823 real_from_string (&r, buf);
9824 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
9825 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
9827 if (HONOR_INFINITIES (mode))
9830 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9831 build_real (type, r));
9832 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
9835 if (HONOR_NANS (mode))
9837 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
9838 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
9844 /* Fold a call to an unordered comparison function such as
9845 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9846 being called and ARG0 and ARG1 are the arguments for the call.
9847 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9848 the opposite of the desired result. UNORDERED_CODE is used
9849 for modes that can hold NaNs and ORDERED_CODE is used for
9853 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9854 enum tree_code unordered_code,
9855 enum tree_code ordered_code)
9857 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9858 enum tree_code code;
9860 enum tree_code code0, code1;
9861 tree cmp_type = NULL_TREE;
9863 type0 = TREE_TYPE (arg0);
9864 type1 = TREE_TYPE (arg1);
9866 code0 = TREE_CODE (type0);
9867 code1 = TREE_CODE (type1);
9869 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9870 /* Choose the wider of two real types. */
9871 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9873 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9875 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9878 arg0 = fold_convert (cmp_type, arg0);
9879 arg1 = fold_convert (cmp_type, arg1);
9881 if (unordered_code == UNORDERED_EXPR)
9883 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9884 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9885 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9888 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9890 return fold_build1 (TRUTH_NOT_EXPR, type,
9891 fold_build2 (code, type, arg0, arg1));
9894 /* Fold a call to built-in function FNDECL with 0 arguments.
9895 IGNORE is true if the result of the function call is ignored. This
9896 function returns NULL_TREE if no simplification was possible. */
9899 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9901 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9902 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9905 CASE_FLT_FN (BUILT_IN_INF):
9906 case BUILT_IN_INFD32:
9907 case BUILT_IN_INFD64:
9908 case BUILT_IN_INFD128:
9909 return fold_builtin_inf (type, true);
9911 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9912 return fold_builtin_inf (type, false);
9914 case BUILT_IN_CLASSIFY_TYPE:
9915 return fold_builtin_classify_type (NULL_TREE);
9923 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9924 IGNORE is true if the result of the function call is ignored. This
9925 function returns NULL_TREE if no simplification was possible. */
9928 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9930 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9931 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9935 case BUILT_IN_CONSTANT_P:
9937 tree val = fold_builtin_constant_p (arg0);
9939 /* Gimplification will pull the CALL_EXPR for the builtin out of
9940 an if condition. When not optimizing, we'll not CSE it back.
9941 To avoid link error types of regressions, return false now. */
9942 if (!val && !optimize)
9943 val = integer_zero_node;
9948 case BUILT_IN_CLASSIFY_TYPE:
9949 return fold_builtin_classify_type (arg0);
9951 case BUILT_IN_STRLEN:
9952 return fold_builtin_strlen (arg0);
9954 CASE_FLT_FN (BUILT_IN_FABS):
9955 return fold_builtin_fabs (arg0, type);
9959 case BUILT_IN_LLABS:
9960 case BUILT_IN_IMAXABS:
9961 return fold_builtin_abs (arg0, type);
9963 CASE_FLT_FN (BUILT_IN_CONJ):
9964 if (validate_arg (arg0, COMPLEX_TYPE))
9965 return fold_build1 (CONJ_EXPR, type, arg0);
9968 CASE_FLT_FN (BUILT_IN_CREAL):
9969 if (validate_arg (arg0, COMPLEX_TYPE))
9970 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
9973 CASE_FLT_FN (BUILT_IN_CIMAG):
9974 if (validate_arg (arg0, COMPLEX_TYPE))
9975 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9978 CASE_FLT_FN (BUILT_IN_CCOS):
9979 CASE_FLT_FN (BUILT_IN_CCOSH):
9980 /* These functions are "even", i.e. f(x) == f(-x). */
9981 if (validate_arg (arg0, COMPLEX_TYPE))
9983 tree narg = fold_strip_sign_ops (arg0);
9985 return build_call_expr (fndecl, 1, narg);
9989 CASE_FLT_FN (BUILT_IN_CABS):
9990 return fold_builtin_cabs (arg0, type, fndecl);
9992 CASE_FLT_FN (BUILT_IN_CARG):
9993 return fold_builtin_carg (arg0, type);
9995 CASE_FLT_FN (BUILT_IN_SQRT):
9996 return fold_builtin_sqrt (arg0, type);
9998 CASE_FLT_FN (BUILT_IN_CBRT):
9999 return fold_builtin_cbrt (arg0, type);
10001 CASE_FLT_FN (BUILT_IN_ASIN):
10002 if (validate_arg (arg0, REAL_TYPE))
10003 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10004 &dconstm1, &dconst1, true);
10007 CASE_FLT_FN (BUILT_IN_ACOS):
10008 if (validate_arg (arg0, REAL_TYPE))
10009 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10010 &dconstm1, &dconst1, true);
10013 CASE_FLT_FN (BUILT_IN_ATAN):
10014 if (validate_arg (arg0, REAL_TYPE))
10015 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10018 CASE_FLT_FN (BUILT_IN_ASINH):
10019 if (validate_arg (arg0, REAL_TYPE))
10020 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10023 CASE_FLT_FN (BUILT_IN_ACOSH):
10024 if (validate_arg (arg0, REAL_TYPE))
10025 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10026 &dconst1, NULL, true);
10029 CASE_FLT_FN (BUILT_IN_ATANH):
10030 if (validate_arg (arg0, REAL_TYPE))
10031 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10032 &dconstm1, &dconst1, false);
10035 CASE_FLT_FN (BUILT_IN_SIN):
10036 if (validate_arg (arg0, REAL_TYPE))
10037 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10040 CASE_FLT_FN (BUILT_IN_COS):
10041 return fold_builtin_cos (arg0, type, fndecl);
10044 CASE_FLT_FN (BUILT_IN_TAN):
10045 return fold_builtin_tan (arg0, type);
10047 CASE_FLT_FN (BUILT_IN_CEXP):
10048 return fold_builtin_cexp (arg0, type);
10050 CASE_FLT_FN (BUILT_IN_CEXPI):
10051 if (validate_arg (arg0, REAL_TYPE))
10052 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10055 CASE_FLT_FN (BUILT_IN_SINH):
10056 if (validate_arg (arg0, REAL_TYPE))
10057 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10060 CASE_FLT_FN (BUILT_IN_COSH):
10061 return fold_builtin_cosh (arg0, type, fndecl);
10063 CASE_FLT_FN (BUILT_IN_TANH):
10064 if (validate_arg (arg0, REAL_TYPE))
10065 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10068 CASE_FLT_FN (BUILT_IN_ERF):
10069 if (validate_arg (arg0, REAL_TYPE))
10070 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10073 CASE_FLT_FN (BUILT_IN_ERFC):
10074 if (validate_arg (arg0, REAL_TYPE))
10075 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10078 CASE_FLT_FN (BUILT_IN_TGAMMA):
10079 if (validate_arg (arg0, REAL_TYPE))
10080 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10083 CASE_FLT_FN (BUILT_IN_EXP):
10084 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10086 CASE_FLT_FN (BUILT_IN_EXP2):
10087 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10089 CASE_FLT_FN (BUILT_IN_EXP10):
10090 CASE_FLT_FN (BUILT_IN_POW10):
10091 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10093 CASE_FLT_FN (BUILT_IN_EXPM1):
10094 if (validate_arg (arg0, REAL_TYPE))
10095 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10098 CASE_FLT_FN (BUILT_IN_LOG):
10099 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10101 CASE_FLT_FN (BUILT_IN_LOG2):
10102 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10104 CASE_FLT_FN (BUILT_IN_LOG10):
10105 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10107 CASE_FLT_FN (BUILT_IN_LOG1P):
10108 if (validate_arg (arg0, REAL_TYPE))
10109 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10110 &dconstm1, NULL, false);
10113 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10114 CASE_FLT_FN (BUILT_IN_J0):
10115 if (validate_arg (arg0, REAL_TYPE))
10116 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10120 CASE_FLT_FN (BUILT_IN_J1):
10121 if (validate_arg (arg0, REAL_TYPE))
10122 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10126 CASE_FLT_FN (BUILT_IN_Y0):
10127 if (validate_arg (arg0, REAL_TYPE))
10128 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10129 &dconst0, NULL, false);
10132 CASE_FLT_FN (BUILT_IN_Y1):
10133 if (validate_arg (arg0, REAL_TYPE))
10134 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10135 &dconst0, NULL, false);
10139 CASE_FLT_FN (BUILT_IN_NAN):
10140 case BUILT_IN_NAND32:
10141 case BUILT_IN_NAND64:
10142 case BUILT_IN_NAND128:
10143 return fold_builtin_nan (arg0, type, true);
10145 CASE_FLT_FN (BUILT_IN_NANS):
10146 return fold_builtin_nan (arg0, type, false);
10148 CASE_FLT_FN (BUILT_IN_FLOOR):
10149 return fold_builtin_floor (fndecl, arg0);
10151 CASE_FLT_FN (BUILT_IN_CEIL):
10152 return fold_builtin_ceil (fndecl, arg0);
10154 CASE_FLT_FN (BUILT_IN_TRUNC):
10155 return fold_builtin_trunc (fndecl, arg0);
10157 CASE_FLT_FN (BUILT_IN_ROUND):
10158 return fold_builtin_round (fndecl, arg0);
10160 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10161 CASE_FLT_FN (BUILT_IN_RINT):
10162 return fold_trunc_transparent_mathfn (fndecl, arg0);
10164 CASE_FLT_FN (BUILT_IN_LCEIL):
10165 CASE_FLT_FN (BUILT_IN_LLCEIL):
10166 CASE_FLT_FN (BUILT_IN_LFLOOR):
10167 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10168 CASE_FLT_FN (BUILT_IN_LROUND):
10169 CASE_FLT_FN (BUILT_IN_LLROUND):
10170 return fold_builtin_int_roundingfn (fndecl, arg0);
10172 CASE_FLT_FN (BUILT_IN_LRINT):
10173 CASE_FLT_FN (BUILT_IN_LLRINT):
10174 return fold_fixed_mathfn (fndecl, arg0);
10176 case BUILT_IN_BSWAP32:
10177 case BUILT_IN_BSWAP64:
10178 return fold_builtin_bswap (fndecl, arg0);
10180 CASE_INT_FN (BUILT_IN_FFS):
10181 CASE_INT_FN (BUILT_IN_CLZ):
10182 CASE_INT_FN (BUILT_IN_CTZ):
10183 CASE_INT_FN (BUILT_IN_POPCOUNT):
10184 CASE_INT_FN (BUILT_IN_PARITY):
10185 return fold_builtin_bitop (fndecl, arg0);
10187 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10188 return fold_builtin_signbit (arg0, type);
10190 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10191 return fold_builtin_significand (arg0, type);
10193 CASE_FLT_FN (BUILT_IN_ILOGB):
10194 CASE_FLT_FN (BUILT_IN_LOGB):
10195 return fold_builtin_logb (arg0, type);
10197 case BUILT_IN_ISASCII:
10198 return fold_builtin_isascii (arg0);
10200 case BUILT_IN_TOASCII:
10201 return fold_builtin_toascii (arg0);
10203 case BUILT_IN_ISDIGIT:
10204 return fold_builtin_isdigit (arg0);
10206 CASE_FLT_FN (BUILT_IN_FINITE):
10207 case BUILT_IN_FINITED32:
10208 case BUILT_IN_FINITED64:
10209 case BUILT_IN_FINITED128:
10210 case BUILT_IN_ISFINITE:
10211 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10213 CASE_FLT_FN (BUILT_IN_ISINF):
10214 case BUILT_IN_ISINFD32:
10215 case BUILT_IN_ISINFD64:
10216 case BUILT_IN_ISINFD128:
10217 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10219 case BUILT_IN_ISINF_SIGN:
10220 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10222 CASE_FLT_FN (BUILT_IN_ISNAN):
10223 case BUILT_IN_ISNAND32:
10224 case BUILT_IN_ISNAND64:
10225 case BUILT_IN_ISNAND128:
10226 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10228 case BUILT_IN_PRINTF:
10229 case BUILT_IN_PRINTF_UNLOCKED:
10230 case BUILT_IN_VPRINTF:
10231 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10241 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10242 IGNORE is true if the result of the function call is ignored. This
10243 function returns NULL_TREE if no simplification was possible. */
10246 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10248 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10249 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10253 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10254 CASE_FLT_FN (BUILT_IN_JN):
10255 if (validate_arg (arg0, INTEGER_TYPE)
10256 && validate_arg (arg1, REAL_TYPE))
10257 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10260 CASE_FLT_FN (BUILT_IN_YN):
10261 if (validate_arg (arg0, INTEGER_TYPE)
10262 && validate_arg (arg1, REAL_TYPE))
10263 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10267 CASE_FLT_FN (BUILT_IN_DREM):
10268 CASE_FLT_FN (BUILT_IN_REMAINDER):
10269 if (validate_arg (arg0, REAL_TYPE)
10270 && validate_arg(arg1, REAL_TYPE))
10271 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10274 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10275 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10276 if (validate_arg (arg0, REAL_TYPE)
10277 && validate_arg(arg1, POINTER_TYPE))
10278 return do_mpfr_lgamma_r (arg0, arg1, type);
10282 CASE_FLT_FN (BUILT_IN_ATAN2):
10283 if (validate_arg (arg0, REAL_TYPE)
10284 && validate_arg(arg1, REAL_TYPE))
10285 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10288 CASE_FLT_FN (BUILT_IN_FDIM):
10289 if (validate_arg (arg0, REAL_TYPE)
10290 && validate_arg(arg1, REAL_TYPE))
10291 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10294 CASE_FLT_FN (BUILT_IN_HYPOT):
10295 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10297 CASE_FLT_FN (BUILT_IN_LDEXP):
10298 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10299 CASE_FLT_FN (BUILT_IN_SCALBN):
10300 CASE_FLT_FN (BUILT_IN_SCALBLN):
10301 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10303 CASE_FLT_FN (BUILT_IN_FREXP):
10304 return fold_builtin_frexp (arg0, arg1, type);
10306 CASE_FLT_FN (BUILT_IN_MODF):
10307 return fold_builtin_modf (arg0, arg1, type);
10309 case BUILT_IN_BZERO:
10310 return fold_builtin_bzero (arg0, arg1, ignore);
10312 case BUILT_IN_FPUTS:
10313 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10315 case BUILT_IN_FPUTS_UNLOCKED:
10316 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10318 case BUILT_IN_STRSTR:
10319 return fold_builtin_strstr (arg0, arg1, type);
10321 case BUILT_IN_STRCAT:
10322 return fold_builtin_strcat (arg0, arg1);
10324 case BUILT_IN_STRSPN:
10325 return fold_builtin_strspn (arg0, arg1);
10327 case BUILT_IN_STRCSPN:
10328 return fold_builtin_strcspn (arg0, arg1);
10330 case BUILT_IN_STRCHR:
10331 case BUILT_IN_INDEX:
10332 return fold_builtin_strchr (arg0, arg1, type);
10334 case BUILT_IN_STRRCHR:
10335 case BUILT_IN_RINDEX:
10336 return fold_builtin_strrchr (arg0, arg1, type);
10338 case BUILT_IN_STRCPY:
10339 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10341 case BUILT_IN_STRCMP:
10342 return fold_builtin_strcmp (arg0, arg1);
10344 case BUILT_IN_STRPBRK:
10345 return fold_builtin_strpbrk (arg0, arg1, type);
10347 case BUILT_IN_EXPECT:
10348 return fold_builtin_expect (arg0, arg1);
10350 CASE_FLT_FN (BUILT_IN_POW):
10351 return fold_builtin_pow (fndecl, arg0, arg1, type);
10353 CASE_FLT_FN (BUILT_IN_POWI):
10354 return fold_builtin_powi (fndecl, arg0, arg1, type);
10356 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10357 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10359 CASE_FLT_FN (BUILT_IN_FMIN):
10360 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10362 CASE_FLT_FN (BUILT_IN_FMAX):
10363 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10365 case BUILT_IN_ISGREATER:
10366 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10367 case BUILT_IN_ISGREATEREQUAL:
10368 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10369 case BUILT_IN_ISLESS:
10370 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10371 case BUILT_IN_ISLESSEQUAL:
10372 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10373 case BUILT_IN_ISLESSGREATER:
10374 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10375 case BUILT_IN_ISUNORDERED:
10376 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10379 /* We do the folding for va_start in the expander. */
10380 case BUILT_IN_VA_START:
10383 case BUILT_IN_SPRINTF:
10384 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10386 case BUILT_IN_OBJECT_SIZE:
10387 return fold_builtin_object_size (arg0, arg1);
10389 case BUILT_IN_PRINTF:
10390 case BUILT_IN_PRINTF_UNLOCKED:
10391 case BUILT_IN_VPRINTF:
10392 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10394 case BUILT_IN_PRINTF_CHK:
10395 case BUILT_IN_VPRINTF_CHK:
10396 if (!validate_arg (arg0, INTEGER_TYPE)
10397 || TREE_SIDE_EFFECTS (arg0))
10400 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10403 case BUILT_IN_FPRINTF:
10404 case BUILT_IN_FPRINTF_UNLOCKED:
10405 case BUILT_IN_VFPRINTF:
10406 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10415 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10416 and ARG2. IGNORE is true if the result of the function call is ignored.
10417 This function returns NULL_TREE if no simplification was possible. */
10420 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10422 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10423 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10427 CASE_FLT_FN (BUILT_IN_SINCOS):
10428 return fold_builtin_sincos (arg0, arg1, arg2);
10430 CASE_FLT_FN (BUILT_IN_FMA):
10431 if (validate_arg (arg0, REAL_TYPE)
10432 && validate_arg(arg1, REAL_TYPE)
10433 && validate_arg(arg2, REAL_TYPE))
10434 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10437 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10438 CASE_FLT_FN (BUILT_IN_REMQUO):
10439 if (validate_arg (arg0, REAL_TYPE)
10440 && validate_arg(arg1, REAL_TYPE)
10441 && validate_arg(arg2, POINTER_TYPE))
10442 return do_mpfr_remquo (arg0, arg1, arg2);
10446 case BUILT_IN_MEMSET:
10447 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10449 case BUILT_IN_BCOPY:
10450 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10452 case BUILT_IN_MEMCPY:
10453 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10455 case BUILT_IN_MEMPCPY:
10456 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10458 case BUILT_IN_MEMMOVE:
10459 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10461 case BUILT_IN_STRNCAT:
10462 return fold_builtin_strncat (arg0, arg1, arg2);
10464 case BUILT_IN_STRNCPY:
10465 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10467 case BUILT_IN_STRNCMP:
10468 return fold_builtin_strncmp (arg0, arg1, arg2);
10470 case BUILT_IN_MEMCHR:
10471 return fold_builtin_memchr (arg0, arg1, arg2, type);
10473 case BUILT_IN_BCMP:
10474 case BUILT_IN_MEMCMP:
10475 return fold_builtin_memcmp (arg0, arg1, arg2);;
10477 case BUILT_IN_SPRINTF:
10478 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10480 case BUILT_IN_STRCPY_CHK:
10481 case BUILT_IN_STPCPY_CHK:
10482 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10485 case BUILT_IN_STRCAT_CHK:
10486 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10488 case BUILT_IN_PRINTF_CHK:
10489 case BUILT_IN_VPRINTF_CHK:
10490 if (!validate_arg (arg0, INTEGER_TYPE)
10491 || TREE_SIDE_EFFECTS (arg0))
10494 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10497 case BUILT_IN_FPRINTF:
10498 case BUILT_IN_FPRINTF_UNLOCKED:
10499 case BUILT_IN_VFPRINTF:
10500 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10502 case BUILT_IN_FPRINTF_CHK:
10503 case BUILT_IN_VFPRINTF_CHK:
10504 if (!validate_arg (arg1, INTEGER_TYPE)
10505 || TREE_SIDE_EFFECTS (arg1))
10508 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10517 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10518 ARG2, and ARG3. IGNORE is true if the result of the function call is
10519 ignored. This function returns NULL_TREE if no simplification was
10523 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10526 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10530 case BUILT_IN_MEMCPY_CHK:
10531 case BUILT_IN_MEMPCPY_CHK:
10532 case BUILT_IN_MEMMOVE_CHK:
10533 case BUILT_IN_MEMSET_CHK:
10534 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10536 DECL_FUNCTION_CODE (fndecl));
10538 case BUILT_IN_STRNCPY_CHK:
10539 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10541 case BUILT_IN_STRNCAT_CHK:
10542 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10544 case BUILT_IN_FPRINTF_CHK:
10545 case BUILT_IN_VFPRINTF_CHK:
10546 if (!validate_arg (arg1, INTEGER_TYPE)
10547 || TREE_SIDE_EFFECTS (arg1))
10550 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10560 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10561 arguments, where NARGS <= 4. IGNORE is true if the result of the
10562 function call is ignored. This function returns NULL_TREE if no
10563 simplification was possible. Note that this only folds builtins with
10564 fixed argument patterns. Foldings that do varargs-to-varargs
10565 transformations, or that match calls with more than 4 arguments,
10566 need to be handled with fold_builtin_varargs instead. */
10568 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10571 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10573 tree ret = NULL_TREE;
10578 ret = fold_builtin_0 (fndecl, ignore);
10581 ret = fold_builtin_1 (fndecl, args[0], ignore);
10584 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10587 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10590 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10598 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10599 TREE_NO_WARNING (ret) = 1;
10605 /* Builtins with folding operations that operate on "..." arguments
10606 need special handling; we need to store the arguments in a convenient
10607 data structure before attempting any folding. Fortunately there are
10608 only a few builtins that fall into this category. FNDECL is the
10609 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10610 result of the function call is ignored. */
10613 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10615 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10616 tree ret = NULL_TREE;
10620 case BUILT_IN_SPRINTF_CHK:
10621 case BUILT_IN_VSPRINTF_CHK:
10622 ret = fold_builtin_sprintf_chk (exp, fcode);
10625 case BUILT_IN_SNPRINTF_CHK:
10626 case BUILT_IN_VSNPRINTF_CHK:
10627 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10630 case BUILT_IN_FPCLASSIFY:
10631 ret = fold_builtin_fpclassify (exp);
10639 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10640 TREE_NO_WARNING (ret) = 1;
10646 /* A wrapper function for builtin folding that prevents warnings for
10647 "statement without effect" and the like, caused by removing the
10648 call node earlier than the warning is generated. */
10651 fold_call_expr (tree exp, bool ignore)
10653 tree ret = NULL_TREE;
10654 tree fndecl = get_callee_fndecl (exp);
10656 && TREE_CODE (fndecl) == FUNCTION_DECL
10657 && DECL_BUILT_IN (fndecl)
10658 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10659 yet. Defer folding until we see all the arguments
10660 (after inlining). */
10661 && !CALL_EXPR_VA_ARG_PACK (exp))
10663 int nargs = call_expr_nargs (exp);
10665 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10666 instead last argument is __builtin_va_arg_pack (). Defer folding
10667 even in that case, until arguments are finalized. */
10668 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10670 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10672 && TREE_CODE (fndecl2) == FUNCTION_DECL
10673 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10674 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10678 /* FIXME: Don't use a list in this interface. */
10679 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10680 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10683 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10685 tree *args = CALL_EXPR_ARGP (exp);
10686 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10689 ret = fold_builtin_varargs (fndecl, exp, ignore);
10692 /* Propagate location information from original call to
10693 expansion of builtin. Otherwise things like
10694 maybe_emit_chk_warning, that operate on the expansion
10695 of a builtin, will use the wrong location information. */
10696 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10698 tree realret = ret;
10699 if (TREE_CODE (ret) == NOP_EXPR)
10700 realret = TREE_OPERAND (ret, 0);
10701 if (CAN_HAVE_LOCATION_P (realret)
10702 && !EXPR_HAS_LOCATION (realret))
10703 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10713 /* Conveniently construct a function call expression. FNDECL names the
10714 function to be called and ARGLIST is a TREE_LIST of arguments. */
10717 build_function_call_expr (tree fndecl, tree arglist)
10719 tree fntype = TREE_TYPE (fndecl);
10720 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10721 int n = list_length (arglist);
10722 tree *argarray = (tree *) alloca (n * sizeof (tree));
10725 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10726 argarray[i] = TREE_VALUE (arglist);
10727 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10730 /* Conveniently construct a function call expression. FNDECL names the
10731 function to be called, N is the number of arguments, and the "..."
10732 parameters are the argument expressions. */
10735 build_call_expr (tree fndecl, int n, ...)
10738 tree fntype = TREE_TYPE (fndecl);
10739 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10740 tree *argarray = (tree *) alloca (n * sizeof (tree));
10744 for (i = 0; i < n; i++)
10745 argarray[i] = va_arg (ap, tree);
10747 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10750 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10751 N arguments are passed in the array ARGARRAY. */
10754 fold_builtin_call_array (tree type,
10759 tree ret = NULL_TREE;
10763 if (TREE_CODE (fn) == ADDR_EXPR)
10765 tree fndecl = TREE_OPERAND (fn, 0);
10766 if (TREE_CODE (fndecl) == FUNCTION_DECL
10767 && DECL_BUILT_IN (fndecl))
10769 /* If last argument is __builtin_va_arg_pack (), arguments to this
10770 function are not finalized yet. Defer folding until they are. */
10771 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10773 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10775 && TREE_CODE (fndecl2) == FUNCTION_DECL
10776 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10777 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10778 return build_call_array (type, fn, n, argarray);
10780 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10782 tree arglist = NULL_TREE;
10783 for (i = n - 1; i >= 0; i--)
10784 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10785 ret = targetm.fold_builtin (fndecl, arglist, false);
10789 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10791 /* First try the transformations that don't require consing up
10793 ret = fold_builtin_n (fndecl, argarray, n, false);
10798 /* If we got this far, we need to build an exp. */
10799 exp = build_call_array (type, fn, n, argarray);
10800 ret = fold_builtin_varargs (fndecl, exp, false);
10801 return ret ? ret : exp;
10805 return build_call_array (type, fn, n, argarray);
10808 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10809 along with N new arguments specified as the "..." parameters. SKIP
10810 is the number of arguments in EXP to be omitted. This function is used
10811 to do varargs-to-varargs transformations. */
10814 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10816 int oldnargs = call_expr_nargs (exp);
10817 int nargs = oldnargs - skip + n;
10818 tree fntype = TREE_TYPE (fndecl);
10819 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10827 buffer = XALLOCAVEC (tree, nargs);
10829 for (i = 0; i < n; i++)
10830 buffer[i] = va_arg (ap, tree);
10832 for (j = skip; j < oldnargs; j++, i++)
10833 buffer[i] = CALL_EXPR_ARG (exp, j);
10836 buffer = CALL_EXPR_ARGP (exp) + skip;
10838 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10841 /* Validate a single argument ARG against a tree code CODE representing
10845 validate_arg (const_tree arg, enum tree_code code)
10849 else if (code == POINTER_TYPE)
10850 return POINTER_TYPE_P (TREE_TYPE (arg));
10851 else if (code == INTEGER_TYPE)
10852 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10853 return code == TREE_CODE (TREE_TYPE (arg));
10856 /* This function validates the types of a function call argument list
10857 against a specified list of tree_codes. If the last specifier is a 0,
10858 that represents an ellipses, otherwise the last specifier must be a
10861 This is the GIMPLE version of validate_arglist. Eventually we want to
10862 completely convert builtins.c to work from GIMPLEs and the tree based
10863 validate_arglist will then be removed. */
10866 validate_gimple_arglist (const_gimple call, ...)
10868 enum tree_code code;
10874 va_start (ap, call);
10879 code = va_arg (ap, enum tree_code);
10883 /* This signifies an ellipses, any further arguments are all ok. */
10887 /* This signifies an endlink, if no arguments remain, return
10888 true, otherwise return false. */
10889 res = (i == gimple_call_num_args (call));
10892 /* If no parameters remain or the parameter's code does not
10893 match the specified code, return false. Otherwise continue
10894 checking any remaining arguments. */
10895 arg = gimple_call_arg (call, i++);
10896 if (!validate_arg (arg, code))
10903 /* We need gotos here since we can only have one VA_CLOSE in a
10911 /* This function validates the types of a function call argument list
10912 against a specified list of tree_codes. If the last specifier is a 0,
10913 that represents an ellipses, otherwise the last specifier must be a
10917 validate_arglist (const_tree callexpr, ...)
10919 enum tree_code code;
10922 const_call_expr_arg_iterator iter;
10925 va_start (ap, callexpr);
10926 init_const_call_expr_arg_iterator (callexpr, &iter);
10930 code = va_arg (ap, enum tree_code);
10934 /* This signifies an ellipses, any further arguments are all ok. */
10938 /* This signifies an endlink, if no arguments remain, return
10939 true, otherwise return false. */
10940 res = !more_const_call_expr_args_p (&iter);
10943 /* If no parameters remain or the parameter's code does not
10944 match the specified code, return false. Otherwise continue
10945 checking any remaining arguments. */
10946 arg = next_const_call_expr_arg (&iter);
10947 if (!validate_arg (arg, code))
10954 /* We need gotos here since we can only have one VA_CLOSE in a
10962 /* Default target-specific builtin expander that does nothing. */
10965 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10966 rtx target ATTRIBUTE_UNUSED,
10967 rtx subtarget ATTRIBUTE_UNUSED,
10968 enum machine_mode mode ATTRIBUTE_UNUSED,
10969 int ignore ATTRIBUTE_UNUSED)
10974 /* Returns true is EXP represents data that would potentially reside
10975 in a readonly section. */
10978 readonly_data_expr (tree exp)
10982 if (TREE_CODE (exp) != ADDR_EXPR)
10985 exp = get_base_address (TREE_OPERAND (exp, 0));
10989 /* Make sure we call decl_readonly_section only for trees it
10990 can handle (since it returns true for everything it doesn't
10992 if (TREE_CODE (exp) == STRING_CST
10993 || TREE_CODE (exp) == CONSTRUCTOR
10994 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10995 return decl_readonly_section (exp, 0);
11000 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11001 to the call, and TYPE is its return type.
11003 Return NULL_TREE if no simplification was possible, otherwise return the
11004 simplified form of the call as a tree.
11006 The simplified form may be a constant or other expression which
11007 computes the same value, but in a more efficient manner (including
11008 calls to other builtin functions).
11010 The call may contain arguments which need to be evaluated, but
11011 which are not useful to determine the result of the call. In
11012 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11013 COMPOUND_EXPR will be an argument which must be evaluated.
11014 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11015 COMPOUND_EXPR in the chain will contain the tree for the simplified
11016 form of the builtin function call. */
11019 fold_builtin_strstr (tree s1, tree s2, tree type)
11021 if (!validate_arg (s1, POINTER_TYPE)
11022 || !validate_arg (s2, POINTER_TYPE))
11027 const char *p1, *p2;
11029 p2 = c_getstr (s2);
11033 p1 = c_getstr (s1);
11036 const char *r = strstr (p1, p2);
11040 return build_int_cst (TREE_TYPE (s1), 0);
11042 /* Return an offset into the constant string argument. */
11043 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11044 s1, size_int (r - p1));
11045 return fold_convert (type, tem);
11048 /* The argument is const char *, and the result is char *, so we need
11049 a type conversion here to avoid a warning. */
11051 return fold_convert (type, s1);
11056 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11060 /* New argument list transforming strstr(s1, s2) to
11061 strchr(s1, s2[0]). */
11062 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11066 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11067 the call, and TYPE is its return type.
11069 Return NULL_TREE if no simplification was possible, otherwise return the
11070 simplified form of the call as a tree.
11072 The simplified form may be a constant or other expression which
11073 computes the same value, but in a more efficient manner (including
11074 calls to other builtin functions).
11076 The call may contain arguments which need to be evaluated, but
11077 which are not useful to determine the result of the call. In
11078 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11079 COMPOUND_EXPR will be an argument which must be evaluated.
11080 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11081 COMPOUND_EXPR in the chain will contain the tree for the simplified
11082 form of the builtin function call. */
11085 fold_builtin_strchr (tree s1, tree s2, tree type)
11087 if (!validate_arg (s1, POINTER_TYPE)
11088 || !validate_arg (s2, INTEGER_TYPE))
11094 if (TREE_CODE (s2) != INTEGER_CST)
11097 p1 = c_getstr (s1);
11104 if (target_char_cast (s2, &c))
11107 r = strchr (p1, c);
11110 return build_int_cst (TREE_TYPE (s1), 0);
11112 /* Return an offset into the constant string argument. */
11113 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11114 s1, size_int (r - p1));
11115 return fold_convert (type, tem);
11121 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11122 the call, and TYPE is its return type.
11124 Return NULL_TREE if no simplification was possible, otherwise return the
11125 simplified form of the call as a tree.
11127 The simplified form may be a constant or other expression which
11128 computes the same value, but in a more efficient manner (including
11129 calls to other builtin functions).
11131 The call may contain arguments which need to be evaluated, but
11132 which are not useful to determine the result of the call. In
11133 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11134 COMPOUND_EXPR will be an argument which must be evaluated.
11135 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11136 COMPOUND_EXPR in the chain will contain the tree for the simplified
11137 form of the builtin function call. */
11140 fold_builtin_strrchr (tree s1, tree s2, tree type)
11142 if (!validate_arg (s1, POINTER_TYPE)
11143 || !validate_arg (s2, INTEGER_TYPE))
11150 if (TREE_CODE (s2) != INTEGER_CST)
11153 p1 = c_getstr (s1);
11160 if (target_char_cast (s2, &c))
11163 r = strrchr (p1, c);
11166 return build_int_cst (TREE_TYPE (s1), 0);
11168 /* Return an offset into the constant string argument. */
11169 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11170 s1, size_int (r - p1));
11171 return fold_convert (type, tem);
11174 if (! integer_zerop (s2))
11177 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11181 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11182 return build_call_expr (fn, 2, s1, s2);
11186 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11187 to the call, and TYPE is its return type.
11189 Return NULL_TREE if no simplification was possible, otherwise return the
11190 simplified form of the call as a tree.
11192 The simplified form may be a constant or other expression which
11193 computes the same value, but in a more efficient manner (including
11194 calls to other builtin functions).
11196 The call may contain arguments which need to be evaluated, but
11197 which are not useful to determine the result of the call. In
11198 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11199 COMPOUND_EXPR will be an argument which must be evaluated.
11200 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11201 COMPOUND_EXPR in the chain will contain the tree for the simplified
11202 form of the builtin function call. */
11205 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11207 if (!validate_arg (s1, POINTER_TYPE)
11208 || !validate_arg (s2, POINTER_TYPE))
11213 const char *p1, *p2;
11215 p2 = c_getstr (s2);
11219 p1 = c_getstr (s1);
11222 const char *r = strpbrk (p1, p2);
11226 return build_int_cst (TREE_TYPE (s1), 0);
11228 /* Return an offset into the constant string argument. */
11229 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11230 s1, size_int (r - p1));
11231 return fold_convert (type, tem);
11235 /* strpbrk(x, "") == NULL.
11236 Evaluate and ignore s1 in case it had side-effects. */
11237 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11240 return NULL_TREE; /* Really call strpbrk. */
11242 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11246 /* New argument list transforming strpbrk(s1, s2) to
11247 strchr(s1, s2[0]). */
11248 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11252 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11255 Return NULL_TREE if no simplification was possible, otherwise return the
11256 simplified form of the call as a tree.
11258 The simplified form may be a constant or other expression which
11259 computes the same value, but in a more efficient manner (including
11260 calls to other builtin functions).
11262 The call may contain arguments which need to be evaluated, but
11263 which are not useful to determine the result of the call. In
11264 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11265 COMPOUND_EXPR will be an argument which must be evaluated.
11266 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11267 COMPOUND_EXPR in the chain will contain the tree for the simplified
11268 form of the builtin function call. */
11271 fold_builtin_strcat (tree dst, tree src)
11273 if (!validate_arg (dst, POINTER_TYPE)
11274 || !validate_arg (src, POINTER_TYPE))
11278 const char *p = c_getstr (src);
11280 /* If the string length is zero, return the dst parameter. */
11281 if (p && *p == '\0')
11288 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11289 arguments to the call.
11291 Return NULL_TREE if no simplification was possible, otherwise return the
11292 simplified form of the call as a tree.
11294 The simplified form may be a constant or other expression which
11295 computes the same value, but in a more efficient manner (including
11296 calls to other builtin functions).
11298 The call may contain arguments which need to be evaluated, but
11299 which are not useful to determine the result of the call. In
11300 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11301 COMPOUND_EXPR will be an argument which must be evaluated.
11302 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11303 COMPOUND_EXPR in the chain will contain the tree for the simplified
11304 form of the builtin function call. */
11307 fold_builtin_strncat (tree dst, tree src, tree len)
11309 if (!validate_arg (dst, POINTER_TYPE)
11310 || !validate_arg (src, POINTER_TYPE)
11311 || !validate_arg (len, INTEGER_TYPE))
11315 const char *p = c_getstr (src);
11317 /* If the requested length is zero, or the src parameter string
11318 length is zero, return the dst parameter. */
11319 if (integer_zerop (len) || (p && *p == '\0'))
11320 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11322 /* If the requested len is greater than or equal to the string
11323 length, call strcat. */
11324 if (TREE_CODE (len) == INTEGER_CST && p
11325 && compare_tree_int (len, strlen (p)) >= 0)
11327 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11329 /* If the replacement _DECL isn't initialized, don't do the
11334 return build_call_expr (fn, 2, dst, src);
11340 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11343 Return NULL_TREE if no simplification was possible, otherwise return the
11344 simplified form of the call as a tree.
11346 The simplified form may be a constant or other expression which
11347 computes the same value, but in a more efficient manner (including
11348 calls to other builtin functions).
11350 The call may contain arguments which need to be evaluated, but
11351 which are not useful to determine the result of the call. In
11352 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11353 COMPOUND_EXPR will be an argument which must be evaluated.
11354 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11355 COMPOUND_EXPR in the chain will contain the tree for the simplified
11356 form of the builtin function call. */
11359 fold_builtin_strspn (tree s1, tree s2)
11361 if (!validate_arg (s1, POINTER_TYPE)
11362 || !validate_arg (s2, POINTER_TYPE))
11366 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11368 /* If both arguments are constants, evaluate at compile-time. */
11371 const size_t r = strspn (p1, p2);
11372 return size_int (r);
11375 /* If either argument is "", return NULL_TREE. */
11376 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11377 /* Evaluate and ignore both arguments in case either one has
11379 return omit_two_operands (integer_type_node, integer_zero_node,
11385 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11388 Return NULL_TREE if no simplification was possible, otherwise return the
11389 simplified form of the call as a tree.
11391 The simplified form may be a constant or other expression which
11392 computes the same value, but in a more efficient manner (including
11393 calls to other builtin functions).
11395 The call may contain arguments which need to be evaluated, but
11396 which are not useful to determine the result of the call. In
11397 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11398 COMPOUND_EXPR will be an argument which must be evaluated.
11399 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11400 COMPOUND_EXPR in the chain will contain the tree for the simplified
11401 form of the builtin function call. */
11404 fold_builtin_strcspn (tree s1, tree s2)
11406 if (!validate_arg (s1, POINTER_TYPE)
11407 || !validate_arg (s2, POINTER_TYPE))
11411 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11413 /* If both arguments are constants, evaluate at compile-time. */
11416 const size_t r = strcspn (p1, p2);
11417 return size_int (r);
11420 /* If the first argument is "", return NULL_TREE. */
11421 if (p1 && *p1 == '\0')
11423 /* Evaluate and ignore argument s2 in case it has
11425 return omit_one_operand (integer_type_node,
11426 integer_zero_node, s2);
11429 /* If the second argument is "", return __builtin_strlen(s1). */
11430 if (p2 && *p2 == '\0')
11432 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11434 /* If the replacement _DECL isn't initialized, don't do the
11439 return build_call_expr (fn, 1, s1);
11445 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11446 to the call. IGNORE is true if the value returned
11447 by the builtin will be ignored. UNLOCKED is true is true if this
11448 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11449 the known length of the string. Return NULL_TREE if no simplification
11453 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11455 /* If we're using an unlocked function, assume the other unlocked
11456 functions exist explicitly. */
11457 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11458 : implicit_built_in_decls[BUILT_IN_FPUTC];
11459 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11460 : implicit_built_in_decls[BUILT_IN_FWRITE];
11462 /* If the return value is used, don't do the transformation. */
11466 /* Verify the arguments in the original call. */
11467 if (!validate_arg (arg0, POINTER_TYPE)
11468 || !validate_arg (arg1, POINTER_TYPE))
11472 len = c_strlen (arg0, 0);
11474 /* Get the length of the string passed to fputs. If the length
11475 can't be determined, punt. */
11477 || TREE_CODE (len) != INTEGER_CST)
11480 switch (compare_tree_int (len, 1))
11482 case -1: /* length is 0, delete the call entirely . */
11483 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11485 case 0: /* length is 1, call fputc. */
11487 const char *p = c_getstr (arg0);
11492 return build_call_expr (fn_fputc, 2,
11493 build_int_cst (NULL_TREE, p[0]), arg1);
11499 case 1: /* length is greater than 1, call fwrite. */
11501 /* If optimizing for size keep fputs. */
11504 /* New argument list transforming fputs(string, stream) to
11505 fwrite(string, 1, len, stream). */
11507 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11512 gcc_unreachable ();
11517 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11518 produced. False otherwise. This is done so that we don't output the error
11519 or warning twice or three times. */
11522 fold_builtin_next_arg (tree exp, bool va_start_p)
11524 tree fntype = TREE_TYPE (current_function_decl);
11525 int nargs = call_expr_nargs (exp);
11528 if (TYPE_ARG_TYPES (fntype) == 0
11529 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11530 == void_type_node))
11532 error ("%<va_start%> used in function with fixed args");
11538 if (va_start_p && (nargs != 2))
11540 error ("wrong number of arguments to function %<va_start%>");
11543 arg = CALL_EXPR_ARG (exp, 1);
11545 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11546 when we checked the arguments and if needed issued a warning. */
11551 /* Evidently an out of date version of <stdarg.h>; can't validate
11552 va_start's second argument, but can still work as intended. */
11553 warning (0, "%<__builtin_next_arg%> called without an argument");
11556 else if (nargs > 1)
11558 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11561 arg = CALL_EXPR_ARG (exp, 0);
11564 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11565 or __builtin_next_arg (0) the first time we see it, after checking
11566 the arguments and if needed issuing a warning. */
11567 if (!integer_zerop (arg))
11569 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11571 /* Strip off all nops for the sake of the comparison. This
11572 is not quite the same as STRIP_NOPS. It does more.
11573 We must also strip off INDIRECT_EXPR for C++ reference
11575 while (CONVERT_EXPR_P (arg)
11576 || TREE_CODE (arg) == INDIRECT_REF)
11577 arg = TREE_OPERAND (arg, 0);
11578 if (arg != last_parm)
11580 /* FIXME: Sometimes with the tree optimizers we can get the
11581 not the last argument even though the user used the last
11582 argument. We just warn and set the arg to be the last
11583 argument so that we will get wrong-code because of
11585 warning (0, "second parameter of %<va_start%> not last named argument");
11587 /* We want to verify the second parameter just once before the tree
11588 optimizers are run and then avoid keeping it in the tree,
11589 as otherwise we could warn even for correct code like:
11590 void foo (int i, ...)
11591 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11593 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11595 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11601 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11602 ORIG may be null if this is a 2-argument call. We don't attempt to
11603 simplify calls with more than 3 arguments.
11605 Return NULL_TREE if no simplification was possible, otherwise return the
11606 simplified form of the call as a tree. If IGNORED is true, it means that
11607 the caller does not use the returned value of the function. */
11610 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11613 const char *fmt_str = NULL;
11615 /* Verify the required arguments in the original call. We deal with two
11616 types of sprintf() calls: 'sprintf (str, fmt)' and
11617 'sprintf (dest, "%s", orig)'. */
11618 if (!validate_arg (dest, POINTER_TYPE)
11619 || !validate_arg (fmt, POINTER_TYPE))
11621 if (orig && !validate_arg (orig, POINTER_TYPE))
11624 /* Check whether the format is a literal string constant. */
11625 fmt_str = c_getstr (fmt);
11626 if (fmt_str == NULL)
11630 retval = NULL_TREE;
11632 if (!init_target_chars ())
11635 /* If the format doesn't contain % args or %%, use strcpy. */
11636 if (strchr (fmt_str, target_percent) == NULL)
11638 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11643 /* Don't optimize sprintf (buf, "abc", ptr++). */
11647 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11648 'format' is known to contain no % formats. */
11649 call = build_call_expr (fn, 2, dest, fmt);
11651 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11654 /* If the format is "%s", use strcpy if the result isn't used. */
11655 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11658 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11663 /* Don't crash on sprintf (str1, "%s"). */
11667 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11670 retval = c_strlen (orig, 1);
11671 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11674 call = build_call_expr (fn, 2, dest, orig);
11677 if (call && retval)
11679 retval = fold_convert
11680 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11682 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11688 /* Expand a call EXP to __builtin_object_size. */
11691 expand_builtin_object_size (tree exp)
11694 int object_size_type;
11695 tree fndecl = get_callee_fndecl (exp);
11697 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11699 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11701 expand_builtin_trap ();
11705 ost = CALL_EXPR_ARG (exp, 1);
11708 if (TREE_CODE (ost) != INTEGER_CST
11709 || tree_int_cst_sgn (ost) < 0
11710 || compare_tree_int (ost, 3) > 0)
11712 error ("%Klast argument of %D is not integer constant between 0 and 3",
11714 expand_builtin_trap ();
11718 object_size_type = tree_low_cst (ost, 0);
11720 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11723 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11724 FCODE is the BUILT_IN_* to use.
11725 Return NULL_RTX if we failed; the caller should emit a normal call,
11726 otherwise try to get the result in TARGET, if convenient (and in
11727 mode MODE if that's convenient). */
11730 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11731 enum built_in_function fcode)
11733 tree dest, src, len, size;
11735 if (!validate_arglist (exp,
11737 fcode == BUILT_IN_MEMSET_CHK
11738 ? INTEGER_TYPE : POINTER_TYPE,
11739 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11742 dest = CALL_EXPR_ARG (exp, 0);
11743 src = CALL_EXPR_ARG (exp, 1);
11744 len = CALL_EXPR_ARG (exp, 2);
11745 size = CALL_EXPR_ARG (exp, 3);
11747 if (! host_integerp (size, 1))
11750 if (host_integerp (len, 1) || integer_all_onesp (size))
11754 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11756 warning (0, "%Kcall to %D will always overflow destination buffer",
11757 exp, get_callee_fndecl (exp));
11762 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11763 mem{cpy,pcpy,move,set} is available. */
11766 case BUILT_IN_MEMCPY_CHK:
11767 fn = built_in_decls[BUILT_IN_MEMCPY];
11769 case BUILT_IN_MEMPCPY_CHK:
11770 fn = built_in_decls[BUILT_IN_MEMPCPY];
11772 case BUILT_IN_MEMMOVE_CHK:
11773 fn = built_in_decls[BUILT_IN_MEMMOVE];
11775 case BUILT_IN_MEMSET_CHK:
11776 fn = built_in_decls[BUILT_IN_MEMSET];
11785 fn = build_call_expr (fn, 3, dest, src, len);
11786 STRIP_TYPE_NOPS (fn);
11787 while (TREE_CODE (fn) == COMPOUND_EXPR)
11789 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11791 fn = TREE_OPERAND (fn, 1);
11793 if (TREE_CODE (fn) == CALL_EXPR)
11794 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11795 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11797 else if (fcode == BUILT_IN_MEMSET_CHK)
11801 unsigned int dest_align
11802 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11804 /* If DEST is not a pointer type, call the normal function. */
11805 if (dest_align == 0)
11808 /* If SRC and DEST are the same (and not volatile), do nothing. */
11809 if (operand_equal_p (src, dest, 0))
11813 if (fcode != BUILT_IN_MEMPCPY_CHK)
11815 /* Evaluate and ignore LEN in case it has side-effects. */
11816 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11817 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11820 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11821 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11824 /* __memmove_chk special case. */
11825 if (fcode == BUILT_IN_MEMMOVE_CHK)
11827 unsigned int src_align
11828 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11830 if (src_align == 0)
11833 /* If src is categorized for a readonly section we can use
11834 normal __memcpy_chk. */
11835 if (readonly_data_expr (src))
11837 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11840 fn = build_call_expr (fn, 4, dest, src, len, size);
11841 STRIP_TYPE_NOPS (fn);
11842 while (TREE_CODE (fn) == COMPOUND_EXPR)
11844 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11846 fn = TREE_OPERAND (fn, 1);
11848 if (TREE_CODE (fn) == CALL_EXPR)
11849 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11850 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11857 /* Emit warning if a buffer overflow is detected at compile time. */
11860 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11867 case BUILT_IN_STRCPY_CHK:
11868 case BUILT_IN_STPCPY_CHK:
11869 /* For __strcat_chk the warning will be emitted only if overflowing
11870 by at least strlen (dest) + 1 bytes. */
11871 case BUILT_IN_STRCAT_CHK:
11872 len = CALL_EXPR_ARG (exp, 1);
11873 size = CALL_EXPR_ARG (exp, 2);
11876 case BUILT_IN_STRNCAT_CHK:
11877 case BUILT_IN_STRNCPY_CHK:
11878 len = CALL_EXPR_ARG (exp, 2);
11879 size = CALL_EXPR_ARG (exp, 3);
11881 case BUILT_IN_SNPRINTF_CHK:
11882 case BUILT_IN_VSNPRINTF_CHK:
11883 len = CALL_EXPR_ARG (exp, 1);
11884 size = CALL_EXPR_ARG (exp, 3);
11887 gcc_unreachable ();
11893 if (! host_integerp (size, 1) || integer_all_onesp (size))
11898 len = c_strlen (len, 1);
11899 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11902 else if (fcode == BUILT_IN_STRNCAT_CHK)
11904 tree src = CALL_EXPR_ARG (exp, 1);
11905 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11907 src = c_strlen (src, 1);
11908 if (! src || ! host_integerp (src, 1))
11910 warning (0, "%Kcall to %D might overflow destination buffer",
11911 exp, get_callee_fndecl (exp));
11914 else if (tree_int_cst_lt (src, size))
11917 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11920 warning (0, "%Kcall to %D will always overflow destination buffer",
11921 exp, get_callee_fndecl (exp));
11924 /* Emit warning if a buffer overflow is detected at compile time
11925 in __sprintf_chk/__vsprintf_chk calls. */
11928 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11930 tree dest, size, len, fmt, flag;
11931 const char *fmt_str;
11932 int nargs = call_expr_nargs (exp);
11934 /* Verify the required arguments in the original call. */
11938 dest = CALL_EXPR_ARG (exp, 0);
11939 flag = CALL_EXPR_ARG (exp, 1);
11940 size = CALL_EXPR_ARG (exp, 2);
11941 fmt = CALL_EXPR_ARG (exp, 3);
11943 if (! host_integerp (size, 1) || integer_all_onesp (size))
11946 /* Check whether the format is a literal string constant. */
11947 fmt_str = c_getstr (fmt);
11948 if (fmt_str == NULL)
11951 if (!init_target_chars ())
11954 /* If the format doesn't contain % args or %%, we know its size. */
11955 if (strchr (fmt_str, target_percent) == 0)
11956 len = build_int_cstu (size_type_node, strlen (fmt_str));
11957 /* If the format is "%s" and first ... argument is a string literal,
11959 else if (fcode == BUILT_IN_SPRINTF_CHK
11960 && strcmp (fmt_str, target_percent_s) == 0)
11966 arg = CALL_EXPR_ARG (exp, 4);
11967 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11970 len = c_strlen (arg, 1);
11971 if (!len || ! host_integerp (len, 1))
11977 if (! tree_int_cst_lt (len, size))
11979 warning (0, "%Kcall to %D will always overflow destination buffer",
11980 exp, get_callee_fndecl (exp));
11984 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11988 fold_builtin_object_size (tree ptr, tree ost)
11990 tree ret = NULL_TREE;
11991 int object_size_type;
11993 if (!validate_arg (ptr, POINTER_TYPE)
11994 || !validate_arg (ost, INTEGER_TYPE))
11999 if (TREE_CODE (ost) != INTEGER_CST
12000 || tree_int_cst_sgn (ost) < 0
12001 || compare_tree_int (ost, 3) > 0)
12004 object_size_type = tree_low_cst (ost, 0);
12006 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12007 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12008 and (size_t) 0 for types 2 and 3. */
12009 if (TREE_SIDE_EFFECTS (ptr))
12010 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12012 if (TREE_CODE (ptr) == ADDR_EXPR)
12013 ret = build_int_cstu (size_type_node,
12014 compute_builtin_object_size (ptr, object_size_type));
12016 else if (TREE_CODE (ptr) == SSA_NAME)
12018 unsigned HOST_WIDE_INT bytes;
12020 /* If object size is not known yet, delay folding until
12021 later. Maybe subsequent passes will help determining
12023 bytes = compute_builtin_object_size (ptr, object_size_type);
12024 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12026 ret = build_int_cstu (size_type_node, bytes);
12031 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12032 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12033 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12040 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12041 DEST, SRC, LEN, and SIZE are the arguments to the call.
12042 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12043 code of the builtin. If MAXLEN is not NULL, it is maximum length
12044 passed as third argument. */
12047 fold_builtin_memory_chk (tree fndecl,
12048 tree dest, tree src, tree len, tree size,
12049 tree maxlen, bool ignore,
12050 enum built_in_function fcode)
12054 if (!validate_arg (dest, POINTER_TYPE)
12055 || !validate_arg (src,
12056 (fcode == BUILT_IN_MEMSET_CHK
12057 ? INTEGER_TYPE : POINTER_TYPE))
12058 || !validate_arg (len, INTEGER_TYPE)
12059 || !validate_arg (size, INTEGER_TYPE))
12062 /* If SRC and DEST are the same (and not volatile), return DEST
12063 (resp. DEST+LEN for __mempcpy_chk). */
12064 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12066 if (fcode != BUILT_IN_MEMPCPY_CHK)
12067 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12070 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12071 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
12075 if (! host_integerp (size, 1))
12078 if (! integer_all_onesp (size))
12080 if (! host_integerp (len, 1))
12082 /* If LEN is not constant, try MAXLEN too.
12083 For MAXLEN only allow optimizing into non-_ocs function
12084 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12085 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12087 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12089 /* (void) __mempcpy_chk () can be optimized into
12090 (void) __memcpy_chk (). */
12091 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12095 return build_call_expr (fn, 4, dest, src, len, size);
12103 if (tree_int_cst_lt (size, maxlen))
12108 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12109 mem{cpy,pcpy,move,set} is available. */
12112 case BUILT_IN_MEMCPY_CHK:
12113 fn = built_in_decls[BUILT_IN_MEMCPY];
12115 case BUILT_IN_MEMPCPY_CHK:
12116 fn = built_in_decls[BUILT_IN_MEMPCPY];
12118 case BUILT_IN_MEMMOVE_CHK:
12119 fn = built_in_decls[BUILT_IN_MEMMOVE];
12121 case BUILT_IN_MEMSET_CHK:
12122 fn = built_in_decls[BUILT_IN_MEMSET];
12131 return build_call_expr (fn, 3, dest, src, len);
12134 /* Fold a call to the __st[rp]cpy_chk builtin.
12135 DEST, SRC, and SIZE are the arguments to the call.
12136 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12137 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12138 strings passed as second argument. */
12141 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12142 tree maxlen, bool ignore,
12143 enum built_in_function fcode)
12147 if (!validate_arg (dest, POINTER_TYPE)
12148 || !validate_arg (src, POINTER_TYPE)
12149 || !validate_arg (size, INTEGER_TYPE))
12152 /* If SRC and DEST are the same (and not volatile), return DEST. */
12153 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12154 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12156 if (! host_integerp (size, 1))
12159 if (! integer_all_onesp (size))
12161 len = c_strlen (src, 1);
12162 if (! len || ! host_integerp (len, 1))
12164 /* If LEN is not constant, try MAXLEN too.
12165 For MAXLEN only allow optimizing into non-_ocs function
12166 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12167 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12169 if (fcode == BUILT_IN_STPCPY_CHK)
12174 /* If return value of __stpcpy_chk is ignored,
12175 optimize into __strcpy_chk. */
12176 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12180 return build_call_expr (fn, 3, dest, src, size);
12183 if (! len || TREE_SIDE_EFFECTS (len))
12186 /* If c_strlen returned something, but not a constant,
12187 transform __strcpy_chk into __memcpy_chk. */
12188 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12192 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12193 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12194 build_call_expr (fn, 4,
12195 dest, src, len, size));
12201 if (! tree_int_cst_lt (maxlen, size))
12205 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12206 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12207 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12211 return build_call_expr (fn, 2, dest, src);
12214 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12215 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12216 length passed as third argument. */
12219 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12224 if (!validate_arg (dest, POINTER_TYPE)
12225 || !validate_arg (src, POINTER_TYPE)
12226 || !validate_arg (len, INTEGER_TYPE)
12227 || !validate_arg (size, INTEGER_TYPE))
12230 if (! host_integerp (size, 1))
12233 if (! integer_all_onesp (size))
12235 if (! host_integerp (len, 1))
12237 /* If LEN is not constant, try MAXLEN too.
12238 For MAXLEN only allow optimizing into non-_ocs function
12239 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12240 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12246 if (tree_int_cst_lt (size, maxlen))
12250 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12251 fn = built_in_decls[BUILT_IN_STRNCPY];
12255 return build_call_expr (fn, 3, dest, src, len);
12258 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12259 are the arguments to the call. */
12262 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12267 if (!validate_arg (dest, POINTER_TYPE)
12268 || !validate_arg (src, POINTER_TYPE)
12269 || !validate_arg (size, INTEGER_TYPE))
12272 p = c_getstr (src);
12273 /* If the SRC parameter is "", return DEST. */
12274 if (p && *p == '\0')
12275 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12277 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12280 /* If __builtin_strcat_chk is used, assume strcat is available. */
12281 fn = built_in_decls[BUILT_IN_STRCAT];
12285 return build_call_expr (fn, 2, dest, src);
12288 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12292 fold_builtin_strncat_chk (tree fndecl,
12293 tree dest, tree src, tree len, tree size)
12298 if (!validate_arg (dest, POINTER_TYPE)
12299 || !validate_arg (src, POINTER_TYPE)
12300 || !validate_arg (size, INTEGER_TYPE)
12301 || !validate_arg (size, INTEGER_TYPE))
12304 p = c_getstr (src);
12305 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12306 if (p && *p == '\0')
12307 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12308 else if (integer_zerop (len))
12309 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12311 if (! host_integerp (size, 1))
12314 if (! integer_all_onesp (size))
12316 tree src_len = c_strlen (src, 1);
12318 && host_integerp (src_len, 1)
12319 && host_integerp (len, 1)
12320 && ! tree_int_cst_lt (len, src_len))
12322 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12323 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12327 return build_call_expr (fn, 3, dest, src, size);
12332 /* If __builtin_strncat_chk is used, assume strncat is available. */
12333 fn = built_in_decls[BUILT_IN_STRNCAT];
12337 return build_call_expr (fn, 3, dest, src, len);
12340 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12341 a normal call should be emitted rather than expanding the function
12342 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12345 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12347 tree dest, size, len, fn, fmt, flag;
12348 const char *fmt_str;
12349 int nargs = call_expr_nargs (exp);
12351 /* Verify the required arguments in the original call. */
12354 dest = CALL_EXPR_ARG (exp, 0);
12355 if (!validate_arg (dest, POINTER_TYPE))
12357 flag = CALL_EXPR_ARG (exp, 1);
12358 if (!validate_arg (flag, INTEGER_TYPE))
12360 size = CALL_EXPR_ARG (exp, 2);
12361 if (!validate_arg (size, INTEGER_TYPE))
12363 fmt = CALL_EXPR_ARG (exp, 3);
12364 if (!validate_arg (fmt, POINTER_TYPE))
12367 if (! host_integerp (size, 1))
12372 if (!init_target_chars ())
12375 /* Check whether the format is a literal string constant. */
12376 fmt_str = c_getstr (fmt);
12377 if (fmt_str != NULL)
12379 /* If the format doesn't contain % args or %%, we know the size. */
12380 if (strchr (fmt_str, target_percent) == 0)
12382 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12383 len = build_int_cstu (size_type_node, strlen (fmt_str));
12385 /* If the format is "%s" and first ... argument is a string literal,
12386 we know the size too. */
12387 else if (fcode == BUILT_IN_SPRINTF_CHK
12388 && strcmp (fmt_str, target_percent_s) == 0)
12394 arg = CALL_EXPR_ARG (exp, 4);
12395 if (validate_arg (arg, POINTER_TYPE))
12397 len = c_strlen (arg, 1);
12398 if (! len || ! host_integerp (len, 1))
12405 if (! integer_all_onesp (size))
12407 if (! len || ! tree_int_cst_lt (len, size))
12411 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12412 or if format doesn't contain % chars or is "%s". */
12413 if (! integer_zerop (flag))
12415 if (fmt_str == NULL)
12417 if (strchr (fmt_str, target_percent) != NULL
12418 && strcmp (fmt_str, target_percent_s))
12422 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12423 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12424 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12428 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12431 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12432 a normal call should be emitted rather than expanding the function
12433 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12434 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12435 passed as second argument. */
12438 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12439 enum built_in_function fcode)
12441 tree dest, size, len, fn, fmt, flag;
12442 const char *fmt_str;
12444 /* Verify the required arguments in the original call. */
12445 if (call_expr_nargs (exp) < 5)
12447 dest = CALL_EXPR_ARG (exp, 0);
12448 if (!validate_arg (dest, POINTER_TYPE))
12450 len = CALL_EXPR_ARG (exp, 1);
12451 if (!validate_arg (len, INTEGER_TYPE))
12453 flag = CALL_EXPR_ARG (exp, 2);
12454 if (!validate_arg (flag, INTEGER_TYPE))
12456 size = CALL_EXPR_ARG (exp, 3);
12457 if (!validate_arg (size, INTEGER_TYPE))
12459 fmt = CALL_EXPR_ARG (exp, 4);
12460 if (!validate_arg (fmt, POINTER_TYPE))
12463 if (! host_integerp (size, 1))
12466 if (! integer_all_onesp (size))
12468 if (! host_integerp (len, 1))
12470 /* If LEN is not constant, try MAXLEN too.
12471 For MAXLEN only allow optimizing into non-_ocs function
12472 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12473 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12479 if (tree_int_cst_lt (size, maxlen))
12483 if (!init_target_chars ())
12486 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12487 or if format doesn't contain % chars or is "%s". */
12488 if (! integer_zerop (flag))
12490 fmt_str = c_getstr (fmt);
12491 if (fmt_str == NULL)
12493 if (strchr (fmt_str, target_percent) != NULL
12494 && strcmp (fmt_str, target_percent_s))
12498 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12500 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12501 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12505 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12508 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12509 FMT and ARG are the arguments to the call; we don't fold cases with
12510 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12512 Return NULL_TREE if no simplification was possible, otherwise return the
12513 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12514 code of the function to be simplified. */
12517 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12518 enum built_in_function fcode)
12520 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12521 const char *fmt_str = NULL;
12523 /* If the return value is used, don't do the transformation. */
12527 /* Verify the required arguments in the original call. */
12528 if (!validate_arg (fmt, POINTER_TYPE))
12531 /* Check whether the format is a literal string constant. */
12532 fmt_str = c_getstr (fmt);
12533 if (fmt_str == NULL)
12536 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12538 /* If we're using an unlocked function, assume the other
12539 unlocked functions exist explicitly. */
12540 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12541 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12545 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12546 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12549 if (!init_target_chars ())
12552 if (strcmp (fmt_str, target_percent_s) == 0
12553 || strchr (fmt_str, target_percent) == NULL)
12557 if (strcmp (fmt_str, target_percent_s) == 0)
12559 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12562 if (!arg || !validate_arg (arg, POINTER_TYPE))
12565 str = c_getstr (arg);
12571 /* The format specifier doesn't contain any '%' characters. */
12572 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12578 /* If the string was "", printf does nothing. */
12579 if (str[0] == '\0')
12580 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12582 /* If the string has length of 1, call putchar. */
12583 if (str[1] == '\0')
12585 /* Given printf("c"), (where c is any one character,)
12586 convert "c"[0] to an int and pass that to the replacement
12588 newarg = build_int_cst (NULL_TREE, str[0]);
12590 call = build_call_expr (fn_putchar, 1, newarg);
12594 /* If the string was "string\n", call puts("string"). */
12595 size_t len = strlen (str);
12596 if ((unsigned char)str[len - 1] == target_newline)
12598 /* Create a NUL-terminated string that's one char shorter
12599 than the original, stripping off the trailing '\n'. */
12600 char *newstr = XALLOCAVEC (char, len);
12601 memcpy (newstr, str, len - 1);
12602 newstr[len - 1] = 0;
12604 newarg = build_string_literal (len, newstr);
12606 call = build_call_expr (fn_puts, 1, newarg);
12609 /* We'd like to arrange to call fputs(string,stdout) here,
12610 but we need stdout and don't have a way to get it yet. */
12615 /* The other optimizations can be done only on the non-va_list variants. */
12616 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12619 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12620 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12622 if (!arg || !validate_arg (arg, POINTER_TYPE))
12625 call = build_call_expr (fn_puts, 1, arg);
12628 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12629 else if (strcmp (fmt_str, target_percent_c) == 0)
12631 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12634 call = build_call_expr (fn_putchar, 1, arg);
12640 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12643 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12644 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12645 more than 3 arguments, and ARG may be null in the 2-argument case.
12647 Return NULL_TREE if no simplification was possible, otherwise return the
12648 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12649 code of the function to be simplified. */
12652 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12653 enum built_in_function fcode)
12655 tree fn_fputc, fn_fputs, call = NULL_TREE;
12656 const char *fmt_str = NULL;
12658 /* If the return value is used, don't do the transformation. */
12662 /* Verify the required arguments in the original call. */
12663 if (!validate_arg (fp, POINTER_TYPE))
12665 if (!validate_arg (fmt, POINTER_TYPE))
12668 /* Check whether the format is a literal string constant. */
12669 fmt_str = c_getstr (fmt);
12670 if (fmt_str == NULL)
12673 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12675 /* If we're using an unlocked function, assume the other
12676 unlocked functions exist explicitly. */
12677 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12678 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12682 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12683 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12686 if (!init_target_chars ())
12689 /* If the format doesn't contain % args or %%, use strcpy. */
12690 if (strchr (fmt_str, target_percent) == NULL)
12692 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12696 /* If the format specifier was "", fprintf does nothing. */
12697 if (fmt_str[0] == '\0')
12699 /* If FP has side-effects, just wait until gimplification is
12701 if (TREE_SIDE_EFFECTS (fp))
12704 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12707 /* When "string" doesn't contain %, replace all cases of
12708 fprintf (fp, string) with fputs (string, fp). The fputs
12709 builtin will take care of special cases like length == 1. */
12711 call = build_call_expr (fn_fputs, 2, fmt, fp);
12714 /* The other optimizations can be done only on the non-va_list variants. */
12715 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12718 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12719 else if (strcmp (fmt_str, target_percent_s) == 0)
12721 if (!arg || !validate_arg (arg, POINTER_TYPE))
12724 call = build_call_expr (fn_fputs, 2, arg, fp);
12727 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12728 else if (strcmp (fmt_str, target_percent_c) == 0)
12730 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12733 call = build_call_expr (fn_fputc, 2, arg, fp);
12738 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12741 /* Initialize format string characters in the target charset. */
12744 init_target_chars (void)
12749 target_newline = lang_hooks.to_target_charset ('\n');
12750 target_percent = lang_hooks.to_target_charset ('%');
12751 target_c = lang_hooks.to_target_charset ('c');
12752 target_s = lang_hooks.to_target_charset ('s');
12753 if (target_newline == 0 || target_percent == 0 || target_c == 0
12757 target_percent_c[0] = target_percent;
12758 target_percent_c[1] = target_c;
12759 target_percent_c[2] = '\0';
12761 target_percent_s[0] = target_percent;
12762 target_percent_s[1] = target_s;
12763 target_percent_s[2] = '\0';
12765 target_percent_s_newline[0] = target_percent;
12766 target_percent_s_newline[1] = target_s;
12767 target_percent_s_newline[2] = target_newline;
12768 target_percent_s_newline[3] = '\0';
12775 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12776 and no overflow/underflow occurred. INEXACT is true if M was not
12777 exactly calculated. TYPE is the tree type for the result. This
12778 function assumes that you cleared the MPFR flags and then
12779 calculated M to see if anything subsequently set a flag prior to
12780 entering this function. Return NULL_TREE if any checks fail. */
12783 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12785 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12786 overflow/underflow occurred. If -frounding-math, proceed iff the
12787 result of calling FUNC was exact. */
12788 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12789 && (!flag_rounding_math || !inexact))
12791 REAL_VALUE_TYPE rr;
12793 real_from_mpfr (&rr, m, type, GMP_RNDN);
12794 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12795 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12796 but the mpft_t is not, then we underflowed in the
12798 if (real_isfinite (&rr)
12799 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12801 REAL_VALUE_TYPE rmode;
12803 real_convert (&rmode, TYPE_MODE (type), &rr);
12804 /* Proceed iff the specified mode can hold the value. */
12805 if (real_identical (&rmode, &rr))
12806 return build_real (type, rmode);
12812 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12813 FUNC on it and return the resulting value as a tree with type TYPE.
12814 If MIN and/or MAX are not NULL, then the supplied ARG must be
12815 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12816 acceptable values, otherwise they are not. The mpfr precision is
12817 set to the precision of TYPE. We assume that function FUNC returns
12818 zero if the result could be calculated exactly within the requested
12822 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12823 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12826 tree result = NULL_TREE;
12830 /* To proceed, MPFR must exactly represent the target floating point
12831 format, which only happens when the target base equals two. */
12832 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12833 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12835 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12837 if (real_isfinite (ra)
12838 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12839 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12841 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12845 mpfr_init2 (m, prec);
12846 mpfr_from_real (m, ra, GMP_RNDN);
12847 mpfr_clear_flags ();
12848 inexact = func (m, m, GMP_RNDN);
12849 result = do_mpfr_ckconv (m, type, inexact);
12857 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12858 FUNC on it and return the resulting value as a tree with type TYPE.
12859 The mpfr precision is set to the precision of TYPE. We assume that
12860 function FUNC returns zero if the result could be calculated
12861 exactly within the requested precision. */
12864 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12865 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12867 tree result = NULL_TREE;
12872 /* To proceed, MPFR must exactly represent the target floating point
12873 format, which only happens when the target base equals two. */
12874 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12875 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12876 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12878 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12879 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12881 if (real_isfinite (ra1) && real_isfinite (ra2))
12883 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12887 mpfr_inits2 (prec, m1, m2, NULL);
12888 mpfr_from_real (m1, ra1, GMP_RNDN);
12889 mpfr_from_real (m2, ra2, GMP_RNDN);
12890 mpfr_clear_flags ();
12891 inexact = func (m1, m1, m2, GMP_RNDN);
12892 result = do_mpfr_ckconv (m1, type, inexact);
12893 mpfr_clears (m1, m2, NULL);
12900 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12901 FUNC on it and return the resulting value as a tree with type TYPE.
12902 The mpfr precision is set to the precision of TYPE. We assume that
12903 function FUNC returns zero if the result could be calculated
12904 exactly within the requested precision. */
12907 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12908 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12910 tree result = NULL_TREE;
12916 /* To proceed, MPFR must exactly represent the target floating point
12917 format, which only happens when the target base equals two. */
12918 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12919 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12920 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12921 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12923 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12924 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12925 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12927 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12929 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12933 mpfr_inits2 (prec, m1, m2, m3, NULL);
12934 mpfr_from_real (m1, ra1, GMP_RNDN);
12935 mpfr_from_real (m2, ra2, GMP_RNDN);
12936 mpfr_from_real (m3, ra3, GMP_RNDN);
12937 mpfr_clear_flags ();
12938 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12939 result = do_mpfr_ckconv (m1, type, inexact);
12940 mpfr_clears (m1, m2, m3, NULL);
12947 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12948 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12949 If ARG_SINP and ARG_COSP are NULL then the result is returned
12950 as a complex value.
12951 The type is taken from the type of ARG and is used for setting the
12952 precision of the calculation and results. */
12955 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12957 tree const type = TREE_TYPE (arg);
12958 tree result = NULL_TREE;
12962 /* To proceed, MPFR must exactly represent the target floating point
12963 format, which only happens when the target base equals two. */
12964 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12965 && TREE_CODE (arg) == REAL_CST
12966 && !TREE_OVERFLOW (arg))
12968 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12970 if (real_isfinite (ra))
12972 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12973 tree result_s, result_c;
12977 mpfr_inits2 (prec, m, ms, mc, NULL);
12978 mpfr_from_real (m, ra, GMP_RNDN);
12979 mpfr_clear_flags ();
12980 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12981 result_s = do_mpfr_ckconv (ms, type, inexact);
12982 result_c = do_mpfr_ckconv (mc, type, inexact);
12983 mpfr_clears (m, ms, mc, NULL);
12984 if (result_s && result_c)
12986 /* If we are to return in a complex value do so. */
12987 if (!arg_sinp && !arg_cosp)
12988 return build_complex (build_complex_type (type),
12989 result_c, result_s);
12991 /* Dereference the sin/cos pointer arguments. */
12992 arg_sinp = build_fold_indirect_ref (arg_sinp);
12993 arg_cosp = build_fold_indirect_ref (arg_cosp);
12994 /* Proceed if valid pointer type were passed in. */
12995 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12996 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12998 /* Set the values. */
12999 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13001 TREE_SIDE_EFFECTS (result_s) = 1;
13002 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13004 TREE_SIDE_EFFECTS (result_c) = 1;
13005 /* Combine the assignments into a compound expr. */
13006 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13007 result_s, result_c));
13015 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
13016 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13017 two-argument mpfr order N Bessel function FUNC on them and return
13018 the resulting value as a tree with type TYPE. The mpfr precision
13019 is set to the precision of TYPE. We assume that function FUNC
13020 returns zero if the result could be calculated exactly within the
13021 requested precision. */
13023 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13024 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13025 const REAL_VALUE_TYPE *min, bool inclusive)
13027 tree result = NULL_TREE;
13032 /* To proceed, MPFR must exactly represent the target floating point
13033 format, which only happens when the target base equals two. */
13034 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13035 && host_integerp (arg1, 0)
13036 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13038 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13039 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13042 && real_isfinite (ra)
13043 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13045 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
13049 mpfr_init2 (m, prec);
13050 mpfr_from_real (m, ra, GMP_RNDN);
13051 mpfr_clear_flags ();
13052 inexact = func (m, n, m, GMP_RNDN);
13053 result = do_mpfr_ckconv (m, type, inexact);
13061 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13062 the pointer *(ARG_QUO) and return the result. The type is taken
13063 from the type of ARG0 and is used for setting the precision of the
13064 calculation and results. */
13067 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13069 tree const type = TREE_TYPE (arg0);
13070 tree result = NULL_TREE;
13075 /* To proceed, MPFR must exactly represent the target floating point
13076 format, which only happens when the target base equals two. */
13077 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13078 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13079 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13081 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13082 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13084 if (real_isfinite (ra0) && real_isfinite (ra1))
13086 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
13091 mpfr_inits2 (prec, m0, m1, NULL);
13092 mpfr_from_real (m0, ra0, GMP_RNDN);
13093 mpfr_from_real (m1, ra1, GMP_RNDN);
13094 mpfr_clear_flags ();
13095 mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
13096 /* Remquo is independent of the rounding mode, so pass
13097 inexact=0 to do_mpfr_ckconv(). */
13098 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13099 mpfr_clears (m0, m1, NULL);
13102 /* MPFR calculates quo in the host's long so it may
13103 return more bits in quo than the target int can hold
13104 if sizeof(host long) > sizeof(target int). This can
13105 happen even for native compilers in LP64 mode. In
13106 these cases, modulo the quo value with the largest
13107 number that the target int can hold while leaving one
13108 bit for the sign. */
13109 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13110 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13112 /* Dereference the quo pointer argument. */
13113 arg_quo = build_fold_indirect_ref (arg_quo);
13114 /* Proceed iff a valid pointer type was passed in. */
13115 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13117 /* Set the value. */
13118 tree result_quo = fold_build2 (MODIFY_EXPR,
13119 TREE_TYPE (arg_quo), arg_quo,
13120 build_int_cst (NULL, integer_quo));
13121 TREE_SIDE_EFFECTS (result_quo) = 1;
13122 /* Combine the quo assignment with the rem. */
13123 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13124 result_quo, result_rem));
13132 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13133 resulting value as a tree with type TYPE. The mpfr precision is
13134 set to the precision of TYPE. We assume that this mpfr function
13135 returns zero if the result could be calculated exactly within the
13136 requested precision. In addition, the integer pointer represented
13137 by ARG_SG will be dereferenced and set to the appropriate signgam
13141 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13143 tree result = NULL_TREE;
13147 /* To proceed, MPFR must exactly represent the target floating point
13148 format, which only happens when the target base equals two. Also
13149 verify ARG is a constant and that ARG_SG is an int pointer. */
13150 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13151 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13152 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13153 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13155 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13157 /* In addition to NaN and Inf, the argument cannot be zero or a
13158 negative integer. */
13159 if (real_isfinite (ra)
13160 && ra->cl != rvc_zero
13161 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13163 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
13168 mpfr_init2 (m, prec);
13169 mpfr_from_real (m, ra, GMP_RNDN);
13170 mpfr_clear_flags ();
13171 inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
13172 result_lg = do_mpfr_ckconv (m, type, inexact);
13178 /* Dereference the arg_sg pointer argument. */
13179 arg_sg = build_fold_indirect_ref (arg_sg);
13180 /* Assign the signgam value into *arg_sg. */
13181 result_sg = fold_build2 (MODIFY_EXPR,
13182 TREE_TYPE (arg_sg), arg_sg,
13183 build_int_cst (NULL, sg));
13184 TREE_SIDE_EFFECTS (result_sg) = 1;
13185 /* Combine the signgam assignment with the lgamma result. */
13186 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13187 result_sg, result_lg));
13197 The functions below provide an alternate interface for folding
13198 builtin function calls presented as GIMPLE_CALL statements rather
13199 than as CALL_EXPRs. The folded result is still expressed as a
13200 tree. There is too much code duplication in the handling of
13201 varargs functions, and a more intrusive re-factoring would permit
13202 better sharing of code between the tree and statement-based
13203 versions of these functions. */
13205 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13206 along with N new arguments specified as the "..." parameters. SKIP
13207 is the number of arguments in STMT to be omitted. This function is used
13208 to do varargs-to-varargs transformations. */
13211 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13213 int oldnargs = gimple_call_num_args (stmt);
13214 int nargs = oldnargs - skip + n;
13215 tree fntype = TREE_TYPE (fndecl);
13216 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13221 buffer = XALLOCAVEC (tree, nargs);
13223 for (i = 0; i < n; i++)
13224 buffer[i] = va_arg (ap, tree);
13226 for (j = skip; j < oldnargs; j++, i++)
13227 buffer[i] = gimple_call_arg (stmt, j);
13229 return fold (build_call_array (TREE_TYPE (fntype), fn, nargs, buffer));
13232 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13233 a normal call should be emitted rather than expanding the function
13234 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13237 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13239 tree dest, size, len, fn, fmt, flag;
13240 const char *fmt_str;
13241 int nargs = gimple_call_num_args (stmt);
13243 /* Verify the required arguments in the original call. */
13246 dest = gimple_call_arg (stmt, 0);
13247 if (!validate_arg (dest, POINTER_TYPE))
13249 flag = gimple_call_arg (stmt, 1);
13250 if (!validate_arg (flag, INTEGER_TYPE))
13252 size = gimple_call_arg (stmt, 2);
13253 if (!validate_arg (size, INTEGER_TYPE))
13255 fmt = gimple_call_arg (stmt, 3);
13256 if (!validate_arg (fmt, POINTER_TYPE))
13259 if (! host_integerp (size, 1))
13264 if (!init_target_chars ())
13267 /* Check whether the format is a literal string constant. */
13268 fmt_str = c_getstr (fmt);
13269 if (fmt_str != NULL)
13271 /* If the format doesn't contain % args or %%, we know the size. */
13272 if (strchr (fmt_str, target_percent) == 0)
13274 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13275 len = build_int_cstu (size_type_node, strlen (fmt_str));
13277 /* If the format is "%s" and first ... argument is a string literal,
13278 we know the size too. */
13279 else if (fcode == BUILT_IN_SPRINTF_CHK
13280 && strcmp (fmt_str, target_percent_s) == 0)
13286 arg = gimple_call_arg (stmt, 4);
13287 if (validate_arg (arg, POINTER_TYPE))
13289 len = c_strlen (arg, 1);
13290 if (! len || ! host_integerp (len, 1))
13297 if (! integer_all_onesp (size))
13299 if (! len || ! tree_int_cst_lt (len, size))
13303 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13304 or if format doesn't contain % chars or is "%s". */
13305 if (! integer_zerop (flag))
13307 if (fmt_str == NULL)
13309 if (strchr (fmt_str, target_percent) != NULL
13310 && strcmp (fmt_str, target_percent_s))
13314 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13315 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13316 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13320 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13323 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13324 a normal call should be emitted rather than expanding the function
13325 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13326 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13327 passed as second argument. */
13330 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13331 enum built_in_function fcode)
13333 tree dest, size, len, fn, fmt, flag;
13334 const char *fmt_str;
13336 /* Verify the required arguments in the original call. */
13337 if (gimple_call_num_args (stmt) < 5)
13339 dest = gimple_call_arg (stmt, 0);
13340 if (!validate_arg (dest, POINTER_TYPE))
13342 len = gimple_call_arg (stmt, 1);
13343 if (!validate_arg (len, INTEGER_TYPE))
13345 flag = gimple_call_arg (stmt, 2);
13346 if (!validate_arg (flag, INTEGER_TYPE))
13348 size = gimple_call_arg (stmt, 3);
13349 if (!validate_arg (size, INTEGER_TYPE))
13351 fmt = gimple_call_arg (stmt, 4);
13352 if (!validate_arg (fmt, POINTER_TYPE))
13355 if (! host_integerp (size, 1))
13358 if (! integer_all_onesp (size))
13360 if (! host_integerp (len, 1))
13362 /* If LEN is not constant, try MAXLEN too.
13363 For MAXLEN only allow optimizing into non-_ocs function
13364 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13365 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13371 if (tree_int_cst_lt (size, maxlen))
13375 if (!init_target_chars ())
13378 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13379 or if format doesn't contain % chars or is "%s". */
13380 if (! integer_zerop (flag))
13382 fmt_str = c_getstr (fmt);
13383 if (fmt_str == NULL)
13385 if (strchr (fmt_str, target_percent) != NULL
13386 && strcmp (fmt_str, target_percent_s))
13390 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13392 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13393 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13397 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13400 /* Builtins with folding operations that operate on "..." arguments
13401 need special handling; we need to store the arguments in a convenient
13402 data structure before attempting any folding. Fortunately there are
13403 only a few builtins that fall into this category. FNDECL is the
13404 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13405 result of the function call is ignored. */
13408 gimple_fold_builtin_varargs (tree fndecl, gimple stmt, bool ignore ATTRIBUTE_UNUSED)
13410 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13411 tree ret = NULL_TREE;
13415 case BUILT_IN_SPRINTF_CHK:
13416 case BUILT_IN_VSPRINTF_CHK:
13417 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13420 case BUILT_IN_SNPRINTF_CHK:
13421 case BUILT_IN_VSNPRINTF_CHK:
13422 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13429 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13430 TREE_NO_WARNING (ret) = 1;
13436 /* A wrapper function for builtin folding that prevents warnings for
13437 "statement without effect" and the like, caused by removing the
13438 call node earlier than the warning is generated. */
13441 fold_call_stmt (gimple stmt, bool ignore)
13443 tree ret = NULL_TREE;
13444 tree fndecl = gimple_call_fndecl (stmt);
13446 && TREE_CODE (fndecl) == FUNCTION_DECL
13447 && DECL_BUILT_IN (fndecl)
13448 && !gimple_call_va_arg_pack_p (stmt))
13450 int nargs = gimple_call_num_args (stmt);
13452 /* FIXME: Don't use a list in this interface. */
13453 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13455 tree arglist = NULL_TREE;
13457 for (i = nargs - 1; i >= 0; i--)
13458 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13459 return targetm.fold_builtin (fndecl, arglist, ignore);
13463 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13465 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13467 for (i = 0; i < nargs; i++)
13468 args[i] = gimple_call_arg (stmt, i);
13469 ret = fold_builtin_n (fndecl, args, nargs, ignore);
13472 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13475 /* Propagate location information from original call to
13476 expansion of builtin. Otherwise things like
13477 maybe_emit_chk_warning, that operate on the expansion
13478 of a builtin, will use the wrong location information. */
13479 if (gimple_has_location (stmt))
13481 tree realret = ret;
13482 if (TREE_CODE (ret) == NOP_EXPR)
13483 realret = TREE_OPERAND (ret, 0);
13484 if (CAN_HAVE_LOCATION_P (realret)
13485 && !EXPR_HAS_LOCATION (realret))
13486 SET_EXPR_LOCATION (realret, gimple_location (stmt));