1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
30 #include "tree-gimple.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
/* Expand each DEF_BUILTIN entry in builtins.def to the stringified
   enumerator name, yielding one printable name per builtin code.  */
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names[(int) END_BUILTINS] =
65 #include "builtins.def"
69 /* Setup an array of _DECL trees, make sure each element is
70 initialized to NULL_TREE. */
/* Indexed by enum built_in_function (hence the END_BUILTINS bound).  */
71 tree built_in_decls[(int) END_BUILTINS];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 It may be NULL_TREE when this is invalid (for instance runtime is not
74 required to implement the function call in all cases). */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
/* Forward declarations of the static expanders (tree -> RTL), folders
   (tree -> simplified tree) and helpers defined later in this file.  */
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
129 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
132 static rtx expand_builtin_bzero (tree);
133 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_alloca (tree, rtx);
139 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static rtx expand_builtin_fputs (tree, rtx, bool);
142 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
143 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_expect (tree, tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (tree);
151 static tree fold_builtin_inf (tree, int);
152 static tree fold_builtin_nan (tree, tree, int);
153 static tree rewrite_call_expr (tree, int, tree, int, ...);
154 static bool validate_arg (const_tree, enum tree_code code);
155 static bool integer_valued_real_p (tree);
156 static tree fold_trunc_transparent_mathfn (tree, tree);
157 static bool readonly_data_expr (tree);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_signbit (tree, rtx);
160 static tree fold_builtin_sqrt (tree, tree);
161 static tree fold_builtin_cbrt (tree, tree);
162 static tree fold_builtin_pow (tree, tree, tree, tree);
163 static tree fold_builtin_powi (tree, tree, tree, tree);
164 static tree fold_builtin_cos (tree, tree, tree);
165 static tree fold_builtin_cosh (tree, tree, tree);
166 static tree fold_builtin_tan (tree, tree);
167 static tree fold_builtin_trunc (tree, tree);
168 static tree fold_builtin_floor (tree, tree);
169 static tree fold_builtin_ceil (tree, tree);
170 static tree fold_builtin_round (tree, tree);
171 static tree fold_builtin_int_roundingfn (tree, tree);
172 static tree fold_builtin_bitop (tree, tree);
173 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
174 static tree fold_builtin_strchr (tree, tree, tree);
175 static tree fold_builtin_memchr (tree, tree, tree, tree);
176 static tree fold_builtin_memcmp (tree, tree, tree);
177 static tree fold_builtin_strcmp (tree, tree);
178 static tree fold_builtin_strncmp (tree, tree, tree);
179 static tree fold_builtin_signbit (tree, tree);
180 static tree fold_builtin_copysign (tree, tree, tree, tree);
181 static tree fold_builtin_isascii (tree);
182 static tree fold_builtin_toascii (tree);
183 static tree fold_builtin_isdigit (tree);
184 static tree fold_builtin_fabs (tree, tree);
185 static tree fold_builtin_abs (tree, tree);
186 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
/* Arity-dispatched folding entry points (0 through 4 args, then varargs).  */
188 static tree fold_builtin_n (tree, tree *, int, bool);
189 static tree fold_builtin_0 (tree, bool);
190 static tree fold_builtin_1 (tree, tree, bool);
191 static tree fold_builtin_2 (tree, tree, tree, bool);
192 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
193 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
194 static tree fold_builtin_varargs (tree, tree, bool);
196 static tree fold_builtin_strpbrk (tree, tree, tree);
197 static tree fold_builtin_strstr (tree, tree, tree);
198 static tree fold_builtin_strrchr (tree, tree, tree);
199 static tree fold_builtin_strcat (tree, tree);
200 static tree fold_builtin_strncat (tree, tree, tree);
201 static tree fold_builtin_strspn (tree, tree);
202 static tree fold_builtin_strcspn (tree, tree);
203 static tree fold_builtin_sprintf (tree, tree, tree, int);
/* Object-size checking (__builtin___*_chk) support.  */
205 static rtx expand_builtin_object_size (tree);
206 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
207 enum built_in_function);
208 static void maybe_emit_chk_warning (tree, enum built_in_function);
209 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
210 static tree fold_builtin_object_size (tree, tree);
211 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
212 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
213 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
214 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
215 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
216 enum built_in_function);
217 static bool init_target_chars (void);
/* Cached target-charset values and format fragments — presumably
   filled in by init_target_chars (declared above); verify at its
   definition.  */
219 static unsigned HOST_WIDE_INT target_newline;
220 static unsigned HOST_WIDE_INT target_percent;
221 static unsigned HOST_WIDE_INT target_c;
222 static unsigned HOST_WIDE_INT target_s;
223 static char target_percent_c[3];
224 static char target_percent_s[3];
225 static char target_percent_s_newline[4];
/* Helpers that evaluate math builtins on constant arguments via MPFR;
   the function-pointer parameter selects the MPFR operation.  */
226 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_arg2 (tree, tree, tree,
229 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
230 static tree do_mpfr_arg3 (tree, tree, tree, tree,
231 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
232 static tree do_mpfr_sincos (tree, tree, tree);
/* These helpers need MPFR features introduced in 2.3.0.  */
233 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
234 static tree do_mpfr_bessel_n (tree, tree, tree,
235 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
236 const REAL_VALUE_TYPE *, bool);
237 static tree do_mpfr_remquo (tree, tree, tree);
238 static tree do_mpfr_lgamma_r (tree, tree, tree);
241 /* Return true if NODE should be considered for inline expansion regardless
242 of the optimization level. This means whenever a function is invoked with
243 its "internal" name, which normally contains the prefix "__builtin". */
245 static bool called_as_built_in (tree node)
247 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
/* Match either internal-name prefix: "__builtin_" or "__sync_".  */
248 if (strncmp (name, "__builtin_", 10) == 0)
250 if (strncmp (name, "__sync_", 7) == 0)
255 /* Return the alignment in bits of EXP, a pointer valued expression.
256 But don't return more than MAX_ALIGN no matter what.
257 The alignment returned is, by default, the alignment of the thing that
258 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
260 Otherwise, look at the expression to see if we can do better, i.e., if the
261 expression is actually pointing at an object whose alignment is tighter. */
264 get_pointer_alignment (tree exp, unsigned int max_align)
266 unsigned int align, inner;
268 /* We rely on TER to compute accurate alignment information. */
269 if (!(optimize && flag_tree_ter))
272 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
/* Start from the declared alignment of the pointed-to type.  */
275 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
276 align = MIN (align, max_align);
/* Peel conversions and pointer arithmetic to reach the underlying
   object being pointed at.  */
280 switch (TREE_CODE (exp))
283 exp = TREE_OPERAND (exp, 0);
284 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
287 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
288 align = MIN (inner, max_align);
291 case POINTER_PLUS_EXPR:
292 /* If sum of pointer + int, restrict our maximum alignment to that
293 imposed by the integer. If not, we can't do any better than
295 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
298 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
299 & (max_align / BITS_PER_UNIT - 1))
303 exp = TREE_OPERAND (exp, 0);
307 /* See what we are pointing at and look at its alignment. */
308 exp = TREE_OPERAND (exp, 0);
/* Drill into component references (field/array accesses) to refine
   the alignment guarantee in INNER.  */
310 if (handled_component_p (exp))
312 HOST_WIDE_INT bitsize, bitpos;
314 enum machine_mode mode;
315 int unsignedp, volatilep;
317 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
318 &mode, &unsignedp, &volatilep, true);
/* (bitpos & -bitpos) isolates the lowest set bit: the alignment
   implied by the constant bit offset.  */
320 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
321 if (offset && TREE_CODE (offset) == PLUS_EXPR
322 && host_integerp (TREE_OPERAND (offset, 1), 1))
324 /* Any overflow in calculating offset_bits won't change
327 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
331 inner = MIN (inner, (offset_bits & -offset_bits));
332 offset = TREE_OPERAND (offset, 0);
334 if (offset && TREE_CODE (offset) == MULT_EXPR
335 && host_integerp (TREE_OPERAND (offset, 1), 1))
337 /* Any overflow in calculating offset_factor won't change
339 unsigned offset_factor
340 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
344 inner = MIN (inner, (offset_factor & -offset_factor));
347 inner = MIN (inner, BITS_PER_UNIT);
350 align = MIN (inner, DECL_ALIGN (exp));
351 #ifdef CONSTANT_ALIGNMENT
352 else if (CONSTANT_CLASS_P (exp))
353 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
355 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
356 || TREE_CODE (exp) == INDIRECT_REF)
357 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
359 align = MIN (align, inner);
360 return MIN (align, max_align);
368 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
369 way, because it could contain a zero byte in the middle.
370 TREE_STRING_LENGTH is the size of the character array, not the string.
372 ONLY_VALUE should be nonzero if the result is not going to be emitted
373 into the instruction stream and zero if it is going to be expanded.
374 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
375 is returned, otherwise NULL, since
376 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
377 evaluate the side-effects.
379 The value returned is of type `ssizetype'.
381 Unfortunately, string_constant can't access the values of const char
382 arrays with initializers, so neither can we do so here. */
385 c_strlen (tree src, int only_value)
388 HOST_WIDE_INT offset;
/* For a conditional expression, the length is known only when both
   arms have the same known length (and the condition's side effects,
   if any, can be ignored).  */
393 if (TREE_CODE (src) == COND_EXPR
394 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
398 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
399 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
400 if (tree_int_cst_equal (len1, len2))
404 if (TREE_CODE (src) == COMPOUND_EXPR
405 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
406 return c_strlen (TREE_OPERAND (src, 1), only_value);
408 src = string_constant (src, &offset_node);
/* MAX is the last array index; PTR the literal's raw bytes.  */
412 max = TREE_STRING_LENGTH (src) - 1;
413 ptr = TREE_STRING_POINTER (src);
415 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
417 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
418 compute the offset to the following null if we don't know where to
419 start searching for it. */
422 for (i = 0; i < max; i++)
426 /* We don't know the starting offset, but we do know that the string
427 has no internal zero bytes. We can assume that the offset falls
428 within the bounds of the string; otherwise, the programmer deserves
429 what he gets. Subtract the offset from the length of the string,
430 and return that. This would perhaps not be valid if we were dealing
431 with named arrays in addition to literal string constants. */
433 return size_diffop (size_int (max), offset_node);
436 /* We have a known offset into the string. Start searching there for
437 a null character if we can represent it as a single HOST_WIDE_INT. */
438 if (offset_node == 0)
440 else if (! host_integerp (offset_node, 0))
443 offset = tree_low_cst (offset_node, 0);
445 /* If the offset is known to be out of bounds, warn, and call strlen at
447 if (offset < 0 || offset > max)
449 /* Suppress multiple warnings for propagated constant strings. */
450 if (! TREE_NO_WARNING (src))
452 warning (0, "offset outside bounds of constant string");
453 TREE_NO_WARNING (src) = 1;
458 /* Use strlen to search for the first zero byte. Since any strings
459 constructed with build_string will have nulls appended, we win even
460 if we get handed something like (char[4])"abcd".
462 Since OFFSET is our starting index into the string, no further
463 calculation is needed. */
464 return ssize_int (strlen (ptr + offset));
467 /* Return a char pointer for a C string if it is a string constant
468 or sum of string constant and integer constant. */
475 src = string_constant (src, &offset_node);
/* No offset: the bytes start at the beginning of the literal.  */
479 if (offset_node == 0)
480 return TREE_STRING_POINTER (src);
/* Reject offsets that are non-constant or past the end of the literal.  */
481 else if (!host_integerp (offset_node, 1)
482 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
485 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
488 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
489 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
492 c_readstr (const char *str, enum machine_mode mode)
498 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
503 for (i = 0; i < GET_MODE_SIZE (mode); i++)
/* Compute J, the destination byte position of source byte I under
   the target's word and byte endianness, then OR the character into
   the appropriate host word of C.  */
506 if (WORDS_BIG_ENDIAN)
507 j = GET_MODE_SIZE (mode) - i - 1;
508 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
509 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
510 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
512 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
515 ch = (unsigned char) str[i];
516 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
518 return immed_double_const (c[0], c[1], mode);
521 /* Cast a target constant CST to target CHAR and if that value fits into
522 host char type, return zero and put that value into variable pointed to by
526 target_char_cast (tree cst, char *p)
528 unsigned HOST_WIDE_INT val, hostval;
530 if (!host_integerp (cst, 1)
531 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
534 val = tree_low_cst (cst, 1);
/* Mask the value down to the target's char width.  */
535 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
536 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
/* Likewise mask to the host's char width for the fit check.  */
539 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
540 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
549 /* Similar to save_expr, but assumes that arbitrary code is not executed
550 in between the multiple evaluations. In particular, we assume that a
551 non-addressable local variable will not be modified. */
554 builtin_save_expr (tree exp)
/* A non-addressable PARM_DECL or automatic (non-static) VAR_DECL
   cannot change between evaluations, so no SAVE_EXPR is needed.  */
556 if (TREE_ADDRESSABLE (exp) == 0
557 && (TREE_CODE (exp) == PARM_DECL
558 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
561 return save_expr (exp);
564 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
565 times to get the address of either a higher stack frame, or a return
566 address located within it (depending on FNDECL_CODE). */
569 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
573 #ifdef INITIAL_FRAME_ADDRESS_RTX
574 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
578 /* For a zero count with __builtin_return_address, we don't care what
579 frame address we return, because target-specific definitions will
580 override us. Therefore frame pointer elimination is OK, and using
581 the soft frame pointer is OK.
583 For a nonzero count, or a zero count with __builtin_frame_address,
584 we require a stable offset from the current frame pointer to the
585 previous one, so we must use the hard frame pointer, and
586 we must disable frame pointer elimination. */
587 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
588 tem = frame_pointer_rtx;
591 tem = hard_frame_pointer_rtx;
593 /* Tell reload not to eliminate the frame pointer. */
594 crtl->accesses_prior_frames = 1;
598 /* Some machines need special handling before we can access
599 arbitrary frames. For example, on the SPARC, we must first flush
600 all register windows to the stack. */
601 #ifdef SETUP_FRAME_ADDRESSES
603 SETUP_FRAME_ADDRESSES ();
606 /* On the SPARC, the return address is not in the frame, it is in a
607 register. There is no way to access it off of the current frame
608 pointer, but it can be accessed off the previous frame pointer by
609 reading the value from the register window save area. */
610 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
611 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
615 /* Scan back COUNT frames to the specified frame. */
616 for (i = 0; i < count; i++)
618 /* Assume the dynamic chain pointer is in the word that the
619 frame address points to, unless otherwise specified. */
620 #ifdef DYNAMIC_CHAIN_ADDRESS
621 tem = DYNAMIC_CHAIN_ADDRESS (tem);
623 tem = memory_address (Pmode, tem);
624 tem = gen_frame_mem (Pmode, tem);
625 tem = copy_to_reg (tem);
628 /* For __builtin_frame_address, return what we've got. But, on
629 the SPARC for example, we may have to add a bias. */
630 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
631 #ifdef FRAME_ADDR_RTX
632 return FRAME_ADDR_RTX (tem);
637 /* For __builtin_return_address, get the return address from that frame. */
638 #ifdef RETURN_ADDR_RTX
639 tem = RETURN_ADDR_RTX (count, tem);
/* Default: the return address lives one word past the frame address.  */
641 tem = memory_address (Pmode,
642 plus_constant (tem, GET_MODE_SIZE (Pmode)));
643 tem = gen_frame_mem (Pmode, tem);
648 /* Alias set used for setjmp buffer.  */
/* -1 means "not yet allocated"; created lazily via new_alias_set.  */
649 static alias_set_type setjmp_alias_set = -1;
651 /* Construct the leading half of a __builtin_setjmp call. Control will
652 return to RECEIVER_LABEL. This is also called directly by the SJLJ
653 exception handling code. */
656 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
658 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
662 if (setjmp_alias_set == -1)
663 setjmp_alias_set = new_alias_set ();
665 buf_addr = convert_memory_address (Pmode, buf_addr);
667 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
669 /* We store the frame pointer and the address of receiver_label in
670 the buffer and use the rest of it for the stack save area, which
671 is machine-dependent. */
/* Word 0 of the buffer: the frame value.  */
673 mem = gen_rtx_MEM (Pmode, buf_addr);
674 set_mem_alias_set (mem, setjmp_alias_set);
675 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
/* Word 1 of the buffer: the receiver label.  NOTE(review): the trailing
   comma below joins this statement and the following set_mem_alias_set
   into one comma expression; behavior is unchanged, but a semicolon
   would be clearer.  */
677 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
678 set_mem_alias_set (mem, setjmp_alias_set);
680 emit_move_insn (validize_mem (mem),
681 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
/* Word 2 onward: the machine-dependent stack save area.  */
683 stack_save = gen_rtx_MEM (sa_mode,
684 plus_constant (buf_addr,
685 2 * GET_MODE_SIZE (Pmode)));
686 set_mem_alias_set (stack_save, setjmp_alias_set);
687 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
689 /* If there is further processing to do, do it. */
690 #ifdef HAVE_builtin_setjmp_setup
691 if (HAVE_builtin_setjmp_setup)
692 emit_insn (gen_builtin_setjmp_setup (buf_addr));
695 /* Tell optimize_save_area_alloca that extra work is going to
696 need to go on during alloca. */
697 cfun->calls_setjmp = 1;
699 /* We have a nonlocal label. */
700 cfun->has_nonlocal_label = 1;
703 /* Construct the trailing part of a __builtin_setjmp call. This is
704 also called directly by the SJLJ exception handling code. */
707 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
709 /* Clobber the FP when we get here, so we have to make sure it's
710 marked as used by this function. */
711 emit_use (hard_frame_pointer_rtx)
770 /* __builtin_longjmp is passed a pointer to an array of five words (not
771 all will be used on all machines). It operates similarly to the C
772 library function of the same name, but is more efficient. Much of
773 the code below is copied from the handling of non-local gotos. */
776 expand_builtin_longjmp (rtx buf_addr, rtx value)
778 rtx fp, lab, stack, insn, last;
779 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
781 if (setjmp_alias_set == -1)
782 setjmp_alias_set = new_alias_set ();
784 buf_addr = convert_memory_address (Pmode, buf_addr);
786 buf_addr = force_reg (Pmode, buf_addr);
788 /* We used to store value in static_chain_rtx, but that fails if pointers
789 are smaller than integers. We instead require that the user must pass
790 a second argument of 1, because that is what builtin_setjmp will
791 return. This also makes EH slightly more efficient, since we are no
792 longer copying around a value that we don't care about. */
793 gcc_assert (value == const1_rtx);
795 last = get_last_insn ();
796 #ifdef HAVE_builtin_longjmp
797 if (HAVE_builtin_longjmp)
798 emit_insn (gen_builtin_longjmp (buf_addr));
/* Buffer layout mirrors expand_builtin_setjmp_setup: word 0 holds the
   frame pointer, word 1 the receiver label, word 2 the stack save area.  */
802 fp = gen_rtx_MEM (Pmode, buf_addr);
803 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
804 GET_MODE_SIZE (Pmode)));
806 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
807 2 * GET_MODE_SIZE (Pmode)));
808 set_mem_alias_set (fp, setjmp_alias_set);
809 set_mem_alias_set (lab, setjmp_alias_set);
810 set_mem_alias_set (stack, setjmp_alias_set);
812 /* Pick up FP, label, and SP from the block and jump. This code is
813 from expand_goto in stmt.c; see there for detailed comments. */
814 #ifdef HAVE_nonlocal_goto
815 if (HAVE_nonlocal_goto)
816 /* We have to pass a value to the nonlocal_goto pattern that will
817 get copied into the static_chain pointer, but it does not matter
818 what that value is, because builtin_setjmp does not use it. */
819 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
823 lab = copy_to_reg (lab);
825 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
826 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
828 emit_move_insn (hard_frame_pointer_rtx, fp);
829 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
831 emit_use (hard_frame_pointer_rtx);
832 emit_use (stack_pointer_rtx);
833 emit_indirect_jump (lab);
837 /* Search backwards and mark the jump insn as a non-local goto.
838 Note that this precludes the use of __builtin_longjmp to a
839 __builtin_setjmp target in the same function. However, we've
840 already cautioned the user that these functions are for
841 internal exception handling use only. */
842 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
844 gcc_assert (insn != last);
848 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
851 else if (CALL_P (insn))
856 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
857 and the address of the save area. */
860 expand_builtin_nonlocal_goto (tree exp)
862 tree t_label, t_save_area;
863 rtx r_label, r_save_area, r_fp, r_sp, insn;
865 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
868 t_label = CALL_EXPR_ARG (exp, 0);
869 t_save_area = CALL_EXPR_ARG (exp, 1);
871 r_label = expand_normal (t_label);
872 r_label = convert_memory_address (Pmode, r_label);
873 r_save_area = expand_normal (t_save_area);
874 r_save_area = convert_memory_address (Pmode, r_save_area);
875 /* Copy the address of the save location to a register just in case it was based
876 on the frame pointer. */
877 r_save_area = copy_to_reg (r_save_area);
/* Save-area layout: word 0 holds the saved frame pointer, word 1 the
   saved stack pointer.  */
878 r_fp = gen_rtx_MEM (Pmode, r_save_area);
879 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
880 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
882 crtl->has_nonlocal_goto = 1;
884 #ifdef HAVE_nonlocal_goto
885 /* ??? We no longer need to pass the static chain value, afaik. */
886 if (HAVE_nonlocal_goto)
887 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
891 r_label = copy_to_reg (r_label);
893 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
894 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
896 /* Restore frame pointer for containing function.
897 This sets the actual hard register used for the frame pointer
898 to the location of the function's incoming static chain info.
899 The non-local goto handler will then adjust it to contain the
900 proper value and reload the argument pointer, if needed. */
901 emit_move_insn (hard_frame_pointer_rtx, r_fp);
902 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
904 /* USE of hard_frame_pointer_rtx added for consistency;
905 not clear if really needed. */
906 emit_use (hard_frame_pointer_rtx);
907 emit_use (stack_pointer_rtx);
909 /* If the architecture is using a GP register, we must
910 conservatively assume that the target function makes use of it.
911 The prologue of functions with nonlocal gotos must therefore
912 initialize the GP register to the appropriate value, and we
913 must then make sure that this value is live at the point
914 of the jump. (Note that this doesn't necessarily apply
915 to targets with a nonlocal_goto pattern; they are free
916 to implement it in their own way. Note also that this is
917 a no-op if the GP register is a global invariant.) */
918 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
919 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
920 emit_use (pic_offset_table_rtx);
922 emit_indirect_jump (r_label);
925 /* Search backwards to the jump insn and mark it as a
927 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
931 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
934 else if (CALL_P (insn))
941 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
942 (not all will be used on all machines) that was passed to __builtin_setjmp.
943 It updates the stack pointer in that block to correspond to the current
947 expand_builtin_update_setjmp_buf (rtx buf_addr)
/* Determine the mode this target uses for the stack save area.  */
949 enum machine_mode sa_mode = Pmode;
953 #ifdef HAVE_save_stack_nonlocal
954 if (HAVE_save_stack_nonlocal)
955 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
957 #ifdef STACK_SAVEAREA_MODE
958 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
/* The stack save area starts at the third word of the setjmp buffer.  */
962 = gen_rtx_MEM (sa_mode,
965 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
969 emit_insn (gen_setjmp ());
972 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
975 /* Expand a call to __builtin_prefetch. For a target that does not support
976 data prefetch, evaluate the memory address argument in case it has side
980 expand_builtin_prefetch (tree exp)
982 tree arg0, arg1, arg2;
986 if (!validate_arglist (exp, POINTER_TYPE, 0))
989 arg0 = CALL_EXPR_ARG (exp, 0);
991 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
992 zero (read) and argument 2 (locality) defaults to 3 (high degree of
994 nargs = call_expr_nargs (exp);
996 arg1 = CALL_EXPR_ARG (exp, 1);
998 arg1 = integer_zero_node;
1000 arg2 = CALL_EXPR_ARG (exp, 2);
1002 arg2 = build_int_cst (NULL_TREE, 3);
1004 /* Argument 0 is an address. */
1005 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1007 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1008 if (TREE_CODE (arg1) != INTEGER_CST)
1010 error ("second argument to %<__builtin_prefetch%> must be a constant");
1011 arg1 = integer_zero_node;
1013 op1 = expand_normal (arg1);
1014 /* Argument 1 must be either zero or one. */
1015 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1017 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1022 /* Argument 2 (locality) must be a compile-time constant int. */
1023 if (TREE_CODE (arg2) != INTEGER_CST)
1025 error ("third argument to %<__builtin_prefetch%> must be a constant");
1026 arg2 = integer_zero_node;
1028 op2 = expand_normal (arg2);
1029 /* Argument 2 must be 0, 1, 2, or 3. */
1030 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1032 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1036 #ifdef HAVE_prefetch
/* If the address operand fails the prefetch pattern's predicate, or
   is not in Pmode, force it into a Pmode register first.  */
1039 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1041 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1042 || (GET_MODE (op0) != Pmode))
1044 op0 = convert_memory_address (Pmode, op0);
1045 op0 = force_reg (Pmode, op0);
1047 emit_insn (gen_prefetch (op0, op1, op2));
1051 /* Don't do anything with direct references to volatile memory, but
1052 generate code to handle other side effects. */
1053 if (!MEM_P (op0) && side_effects_p (op0))
1057 /* Get a MEM rtx for expression EXP which is the address of an operand
1058 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1059 the maximum length of the block of memory that might be accessed or
/* NOTE(review): interior lines are elided in this excerpt (braces, some
   declarations, parts of conditions) -- confirm against upstream before
   modifying. */
1063 get_memory_rtx (tree exp, tree len)
1065 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1066 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1068 /* Get an expression we can use to find the attributes to assign to MEM.
1069 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1070 we can. First remove any nops. */
1071 while (CONVERT_EXPR_P (exp)
1072 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1073 exp = TREE_OPERAND (exp, 0);
1075 if (TREE_CODE (exp) == ADDR_EXPR)
1076 exp = TREE_OPERAND (exp, 0);
1077 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1078 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1082 /* Honor attributes derived from exp, except for the alias set
1083 (as builtin stringops may alias with anything) and the size
1084 (as stringops may access multiple array elements). */
1087 set_mem_attributes (mem, exp, 0);
1089 /* Allow the string and memory builtins to overflow from one
1090 field into another, see http://gcc.gnu.org/PR23561.
1091 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1092 memory accessed by the string or memory builtin will fit
1093 within the field. */
1094 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1096 tree mem_expr = MEM_EXPR (mem);
1097 HOST_WIDE_INT offset = -1, length = -1;
/* Strip wrappers until we reach the innermost COMPONENT_REF. */
1100 while (TREE_CODE (inner) == ARRAY_REF
1101 || CONVERT_EXPR_P (inner)
1102 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1103 || TREE_CODE (inner) == SAVE_EXPR)
1104 inner = TREE_OPERAND (inner, 0);
1106 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1108 if (MEM_OFFSET (mem)
1109 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1110 offset = INTVAL (MEM_OFFSET (mem));
1112 if (offset >= 0 && len && host_integerp (len, 0))
1113 length = tree_low_cst (len, 0);
/* Walk outward through the chain of COMPONENT_REFs, checking at each
   level whether the accessed [offset, offset+length) range provably
   fits within the field. */
1115 while (TREE_CODE (inner) == COMPONENT_REF)
1117 tree field = TREE_OPERAND (inner, 1);
1118 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1119 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1121 /* Bitfields are generally not byte-addressable. */
1122 gcc_assert (!DECL_BIT_FIELD (field)
1123 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1124 % BITS_PER_UNIT) == 0
1125 && host_integerp (DECL_SIZE (field), 0)
1126 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1127 % BITS_PER_UNIT) == 0));
1129 /* If we can prove that the memory starting at XEXP (mem, 0) and
1130 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1131 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1132 fields without DECL_SIZE_UNIT like flexible array members. */
1134 && DECL_SIZE_UNIT (field)
1135 && host_integerp (DECL_SIZE_UNIT (field), 0))
1138 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1141 && offset + length <= size)
1146 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1147 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1148 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1156 mem_expr = TREE_OPERAND (mem_expr, 0);
1157 inner = TREE_OPERAND (inner, 0);
1160 if (mem_expr == NULL)
1162 if (mem_expr != MEM_EXPR (mem))
1164 set_mem_expr (mem, mem_expr);
1165 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
/* Stringops may alias anything and may touch multiple elements, so
   drop the alias set and size attributes entirely. */
1168 set_mem_alias_set (mem, 0);
1169 set_mem_size (mem, NULL_RTX);
1175 /* Built-in functions to perform an untyped call and return. */
/* These three tables are filled in by apply_args_size () and
   apply_result_size () below, and consulted by the __builtin_apply*
   expanders. */
1177 /* For each register that may be used for calling a function, this
1178 gives a mode used to copy the register's value. VOIDmode indicates
1179 the register is not used for calling a function. If the machine
1180 has register windows, this gives only the outbound registers.
1181 INCOMING_REGNO gives the corresponding inbound register. */
1182 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1184 /* For each register that may be used for returning values, this gives
1185 a mode used to copy the register's value. VOIDmode indicates the
1186 register is not used for returning values. If the machine has
1187 register windows, this gives only the outbound registers.
1188 INCOMING_REGNO gives the corresponding inbound register. */
1189 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1191 /* For each register that may be used for calling a function, this
1192 gives the offset of that register into the block returned by
1193 __builtin_apply_args. 0 indicates that the register is not
1194 used for calling a function. */
1195 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1197 /* Return the size required for the block returned by __builtin_apply_args,
1198 and initialize apply_args_mode. */
/* NOTE(review): lines are elided in this excerpt (return type, braces,
   the cached-size early return) -- confirm against upstream. */
1201 apply_args_size (void)
1203 static int size = -1;
1206 enum machine_mode mode;
1208 /* The values computed by this function never change. */
/* SIZE is cached across calls via the function-local static above. */
1211 /* The first value is the incoming arg-pointer. */
1212 size = GET_MODE_SIZE (Pmode);
1214 /* The second value is the structure value address unless this is
1215 passed as an "invisible" first argument. */
1216 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1217 size += GET_MODE_SIZE (Pmode);
1219 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1220 if (FUNCTION_ARG_REGNO_P (regno))
1222 mode = reg_raw_mode[regno];
1224 gcc_assert (mode != VOIDmode);
/* Round SIZE up to MODE's byte alignment before recording this
   register's offset within the save block. */
1226 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1227 if (size % align != 0)
1228 size = CEIL (size, align) * align;
1229 apply_args_reg_offset[regno] = size;
1230 size += GET_MODE_SIZE (mode);
1231 apply_args_mode[regno] = mode;
/* Non-argument registers get VOIDmode / offset 0 markers. */
1235 apply_args_mode[regno] = VOIDmode;
1236 apply_args_reg_offset[regno] = 0;
1242 /* Return the size required for the block returned by __builtin_apply,
1243 and initialize apply_result_mode. */
/* NOTE(review): lines are elided in this excerpt (return type, braces,
   the cached-size early return) -- confirm against upstream. */
1246 apply_result_size (void)
1248 static int size = -1;
1250 enum machine_mode mode;
1252 /* The values computed by this function never change. */
1257 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1258 if (FUNCTION_VALUE_REGNO_P (regno))
1260 mode = reg_raw_mode[regno];
1262 gcc_assert (mode != VOIDmode);
/* Align, then account for this return register in the block. */
1264 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1265 if (size % align != 0)
1266 size = CEIL (size, align) * align;
1267 size += GET_MODE_SIZE (mode);
1268 apply_result_mode[regno] = mode;
1271 apply_result_mode[regno] = VOIDmode;
1273 /* Allow targets that use untyped_call and untyped_return to override
1274 the size so that machine-specific information can be stored here. */
1275 #ifdef APPLY_RESULT_SIZE
1276 size = APPLY_RESULT_SIZE;
1282 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1283 /* Create a vector describing the result block RESULT. If SAVEP is true,
1284 the result block is used to save the values; otherwise it is used to
1285 restore the values. */
/* NOTE(review): the return type, braces and some initializations are
   elided in this excerpt -- confirm against upstream. */
1288 result_vector (int savep, rtx result)
1290 int regno, size, align, nelts;
1291 enum machine_mode mode;
1293 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
/* Build one SET per live result register: reg->mem when saving,
   mem->reg when restoring. */
1296 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1297 if ((mode = apply_result_mode[regno]) != VOIDmode)
1299 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1300 if (size % align != 0)
1301 size = CEIL (size, align) * align;
1302 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1303 mem = adjust_address (result, mode, size);
1304 savevec[nelts++] = (savep
1305 ? gen_rtx_SET (VOIDmode, mem, reg)
1306 : gen_rtx_SET (VOIDmode, reg, mem));
1307 size += GET_MODE_SIZE (mode);
1309 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1311 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1313 /* Save the state required to perform an untyped call with the same
1314 arguments as were passed to the current function. */
/* NOTE(review): lines are elided in this excerpt (return type, braces,
   some declarations) -- confirm against upstream. */
1317 expand_builtin_apply_args_1 (void)
1320 int size, align, regno;
1321 enum machine_mode mode;
1322 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1324 /* Create a block where the arg-pointer, structure value address,
1325 and argument registers can be saved. */
1326 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1328 /* Walk past the arg-pointer and structure value address. */
1329 size = GET_MODE_SIZE (Pmode);
1330 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1331 size += GET_MODE_SIZE (Pmode);
1333 /* Save each register used in calling a function to the block. */
1334 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1335 if ((mode = apply_args_mode[regno]) != VOIDmode)
1337 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1338 if (size % align != 0)
1339 size = CEIL (size, align) * align;
1341 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1343 emit_move_insn (adjust_address (registers, mode, size), tem);
1344 size += GET_MODE_SIZE (mode);
1347 /* Save the arg pointer to the block. */
1348 tem = copy_to_reg (virtual_incoming_args_rtx);
1349 #ifdef STACK_GROWS_DOWNWARD
1350 /* We need the pointer as the caller actually passed them to us, not
1351 as we might have pretended they were passed. Make sure it's a valid
1352 operand, as emit_move_insn isn't expected to handle a PLUS. */
1354 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1357 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1359 size = GET_MODE_SIZE (Pmode);
1361 /* Save the structure value address unless this is passed as an
1362 "invisible" first argument. */
1363 if (struct_incoming_value)
1365 emit_move_insn (adjust_address (registers, Pmode, size),
1366 copy_to_reg (struct_incoming_value));
1367 size += GET_MODE_SIZE (Pmode);
1370 /* Return the address of the block. */
1371 return copy_addr_to_reg (XEXP (registers, 0));
1374 /* __builtin_apply_args returns block of memory allocated on
1375 the stack into which is stored the arg pointer, structure
1376 value address, static chain, and all the registers that might
1377 possibly be used in performing a function call. The code is
1378 moved to the start of the function so the incoming values are
/* NOTE(review): lines are elided in this excerpt (return type, braces,
   the start_sequence/end_sequence pair) -- confirm against upstream. */
1382 expand_builtin_apply_args (void)
1384 /* Don't do __builtin_apply_args more than once in a function.
1385 Save the result of the first call and reuse it. */
1386 if (apply_args_value != 0)
1387 return apply_args_value;
1389 /* When this function is called, it means that registers must be
1390 saved on entry to this function. So we migrate the
1391 call to the first insn of this function. */
1396 temp = expand_builtin_apply_args_1 ();
1400 apply_args_value = temp;
1402 /* Put the insns after the NOTE that starts the function.
1403 If this is inside a start_sequence, make the outer-level insn
1404 chain current, so the code is placed at the start of the
1406 push_topmost_sequence ();
1407 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1408 pop_topmost_sequence ();
1413 /* Perform an untyped call and save the state required to perform an
1414 untyped return of whatever value was returned by the given function. */
/* NOTE(review): many lines are elided in this excerpt (return type,
   braces, some conditions and #else/#endif directives) -- confirm against
   upstream before modifying. */
1417 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1419 int size, align, regno;
1420 enum machine_mode mode;
1421 rtx incoming_args, result, reg, dest, src, call_insn;
1422 rtx old_stack_level = 0;
1423 rtx call_fusage = 0;
1424 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1426 arguments = convert_memory_address (Pmode, arguments);
1428 /* Create a block where the return registers can be saved. */
1429 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1431 /* Fetch the arg pointer from the ARGUMENTS block. */
1432 incoming_args = gen_reg_rtx (Pmode);
1433 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1434 #ifndef STACK_GROWS_DOWNWARD
1435 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1436 incoming_args, 0, OPTAB_LIB_WIDEN);
1439 /* Push a new argument block and copy the arguments. Do not allow
1440 the (potential) memcpy call below to interfere with our stack
1442 do_pending_stack_adjust ();
1445 /* Save the stack with nonlocal if available. */
1446 #ifdef HAVE_save_stack_nonlocal
1447 if (HAVE_save_stack_nonlocal)
1448 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1451 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1453 /* Allocate a block of memory onto the stack and copy the memory
1454 arguments to the outgoing arguments address. */
1455 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1456 dest = virtual_outgoing_args_rtx;
1457 #ifndef STACK_GROWS_DOWNWARD
1458 if (GET_CODE (argsize) == CONST_INT)
1459 dest = plus_constant (dest, -INTVAL (argsize));
1461 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1463 dest = gen_rtx_MEM (BLKmode, dest);
1464 set_mem_align (dest, PARM_BOUNDARY);
1465 src = gen_rtx_MEM (BLKmode, incoming_args);
1466 set_mem_align (src, PARM_BOUNDARY);
1467 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1469 /* Refer to the argument block. */
1471 arguments = gen_rtx_MEM (BLKmode, arguments);
1472 set_mem_align (arguments, PARM_BOUNDARY);
1474 /* Walk past the arg-pointer and structure value address. */
1475 size = GET_MODE_SIZE (Pmode);
1477 size += GET_MODE_SIZE (Pmode);
1479 /* Restore each of the registers previously saved. Make USE insns
1480 for each of these registers for use in making the call. */
1481 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1482 if ((mode = apply_args_mode[regno]) != VOIDmode)
1484 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1485 if (size % align != 0)
1486 size = CEIL (size, align) * align;
1487 reg = gen_rtx_REG (mode, regno);
1488 emit_move_insn (reg, adjust_address (arguments, mode, size));
1489 use_reg (&call_fusage, reg);
1490 size += GET_MODE_SIZE (mode);
1493 /* Restore the structure value address unless this is passed as an
1494 "invisible" first argument. */
1495 size = GET_MODE_SIZE (Pmode);
1498 rtx value = gen_reg_rtx (Pmode);
1499 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1500 emit_move_insn (struct_value, value);
1501 if (REG_P (struct_value))
1502 use_reg (&call_fusage, struct_value);
1503 size += GET_MODE_SIZE (Pmode);
1506 /* All arguments and registers used for the call are set up by now! */
1507 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1509 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1510 and we don't want to load it into a register as an optimization,
1511 because prepare_call_address already did it if it should be done. */
1512 if (GET_CODE (function) != SYMBOL_REF)
1513 function = memory_address (FUNCTION_MODE, function);
1515 /* Generate the actual call instruction and save the return value. */
1516 #ifdef HAVE_untyped_call
1517 if (HAVE_untyped_call)
1518 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1519 result, result_vector (1, result)));
1522 #ifdef HAVE_call_value
1523 if (HAVE_call_value)
1527 /* Locate the unique return register. It is not possible to
1528 express a call that sets more than one return register using
1529 call_value; use untyped_call for that. In fact, untyped_call
1530 only needs to save the return registers in the given block. */
1531 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1532 if ((mode = apply_result_mode[regno]) != VOIDmode)
1534 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1536 valreg = gen_rtx_REG (mode, regno);
1539 emit_call_insn (GEN_CALL_VALUE (valreg,
1540 gen_rtx_MEM (FUNCTION_MODE, function),
1541 const0_rtx, NULL_RTX, const0_rtx));
1543 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1549 /* Find the CALL insn we just emitted, and attach the register usage
1551 call_insn = last_call_insn ();
1552 add_function_usage_to (call_insn, call_fusage);
1554 /* Restore the stack. */
1555 #ifdef HAVE_save_stack_nonlocal
1556 if (HAVE_save_stack_nonlocal)
1557 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX)
1560 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1564 /* Return the address of the result block. */
1565 result = copy_addr_to_reg (XEXP (result, 0));
1566 return convert_memory_address (ptr_mode, result);
1569 /* Perform an untyped return. */
/* NOTE(review): lines are elided in this excerpt (return type, braces,
   some declarations, the plain-return fallback) -- confirm against
   upstream. */
1572 expand_builtin_return (rtx result)
1574 int size, align, regno;
1575 enum machine_mode mode;
1577 rtx call_fusage = 0;
1579 result = convert_memory_address (Pmode, result);
/* Make sure apply_result_mode[] is initialized before we consult it. */
1581 apply_result_size ();
1582 result = gen_rtx_MEM (BLKmode, result);
1584 #ifdef HAVE_untyped_return
1585 if (HAVE_untyped_return)
1587 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1593 /* Restore the return value and note that each value is used. */
1595 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1596 if ((mode = apply_result_mode[regno]) != VOIDmode)
1598 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1599 if (size % align != 0)
1600 size = CEIL (size, align) * align;
1601 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1602 emit_move_insn (reg, adjust_address (result, mode, size));
/* Accumulate USEs for the restored registers in a side sequence. */
1604 push_to_sequence (call_fusage);
1606 call_fusage = get_insns ();
1608 size += GET_MODE_SIZE (mode);
1611 /* Put the USE insns before the return. */
1612 emit_insn (call_fusage);
1614 /* Return whatever values was restored by jumping directly to the end
1616 expand_naked_return ();
1619 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
/* Map a tree type code to the corresponding enum type_class value
   (the __builtin_classify_type encoding).  NOTE(review): braces and a
   few case lines are elided in this excerpt -- confirm against
   upstream. */
1621 static enum type_class
1622 type_to_class (tree type)
1624 switch (TREE_CODE (type))
1626 case VOID_TYPE: return void_type_class;
1627 case INTEGER_TYPE: return integer_type_class;
1628 case ENUMERAL_TYPE: return enumeral_type_class;
1629 case BOOLEAN_TYPE: return boolean_type_class;
1630 case POINTER_TYPE: return pointer_type_class;
1631 case REFERENCE_TYPE: return reference_type_class;
1632 case OFFSET_TYPE: return offset_type_class;
1633 case REAL_TYPE: return real_type_class;
1634 case COMPLEX_TYPE: return complex_type_class;
1635 case FUNCTION_TYPE: return function_type_class;
1636 case METHOD_TYPE: return method_type_class;
1637 case RECORD_TYPE: return record_type_class;
1639 case QUAL_UNION_TYPE: return union_type_class;
1640 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1641 ? string_type_class : array_type_class);
1642 case LANG_TYPE: return lang_type_class;
1643 default: return no_type_class;
1647 /* Expand a call EXP to __builtin_classify_type. */
/* With an argument, classify its type; with no arguments the result is
   no_type_class.  NOTE(review): return type and braces are elided in
   this excerpt. */
1650 expand_builtin_classify_type (tree exp)
1652 if (call_expr_nargs (exp))
1653 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1654 return GEN_INT (no_type_class);
1657 /* This helper macro, meant to be used in mathfn_built_in below,
1658 determines which among a set of three builtin math functions is
1659 appropriate for a given type mode. The `F' and `L' cases are
1660 automatically generated from the `double' case. */
/* Expands to the three case labels (double/float/long double variants)
   and records the corresponding codes in fcode/fcodef/fcodel. */
1661 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1662 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1663 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1664 fcodel = BUILT_IN_MATHFN##L ; break;
1665 /* Similar to above, but appends _R after any F/L suffix. */
1666 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1667 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1668 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1669 fcodel = BUILT_IN_MATHFN##L_R ; break;
1671 /* Return mathematic function equivalent to FN but operating directly
1672 on TYPE, if available. If IMPLICIT is true find the function in
1673 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1674 can't do the conversion, return zero. */
/* NOTE(review): return type, braces, the switch header and the default
   case are elided in this excerpt -- confirm against upstream. */
1677 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1679 tree const *const fn_arr
1680 = implicit ? implicit_built_in_decls : built_in_decls;
1681 enum built_in_function fcode, fcodef, fcodel;
/* Each CASE_MATHFN line expands to the double/float/long-double case
   labels for that function and sets fcode/fcodef/fcodel accordingly. */
1685 CASE_MATHFN (BUILT_IN_ACOS)
1686 CASE_MATHFN (BUILT_IN_ACOSH)
1687 CASE_MATHFN (BUILT_IN_ASIN)
1688 CASE_MATHFN (BUILT_IN_ASINH)
1689 CASE_MATHFN (BUILT_IN_ATAN)
1690 CASE_MATHFN (BUILT_IN_ATAN2)
1691 CASE_MATHFN (BUILT_IN_ATANH)
1692 CASE_MATHFN (BUILT_IN_CBRT)
1693 CASE_MATHFN (BUILT_IN_CEIL)
1694 CASE_MATHFN (BUILT_IN_CEXPI)
1695 CASE_MATHFN (BUILT_IN_COPYSIGN)
1696 CASE_MATHFN (BUILT_IN_COS)
1697 CASE_MATHFN (BUILT_IN_COSH)
1698 CASE_MATHFN (BUILT_IN_DREM)
1699 CASE_MATHFN (BUILT_IN_ERF)
1700 CASE_MATHFN (BUILT_IN_ERFC)
1701 CASE_MATHFN (BUILT_IN_EXP)
1702 CASE_MATHFN (BUILT_IN_EXP10)
1703 CASE_MATHFN (BUILT_IN_EXP2)
1704 CASE_MATHFN (BUILT_IN_EXPM1)
1705 CASE_MATHFN (BUILT_IN_FABS)
1706 CASE_MATHFN (BUILT_IN_FDIM)
1707 CASE_MATHFN (BUILT_IN_FLOOR)
1708 CASE_MATHFN (BUILT_IN_FMA)
1709 CASE_MATHFN (BUILT_IN_FMAX)
1710 CASE_MATHFN (BUILT_IN_FMIN)
1711 CASE_MATHFN (BUILT_IN_FMOD)
1712 CASE_MATHFN (BUILT_IN_FREXP)
1713 CASE_MATHFN (BUILT_IN_GAMMA)
1714 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1715 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1716 CASE_MATHFN (BUILT_IN_HYPOT)
1717 CASE_MATHFN (BUILT_IN_ILOGB)
1718 CASE_MATHFN (BUILT_IN_INF)
1719 CASE_MATHFN (BUILT_IN_ISINF)
1720 CASE_MATHFN (BUILT_IN_J0)
1721 CASE_MATHFN (BUILT_IN_J1)
1722 CASE_MATHFN (BUILT_IN_JN)
1723 CASE_MATHFN (BUILT_IN_LCEIL)
1724 CASE_MATHFN (BUILT_IN_LDEXP)
1725 CASE_MATHFN (BUILT_IN_LFLOOR)
1726 CASE_MATHFN (BUILT_IN_LGAMMA)
1727 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1728 CASE_MATHFN (BUILT_IN_LLCEIL)
1729 CASE_MATHFN (BUILT_IN_LLFLOOR)
1730 CASE_MATHFN (BUILT_IN_LLRINT)
1731 CASE_MATHFN (BUILT_IN_LLROUND)
1732 CASE_MATHFN (BUILT_IN_LOG)
1733 CASE_MATHFN (BUILT_IN_LOG10)
1734 CASE_MATHFN (BUILT_IN_LOG1P)
1735 CASE_MATHFN (BUILT_IN_LOG2)
1736 CASE_MATHFN (BUILT_IN_LOGB)
1737 CASE_MATHFN (BUILT_IN_LRINT)
1738 CASE_MATHFN (BUILT_IN_LROUND)
1739 CASE_MATHFN (BUILT_IN_MODF)
1740 CASE_MATHFN (BUILT_IN_NAN)
1741 CASE_MATHFN (BUILT_IN_NANS)
1742 CASE_MATHFN (BUILT_IN_NEARBYINT)
1743 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1744 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1745 CASE_MATHFN (BUILT_IN_POW)
1746 CASE_MATHFN (BUILT_IN_POWI)
1747 CASE_MATHFN (BUILT_IN_POW10)
1748 CASE_MATHFN (BUILT_IN_REMAINDER)
1749 CASE_MATHFN (BUILT_IN_REMQUO)
1750 CASE_MATHFN (BUILT_IN_RINT)
1751 CASE_MATHFN (BUILT_IN_ROUND)
1752 CASE_MATHFN (BUILT_IN_SCALB)
1753 CASE_MATHFN (BUILT_IN_SCALBLN)
1754 CASE_MATHFN (BUILT_IN_SCALBN)
1755 CASE_MATHFN (BUILT_IN_SIGNBIT)
1756 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1757 CASE_MATHFN (BUILT_IN_SIN)
1758 CASE_MATHFN (BUILT_IN_SINCOS)
1759 CASE_MATHFN (BUILT_IN_SINH)
1760 CASE_MATHFN (BUILT_IN_SQRT)
1761 CASE_MATHFN (BUILT_IN_TAN)
1762 CASE_MATHFN (BUILT_IN_TANH)
1763 CASE_MATHFN (BUILT_IN_TGAMMA)
1764 CASE_MATHFN (BUILT_IN_TRUNC)
1765 CASE_MATHFN (BUILT_IN_Y0)
1766 CASE_MATHFN (BUILT_IN_Y1)
1767 CASE_MATHFN (BUILT_IN_YN)
/* Select the variant matching TYPE's main variant; unknown types fall
   through (the elided tail presumably returns NULL_TREE). */
1773 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1774 return fn_arr[fcode];
1775 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1776 return fn_arr[fcodef];
1777 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1778 return fn_arr[fcodel];
1783 /* Like mathfn_built_in_1(), but always use the implicit array. */
/* NOTE(review): return type and braces are elided in this excerpt. */
1786 mathfn_built_in (tree type, enum built_in_function fn)
1788 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1791 /* If errno must be maintained, expand the RTL to check if the result,
1792 TARGET, of a built-in function call, EXP, is NaN, and if so set
/* NOTE(review): lines are elided in this excerpt (comment tail, return
   type, braces, the label emission) -- confirm against upstream. */
1796 expand_errno_check (tree exp, rtx target)
1798 rtx lab = gen_label_rtx ();
1800 /* Test the result; if it is NaN, set errno=EDOM because
1801 the argument was not in the domain. */
/* A NaN compares unequal to itself, so TARGET == TARGET succeeds only
   for non-NaN results (jump target is in the elided lines). */
1802 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1806 /* If this built-in doesn't throw an exception, set errno directly. */
1807 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1809 #ifdef GEN_ERRNO_RTX
1810 rtx errno_rtx = GEN_ERRNO_RTX;
1813 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1815 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1821 /* Make sure the library call isn't expanded as a tail call. */
1822 CALL_EXPR_TAILCALL (exp) = 0;
1824 /* We can't set errno=EDOM directly; let the library call do it.
1825 Pop the arguments right away in case the call gets deleted. */
1827 expand_call (exp, target, 0);
1832 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1833 Return NULL_RTX if a normal call should be emitted rather than expanding
1834 the function in-line. EXP is the expression that is a call to the builtin
1835 function; if convenient, the result should be placed in TARGET.
1836 SUBTARGET may be used as the target for computing one of EXP's operands. */
/* NOTE(review): lines are elided in this excerpt (return type, braces,
   start_sequence/end_sequence, the failure path) -- confirm against
   upstream. */
1839 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1841 optab builtin_optab;
1842 rtx op0, insns, before_call;
1843 tree fndecl = get_callee_fndecl (exp);
1844 enum machine_mode mode;
1845 bool errno_set = false;
1848 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1851 arg = CALL_EXPR_ARG (exp, 0);
/* Select the optab for this builtin, and note which functions may need
   an errno check afterwards. */
1853 switch (DECL_FUNCTION_CODE (fndecl))
1855 CASE_FLT_FN (BUILT_IN_SQRT):
1856 errno_set = ! tree_expr_nonnegative_p (arg);
1857 builtin_optab = sqrt_optab;
1859 CASE_FLT_FN (BUILT_IN_EXP):
1860 errno_set = true; builtin_optab = exp_optab; break;
1861 CASE_FLT_FN (BUILT_IN_EXP10):
1862 CASE_FLT_FN (BUILT_IN_POW10):
1863 errno_set = true; builtin_optab = exp10_optab; break;
1864 CASE_FLT_FN (BUILT_IN_EXP2):
1865 errno_set = true; builtin_optab = exp2_optab; break;
1866 CASE_FLT_FN (BUILT_IN_EXPM1):
1867 errno_set = true; builtin_optab = expm1_optab; break;
1868 CASE_FLT_FN (BUILT_IN_LOGB):
1869 errno_set = true; builtin_optab = logb_optab; break;
1870 CASE_FLT_FN (BUILT_IN_LOG):
1871 errno_set = true; builtin_optab = log_optab; break;
1872 CASE_FLT_FN (BUILT_IN_LOG10):
1873 errno_set = true; builtin_optab = log10_optab; break;
1874 CASE_FLT_FN (BUILT_IN_LOG2):
1875 errno_set = true; builtin_optab = log2_optab; break;
1876 CASE_FLT_FN (BUILT_IN_LOG1P):
1877 errno_set = true; builtin_optab = log1p_optab; break;
1878 CASE_FLT_FN (BUILT_IN_ASIN):
1879 builtin_optab = asin_optab; break;
1880 CASE_FLT_FN (BUILT_IN_ACOS):
1881 builtin_optab = acos_optab; break;
1882 CASE_FLT_FN (BUILT_IN_TAN):
1883 builtin_optab = tan_optab; break;
1884 CASE_FLT_FN (BUILT_IN_ATAN):
1885 builtin_optab = atan_optab; break;
1886 CASE_FLT_FN (BUILT_IN_FLOOR):
1887 builtin_optab = floor_optab; break;
1888 CASE_FLT_FN (BUILT_IN_CEIL):
1889 builtin_optab = ceil_optab; break;
1890 CASE_FLT_FN (BUILT_IN_TRUNC):
1891 builtin_optab = btrunc_optab; break;
1892 CASE_FLT_FN (BUILT_IN_ROUND):
1893 builtin_optab = round_optab; break;
1894 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1895 builtin_optab = nearbyint_optab;
1896 if (flag_trapping_math)
1898 /* Else fallthrough and expand as rint. */
1899 CASE_FLT_FN (BUILT_IN_RINT):
1900 builtin_optab = rint_optab; break;
1905 /* Make a suitable register to place result in. */
1906 mode = TYPE_MODE (TREE_TYPE (exp));
/* Without -fmath-errno (or without NaNs in this mode) the errno check
   can be skipped. */
1908 if (! flag_errno_math || ! HONOR_NANS (mode))
1911 /* Before working hard, check whether the instruction is available. */
1912 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1914 target = gen_reg_rtx (mode);
1916 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1917 need to expand the argument again. This way, we will not perform
1918 side-effects more the once. */
1919 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1921 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1925 /* Compute into TARGET.
1926 Set TARGET to wherever the result comes back. */
1927 target = expand_unop (mode, builtin_optab, op0, target, 0);
1932 expand_errno_check (exp, target);
1934 /* Output the entire sequence. */
1935 insns = get_insns ();
1941 /* If we were unable to expand via the builtin, stop the sequence
1942 (without outputting the insns) and call to the library function
1943 with the stabilized argument list. */
1947 before_call = get_last_insn ();
1949 return expand_call (exp, target, target == const0_rtx);
1952 /* Expand a call to the builtin binary math functions (pow and atan2).
1953 Return NULL_RTX if a normal call should be emitted rather than expanding the
1954 function in-line. EXP is the expression that is a call to the builtin
1955 function; if convenient, the result should be placed in TARGET.
1956 SUBTARGET may be used as the target for computing one of EXP's
/* NOTE(review): lines are elided in this excerpt (return type, braces,
   start_sequence/end_sequence) -- confirm against upstream. */
1960 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1962 optab builtin_optab;
1963 rtx op0, op1, insns;
1964 int op1_type = REAL_TYPE;
1965 tree fndecl = get_callee_fndecl (exp);
1967 enum machine_mode mode;
1968 bool errno_set = true;
/* scalbn/scalbln/ldexp take an integer second argument; everything else
   here takes two reals. */
1970 switch (DECL_FUNCTION_CODE (fndecl))
1972 CASE_FLT_FN (BUILT_IN_SCALBN):
1973 CASE_FLT_FN (BUILT_IN_SCALBLN):
1974 CASE_FLT_FN (BUILT_IN_LDEXP):
1975 op1_type = INTEGER_TYPE;
1980 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
1983 arg0 = CALL_EXPR_ARG (exp, 0);
1984 arg1 = CALL_EXPR_ARG (exp, 1);
/* Select the optab for this builtin; the scalb family only expands
   when the type's radix is 2 (so it matches ldexp semantics). */
1986 switch (DECL_FUNCTION_CODE (fndecl))
1988 CASE_FLT_FN (BUILT_IN_POW):
1989 builtin_optab = pow_optab; break;
1990 CASE_FLT_FN (BUILT_IN_ATAN2):
1991 builtin_optab = atan2_optab; break;
1992 CASE_FLT_FN (BUILT_IN_SCALB):
1993 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
1995 builtin_optab = scalb_optab; break;
1996 CASE_FLT_FN (BUILT_IN_SCALBN):
1997 CASE_FLT_FN (BUILT_IN_SCALBLN):
1998 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2000 /* Fall through... */
2001 CASE_FLT_FN (BUILT_IN_LDEXP):
2002 builtin_optab = ldexp_optab; break;
2003 CASE_FLT_FN (BUILT_IN_FMOD):
2004 builtin_optab = fmod_optab; break;
2005 CASE_FLT_FN (BUILT_IN_REMAINDER):
2006 CASE_FLT_FN (BUILT_IN_DREM):
2007 builtin_optab = remainder_optab; break;
2012 /* Make a suitable register to place result in. */
2013 mode = TYPE_MODE (TREE_TYPE (exp));
2015 /* Before working hard, check whether the instruction is available. */
2016 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2019 target = gen_reg_rtx (mode);
2021 if (! flag_errno_math || ! HONOR_NANS (mode))
2024 /* Always stabilize the argument list. */
2025 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2026 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2028 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2029 op1 = expand_normal (arg1);
2033 /* Compute into TARGET.
2034 Set TARGET to wherever the result comes back. */
2035 target = expand_binop (mode, builtin_optab, op0, op1,
2036 target, 0, OPTAB_DIRECT);
2038 /* If we were unable to expand via the builtin, stop the sequence
2039 (without outputting the insns) and call to the library function
2040 with the stabilized argument list. */
2044 return expand_call (exp, target, target == const0_rtx);
2048 expand_errno_check (exp, target);
2050 /* Output the entire sequence. */
2051 insns = get_insns ();
/* NOTE(review): truncated listing — some braces/returns missing; kept verbatim.  */
2058 /* Expand a call to the builtin sin and cos math functions.
2059    Return NULL_RTX if a normal call should be emitted rather than expanding the
2060    function in-line.  EXP is the expression that is a call to the builtin
2061    function; if convenient, the result should be placed in TARGET.
2062    SUBTARGET may be used as the target for computing one of EXP's
2066 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2068   optab builtin_optab;
2070   tree fndecl = get_callee_fndecl (exp);
2071   enum machine_mode mode;
2074   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2077   arg = CALL_EXPR_ARG (exp, 0);
     /* Prefer the combined sincos optab for both sin and cos.  */
2079   switch (DECL_FUNCTION_CODE (fndecl))
2081     CASE_FLT_FN (BUILT_IN_SIN):
2082     CASE_FLT_FN (BUILT_IN_COS):
2083       builtin_optab = sincos_optab; break;
2088   /* Make a suitable register to place result in.  */
2089   mode = TYPE_MODE (TREE_TYPE (exp));
2091   /* Check if sincos insn is available, otherwise fallback
2092      to sin or cos insn.  */
2093   if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2094     switch (DECL_FUNCTION_CODE (fndecl))
2096       CASE_FLT_FN (BUILT_IN_SIN):
2097 	builtin_optab = sin_optab; break;
2098       CASE_FLT_FN (BUILT_IN_COS):
2099 	builtin_optab = cos_optab; break;
2104   /* Before working hard, check whether the instruction is available.  */
2105   if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2107       target = gen_reg_rtx (mode);
2109       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2110 	 need to expand the argument again.  This way, we will not perform
2111 	 side-effects more the once.  */
2112       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2114       op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2118       /* Compute into TARGET.
2119 	 Set TARGET to wherever the result comes back.  */
2120       if (builtin_optab == sincos_optab)
     /* sincos produces two values; select which output feeds TARGET
        depending on whether sin or cos was requested.  */
2124 	  switch (DECL_FUNCTION_CODE (fndecl))
2126 	    CASE_FLT_FN (BUILT_IN_SIN):
2127 	      result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2129 	    CASE_FLT_FN (BUILT_IN_COS):
2130 	      result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2135 	  gcc_assert (result);
2139 	target = expand_unop (mode, builtin_optab, op0, target, 0);
2144 	  /* Output the entire sequence.  */
2145 	  insns = get_insns ();
2151       /* If we were unable to expand via the builtin, stop the sequence
2152 	 (without outputting the insns) and call to the library function
2153 	 with the stabilized argument list.  */
2157   target = expand_call (exp, target, target == const0_rtx);
/* NOTE(review): truncated listing — some braces/case labels/returns missing;
   kept verbatim.  */
2162 /* Expand a call to one of the builtin math functions that operate on
2163    floating point argument and output an integer result (ilogb, isinf,
2165    Return 0 if a normal call should be emitted rather than expanding the
2166    function in-line.  EXP is the expression that is a call to the builtin
2167    function; if convenient, the result should be placed in TARGET.
2168    SUBTARGET may be used as the target for computing one of EXP's operands.  */
2171 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2173   optab builtin_optab = 0;
2174   enum insn_code icode = CODE_FOR_nothing;
2176   tree fndecl = get_callee_fndecl (exp);
2177   enum machine_mode mode;
2178   bool errno_set = false;
2181   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2184   arg = CALL_EXPR_ARG (exp, 0);
2186   switch (DECL_FUNCTION_CODE (fndecl))
2188     CASE_FLT_FN (BUILT_IN_ILOGB):
2189       errno_set = true; builtin_optab = ilogb_optab; break;
2190     CASE_FLT_FN (BUILT_IN_ISINF):
2191       builtin_optab = isinf_optab; break;
2192     case BUILT_IN_ISNORMAL:
2193     case BUILT_IN_ISFINITE:
2194     CASE_FLT_FN (BUILT_IN_FINITE):
2195       /* These builtins have no optabs (yet).  */
2201   /* There's no easy way to detect the case we need to set EDOM.  */
2202   if (flag_errno_math && errno_set)
2205   /* Optab mode depends on the mode of the input argument.  */
2206   mode = TYPE_MODE (TREE_TYPE (arg));
2209     icode = optab_handler (builtin_optab, mode)->insn_code;
2211   /* Before working hard, check whether the instruction is available.  */
2212   if (icode != CODE_FOR_nothing)
2214       /* Make a suitable register to place result in.  */
2216 	  || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2217 	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2219       gcc_assert (insn_data[icode].operand[0].predicate
2220 		  (target, GET_MODE (target)));
2222       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2223 	 need to expand the argument again.  This way, we will not perform
2224 	 side-effects more the once.  */
2225       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2227       op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2229       if (mode != GET_MODE (op0))
2230 	op0 = convert_to_mode (mode, op0, 0);
2232       /* Compute into TARGET.
2233 	 Set TARGET to wherever the result comes back.  */
2234       emit_unop_insn (icode, target, op0, UNKNOWN);
2238   /* If there is no optab, try generic code.  */
2239   switch (DECL_FUNCTION_CODE (fndecl))
2243     CASE_FLT_FN (BUILT_IN_ISINF):
2245 	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
2246 	tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2247 	tree const type = TREE_TYPE (arg);
2251 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2252 	real_from_string (&r, buf);
2253 	result = build_call_expr (isgr_fn, 2,
2254 				  fold_build1 (ABS_EXPR, type, arg),
2255 				  build_real (type, r));
2256 	return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2258     CASE_FLT_FN (BUILT_IN_FINITE):
2259     case BUILT_IN_ISFINITE:
2261 	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
2262 	tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2263 	tree const type = TREE_TYPE (arg);
2267 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2268 	real_from_string (&r, buf);
2269 	result = build_call_expr (isle_fn, 2,
2270 				  fold_build1 (ABS_EXPR, type, arg),
2271 				  build_real (type, r));
2272 	return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2274     case BUILT_IN_ISNORMAL:
2276 	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2277 	   islessequal(fabs(x),DBL_MAX).  */
2278 	tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2279 	tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2280 	tree const type = TREE_TYPE (arg);
2281 	REAL_VALUE_TYPE rmax, rmin;
2284 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2285 	real_from_string (&rmax, buf);
     /* Smallest normal value is 0x1p(emin-1) for the mode's format.  */
2286 	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2287 	real_from_string (&rmin, buf);
2288 	arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2289 	result = build_call_expr (isle_fn, 2, arg,
2290 				  build_real (type, rmax));
2291 	result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2292 			      build_call_expr (isge_fn, 2, arg,
2293 					       build_real (type, rmin)));
2294 	return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
     /* Fallback: emit a normal library call.  */
2300   target = expand_call (exp, target, target == const0_rtx);
/* NOTE(review): truncated listing — some braces/returns missing; kept verbatim.  */
2305 /* Expand a call to the builtin sincos math function.
2306    Return NULL_RTX if a normal call should be emitted rather than expanding the
2307    function in-line.  EXP is the expression that is a call to the builtin
2311 expand_builtin_sincos (tree exp)
2313   rtx op0, op1, op2, target1, target2;
2314   enum machine_mode mode;
2315   tree arg, sinp, cosp;
     /* sincos(x, *sinp, *cosp): one real input, two pointer outputs.  */
2318   if (!validate_arglist (exp, REAL_TYPE,
2319 			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2322   arg = CALL_EXPR_ARG (exp, 0);
2323   sinp = CALL_EXPR_ARG (exp, 1);
2324   cosp = CALL_EXPR_ARG (exp, 2);
2326   /* Make a suitable register to place result in.  */
2327   mode = TYPE_MODE (TREE_TYPE (arg));
2329   /* Check if sincos insn is available, otherwise emit the call.  */
2330   if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2333   target1 = gen_reg_rtx (mode);
2334   target2 = gen_reg_rtx (mode);
2336   op0 = expand_normal (arg);
     /* Expand the two destination lvalues (*sinp and *cosp) as memory.  */
2337   op1 = expand_normal (build_fold_indirect_ref (sinp));
2338   op2 = expand_normal (build_fold_indirect_ref (cosp));
2340   /* Compute into target1 and target2.
2341      Set TARGET to wherever the result comes back.  */
2342   result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2343   gcc_assert (result);
2345   /* Move target1 and target2 to the memory locations indicated
2347   emit_move_insn (op1, target1);
2348   emit_move_insn (op2, target2);
/* NOTE(review): truncated listing — some braces/returns missing; kept verbatim.  */
2353 /* Expand a call to the internal cexpi builtin to the sincos math function.
2354    EXP is the expression that is a call to the builtin function; if convenient,
2355    the result should be placed in TARGET.  SUBTARGET may be used as the target
2356    for computing one of EXP's operands.  */
2359 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2361   tree fndecl = get_callee_fndecl (exp);
2363   enum machine_mode mode;
2366   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2369   arg = CALL_EXPR_ARG (exp, 0);
2370   type = TREE_TYPE (arg);
2371   mode = TYPE_MODE (TREE_TYPE (arg));
2373   /* Try expanding via a sincos optab, fall back to emitting a libcall
2374      to sincos or cexp.  We are sure we have sincos or cexp because cexpi
2375      is only generated from sincos, cexp or if we have either of them.  */
2376   if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2378       op1 = gen_reg_rtx (mode);
2379       op2 = gen_reg_rtx (mode);
2381       op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2383       /* Compute into op1 and op2.  */
2384       expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2386   else if (TARGET_HAS_SINCOS)
2388       tree call, fn = NULL_TREE;
     /* Pick the sincos variant matching the cexpi precision.  */
2392       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2393 	fn = built_in_decls[BUILT_IN_SINCOSF];
2394       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2395 	fn = built_in_decls[BUILT_IN_SINCOS];
2396       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2397 	fn = built_in_decls[BUILT_IN_SINCOSL];
     /* Stack temporaries receive sin/cos; take their addresses so they
        can be passed to sincos by pointer.  */
2401       op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2402       op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2403       op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2404       op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2405       top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2406       top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2408       /* Make sure not to fold the sincos call again.  */
2409       call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2410       expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2411 				      call, 3, arg, top1, top2));
2415       tree call, fn = NULL_TREE, narg;
2416       tree ctype = build_complex_type (type);
2418       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2419 	fn = built_in_decls[BUILT_IN_CEXPF];
2420       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2421 	fn = built_in_decls[BUILT_IN_CEXP];
2422       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2423 	fn = built_in_decls[BUILT_IN_CEXPL];
2427       /* If we don't have a decl for cexp create one.  This is the
2428 	 friendliest fallback if the user calls __builtin_cexpi
2429 	 without full target C99 function support.  */
2430       if (fn == NULL_TREE)
2433 	  const char *name = NULL;
2435 	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2437 	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2439 	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2442 	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2443 	  fn = build_fn_decl (name, fntype);
     /* cexpi(x) == cexp(0 + x*i): build the pure-imaginary argument.  */
2446       narg = fold_build2 (COMPLEX_EXPR, ctype,
2447 			  build_real (type, dconst0), arg);
2449       /* Make sure not to fold the cexp call again.  */
2450       call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2451       return expand_expr (build_call_nary (ctype, call, 1, narg),
2452 			  target, VOIDmode, EXPAND_NORMAL);
2455   /* Now build the proper return type.  */
2456   return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2457 			      make_tree (TREE_TYPE (arg), op2),
2458 			      make_tree (TREE_TYPE (arg), op1)),
2459 		      target, VOIDmode, EXPAND_NORMAL);
/* NOTE(review): truncated listing — some braces/case bodies/returns missing;
   kept verbatim.  */
2462 /* Expand a call to one of the builtin rounding functions gcc defines
2463    as an extension (lfloor and lceil).  As these are gcc extensions we
2464    do not need to worry about setting errno to EDOM.
2465    If expanding via optab fails, lower expression to (int)(floor(x)).
2466    EXP is the expression that is a call to the builtin function;
2467    if convenient, the result should be placed in TARGET.  SUBTARGET may
2468    be used as the target for computing one of EXP's operands.  */
2471 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2473   convert_optab builtin_optab;
2474   rtx op0, insns, tmp;
2475   tree fndecl = get_callee_fndecl (exp);
2476   enum built_in_function fallback_fn;
2477   tree fallback_fndecl;
2478   enum machine_mode mode;
2481   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2484   arg = CALL_EXPR_ARG (exp, 0);
2486   switch (DECL_FUNCTION_CODE (fndecl))
2488     CASE_FLT_FN (BUILT_IN_LCEIL):
2489     CASE_FLT_FN (BUILT_IN_LLCEIL):
2490       builtin_optab = lceil_optab;
2491       fallback_fn = BUILT_IN_CEIL;
2494     CASE_FLT_FN (BUILT_IN_LFLOOR):
2495     CASE_FLT_FN (BUILT_IN_LLFLOOR):
2496       builtin_optab = lfloor_optab;
2497       fallback_fn = BUILT_IN_FLOOR;
2504   /* Make a suitable register to place result in.  */
2505   mode = TYPE_MODE (TREE_TYPE (exp));
2507   target = gen_reg_rtx (mode);
2509   /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2510      need to expand the argument again.  This way, we will not perform
2511      side-effects more the once.  */
2512   CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2514   op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2518   /* Compute into TARGET.  */
2519   if (expand_sfix_optab (target, op0, builtin_optab))
2521       /* Output the entire sequence.  */
2522       insns = get_insns ();
2528   /* If we were unable to expand via the builtin, stop the sequence
2529      (without outputting the insns).  */
2532   /* Fall back to floating point rounding optab.  */
2533   fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2535   /* For non-C99 targets we may end up without a fallback fndecl here
2536      if the user called __builtin_lfloor directly.  In this case emit
2537      a call to the floor/ceil variants nevertheless.  This should result
2538      in the best user experience for not full C99 targets.  */
2539   if (fallback_fndecl == NULL_TREE)
2542       const char *name = NULL;
     /* Choose the library routine name by builtin and precision; the name
        assignments themselves appear to be among the truncated lines.  */
2544       switch (DECL_FUNCTION_CODE (fndecl))
2546 	case BUILT_IN_LCEIL:
2547 	case BUILT_IN_LLCEIL:
2550 	case BUILT_IN_LCEILF:
2551 	case BUILT_IN_LLCEILF:
2554 	case BUILT_IN_LCEILL:
2555 	case BUILT_IN_LLCEILL:
2558 	case BUILT_IN_LFLOOR:
2559 	case BUILT_IN_LLFLOOR:
2562 	case BUILT_IN_LFLOORF:
2563 	case BUILT_IN_LLFLOORF:
2566 	case BUILT_IN_LFLOORL:
2567 	case BUILT_IN_LLFLOORL:
2574       fntype = build_function_type_list (TREE_TYPE (arg),
2575 					 TREE_TYPE (arg), NULL_TREE);
2576       fallback_fndecl = build_fn_decl (name, fntype);
     /* Call the floating-point floor/ceil, then fix the result to integer.  */
2579   exp = build_call_expr (fallback_fndecl, 1, arg);
2581   tmp = expand_normal (exp);
2583   /* Truncate the result of floating point optab to integer
2584      via expand_fix ().  */
2585   target = gen_reg_rtx (mode);
2586   expand_fix (target, tmp, 0);
/* NOTE(review): truncated listing — some braces/returns missing; kept verbatim.  */
2591 /* Expand a call to one of the builtin math functions doing integer
2593    Return 0 if a normal call should be emitted rather than expanding the
2594    function in-line.  EXP is the expression that is a call to the builtin
2595    function; if convenient, the result should be placed in TARGET.
2596    SUBTARGET may be used as the target for computing one of EXP's operands.  */
2599 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2601   convert_optab builtin_optab;
2603   tree fndecl = get_callee_fndecl (exp);
2605   enum machine_mode mode;
2607   /* There's no easy way to detect the case we need to set EDOM.  */
2608   if (flag_errno_math)
2611   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2614   arg = CALL_EXPR_ARG (exp, 0);
     /* lrint/llrint and lround/llround share the two conversion optabs.  */
2616   switch (DECL_FUNCTION_CODE (fndecl))
2618     CASE_FLT_FN (BUILT_IN_LRINT):
2619     CASE_FLT_FN (BUILT_IN_LLRINT):
2620       builtin_optab = lrint_optab; break;
2621     CASE_FLT_FN (BUILT_IN_LROUND):
2622     CASE_FLT_FN (BUILT_IN_LLROUND):
2623       builtin_optab = lround_optab; break;
2628   /* Make a suitable register to place result in.  */
2629   mode = TYPE_MODE (TREE_TYPE (exp));
2631   target = gen_reg_rtx (mode);
2633   /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2634      need to expand the argument again.  This way, we will not perform
2635      side-effects more the once.  */
2636   CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2638   op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2642   if (expand_sfix_optab (target, op0, builtin_optab))
2644       /* Output the entire sequence.  */
2645       insns = get_insns ();
2651   /* If we were unable to expand via the builtin, stop the sequence
2652      (without outputting the insns) and call to the library function
2653      with the stabilized argument list.  */
2656   target = expand_call (exp, target, target == const0_rtx);
2661 /* To evaluate powi(x,n), the floating point value x raised to the
2662    constant integer exponent n, we use a hybrid algorithm that
2663    combines the "window method" with look-up tables.  For an
2664    introduction to exponentiation algorithms and "addition chains",
2665    see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2666    "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2667    3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2668    Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998.  */
2670 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2671    multiplications to inline before calling the system library's pow
2672    function.  powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2673    so this default never requires calling pow, powf or powl.  */
2675 #ifndef POWI_MAX_MULTS
2676 #define POWI_MAX_MULTS  (2*HOST_BITS_PER_WIDE_INT-2)
2679 /* The size of the "optimal power tree" lookup table.  All
2680    exponents less than this value are simply looked up in the
2681    powi_table below.  This threshold is also used to size the
2682    cache of pseudo registers that hold intermediate results.  */
2683 #define POWI_TABLE_SIZE 256
2685 /* The size, in bits of the window, used in the "window method"
2686    exponentiation algorithm.  This is equivalent to a radix of
2687    (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method".  */
2688 #define POWI_WINDOW_SIZE 3
2690 /* The following table is an efficient representation of an
2691    "optimal power tree".  For each value, i, the corresponding
2692    value, j, in the table states than an optimal evaluation
2693    sequence for calculating pow(x,i) can be found by evaluating
2694    pow(x,j)*pow(x,i-j).  An optimal power tree for the first
2695    100 integers is given in Knuth's "Seminumerical algorithms".  */
/* NOTE(review): the listing appears to have dropped the table's opening
   brace and trailing "};" — entries below kept verbatim.  */
2697 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2699       0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
2700       4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
2701       8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
2702      12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
2703      16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
2704      20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
2705      24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
2706      28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
2707      32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
2708      36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
2709      40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
2710      44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
2711      48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
2712      52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
2713      56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
2714      60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
2715      64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
2716      68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
2717      72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
2718      76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
2719      80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
2720      84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
2721      88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
2722      92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
2723      96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
2724     100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
2725     104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
2726     108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
2727     112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
2728     116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
2729     120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
2730     124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
/* NOTE(review): truncated listing — the early-out on a cached exponent and
   the cache update appear to be among the missing lines; kept verbatim.  */
2734 /* Return the number of multiplications required to calculate
2735    powi(x,n) where n is less than POWI_TABLE_SIZE.  This is a
2736    subroutine of powi_cost.  CACHE is an array indicating
2737    which exponents have already been calculated.  */
2740 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2742   /* If we've already calculated this exponent, then this evaluation
2743      doesn't require any additional multiplications.  */
     /* Recurse per the power-tree decomposition n = powi_table[n]
        + (n - powi_table[n]); +1 for the combining multiply.  */
2748   return powi_lookup_cost (n - powi_table[n], cache)
2749 	 + powi_lookup_cost (powi_table[n], cache) + 1;
/* NOTE(review): truncated listing — the n == 0 early return and loop braces
   appear to be among the missing lines; kept verbatim.  */
2752 /* Return the number of multiplications required to calculate
2753    powi(x,n) for an arbitrary x, given the exponent N.  This
2754    function needs to be kept in sync with expand_powi below.  */
2757 powi_cost (HOST_WIDE_INT n)
2759   bool cache[POWI_TABLE_SIZE];
2760   unsigned HOST_WIDE_INT digit;
2761   unsigned HOST_WIDE_INT val;
2767   /* Ignore the reciprocal when calculating the cost.  */
2768   val = (n < 0) ? -n : n;
2770   /* Initialize the exponent cache.  */
2771   memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
     /* Window method: peel POWI_WINDOW_SIZE bits at a time until the
        remaining exponent fits in the lookup table.  */
2776   while (val >= POWI_TABLE_SIZE)
2780 	  digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2781 	  result += powi_lookup_cost (digit, cache)
2782 		    + POWI_WINDOW_SIZE + 1;
2783 	  val >>= POWI_WINDOW_SIZE;
2792   return result + powi_lookup_cost (val, cache);
/* NOTE(review): truncated listing — cache-hit fast path, branch conditions
   and braces appear to be among the missing lines; kept verbatim.  */
2795 /* Recursive subroutine of expand_powi.  This function takes the array,
2796    CACHE, of already calculated exponents and an exponent N and returns
2797    an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE.  */
2800 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2802   unsigned HOST_WIDE_INT digit;
     /* Small exponents: split per the optimal power tree.  */
2806   if (n < POWI_TABLE_SIZE)
2811       target = gen_reg_rtx (mode);
2814       op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2815       op1 = expand_powi_1 (mode, powi_table[n], cache);
     /* Odd large exponents: peel off the low POWI_WINDOW_SIZE bits.  */
2819       target = gen_reg_rtx (mode);
2820       digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2821       op0 = expand_powi_1 (mode, n - digit, cache);
2822       op1 = expand_powi_1 (mode, digit, cache);
     /* Even exponents: square the half power.  */
2826       target = gen_reg_rtx (mode);
2827       op0 = expand_powi_1 (mode, n >> 1, cache);
     /* Emit the combining multiplication into TARGET.  */
2831   result = expand_mult (mode, op0, op1, target, 0);
2832   if (result != target)
2833     emit_move_insn (target, result);
/* NOTE(review): truncated listing — n == 0 early return condition, cache
   seeding with X and the negative-n test appear to be among the missing
   lines; kept verbatim.  */
2837 /* Expand the RTL to evaluate powi(x,n) in mode MODE.  X is the
2838    floating point operand in mode MODE, and N is the exponent.  This
2839    function needs to be kept in sync with powi_cost above.  */
2842 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2844   unsigned HOST_WIDE_INT val;
2845   rtx cache[POWI_TABLE_SIZE];
     /* powi(x, 0) == 1 for any x.  */
2849     return CONST1_RTX (mode);
2851   val = (n < 0) ? -n : n;
2853   memset (cache, 0, sizeof (cache));
2856   result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2858   /* If the original exponent was negative, reciprocate the result.  */
2860     result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2861 			   result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
/* NOTE(review): truncated listing — several condition lines, braces and
   returns are missing; kept verbatim.  */
2866 /* Expand a call to the pow built-in mathematical function.  Return NULL_RTX if
2867    a normal call should be emitted rather than expanding the function
2868    in-line.  EXP is the expression that is a call to the builtin
2869    function; if convenient, the result should be placed in TARGET.  */
2872 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2876   tree type = TREE_TYPE (exp);
2877   REAL_VALUE_TYPE cint, c, c2;
2880   enum machine_mode mode = TYPE_MODE (type);
2882   if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2885   arg0 = CALL_EXPR_ARG (exp, 0);
2886   arg1 = CALL_EXPR_ARG (exp, 1);
     /* Non-constant exponent: defer to the generic two-argument expander.  */
2888   if (TREE_CODE (arg1) != REAL_CST
2889       || TREE_OVERFLOW (arg1))
2890     return expand_builtin_mathfn_2 (exp, target, subtarget);
2892   /* Handle constant exponents.  */
2894   /* For integer valued exponents we can expand to an optimal multiplication
2895      sequence using expand_powi.  */
2896   c = TREE_REAL_CST (arg1);
2897   n = real_to_integer (&c);
2898   real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2899   if (real_identical (&c, &cint)
2900       && ((n >= -1 && n <= 2)
2901 	  || (flag_unsafe_math_optimizations
2903 	      && powi_cost (n) <= POWI_MAX_MULTS)))
2905       op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2908 	  op = force_reg (mode, op);
2909 	  op = expand_powi (op, mode, n);
     /* Stabilize the base: it may be expanded more than once below.  */
2914   narg0 = builtin_save_expr (arg0);
2916   /* If the exponent is not integer valued, check if it is half of an integer.
2917      In this case we can expand to sqrt (x) * x**(n/2).  */
2918   fn = mathfn_built_in (type, BUILT_IN_SQRT);
2919   if (fn != NULL_TREE)
2921       real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2922       n = real_to_integer (&c2);
2923       real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2924       if (real_identical (&c2, &cint)
2925 	  && ((flag_unsafe_math_optimizations
2927 	       && powi_cost (n/2) <= POWI_MAX_MULTS)
2930 	  tree call_expr = build_call_expr (fn, 1, narg0);
2931 	  /* Use expand_expr in case the newly built call expression
2932 	     was folded to a non-call.  */
2933 	  op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2936 	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2937 	      op2 = force_reg (mode, op2);
2938 	      op2 = expand_powi (op2, mode, abs (n / 2));
2939 	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2940 					0, OPTAB_LIB_WIDEN);
2941 	      /* If the original exponent was negative, reciprocate the
2944 		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2945 				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2951   /* Try if the exponent is a third of an integer.  In this case
2952      we can expand to x**(n/3) * cbrt(x)**(n%3).  As cbrt (x) is
2953      different from pow (x, 1./3.) due to rounding and behavior
2954      with negative x we need to constrain this transformation to
2955      unsafe math and positive x or finite math.  */
2956   fn = mathfn_built_in (type, BUILT_IN_CBRT);
2958       && flag_unsafe_math_optimizations
2959       && (tree_expr_nonnegative_p (arg0)
2960 	  || !HONOR_NANS (mode)))
2962       REAL_VALUE_TYPE dconst3;
2963       real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
     /* Round 3*c to the nearest integer and check 3*c was exactly that
        integer, i.e. the exponent is k/3 for integer k.  */
2964       real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2965       real_round (&c2, mode, &c2);
2966       n = real_to_integer (&c2);
2967       real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2968       real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
2969       real_convert (&c2, mode, &c2);
2970       if (real_identical (&c2, &c)
2972 	  && powi_cost (n/3) <= POWI_MAX_MULTS)
2975 	  tree call_expr = build_call_expr (fn, 1,narg0);
2976 	  op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2977 	  if (abs (n) % 3 == 2)
2978 	    op = expand_simple_binop (mode, MULT, op, op, op,
2979 				      0, OPTAB_LIB_WIDEN);
2982 	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2983 	      op2 = force_reg (mode, op2);
2984 	      op2 = expand_powi (op2, mode, abs (n / 3));
2985 	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2986 					0, OPTAB_LIB_WIDEN);
2987 	      /* If the original exponent was negative, reciprocate the
2990 		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2991 				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2997   /* Fall back to optab expansion.  */
2998   return expand_builtin_mathfn_2 (exp, target, subtarget);
/* NOTE(review): truncated listing — some braces/returns missing; kept verbatim.  */
3001 /* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
3002    a normal call should be emitted rather than expanding the function
3003    in-line.  EXP is the expression that is a call to the builtin
3004    function; if convenient, the result should be placed in TARGET.  */
3007 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3011   enum machine_mode mode;
3012   enum machine_mode mode2;
3014   if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3017   arg0 = CALL_EXPR_ARG (exp, 0);
3018   arg1 = CALL_EXPR_ARG (exp, 1);
3019   mode = TYPE_MODE (TREE_TYPE (exp));
3021   /* Handle constant power.  */
3023   if (TREE_CODE (arg1) == INTEGER_CST
3024       && !TREE_OVERFLOW (arg1))
3026       HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3028       /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3029 	 Otherwise, check the number of multiplications required.  */
     /* HIGH of 0 or -1 means the constant fits in a HOST_WIDE_INT.  */
3030       if ((TREE_INT_CST_HIGH (arg1) == 0
3031 	   || TREE_INT_CST_HIGH (arg1) == -1)
3032 	  && ((n >= -1 && n <= 2)
3034 		  && powi_cost (n) <= POWI_MAX_MULTS)))
3036 	  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3037 	  op0 = force_reg (mode, op0);
3038 	  return expand_powi (op0, mode, n);
3042   /* Emit a libcall to libgcc.  */
3044   /* Mode of the 2nd argument must match that of an int.  */
3045   mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3047   if (target == NULL_RTX)
3048     target = gen_reg_rtx (mode);
3050   op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3051   if (GET_MODE (op0) != mode)
3052     op0 = convert_to_mode (mode, op0, 0);
3053   op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3054   if (GET_MODE (op1) != mode2)
3055     op1 = convert_to_mode (mode2, op1, 0);
3057   target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3058 				    target, LCT_CONST, mode, 2,
3059 				    op0, mode, op1, mode2);
/* NOTE(review): truncated listing — some braces/returns/conditions missing;
   kept verbatim.  */
3064 /* Expand expression EXP which is a call to the strlen builtin.  Return
3065    NULL_RTX if we failed the caller should emit a normal call, otherwise
3066    try to get the result in TARGET, if convenient.  */
3069 expand_builtin_strlen (tree exp, rtx target,
3070 		       enum machine_mode target_mode)
3072   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3078       tree src = CALL_EXPR_ARG (exp, 0);
3079       rtx result, src_reg, char_rtx, before_strlen;
3080       enum machine_mode insn_mode = target_mode, char_mode;
3081       enum insn_code icode = CODE_FOR_nothing;
3084       /* If the length can be computed at compile-time, return it.  */
3085       len = c_strlen (src, 0);
3087 	return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3089       /* If the length can be computed at compile-time and is constant
3090 	 integer, but there are side-effects in src, evaluate
3091 	 src for side-effects, then return len.
3092 	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3093 	 can be optimized into: i++; x = 3;  */
3094       len = c_strlen (src, 1);
3095       if (len && TREE_CODE (len) == INTEGER_CST)
3097 	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3098 	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3101       align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3103       /* If SRC is not a pointer type, don't do this operation inline.  */
3107       /* Bail out if we can't compute strlen in the right mode.  */
     /* Walk wider integer modes until one has a strlen insn.  */
3108       while (insn_mode != VOIDmode)
3110 	  icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3111 	  if (icode != CODE_FOR_nothing)
3114 	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3116       if (insn_mode == VOIDmode)
3119       /* Make a place to write the result of the instruction.  */
3123 	    && GET_MODE (result) == insn_mode
3124 	    && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3125 	result = gen_reg_rtx (insn_mode);
3127       /* Make a place to hold the source address.  We will not expand
3128 	 the actual source until we are sure that the expansion will
3129 	 not fail -- there are trees that cannot be expanded twice.  */
3130       src_reg = gen_reg_rtx (Pmode);
3132       /* Mark the beginning of the strlen sequence so we can emit the
3133 	 source operand later.  */
3134       before_strlen = get_last_insn ();
3136       char_rtx = const0_rtx;
3137       char_mode = insn_data[(int) icode].operand[2].mode;
3138       if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3140 	char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3142       pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3143 			     char_rtx, GEN_INT (align));
3148       /* Now that we are assured of success, expand the source.  */
3150       pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3152 	emit_move_insn (src_reg, pat);
     /* Emit the address computation before the strlen pattern so the
        pattern sees the initialized SRC_REG.  */
3157 	emit_insn_after (pat, before_strlen);
3159 	emit_insn_before (pat, get_insns ());
3161       /* Return the value in the proper mode for this function.  */
3162       if (GET_MODE (result) == target_mode)
3164       else if (target != 0)
3165 	convert_move (target, result, 0);
3167 	target = convert_to_mode (target_mode, result, 0);
3173 /* Expand a call to the strstr builtin.  Return NULL_RTX if we failed the
3174 caller should emit a normal call, otherwise try to get the result
3175 in TARGET, if convenient (and in mode MODE if that's convenient). */
3178 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
/* Only the compile-time-folding path is visible here: if fold_builtin_strstr
   simplifies the call to a tree, expand that tree instead of calling strstr.
   NOTE(review): the failure-path return (presumably NULL_RTX) is on an elided
   line of this listing -- confirm against the full source.  */
3180 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3182 tree type = TREE_TYPE (exp);
3183 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3184 CALL_EXPR_ARG (exp, 1), type);
3186 return expand_expr (result, target, mode, EXPAND_NORMAL);
3191 /* Expand a call to the strchr builtin.  Return NULL_RTX if we failed the
3192 caller should emit a normal call, otherwise try to get the result
3193 in TARGET, if convenient (and in mode MODE if that's convenient). */
3196 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
/* Expand only when the call can be folded at compile time; otherwise the
   (elided) fallthrough leaves the caller to emit a normal library call.  */
3198 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3200 tree type = TREE_TYPE (exp);
3201 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3202 CALL_EXPR_ARG (exp, 1), type);
3204 return expand_expr (result, target, mode, EXPAND_NORMAL);
3206 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3211 /* Expand a call to the strrchr builtin.  Return NULL_RTX if we failed the
3212 caller should emit a normal call, otherwise try to get the result
3213 in TARGET, if convenient (and in mode MODE if that's convenient). */
3216 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
/* Mirrors expand_builtin_strchr: expand the folded tree when
   fold_builtin_strrchr succeeds, else fall back to a library call
   (fallback return is on an elided line).  */
3218 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3220 tree type = TREE_TYPE (exp);
3221 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3222 CALL_EXPR_ARG (exp, 1), type);
3224 return expand_expr (result, target, mode, EXPAND_NORMAL);
3229 /* Expand a call to the strpbrk builtin.  Return NULL_RTX if we failed the
3230 caller should emit a normal call, otherwise try to get the result
3231 in TARGET, if convenient (and in mode MODE if that's convenient). */
3234 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
/* Same shape as the other str* expanders: expand the compile-time-folded
   form when available; the library-call fallback line is elided here.  */
3236 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3238 tree type = TREE_TYPE (exp);
3239 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3240 CALL_EXPR_ARG (exp, 1), type);
3242 return expand_expr (result, target, mode, EXPAND_NORMAL);
3247 /* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
3248 bytes from constant string DATA + OFFSET and return it as target
3252 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3253 enum machine_mode mode)
3255 const char *str = (const char *) data;
/* The assertion guarantees the read stays within the string including its
   terminating NUL -- callers must have validated the length beforehand.  */
3257 gcc_assert (offset >= 0
3258 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3259 <= strlen (str) + 1));
3261 return c_readstr (str + offset, mode);
3264 /* Expand a call EXP to the memcpy builtin.
3265 Return NULL_RTX if we failed, the caller should emit a normal call,
3266 otherwise try to get the result in TARGET, if convenient (and in
3267 mode MODE if that's convenient). */
3270 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3272 tree fndecl = get_callee_fndecl (exp);
3274 if (!validate_arglist (exp,
3275 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3279 tree dest = CALL_EXPR_ARG (exp, 0);
3280 tree src = CALL_EXPR_ARG (exp, 1);
3281 tree len = CALL_EXPR_ARG (exp, 2);
3282 const char *src_str;
3283 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3284 unsigned int dest_align
3285 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3286 rtx dest_mem, src_mem, dest_addr, len_rtx;
/* First try folding the whole call to a simpler tree at compile time.  */
3287 tree result = fold_builtin_memory_op (dest, src, len,
3288 TREE_TYPE (TREE_TYPE (fndecl)),
3290 HOST_WIDE_INT expected_size = -1;
3291 unsigned int expected_align = 0;
/* COMPOUND_EXPRs carry side effects of the original arguments: evaluate
   each left operand purely for effect, then expand the final value.  */
3295 while (TREE_CODE (result) == COMPOUND_EXPR)
3297 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3299 result = TREE_OPERAND (result, 1);
3301 return expand_expr (result, target, mode, EXPAND_NORMAL);
3304 /* If DEST is not a pointer type, call the normal function. */
3305 if (dest_align == 0)
3308 /* If either SRC is not a pointer type, don't do this
3309 operation in-line. */
/* Profile-driven hints (expected size/alignment) for the block move.  */
3313 stringop_block_profile (exp, &expected_align, &expected_size);
3314 if (expected_align < dest_align)
3315 expected_align = dest_align;
3316 dest_mem = get_memory_rtx (dest, len);
3317 set_mem_align (dest_mem, dest_align);
3318 len_rtx = expand_normal (len);
3319 src_str = c_getstr (src);
3321 /* If SRC is a string constant and block move would be done
3322 by pieces, we can avoid loading the string from memory
3323 and only stored the computed constants. */
3325 && GET_CODE (len_rtx) == CONST_INT
3326 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3327 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3328 CONST_CAST (char *, src_str),
3331 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3332 builtin_memcpy_read_str,
3333 CONST_CAST (char *, src_str),
3334 dest_align, false, 0);
3335 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3336 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3340 src_mem = get_memory_rtx (src, len);
3341 set_mem_align (src_mem, src_align);
3343 /* Copy word part most expediently. */
3344 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3345 CALL_EXPR_TAILCALL (exp)
3346 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3347 expected_align, expected_size);
/* NOTE(review): the condition guarding this recomputation of dest_addr
   (presumably "if (dest_addr == 0)") is on an elided line -- confirm.  */
3351 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3352 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3358 /* Expand a call EXP to the mempcpy builtin.
3359 Return NULL_RTX if we failed; the caller should emit a normal call,
3360 otherwise try to get the result in TARGET, if convenient (and in
3361 mode MODE if that's convenient).  If ENDP is 0 return the
3362 destination pointer, if ENDP is 1 return the end pointer ala
3363 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3367 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
/* Thin wrapper: unpack the three call arguments and delegate to
   expand_builtin_mempcpy_args with mempcpy semantics (endp == 1).  */
3369 if (!validate_arglist (exp,
3370 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3374 tree dest = CALL_EXPR_ARG (exp, 0);
3375 tree src = CALL_EXPR_ARG (exp, 1);
3376 tree len = CALL_EXPR_ARG (exp, 2);
3377 return expand_builtin_mempcpy_args (dest, src, len,
3379 target, mode, /*endp=*/ 1);
3383 /* Helper function to do the actual work for expand_builtin_mempcpy.  The
3384 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3385 so that this can also be called without constructing an actual CALL_EXPR.
3386 TYPE is the return type of the call.  The other arguments and return value
3387 are the same as for expand_builtin_mempcpy. */
3390 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3391 rtx target, enum machine_mode mode, int endp)
3393 /* If return value is ignored, transform mempcpy into memcpy. */
3394 if (target == const0_rtx)
3396 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3401 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3402 target, mode, EXPAND_NORMAL);
3406 const char *src_str;
3407 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3408 unsigned int dest_align
3409 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3410 rtx dest_mem, src_mem, len_rtx;
/* Try folding the call away entirely at compile time first.  */
3411 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
/* Evaluate any side-effect wrappers, then expand the folded value.  */
3415 while (TREE_CODE (result) == COMPOUND_EXPR)
3417 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3419 result = TREE_OPERAND (result, 1);
3421 return expand_expr (result, target, mode, EXPAND_NORMAL);
3424 /* If either SRC or DEST is not a pointer type, don't do this
3425 operation in-line. */
3426 if (dest_align == 0 || src_align == 0)
3429 /* If LEN is not constant, call the normal function. */
3430 if (! host_integerp (len, 1))
3433 len_rtx = expand_normal (len);
3434 src_str = c_getstr (src);
3436 /* If SRC is a string constant and block move would be done
3437 by pieces, we can avoid loading the string from memory
3438 and only stored the computed constants. */
3440 && GET_CODE (len_rtx) == CONST_INT
3441 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3442 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3443 CONST_CAST (char *, src_str),
3446 dest_mem = get_memory_rtx (dest, len);
3447 set_mem_align (dest_mem, dest_align);
/* ENDP is forwarded so store_by_pieces returns the requested pointer
   (dest, end, or end-1).  */
3448 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3449 builtin_memcpy_read_str,
3450 CONST_CAST (char *, src_str),
3451 dest_align, false, endp);
3452 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3453 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Otherwise move by pieces when the constant length permits it.  */
3457 if (GET_CODE (len_rtx) == CONST_INT
3458 && can_move_by_pieces (INTVAL (len_rtx),
3459 MIN (dest_align, src_align)))
3461 dest_mem = get_memory_rtx (dest, len);
3462 set_mem_align (dest_mem, dest_align);
3463 src_mem = get_memory_rtx (src, len);
3464 set_mem_align (src_mem, src_align);
3465 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3466 MIN (dest_align, src_align), endp);
3467 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3468 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3476 /* Expand expression EXP, which is a call to the memmove builtin.  Return
3477 NULL_RTX if we failed; the caller should emit a normal call. */
3480 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
/* Thin wrapper: unpack the three call arguments and delegate to
   expand_builtin_memmove_args.  */
3482 if (!validate_arglist (exp,
3483 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3487 tree dest = CALL_EXPR_ARG (exp, 0);
3488 tree src = CALL_EXPR_ARG (exp, 1);
3489 tree len = CALL_EXPR_ARG (exp, 2);
3490 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3491 target, mode, ignore);
3495 /* Helper function to do the actual work for expand_builtin_memmove.  The
3496 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3497 so that this can also be called without constructing an actual CALL_EXPR.
3498 TYPE is the return type of the call.  The other arguments and return value
3499 are the same as for expand_builtin_memmove. */
3502 expand_builtin_memmove_args (tree dest, tree src, tree len,
3503 tree type, rtx target, enum machine_mode mode,
/* NOTE(review): endp==3 here presumably marks the overlapping-regions
   (memmove) variant for the folder -- confirm in fold_builtin_memory_op.  */
3506 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3510 STRIP_TYPE_NOPS (result);
/* Evaluate side-effect wrappers, then expand the folded value.  */
3511 while (TREE_CODE (result) == COMPOUND_EXPR)
3513 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3515 result = TREE_OPERAND (result, 1);
3517 return expand_expr (result, target, mode, EXPAND_NORMAL);
3520 /* Otherwise, call the normal function. */
3524 /* Expand expression EXP, which is a call to the bcopy builtin.  Return
3525 NULL_RTX if we failed the caller should emit a normal call. */
3528 expand_builtin_bcopy (tree exp, int ignore)
3530 tree type = TREE_TYPE (exp);
3531 tree src, dest, size;
3533 if (!validate_arglist (exp,
3534 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
/* bcopy's argument order is (src, dest, size) -- the reverse of memmove.  */
3537 src = CALL_EXPR_ARG (exp, 0);
3538 dest = CALL_EXPR_ARG (exp, 1);
3539 size = CALL_EXPR_ARG (exp, 2);
3541 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3542 This is done this way so that if it isn't expanded inline, we fall
3543 back to calling bcopy instead of memmove. */
/* const0_rtx as target: bcopy returns void, so the result is ignored.  */
3544 return expand_builtin_memmove_args (dest, src,
3545 fold_convert (sizetype, size),
3546 type, const0_rtx, VOIDmode,
3551 # define HAVE_movstr 0
3552 # define CODE_FOR_movstr CODE_FOR_nothing
3555 /* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
3556 we failed, the caller should emit a normal call, otherwise try to
3557 get the result in TARGET, if convenient.  If ENDP is 0 return the
3558 destination pointer, if ENDP is 1 return the end pointer ala
3559 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3563 expand_movstr (tree dest, tree src, rtx target, int endp)
/* NOTE(review): the HAVE_movstr guard, variable declarations and several
   branch/brace lines of this function are elided in this listing; the
   comments below describe only the visible statements.  */
3569 const struct insn_data * data;
3574 dest_mem = get_memory_rtx (dest, NULL);
3575 src_mem = get_memory_rtx (src, NULL);
/* In this branch the destination address register doubles as the return
   value, so the movstr pattern writes through it.  */
3578 target = force_reg (Pmode, XEXP (dest_mem, 0));
3579 dest_mem = replace_equiv_address (dest_mem, target);
3580 end = gen_reg_rtx (Pmode);
3584 if (target == 0 || target == const0_rtx)
3586 end = gen_reg_rtx (Pmode);
/* Adapt END to the mode the movstr pattern's first operand expects.  */
3594 data = insn_data + CODE_FOR_movstr;
3596 if (data->operand[0].mode != VOIDmode)
3597 end = gen_lowpart (data->operand[0].mode, end);
3599 insn = data->genfun (end, dest_mem, src_mem);
3605 /* movstr is supposed to set end to the address of the NUL
3606 terminator.  If the caller requested a mempcpy-like return value,
3608 if (endp == 1 && target != const0_rtx)
3610 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3611 emit_move_insn (target, force_operand (tem, NULL_RTX));
3617 /* Expand expression EXP, which is a call to the strcpy builtin.  Return
3618 NULL_RTX if we failed the caller should emit a normal call, otherwise
3619 try to get the result in TARGET, if convenient (and in mode MODE if that's
3623 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
/* Thin wrapper: unpack dest/src and delegate to expand_builtin_strcpy_args.  */
3625 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3627 tree dest = CALL_EXPR_ARG (exp, 0);
3628 tree src = CALL_EXPR_ARG (exp, 1);
3629 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3634 /* Helper function to do the actual work for expand_builtin_strcpy.  The
3635 arguments to the builtin_strcpy call DEST and SRC are broken out
3636 so that this can also be called without constructing an actual CALL_EXPR.
3637 The other arguments and return value are the same as for
3638 expand_builtin_strcpy. */
3641 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3642 rtx target, enum machine_mode mode)
/* Fold at compile time if possible; otherwise attempt a movstr expansion
   with endp == 0, i.e. returning the destination pointer (strcpy semantics).  */
3644 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3646 return expand_expr (result, target, mode, EXPAND_NORMAL);
3647 return expand_movstr (dest, src, target, /*endp=*/0);
3651 /* Expand a call EXP to the stpcpy builtin.
3652 Return NULL_RTX if we failed the caller should emit a normal call,
3653 otherwise try to get the result in TARGET, if convenient (and in
3654 mode MODE if that's convenient). */
3657 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3661 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3664 dst = CALL_EXPR_ARG (exp, 0);
3665 src = CALL_EXPR_ARG (exp, 1);
3667 /* If return value is ignored, transform stpcpy into strcpy. */
3668 if (target == const0_rtx)
3670 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3674 return expand_expr (build_call_expr (fn, 2, dst, src),
3675 target, mode, EXPAND_NORMAL);
3682 /* Ensure we get an actual string whose length can be evaluated at
3683 compile-time, not an expression containing a string.  This is
3684 because the latter will potentially produce pessimized code
3685 when used to produce the return value. */
3686 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3687 return expand_movstr (dst, src, target, /*endp=*/2);
/* Copy strlen(src) + 1 bytes so the terminating NUL is included; endp == 2
   makes mempcpy_args return the end pointer minus one, i.e. the address of
   the NUL, which is stpcpy's return value.  */
3689 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3690 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3691 target, mode, /*endp=*/2);
/* mempcpy expansion failed: if the length is a constant, fall back to a
   strcpy expansion and compute dst + len as the stpcpy result by hand.  */
3696 if (TREE_CODE (len) == INTEGER_CST)
3698 rtx len_rtx = expand_normal (len);
3700 if (GET_CODE (len_rtx) == CONST_INT)
3702 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3703 dst, src, target, mode);
3709 if (mode != VOIDmode)
3710 target = gen_reg_rtx (mode);
3712 target = gen_reg_rtx (GET_MODE (ret));
3714 if (GET_MODE (target) != GET_MODE (ret))
3715 ret = gen_lowpart (GET_MODE (target), ret);
3717 ret = plus_constant (ret, INTVAL (len_rtx));
3718 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3726 return expand_movstr (dst, src, target, /*endp=*/2);
3730 /* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
3731 bytes from constant string DATA + OFFSET and return it as target
3735 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3736 enum machine_mode mode)
3738 const char *str = (const char *) data;
/* strncpy zero-pads beyond the source string; the (elided) body of this
   branch presumably returns const0_rtx for such offsets -- TODO confirm.  */
3740 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3743 return c_readstr (str + offset, mode);
3746 /* Expand expression EXP, which is a call to the strncpy builtin.  Return
3747 NULL_RTX if we failed the caller should emit a normal call. */
3750 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3752 tree fndecl = get_callee_fndecl (exp);
3754 if (validate_arglist (exp,
3755 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3757 tree dest = CALL_EXPR_ARG (exp, 0);
3758 tree src = CALL_EXPR_ARG (exp, 1);
3759 tree len = CALL_EXPR_ARG (exp, 2);
/* SLEN is the source length if it can be computed at compile time.  */
3760 tree slen = c_strlen (src, 1);
3761 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
/* Evaluate side-effect wrappers, then expand the folded value.  */
3765 while (TREE_CODE (result) == COMPOUND_EXPR)
3767 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3769 result = TREE_OPERAND (result, 1);
3771 return expand_expr (result, target, mode, EXPAND_NORMAL);
3774 /* We must be passed a constant len and src parameter. */
3775 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
/* Account for the terminating NUL when comparing against LEN.  */
3778 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3780 /* We're required to pad with trailing zeros if the requested
3781 len is greater than strlen(s2)+1.  In that case try to
3782 use store_by_pieces, if it fails, punt. */
3783 if (tree_int_cst_lt (slen, len))
3785 unsigned int dest_align
3786 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3787 const char *p = c_getstr (src);
3790 if (!p || dest_align == 0 || !host_integerp (len, 1)
3791 || !can_store_by_pieces (tree_low_cst (len, 1),
3792 builtin_strncpy_read_str,
3793 CONST_CAST (char *, p),
3797 dest_mem = get_memory_rtx (dest, len);
3798 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3799 builtin_strncpy_read_str,
3800 CONST_CAST (char *, p), dest_align, false, 0);
3801 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3802 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3809 /* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
3810 bytes from constant string DATA + OFFSET and return it as target
3814 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3815 enum machine_mode mode)
3817 const char *c = (const char *) data;
/* Build a word of GET_MODE_SIZE copies of the single fill byte *C; OFFSET
   is irrelevant because every byte of a memset destination is identical.  */
3818 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3820 memset (p, *c, GET_MODE_SIZE (mode));
3822 return c_readstr (p, mode);
3825 /* Callback routine for store_by_pieces.  Return the RTL of a register
3826 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3827 char value given in the RTL register data.  For example, if mode is
3828 4 bytes wide, return the RTL for 0x01010101*data. */
3831 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3832 enum machine_mode mode)
3838 size = GET_MODE_SIZE (mode);
/* COEFF is the constant 0x0101...01 of the right width; multiplying the
   (non-constant) byte value by it replicates that byte across the word.  */
3842 p = XALLOCAVEC (char, size);
3843 memset (p, 1, size);
3844 coeff = c_readstr (p, mode);
3846 target = convert_to_mode (mode, (rtx) data, 1);
3847 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3848 return force_reg (mode, target);
3851 /* Expand expression EXP, which is a call to the memset builtin.  Return
3852 NULL_RTX if we failed the caller should emit a normal call, otherwise
3853 try to get the result in TARGET, if convenient (and in mode MODE if that's
3857 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
/* Thin wrapper: unpack dest/val/len and delegate to
   expand_builtin_memset_args, passing EXP along for profile hints and
   for reconstructing the libcall on failure.  */
3859 if (!validate_arglist (exp,
3860 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3864 tree dest = CALL_EXPR_ARG (exp, 0);
3865 tree val = CALL_EXPR_ARG (exp, 1);
3866 tree len = CALL_EXPR_ARG (exp, 2);
3867 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3871 /* Helper function to do the actual work for expand_builtin_memset.  The
3872 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3873 so that this can also be called without constructing an actual CALL_EXPR.
3874 The other arguments and return value are the same as for
3875 expand_builtin_memset. */
3878 expand_builtin_memset_args (tree dest, tree val, tree len,
3879 rtx target, enum machine_mode mode, tree orig_exp)
3882 enum built_in_function fcode;
3884 unsigned int dest_align;
3885 rtx dest_mem, dest_addr, len_rtx;
3886 HOST_WIDE_INT expected_size = -1;
3887 unsigned int expected_align = 0;
3889 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3891 /* If DEST is not a pointer type, don't do this operation in-line. */
3892 if (dest_align == 0)
/* Profile-driven hints for clear_storage / setmem expansion.  */
3895 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3896 if (expected_align < dest_align)
3897 expected_align = dest_align;
3899 /* If the LEN parameter is zero, return DEST. */
3900 if (integer_zerop (len))
3902 /* Evaluate and ignore VAL in case it has side-effects. */
3903 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3904 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3907 /* Stabilize the arguments in case we fail. */
3908 dest = builtin_save_expr (dest);
3909 val = builtin_save_expr (val);
3910 len = builtin_save_expr (len);
3912 len_rtx = expand_normal (len);
3913 dest_mem = get_memory_rtx (dest, len);
/* Non-constant fill value: replicate it across a word at run time.  */
3915 if (TREE_CODE (val) != INTEGER_CST)
3919 val_rtx = expand_normal (val);
3920 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3923 /* Assume that we can memset by pieces if we can store
3924 * the coefficients by pieces (in the required modes).
3925 * We can't pass builtin_memset_gen_str as that emits RTL. */
3927 if (host_integerp (len, 1)
3928 && can_store_by_pieces (tree_low_cst (len, 1),
3929 builtin_memset_read_str, &c, dest_align,
3932 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3934 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3935 builtin_memset_gen_str, val_rtx, dest_align,
3938 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3939 dest_align, expected_align,
3943 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3944 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Constant fill value: reduce it to a host char C, then store by pieces
   or via the setmem pattern.  NOTE(review): the branch taken when
   target_char_cast fails is on an elided line -- confirm.  */
3948 if (target_char_cast (val, &c))
3953 if (host_integerp (len, 1)
3954 && can_store_by_pieces (tree_low_cst (len, 1),
3955 builtin_memset_read_str, &c, dest_align,
3957 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3958 builtin_memset_read_str, &c, dest_align, true, 0);
3959 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3960 dest_align, expected_align,
3964 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3965 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* General case: clear storage with the block-clear expander.  */
3969 set_mem_align (dest_mem, dest_align);
3970 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3971 CALL_EXPR_TAILCALL (orig_exp)
3972 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3973 expected_align, expected_size);
3977 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3978 dest_addr = convert_memory_address (ptr_mode, dest_addr);
/* Inline expansion failed: rebuild and emit a call to the original
   builtin (memset or bzero) so the arguments, already stabilized above,
   are evaluated exactly once.  */
3984 fndecl = get_callee_fndecl (orig_exp);
3985 fcode = DECL_FUNCTION_CODE (fndecl);
3986 if (fcode == BUILT_IN_MEMSET)
3987 fn = build_call_expr (fndecl, 3, dest, val, len);
3988 else if (fcode == BUILT_IN_BZERO)
3989 fn = build_call_expr (fndecl, 2, dest, len);
3992 if (TREE_CODE (fn) == CALL_EXPR)
3993 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3994 return expand_call (fn, target, target == const0_rtx);
3997 /* Expand expression EXP, which is a call to the bzero builtin.  Return
3998 NULL_RTX if we failed the caller should emit a normal call. */
4001 expand_builtin_bzero (tree exp)
4005 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4008 dest = CALL_EXPR_ARG (exp, 0);
4009 size = CALL_EXPR_ARG (exp, 1);
4011 /* New argument list transforming bzero(ptr x, int y) to
4012 memset(ptr x, int 0, size_t y).  This is done this way
4013 so that if it isn't expanded inline, we fallback to
4014 calling bzero instead of memset. */
/* const0_rtx as target: bzero returns void, so the result is ignored.  */
4016 return expand_builtin_memset_args (dest, integer_zero_node,
4017 fold_convert (sizetype, size),
4018 const0_rtx, VOIDmode, exp);
4021 /* Expand a call to the memchr builtin.  Return NULL_RTX if we failed the
4022 caller should emit a normal call, otherwise try to get the result
4023 in TARGET, if convenient (and in mode MODE if that's convenient). */
4026 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
/* Expand only the compile-time-folded form; the library-call fallback
   return is on an elided line of this listing.  */
4028 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4029 INTEGER_TYPE, VOID_TYPE))
4031 tree type = TREE_TYPE (exp);
4032 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4033 CALL_EXPR_ARG (exp, 1),
4034 CALL_EXPR_ARG (exp, 2), type);
4036 return expand_expr (result, target, mode, EXPAND_NORMAL);
4041 /* Expand expression EXP, which is a call to the memcmp built-in function.
4042 Return NULL_RTX if we failed and the
4043 caller should emit a normal call, otherwise try to get the result in
4044 TARGET, if convenient (and in mode MODE, if that's convenient). */
4047 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4049 if (!validate_arglist (exp,
4050 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
/* First try folding the comparison away at compile time.  */
4054 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4055 CALL_EXPR_ARG (exp, 1),
4056 CALL_EXPR_ARG (exp, 2));
4058 return expand_expr (result, target, mode, EXPAND_NORMAL);
4061 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4063 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4066 tree arg1 = CALL_EXPR_ARG (exp, 0);
4067 tree arg2 = CALL_EXPR_ARG (exp, 1);
4068 tree len = CALL_EXPR_ARG (exp, 2);
4071 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4073 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4074 enum machine_mode insn_mode;
/* Pick the result mode from whichever compare pattern is available,
   preferring cmpmemsi over cmpstrnsi.  */
4076 #ifdef HAVE_cmpmemsi
4078 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4081 #ifdef HAVE_cmpstrnsi
4083 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4088 /* If we don't have POINTER_TYPE, call the function. */
4089 if (arg1_align == 0 || arg2_align == 0)
4092 /* Make a place to write the result of the instruction. */
4095 && REG_P (result) && GET_MODE (result) == insn_mode
4096 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4097 result = gen_reg_rtx (insn_mode);
4099 arg1_rtx = get_memory_rtx (arg1, len);
4100 arg2_rtx = get_memory_rtx (arg2, len);
4101 arg3_rtx = expand_normal (len);
4103 /* Set MEM_SIZE as appropriate. */
4104 if (GET_CODE (arg3_rtx) == CONST_INT)
4106 set_mem_size (arg1_rtx, arg3_rtx);
4107 set_mem_size (arg2_rtx, arg3_rtx);
4110 #ifdef HAVE_cmpmemsi
4112 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4113 GEN_INT (MIN (arg1_align, arg2_align)));
4116 #ifdef HAVE_cmpstrnsi
4118 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4119 GEN_INT (MIN (arg1_align, arg2_align)));
/* NOTE(review): this libcall is presumably the fallback when no compare
   insn could be generated -- the guarding branch is elided here.  */
4127 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4128 TYPE_MODE (integer_type_node), 3,
4129 XEXP (arg1_rtx, 0), Pmode,
4130 XEXP (arg2_rtx, 0), Pmode,
4131 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4132 TYPE_UNSIGNED (sizetype)),
4133 TYPE_MODE (sizetype));
4135 /* Return the value in the proper mode for this function. */
4136 mode = TYPE_MODE (TREE_TYPE (exp));
4137 if (GET_MODE (result) == mode)
4139 else if (target != 0)
4141 convert_move (target, result, 0);
4145 return convert_to_mode (mode, result, 0);
4152 /* Expand expression EXP, which is a call to the strcmp builtin.  Return NULL_RTX
4153 if we failed the caller should emit a normal call, otherwise try to get
4154 the result in TARGET, if convenient. */
4157 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4159 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
/* First try folding the comparison away at compile time.  */
4163 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4164 CALL_EXPR_ARG (exp, 1));
4166 return expand_expr (result, target, mode, EXPAND_NORMAL);
4169 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4170 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4171 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4173 rtx arg1_rtx, arg2_rtx;
4174 rtx result, insn = NULL_RTX;
4176 tree arg1 = CALL_EXPR_ARG (exp, 0);
4177 tree arg2 = CALL_EXPR_ARG (exp, 1);
4180 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4182 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4184 /* If we don't have POINTER_TYPE, call the function. */
4185 if (arg1_align == 0 || arg2_align == 0)
4188 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4189 arg1 = builtin_save_expr (arg1);
4190 arg2 = builtin_save_expr (arg2);
4192 arg1_rtx = get_memory_rtx (arg1, NULL);
4193 arg2_rtx = get_memory_rtx (arg2, NULL);
4195 #ifdef HAVE_cmpstrsi
4196 /* Try to call cmpstrsi. */
4199 enum machine_mode insn_mode
4200 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4202 /* Make a place to write the result of the instruction. */
4205 && REG_P (result) && GET_MODE (result) == insn_mode
4206 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4207 result = gen_reg_rtx (insn_mode);
4209 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4210 GEN_INT (MIN (arg1_align, arg2_align)));
4213 #ifdef HAVE_cmpstrnsi
4214 /* Try to determine at least one length and call cmpstrnsi. */
4215 if (!insn && HAVE_cmpstrnsi)
4220 enum machine_mode insn_mode
4221 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
/* Compute strlen(argN) + 1 where known, so the bounded compare covers
   the terminating NUL of at least one string.  */
4222 tree len1 = c_strlen (arg1, 1);
4223 tree len2 = c_strlen (arg2, 1);
4226 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4228 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4230 /* If we don't have a constant length for the first, use the length
4231 of the second, if we know it.  We don't require a constant for
4232 this case; some cost analysis could be done if both are available
4233 but neither is constant.  For now, assume they're equally cheap,
4234 unless one has side effects.  If both strings have constant lengths,
/* NOTE(review): the chain below selects LEN from len1/len2; the
   assignments on the branches are elided in this listing.  */
4241 else if (TREE_SIDE_EFFECTS (len1))
4243 else if (TREE_SIDE_EFFECTS (len2))
4245 else if (TREE_CODE (len1) != INTEGER_CST)
4247 else if (TREE_CODE (len2) != INTEGER_CST)
4249 else if (tree_int_cst_lt (len1, len2))
4254 /* If both arguments have side effects, we cannot optimize. */
4255 if (!len || TREE_SIDE_EFFECTS (len))
4258 arg3_rtx = expand_normal (len);
4260 /* Make a place to write the result of the instruction. */
4263 && REG_P (result) && GET_MODE (result) == insn_mode
4264 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4265 result = gen_reg_rtx (insn_mode);
4267 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4268 GEN_INT (MIN (arg1_align, arg2_align)));
4276 /* Return the value in the proper mode for this function. */
4277 mode = TYPE_MODE (TREE_TYPE (exp));
4278 if (GET_MODE (result) == mode)
4281 return convert_to_mode (mode, result, 0);
4282 convert_move (target, result, 0);
4286 /* Expand the library call ourselves using a stabilized argument
4287 list to avoid re-evaluating the function's arguments twice. */
4288 #ifdef HAVE_cmpstrnsi
4291 fndecl = get_callee_fndecl (exp);
4292 fn = build_call_expr (fndecl, 2, arg1, arg2);
4293 if (TREE_CODE (fn) == CALL_EXPR)
4294 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4295 return expand_call (fn, target, target == const0_rtx);
4301 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4302 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4303 the result in TARGET, if convenient. */
4306 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4308 if (!validate_arglist (exp,
4309 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
/* First try to fold the whole call to a simpler tree (possibly a
   constant); if folding succeeds, just expand the folded result.  */
4313 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4314 CALL_EXPR_ARG (exp, 1),
4315 CALL_EXPR_ARG (exp, 2));
4317 return expand_expr (result, target, mode, EXPAND_NORMAL);
4320 /* If c_strlen can determine an expression for one of the string
4321 lengths, and it doesn't have side effects, then emit cmpstrnsi
4322 using length MIN(strlen(string)+1, arg3). */
4323 #ifdef HAVE_cmpstrnsi
4326 tree len, len1, len2;
4327 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4330 tree arg1 = CALL_EXPR_ARG (exp, 0);
4331 tree arg2 = CALL_EXPR_ARG (exp, 1);
4332 tree arg3 = CALL_EXPR_ARG (exp, 2);
4335 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4337 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4338 enum machine_mode insn_mode
4339 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
/* Lengths of the two strings, when statically determinable; the +1
   below accounts for the terminating NUL.  */
4341 len1 = c_strlen (arg1, 1);
4342 len2 = c_strlen (arg2, 1);
4345 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4347 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4349 /* If we don't have a constant length for the first, use the length
4350 of the second, if we know it. We don't require a constant for
4351 this case; some cost analysis could be done if both are available
4352 but neither is constant. For now, assume they're equally cheap,
4353 unless one has side effects. If both strings have constant lengths,
4360 else if (TREE_SIDE_EFFECTS (len1))
4362 else if (TREE_SIDE_EFFECTS (len2))
4364 else if (TREE_CODE (len1) != INTEGER_CST)
4366 else if (TREE_CODE (len2) != INTEGER_CST)
4368 else if (tree_int_cst_lt (len1, len2))
4373 /* If both arguments have side effects, we cannot optimize. */
4374 if (!len || TREE_SIDE_EFFECTS (len))
4377 /* The actual new length parameter is MIN(len,arg3). */
4378 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4379 fold_convert (TREE_TYPE (len), arg3));
4381 /* If we don't have POINTER_TYPE, call the function. */
4382 if (arg1_align == 0 || arg2_align == 0)
4385 /* Make a place to write the result of the instruction. */
4388 && REG_P (result) && GET_MODE (result) == insn_mode
4389 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4390 result = gen_reg_rtx (insn_mode);
4392 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4393 arg1 = builtin_save_expr (arg1);
4394 arg2 = builtin_save_expr (arg2);
4395 len = builtin_save_expr (len);
4397 arg1_rtx = get_memory_rtx (arg1, len);
4398 arg2_rtx = get_memory_rtx (arg2, len);
4399 arg3_rtx = expand_normal (len);
4400 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4401 GEN_INT (MIN (arg1_align, arg2_align)));
4406 /* Return the value in the proper mode for this function. */
4407 mode = TYPE_MODE (TREE_TYPE (exp));
4408 if (GET_MODE (result) == mode)
4411 return convert_to_mode (mode, result, 0);
4412 convert_move (target, result, 0);
4416 /* Expand the library call ourselves using a stabilized argument
4417 list to avoid re-evaluating the function's arguments twice. */
4418 fndecl = get_callee_fndecl (exp);
4419 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4420 if (TREE_CODE (fn) == CALL_EXPR)
4421 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4422 return expand_call (fn, target, target == const0_rtx);
4428 /* Expand expression EXP, which is a call to the strcat builtin.
4429 Return NULL_RTX if we failed the caller should emit a normal call,
4430 otherwise try to get the result in TARGET, if convenient. */
4433 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4435 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4439 tree dst = CALL_EXPR_ARG (exp, 0);
4440 tree src = CALL_EXPR_ARG (exp, 1);
/* P is non-NULL only when SRC is a readable string constant.  */
4441 const char *p = c_getstr (src);
4443 /* If the string length is zero, return the dst parameter. */
4444 if (p && *p == '\0')
4445 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4449 /* See if we can store by pieces into (dst + strlen(dst)). */
4450 tree newsrc, newdst,
4451 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4454 /* Stabilize the argument list. */
4455 newsrc = builtin_save_expr (src);
4456 dst = builtin_save_expr (dst);
4460 /* Create strlen (dst). */
4461 newdst = build_call_expr (strlen_fn, 1, dst);
4462 /* Create (dst p+ strlen (dst)). */
4464 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4465 newdst = builtin_save_expr (newdst);
/* Reuse the strcpy expander to copy SRC to DST+strlen(DST); on
   failure discard the pending insn sequence and punt to a call.  */
4467 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4469 end_sequence (); /* Stop sequence. */
4473 /* Output the entire sequence. */
4474 insns = get_insns ();
4478 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4485 /* Expand expression EXP, which is a call to the strncat builtin.
4486 Return NULL_RTX if we failed the caller should emit a normal call,
4487 otherwise try to get the result in TARGET, if convenient. */
4490 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4492 if (validate_arglist (exp,
4493 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
/* All simplification is delegated to fold_builtin_strncat; if it
   produces a folded tree, expand that instead of the call.  */
4495 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4496 CALL_EXPR_ARG (exp, 1),
4497 CALL_EXPR_ARG (exp, 2));
4499 return expand_expr (result, target, mode, EXPAND_NORMAL);
4504 /* Expand expression EXP, which is a call to the strspn builtin.
4505 Return NULL_RTX if we failed the caller should emit a normal call,
4506 otherwise try to get the result in TARGET, if convenient. */
4509 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4511 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
/* Delegate to the folder; only expand if it simplified the call.  */
4513 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4514 CALL_EXPR_ARG (exp, 1));
4516 return expand_expr (result, target, mode, EXPAND_NORMAL);
4521 /* Expand expression EXP, which is a call to the strcspn builtin.
4522 Return NULL_RTX if we failed the caller should emit a normal call,
4523 otherwise try to get the result in TARGET, if convenient. */
4526 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4528 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
/* Delegate to the folder; only expand if it simplified the call.  */
4530 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4531 CALL_EXPR_ARG (exp, 1));
4533 return expand_expr (result, target, mode, EXPAND_NORMAL);
4538 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4539 if that's convenient. */
4542 expand_builtin_saveregs (void)
4546 /* Don't do __builtin_saveregs more than once in a function.
4547 Save the result of the first call and reuse it. */
4548 if (saveregs_value != 0)
4549 return saveregs_value;
4551 /* When this function is called, it means that registers must be
4552 saved on entry to this function. So we migrate the call to the
4553 first insn of this function. */
4557 /* Do whatever the machine needs done in this case. */
4558 val = targetm.calls.expand_builtin_saveregs ();
/* Cache the result so repeated calls in this function are folded.  */
4563 saveregs_value = val;
4565 /* Put the insns after the NOTE that starts the function. If this
4566 is inside a start_sequence, make the outer-level insn chain current, so
4567 the code is placed at the start of the function. */
4568 push_topmost_sequence ();
4569 emit_insn_after (seq, entry_of_function ());
4570 pop_topmost_sequence ();
4575 /* __builtin_args_info (N) returns word N of the arg space info
4576 for the current function. The number and meanings of words
4577 is controlled by the definition of CUMULATIVE_ARGS. */
4580 expand_builtin_args_info (tree exp)
4582 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
/* View the target's CUMULATIVE_ARGS record as a flat array of ints;
   the assert below guarantees the division is exact.  */
4583 int *word_ptr = (int *) &crtl->args.info;
4585 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4587 if (call_expr_nargs (exp) != 0)
/* The word index must be a compile-time integer constant.  */
4589 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4590 error ("argument of %<__builtin_args_info%> must be constant");
4593 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4595 if (wordnum < 0 || wordnum >= nwords)
4596 error ("argument of %<__builtin_args_info%> out of range");
4598 return GEN_INT (word_ptr[wordnum]);
4602 error ("missing argument in %<__builtin_args_info%>");
4607 /* Expand a call to __builtin_next_arg. */
4610 expand_builtin_next_arg (void)
4612 /* Checking arguments is already done in fold_builtin_next_arg
4613 that must be called before this function. */
/* The address of the first anonymous argument is the incoming
   argument pointer plus the (constant) named-argument offset.  */
4614 return expand_binop (ptr_mode, add_optab,
4615 crtl->args.internal_arg_pointer,
4616 crtl->args.arg_offset_rtx,
4617 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4620 /* Make it easier for the backends by protecting the valist argument
4621 from multiple evaluations. */
4624 stabilize_va_list (tree valist, int needs_lvalue)
4626 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist))
4628 gcc_assert (vatype != NULL_TREE);
4630 if (TREE_CODE (vatype) == ARRAY_TYPE)
4632 if (TREE_SIDE_EFFECTS (valist))
4633 valist = save_expr (valist);
4635 /* For this case, the backends will be expecting a pointer to
4636 vatype, but it's possible we've actually been given an array
4637 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
   So fix it.  */
4639 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4641 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4642 valist = build_fold_addr_expr_with_type (valist, p1);
/* Non-array va_list: stabilize by taking the address, saving it,
   and dereferencing back, so VALIST is evaluated exactly once.  */
4651 if (! TREE_SIDE_EFFECTS (valist))
4654 pt = build_pointer_type (vatype);
4655 valist = fold_build1 (ADDR_EXPR, pt, valist);
4656 TREE_SIDE_EFFECTS (valist) = 1;
4659 if (TREE_SIDE_EFFECTS (valist))
4660 valist = save_expr (valist);
4661 valist = build_fold_indirect_ref (valist);
4667 /* The "standard" definition of va_list is void*. */
/* Default hook: targets that need a richer va_list override this.  */
4670 std_build_builtin_va_list (void)
4672 return ptr_type_node;
4675 /* The "standard" abi va_list is va_list_type_node. */
/* FNDECL is unused in the default implementation.  */
4678 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4680 return va_list_type_node;
4683 /* The "standard" type of va_list is va_list_type_node. */
4686 std_canonical_va_list_type (tree type)
/* Strip one level of indirection: TYPE may be a reference to, or a
   pointer to, the actual va_list object.  */
4690 if (INDIRECT_REF_P (type))
4691 type = TREE_TYPE (type);
4692 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4693 type = TREE_TYPE (type);
4694 wtype = va_list_type_node;
4696 /* Treat structure va_list types. */
4697 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4698 htype = TREE_TYPE (htype);
4699 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4701 /* If va_list is an array type, the argument may have decayed
4702 to a pointer type, e.g. by being passed to another function.
4703 In that case, unwrap both types so that we can compare the
4704 underlying records. */
4705 if (TREE_CODE (htype) == ARRAY_TYPE
4706 || POINTER_TYPE_P (htype))
4708 wtype = TREE_TYPE (wtype);
4709 htype = TREE_TYPE (htype);
/* Types match (up to main variant): this is the standard va_list.  */
4712 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4713 return va_list_type_node;
4718 /* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */
4722 std_expand_builtin_va_start (tree valist, rtx nextarg)
/* Expand VALIST as a writable lvalue and store NEXTARG into it.  */
4724 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4725 convert_move (va_r, nextarg, 0);
4728 /* Expand EXP, a call to __builtin_va_start. */
4731 expand_builtin_va_start (tree exp)
4736 if (call_expr_nargs (exp) < 2)
4738 error ("too few arguments to function %<va_start%>");
4742 if (fold_builtin_next_arg (exp, true))
4745 nextarg = expand_builtin_next_arg ();
4746 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
/* Prefer the target's va_start expander; otherwise fall back to the
   standard pointer-assignment implementation.  */
4748 if (targetm.expand_builtin_va_start)
4749 targetm.expand_builtin_va_start (valist, nextarg);
4751 std_expand_builtin_va_start (valist, nextarg);
4756 /* The "standard" implementation of va_arg: read the value from the
4757 current (padded) address and increment by the (padded) size. */
4760 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4762 tree addr, t, type_size, rounded_size, valist_tmp;
4763 unsigned HOST_WIDE_INT align, boundary;
4766 #ifdef ARGS_GROW_DOWNWARD
4767 /* All of the alignment and movement below is for args-grow-up machines.
4768 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4769 implement their own specialized gimplify_va_arg_expr routines. */
/* Arguments passed by reference are fetched as a pointer and
   dereferenced at the end (see build_va_arg_indirect_ref below).  */
4773 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4775 type = build_pointer_type (type);
4777 align = PARM_BOUNDARY / BITS_PER_UNIT;
4778 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4780 /* When we align parameter on stack for caller, if the parameter
4781 alignment is beyond PREFERRED_STACK_BOUNDARY, it will be
4782 aligned at PREFERRED_STACK_BOUNDARY. We will match callee
4783 here with caller. */
4784 if (boundary > PREFERRED_STACK_BOUNDARY)
4785 boundary = PREFERRED_STACK_BOUNDARY;
4787 boundary /= BITS_PER_UNIT;
4789 /* Hoist the valist value into a temporary for the moment. */
4790 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4792 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4793 requires greater alignment, we must perform dynamic alignment. */
4794 if (boundary > align
4795 && !integer_zerop (TYPE_SIZE (type)))
/* Round valist up: tmp = (tmp + boundary-1) & -boundary.  */
4797 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4798 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4799 valist_tmp, size_int (boundary - 1)));
4800 gimplify_and_add (t, pre_p);
4802 t = fold_convert (sizetype, valist_tmp);
4803 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4804 fold_convert (TREE_TYPE (valist),
4805 fold_build2 (BIT_AND_EXPR, sizetype, t,
4806 size_int (-boundary))));
4807 gimplify_and_add (t, pre_p);
4812 /* If the actual alignment is less than the alignment of the type,
4813 adjust the type accordingly so that we don't assume strict alignment
4814 when dereferencing the pointer. */
4815 boundary *= BITS_PER_UNIT;
4816 if (boundary < TYPE_ALIGN (type))
4818 type = build_variant_type_copy (type);
4819 TYPE_ALIGN (type) = boundary;
4822 /* Compute the rounded size of the type. */
4823 type_size = size_in_bytes (type);
4824 rounded_size = round_up (type_size, align);
4826 /* Reduce rounded_size so it's sharable with the postqueue. */
4827 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4831 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4833 /* Small args are padded downward. */
4834 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4835 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4836 size_binop (MINUS_EXPR, rounded_size, type_size));
4837 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4840 /* Compute new value for AP. */
4841 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4842 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4843 gimplify_and_add (t, pre_p);
4845 addr = fold_convert (build_pointer_type (type), addr);
/* For by-reference arguments, ADDR holds a pointer to a pointer;
   dereference once more to reach the actual object.  */
4848 addr = build_va_arg_indirect_ref (addr);
4850 return build_va_arg_indirect_ref (addr);
4853 /* Build an indirect-ref expression over the given TREE, which represents a
4854 piece of a va_arg() expansion. */
4856 build_va_arg_indirect_ref (tree addr)
/* Fold *&x to x when possible.  */
4858 addr = build_fold_indirect_ref (addr);
4860 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4866 /* Return a dummy expression of type TYPE in order to keep going after an
   error.  */
4870 dummy_object (tree type)
/* Build *(TYPE *)0 — a placeholder lvalue with the right mode.  */
4872 tree t = build_int_cst (build_pointer_type (type), 0);
4873 return build1 (INDIRECT_REF, type, t);
4876 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4877 builtin function, but a very special sort of operator. */
4879 enum gimplify_status
4880 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4882 tree promoted_type, have_va_type;
4883 tree valist = TREE_OPERAND (*expr_p, 0);
4884 tree type = TREE_TYPE (*expr_p);
4887 /* Verify that valist is of the proper type. */
4888 have_va_type = TREE_TYPE (valist);
4889 if (have_va_type == error_mark_node)
4891 have_va_type = targetm.canonical_va_list_type (have_va_type);
4893 if (have_va_type == NULL_TREE)
4895 error ("first argument to %<va_arg%> not of type %<va_list%>");
4899 /* Generate a diagnostic for requesting data of a type that cannot
4900 be passed through `...' due to type promotion at the call site. */
4901 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4904 static bool gave_help;
4906 /* Unfortunately, this is merely undefined, rather than a constraint
4907 violation, so we cannot make this an error. If this call is never
4908 executed, the program is still strictly conforming. */
4909 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4910 type, promoted_type);
/* Only print the extra help note once per compilation.  */
4914 inform ("(so you should pass %qT not %qT to %<va_arg%>)",
4915 promoted_type, type);
4918 /* We can, however, treat "undefined" any way we please.
4919 Call abort to encourage the user to fix the program. */
4920 inform ("if this code is reached, the program will abort");
4921 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4922 append_to_statement_list (t, pre_p);
4924 /* This is dead code, but go ahead and finish so that the
4925 mode of the result comes out right. */
4926 *expr_p = dummy_object (type);
4931 /* Make it easier for the backends by protecting the valist argument
4932 from multiple evaluations. */
4933 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4935 /* For this case, the backends will be expecting a pointer to
4936 TREE_TYPE (abi), but it's possible we've
4937 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
   So fix it.  */
4939 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4941 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4942 valist = build_fold_addr_expr_with_type (valist, p1);
4944 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4947 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4949 if (!targetm.gimplify_va_arg_expr)
4950 /* FIXME:Once most targets are converted we should merely
4951 assert this is non-null. */
/* Let the target lower VA_ARG_EXPR to explicit pointer arithmetic.  */
4954 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4959 /* Expand EXP, a call to __builtin_va_end. */
4962 expand_builtin_va_end (tree exp)
4964 tree valist = CALL_EXPR_ARG (exp, 0);
4966 /* Evaluate for side effects, if needed. I hate macros that don't
   do that.  */
4968 if (TREE_SIDE_EFFECTS (valist))
4969 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4974 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4975 builtin rather than just as an assignment in stdarg.h because of the
4976 nastiness of array-type va_list types. */
4979 expand_builtin_va_copy (tree exp)
4983 dst = CALL_EXPR_ARG (exp, 0);
4984 src = CALL_EXPR_ARG (exp, 1);
4986 dst = stabilize_va_list (dst, 1);
4987 src = stabilize_va_list (src, 0);
4989 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
/* Scalar va_list: a plain assignment copies it.  */
4991 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4993 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4994 TREE_SIDE_EFFECTS (t) = 1;
4995 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Array-type va_list: copy the whole object as a BLKmode block move.  */
4999 rtx dstb, srcb, size;
5001 /* Evaluate to pointers. */
5002 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5003 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5004 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5005 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5007 dstb = convert_memory_address (Pmode, dstb);
5008 srcb = convert_memory_address (Pmode, srcb);
5010 /* "Dereference" to BLKmode memories. */
5011 dstb = gen_rtx_MEM (BLKmode, dstb);
5012 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5013 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5014 srcb = gen_rtx_MEM (BLKmode, srcb);
5015 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5016 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5019 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5025 /* Expand a call to one of the builtin functions __builtin_frame_address or
5026 __builtin_return_address. */
5029 expand_builtin_frame_address (tree fndecl, tree exp)
5031 /* The argument must be a nonnegative integer constant.
5032 It counts the number of frames to scan up the stack.
5033 The value is the return address saved in that frame. */
5034 if (call_expr_nargs (exp) == 0)
5035 /* Warning about missing arg was already issued. */
5037 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5039 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5040 error ("invalid argument to %<__builtin_frame_address%>")
5042 error ("invalid argument to %<__builtin_return_address%>");
/* Both builtins share one expander; FNDECL distinguishes them.  */
5048 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5049 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5051 /* Some ports cannot access arbitrary stack frames. */
5054 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5055 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5057 warning (0, "unsupported argument to %<__builtin_return_address%>");
5061 /* For __builtin_frame_address, return what we've got. */
5062 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
/* Copy a non-constant result into a register for the caller.  */
5066 && ! CONSTANT_P (tem))
5067 tem = copy_to_mode_reg (Pmode, tem);
5072 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5073 we failed and the caller should emit a normal call, otherwise try to get
5074 the result in TARGET, if convenient. */
5077 expand_builtin_alloca (tree exp, rtx target)
5082 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5083 should always expand to function calls. These can be intercepted
   in libmudflap.  */
5088 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5091 /* Compute the argument. */
5092 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5094 /* Allocate the desired space. */
5095 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
/* The stack pointer lives in Pmode; callers expect ptr_mode.  */
5096 result = convert_memory_address (ptr_mode, result);
5101 /* Expand a call to a bswap builtin with argument ARG0. MODE
5102 is the mode to expand with. */
5105 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5107 enum machine_mode mode;
5111 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5114 arg = CALL_EXPR_ARG (exp, 0);
5115 mode = TYPE_MODE (TREE_TYPE (arg));
5116 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
/* expand_unop may widen; convert back to the argument's mode.  */
5118 target = expand_unop (mode, bswap_optab, op0, target, 1);
5120 gcc_assert (target);
5122 return convert_to_mode (mode, target, 0);
5125 /* Expand a call to a unary builtin in EXP.
5126 Return NULL_RTX if a normal call should be emitted rather than expanding the
5127 function in-line. If convenient, the result should be placed in TARGET.
5128 SUBTARGET may be used as the target for computing one of EXP's operands. */
5131 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5132 rtx subtarget, optab op_optab)
5136 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5139 /* Compute the argument. */
5140 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5141 VOIDmode, EXPAND_NORMAL);
5142 /* Compute op, into TARGET if possible.
5143 Set TARGET to wherever the result comes back. */
5144 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5145 op_optab, op0, target, 1);
5146 gcc_assert (target);
/* Convert to the mode the builtin's return type requires.  */
5148 return convert_to_mode (target_mode, target, 0);
5151 /* If the string passed to fputs is a constant and is one character
5152 long, we attempt to transform this call into __builtin_fputc(). */
5155 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5157 /* Verify the arguments in the original call. */
5158 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
/* fold_builtin_fputs performs the actual fputc/ignore transforms;
   expand whatever tree it produces.  */
5160 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5161 CALL_EXPR_ARG (exp, 1),
5162 (target == const0_rtx),
5163 unlocked, NULL_TREE);
5165 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5170 /* Expand a call to __builtin_expect. We just return our argument
5171 as the builtin_expect semantic should've been already executed by
5172 tree branch prediction pass. */
5175 expand_builtin_expect (tree exp, rtx target)
5179 if (call_expr_nargs (exp) < 2)
5181 arg = CALL_EXPR_ARG (exp, 0)
5182 c = CALL_EXPR_ARG (exp, 1);
/* Only the first argument's value matters at expansion time.  */
5184 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5185 /* When guessing was done, the hints should be already stripped away. */
5186 gcc_assert (!flag_guess_branch_prob
5187 || optimize == 0 || errorcount || sorrycount);
/* Expand a call to __builtin_trap: use the target's trap insn when
   available, otherwise fall back to calling abort().  */
5192 expand_builtin_trap (void)
5196 emit_insn (gen_trap ());
5199 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5203 /* Expand EXP, a call to fabs, fabsf or fabsl.
5204 Return NULL_RTX if a normal call should be emitted rather than expanding
5205 the function inline. If convenient, the result should be placed
5206 in TARGET. SUBTARGET may be used as the target for computing
   the operand.  */
5210 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5212 enum machine_mode mode;
5216 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5219 arg = CALL_EXPR_ARG (exp, 0);
/* Stabilize ARG (and store it back into the call) so safe_from_p
   below sees the same tree that gets expanded.  */
5220 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5221 mode = TYPE_MODE (TREE_TYPE (arg));
5222 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5223 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5226 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5227 Return NULL is a normal call should be emitted rather than expanding the
5228 function inline. If convenient, the result should be placed in TARGET.
5229 SUBTARGET may be used as the target for computing the operand. */
5232 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5237 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
/* OP0 supplies the magnitude, OP1 supplies the sign.  */
5240 arg = CALL_EXPR_ARG (exp, 0);
5241 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5243 arg = CALL_EXPR_ARG (exp, 1);
5244 op1 = expand_normal (arg);
5246 return expand_copysign (op0, op1, target);
5249 /* Create a new constant string literal and return a char* pointer to it.
5250 The STRING_CST value is the LEN characters at STR. */
5252 build_string_literal (int len, const char *str)
5254 tree t, elem, index, type;
5256 t = build_string (len, str);
/* The literal's type is `const char[len]': a read-only, static
   array of qualified char.  */
5257 elem = build_type_variant (char_type_node, 1, 0);
5258 index = build_index_type (size_int (len - 1));
5259 type = build_array_type (elem, index);
5260 TREE_TYPE (t) = type;
5261 TREE_CONSTANT (t) = 1;
5262 TREE_READONLY (t) = 1;
5263 TREE_STATIC (t) = 1;
/* Return &"str"[0], typed as a pointer to const char.  */
5265 type = build_pointer_type (elem);
5266 t = build1 (ADDR_EXPR, type,
5267 build4 (ARRAY_REF, elem,
5268 t, integer_zero_node, NULL_TREE, NULL_TREE));
5272 /* Expand EXP, a call to printf or printf_unlocked.
5273 Return NULL_RTX if a normal call should be emitted rather than transforming
5274 the function inline. If convenient, the result should be placed in
5275 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
   call.  */
5278 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5281 /* If we're using an unlocked function, assume the other unlocked
5282 functions exist explicitly. */
5283 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5284 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5285 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5286 : implicit_built_in_decls[BUILT_IN_PUTS];
5287 const char *fmt_str;
5290 int nargs = call_expr_nargs (exp);
5292 /* If the return value is used, don't do the transformation. */
5293 if (target != const0_rtx)
5296 /* Verify the required arguments in the original call. */
5299 fmt = CALL_EXPR_ARG (exp, 0);
5300 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5303 /* Check whether the format is a literal string constant. */
5304 fmt_str = c_getstr (fmt);
5305 if (fmt_str == NULL)
/* target_percent etc. are the format chars in the target charset.  */
5308 if (!init_target_chars ())
5311 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5312 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5315 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5318 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5320 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5321 else if (strcmp (fmt_str, target_percent_c) == 0)
5324 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5327 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5331 /* We can't handle anything else with % args or %% ... yet. */
5332 if (strchr (fmt_str, target_percent))
5338 /* If the format specifier was "", printf does nothing. */
5339 if (fmt_str[0] == '\0')
5341 /* If the format specifier has length of 1, call putchar. */
5342 if (fmt_str[1] == '\0')
5344 /* Given printf("c"), (where c is any one character,)
5345 convert "c"[0] to an int and pass that to the replacement
5347 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5349 fn = build_call_expr (fn_putchar, 1, arg);
5353 /* If the format specifier was "string\n", call puts("string"). */
5354 size_t len = strlen (fmt_str);
5355 if ((unsigned char)fmt_str[len - 1] == target_newline)
5357 /* Create a NUL-terminated string that's one char shorter
5358 than the original, stripping off the trailing '\n'. */
5359 char *newstr = XALLOCAVEC (char, len);
5360 memcpy (newstr, fmt_str, len - 1);
5361 newstr[len - 1] = 0;
5362 arg = build_string_literal (len, newstr);
5364 fn = build_call_expr (fn_puts, 1, arg);
5367 /* We'd like to arrange to call fputs(string,stdout) here,
5368 but we need stdout and don't have a way to get it yet. */
/* Expand the replacement call, preserving the tail-call flag.  */
5375 if (TREE_CODE (fn) == CALL_EXPR)
5376 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5377 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5380 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5381 Return NULL_RTX if a normal call should be emitted rather than transforming
5382 the function inline. If convenient, the result should be placed in
5383 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
   call.  */
5386 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5389 /* If we're using an unlocked function, assume the other unlocked
5390 functions exist explicitly. */
5391 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5392 : implicit_built_in_decls[BUILT_IN_FPUTC];
5393 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5394 : implicit_built_in_decls[BUILT_IN_FPUTS];
5395 const char *fmt_str;
5398 int nargs = call_expr_nargs (exp);
5400 /* If the return value is used, don't do the transformation. */
5401 if (target != const0_rtx)
5404 /* Verify the required arguments in the original call. */
5407 fp = CALL_EXPR_ARG (exp, 0);
5408 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5410 fmt = CALL_EXPR_ARG (exp, 1);
5411 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5414 /* Check whether the format is a literal string constant. */
5415 fmt_str = c_getstr (fmt);
5416 if (fmt_str == NULL)
5419 if (!init_target_chars ())
5422 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5423 if (strcmp (fmt_str, target_percent_s) == 0)
5426 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5428 arg = CALL_EXPR_ARG (exp, 2);
5430 fn = build_call_expr (fn_fputs, 2, arg, fp);
5432 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5433 else if (strcmp (fmt_str, target_percent_c) == 0)
5436 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5438 arg = CALL_EXPR_ARG (exp, 2);
5440 fn = build_call_expr (fn_fputc, 2, arg, fp);
5444 /* We can't handle anything else with % args or %% ... yet. */
5445 if (strchr (fmt_str, target_percent))
5451 /* If the format specifier was "", fprintf does nothing. */
5452 if (fmt_str[0] == '\0')
5454 /* Evaluate and ignore FILE* argument for side-effects. */
5455 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5459 /* When "string" doesn't contain %, replace all cases of
5460 fprintf(stream,string) with fputs(string,stream). The fputs
5461 builtin will take care of special cases like length == 1. */
5463 fn = build_call_expr (fn_fputs, 2, fmt, fp);
/* Expand the replacement call, preserving the tail-call flag.  */
5468 if (TREE_CODE (fn) == CALL_EXPR)
5469 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5470 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5473 /* Expand a call EXP to sprintf. Return NULL_RTX if
5474 a normal call should be emitted rather than expanding the function
5475 inline. If convenient, the result should be placed in TARGET with
   mode MODE.  */
5479 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5482 const char *fmt_str;
5483 int nargs = call_expr_nargs (exp);
5485 /* Verify the required arguments in the original call. */
5488 dest = CALL_EXPR_ARG (exp, 0);
5489 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
/* BUG FIX: the format string is the SECOND argument of
   sprintf (dest, fmt, ...); fetching argument 0 here would make the
   checks below inspect DEST and generate strcpy (dest, dest).  */
5491 fmt = CALL_EXPR_ARG (exp, 1);
5492 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5495 /* Check whether the format is a literal string constant. */
5496 fmt_str = c_getstr (fmt);
5497 if (fmt_str == NULL)
5500 if (!init_target_chars ())
5503 /* If the format doesn't contain % args or %%, use strcpy. */
5504 if (strchr (fmt_str, target_percent) == 0)
5506 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
/* Extra arguments would be dropped by the strcpy transform.  */
5509 if ((nargs > 2) || ! fn)
5511 expand_expr (build_call_expr (fn, 2, dest, fmt),
5512 const0_rtx, VOIDmode, EXPAND_NORMAL);
5513 if (target == const0_rtx)
/* sprintf returns the number of characters written.  */
5515 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5516 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5518 /* If the format is "%s", use strcpy if the result isn't used. */
5519 else if (strcmp (fmt_str, target_percent_s) == 0)
5522 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5528 arg = CALL_EXPR_ARG (exp, 2);
5529 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
/* If the result is used we must know the copied string's length.  */
5532 if (target != const0_rtx)
5534 len = c_strlen (arg, 1);
5535 if (! len || TREE_CODE (len) != INTEGER_CST)
5541 expand_expr (build_call_expr (fn, 2, dest, arg),
5542 const0_rtx, VOIDmode, EXPAND_NORMAL);
5544 if (target == const0_rtx)
5546 return expand_expr (len, target, mode, EXPAND_NORMAL);
5552 /* Expand a call to either the entry or exit function profiler. */
/* EXITP selects the exit-profiler libfunc when true, entry otherwise.  */
5555 expand_builtin_profile_func (bool exitp)
/* Address of the current function, taken from its DECL_RTL MEM.  */
5559 this = DECL_RTL (current_function_decl);
5560 gcc_assert (MEM_P (this));
5561 this = XEXP (this, 0);
5564 which = profile_function_exit_libfunc;
5566 which = profile_function_entry_libfunc;
/* Call the profiler with the function address and its return address.  */
5568 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5569 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5576 /* Expand a call to __builtin___clear_cache. */
5579 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5581 #ifndef HAVE_clear_cache
5582 #ifdef CLEAR_INSN_CACHE
5583 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5584 does something. Just do the default expansion to a call to
5588 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5589 does nothing. There is no need to call it. Do nothing. */
5591 #endif /* CLEAR_INSN_CACHE */
5593 /* We have a "clear_cache" insn, and it will handle everything. */
5595 rtx begin_rtx, end_rtx;
5596 enum insn_code icode;
5598 /* We must not expand to a library call. If we did, any
5599 fallback library function in libgcc that might contain a call to
5600 __builtin___clear_cache() would recurse infinitely. */
5601 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5603 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5607 if (HAVE_clear_cache)
5609 icode = CODE_FOR_clear_cache;
/* Expand each pointer argument, widen/narrow it to Pmode, and force
   it into a register if the insn's operand predicate rejects it.  */
5611 begin = CALL_EXPR_ARG (exp, 0);
5612 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5613 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5614 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5615 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5617 end = CALL_EXPR_ARG (exp, 1);
5618 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5619 end_rtx = convert_memory_address (Pmode, end_rtx);
5620 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5621 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5623 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5626 #endif /* HAVE_clear_cache */
5629 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5632 round_trampoline_addr (rtx tramp)
5634 rtx temp, addend, mask;
5636 /* If we don't need too much alignment, we'll have been guaranteed
5637 proper alignment by get_trampoline_type. */
5638 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5641 /* Round address up to desired boundary. */
/* Classic round-up: add (alignment - 1), then AND with -alignment
   (both expressed in bytes).  */
5642 temp = gen_reg_rtx (Pmode);
5643 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5644 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5646 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5647 temp, 0, OPTAB_LIB_WIDEN);
5648 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5649 temp, 0, OPTAB_LIB_WIDEN);
/* Expand a call to __builtin_init_trampoline: copy the trampoline
   template (if the target defines one) into the buffer and let the
   target macro fill in the function and static-chain addresses.  */
5655 expand_builtin_init_trampoline (tree exp)
5657 tree t_tramp, t_func, t_chain;
5658 rtx r_tramp, r_func, r_chain;
5659 #ifdef TRAMPOLINE_TEMPLATE
/* Arguments: trampoline buffer, target function, static chain.  */
5663 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5664 POINTER_TYPE, VOID_TYPE))
5667 t_tramp = CALL_EXPR_ARG (exp, 0);
5668 t_func = CALL_EXPR_ARG (exp, 1);
5669 t_chain = CALL_EXPR_ARG (exp, 2);
5671 r_tramp = expand_normal (t_tramp);
5672 r_func = expand_normal (t_func);
5673 r_chain = expand_normal (t_chain);
5675 /* Generate insns to initialize the trampoline. */
5676 r_tramp = round_trampoline_addr (r_tramp);
5677 #ifdef TRAMPOLINE_TEMPLATE
5678 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5679 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT)
5680 emit_block_move (blktramp, assemble_trampoline_template (),
5681 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5683 trampolines_created = 1;
5684 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
/* Expand a call to __builtin_adjust_trampoline: align the trampoline
   address and apply any target-specific address adjustment.  */
5690 expand_builtin_adjust_trampoline (tree exp)
5694 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5697 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5698 tramp = round_trampoline_addr (tramp);
5699 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5700 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5706 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5707 function. The function first checks whether the back end provides
5708 an insn to implement signbit for the respective mode. If not, it
5709 checks whether the floating point format of the value is such that
5710 the sign bit can be extracted. If that is not the case, the
5711 function returns NULL_RTX to indicate that a normal call should be
5712 emitted rather than expanding the function in-line. EXP is the
5713 expression that is a call to the builtin function; if convenient,
5714 the result should be placed in TARGET. */
5716 expand_builtin_signbit (tree exp, rtx target)
5718 const struct real_format *fmt;
5719 enum machine_mode fmode, imode, rmode;
5720 HOST_WIDE_INT hi, lo;
5723 enum insn_code icode;
5726 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5729 arg = CALL_EXPR_ARG (exp, 0);
/* FMODE: mode of the FP argument; RMODE: integer mode of the result.  */
5730 fmode = TYPE_MODE (TREE_TYPE (arg));
5731 rmode = TYPE_MODE (TREE_TYPE (exp));
5732 fmt = REAL_MODE_FORMAT (fmode);
5734 arg = builtin_save_expr (arg);
5736 /* Expand the argument yielding a RTX expression. */
5737 temp = expand_normal (arg);
5739 /* Check if the back end provides an insn that handles signbit for the
5741 icode = signbit_optab->handlers [(int) fmode].insn_code;
5742 if (icode != CODE_FOR_nothing)
5744 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5745 emit_unop_insn (icode, target, temp, UNKNOWN);
5749 /* For floating point formats without a sign bit, implement signbit
5751 bitpos = fmt->signbit_ro;
5754 /* But we can't do this if the format supports signed zero. */
/* A comparison against zero cannot distinguish -0.0, so fall back.  */
5755 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5758 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5759 build_real (TREE_TYPE (arg), dconst0));
5760 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
/* Narrow FP values: reinterpret the whole value in the equivalent
   integer mode so the sign bit can be masked/shifted out directly.  */
5763 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5765 imode = int_mode_for_mode (fmode);
5766 if (imode == BLKmode)
5768 temp = gen_lowpart (imode, temp);
5773 /* Handle targets with different FP word orders. */
5774 if (FLOAT_WORDS_BIG_ENDIAN)
5775 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5777 word = bitpos / BITS_PER_WORD;
/* Extract only the word containing the sign bit; BITPOS becomes the
   bit position within that word.  */
5778 temp = operand_subword_force (temp, word, fmode);
5779 bitpos = bitpos % BITS_PER_WORD;
5782 /* Force the intermediate word_mode (or narrower) result into a
5783 register. This avoids attempting to create paradoxical SUBREGs
5784 of floating point modes below. */
5785 temp = force_reg (imode, temp);
5787 /* If the bitpos is within the "result mode" lowpart, the operation
5788 can be implement with a single bitwise AND. Otherwise, we need
5789 a right shift and an AND. */
5791 if (bitpos < GET_MODE_BITSIZE (rmode))
/* Build the single-bit mask as a (lo, hi) HOST_WIDE_INT pair.  */
5793 if (bitpos < HOST_BITS_PER_WIDE_INT)
5796 lo = (HOST_WIDE_INT) 1 << bitpos;
5800 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5804 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5805 temp = gen_lowpart (rmode, temp);
5806 temp = expand_binop (rmode, and_optab, temp,
5807 immed_double_const (lo, hi, rmode),
5808 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5812 /* Perform a logical right shift to place the signbit in the least
5813 significant bit, then truncate the result to the desired mode
5814 and mask just this bit. */
5815 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5816 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5817 temp = gen_lowpart (rmode, temp);
5818 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5819 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5825 /* Expand fork or exec calls. TARGET is the desired target of the
5826 call. EXP is the call. FN is the
5827 identificator of the actual function. IGNORE is nonzero if the
5828 value is to be ignored. */
5831 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5836 /* If we are not profiling, just call the function. */
5837 if (!profile_arc_flag)
5840 /* Otherwise call the wrapper. This should be equivalent for the rest of
5841 compiler, so the code does not diverge, and the wrapper may run the
5842 code necessary for keeping the profiling sane. */
/* Map each builtin to its libgcov wrapper name.  */
5844 switch (DECL_FUNCTION_CODE (fn))
5847 id = get_identifier ("__gcov_fork");
5850 case BUILT_IN_EXECL:
5851 id = get_identifier ("__gcov_execl");
5854 case BUILT_IN_EXECV:
5855 id = get_identifier ("__gcov_execv");
5858 case BUILT_IN_EXECLP:
5859 id = get_identifier ("__gcov_execlp");
5862 case BUILT_IN_EXECLE:
5863 id = get_identifier ("__gcov_execle");
5866 case BUILT_IN_EXECVP:
5867 id = get_identifier ("__gcov_execvp");
5870 case BUILT_IN_EXECVE:
5871 id = get_identifier ("__gcov_execve");
/* Synthesize an extern declaration for the wrapper, sharing the
   original builtin's function type.  */
5878 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5879 DECL_EXTERNAL (decl) = 1;
5880 TREE_PUBLIC (decl) = 1;
5881 DECL_ARTIFICIAL (decl) = 1;
5882 TREE_NOTHROW (decl) = 1;
5883 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5884 DECL_VISIBILITY_SPECIFIED (decl) = 1;
/* Rebuild the call expression with the wrapper as the callee.  */
5885 call = rewrite_call_expr (exp, 0, decl, 0);
5886 return expand_call (call, target, ignore);
5891 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5892 the pointer in these functions is void*, the tree optimizers may remove
5893 casts. The mode computed in expand_builtin isn't reliable either, due
5894 to __sync_bool_compare_and_swap.
5896 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5897 group of builtins. This gives us log2 of the mode size. */
5899 static inline enum machine_mode
5900 get_builtin_sync_mode (int fcode_diff)
5902 /* The size is not negotiable, so ask not to get BLKmode in return
5903 if the target indicates that a smaller size would be better. */
/* 1 << fcode_diff bytes, as an integer mode.  */
5904 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5907 /* Expand the memory expression LOC and return the appropriate memory operand
5908 for the builtin_sync operations. */
5911 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5915 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5917 /* Note that we explicitly do not want any alias information for this
5918 memory, so that we kill all other live memories. Otherwise we don't
5919 satisfy the full barrier semantics of the intrinsic. */
5920 mem = validize_mem (gen_rtx_MEM (mode, addr));
5922 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5923 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
/* Mark volatile so the access is never deleted or reordered away.  */
5924 MEM_VOLATILE_P (mem) = 1;
5929 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5930 EXP is the CALL_EXPR. CODE is the rtx code
5931 that corresponds to the arithmetic or logical operation from the name;
5932 an exception here is that NOT actually means NAND. TARGET is an optional
5933 place for us to store the results; AFTER is true if this is the
5934 fetch_and_xxx form. IGNORE is true if we don't actually care about
5935 the result of the operation at all. */
5938 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5939 enum rtx_code code, bool after,
5940 rtx target, bool ignore)
5943 enum machine_mode old_mode;
5945 /* Expand the operands. */
5946 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5948 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5949 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5950 of CONST_INTs, where we know the old_mode only from the call argument. */
5951 old_mode = GET_MODE (val);
5952 if (old_mode == VOIDmode)
5953 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5954 val = convert_modes (mode, old_mode, val, 1);
/* When the result is unused, a plain atomic op suffices; otherwise
   emit the fetch variant and return its value.  */
5957 return expand_sync_operation (mem, val, code);
5959 return expand_sync_fetch_operation (mem, val, code, after, target);
5962 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5963 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5964 true if this is the boolean form. TARGET is a place for us to store the
5965 results; this is NOT optional if IS_BOOL is true. */
5968 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5969 bool is_bool, rtx target)
5971 rtx old_val, new_val, mem;
5972 enum machine_mode old_mode;
5974 /* Expand the operands. */
5975 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5978 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5979 mode, EXPAND_NORMAL);
5980 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5981 of CONST_INTs, where we know the old_mode only from the call argument. */
5982 old_mode = GET_MODE (old_val);
5983 if (old_mode == VOIDmode)
5984 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5985 old_val = convert_modes (mode, old_mode, old_val, 1);
/* Same mode fixup for the replacement value (argument 2).  */
5987 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5988 mode, EXPAND_NORMAL);
5989 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5990 of CONST_INTs, where we know the old_mode only from the call argument. */
5991 old_mode = GET_MODE (new_val);
5992 if (old_mode == VOIDmode)
5993 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5994 new_val = convert_modes (mode, old_mode, new_val, 1);
5997 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5999 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6002 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6003 general form is actually an atomic exchange, and some targets only
6004 support a reduced form with the second argument being a constant 1.
6005 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6009 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6013 enum machine_mode old_mode;
6015 /* Expand the operands. */
6016 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6017 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6018 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6019 of CONST_INTs, where we know the old_mode only from the call argument. */
6020 old_mode = GET_MODE (val);
6021 if (old_mode == VOIDmode)
6022 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6023 val = convert_modes (mode, old_mode, val, 1);
6025 return expand_sync_lock_test_and_set (mem, val, target);
6028 /* Expand the __sync_synchronize intrinsic. */
6031 expand_builtin_synchronize (void)
/* Preference order: target memory_barrier insn, then a libcall,
   then a volatile asm with a "memory" clobber.  */
6035 #ifdef HAVE_memory_barrier
6036 if (HAVE_memory_barrier)
6038 emit_insn (gen_memory_barrier ());
6043 if (synchronize_libfunc != NULL_RTX)
6045 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6049 /* If no explicit memory barrier instruction is available, create an
6050 empty asm stmt with a memory clobber. */
6051 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6052 tree_cons (NULL, build_string (6, "memory"), NULL));
6053 ASM_VOLATILE_P (x) = 1;
6054 expand_asm_expr (x);
6057 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6060 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6062 enum insn_code icode;
/* Releasing a lock stores zero.  */
6064 rtx val = const0_rtx;
6066 /* Expand the operands. */
6067 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6069 /* If there is an explicit operation in the md file, use it. */
6070 icode = sync_lock_release[mode];
6071 if (icode != CODE_FOR_nothing)
6073 if (!insn_data[icode].operand[1].predicate (val, mode))
6074 val = force_reg (mode, val);
6076 insn = GEN_FCN (icode) (mem, val);
6084 /* Otherwise we can implement this operation by emitting a barrier
6085 followed by a store of zero. */
6086 expand_builtin_synchronize ();
6087 emit_move_insn (mem, val);
6090 /* Expand an expression EXP that calls a built-in function,
6091 with result going to TARGET if that's convenient
6092 (and in mode MODE if that's convenient).
6093 SUBTARGET may be used as the target for computing one of EXP's operands.
6094 IGNORE is nonzero if the value is to be ignored. */
6097 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6100 tree fndecl = get_callee_fndecl (exp);
6101 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6102 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6104 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6105 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6107 /* When not optimizing, generate calls to library functions for a certain
6110 && !called_as_built_in (fndecl)
6111 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6112 && fcode != BUILT_IN_ALLOCA)
6113 return expand_call (exp, target, ignore);
6115 /* The built-in function expanders test for target == const0_rtx
6116 to determine whether the function's result will be ignored. */
6118 target = const0_rtx;
6120 /* If the result of a pure or const built-in function is ignored, and
6121 none of its arguments are volatile, we can avoid expanding the
6122 built-in call and just evaluate the arguments for side-effects. */
6123 if (target == const0_rtx
6124 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6126 bool volatilep = false;
6128 call_expr_arg_iterator iter;
6130 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6131 if (TREE_THIS_VOLATILE (arg))
6139 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6140 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6147 CASE_FLT_FN (BUILT_IN_FABS):
6148 target = expand_builtin_fabs (exp, target, subtarget);
6153 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6154 target = expand_builtin_copysign (exp, target, subtarget);
6159 /* Just do a normal library call if we were unable to fold
6161 CASE_FLT_FN (BUILT_IN_CABS):
6164 CASE_FLT_FN (BUILT_IN_EXP):
6165 CASE_FLT_FN (BUILT_IN_EXP10):
6166 CASE_FLT_FN (BUILT_IN_POW10):
6167 CASE_FLT_FN (BUILT_IN_EXP2):
6168 CASE_FLT_FN (BUILT_IN_EXPM1):
6169 CASE_FLT_FN (BUILT_IN_LOGB):
6170 CASE_FLT_FN (BUILT_IN_LOG):
6171 CASE_FLT_FN (BUILT_IN_LOG10):
6172 CASE_FLT_FN (BUILT_IN_LOG2):
6173 CASE_FLT_FN (BUILT_IN_LOG1P):
6174 CASE_FLT_FN (BUILT_IN_TAN):
6175 CASE_FLT_FN (BUILT_IN_ASIN):
6176 CASE_FLT_FN (BUILT_IN_ACOS):
6177 CASE_FLT_FN (BUILT_IN_ATAN):
6178 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6179 because of possible accuracy problems. */
6180 if (! flag_unsafe_math_optimizations)
6182 CASE_FLT_FN (BUILT_IN_SQRT):
6183 CASE_FLT_FN (BUILT_IN_FLOOR):
6184 CASE_FLT_FN (BUILT_IN_CEIL):
6185 CASE_FLT_FN (BUILT_IN_TRUNC):
6186 CASE_FLT_FN (BUILT_IN_ROUND):
6187 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6188 CASE_FLT_FN (BUILT_IN_RINT):
6189 target = expand_builtin_mathfn (exp, target, subtarget);
6194 CASE_FLT_FN (BUILT_IN_ILOGB):
6195 if (! flag_unsafe_math_optimizations)
6197 CASE_FLT_FN (BUILT_IN_ISINF):
6198 CASE_FLT_FN (BUILT_IN_FINITE):
6199 case BUILT_IN_ISFINITE:
6200 case BUILT_IN_ISNORMAL:
6201 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6206 CASE_FLT_FN (BUILT_IN_LCEIL):
6207 CASE_FLT_FN (BUILT_IN_LLCEIL):
6208 CASE_FLT_FN (BUILT_IN_LFLOOR):
6209 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6210 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6215 CASE_FLT_FN (BUILT_IN_LRINT):
6216 CASE_FLT_FN (BUILT_IN_LLRINT):
6217 CASE_FLT_FN (BUILT_IN_LROUND):
6218 CASE_FLT_FN (BUILT_IN_LLROUND):
6219 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6224 CASE_FLT_FN (BUILT_IN_POW):
6225 target = expand_builtin_pow (exp, target, subtarget);
6230 CASE_FLT_FN (BUILT_IN_POWI):
6231 target = expand_builtin_powi (exp, target, subtarget);
6236 CASE_FLT_FN (BUILT_IN_ATAN2):
6237 CASE_FLT_FN (BUILT_IN_LDEXP):
6238 CASE_FLT_FN (BUILT_IN_SCALB):
6239 CASE_FLT_FN (BUILT_IN_SCALBN):
6240 CASE_FLT_FN (BUILT_IN_SCALBLN):
6241 if (! flag_unsafe_math_optimizations)
6244 CASE_FLT_FN (BUILT_IN_FMOD):
6245 CASE_FLT_FN (BUILT_IN_REMAINDER):
6246 CASE_FLT_FN (BUILT_IN_DREM):
6247 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6252 CASE_FLT_FN (BUILT_IN_CEXPI):
6253 target = expand_builtin_cexpi (exp, target, subtarget);
6254 gcc_assert (target);
6257 CASE_FLT_FN (BUILT_IN_SIN):
6258 CASE_FLT_FN (BUILT_IN_COS):
6259 if (! flag_unsafe_math_optimizations)
6261 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6266 CASE_FLT_FN (BUILT_IN_SINCOS):
6267 if (! flag_unsafe_math_optimizations)
6269 target = expand_builtin_sincos (exp);
6274 case BUILT_IN_APPLY_ARGS:
6275 return expand_builtin_apply_args ();
6277 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6278 FUNCTION with a copy of the parameters described by
6279 ARGUMENTS, and ARGSIZE. It returns a block of memory
6280 allocated on the stack into which is stored all the registers
6281 that might possibly be used for returning the result of a
6282 function. ARGUMENTS is the value returned by
6283 __builtin_apply_args. ARGSIZE is the number of bytes of
6284 arguments that must be copied. ??? How should this value be
6285 computed? We'll also need a safe worst case value for varargs
6287 case BUILT_IN_APPLY:
6288 if (!validate_arglist (exp, POINTER_TYPE,
6289 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6290 && !validate_arglist (exp, REFERENCE_TYPE,
6291 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6297 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6298 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6299 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6301 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6304 /* __builtin_return (RESULT) causes the function to return the
6305 value described by RESULT. RESULT is address of the block of
6306 memory returned by __builtin_apply. */
6307 case BUILT_IN_RETURN:
6308 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6309 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6312 case BUILT_IN_SAVEREGS:
6313 return expand_builtin_saveregs ();
6315 case BUILT_IN_ARGS_INFO:
6316 return expand_builtin_args_info (exp);
6318 case BUILT_IN_VA_ARG_PACK:
6319 /* All valid uses of __builtin_va_arg_pack () are removed during
6321 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6324 case BUILT_IN_VA_ARG_PACK_LEN:
6325 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6327 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6330 /* Return the address of the first anonymous stack arg. */
6331 case BUILT_IN_NEXT_ARG:
6332 if (fold_builtin_next_arg (exp, false))
6334 return expand_builtin_next_arg ();
6336 case BUILT_IN_CLEAR_CACHE:
6337 target = expand_builtin___clear_cache (exp);
6342 case BUILT_IN_CLASSIFY_TYPE:
6343 return expand_builtin_classify_type (exp);
6345 case BUILT_IN_CONSTANT_P:
6348 case BUILT_IN_FRAME_ADDRESS:
6349 case BUILT_IN_RETURN_ADDRESS:
6350 return expand_builtin_frame_address (fndecl, exp);
6352 /* Returns the address of the area where the structure is returned.
6354 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6355 if (call_expr_nargs (exp) != 0
6356 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6357 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6360 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6362 case BUILT_IN_ALLOCA:
6363 target = expand_builtin_alloca (exp, target);
6368 case BUILT_IN_STACK_SAVE:
6369 return expand_stack_save ();
6371 case BUILT_IN_STACK_RESTORE:
6372 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6375 case BUILT_IN_BSWAP32:
6376 case BUILT_IN_BSWAP64:
6377 target = expand_builtin_bswap (exp, target, subtarget);
6383 CASE_INT_FN (BUILT_IN_FFS):
6384 case BUILT_IN_FFSIMAX:
6385 target = expand_builtin_unop (target_mode, exp, target,
6386 subtarget, ffs_optab);
6391 CASE_INT_FN (BUILT_IN_CLZ):
6392 case BUILT_IN_CLZIMAX:
6393 target = expand_builtin_unop (target_mode, exp, target,
6394 subtarget, clz_optab);
6399 CASE_INT_FN (BUILT_IN_CTZ):
6400 case BUILT_IN_CTZIMAX:
6401 target = expand_builtin_unop (target_mode, exp, target,
6402 subtarget, ctz_optab);
6407 CASE_INT_FN (BUILT_IN_POPCOUNT):
6408 case BUILT_IN_POPCOUNTIMAX:
6409 target = expand_builtin_unop (target_mode, exp, target,
6410 subtarget, popcount_optab);
6415 CASE_INT_FN (BUILT_IN_PARITY):
6416 case BUILT_IN_PARITYIMAX:
6417 target = expand_builtin_unop (target_mode, exp, target,
6418 subtarget, parity_optab);
6423 case BUILT_IN_STRLEN:
6424 target = expand_builtin_strlen (exp, target, target_mode);
6429 case BUILT_IN_STRCPY:
6430 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6435 case BUILT_IN_STRNCPY:
6436 target = expand_builtin_strncpy (exp, target, mode);
6441 case BUILT_IN_STPCPY:
6442 target = expand_builtin_stpcpy (exp, target, mode);
6447 case BUILT_IN_STRCAT:
6448 target = expand_builtin_strcat (fndecl, exp, target, mode);
6453 case BUILT_IN_STRNCAT:
6454 target = expand_builtin_strncat (exp, target, mode);
6459 case BUILT_IN_STRSPN:
6460 target = expand_builtin_strspn (exp, target, mode);
6465 case BUILT_IN_STRCSPN:
6466 target = expand_builtin_strcspn (exp, target, mode);
6471 case BUILT_IN_STRSTR:
6472 target = expand_builtin_strstr (exp, target, mode);
6477 case BUILT_IN_STRPBRK:
6478 target = expand_builtin_strpbrk (exp, target, mode);
6483 case BUILT_IN_INDEX:
6484 case BUILT_IN_STRCHR:
6485 target = expand_builtin_strchr (exp, target, mode);
6490 case BUILT_IN_RINDEX:
6491 case BUILT_IN_STRRCHR:
6492 target = expand_builtin_strrchr (exp, target, mode);
6497 case BUILT_IN_MEMCPY:
6498 target = expand_builtin_memcpy (exp, target, mode);
6503 case BUILT_IN_MEMPCPY:
6504 target = expand_builtin_mempcpy (exp, target, mode);
6509 case BUILT_IN_MEMMOVE:
6510 target = expand_builtin_memmove (exp, target, mode, ignore);
6515 case BUILT_IN_BCOPY:
6516 target = expand_builtin_bcopy (exp, ignore);
6521 case BUILT_IN_MEMSET:
6522 target = expand_builtin_memset (exp, target, mode);
6527 case BUILT_IN_BZERO:
6528 target = expand_builtin_bzero (exp);
6533 case BUILT_IN_STRCMP:
6534 target = expand_builtin_strcmp (exp, target, mode);
6539 case BUILT_IN_STRNCMP:
6540 target = expand_builtin_strncmp (exp, target, mode);
6545 case BUILT_IN_MEMCHR:
6546 target = expand_builtin_memchr (exp, target, mode);
6552 case BUILT_IN_MEMCMP:
6553 target = expand_builtin_memcmp (exp, target, mode);
6558 case BUILT_IN_SETJMP:
6559 /* This should have been lowered to the builtins below. */
6562 case BUILT_IN_SETJMP_SETUP:
6563 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6564 and the receiver label. */
6565 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6567 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6568 VOIDmode, EXPAND_NORMAL);
6569 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6570 rtx label_r = label_rtx (label);
6572 /* This is copied from the handling of non-local gotos. */
6573 expand_builtin_setjmp_setup (buf_addr, label_r);
6574 nonlocal_goto_handler_labels
6575 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6576 nonlocal_goto_handler_labels);
6577 /* ??? Do not let expand_label treat us as such since we would
6578 not want to be both on the list of non-local labels and on
6579 the list of forced labels. */
6580 FORCED_LABEL (label) = 0;
6585 case BUILT_IN_SETJMP_DISPATCHER:
6586 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6587 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6589 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6590 rtx label_r = label_rtx (label);
6592 /* Remove the dispatcher label from the list of non-local labels
6593 since the receiver labels have been added to it above. */
6594 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6599 case BUILT_IN_SETJMP_RECEIVER:
6600 /* __builtin_setjmp_receiver is passed the receiver label. */
6601 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6603 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6604 rtx label_r = label_rtx (label);
6606 expand_builtin_setjmp_receiver (label_r);
6611 /* __builtin_longjmp is passed a pointer to an array of five words.
6612 It's similar to the C library longjmp function but works with
6613 __builtin_setjmp above. */
6614 case BUILT_IN_LONGJMP:
6615 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6617 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6618 VOIDmode, EXPAND_NORMAL);
6619 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6621 if (value != const1_rtx)
6623 error ("%<__builtin_longjmp%> second argument must be 1");
6627 expand_builtin_longjmp (buf_addr, value);
6632 case BUILT_IN_NONLOCAL_GOTO:
6633 target = expand_builtin_nonlocal_goto (exp);
6638 /* This updates the setjmp buffer that is its argument with the value
6639 of the current stack pointer. */
6640 case BUILT_IN_UPDATE_SETJMP_BUF:
6641 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6644 = expand_normal (CALL_EXPR_ARG (exp, 0));
6646 expand_builtin_update_setjmp_buf (buf_addr);
6652 expand_builtin_trap ();
6655 case BUILT_IN_PRINTF:
6656 target = expand_builtin_printf (exp, target, mode, false);
6661 case BUILT_IN_PRINTF_UNLOCKED:
6662 target = expand_builtin_printf (exp, target, mode, true);
6667 case BUILT_IN_FPUTS:
6668 target = expand_builtin_fputs (exp, target, false);
6672 case BUILT_IN_FPUTS_UNLOCKED:
6673 target = expand_builtin_fputs (exp, target, true);
6678 case BUILT_IN_FPRINTF:
6679 target = expand_builtin_fprintf (exp, target, mode, false);
6684 case BUILT_IN_FPRINTF_UNLOCKED:
6685 target = expand_builtin_fprintf (exp, target, mode, true);
6690 case BUILT_IN_SPRINTF:
6691 target = expand_builtin_sprintf (exp, target, mode);
6696 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6697 case BUILT_IN_SIGNBITD32:
6698 case BUILT_IN_SIGNBITD64:
6699 case BUILT_IN_SIGNBITD128:
6700 target = expand_builtin_signbit (exp, target);
6705 /* Various hooks for the DWARF 2 __throw routine. */
6706 case BUILT_IN_UNWIND_INIT:
6707 expand_builtin_unwind_init ();
6709 case BUILT_IN_DWARF_CFA:
6710 return virtual_cfa_rtx;
6711 #ifdef DWARF2_UNWIND_INFO
6712 case BUILT_IN_DWARF_SP_COLUMN:
6713 return expand_builtin_dwarf_sp_column ();
6714 case BUILT_IN_INIT_DWARF_REG_SIZES:
6715 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6718 case BUILT_IN_FROB_RETURN_ADDR:
6719 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6720 case BUILT_IN_EXTRACT_RETURN_ADDR:
6721 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6722 case BUILT_IN_EH_RETURN:
6723 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6724 CALL_EXPR_ARG (exp, 1));
6726 #ifdef EH_RETURN_DATA_REGNO
6727 case BUILT_IN_EH_RETURN_DATA_REGNO:
6728 return expand_builtin_eh_return_data_regno (exp);
6730 case BUILT_IN_EXTEND_POINTER:
6731 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6733 case BUILT_IN_VA_START:
6734 return expand_builtin_va_start (exp);
6735 case BUILT_IN_VA_END:
6736 return expand_builtin_va_end (exp);
6737 case BUILT_IN_VA_COPY:
6738 return expand_builtin_va_copy (exp);
6739 case BUILT_IN_EXPECT:
6740 return expand_builtin_expect (exp, target);
6741 case BUILT_IN_PREFETCH:
6742 expand_builtin_prefetch (exp);
6745 case BUILT_IN_PROFILE_FUNC_ENTER:
6746 return expand_builtin_profile_func (false);
6747 case BUILT_IN_PROFILE_FUNC_EXIT:
6748 return expand_builtin_profile_func (true);
6750 case BUILT_IN_INIT_TRAMPOLINE:
6751 return expand_builtin_init_trampoline (exp);
6752 case BUILT_IN_ADJUST_TRAMPOLINE:
6753 return expand_builtin_adjust_trampoline (exp);
6756 case BUILT_IN_EXECL:
6757 case BUILT_IN_EXECV:
6758 case BUILT_IN_EXECLP:
6759 case BUILT_IN_EXECLE:
6760 case BUILT_IN_EXECVP:
6761 case BUILT_IN_EXECVE:
6762 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6767 case BUILT_IN_FETCH_AND_ADD_1:
6768 case BUILT_IN_FETCH_AND_ADD_2:
6769 case BUILT_IN_FETCH_AND_ADD_4:
6770 case BUILT_IN_FETCH_AND_ADD_8:
6771 case BUILT_IN_FETCH_AND_ADD_16:
6772 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6773 target = expand_builtin_sync_operation (mode, exp, PLUS,
6774 false, target, ignore);
6779 case BUILT_IN_FETCH_AND_SUB_1:
6780 case BUILT_IN_FETCH_AND_SUB_2:
6781 case BUILT_IN_FETCH_AND_SUB_4:
6782 case BUILT_IN_FETCH_AND_SUB_8:
6783 case BUILT_IN_FETCH_AND_SUB_16:
6784 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6785 target = expand_builtin_sync_operation (mode, exp, MINUS,
6786 false, target, ignore);
6791 case BUILT_IN_FETCH_AND_OR_1:
6792 case BUILT_IN_FETCH_AND_OR_2:
6793 case BUILT_IN_FETCH_AND_OR_4:
6794 case BUILT_IN_FETCH_AND_OR_8:
6795 case BUILT_IN_FETCH_AND_OR_16:
6796 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6797 target = expand_builtin_sync_operation (mode, exp, IOR,
6798 false, target, ignore);
6803 case BUILT_IN_FETCH_AND_AND_1:
6804 case BUILT_IN_FETCH_AND_AND_2:
6805 case BUILT_IN_FETCH_AND_AND_4:
6806 case BUILT_IN_FETCH_AND_AND_8:
6807 case BUILT_IN_FETCH_AND_AND_16:
6808 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6809 target = expand_builtin_sync_operation (mode, exp, AND,
6810 false, target, ignore);
6815 case BUILT_IN_FETCH_AND_XOR_1:
6816 case BUILT_IN_FETCH_AND_XOR_2:
6817 case BUILT_IN_FETCH_AND_XOR_4:
6818 case BUILT_IN_FETCH_AND_XOR_8:
6819 case BUILT_IN_FETCH_AND_XOR_16:
6820 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6821 target = expand_builtin_sync_operation (mode, exp, XOR,
6822 false, target, ignore);
6827 case BUILT_IN_FETCH_AND_NAND_1:
6828 case BUILT_IN_FETCH_AND_NAND_2:
6829 case BUILT_IN_FETCH_AND_NAND_4:
6830 case BUILT_IN_FETCH_AND_NAND_8:
6831 case BUILT_IN_FETCH_AND_NAND_16:
6832 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6833 target = expand_builtin_sync_operation (mode, exp, NOT,
6834 false, target, ignore);
6839 case BUILT_IN_ADD_AND_FETCH_1:
6840 case BUILT_IN_ADD_AND_FETCH_2:
6841 case BUILT_IN_ADD_AND_FETCH_4:
6842 case BUILT_IN_ADD_AND_FETCH_8:
6843 case BUILT_IN_ADD_AND_FETCH_16:
6844 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6845 target = expand_builtin_sync_operation (mode, exp, PLUS,
6846 true, target, ignore);
6851 case BUILT_IN_SUB_AND_FETCH_1:
6852 case BUILT_IN_SUB_AND_FETCH_2:
6853 case BUILT_IN_SUB_AND_FETCH_4:
6854 case BUILT_IN_SUB_AND_FETCH_8:
6855 case BUILT_IN_SUB_AND_FETCH_16:
6856 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6857 target = expand_builtin_sync_operation (mode, exp, MINUS,
6858 true, target, ignore);
6863 case BUILT_IN_OR_AND_FETCH_1:
6864 case BUILT_IN_OR_AND_FETCH_2:
6865 case BUILT_IN_OR_AND_FETCH_4:
6866 case BUILT_IN_OR_AND_FETCH_8:
6867 case BUILT_IN_OR_AND_FETCH_16:
6868 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6869 target = expand_builtin_sync_operation (mode, exp, IOR,
6870 true, target, ignore);
6875 case BUILT_IN_AND_AND_FETCH_1:
6876 case BUILT_IN_AND_AND_FETCH_2:
6877 case BUILT_IN_AND_AND_FETCH_4:
6878 case BUILT_IN_AND_AND_FETCH_8:
6879 case BUILT_IN_AND_AND_FETCH_16:
6880 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6881 target = expand_builtin_sync_operation (mode, exp, AND,
6882 true, target, ignore);
6887 case BUILT_IN_XOR_AND_FETCH_1:
6888 case BUILT_IN_XOR_AND_FETCH_2:
6889 case BUILT_IN_XOR_AND_FETCH_4:
6890 case BUILT_IN_XOR_AND_FETCH_8:
6891 case BUILT_IN_XOR_AND_FETCH_16:
6892 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6893 target = expand_builtin_sync_operation (mode, exp, XOR,
6894 true, target, ignore);
6899 case BUILT_IN_NAND_AND_FETCH_1:
6900 case BUILT_IN_NAND_AND_FETCH_2:
6901 case BUILT_IN_NAND_AND_FETCH_4:
6902 case BUILT_IN_NAND_AND_FETCH_8:
6903 case BUILT_IN_NAND_AND_FETCH_16:
6904 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6905 target = expand_builtin_sync_operation (mode, exp, NOT,
6906 true, target, ignore);
6911 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6912 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6913 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6914 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6915 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6916 if (mode == VOIDmode)
6917 mode = TYPE_MODE (boolean_type_node);
6918 if (!target || !register_operand (target, mode))
6919 target = gen_reg_rtx (mode);
6921 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6922 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6927 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6928 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6929 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6930 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6931 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6932 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6933 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6938 case BUILT_IN_LOCK_TEST_AND_SET_1:
6939 case BUILT_IN_LOCK_TEST_AND_SET_2:
6940 case BUILT_IN_LOCK_TEST_AND_SET_4:
6941 case BUILT_IN_LOCK_TEST_AND_SET_8:
6942 case BUILT_IN_LOCK_TEST_AND_SET_16:
6943 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6944 target = expand_builtin_lock_test_and_set (mode, exp, target);
6949 case BUILT_IN_LOCK_RELEASE_1:
6950 case BUILT_IN_LOCK_RELEASE_2:
6951 case BUILT_IN_LOCK_RELEASE_4:
6952 case BUILT_IN_LOCK_RELEASE_8:
6953 case BUILT_IN_LOCK_RELEASE_16:
6954 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6955 expand_builtin_lock_release (mode, exp);
6958 case BUILT_IN_SYNCHRONIZE:
6959 expand_builtin_synchronize ();
6962 case BUILT_IN_OBJECT_SIZE:
6963 return expand_builtin_object_size (exp);
6965 case BUILT_IN_MEMCPY_CHK:
6966 case BUILT_IN_MEMPCPY_CHK:
6967 case BUILT_IN_MEMMOVE_CHK:
6968 case BUILT_IN_MEMSET_CHK:
6969 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6974 case BUILT_IN_STRCPY_CHK:
6975 case BUILT_IN_STPCPY_CHK:
6976 case BUILT_IN_STRNCPY_CHK:
6977 case BUILT_IN_STRCAT_CHK:
6978 case BUILT_IN_STRNCAT_CHK:
6979 case BUILT_IN_SNPRINTF_CHK:
6980 case BUILT_IN_VSNPRINTF_CHK:
6981 maybe_emit_chk_warning (exp, fcode);
6984 case BUILT_IN_SPRINTF_CHK:
6985 case BUILT_IN_VSPRINTF_CHK:
6986 maybe_emit_sprintf_chk_warning (exp, fcode);
6989 default: /* just do library call, if unknown builtin */
6993 /* The switch statement above can drop through to cause the function
6994 to be called normally. */
6995 return expand_call (exp, target, ignore);
6998 /* Determine whether a tree node represents a call to a built-in
6999 function. If the tree T is a call to a built-in function with
7000 the right number of arguments of the appropriate types, return
7001 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7002 Otherwise the return value is END_BUILTINS. */
7004 enum built_in_function
7005 builtin_mathfn_code (const_tree t)
7007 const_tree fndecl, arg, parmlist;
7008 const_tree argtype, parmtype;
7009 const_call_expr_arg_iterator iter;
/* Only a direct call (fn operand is an ADDR_EXPR of a FUNCTION_DECL)
   can be recognized as a builtin.  */
7011 if (TREE_CODE (t) != CALL_EXPR
7012 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7013 return END_BUILTINS;
7015 fndecl = get_callee_fndecl (t);
/* Machine-dependent (BUILT_IN_MD) builtins are deliberately excluded;
   only frontend/normal builtins are classified here.  */
7016 if (fndecl == NULL_TREE
7017 || TREE_CODE (fndecl) != FUNCTION_DECL
7018 || ! DECL_BUILT_IN (fndecl)
7019 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7020 return END_BUILTINS;
/* Walk the formal parameter type list and the actual argument list in
   parallel, checking that each argument belongs to the same broad type
   class (float / complex float / pointer / integral) as its parameter.  */
7022 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7023 init_const_call_expr_arg_iterator (t, &iter);
7024 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7026 /* If a function doesn't take a variable number of arguments,
7027 the last element in the list will have type `void'. */
7028 parmtype = TREE_VALUE (parmlist);
7029 if (VOID_TYPE_P (parmtype))
/* Hit the terminating void: any leftover actual arguments mean the
   call doesn't match the builtin's prototype.  */
7031 if (more_const_call_expr_args_p (&iter))
7032 return END_BUILTINS;
7033 return DECL_FUNCTION_CODE (fndecl);
/* Too few actual arguments for this parameter.  */
7036 if (! more_const_call_expr_args_p (&iter))
7037 return END_BUILTINS;
7039 arg = next_const_call_expr_arg (&iter);
7040 argtype = TREE_TYPE (arg);
7042 if (SCALAR_FLOAT_TYPE_P (parmtype))
7044 if (! SCALAR_FLOAT_TYPE_P (argtype))
7045 return END_BUILTINS;
7047 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7049 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7050 return END_BUILTINS;
7052 else if (POINTER_TYPE_P (parmtype))
7054 if (! POINTER_TYPE_P (argtype))
7055 return END_BUILTINS;
7057 else if (INTEGRAL_TYPE_P (parmtype))
7059 if (! INTEGRAL_TYPE_P (argtype))
7060 return END_BUILTINS;
/* A parameter of any other type class is not handled: no match.  */
7063 return END_BUILTINS;
7066 /* Variable-length argument list. */
7067 return DECL_FUNCTION_CODE (fndecl);
7070 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7071 evaluate to a constant. */
7074 fold_builtin_constant_p (tree arg)
7076 /* We return 1 for a numeric type that's known to be a constant
7077 value at compile-time or for an aggregate type that's a
7078 literal constant. */
7081 /* If we know this is a constant, emit the constant of one. */
7082 if (CONSTANT_CLASS_P (arg)
7083 || (TREE_CODE (arg) == CONSTRUCTOR
7084 && TREE_CONSTANT (arg)))
7085 return integer_one_node;
/* The address of a string literal, or of its first element via a
   zero-index ARRAY_REF, is also a compile-time constant.  */
7086 if (TREE_CODE (arg) == ADDR_EXPR)
7088 tree op = TREE_OPERAND (arg, 0);
7089 if (TREE_CODE (op) == STRING_CST
7090 || (TREE_CODE (op) == ARRAY_REF
7091 && integer_zerop (TREE_OPERAND (op, 1))
7092 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7093 return integer_one_node;
7096 /* If this expression has side effects, show we don't know it to be a
7097 constant. Likewise if it's a pointer or aggregate type since in
7098 those case we only want literals, since those are only optimized
7099 when generating RTL, not later.
7100 And finally, if we are compiling an initializer, not code, we
7101 need to return a definite result now; there's not going to be any
7102 more optimization done. */
7103 if (TREE_SIDE_EFFECTS (arg)
7104 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7105 || POINTER_TYPE_P (TREE_TYPE (arg))
7107 || folding_initializer)
7108 return integer_zero_node;
7113 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7114 return it as a truthvalue. */
7117 build_builtin_expect_predicate (tree pred, tree expected)
7119 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
/* Pull the parameter and return types from __builtin_expect's own
   declaration so the converted operands match its prototype exactly.  */
7121 fn = built_in_decls[BUILT_IN_EXPECT];
7122 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7123 ret_type = TREE_TYPE (TREE_TYPE (fn));
7124 pred_type = TREE_VALUE (arg_types);
7125 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7127 pred = fold_convert (pred_type, pred);
7128 expected = fold_convert (expected_type, expected);
7129 call_expr = build_call_expr (fn, 2, pred, expected);
/* Turn the (typically long-typed) call result back into a truthvalue
   by comparing it against zero.  */
7131 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7132 build_int_cst (ret_type, 0));
7135 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7136 NULL_TREE if no simplification is possible. */
7139 fold_builtin_expect (tree arg0, tree arg1)
7142 enum tree_code code;
7144 /* If this is a builtin_expect within a builtin_expect keep the
7145 inner one. See through a comparison against a constant. It
7146 might have been added to create a thruthvalue. */
7148 if (COMPARISON_CLASS_P (inner)
7149 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7150 inner = TREE_OPERAND (inner, 0);
/* Nested __builtin_expect: the inner call already carries the hint,
   so the outer wrapper is redundant.  */
7152 if (TREE_CODE (inner) == CALL_EXPR
7153 && (fndecl = get_callee_fndecl (inner))
7154 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7155 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7158 /* Distribute the expected value over short-circuiting operators.
7159 See through the cast from truthvalue_type_node to long. */
7161 while (TREE_CODE (inner) == NOP_EXPR
7162 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7163 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7164 inner = TREE_OPERAND (inner, 0);
/* __builtin_expect (a && b, v) becomes
   __builtin_expect (a, v) && __builtin_expect (b, v) (likewise ||),
   attaching the hint to each short-circuit leg.  */
7166 code = TREE_CODE (inner);
7167 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7169 tree op0 = TREE_OPERAND (inner, 0);
7170 tree op1 = TREE_OPERAND (inner, 1);
7172 op0 = build_builtin_expect_predicate (op0, arg1);
7173 op1 = build_builtin_expect_predicate (op1, arg1);
7174 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7176 return fold_convert (TREE_TYPE (arg0), inner);
7179 /* If the argument isn't invariant then there's nothing else we can do. */
7180 if (!TREE_CONSTANT (arg0))
7183 /* If we expect that a comparison against the argument will fold to
7184 a constant return the constant. In practice, this means a true
7185 constant or the address of a non-weak symbol. */
7188 if (TREE_CODE (inner) == ADDR_EXPR)
/* Strip COMPONENT_REF/ARRAY_REF layers to reach the underlying decl;
   a weak symbol's address is not a usable compile-time constant.  */
7192 inner = TREE_OPERAND (inner, 0);
7194 while (TREE_CODE (inner) == COMPONENT_REF
7195 || TREE_CODE (inner) == ARRAY_REF);
7196 if (DECL_P (inner) && DECL_WEAK (inner))
7200 /* Otherwise, ARG0 already has the proper type for the return value. */
7204 /* Fold a call to __builtin_classify_type with argument ARG. */
7207 fold_builtin_classify_type (tree arg)
/* With no argument, classify as "no type"; otherwise map the
   argument's type to its type_class enumeration value.  */
7210 return build_int_cst (NULL_TREE, no_type_class);
7212 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7215 /* Fold a call to __builtin_strlen with argument ARG. */
7218 fold_builtin_strlen (tree arg)
7220 if (!validate_arg (arg, POINTER_TYPE))
/* c_strlen computes the length at compile time when the argument is
   a known string; NULL means the length is not known.  */
7224 tree len = c_strlen (arg, 0);
7228 /* Convert from the internal "sizetype" type to "size_t". */
7230 len = fold_convert (size_type_node, len);
7238 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7241 fold_builtin_inf (tree type, int warn)
7243 REAL_VALUE_TYPE real;
7245 /* __builtin_inff is intended to be usable to define INFINITY on all
7246 targets. If an infinity is not available, INFINITY expands "to a
7247 positive constant of type float that overflows at translation
7248 time", footnote "In this case, using INFINITY will violate the
7249 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7250 Thus we pedwarn to ensure this constraint violation is
7252 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7253 pedwarn ("target format does not support infinity");
/* Build and return a REAL_CST holding the infinity value.  */
7256 return build_real (type, real);
7259 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7262 fold_builtin_nan (tree arg, tree type, int quiet)
7264 REAL_VALUE_TYPE real;
7267 if (!validate_arg (arg, POINTER_TYPE))
/* The NaN payload string must be a compile-time constant; QUIET
   selects a quiet vs. signaling NaN.  real_nan fails (returns 0)
   if the string cannot be parsed for this mode.  */
7269 str = c_getstr (arg);
7273 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7276 return build_real (type, real);
7279 /* Return true if the floating point expression T has an integer value.
7280 We also allow +Inf, -Inf and NaN to be considered integer values. */
7283 integer_valued_real_p (tree t)
7285 switch (TREE_CODE (t))
7292 return integer_valued_real_p (TREE_OPERAND (t, 0));
/* For a two-operand node whose value is its second operand
   (e.g. COMPOUND_EXPR), only that operand matters.  */
7297 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
/* Binary arithmetic is integer-valued when both operands are.  */
7304 return integer_valued_real_p (TREE_OPERAND (t, 0))
7305 && integer_valued_real_p (TREE_OPERAND (t, 1));
/* A conditional is integer-valued when both selectable arms are.  */
7308 return integer_valued_real_p (TREE_OPERAND (t, 1))
7309 && integer_valued_real_p (TREE_OPERAND (t, 2));
7312 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
/* Conversions: from an integer type the result is always integral;
   from a real type it is integral iff the operand was.  */
7316 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7317 if (TREE_CODE (type) == INTEGER_TYPE)
7319 if (TREE_CODE (type) == REAL_TYPE)
7320 return integer_valued_real_p (TREE_OPERAND (t, 0));
/* Rounding builtins produce integer values by definition; min/max of
   two integer values is an integer value.  */
7325 switch (builtin_mathfn_code (t))
7327 CASE_FLT_FN (BUILT_IN_CEIL):
7328 CASE_FLT_FN (BUILT_IN_FLOOR):
7329 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7330 CASE_FLT_FN (BUILT_IN_RINT):
7331 CASE_FLT_FN (BUILT_IN_ROUND):
7332 CASE_FLT_FN (BUILT_IN_TRUNC):
7335 CASE_FLT_FN (BUILT_IN_FMIN):
7336 CASE_FLT_FN (BUILT_IN_FMAX):
7337 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7338 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7351 /* FNDECL is assumed to be a builtin where truncation can be propagated
7352 across (for instance floor((double)f) == (double)floorf (f).
7353 Do the transformation for a call with argument ARG. */
7356 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7358 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7360 if (!validate_arg (arg, REAL_TYPE))
7363 /* Integer rounding functions are idempotent. */
7364 if (fcode == builtin_mathfn_code (arg))
7367 /* If argument is already integer valued, and we don't need to worry
7368 about setting errno, there's no need to perform rounding. */
7369 if (! flag_errno_math && integer_valued_real_p (arg))
/* Narrowing: floor ((double) f) -> (double) floorf (f), valid because
   these functions commute with float extension.  */
7374 tree arg0 = strip_float_extensions (arg);
7375 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7376 tree newtype = TREE_TYPE (arg0);
/* Only narrow when a genuinely narrower builtin variant exists for
   the stripped argument type.  */
7379 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7380 && (decl = mathfn_built_in (newtype, fcode)))
7381 return fold_convert (ftype,
7382 build_call_expr (decl, 1,
7383 fold_convert (newtype, arg0)));
7388 /* FNDECL is assumed to be builtin which can narrow the FP type of
7389 the argument, for instance lround((double)f) -> lroundf (f).
7390 Do the transformation for a call with argument ARG. */
7393 fold_fixed_mathfn (tree fndecl, tree arg)
7395 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7397 if (!validate_arg (arg, REAL_TYPE))
7400 /* If argument is already integer valued, and we don't need to worry
7401 about setting errno, there's no need to perform rounding. */
7402 if (! flag_errno_math && integer_valued_real_p (arg))
7403 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
/* Narrow the floating argument: lround ((double) f) -> lroundf (f),
   when a builtin exists for the narrower type.  */
7407 tree ftype = TREE_TYPE (arg);
7408 tree arg0 = strip_float_extensions (arg);
7409 tree newtype = TREE_TYPE (arg0);
7412 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7413 && (decl = mathfn_built_in (newtype, fcode)))
7414 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7417 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7418 sizeof (long long) == sizeof (long). */
7419 if (TYPE_PRECISION (long_long_integer_type_node)
7420 == TYPE_PRECISION (long_integer_type_node))
7422 tree newfn = NULL_TREE;
/* Map each ll* rounding builtin to its l* counterpart for the
   argument's float type.  */
7425 CASE_FLT_FN (BUILT_IN_LLCEIL):
7426 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7429 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7430 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7433 CASE_FLT_FN (BUILT_IN_LLROUND):
7434 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7437 CASE_FLT_FN (BUILT_IN_LLRINT):
7438 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
/* Re-convert the long result to the original long long return type.  */
7447 tree newcall = build_call_expr(newfn, 1, arg);
7448 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7455 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7456 return type. Return NULL_TREE if no simplification can be made. */
7459 fold_builtin_cabs (tree arg, tree type, tree fndecl)
/* The argument must be a complex value with floating-point parts.  */
7463 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7464 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7467 /* Calculate the result when the argument is a constant. */
7468 if (TREE_CODE (arg) == COMPLEX_CST
7469 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7473 if (TREE_CODE (arg) == COMPLEX_EXPR)
7475 tree real = TREE_OPERAND (arg, 0);
7476 tree imag = TREE_OPERAND (arg, 1);
7478 /* If either part is zero, cabs is fabs of the other. */
7479 if (real_zerop (real))
7480 return fold_build1 (ABS_EXPR, type, imag);
7481 if (real_zerop (imag))
7482 return fold_build1 (ABS_EXPR, type, real);
7484 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7485 if (flag_unsafe_math_optimizations
7486 && operand_equal_p (real, imag, OEP_PURE_SAME))
7488 const REAL_VALUE_TYPE sqrt2_trunc
7489 = real_value_truncate (TYPE_MODE (type),
7490 *get_real_const (rv_sqrt2));
7492 return fold_build2 (MULT_EXPR, type,
7493 fold_build1 (ABS_EXPR, type, real),
7494 build_real (type, sqrt2_trunc));
7498 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7499 if (TREE_CODE (arg) == NEGATE_EXPR
7500 || TREE_CODE (arg) == CONJ_EXPR)
7501 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7503 /* Don't do this when optimizing for size. */
7504 if (flag_unsafe_math_optimizations
7505 && optimize && !optimize_size)
/* Open-code cabs as sqrt (r*r + i*i).  builtin_save_expr prevents
   the argument and each part from being evaluated twice.  */
7507 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7509 if (sqrtfn != NULL_TREE)
7511 tree rpart, ipart, result;
7513 arg = builtin_save_expr (arg);
7515 rpart = fold_build1 (REALPART_EXPR, type, arg);
7516 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7518 rpart = builtin_save_expr (rpart);
7519 ipart = builtin_save_expr (ipart);
7521 result = fold_build2 (PLUS_EXPR, type,
7522 fold_build2 (MULT_EXPR, type,
7524 fold_build2 (MULT_EXPR, type,
7527 return build_call_expr (sqrtfn, 1, result);
7534 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7535 Return NULL_TREE if no simplification can be made. */
7538 fold_builtin_sqrt (tree arg, tree type)
7541 enum built_in_function fcode;
7544 if (!validate_arg (arg, REAL_TYPE))
7547 /* Calculate the result when the argument is a constant. */
/* The &dconst0 lower bound makes the MPFR evaluation fail for
   negative arguments (domain error), leaving the call alone.  */
7548 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7551 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7552 fcode = builtin_mathfn_code (arg);
7553 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7555 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7556 arg = fold_build2 (MULT_EXPR, type,
7557 CALL_EXPR_ARG (arg, 0),
7558 build_real (type, dconsthalf));
7559 return build_call_expr (expfn, 1, arg);
7562 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7563 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7565 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7569 tree arg0 = CALL_EXPR_ARG (arg, 0);
7571 /* The inner root was either sqrt or cbrt. */
7572 REAL_VALUE_TYPE dconstroot =
7573 BUILTIN_SQRT_P (fcode) ? dconsthalf : *get_real_const (rv_third);
7575 /* Adjust for the outer root. */
/* Halving the exponent divides the value by 2: 1/N -> 1/(2*N).  */
7576 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7577 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7578 tree_root = build_real (type, dconstroot);
7579 return build_call_expr (powfn, 2, arg0, tree_root);
7583 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7584 if (flag_unsafe_math_optimizations
7585 && (fcode == BUILT_IN_POW
7586 || fcode == BUILT_IN_POWF
7587 || fcode == BUILT_IN_POWL))
7589 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7590 tree arg0 = CALL_EXPR_ARG (arg, 0);
7591 tree arg1 = CALL_EXPR_ARG (arg, 1);
/* |x| is needed because pow (x, y) with even y is nonnegative even
   for negative x, whereas pow (x, y*0.5) would be a domain error.  */
7593 if (!tree_expr_nonnegative_p (arg0))
7594 arg0 = build1 (ABS_EXPR, type, arg0);
7595 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7596 build_real (type, dconsthalf));
7597 return build_call_expr (powfn, 2, arg0, narg1);
7603 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7604 Return NULL_TREE if no simplification can be made. */
7607 fold_builtin_cbrt (tree arg, tree type)
7609 const enum built_in_function fcode = builtin_mathfn_code (arg);
7612 if (!validate_arg (arg, REAL_TYPE))
7615 /* Calculate the result when the argument is a constant. */
7616 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
/* All remaining transformations change rounding/accuracy, so they are
   gated on -funsafe-math-optimizations.  */
7619 if (flag_unsafe_math_optimizations)
7621 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7622 if (BUILTIN_EXPONENT_P (fcode))
7624 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7625 const REAL_VALUE_TYPE third_trunc =
7626 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_third));
7627 arg = fold_build2 (MULT_EXPR, type,
7628 CALL_EXPR_ARG (arg, 0),
7629 build_real (type, third_trunc));
7630 return build_call_expr (expfn, 1, arg);
7633 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7634 if (BUILTIN_SQRT_P (fcode))
7636 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7640 tree arg0 = CALL_EXPR_ARG (arg, 0);
7642 REAL_VALUE_TYPE dconstroot = *get_real_const (rv_third);
/* Decrement the binary exponent to halve 1/3 into 1/6.  */
7644 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7645 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7646 tree_root = build_real (type, dconstroot);
7647 return build_call_expr (powfn, 2, arg0, tree_root);
7651 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7652 if (BUILTIN_CBRT_P (fcode))
7654 tree arg0 = CALL_EXPR_ARG (arg, 0);
7655 if (tree_expr_nonnegative_p (arg0))
7657 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7662 REAL_VALUE_TYPE dconstroot;
/* 1/9 = (1/3) * (1/3), computed in extended precision then
   truncated to the target mode.  */
7664 real_arithmetic (&dconstroot, MULT_EXPR,
7665 get_real_const (rv_third),
7666 get_real_const (rv_third));
7667 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7668 tree_root = build_real (type, dconstroot);
7669 return build_call_expr (powfn, 2, arg0, tree_root);
7674 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7675 if (fcode == BUILT_IN_POW
7676 || fcode == BUILT_IN_POWF
7677 || fcode == BUILT_IN_POWL)
7679 tree arg00 = CALL_EXPR_ARG (arg, 0);
7680 tree arg01 = CALL_EXPR_ARG (arg, 1);
7681 if (tree_expr_nonnegative_p (arg00))
7683 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7684 const REAL_VALUE_TYPE dconstroot
7685 = real_value_truncate (TYPE_MODE (type),
7686 *get_real_const (rv_third));
7687 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7688 build_real (type, dconstroot));
7689 return build_call_expr (powfn, 2, arg00, narg01);
7696 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7697 TYPE is the type of the return value. Return NULL_TREE if no
7698 simplification can be made. */
7701 fold_builtin_cos (tree arg, tree type, tree fndecl)
7705 if (!validate_arg (arg, REAL_TYPE))
7708 /* Calculate the result when the argument is a constant. */
7709 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7712 /* Optimize cos(-x) into cos (x). */
/* Valid unconditionally because cosine is an even function.  */
7713 if ((narg = fold_strip_sign_ops (arg)))
7714 return build_call_expr (fndecl, 1, narg);
7719 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7720 Return NULL_TREE if no simplification can be made. */
7723 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7725 if (validate_arg (arg, REAL_TYPE))
7729 /* Calculate the result when the argument is a constant. */
7730 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7733 /* Optimize cosh(-x) into cosh (x). */
/* Valid unconditionally because cosh is an even function.  */
7734 if ((narg = fold_strip_sign_ops (arg)))
7735 return build_call_expr (fndecl, 1, narg);
7741 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7742 Return NULL_TREE if no simplification can be made. */
7745 fold_builtin_tan (tree arg, tree type)
7747 enum built_in_function fcode;
7750 if (!validate_arg (arg, REAL_TYPE))
7753 /* Calculate the result when the argument is a constant. */
7754 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7757 /* Optimize tan(atan(x)) = x. */
/* Only under -funsafe-math-optimizations: the composition is exact
   mathematically but not in rounded floating-point arithmetic.  */
7758 fcode = builtin_mathfn_code (arg);
7759 if (flag_unsafe_math_optimizations
7760 && (fcode == BUILT_IN_ATAN
7761 || fcode == BUILT_IN_ATANF
7762 || fcode == BUILT_IN_ATANL))
7763 return CALL_EXPR_ARG (arg, 0);
7768 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7769 NULL_TREE if no simplification can be made. */
7772 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
/* ARG0 is the angle; ARG1/ARG2 are the sine and cosine output
   pointers, in that order.  */
7777 if (!validate_arg (arg0, REAL_TYPE)
7778 || !validate_arg (arg1, POINTER_TYPE)
7779 || !validate_arg (arg2, POINTER_TYPE))
7782 type = TREE_TYPE (arg0);
7784 /* Calculate the result when the argument is a constant. */
7785 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7788 /* Canonicalize sincos to cexpi. */
7789 if (!TARGET_C99_FUNCTIONS)
7791 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7795 call = build_call_expr (fn, 1, arg0);
/* Save the cexpi result so its real and imaginary parts can each be
   extracted without evaluating the call twice.  */
7796 call = builtin_save_expr (call);
/* cexpi (x) = cos (x) + i*sin (x): store the imaginary part through
   the sine pointer and the real part through the cosine pointer.  */
7798 return build2 (COMPOUND_EXPR, type,
7799 build2 (MODIFY_EXPR, void_type_node,
7800 build_fold_indirect_ref (arg1),
7801 build1 (IMAGPART_EXPR, type, call)),
7802 build2 (MODIFY_EXPR, void_type_node,
7803 build_fold_indirect_ref (arg2),
7804 build1 (REALPART_EXPR, type, call)));
7807 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7808 NULL_TREE if no simplification can be made. */
7811 fold_builtin_cexp (tree arg0, tree type)
7814 tree realp, imagp, ifn;
7816 if (!validate_arg (arg0, COMPLEX_TYPE))
/* RTYPE is the component (real) type of the complex argument.  */
7819 rtype = TREE_TYPE (TREE_TYPE (arg0));
7821 /* In case we can figure out the real part of arg0 and it is constant zero
7823 if (!TARGET_C99_FUNCTIONS)
7825 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
/* cexp (0 + i*y) == cexpi (y): fold to the cheaper cexpi call.  */
7829 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7830 && real_zerop (realp))
7832 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7833 return build_call_expr (ifn, 1, narg);
7836 /* In case we can easily decompose real and imaginary parts split cexp
7837 to exp (r) * cexpi (i). */
7838 if (flag_unsafe_math_optimizations
7841 tree rfn, rcall, icall;
7843 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7847 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
/* Save both calls so each is evaluated once while its result is used
   in both components of the COMPLEX_EXPR below.  */
7851 icall = build_call_expr (ifn, 1, imagp);
7852 icall = builtin_save_expr (icall);
7853 rcall = build_call_expr (rfn, 1, realp);
7854 rcall = builtin_save_expr (rcall);
7855 return fold_build2 (COMPLEX_EXPR, type,
7856 fold_build2 (MULT_EXPR, rtype,
7858 fold_build1 (REALPART_EXPR, rtype, icall)),
7859 fold_build2 (MULT_EXPR, rtype,
7861 fold_build1 (IMAGPART_EXPR, rtype, icall)));
7867 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7868 Return NULL_TREE if no simplification can be made. */
7871 fold_builtin_trunc (tree fndecl, tree arg)
7873 if (!validate_arg (arg, REAL_TYPE))
7876 /* Optimize trunc of constant value. */
7877 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7879 REAL_VALUE_TYPE r, x;
7880 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7882 x = TREE_REAL_CST (arg);
7883 real_trunc (&r, TYPE_MODE (type), &x);
7884 return build_real (type, r);
/* Non-constant argument: fall back to the generic narrowing /
   idempotence transformations shared by the rounding builtins.  */
7887 return fold_trunc_transparent_mathfn (fndecl, arg);
7890 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7891 Return NULL_TREE if no simplification can be made. */
7894 fold_builtin_floor (tree fndecl, tree arg)
7896 if (!validate_arg (arg, REAL_TYPE))
7899 /* Optimize floor of constant value. */
7900 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7904 x = TREE_REAL_CST (arg);
/* Don't fold a NaN constant when -fmath-errno is in effect, so the
   runtime call keeps any errno/exception side effects.  */
7905 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7907 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7910 real_floor (&r, TYPE_MODE (type), &x);
7911 return build_real (type, r);
7915 /* Fold floor (x) where x is nonnegative to trunc (x). */
7916 if (tree_expr_nonnegative_p (arg))
7918 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7920 return build_call_expr (truncfn, 1, arg);
7923 return fold_trunc_transparent_mathfn (fndecl, arg);
7926 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7927 Return NULL_TREE if no simplification can be made. */
7930 fold_builtin_ceil (tree fndecl, tree arg)
7932 if (!validate_arg (arg, REAL_TYPE))
7935 /* Optimize ceil of constant value. */
7936 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7940 x = TREE_REAL_CST (arg)
/* As in fold_builtin_floor, leave NaN constants alone when errno
   semantics must be preserved.  */;
7941 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7943 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7946 real_ceil (&r, TYPE_MODE (type), &x);
7947 return build_real (type, r);
7951 return fold_trunc_transparent_mathfn (fndecl, arg);
7954 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7955 Return NULL_TREE if no simplification can be made. */
7958 fold_builtin_round (tree fndecl, tree arg)
7960 if (!validate_arg (arg, REAL_TYPE))
7963 /* Optimize round of constant value. */
7964 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7968 x = TREE_REAL_CST (arg);
/* As in fold_builtin_floor, a NaN constant is only folded when
   -fno-math-errno allows discarding runtime side effects.  */
7969 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7971 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7974 real_round (&r, TYPE_MODE (type), &x);
7975 return build_real (type, r);
7979 return fold_trunc_transparent_mathfn (fndecl, arg);
7982 /* Fold function call to builtin lround, lroundf or lroundl (or the
7983    corresponding long long versions) and other rounding functions.  ARG
7984    is the argument to the call.  Return NULL_TREE if no simplification
/* NOTE(review): chunk is sampled; elided lines (declarations, breaks,
   closing braces) are not visible here.  */
7988 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7990   if (!validate_arg (arg, REAL_TYPE))
7993   /* Optimize lround of constant value. */
7994   if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7996       const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
/* Only finite constants can be converted; Inf/NaN must trap at runtime.  */
7998       if (real_isfinite (&x))
8000 	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8001 	  tree ftype = TREE_TYPE (arg);
8002 	  unsigned HOST_WIDE_INT lo2;
8003 	  HOST_WIDE_INT hi, lo;
/* Select the rounding mode matching the builtin being folded.  */
8006 	  switch (DECL_FUNCTION_CODE (fndecl))
8008 	    CASE_FLT_FN (BUILT_IN_LFLOOR):
8009 	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
8010 	      real_floor (&r, TYPE_MODE (ftype), &x);
8013 	    CASE_FLT_FN (BUILT_IN_LCEIL):
8014 	    CASE_FLT_FN (BUILT_IN_LLCEIL):
8015 	      real_ceil (&r, TYPE_MODE (ftype), &x);
8018 	    CASE_FLT_FN (BUILT_IN_LROUND):
8019 	    CASE_FLT_FN (BUILT_IN_LLROUND):
8020 	      real_round (&r, TYPE_MODE (ftype), &x);
/* Fold only when the rounded value fits the integer result type.  */
8027 	  REAL_VALUE_TO_INT (&lo, &hi, r);
8028 	  if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8029 	    return build_int_cst_wide (itype, lo2, hi);
8033   switch (DECL_FUNCTION_CODE (fndecl))
8035     CASE_FLT_FN (BUILT_IN_LFLOOR):
8036     CASE_FLT_FN (BUILT_IN_LLFLOOR):
8037       /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8038       if (tree_expr_nonnegative_p (arg))
8039 	return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8045   return fold_fixed_mathfn (fndecl, arg);
8048 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8049    and their long and long long variants (i.e. ffsl and ffsll).  ARG is
8050    the argument to the call.  Return NULL_TREE if no simplification can
/* NOTE(review): chunk is sampled; elided lines omitted.  */
8054 fold_builtin_bitop (tree fndecl, tree arg)
8056   if (!validate_arg (arg, INTEGER_TYPE))
8059   /* Optimize for constant argument. */
8060   if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8062       HOST_WIDE_INT hi, width, result;
8063       unsigned HOST_WIDE_INT lo;
8066       type = TREE_TYPE (arg);
8067       width = TYPE_PRECISION (type);
8068       lo = TREE_INT_CST_LOW (arg);
8070       /* Clear all the bits that are beyond the type's precision. */
8071       if (width > HOST_BITS_PER_WIDE_INT)
8073 	  hi = TREE_INT_CST_HIGH (arg);
8074 	  if (width < 2 * HOST_BITS_PER_WIDE_INT)
8075 	    hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8080 	  if (width < HOST_BITS_PER_WIDE_INT)
8081 	    lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
/* Compute the builtin's value on the (lo, hi) double-word constant.  */
8084       switch (DECL_FUNCTION_CODE (fndecl))
8086 	CASE_INT_FN (BUILT_IN_FFS):
/* ffs: index of least significant set bit, 1-based; lo & -lo isolates it.  */
8088 	    result = exact_log2 (lo & -lo) + 1;
8090 	    result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8095 	CASE_INT_FN (BUILT_IN_CLZ):
8097 	    result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8099 	    result = width - floor_log2 (lo) - 1;
/* clz(0) is only foldable if the target defines a value for it.  */
8100 	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8104 	CASE_INT_FN (BUILT_IN_CTZ):
8106 	    result = exact_log2 (lo & -lo);
8108 	    result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8109 	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8113 	CASE_INT_FN (BUILT_IN_POPCOUNT):
/* Kernighan's trick: each lo &= lo - 1 clears the lowest set bit.  */
8116 	    result++, lo &= lo - 1;
8118 	    result++, hi &= hi - 1;
8121 	CASE_INT_FN (BUILT_IN_PARITY):
8124 	    result++, lo &= lo - 1;
8126 	    result++, hi &= hi - 1;
8134       return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8140 /* Fold function call to builtin_bswap and the long and long long
8141    variants.  Return NULL_TREE if no simplification can be made.  */
/* NOTE(review): chunk is sampled; elided lines omitted.  */
8143 fold_builtin_bswap (tree fndecl, tree arg)
8145   if (! validate_arg (arg, INTEGER_TYPE))
8148   /* Optimize constant value. */
8149   if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8151       HOST_WIDE_INT hi, width, r_hi = 0;
8152       unsigned HOST_WIDE_INT lo, r_lo = 0;
8155       type = TREE_TYPE (arg);
8156       width = TYPE_PRECISION (type);
8157       lo = TREE_INT_CST_LOW (arg);
8158       hi = TREE_INT_CST_HIGH (arg);
8160       switch (DECL_FUNCTION_CODE (fndecl))
8162 	case BUILT_IN_BSWAP32:
8163 	case BUILT_IN_BSWAP64:
/* Move each byte at offset S to the mirrored offset D = width - s - 8,
   reading/writing from the low or high word as the offset requires.  */
8167 	    for (s = 0; s < width; s += 8)
8169 		int d = width - s - 8;
8170 		unsigned HOST_WIDE_INT byte;
8172 		if (s < HOST_BITS_PER_WIDE_INT)
8173 		  byte = (lo >> s) & 0xff;
8175 		  byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8177 		if (d < HOST_BITS_PER_WIDE_INT)
8180 		  r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
/* Narrow results fit in a single-word constant; otherwise build a
   double-word (lo, hi) integer constant.  */
8190       if (width < HOST_BITS_PER_WIDE_INT)
8191 	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8193 	return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8199 /* Return true if EXPR is the real constant contained in VALUE.
   A complex constant also matches when its real part equals VALUE and
   its imaginary part is zero.  Overflowed constants never match.  */
8202 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8206   return ((TREE_CODE (expr) == REAL_CST
8207 	   && !TREE_OVERFLOW (expr)
8208 	   && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8209 	  || (TREE_CODE (expr) == COMPLEX_CST
8210 	      && real_dconstp (TREE_REALPART (expr), value)
8211 	      && real_zerop (TREE_IMAGPART (expr))));
8214 /* A subroutine of fold_builtin to fold the various logarithmic
8215    functions.  Return NULL_TREE if no simplification can be made.
8216    FUNC is the corresponding MPFR logarithm function.
   NOTE(review): chunk is sampled; elided lines (breaks, braces,
   declarations) omitted.  */
8219 fold_builtin_logarithm (tree fndecl, tree arg,
8220 			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8222   if (validate_arg (arg, REAL_TYPE))
8224       tree type = TREE_TYPE (TREE_TYPE (fndecl));
8226       const enum built_in_function fcode = builtin_mathfn_code (arg);
8228       /* Optimize log(e) = 1.0.  We're never passed an exact 'e',
8229 	 instead we'll look for 'e' truncated to MODE.  So only do
8230 	 this if flag_unsafe_math_optimizations is set. */
8231       if (flag_unsafe_math_optimizations && func == mpfr_log)
8233 	  const REAL_VALUE_TYPE e_truncated =
8234 	    real_value_truncate (TYPE_MODE (type), *get_real_const (rv_e));
8235 	  if (real_dconstp (arg, &e_truncated))
8236 	    return build_real (type, dconst1);
8239       /* Calculate the result when the argument is a constant. */
8240       if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8243       /* Special case, optimize logN(expN(x)) = x. */
8244       if (flag_unsafe_math_optimizations
8245 	  && ((func == mpfr_log
8246 	       && (fcode == BUILT_IN_EXP
8247 		   || fcode == BUILT_IN_EXPF
8248 		   || fcode == BUILT_IN_EXPL))
8249 	      || (func == mpfr_log2
8250 		  && (fcode == BUILT_IN_EXP2
8251 		      || fcode == BUILT_IN_EXP2F
8252 		      || fcode == BUILT_IN_EXP2L))
8253 	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8254 	return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8256       /* Optimize logN(func()) for various exponential functions.  We
8257 	 want to determine the value "x" and the power "exponent" in
8258 	 order to transform logN(x**exponent) into exponent*logN(x). */
8259       if (flag_unsafe_math_optimizations)
8261 	  tree exponent = 0, x = 0;
8265 	    CASE_FLT_FN (BUILT_IN_EXP):
8266 	      /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8267 	      x = build_real (type,
8268 			      real_value_truncate (TYPE_MODE (type),
8269 						   *get_real_const (rv_e)));
8270 	      exponent = CALL_EXPR_ARG (arg, 0);
8272 	    CASE_FLT_FN (BUILT_IN_EXP2):
8273 	      /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8274 	      x = build_real (type, dconst2);
8275 	      exponent = CALL_EXPR_ARG (arg, 0);
8277 	    CASE_FLT_FN (BUILT_IN_EXP10):
8278 	    CASE_FLT_FN (BUILT_IN_POW10):
8279 	      /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8281 		REAL_VALUE_TYPE dconst10;
8282 		real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8283 		x = build_real (type, dconst10);
8285 	      exponent = CALL_EXPR_ARG (arg, 0);
8287 	    CASE_FLT_FN (BUILT_IN_SQRT):
8288 	      /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8289 	      x = CALL_EXPR_ARG (arg, 0);
8290 	      exponent = build_real (type, dconsthalf);
8292 	    CASE_FLT_FN (BUILT_IN_CBRT):
8293 	      /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8294 	      x = CALL_EXPR_ARG (arg, 0);
8295 	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8296 								*get_real_const (rv_third)));
8298 	    CASE_FLT_FN (BUILT_IN_POW):
8299 	      /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8300 	      x = CALL_EXPR_ARG (arg, 0);
8301 	      exponent = CALL_EXPR_ARG (arg, 1);
8307 	  /* Now perform the optimization. */
8310 	      tree logfn = build_call_expr (fndecl, 1, x);
8311 	      return fold_build2 (MULT_EXPR, type, exponent, logfn);
8319 /* Fold a builtin function call to hypot, hypotf, or hypotl.  Return
8320    NULL_TREE if no simplification can be made.
   NOTE(review): chunk is sampled; elided lines omitted.  */
8323 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8325   tree res, narg0, narg1;
8327   if (!validate_arg (arg0, REAL_TYPE)
8328       || !validate_arg (arg1, REAL_TYPE))
8331   /* Calculate the result when the argument is a constant. */
8332   if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8335   /* If either argument to hypot has a negate or abs, strip that off.
8336      E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8337   narg0 = fold_strip_sign_ops (arg0);
8338   narg1 = fold_strip_sign_ops (arg1);
/* Rebuild the call with sign-stripped operands when anything changed.  */
8341       return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8342 			      narg1 ? narg1 : arg1);
8345   /* If either argument is zero, hypot is fabs of the other. */
8346   if (real_zerop (arg0))
8347     return fold_build1 (ABS_EXPR, type, arg1);
8348   else if (real_zerop (arg1))
8349     return fold_build1 (ABS_EXPR, type, arg0);
8351   /* hypot(x,x) -> fabs(x)*sqrt(2). */
8352   if (flag_unsafe_math_optimizations
8353       && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8355       const REAL_VALUE_TYPE sqrt2_trunc
8356 	= real_value_truncate (TYPE_MODE (type), *get_real_const (rv_sqrt2));
8357       return fold_build2 (MULT_EXPR, type,
8358 			  fold_build1 (ABS_EXPR, type, arg0),
8359 			  build_real (type, sqrt2_trunc));
8366 /* Fold a builtin function call to pow, powf, or powl.  Return
8367    NULL_TREE if no simplification can be made.
   NOTE(review): chunk is sampled; elided lines omitted.  */
8369 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8373   if (!validate_arg (arg0, REAL_TYPE)
8374       || !validate_arg (arg1, REAL_TYPE))
8377   /* Calculate the result when the argument is a constant. */
8378   if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8381   /* Optimize pow(1.0,y) = 1.0. */
8382   if (real_onep (arg0))
8383     return omit_one_operand (type, build_real (type, dconst1), arg1);
/* Simplifications that require a constant exponent.  */
8385   if (TREE_CODE (arg1) == REAL_CST
8386       && !TREE_OVERFLOW (arg1))
8388       REAL_VALUE_TYPE cint;
8392       c = TREE_REAL_CST (arg1);
8394       /* Optimize pow(x,0.0) = 1.0. */
8395       if (REAL_VALUES_EQUAL (c, dconst0))
8396 	return omit_one_operand (type, build_real (type, dconst1),
8399       /* Optimize pow(x,1.0) = x. */
8400       if (REAL_VALUES_EQUAL (c, dconst1))
8403       /* Optimize pow(x,-1.0) = 1.0/x. */
8404       if (REAL_VALUES_EQUAL (c, dconstm1))
8405 	return fold_build2 (RDIV_EXPR, type,
8406 			    build_real (type, dconst1), arg0);
8408       /* Optimize pow(x,0.5) = sqrt(x). */
8409       if (flag_unsafe_math_optimizations
8410 	  && REAL_VALUES_EQUAL (c, dconsthalf))
8412 	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8414 	  if (sqrtfn != NULL_TREE)
8415 	    return build_call_expr (sqrtfn, 1, arg0);
8418       /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8419       if (flag_unsafe_math_optimizations)
8421 	  const REAL_VALUE_TYPE dconstroot
8422 	    = real_value_truncate (TYPE_MODE (type),
8423 				   *get_real_const (rv_third));
8425 	  if (REAL_VALUES_EQUAL (c, dconstroot))
8427 	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8428 	      if (cbrtfn != NULL_TREE)
8429 		return build_call_expr (cbrtfn, 1, arg0);
8433       /* Check for an integer exponent. */
8434       n = real_to_integer (&c);
8435       real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
/* real_identical confirms the exponent round-trips exactly through an
   integer, i.e. it really is integral.  */
8436       if (real_identical (&c, &cint))
8438 	  /* Attempt to evaluate pow at compile-time. */
8439 	  if (TREE_CODE (arg0) == REAL_CST
8440 	      && !TREE_OVERFLOW (arg0))
8445 	      x = TREE_REAL_CST (arg0);
8446 	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
/* An inexact compile-time result is only acceptable under
   -funsafe-math-optimizations.  */
8447 	      if (flag_unsafe_math_optimizations || !inexact)
8448 		return build_real (type, x);
8451 	  /* Strip sign ops from even integer powers. */
8452 	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8454 	      tree narg0 = fold_strip_sign_ops (arg0);
8456 		return build_call_expr (fndecl, 2, narg0, arg1);
/* Simplifications keyed off the form of the base expression.  */
8461   if (flag_unsafe_math_optimizations)
8463       const enum built_in_function fcode = builtin_mathfn_code (arg0);
8465       /* Optimize pow(expN(x),y) = expN(x*y). */
8466       if (BUILTIN_EXPONENT_P (fcode))
8468 	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8469 	  tree arg = CALL_EXPR_ARG (arg0, 0);
8470 	  arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8471 	  return build_call_expr (expfn, 1, arg);
8474       /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8475       if (BUILTIN_SQRT_P (fcode))
8477 	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
8478 	  tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8479 				    build_real (type, dconsthalf));
8480 	  return build_call_expr (fndecl, 2, narg0, narg1);
8483       /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8484       if (BUILTIN_CBRT_P (fcode))
8486 	  tree arg = CALL_EXPR_ARG (arg0, 0);
8487 	  if (tree_expr_nonnegative_p (arg))
8489 	      const REAL_VALUE_TYPE dconstroot
8490 		= real_value_truncate (TYPE_MODE (type),
8491 				       *get_real_const (rv_third));
8492 	      tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8493 					build_real (type, dconstroot));
8494 	      return build_call_expr (fndecl, 2, arg, narg1);
8498       /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8499       if (fcode == BUILT_IN_POW
8500 	  || fcode == BUILT_IN_POWF
8501 	  || fcode == BUILT_IN_POWL)
8503 	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
8504 	  tree arg01 = CALL_EXPR_ARG (arg0, 1);
8505 	  tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8506 	  return build_call_expr (fndecl, 2, arg00, narg1);
8513 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8514    Return NULL_TREE if no simplification can be made.
   NOTE(review): chunk is sampled; elided lines omitted.  */
8516 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8517 		   tree arg0, tree arg1, tree type)
8519   if (!validate_arg (arg0, REAL_TYPE)
8520       || !validate_arg (arg1, INTEGER_TYPE))
8523   /* Optimize pow(1.0,y) = 1.0. */
8524   if (real_onep (arg0))
8525     return omit_one_operand (type, build_real (type, dconst1), arg1);
/* Simplifications that need a host-representable integer exponent.  */
8527   if (host_integerp (arg1, 0))
8529       HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8531       /* Evaluate powi at compile-time. */
8532       if (TREE_CODE (arg0) == REAL_CST
8533 	  && !TREE_OVERFLOW (arg0))
8536 	  x = TREE_REAL_CST (arg0);
8537 	  real_powi (&x, TYPE_MODE (type), &x, c);
8538 	  return build_real (type, x);
8541       /* Optimize pow(x,0) = 1.0. */
8543 	return omit_one_operand (type, build_real (type, dconst1),
8546       /* Optimize pow(x,1) = x. */
8550       /* Optimize pow(x,-1) = 1.0/x. */
8552 	return fold_build2 (RDIV_EXPR, type,
8553 			    build_real (type, dconst1), arg0);
8559 /* A subroutine of fold_builtin to fold the various exponent
8560    functions.  Return NULL_TREE if no simplification can be made.
8561    FUNC is the corresponding MPFR exponent function.
   NOTE(review): chunk is sampled; elided lines omitted.  */
8564 fold_builtin_exponent (tree fndecl, tree arg,
8565 		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8567   if (validate_arg (arg, REAL_TYPE))
8569       tree type = TREE_TYPE (TREE_TYPE (fndecl));
8572       /* Calculate the result when the argument is a constant. */
8573       if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8576       /* Optimize expN(logN(x)) = x. */
8577       if (flag_unsafe_math_optimizations)
8579 	  const enum built_in_function fcode = builtin_mathfn_code (arg);
/* Match the exponential with its inverse logarithm of the same base.  */
8581 	  if ((func == mpfr_exp
8582 	       && (fcode == BUILT_IN_LOG
8583 		   || fcode == BUILT_IN_LOGF
8584 		   || fcode == BUILT_IN_LOGL))
8585 	      || (func == mpfr_exp2
8586 		  && (fcode == BUILT_IN_LOG2
8587 		      || fcode == BUILT_IN_LOG2F
8588 		      || fcode == BUILT_IN_LOG2L))
8589 	      || (func == mpfr_exp10
8590 		  && (fcode == BUILT_IN_LOG10
8591 		      || fcode == BUILT_IN_LOG10F
8592 		      || fcode == BUILT_IN_LOG10L)))
8593 	    return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8600 /* Return true if VAR is a VAR_DECL or a component thereof.
   Peels handled components (COMPONENT_REF, ARRAY_REF, etc.) until the
   base object is reached, then checks it with SSA_VAR_P.  */
8603 var_decl_component_p (tree var)
8606   while (handled_component_p (inner))
8607     inner = TREE_OPERAND (inner, 0);
8608   return SSA_VAR_P (inner);
8611 /* Fold function call to builtin memset.  Return
8612    NULL_TREE if no simplification can be made.
   NOTE(review): chunk is sampled; elided lines omitted.  */
8615 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8618   unsigned HOST_WIDE_INT length, cval;
8620   if (! validate_arg (dest, POINTER_TYPE)
8621       || ! validate_arg (c, INTEGER_TYPE)
8622       || ! validate_arg (len, INTEGER_TYPE))
8625   if (! host_integerp (len, 1))
8628   /* If the LEN parameter is zero, return DEST. */
8629   if (integer_zerop (len))
8630     return omit_one_operand (type, dest, c)
8632   if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
/* Only fold stores to a directly-addressed, non-volatile scalar var.  */
8637   if (TREE_CODE (var) != ADDR_EXPR)
8640   var = TREE_OPERAND (var, 0);
8641   if (TREE_THIS_VOLATILE (var))
8644   if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8645       && !POINTER_TYPE_P (TREE_TYPE (var)))
8648   if (! var_decl_component_p (var))
/* The fill must cover the variable exactly and be suitably aligned.  */
8651   length = tree_low_cst (len, 1);
8652   if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8653       || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8657   if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8660   if (integer_zerop (c))
/* Replicating the fill byte assumes 8-bit bytes and <= 64-bit words.  */
8664       if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8667       cval = tree_low_cst (c, 1);
8671       cval |= (cval << 31) << 1;
/* Emit VAR = CVAL as a plain scalar assignment.  */
8674   ret = build_int_cst_type (TREE_TYPE (var), cval);
8675   ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8679   return omit_one_operand (type, dest, ret);
8682 /* Fold function call to builtin bzero.  Return
8683    NULL_TREE if no simplification can be made.  */
8686 fold_builtin_bzero (tree dest, tree size, bool ignore)
8688   if (! validate_arg (dest, POINTER_TYPE)
8689       || ! validate_arg (size, INTEGER_TYPE))
8695   /* New argument list transforming bzero(ptr x, int y) to
8696      memset(ptr x, int 0, size_t y).  This is done this way
8697      so that if it isn't expanded inline, we fallback to
8698      calling bzero instead of memset. */
8700   return fold_builtin_memset (dest, integer_zero_node,
8701 			      fold_convert (sizetype, size),
8702 			      void_type_node, ignore);
8705 /* Fold function call to builtin mem{{,p}cpy,move}.  Return
8706    NULL_TREE if no simplification can be made.
8707    If ENDP is 0, return DEST (like memcpy).
8708    If ENDP is 1, return DEST+LEN (like mempcpy).
8709    If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8710    If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   NOTE(review): chunk is sampled; elided lines omitted.  */
8714 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8716   tree destvar, srcvar, expr;
8718   if (! validate_arg (dest, POINTER_TYPE)
8719       || ! validate_arg (src, POINTER_TYPE)
8720       || ! validate_arg (len, INTEGER_TYPE))
8723   /* If the LEN parameter is zero, return DEST. */
8724   if (integer_zerop (len))
8725     return omit_one_operand (type, dest, src);
8727   /* If SRC and DEST are the same (and not volatile), return
8728      DEST{,+LEN,+LEN-1}. */
8729   if (operand_equal_p (src, dest, 0))
8733       tree srctype, desttype;
8736       int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8737       int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8739       /* Both DEST and SRC must be pointer types.
8740 	 ??? This is what old code did.  Is the testing for pointer types
8743 	 If either SRC is readonly or length is 1, we can use memcpy. */
8744       if (dest_align && src_align
8745 	  && (readonly_data_expr (src)
8746 	      || (host_integerp (len, 1)
8747 		  && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8748 		      tree_low_cst (len, 1)))))
8750 	  tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8753 	  return build_call_expr (fn, 3, dest, src, len);
/* Beyond here: try to fold the copy into a single scalar assignment;
   requires a small, host-representable length.  */
8758       if (!host_integerp (len, 0))
8761 	 This logic lose for arguments like (type *)malloc (sizeof (type)),
8762 	 since we strip the casts of up to VOID return value from malloc.
8763 	 Perhaps we ought to inherit type from non-VOID argument here? */
8766       srctype = TREE_TYPE (TREE_TYPE (src));
8767       desttype = TREE_TYPE (TREE_TYPE (dest));
/* The pointed-to types must exist and be exactly LEN bytes each.  */
8768       if (!srctype || !desttype
8769 	  || !TYPE_SIZE_UNIT (srctype)
8770 	  || !TYPE_SIZE_UNIT (desttype)
8771 	  || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8772 	  || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8773 	  || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8774 	  || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
/* Both pointers must be at least as aligned as their pointed-to types.  */
8777       if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8778 	  < (int) TYPE_ALIGN (desttype)
8779 	  || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8780 	      < (int) TYPE_ALIGN (srctype)))
8784 	dest = builtin_save_expr (dest);
8786       srcvar = build_fold_indirect_ref (src);
8787       if (TREE_THIS_VOLATILE (srcvar))
8789       if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8791       /* With memcpy, it is possible to bypass aliasing rules, so without
8792 	 this check i.e. execute/20060930-2.c would be misoptimized, because
8793 	 it use conflicting alias set to hold argument for the memcpy call.
8794 	 This check is probably unnecessary with -fno-strict-aliasing.
8795 	 Similarly for destvar.  See also PR29286. */
8796       if (!var_decl_component_p (srcvar)
8797 	  /* Accept: memcpy (*char_var, "test", 1); that simplify
8799 	  || is_gimple_min_invariant (srcvar)
8800 	  || readonly_data_expr (src))
8803       destvar = build_fold_indirect_ref (dest);
8804       if (TREE_THIS_VOLATILE (destvar))
8806       if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8808       if (!var_decl_component_p (destvar))
/* Build the scalar assignment, converting between the source and
   destination types as needed.  */
8811       if (srctype == desttype
8812 	  || (gimple_in_ssa_p (cfun)
8813 	      && useless_type_conversion_p (desttype, srctype)))
8815       else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8816 		|| POINTER_TYPE_P (TREE_TYPE (srcvar)))
8817 	       && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8818 		   || POINTER_TYPE_P (TREE_TYPE (destvar))))
8819 	expr = fold_convert (TREE_TYPE (destvar), srcvar);
8821 	expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8822       expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
/* Produce the ENDP-specific return value (see function comment).  */
8828   if (endp == 0 || endp == 3)
8829     return omit_one_operand (type, dest, expr);
8835     len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8838   dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8839   dest = fold_convert (type, dest);
8841     dest = omit_one_operand (type, dest, expr);
8845 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8846    If LEN is not NULL, it represents the length of the string to be
8847    copied.  Return NULL_TREE if no simplification can be made.
   NOTE(review): chunk is sampled; elided lines omitted.  */
8850 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8854   if (!validate_arg (dest, POINTER_TYPE)
8855       || !validate_arg (src, POINTER_TYPE))
8858   /* If SRC and DEST are the same (and not volatile), return DEST. */
8859   if (operand_equal_p (src, dest, 0))
8860     return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
/* Transform into memcpy when the source length is known.  */
8865   fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8871       len = c_strlen (src, 1);
8872       if (! len || TREE_SIDE_EFFECTS (len))
/* Copy the terminating NUL as well: length + 1 bytes.  */
8876   len = size_binop (PLUS_EXPR, len, ssize_int (1));
8877   return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8878 		       build_call_expr (fn, 3, dest, src, len));
8881 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8882    If SLEN is not NULL, it represents the length of the source string.
8883    Return NULL_TREE if no simplification can be made.
   NOTE(review): chunk is sampled; elided lines omitted.  */
8886 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8890   if (!validate_arg (dest, POINTER_TYPE)
8891       || !validate_arg (src, POINTER_TYPE)
8892       || !validate_arg (len, INTEGER_TYPE))
8895   /* If the LEN parameter is zero, return DEST. */
8896   if (integer_zerop (len))
8897     return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8899   /* We can't compare slen with len as constants below if len is not a
8901   if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8905     slen = c_strlen (src, 1);
8907   /* Now, we must be passed a constant src ptr parameter. */
8908   if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
/* Account for the NUL terminator in the source length.  */
8911   slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8913   /* We do not support simplification of this case, though we do
8914      support it when expanding trees into RTL. */
8915   /* FIXME: generate a call to __builtin_memset. */
8916   if (tree_int_cst_lt (slen, len))
8919   /* OK transform into builtin memcpy. */
8920   fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8923   return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8924 		       build_call_expr (fn, 3, dest, src, len));
8927 /* Fold function call to builtin memchr.  ARG1, ARG2 and LEN are the
8928    arguments to the call, and TYPE is its return type.
8929    Return NULL_TREE if no simplification can be made.
   NOTE(review): chunk is sampled; elided lines omitted.  */
8932 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8934   if (!validate_arg (arg1, POINTER_TYPE)
8935       || !validate_arg (arg2, INTEGER_TYPE)
8936       || !validate_arg (len, INTEGER_TYPE))
/* Need a constant search character and a host-representable length.  */
8942       if (TREE_CODE (arg2) != INTEGER_CST
8943 	  || !host_integerp (len, 1))
8946       p1 = c_getstr (arg1);
8947       if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
/* Map ARG2 to the target character set before searching.  */
8953 	  if (target_char_cast (arg2, &c))
8956 	  r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8959 	    return build_int_cst (TREE_TYPE (arg1), 0);
/* Found: return ARG1 advanced by the match offset.  */
8961 	  tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8963 	  return fold_convert (type, tem);
8969 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8970    Return NULL_TREE if no simplification can be made.
   NOTE(review): chunk is sampled; elided lines omitted.  */
8973 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8975   const char *p1, *p2;
8977   if (!validate_arg (arg1, POINTER_TYPE)
8978       || !validate_arg (arg2, POINTER_TYPE)
8979       || !validate_arg (len, INTEGER_TYPE))
8982   /* If the LEN parameter is zero, return zero. */
8983   if (integer_zerop (len))
8984     return omit_two_operands (integer_type_node, integer_zero_node,
8987   /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8988   if (operand_equal_p (arg1, arg2, 0))
8989     return omit_one_operand (integer_type_node, integer_zero_node, len);
8991   p1 = c_getstr (arg1);
8992   p2 = c_getstr (arg2);
8994   /* If all arguments are constant, and the value of len is not greater
8995      than the lengths of arg1 and arg2, evaluate at compile-time. */
8996   if (host_integerp (len, 1) && p1 && p2
8997       && compare_tree_int (len, strlen (p1) + 1) <= 0
8998       && compare_tree_int (len, strlen (p2) + 1) <= 0)
9000       const int r = memcmp (p1, p2, tree_low_cst (len, 1));
/* Canonicalize the host memcmp result to -1 / 0 / 1.  */
9003 	return integer_one_node;
9005 	return integer_minus_one_node;
9007 	return integer_zero_node;
9010   /* If len parameter is one, return an expression corresponding to
9011      (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9012   if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9014       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9015       tree cst_uchar_ptr_node
9016 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9018       tree ind1 = fold_convert (integer_type_node,
9019 				build1 (INDIRECT_REF, cst_uchar_node,
9020 					fold_convert (cst_uchar_ptr_node,
9022       tree ind2 = fold_convert (integer_type_node,
9023 				build1 (INDIRECT_REF, cst_uchar_node,
9024 					fold_convert (cst_uchar_ptr_node,
9026       return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9032 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9033    Return NULL_TREE if no simplification can be made.
   NOTE(review): chunk is sampled; elided lines omitted.  */
9036 fold_builtin_strcmp (tree arg1, tree arg2)
9038   const char *p1, *p2;
9040   if (!validate_arg (arg1, POINTER_TYPE)
9041       || !validate_arg (arg2, POINTER_TYPE))
9044   /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9045   if (operand_equal_p (arg1, arg2, 0))
9046     return integer_zero_node;
9048   p1 = c_getstr (arg1);
9049   p2 = c_getstr (arg2);
/* Both strings constant: evaluate at compile time, canonicalizing the
   host strcmp result to -1 / 0 / 1.  */
9053       const int i = strcmp (p1, p2);
9055 	return integer_minus_one_node;
9057 	return integer_one_node;
9059 	return integer_zero_node;
9062   /* If the second arg is "", return *(const unsigned char*)arg1.  */
9063   if (p2 && *p2 == '\0')
9065       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9066       tree cst_uchar_ptr_node
9067 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9069       return fold_convert (integer_type_node,
9070 			   build1 (INDIRECT_REF, cst_uchar_node,
9071 				   fold_convert (cst_uchar_ptr_node,
9075   /* If the first arg is "", return -*(const unsigned char*)arg2.  */
9076   if (p1 && *p1 == '\0')
9078       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9079       tree cst_uchar_ptr_node
9080 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9082       tree temp = fold_convert (integer_type_node,
9083 				build1 (INDIRECT_REF, cst_uchar_node,
9084 					fold_convert (cst_uchar_ptr_node,
9086       return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9092 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9093    Return NULL_TREE if no simplification can be made.
   NOTE(review): chunk is sampled; elided lines omitted.  */
9096 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9098   const char *p1, *p2;
9100   if (!validate_arg (arg1, POINTER_TYPE)
9101       || !validate_arg (arg2, POINTER_TYPE)
9102       || !validate_arg (len, INTEGER_TYPE))
9105   /* If the LEN parameter is zero, return zero. */
9106   if (integer_zerop (len))
9107     return omit_two_operands (integer_type_node, integer_zero_node,
9110   /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9111   if (operand_equal_p (arg1, arg2, 0))
9112     return omit_one_operand (integer_type_node, integer_zero_node, len);
9114   p1 = c_getstr (arg1);
9115   p2 = c_getstr (arg2);
/* All-constant case: evaluate with the host strncmp and canonicalize
   the result to -1 / 0 / 1.  */
9117   if (host_integerp (len, 1) && p1 && p2)
9119       const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9121 	return integer_one_node;
9123 	return integer_minus_one_node;
9125 	return integer_zero_node;
9128   /* If the second arg is "", and the length is greater than zero,
9129      return *(const unsigned char*)arg1.  */
9130   if (p2 && *p2 == '\0'
9131       && TREE_CODE (len) == INTEGER_CST
9132       && tree_int_cst_sgn (len) == 1)
9134       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9135       tree cst_uchar_ptr_node
9136 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9138       return fold_convert (integer_type_node,
9139 			   build1 (INDIRECT_REF, cst_uchar_node,
9140 				   fold_convert (cst_uchar_ptr_node,
9144   /* If the first arg is "", and the length is greater than zero,
9145      return -*(const unsigned char*)arg2.  */
9146   if (p1 && *p1 == '\0'
9147       && TREE_CODE (len) == INTEGER_CST
9148       && tree_int_cst_sgn (len) == 1)
9150       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9151       tree cst_uchar_ptr_node
9152 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9154       tree temp = fold_convert (integer_type_node,
9155 				build1 (INDIRECT_REF, cst_uchar_node,
9156 					fold_convert (cst_uchar_ptr_node,
9158       return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9161   /* If len parameter is one, return an expression corresponding to
9162      (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9163   if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9165       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9166       tree cst_uchar_ptr_node
9167 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9169       tree ind1 = fold_convert (integer_type_node,
9170 				build1 (INDIRECT_REF, cst_uchar_node,
9171 					fold_convert (cst_uchar_ptr_node,
9173       tree ind2 = fold_convert (integer_type_node,
9174 				build1 (INDIRECT_REF, cst_uchar_node,
9175 					fold_convert (cst_uchar_ptr_node,
9177       return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9183 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9184    ARG.  Return NULL_TREE if no simplification can be made.
   NOTE(review): chunk is sampled; elided lines omitted.  */
9187 fold_builtin_signbit (tree arg, tree type)
9191   if (!validate_arg (arg, REAL_TYPE))
9194   /* If ARG is a compile-time constant, determine the result. */
9195   if (TREE_CODE (arg) == REAL_CST
9196       && !TREE_OVERFLOW (arg))
9200       c = TREE_REAL_CST (arg);
9201       temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9202       return fold_convert (type, temp);
9205   /* If ARG is non-negative, the result is always zero. */
9206   if (tree_expr_nonnegative_p (arg))
9207     return omit_one_operand (type, integer_zero_node, arg);
9209   /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9210   if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9211     return fold_build2 (LT_EXPR, type, arg,
9212 			build_real (TREE_TYPE (arg), dconst0));
9217 /* Fold function call to builtin copysign, copysignf or copysignl with
9218    arguments ARG1 and ARG2.  Return NULL_TREE if no simplification can
   NOTE(review): chunk is sampled; elided lines omitted.  */
9222 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9226   if (!validate_arg (arg1, REAL_TYPE)
9227       || !validate_arg (arg2, REAL_TYPE))
9230   /* copysign(X,X) is X. */
9231   if (operand_equal_p (arg1, arg2, 0))
9232     return fold_convert (type, arg1);
9234   /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9235   if (TREE_CODE (arg1) == REAL_CST
9236       && TREE_CODE (arg2) == REAL_CST
9237       && !TREE_OVERFLOW (arg1)
9238       && !TREE_OVERFLOW (arg2))
9240       REAL_VALUE_TYPE c1, c2;
9242       c1 = TREE_REAL_CST (arg1);
9243       c2 = TREE_REAL_CST (arg2);
9244       /* c1.sign := c2.sign. */
9245       real_copysign (&c1, &c2);
9246       return build_real (type, c1);
9249   /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9250      Remember to evaluate Y for side-effects. */
9251   if (tree_expr_nonnegative_p (arg2))
9252     return omit_one_operand (type,
9253 			     fold_build1 (ABS_EXPR, type, arg1),
9256   /* Strip sign changing operations for the first argument. */
9257   tem = fold_strip_sign_ops (arg1);
9259     return build_call_expr (fndecl, 2, tem, arg2);
9264 /* Fold a call to builtin isascii with argument ARG. */
/* NOTE(review): elided listing — header/braces/fall-through not visible.  */
9267 fold_builtin_isascii (tree arg)
9269 if (!validate_arg (arg, INTEGER_TYPE))
9273 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
/* Mask off the low 7 bits; any remaining set bit means non-ASCII.  */
9274 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9275 build_int_cst (NULL_TREE,
9276 ~ (unsigned HOST_WIDE_INT) 0x7f));
9277 return fold_build2 (EQ_EXPR, integer_type_node,
9278 arg, integer_zero_node);
9282 /* Fold a call to builtin toascii with argument ARG. */
/* NOTE(review): elided listing — header/braces/fall-through not visible.  */
9285 fold_builtin_toascii (tree arg)
9287 if (!validate_arg (arg, INTEGER_TYPE))
9290 /* Transform toascii(c) -> (c & 0x7f). */
9291 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9292 build_int_cst (NULL_TREE, 0x7f));
9295 /* Fold a call to builtin isdigit with argument ARG. */
/* NOTE(review): elided listing — header/braces/fall-through not visible.  */
9298 fold_builtin_isdigit (tree arg)
9300 if (!validate_arg (arg, INTEGER_TYPE))
9304 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9305 /* According to the C standard, isdigit is unaffected by locale.
9306 However, it definitely is affected by the target character set. */
9307 unsigned HOST_WIDE_INT target_digit0
9308 = lang_hooks.to_target_charset ('0');
/* If the target '0' is unknown, give up (return elided at 9311).  */
9310 if (target_digit0 == 0)
/* Unsigned subtraction makes the range check a single comparison:
   values below '0' wrap around to large unsigned numbers.  */
9313 arg = fold_convert (unsigned_type_node, arg);
9314 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9315 build_int_cst (unsigned_type_node, target_digit0));
9316 return fold_build2 (LE_EXPR, integer_type_node, arg,
9317 build_int_cst (unsigned_type_node, 9));
9321 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
/* NOTE(review): elided listing — header/braces/fall-through not visible.  */
9324 fold_builtin_fabs (tree arg, tree type)
9326 if (!validate_arg (arg, REAL_TYPE))
9329 arg = fold_convert (type, arg);
/* Constant-fold when ARG is a literal; otherwise emit ABS_EXPR.  */
9330 if (TREE_CODE (arg) == REAL_CST)
9331 return fold_abs_const (arg, type);
9332 return fold_build1 (ABS_EXPR, type, arg);
9335 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
/* NOTE(review): elided listing — header/braces/fall-through not visible.  */
9338 fold_builtin_abs (tree arg, tree type)
9340 if (!validate_arg (arg, INTEGER_TYPE))
9343 arg = fold_convert (type, arg);
/* Constant-fold when ARG is a literal; otherwise emit ABS_EXPR.  */
9344 if (TREE_CODE (arg) == INTEGER_CST)
9345 return fold_abs_const (arg, type);
9346 return fold_build1 (ABS_EXPR, type, arg);
9349 /* Fold a call to builtin fmin or fmax. */
/* MAX selects fmax semantics; otherwise fmin.  Returns NULL_TREE when no
   simplification applies.
   NOTE(review): elided listing — header, braces and fall-throughs missing.  */
9352 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9354 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9356 /* Calculate the result when the argument is a constant. */
9357 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9362 /* If either argument is NaN, return the other one. Avoid the
9363 transformation if we get (and honor) a signalling NaN. Using
9364 omit_one_operand() ensures we create a non-lvalue. */
9365 if (TREE_CODE (arg0) == REAL_CST
9366 && real_isnan (&TREE_REAL_CST (arg0))
9367 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9368 || ! TREE_REAL_CST (arg0).signalling))
9369 return omit_one_operand (type, arg1, arg0);
9370 if (TREE_CODE (arg1) == REAL_CST
9371 && real_isnan (&TREE_REAL_CST (arg1))
9372 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9373 || ! TREE_REAL_CST (arg1).signalling))
9374 return omit_one_operand (type, arg0, arg1);
9376 /* Transform fmin/fmax(x,x) -> x. */
9377 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9378 return omit_one_operand (type, arg0, arg1);
9380 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9381 functions to return the numeric arg if the other one is NaN.
9382 These tree codes don't honor that, so only transform if
9383 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9384 handled, so we don't have to worry about it either. */
9385 if (flag_finite_math_only)
9386 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9387 fold_convert (type, arg0),
9388 fold_convert (type, arg1));
9393 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
/* NOTE(review): elided listing — header/braces/fall-through not visible.  */
9396 fold_builtin_carg (tree arg, tree type)
9398 if (validate_arg (arg, COMPLEX_TYPE))
/* Need an atan2 builtin of the matching real TYPE (NULL check elided).  */
9400 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
/* Save ARG so real and imaginary parts evaluate it only once.  */
9404 tree new_arg = builtin_save_expr (arg);
9405 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9406 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9407 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9414 /* Fold a call to builtin logb/ilogb. */
/* RETTYPE distinguishes logb (REAL_TYPE) from ilogb (integer).
   NOTE(review): elided listing — the switch over the value class
   (rvc_nan/rvc_inf/rvc_zero/rvc_normal) is only partially visible;
   confirm case structure against the full source.  */
9417 fold_builtin_logb (tree arg, tree rettype)
9419 if (! validate_arg (arg, REAL_TYPE))
9424 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9426 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9432 /* If arg is Inf or NaN and we're logb, return it. */
9433 if (TREE_CODE (rettype) == REAL_TYPE)
9434 return fold_convert (rettype, arg);
9435 /* Fall through... */
9437 /* Zero may set errno and/or raise an exception for logb, also
9438 for ilogb we don't know FP_ILOGB0. */
9441 /* For normal numbers, proceed iff radix == 2. In GCC,
9442 normalized significands are in the range [0.5, 1.0). We
9443 want the exponent as if they were [1.0, 2.0) so get the
9444 exponent and subtract 1. */
9445 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9446 return fold_convert (rettype, build_int_cst (NULL_TREE,
9447 REAL_EXP (value)-1));
9455 /* Fold a call to builtin significand, if radix == 2. */
/* NOTE(review): elided listing — value-class switch cases and braces are
   partially missing; confirm against the full source.  */
9458 fold_builtin_significand (tree arg, tree rettype)
9460 if (! validate_arg (arg, REAL_TYPE))
9465 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9467 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9474 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9475 return fold_convert (rettype, arg);
9477 /* For normal numbers, proceed iff radix == 2. */
9478 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9480 REAL_VALUE_TYPE result = *value;
9481 /* In GCC, normalized significands are in the range [0.5,
9482 1.0). We want them to be [1.0, 2.0) so set the
/* Force the exponent to 1, rescaling the value into [1.0, 2.0).  */
9484 SET_REAL_EXP (&result, 1);
9485 return build_real (rettype, result);
9494 /* Fold a call to builtin frexp, we can assume the base is 2. */
/* ARG1 is the int* exponent out-parameter; RETTYPE is the real return type.
   NOTE(review): elided listing — the switch over value->cl and several
   braces are missing; confirm case structure against the full source.  */
9497 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9499 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
/* Only fold when ARG0 is a non-overflowed real constant.  */
9504 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9507 arg1 = build_fold_indirect_ref (arg1);
9509 /* Proceed if a valid pointer type was passed in. */
9510 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9512 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9518 /* For +-0, return (*exp = 0, +-0). */
9519 exp = integer_zero_node;
9524 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9525 return omit_one_operand (rettype, arg0, arg1);
9528 /* Since the frexp function always expects base 2, and in
9529 GCC normalized significands are already in the range
9530 [0.5, 1.0), we have exactly what frexp wants. */
9531 REAL_VALUE_TYPE frac_rvt = *value;
9532 SET_REAL_EXP (&frac_rvt, 0);
9533 frac = build_real (rettype, frac_rvt);
9534 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9541 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9542 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9543 TREE_SIDE_EFFECTS (arg1) = 1;
9544 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9550 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9551 then we can assume the base is two. If it's false, then we have to
9552 check the mode of the TYPE parameter in certain cases. */
/* NOTE(review): elided listing — braces and the final NULL_TREE return are
   not visible; confirm nesting against the full source.  */
9555 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9557 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9562 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9563 if (real_zerop (arg0) || integer_zerop (arg1)
9564 || (TREE_CODE (arg0) == REAL_CST
9565 && !real_isfinite (&TREE_REAL_CST (arg0))))
9566 return omit_one_operand (type, arg0, arg1);
9568 /* If both arguments are constant, then try to evaluate it. */
/* scalbn/scalbln only fold when the mode's radix is 2 (matches ldexp).  */
9569 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9570 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9571 && host_integerp (arg1, 0))
9573 /* Bound the maximum adjustment to twice the range of the
9574 mode's valid exponents. Use abs to ensure the range is
9575 positive as a sanity check. */
9576 const long max_exp_adj = 2 *
9577 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9578 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9580 /* Get the user-requested adjustment. */
9581 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9583 /* The requested adjustment must be inside this range. This
9584 is a preliminary cap to avoid things like overflow, we
9585 may still fail to compute the result for other reasons. */
9586 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9588 REAL_VALUE_TYPE initial_result;
9590 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9592 /* Ensure we didn't overflow. */
9593 if (! real_isinf (&initial_result))
9595 const REAL_VALUE_TYPE trunc_result
9596 = real_value_truncate (TYPE_MODE (type), initial_result);
9598 /* Only proceed if the target mode can hold the
/* ...result exactly (truncation must be lossless).  */
9600 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9601 return build_real (type, trunc_result);
9610 /* Fold a call to builtin modf. */
/* ARG1 is the pointer out-parameter receiving the integral part; RETTYPE is
   the real return type.  NOTE(review): elided listing — the switch over
   value->cl (zero/nan/inf/normal) is partially missing; confirm against the
   full source.  */
9613 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9615 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
/* Only fold when ARG0 is a non-overflowed real constant.  */
9620 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9623 arg1 = build_fold_indirect_ref (arg1);
9625 /* Proceed if a valid pointer type was passed in. */
9626 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9628 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9629 REAL_VALUE_TYPE trunc, frac;
9635 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9636 trunc = frac = *value;
9639 /* For +-Inf, return (*arg1 = arg0, +-0). */
9641 frac.sign = value->sign;
9645 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9646 real_trunc (&trunc, VOIDmode, value);
9647 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9648 /* If the original number was negative and already
9649 integral, then the fractional part is -0.0. */
9650 if (value->sign && frac.cl == rvc_zero)
9651 frac.sign = value->sign;
9655 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9656 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9657 build_real (rettype, trunc));
9658 TREE_SIDE_EFFECTS (arg1) = 1;
9659 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9660 build_real (rettype, frac));
9666 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9667 ARG is the argument for the call. */
/* BUILTIN_INDEX selects which classification to fold; returns NULL_TREE when
   no simplification applies.  NOTE(review): elided listing — braces,
   "break"/"return NULL_TREE" lines and the default case are not visible.  */
9670 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9672 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9675 if (!validate_arg (arg, REAL_TYPE))
9678 switch (builtin_index)
9680 case BUILT_IN_ISINF:
/* No infinities in this mode -> isinf is statically 0.  */
9681 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9682 return omit_one_operand (type, integer_zero_node, arg);
9684 if (TREE_CODE (arg) == REAL_CST)
9686 r = TREE_REAL_CST (arg);
9687 if (real_isinf (&r))
9688 return real_compare (GT_EXPR, &r, &dconst0)
9689 ? integer_one_node : integer_minus_one_node;
9691 return integer_zero_node;
9696 case BUILT_IN_ISINF_SIGN:
9698 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9699 /* In a boolean context, GCC will fold the inner COND_EXPR to
9700 1. So e.g. "if (isinf_sign(x))" would be folded to just
9701 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9702 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9703 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9704 tree tmp = NULL_TREE;
/* Save ARG: it is referenced by both the signbit and isinf calls.  */
9706 arg = builtin_save_expr (arg);
9708 if (signbit_fn && isinf_fn)
9710 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
9711 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
9713 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
9714 signbit_call, integer_zero_node);
9715 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
9716 isinf_call, integer_zero_node);
9718 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
9719 integer_minus_one_node, integer_one_node);
9720 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
9727 case BUILT_IN_ISFINITE:
/* Neither NaNs nor infinities honored -> everything is finite.  */
9728 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9729 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9730 return omit_one_operand (type, integer_one_node, arg);
9732 if (TREE_CODE (arg) == REAL_CST)
9734 r = TREE_REAL_CST (arg);
9735 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9740 case BUILT_IN_ISNAN:
9741 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9742 return omit_one_operand (type, integer_zero_node, arg);
9744 if (TREE_CODE (arg) == REAL_CST)
9746 r = TREE_REAL_CST (arg);
9747 return real_isnan (&r) ? integer_one_node : integer_zero_node;
/* isnan(x) -> x unordered with itself.  */
9750 arg = builtin_save_expr (arg);
9751 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9758 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9759 This builtin will generate code to return the appropriate floating
9760 point classification depending on the value of the floating point
9761 number passed in. The possible return values must be supplied as
9762 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9763 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9764 one floating point argument which is "type generic". */
/* NOTE(review): elided listing — declarations of r/buf, real_inf call before
   9809, and the final "return res" are not visible here.  */
9767 fold_builtin_fpclassify (tree exp)
9769 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9770 arg, type, res, tmp;
9771 enum machine_mode mode;
9775 /* Verify the required arguments in the original call. */
9776 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9777 INTEGER_TYPE, INTEGER_TYPE,
9778 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9781 fp_nan = CALL_EXPR_ARG (exp, 0);
9782 fp_infinite = CALL_EXPR_ARG (exp, 1);
9783 fp_normal = CALL_EXPR_ARG (exp, 2);
9784 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9785 fp_zero = CALL_EXPR_ARG (exp, 4);
9786 arg = CALL_EXPR_ARG (exp, 5);
9787 type = TREE_TYPE (arg);
9788 mode = TYPE_MODE (type);
/* Work on |arg| so every comparison below is against non-negative bounds.  */
9789 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
9793 (fabs(x) == Inf ? FP_INFINITE :
9794 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9795 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
/* Build the nest inside-out: zero/subnormal first, then normal, inf, nan.  */
9797 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9798 build_real (type, dconst0));
9799 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
/* Smallest normal number of MODE, written as a hex float literal.  */
9801 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9802 real_from_string (&r, buf);
9803 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
9804 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
9806 if (HONOR_INFINITIES (mode))
9809 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9810 build_real (type, r));
9811 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
9814 if (HONOR_NANS (mode))
/* ORDERED_EXPR is false exactly when arg is NaN.  */
9816 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
9817 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
9823 /* Fold a call to an unordered comparison function such as
9824 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9825 being called and ARG0 and ARG1 are the arguments for the call.
9826 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9827 the opposite of the desired result. UNORDERED_CODE is used
9828 for modes that can hold NaNs and ORDERED_CODE is used for
/* ...modes that cannot (continuation elided).
   NOTE(review): elided listing — header, the cmp_type assignments at
   9851/9853/9855 and error paths are partially missing.  */
9832 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9833 enum tree_code unordered_code,
9834 enum tree_code ordered_code)
9836 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9837 enum tree_code code;
9839 enum tree_code code0, code1;
9840 tree cmp_type = NULL_TREE;
9842 type0 = TREE_TYPE (arg0);
9843 type1 = TREE_TYPE (arg1);
9845 code0 = TREE_CODE (type0);
9846 code1 = TREE_CODE (type1);
9848 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9849 /* Choose the wider of two real types. */
9850 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9852 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9854 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
/* Bring both operands to the common comparison type.  */
9857 arg0 = fold_convert (cmp_type, arg0);
9858 arg1 = fold_convert (cmp_type, arg1);
9860 if (unordered_code == UNORDERED_EXPR)
/* isunordered: statically false when the mode has no NaNs.  */
9862 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9863 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9864 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
/* Emit the inverse comparison and negate it, picking the NaN-safe code
   only when the mode honors NaNs.  */
9867 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9869 return fold_build1 (TRUTH_NOT_EXPR, type,
9870 fold_build2 (code, type, arg0, arg1));
9873 /* Fold a call to built-in function FNDECL with 0 arguments.
9874 IGNORE is true if the result of the function call is ignored. This
9875 function returns NULL_TREE if no simplification was possible. */
/* NOTE(review): elided listing — "switch (fcode)", default case and the
   closing "return NULL_TREE" are not visible.  */
9878 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9880 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9881 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* inf/huge_val fold to an infinity constant; second argument selects
   warn-on-no-infinity behavior (true for INF, false for HUGE_VAL).  */
9884 CASE_FLT_FN (BUILT_IN_INF):
9885 case BUILT_IN_INFD32:
9886 case BUILT_IN_INFD64:
9887 case BUILT_IN_INFD128:
9888 return fold_builtin_inf (type, true);
9890 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9891 return fold_builtin_inf (type, false);
9893 case BUILT_IN_CLASSIFY_TYPE:
9894 return fold_builtin_classify_type (NULL_TREE);
9902 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9903 IGNORE is true if the result of the function call is ignored. This
9904 function returns NULL_TREE if no simplification was possible. */
/* Dispatch table: each case delegates to a dedicated fold_builtin_* helper
   or to an MPFR-based constant evaluator (do_mpfr_arg1 and friends).
   NOTE(review): elided listing — "switch (fcode)", many "break" lines,
   braces and the final NULL_TREE return are not visible here.  */
9907 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9909 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9910 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9914 case BUILT_IN_CONSTANT_P:
9916 tree val = fold_builtin_constant_p (arg0);
9918 /* Gimplification will pull the CALL_EXPR for the builtin out of
9919 an if condition. When not optimizing, we'll not CSE it back.
9920 To avoid link error types of regressions, return false now. */
9921 if (!val && !optimize)
9922 val = integer_zero_node;
9927 case BUILT_IN_CLASSIFY_TYPE:
9928 return fold_builtin_classify_type (arg0);
9930 case BUILT_IN_STRLEN:
9931 return fold_builtin_strlen (arg0);
9933 CASE_FLT_FN (BUILT_IN_FABS):
9934 return fold_builtin_fabs (arg0, type);
9938 case BUILT_IN_LLABS:
9939 case BUILT_IN_IMAXABS:
9940 return fold_builtin_abs (arg0, type);
/* Complex-number builtins.  */
9942 CASE_FLT_FN (BUILT_IN_CONJ):
9943 if (validate_arg (arg0, COMPLEX_TYPE))
9944 return fold_build1 (CONJ_EXPR, type, arg0);
9947 CASE_FLT_FN (BUILT_IN_CREAL):
9948 if (validate_arg (arg0, COMPLEX_TYPE))
9949 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
9952 CASE_FLT_FN (BUILT_IN_CIMAG):
9953 if (validate_arg (arg0, COMPLEX_TYPE))
9954 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9957 CASE_FLT_FN (BUILT_IN_CCOS):
9958 CASE_FLT_FN (BUILT_IN_CCOSH):
9959 /* These functions are "even", i.e. f(x) == f(-x). */
9960 if (validate_arg (arg0, COMPLEX_TYPE))
9962 tree narg = fold_strip_sign_ops (arg0);
9964 return build_call_expr (fndecl, 1, narg);
9968 CASE_FLT_FN (BUILT_IN_CABS):
9969 return fold_builtin_cabs (arg0, type, fndecl);
9971 CASE_FLT_FN (BUILT_IN_CARG):
9972 return fold_builtin_carg (arg0, type);
/* Real math functions: constant-fold via MPFR where a domain is known;
   the &dconst* pairs are [lower, upper] domain bounds (NULL = unbounded),
   the final flag selects inclusive bounds.  */
9974 CASE_FLT_FN (BUILT_IN_SQRT):
9975 return fold_builtin_sqrt (arg0, type);
9977 CASE_FLT_FN (BUILT_IN_CBRT):
9978 return fold_builtin_cbrt (arg0, type);
9980 CASE_FLT_FN (BUILT_IN_ASIN):
9981 if (validate_arg (arg0, REAL_TYPE))
9982 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9983 &dconstm1, &dconst1, true);
9986 CASE_FLT_FN (BUILT_IN_ACOS):
9987 if (validate_arg (arg0, REAL_TYPE))
9988 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9989 &dconstm1, &dconst1, true);
9992 CASE_FLT_FN (BUILT_IN_ATAN):
9993 if (validate_arg (arg0, REAL_TYPE))
9994 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9997 CASE_FLT_FN (BUILT_IN_ASINH):
9998 if (validate_arg (arg0, REAL_TYPE))
9999 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10002 CASE_FLT_FN (BUILT_IN_ACOSH):
10003 if (validate_arg (arg0, REAL_TYPE))
10004 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10005 &dconst1, NULL, true);
10008 CASE_FLT_FN (BUILT_IN_ATANH):
10009 if (validate_arg (arg0, REAL_TYPE))
10010 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10011 &dconstm1, &dconst1, false);
10014 CASE_FLT_FN (BUILT_IN_SIN):
10015 if (validate_arg (arg0, REAL_TYPE))
10016 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10019 CASE_FLT_FN (BUILT_IN_COS):
10020 return fold_builtin_cos (arg0, type, fndecl);
10023 CASE_FLT_FN (BUILT_IN_TAN):
10024 return fold_builtin_tan (arg0, type);
10026 CASE_FLT_FN (BUILT_IN_CEXP):
10027 return fold_builtin_cexp (arg0, type);
10029 CASE_FLT_FN (BUILT_IN_CEXPI):
10030 if (validate_arg (arg0, REAL_TYPE))
10031 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10034 CASE_FLT_FN (BUILT_IN_SINH):
10035 if (validate_arg (arg0, REAL_TYPE))
10036 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10039 CASE_FLT_FN (BUILT_IN_COSH):
10040 return fold_builtin_cosh (arg0, type, fndecl);
10042 CASE_FLT_FN (BUILT_IN_TANH):
10043 if (validate_arg (arg0, REAL_TYPE))
10044 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10047 CASE_FLT_FN (BUILT_IN_ERF):
10048 if (validate_arg (arg0, REAL_TYPE))
10049 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10052 CASE_FLT_FN (BUILT_IN_ERFC):
10053 if (validate_arg (arg0, REAL_TYPE))
10054 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10057 CASE_FLT_FN (BUILT_IN_TGAMMA):
10058 if (validate_arg (arg0, REAL_TYPE))
10059 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10062 CASE_FLT_FN (BUILT_IN_EXP):
10063 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10065 CASE_FLT_FN (BUILT_IN_EXP2):
10066 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10068 CASE_FLT_FN (BUILT_IN_EXP10):
10069 CASE_FLT_FN (BUILT_IN_POW10):
10070 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10072 CASE_FLT_FN (BUILT_IN_EXPM1):
10073 if (validate_arg (arg0, REAL_TYPE))
10074 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10077 CASE_FLT_FN (BUILT_IN_LOG):
10078 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10080 CASE_FLT_FN (BUILT_IN_LOG2):
10081 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10083 CASE_FLT_FN (BUILT_IN_LOG10):
10084 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10086 CASE_FLT_FN (BUILT_IN_LOG1P):
10087 if (validate_arg (arg0, REAL_TYPE))
10088 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10089 &dconstm1, NULL, false);
/* Bessel functions need MPFR >= 2.3.0.  */
10092 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10093 CASE_FLT_FN (BUILT_IN_J0):
10094 if (validate_arg (arg0, REAL_TYPE))
10095 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10099 CASE_FLT_FN (BUILT_IN_J1):
10100 if (validate_arg (arg0, REAL_TYPE))
10101 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10105 CASE_FLT_FN (BUILT_IN_Y0):
10106 if (validate_arg (arg0, REAL_TYPE))
10107 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10108 &dconst0, NULL, false);
10111 CASE_FLT_FN (BUILT_IN_Y1):
10112 if (validate_arg (arg0, REAL_TYPE))
10113 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10114 &dconst0, NULL, false);
10118 CASE_FLT_FN (BUILT_IN_NAN):
10119 case BUILT_IN_NAND32:
10120 case BUILT_IN_NAND64:
10121 case BUILT_IN_NAND128:
10122 return fold_builtin_nan (arg0, type, true);
10124 CASE_FLT_FN (BUILT_IN_NANS):
10125 return fold_builtin_nan (arg0, type, false);
/* Rounding family.  */
10127 CASE_FLT_FN (BUILT_IN_FLOOR):
10128 return fold_builtin_floor (fndecl, arg0);
10130 CASE_FLT_FN (BUILT_IN_CEIL):
10131 return fold_builtin_ceil (fndecl, arg0);
10133 CASE_FLT_FN (BUILT_IN_TRUNC):
10134 return fold_builtin_trunc (fndecl, arg0);
10136 CASE_FLT_FN (BUILT_IN_ROUND):
10137 return fold_builtin_round (fndecl, arg0);
10139 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10140 CASE_FLT_FN (BUILT_IN_RINT):
10141 return fold_trunc_transparent_mathfn (fndecl, arg0);
10143 CASE_FLT_FN (BUILT_IN_LCEIL):
10144 CASE_FLT_FN (BUILT_IN_LLCEIL):
10145 CASE_FLT_FN (BUILT_IN_LFLOOR):
10146 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10147 CASE_FLT_FN (BUILT_IN_LROUND):
10148 CASE_FLT_FN (BUILT_IN_LLROUND):
10149 return fold_builtin_int_roundingfn (fndecl, arg0);
10151 CASE_FLT_FN (BUILT_IN_LRINT):
10152 CASE_FLT_FN (BUILT_IN_LLRINT):
10153 return fold_fixed_mathfn (fndecl, arg0);
/* Integer bit-twiddling builtins.  */
10155 case BUILT_IN_BSWAP32:
10156 case BUILT_IN_BSWAP64:
10157 return fold_builtin_bswap (fndecl, arg0);
10159 CASE_INT_FN (BUILT_IN_FFS):
10160 CASE_INT_FN (BUILT_IN_CLZ):
10161 CASE_INT_FN (BUILT_IN_CTZ):
10162 CASE_INT_FN (BUILT_IN_POPCOUNT):
10163 CASE_INT_FN (BUILT_IN_PARITY):
10164 return fold_builtin_bitop (fndecl, arg0);
10166 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10167 return fold_builtin_signbit (arg0, type);
10169 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10170 return fold_builtin_significand (arg0, type);
10172 CASE_FLT_FN (BUILT_IN_ILOGB):
10173 CASE_FLT_FN (BUILT_IN_LOGB):
10174 return fold_builtin_logb (arg0, type);
10176 case BUILT_IN_ISASCII:
10177 return fold_builtin_isascii (arg0);
10179 case BUILT_IN_TOASCII:
10180 return fold_builtin_toascii (arg0);
10182 case BUILT_IN_ISDIGIT:
10183 return fold_builtin_isdigit (arg0);
/* FP classification builtins, including decimal-float variants.  */
10185 CASE_FLT_FN (BUILT_IN_FINITE):
10186 case BUILT_IN_FINITED32:
10187 case BUILT_IN_FINITED64:
10188 case BUILT_IN_FINITED128:
10189 case BUILT_IN_ISFINITE:
10190 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10192 CASE_FLT_FN (BUILT_IN_ISINF):
10193 case BUILT_IN_ISINFD32:
10194 case BUILT_IN_ISINFD64:
10195 case BUILT_IN_ISINFD128:
10196 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10198 case BUILT_IN_ISINF_SIGN:
10199 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10201 CASE_FLT_FN (BUILT_IN_ISNAN):
10202 case BUILT_IN_ISNAND32:
10203 case BUILT_IN_ISNAND64:
10204 case BUILT_IN_ISNAND128:
10205 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10207 case BUILT_IN_PRINTF:
10208 case BUILT_IN_PRINTF_UNLOCKED:
10209 case BUILT_IN_VPRINTF:
10210 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10220 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10221 IGNORE is true if the result of the function call is ignored. This
10222 function returns NULL_TREE if no simplification was possible. */
/* Two-argument dispatch, parallel in structure to fold_builtin_1.
   NOTE(review): elided listing — "switch (fcode)", many "break" lines and
   the final NULL_TREE return are not visible here.  */
10225 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10227 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10228 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* Bessel jn/yn need MPFR >= 2.3.0.  */
10232 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10233 CASE_FLT_FN (BUILT_IN_JN):
10234 if (validate_arg (arg0, INTEGER_TYPE)
10235 && validate_arg (arg1, REAL_TYPE))
10236 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10239 CASE_FLT_FN (BUILT_IN_YN):
10240 if (validate_arg (arg0, INTEGER_TYPE)
10241 && validate_arg (arg1, REAL_TYPE))
10242 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10246 CASE_FLT_FN (BUILT_IN_DREM):
10247 CASE_FLT_FN (BUILT_IN_REMAINDER):
10248 if (validate_arg (arg0, REAL_TYPE)
10249 && validate_arg(arg1, REAL_TYPE))
10250 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10253 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10254 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10255 if (validate_arg (arg0, REAL_TYPE)
10256 && validate_arg(arg1, POINTER_TYPE))
10257 return do_mpfr_lgamma_r (arg0, arg1, type);
10261 CASE_FLT_FN (BUILT_IN_ATAN2):
10262 if (validate_arg (arg0, REAL_TYPE)
10263 && validate_arg(arg1, REAL_TYPE))
10264 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10267 CASE_FLT_FN (BUILT_IN_FDIM):
10268 if (validate_arg (arg0, REAL_TYPE)
10269 && validate_arg(arg1, REAL_TYPE))
10270 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10273 CASE_FLT_FN (BUILT_IN_HYPOT):
10274 return fold_builtin_hypot (fndecl, arg0, arg1, type);
/* ldexp assumes radix 2; scalbn/scalbln must check the mode's radix.  */
10276 CASE_FLT_FN (BUILT_IN_LDEXP):
10277 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10278 CASE_FLT_FN (BUILT_IN_SCALBN):
10279 CASE_FLT_FN (BUILT_IN_SCALBLN):
10280 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10282 CASE_FLT_FN (BUILT_IN_FREXP):
10283 return fold_builtin_frexp (arg0, arg1, type);
10285 CASE_FLT_FN (BUILT_IN_MODF):
10286 return fold_builtin_modf (arg0, arg1, type);
10288 case BUILT_IN_BZERO:
10289 return fold_builtin_bzero (arg0, arg1, ignore);
10291 case BUILT_IN_FPUTS:
10292 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10294 case BUILT_IN_FPUTS_UNLOCKED:
10295 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
/* String builtins.  */
10297 case BUILT_IN_STRSTR:
10298 return fold_builtin_strstr (arg0, arg1, type);
10300 case BUILT_IN_STRCAT:
10301 return fold_builtin_strcat (arg0, arg1);
10303 case BUILT_IN_STRSPN:
10304 return fold_builtin_strspn (arg0, arg1);
10306 case BUILT_IN_STRCSPN:
10307 return fold_builtin_strcspn (arg0, arg1);
10309 case BUILT_IN_STRCHR:
10310 case BUILT_IN_INDEX:
10311 return fold_builtin_strchr (arg0, arg1, type);
10313 case BUILT_IN_STRRCHR:
10314 case BUILT_IN_RINDEX:
10315 return fold_builtin_strrchr (arg0, arg1, type);
10317 case BUILT_IN_STRCPY:
10318 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10320 case BUILT_IN_STRCMP:
10321 return fold_builtin_strcmp (arg0, arg1);
10323 case BUILT_IN_STRPBRK:
10324 return fold_builtin_strpbrk (arg0, arg1, type);
10326 case BUILT_IN_EXPECT:
10327 return fold_builtin_expect (arg0, arg1);
10329 CASE_FLT_FN (BUILT_IN_POW):
10330 return fold_builtin_pow (fndecl, arg0, arg1, type);
10332 CASE_FLT_FN (BUILT_IN_POWI):
10333 return fold_builtin_powi (fndecl, arg0, arg1, type);
10335 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10336 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10338 CASE_FLT_FN (BUILT_IN_FMIN):
10339 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10341 CASE_FLT_FN (BUILT_IN_FMAX):
10342 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
/* Unordered comparisons: the codes passed are the INVERSE of the desired
   test (see fold_builtin_unordered_cmp).  */
10344 case BUILT_IN_ISGREATER:
10345 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10346 case BUILT_IN_ISGREATEREQUAL:
10347 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10348 case BUILT_IN_ISLESS:
10349 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10350 case BUILT_IN_ISLESSEQUAL:
10351 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10352 case BUILT_IN_ISLESSGREATER:
10353 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10354 case BUILT_IN_ISUNORDERED:
10355 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10358 /* We do the folding for va_start in the expander. */
10359 case BUILT_IN_VA_START:
10362 case BUILT_IN_SPRINTF:
10363 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10365 case BUILT_IN_OBJECT_SIZE:
10366 return fold_builtin_object_size (arg0, arg1);
10368 case BUILT_IN_PRINTF:
10369 case BUILT_IN_PRINTF_UNLOCKED:
10370 case BUILT_IN_VPRINTF:
10371 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
/* *_chk variants: arg0 is the flag/object-size argument; refuse to fold
   when it has side effects or the wrong type.  */
10373 case BUILT_IN_PRINTF_CHK:
10374 case BUILT_IN_VPRINTF_CHK:
10375 if (!validate_arg (arg0, INTEGER_TYPE)
10376 || TREE_SIDE_EFFECTS (arg0))
10379 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10382 case BUILT_IN_FPRINTF:
10383 case BUILT_IN_FPRINTF_UNLOCKED:
10384 case BUILT_IN_VFPRINTF:
10385 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10394 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10395 and ARG2. IGNORE is true if the result of the function call is ignored.
10396 This function returns NULL_TREE if no simplification was possible. */
10399 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10401 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10402 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
      /* Dispatch on the builtin's function code; each arm delegates to a
         dedicated folder and returns NULL_TREE via fall-through when no
         simplification applies.  (Interior lines are elided in this
         listing.)  */
10406 CASE_FLT_FN (BUILT_IN_SINCOS):
10407 return fold_builtin_sincos (arg0, arg1, arg2);
10409 CASE_FLT_FN (BUILT_IN_FMA):
10410 if (validate_arg (arg0, REAL_TYPE)
10411 && validate_arg (arg1, REAL_TYPE)
10412 && validate_arg (arg2, REAL_TYPE))
10413 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
      /* mpfr_remquo is only available from MPFR 2.3.0 on.  */
10416 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10417 CASE_FLT_FN (BUILT_IN_REMQUO):
10418 if (validate_arg (arg0, REAL_TYPE)
10419 && validate_arg (arg1, REAL_TYPE)
10420 && validate_arg (arg2, POINTER_TYPE))
10421 return do_mpfr_remquo (arg0, arg1, arg2);
10425 case BUILT_IN_MEMSET:
10426 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
      /* bcopy (src, dst, len) swaps its operands relative to memmove.  */
10428 case BUILT_IN_BCOPY:
10429 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10431 case BUILT_IN_MEMCPY:
10432 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10434 case BUILT_IN_MEMPCPY:
10435 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10437 case BUILT_IN_MEMMOVE:
10438 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10440 case BUILT_IN_STRNCAT:
10441 return fold_builtin_strncat (arg0, arg1, arg2);
10443 case BUILT_IN_STRNCPY:
10444 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10446 case BUILT_IN_STRNCMP:
10447 return fold_builtin_strncmp (arg0, arg1, arg2);
10449 case BUILT_IN_MEMCHR:
10450 return fold_builtin_memchr (arg0, arg1, arg2, type);
10452 case BUILT_IN_BCMP:
10453 case BUILT_IN_MEMCMP:
10454 return fold_builtin_memcmp (arg0, arg1, arg2);
10456 case BUILT_IN_SPRINTF:
10457 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10459 case BUILT_IN_STRCPY_CHK:
10460 case BUILT_IN_STPCPY_CHK:
10461 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10464 case BUILT_IN_STRCAT_CHK:
10465 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
      /* For the _CHK printf variants, arg0 is the flag operand; it must be
         a side-effect-free integer before it can be dropped.  */
10467 case BUILT_IN_PRINTF_CHK:
10468 case BUILT_IN_VPRINTF_CHK:
10469 if (!validate_arg (arg0, INTEGER_TYPE)
10470 || TREE_SIDE_EFFECTS (arg0))
10473 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10476 case BUILT_IN_FPRINTF:
10477 case BUILT_IN_FPRINTF_UNLOCKED:
10478 case BUILT_IN_VFPRINTF:
10479 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10481 case BUILT_IN_FPRINTF_CHK:
10482 case BUILT_IN_VFPRINTF_CHK:
10483 if (!validate_arg (arg1, INTEGER_TYPE)
10484 || TREE_SIDE_EFFECTS (arg1))
10487 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10496 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10497 ARG2, and ARG3. IGNORE is true if the result of the function call is
10498 ignored. This function returns NULL_TREE if no simplification was
10502 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
      /* Dispatch on the builtin's function code.  (Switch head and braces
         are elided in this listing.)  */
10505 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10509 case BUILT_IN_MEMCPY_CHK:
10510 case BUILT_IN_MEMPCPY_CHK:
10511 case BUILT_IN_MEMMOVE_CHK:
10512 case BUILT_IN_MEMSET_CHK:
10513 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10515 DECL_FUNCTION_CODE (fndecl));
10517 case BUILT_IN_STRNCPY_CHK:
10518 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10520 case BUILT_IN_STRNCAT_CHK:
10521 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
      /* arg1 is the _CHK flag operand; it must be a side-effect-free
         integer before it can be dropped from the folded call.  */
10523 case BUILT_IN_FPRINTF_CHK:
10524 case BUILT_IN_VFPRINTF_CHK:
10525 if (!validate_arg (arg1, INTEGER_TYPE)
10526 || TREE_SIDE_EFFECTS (arg1))
10529 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10539 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10540 arguments, where NARGS <= 4. IGNORE is true if the result of the
10541 function call is ignored. This function returns NULL_TREE if no
10542 simplification was possible. Note that this only folds builtins with
10543 fixed argument patterns. Foldings that do varargs-to-varargs
10544 transformations, or that match calls with more than 4 arguments,
10545 need to be handled with fold_builtin_varargs instead. */
10547 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10550 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10552 tree ret = NULL_TREE;
      /* Dispatch on the argument count to the fixed-arity folder.
         (The switch head is elided in this listing.)  */
10557 ret = fold_builtin_0 (fndecl, ignore);
10560 ret = fold_builtin_1 (fndecl, args[0], ignore);
10563 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10566 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10569 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
      /* Wrap a successful fold in a no-warning NOP_EXPR so that removing
         the call does not trigger "statement without effect" warnings.  */
10577 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10578 TREE_NO_WARNING (ret) = 1;
10584 /* Builtins with folding operations that operate on "..." arguments
10585 need special handling; we need to store the arguments in a convenient
10586 data structure before attempting any folding. Fortunately there are
10587 only a few builtins that fall into this category. FNDECL is the
10588 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10589 result of the function call is ignored. */
10592 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10594 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10595 tree ret = NULL_TREE;
10599 case BUILT_IN_SPRINTF_CHK:
10600 case BUILT_IN_VSPRINTF_CHK:
10601 ret = fold_builtin_sprintf_chk (exp, fcode);
10604 case BUILT_IN_SNPRINTF_CHK:
10605 case BUILT_IN_VSNPRINTF_CHK:
10606 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10609 case BUILT_IN_FPCLASSIFY:
10610 ret = fold_builtin_fpclassify (exp);
      /* As in fold_builtin_n: wrap the result in a no-warning NOP_EXPR.  */
10618 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10619 TREE_NO_WARNING (ret) = 1;
10625 /* A wrapper function for builtin folding that prevents warnings for
10626 "statement without effect" and the like, caused by removing the
10627 call node earlier than the warning is generated. */
10630 fold_call_expr (tree exp, bool ignore)
10632 tree ret = NULL_TREE;
10633 tree fndecl = get_callee_fndecl (exp);
10635 && TREE_CODE (fndecl) == FUNCTION_DECL
10636 && DECL_BUILT_IN (fndecl)
10637 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10638 yet. Defer folding until we see all the arguments
10639 (after inlining). */
10640 && !CALL_EXPR_VA_ARG_PACK (exp))
10642 int nargs = call_expr_nargs (exp);
10644 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10645 instead last argument is __builtin_va_arg_pack (). Defer folding
10646 even in that case, until arguments are finalized. */
10647 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10649 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10651 && TREE_CODE (fndecl2) == FUNCTION_DECL
10652 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10653 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
      /* Machine-specific builtins go through the target hook.  */
10657 /* FIXME: Don't use a list in this interface. */
10658 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10659 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10662 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10664 tree *args = CALL_EXPR_ARGP (exp);
10665 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10668 ret = fold_builtin_varargs (fndecl, exp, ignore);
10671 /* Propagate location information from original call to
10672 expansion of builtin. Otherwise things like
10673 maybe_emit_chk_warning, that operate on the expansion
10674 of a builtin, will use the wrong location information. */
10675 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10677 tree realret = ret;
      /* Look through the no-warning NOP_EXPR wrapper added by the
         fold_builtin_* helpers before setting the location.  */
10678 if (TREE_CODE (ret) == NOP_EXPR)
10679 realret = TREE_OPERAND (ret, 0);
10680 if (CAN_HAVE_LOCATION_P (realret)
10681 && !EXPR_HAS_LOCATION (realret))
10682 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10692 /* Conveniently construct a function call expression. FNDECL names the
10693 function to be called and ARGLIST is a TREE_LIST of arguments. */
10696 build_function_call_expr (tree fndecl, tree arglist)
10698 tree fntype = TREE_TYPE (fndecl);
10699 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10700 int n = list_length (arglist);
      /* Flatten the TREE_LIST into a stack-allocated array so the
         array-based folder can be reused.  */
10701 tree *argarray = (tree *) alloca (n * sizeof (tree));
10704 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10705 argarray[i] = TREE_VALUE (arglist);
10706 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10709 /* Conveniently construct a function call expression. FNDECL names the
10710 function to be called, N is the number of arguments, and the "..."
10711 parameters are the argument expressions. */
10714 build_call_expr (tree fndecl, int n, ...)
10717 tree fntype = TREE_TYPE (fndecl);
10718 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10719 tree *argarray = (tree *) alloca (n * sizeof (tree));
      /* Collect the N variadic tree arguments into the array.  (The
         va_start/va_end lines are elided in this listing.)  */
10723 for (i = 0; i < n; i++)
10724 argarray[i] = va_arg (ap, tree);
10726 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10729 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10730 N arguments are passed in the array ARGARRAY. */
10733 fold_builtin_call_array (tree type,
10738 tree ret = NULL_TREE;
10742 if (TREE_CODE (fn) == ADDR_EXPR)
10744 tree fndecl = TREE_OPERAND (fn, 0);
10745 if (TREE_CODE (fndecl) == FUNCTION_DECL
10746 && DECL_BUILT_IN (fndecl))
10748 /* If last argument is __builtin_va_arg_pack (), arguments to this
10749 function are not finalized yet. Defer folding until they are. */
10750 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10752 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10754 && TREE_CODE (fndecl2) == FUNCTION_DECL
10755 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10756 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10757 return build_call_array (type, fn, n, argarray);
      /* Machine-specific builtins: the target hook still takes a
         TREE_LIST, so cons one up back-to-front.  */
10759 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10761 tree arglist = NULL_TREE;
10762 for (i = n - 1; i >= 0; i--)
10763 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10764 ret = targetm.fold_builtin (fndecl, arglist, false);
10768 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10770 /* First try the transformations that don't require consing up
10772 ret = fold_builtin_n (fndecl, argarray, n, false);
10777 /* If we got this far, we need to build an exp. */
10778 exp = build_call_array (type, fn, n, argarray);
10779 ret = fold_builtin_varargs (fndecl, exp, false);
10780 return ret ? ret : exp;
      /* Not a foldable builtin call: build the plain CALL_EXPR.  */
10784 return build_call_array (type, fn, n, argarray);
10787 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10788 along with N new arguments specified as the "..." parameters. SKIP
10789 is the number of arguments in EXP to be omitted. This function is used
10790 to do varargs-to-varargs transformations. */
10793 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10795 int oldnargs = call_expr_nargs (exp);
10796 int nargs = oldnargs - skip + n;
10797 tree fntype = TREE_TYPE (fndecl);
10798 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
      /* When new arguments are supplied, build a fresh buffer: first the N
         "..." arguments, then the surviving tail of EXP's arguments.  */
10806 buffer = XALLOCAVEC (tree, nargs);
10808 for (i = 0; i < n; i++)
10809 buffer[i] = va_arg (ap, tree);
10811 for (j = skip; j < oldnargs; j++, i++)
10812 buffer[i] = CALL_EXPR_ARG (exp, j);
      /* Otherwise (n == 0) reuse EXP's own argument array past SKIP.  */
10815 buffer = CALL_EXPR_ARGP (exp) + skip;
10817 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10820 /* Validate a single argument ARG against a tree code CODE representing
      the expected type.  POINTER_TYPE and INTEGER_TYPE accept any pointer
      or integral type respectively; other codes must match exactly.  */
10824 validate_arg (const_tree arg, enum tree_code code)
10828 else if (code == POINTER_TYPE)
10829 return POINTER_TYPE_P (TREE_TYPE (arg));
10830 else if (code == INTEGER_TYPE)
10831 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10832 return code == TREE_CODE (TREE_TYPE (arg));
10835 /* This function validates the types of a function call argument list
10836 against a specified list of tree_codes. If the last specifier is a 0,
10837 that represents an ellipsis, otherwise the last specifier must be a
10841 validate_arglist (const_tree callexpr, ...)
10843 enum tree_code code;
10846 const_call_expr_arg_iterator iter;
10849 va_start (ap, callexpr);
10850 init_const_call_expr_arg_iterator (callexpr, &iter);
      /* Walk the variadic specifier list in parallel with the call's
         arguments.  (The loop head is elided in this listing.)  */
10854 code = va_arg (ap, enum tree_code);
10858 /* This signifies an ellipsis, any further arguments are all ok. */
10862 /* This signifies an endlink, if no arguments remain, return
10863 true, otherwise return false. */
10864 res = !more_const_call_expr_args_p (&iter);
10867 /* If no parameters remain or the parameter's code does not
10868 match the specified code, return false. Otherwise continue
10869 checking any remaining arguments. */
10870 arg = next_const_call_expr_arg (&iter);
10871 if (!validate_arg (arg, code))
10878 /* We need gotos here since we can only have one VA_CLOSE in a
10886 /* Default target-specific builtin expander that does nothing.
      Targets that define no md builtins use this as the expand_builtin
      hook; all parameters are ignored.  */
10889 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10890 rtx target ATTRIBUTE_UNUSED,
10891 rtx subtarget ATTRIBUTE_UNUSED,
10892 enum machine_mode mode ATTRIBUTE_UNUSED,
10893 int ignore ATTRIBUTE_UNUSED)
10898 /* Returns true if EXP represents data that would potentially reside
10899 in a readonly section. */
10902 readonly_data_expr (tree exp)
10906 if (TREE_CODE (exp) != ADDR_EXPR)
10909 exp = get_base_address (TREE_OPERAND (exp, 0));
10913 /* Make sure we call decl_readonly_section only for trees it
10914 can handle (since it returns true for everything it doesn't
10916 if (TREE_CODE (exp) == STRING_CST
10917 || TREE_CODE (exp) == CONSTRUCTOR
10918 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10919 return decl_readonly_section (exp, 0);
10924 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10925 to the call, and TYPE is its return type.
10927 Return NULL_TREE if no simplification was possible, otherwise return the
10928 simplified form of the call as a tree.
10930 The simplified form may be a constant or other expression which
10931 computes the same value, but in a more efficient manner (including
10932 calls to other builtin functions).
10934 The call may contain arguments which need to be evaluated, but
10935 which are not useful to determine the result of the call. In
10936 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10937 COMPOUND_EXPR will be an argument which must be evaluated.
10938 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10939 COMPOUND_EXPR in the chain will contain the tree for the simplified
10940 form of the builtin function call. */
10943 fold_builtin_strstr (tree s1, tree s2, tree type)
10945 if (!validate_arg (s1, POINTER_TYPE)
10946 || !validate_arg (s2, POINTER_TYPE))
10951 const char *p1, *p2;
10953 p2 = c_getstr (s2);
10957 p1 = c_getstr (s1);
      /* Both strings are compile-time constants: do the search now with
         the host strstr.  */
10960 const char *r = strstr (p1, p2);
10964 return build_int_cst (TREE_TYPE (s1), 0);
10966 /* Return an offset into the constant string argument. */
10967 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10968 s1, size_int (r - p1));
10969 return fold_convert (type, tem);
10972 /* The argument is const char *, and the result is char *, so we need
10973 a type conversion here to avoid a warning. */
10975 return fold_convert (type, s1);
10980 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10984 /* New argument list transforming strstr(s1, s2) to
10985 strchr(s1, s2[0]). */
10986 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10990 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10991 the call, and TYPE is its return type.
10993 Return NULL_TREE if no simplification was possible, otherwise return the
10994 simplified form of the call as a tree.
10996 The simplified form may be a constant or other expression which
10997 computes the same value, but in a more efficient manner (including
10998 calls to other builtin functions).
11000 The call may contain arguments which need to be evaluated, but
11001 which are not useful to determine the result of the call. In
11002 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11003 COMPOUND_EXPR will be an argument which must be evaluated.
11004 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11005 COMPOUND_EXPR in the chain will contain the tree for the simplified
11006 form of the builtin function call. */
11009 fold_builtin_strchr (tree s1, tree s2, tree type)
11011 if (!validate_arg (s1, POINTER_TYPE)
11012 || !validate_arg (s2, INTEGER_TYPE))
11018 if (TREE_CODE (s2) != INTEGER_CST)
11021 p1 = c_getstr (s1);
      /* Convert S2 to a host character; target_char_cast returns nonzero
         on failure.  */
11028 if (target_char_cast (s2, &c))
11031 r = strchr (p1, c);
11034 return build_int_cst (TREE_TYPE (s1), 0);
11036 /* Return an offset into the constant string argument. */
11037 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11038 s1, size_int (r - p1));
11039 return fold_convert (type, tem);
11045 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11046 the call, and TYPE is its return type.
11048 Return NULL_TREE if no simplification was possible, otherwise return the
11049 simplified form of the call as a tree.
11051 The simplified form may be a constant or other expression which
11052 computes the same value, but in a more efficient manner (including
11053 calls to other builtin functions).
11055 The call may contain arguments which need to be evaluated, but
11056 which are not useful to determine the result of the call. In
11057 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11058 COMPOUND_EXPR will be an argument which must be evaluated.
11059 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11060 COMPOUND_EXPR in the chain will contain the tree for the simplified
11061 form of the builtin function call. */
11064 fold_builtin_strrchr (tree s1, tree s2, tree type)
11066 if (!validate_arg (s1, POINTER_TYPE)
11067 || !validate_arg (s2, INTEGER_TYPE))
11074 if (TREE_CODE (s2) != INTEGER_CST)
11077 p1 = c_getstr (s1);
11084 if (target_char_cast (s2, &c))
      /* Constant string: do the reverse search at compile time.  */
11087 r = strrchr (p1, c);
11090 return build_int_cst (TREE_TYPE (s1), 0);
11092 /* Return an offset into the constant string argument. */
11093 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11094 s1, size_int (r - p1));
11095 return fold_convert (type, tem);
      /* Only the '\0' case can be rewritten as strchr; for any other
         non-constant-string search character, keep strrchr.  */
11098 if (! integer_zerop (s2))
11101 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11105 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11106 return build_call_expr (fn, 2, s1, s2);
11110 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11111 to the call, and TYPE is its return type.
11113 Return NULL_TREE if no simplification was possible, otherwise return the
11114 simplified form of the call as a tree.
11116 The simplified form may be a constant or other expression which
11117 computes the same value, but in a more efficient manner (including
11118 calls to other builtin functions).
11120 The call may contain arguments which need to be evaluated, but
11121 which are not useful to determine the result of the call. In
11122 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11123 COMPOUND_EXPR will be an argument which must be evaluated.
11124 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11125 COMPOUND_EXPR in the chain will contain the tree for the simplified
11126 form of the builtin function call. */
11129 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11131 if (!validate_arg (s1, POINTER_TYPE)
11132 || !validate_arg (s2, POINTER_TYPE))
11137 const char *p1, *p2;
11139 p2 = c_getstr (s2);
11143 p1 = c_getstr (s1);
      /* Both strings constant: evaluate at compile time.  */
11146 const char *r = strpbrk (p1, p2);
11150 return build_int_cst (TREE_TYPE (s1), 0);
11152 /* Return an offset into the constant string argument. */
11153 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11154 s1, size_int (r - p1));
11155 return fold_convert (type, tem);
11159 /* strpbrk(x, "") == NULL.
11160 Evaluate and ignore s1 in case it had side-effects. */
11161 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11164 return NULL_TREE; /* Really call strpbrk. */
11166 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11170 /* New argument list transforming strpbrk(s1, s2) to
11171 strchr(s1, s2[0]). */
11172 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11176 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11179 Return NULL_TREE if no simplification was possible, otherwise return the
11180 simplified form of the call as a tree.
11182 The simplified form may be a constant or other expression which
11183 computes the same value, but in a more efficient manner (including
11184 calls to other builtin functions).
11186 The call may contain arguments which need to be evaluated, but
11187 which are not useful to determine the result of the call. In
11188 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11189 COMPOUND_EXPR will be an argument which must be evaluated.
11190 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11191 COMPOUND_EXPR in the chain will contain the tree for the simplified
11192 form of the builtin function call. */
11195 fold_builtin_strcat (tree dst, tree src)
11197 if (!validate_arg (dst, POINTER_TYPE)
11198 || !validate_arg (src, POINTER_TYPE))
11202 const char *p = c_getstr (src);
11204 /* If the string length is zero, return the dst parameter. */
11205 if (p && *p == '\0')
11212 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11213 arguments to the call.
11215 Return NULL_TREE if no simplification was possible, otherwise return the
11216 simplified form of the call as a tree.
11218 The simplified form may be a constant or other expression which
11219 computes the same value, but in a more efficient manner (including
11220 calls to other builtin functions).
11222 The call may contain arguments which need to be evaluated, but
11223 which are not useful to determine the result of the call. In
11224 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11225 COMPOUND_EXPR will be an argument which must be evaluated.
11226 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11227 COMPOUND_EXPR in the chain will contain the tree for the simplified
11228 form of the builtin function call. */
11231 fold_builtin_strncat (tree dst, tree src, tree len)
11233 if (!validate_arg (dst, POINTER_TYPE)
11234 || !validate_arg (src, POINTER_TYPE)
11235 || !validate_arg (len, INTEGER_TYPE))
11239 const char *p = c_getstr (src);
11241 /* If the requested length is zero, or the src parameter string
11242 length is zero, return the dst parameter. */
11243 if (integer_zerop (len) || (p && *p == '\0'))
11244 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11246 /* If the requested len is greater than or equal to the string
11247 length, call strcat. */
11248 if (TREE_CODE (len) == INTEGER_CST && p
11249 && compare_tree_int (len, strlen (p)) >= 0)
11251 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11253 /* If the replacement _DECL isn't initialized, don't do the
11258 return build_call_expr (fn, 2, dst, src);
11264 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11267 Return NULL_TREE if no simplification was possible, otherwise return the
11268 simplified form of the call as a tree.
11270 The simplified form may be a constant or other expression which
11271 computes the same value, but in a more efficient manner (including
11272 calls to other builtin functions).
11274 The call may contain arguments which need to be evaluated, but
11275 which are not useful to determine the result of the call. In
11276 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11277 COMPOUND_EXPR will be an argument which must be evaluated.
11278 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11279 COMPOUND_EXPR in the chain will contain the tree for the simplified
11280 form of the builtin function call. */
11283 fold_builtin_strspn (tree s1, tree s2)
11285 if (!validate_arg (s1, POINTER_TYPE)
11286 || !validate_arg (s2, POINTER_TYPE))
11290 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11292 /* If both arguments are constants, evaluate at compile-time. */
11295 const size_t r = strspn (p1, p2);
11296 return size_int (r);
11299 /* If either argument is "", the result is 0. */
11300 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11301 /* Evaluate and ignore both arguments in case either one has
11303 return omit_two_operands (integer_type_node, integer_zero_node,
11309 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11312 Return NULL_TREE if no simplification was possible, otherwise return the
11313 simplified form of the call as a tree.
11315 The simplified form may be a constant or other expression which
11316 computes the same value, but in a more efficient manner (including
11317 calls to other builtin functions).
11319 The call may contain arguments which need to be evaluated, but
11320 which are not useful to determine the result of the call. In
11321 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11322 COMPOUND_EXPR will be an argument which must be evaluated.
11323 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11324 COMPOUND_EXPR in the chain will contain the tree for the simplified
11325 form of the builtin function call. */
11328 fold_builtin_strcspn (tree s1, tree s2)
11330 if (!validate_arg (s1, POINTER_TYPE)
11331 || !validate_arg (s2, POINTER_TYPE))
11335 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11337 /* If both arguments are constants, evaluate at compile-time. */
11340 const size_t r = strcspn (p1, p2);
11341 return size_int (r);
11344 /* If the first argument is "", the result is 0. */
11345 if (p1 && *p1 == '\0')
11347 /* Evaluate and ignore argument s2 in case it has
11349 return omit_one_operand (integer_type_node,
11350 integer_zero_node, s2);
11353 /* If the second argument is "", return __builtin_strlen(s1). */
11354 if (p2 && *p2 == '\0')
11356 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11358 /* If the replacement _DECL isn't initialized, don't do the
11363 return build_call_expr (fn, 1, s1);
11369 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11370 to the call. IGNORE is true if the value returned
11371 by the builtin will be ignored. UNLOCKED is true if this is
11372 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11373 the known length of the string. Return NULL_TREE if no simplification
11377 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11379 /* If we're using an unlocked function, assume the other unlocked
11380 functions exist explicitly. */
11381 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11382 : implicit_built_in_decls[BUILT_IN_FPUTC];
11383 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11384 : implicit_built_in_decls[BUILT_IN_FWRITE];
11386 /* If the return value is used, don't do the transformation. */
11390 /* Verify the arguments in the original call. */
11391 if (!validate_arg (arg0, POINTER_TYPE)
11392 || !validate_arg (arg1, POINTER_TYPE))
11396 len = c_strlen (arg0, 0);
11398 /* Get the length of the string passed to fputs. If the length
11399 can't be determined, punt. */
11401 || TREE_CODE (len) != INTEGER_CST)
11404 switch (compare_tree_int (len, 1))
11406 case -1: /* length is 0, delete the call entirely. */
11407 return omit_one_operand (integer_type_node, integer_zero_node, arg1);
11409 case 0: /* length is 1, call fputc. */
11411 const char *p = c_getstr (arg0);
11416 return build_call_expr (fn_fputc, 2,
11417 build_int_cst (NULL_TREE, p[0]), arg1);
11423 case 1: /* length is greater than 1, call fwrite. */
11425 /* If optimizing for size keep fputs. */
11428 /* New argument list transforming fputs(string, stream) to
11429 fwrite(string, 1, len, stream). */
11431 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
      /* compare_tree_int only returns -1/0/1, so any other value is a
         logic error.  */
11436 gcc_unreachable ();
11441 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11442 produced. False otherwise. This is done so that we don't output the error
11443 or warning twice or three times. */
11445 fold_builtin_next_arg (tree exp, bool va_start_p)
11447 tree fntype = TREE_TYPE (current_function_decl);
11448 int nargs = call_expr_nargs (exp);
      /* va_start is only meaningful in a varargs function.  */
11451 if (TYPE_ARG_TYPES (fntype) == 0
11452 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11453 == void_type_node))
11455 error ("%<va_start%> used in function with fixed args");
11461 if (va_start_p && (nargs != 2))
11463 error ("wrong number of arguments to function %<va_start%>")
;
11466 arg = CALL_EXPR_ARG (exp, 1);
11468 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11469 when we checked the arguments and if needed issued a warning. */
11474 /* Evidently an out of date version of <stdarg.h>; can't validate
11475 va_start's second argument, but can still work as intended. */
11476 warning (0, "%<__builtin_next_arg%> called without an argument");
11479 else if (nargs > 1)
11481 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11484 arg = CALL_EXPR_ARG (exp, 0);
11487 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11488 or __builtin_next_arg (0) the first time we see it, after checking
11489 the arguments and if needed issuing a warning. */
11490 if (!integer_zerop (arg))
11492 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11494 /* Strip off all nops for the sake of the comparison. This
11495 is not quite the same as STRIP_NOPS. It does more.
11496 We must also strip off INDIRECT_EXPR for C++ reference
11498 while (CONVERT_EXPR_P (arg)
11499 || TREE_CODE (arg) == INDIRECT_REF)
11500 arg = TREE_OPERAND (arg, 0);
11501 if (arg != last_parm)
11503 /* FIXME: Sometimes with the tree optimizers we can get
11504 not the last argument even though the user used the last
11505 argument. We just warn and set the arg to be the last
11506 argument so that we will get wrong-code because of
11508 warning (0, "second parameter of %<va_start%> not last named argument");
11510 /* We want to verify the second parameter just once before the tree
11511 optimizers are run and then avoid keeping it in the tree,
11512 as otherwise we could warn even for correct code like:
11513 void foo (int i, ...)
11514 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11516 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11518 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11524 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11525 ORIG may be null if this is a 2-argument call. We don't attempt to
11526 simplify calls with more than 3 arguments.
11528 Return NULL_TREE if no simplification was possible, otherwise return the
11529 simplified form of the call as a tree. If IGNORED is true, it means that
11530 the caller does not use the returned value of the function. */
11533 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11536 const char *fmt_str = NULL;
11538 /* Verify the required arguments in the original call. We deal with two
11539 types of sprintf() calls: 'sprintf (str, fmt)' and
11540 'sprintf (dest, "%s", orig)'. */
11541 if (!validate_arg (dest, POINTER_TYPE)
11542 || !validate_arg (fmt, POINTER_TYPE))
11544 if (orig && !validate_arg (orig, POINTER_TYPE))
11547 /* Check whether the format is a literal string constant. */
11548 fmt_str = c_getstr (fmt);
11549 if (fmt_str == NULL)
11553 retval = NULL_TREE;
11555 if (!init_target_chars ())
11558 /* If the format doesn't contain % args or %%, use strcpy. */
11559 if (strchr (fmt_str, target_percent) == NULL)
11561 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11566 /* Don't optimize sprintf (buf, "abc", ptr++). */
11570 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11571 'format' is known to contain no % formats. */
11572 call = build_call_expr (fn, 2, dest, fmt);
      /* sprintf's return value is the number of characters written,
         which for a literal format is just its length.  */
11574 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11577 /* If the format is "%s", use strcpy if the result isn't used. */
11578 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11581 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11586 /* Don't crash on sprintf (str1, "%s"). */
11590 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11593 retval = c_strlen (orig, 1);
11594 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11597 call = build_call_expr (fn, 2, dest, orig);
      /* Chain the strcpy call and the computed return value together so
         callers that use the result still see sprintf's return value.  */
11600 if (call && retval)
11602 retval = fold_convert
11603 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11605 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11611 /* Expand a call EXP to __builtin_object_size. */
/* NOTE(review): interior lines are elided in this dump (embedded
   numbering jumps); error-path returns/braces are not visible here.
   Invalid calls diagnose and expand to a trap instead of computing.  */
11614 expand_builtin_object_size (tree exp)
11617 int object_size_type;
11618 tree fndecl = get_callee_fndecl (exp);
11620 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11622 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11624 expand_builtin_trap ();
/* OST is the second argument: the object-size "type", constrained
   below to the constant range 0..3.  */
11628 ost = CALL_EXPR_ARG (exp, 1);
11631 if (TREE_CODE (ost) != INTEGER_CST
11632 || tree_int_cst_sgn (ost) < 0
11633 || compare_tree_int (ost, 3) > 0)
11635 error ("%Klast argument of %D is not integer constant between 0 and 3",
11637 expand_builtin_trap ();
11641 object_size_type = tree_low_cst (ost, 0);
/* Unknown size at expansion time: types 0/1 report (size_t)-1
   (maximum), types 2/3 report 0 (minimum).  */
11643 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11646 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11647 FCODE is the BUILT_IN_* to use.
11648 Return NULL_RTX if we failed; the caller should emit a normal call,
11649 otherwise try to get the result in TARGET, if convenient (and in
11650 mode MODE if that's convenient). */
/* NOTE(review): interior lines are elided in this dump (embedded
   numbering jumps); NULL_RTX fallthrough returns and switch/brace
   structure are missing from view.  */
11653 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11654 enum built_in_function fcode)
11656 tree dest, src, len, size;
11658 if (!validate_arglist (exp,
11660 fcode == BUILT_IN_MEMSET_CHK
11661 ? INTEGER_TYPE : POINTER_TYPE,
11662 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11665 dest = CALL_EXPR_ARG (exp, 0);
11666 src = CALL_EXPR_ARG (exp, 1);
11667 len = CALL_EXPR_ARG (exp, 2);
11668 size = CALL_EXPR_ARG (exp, 3);
/* SIZE must be a known constant to prove the copy fits.  */
11670 if (! host_integerp (size, 1))
11673 if (host_integerp (len, 1) || integer_all_onesp (size))
/* A constant LEN larger than the known object SIZE is a guaranteed
   overflow: warn, but still expand the call (no transformation).  */
11677 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11679 warning (0, "%Kcall to %D will always overflow destination buffer",
11680 exp, get_callee_fndecl (exp));
11685 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11686 mem{cpy,pcpy,move,set} is available. */
11689 case BUILT_IN_MEMCPY_CHK:
11690 fn = built_in_decls[BUILT_IN_MEMCPY];
11692 case BUILT_IN_MEMPCPY_CHK:
11693 fn = built_in_decls[BUILT_IN_MEMPCPY];
11695 case BUILT_IN_MEMMOVE_CHK:
11696 fn = built_in_decls[BUILT_IN_MEMMOVE];
11698 case BUILT_IN_MEMSET_CHK:
11699 fn = built_in_decls[BUILT_IN_MEMSET];
/* Build the unchecked replacement call, then peel any wrapping
   COMPOUND_EXPRs (emitting their side effects) before expanding.  */
11708 fn = build_call_expr (fn, 3, dest, src, len);
11709 STRIP_TYPE_NOPS (fn);
11710 while (TREE_CODE (fn) == COMPOUND_EXPR)
11712 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11714 fn = TREE_OPERAND (fn, 1);
/* Preserve the original call's tail-call disposition.  */
11716 if (TREE_CODE (fn) == CALL_EXPR)
11717 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11718 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11720 else if (fcode == BUILT_IN_MEMSET_CHK)
11724 unsigned int dest_align
11725 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11727 /* If DEST is not a pointer type, call the normal function. */
11728 if (dest_align == 0)
11731 /* If SRC and DEST are the same (and not volatile), do nothing. */
11732 if (operand_equal_p (src, dest, 0))
11736 if (fcode != BUILT_IN_MEMPCPY_CHK)
11738 /* Evaluate and ignore LEN in case it has side-effects. */
11739 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11740 return expand_expr (dest, target, mode, EXPAND_NORMAL);
/* mempcpy returns DEST + LEN, not DEST.  */
11743 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11744 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11747 /* __memmove_chk special case. */
11748 if (fcode == BUILT_IN_MEMMOVE_CHK)
11750 unsigned int src_align
11751 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11753 if (src_align == 0)
11756 /* If src is categorized for a readonly section we can use
11757 normal __memcpy_chk. */
11758 if (readonly_data_expr (src))
11760 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11763 fn = build_call_expr (fn, 4, dest, src, len, size);
11764 STRIP_TYPE_NOPS (fn);
11765 while (TREE_CODE (fn) == COMPOUND_EXPR)
11767 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11769 fn = TREE_OPERAND (fn, 1);
11771 if (TREE_CODE (fn) == CALL_EXPR)
11772 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11773 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11780 /* Emit warning if a buffer overflow is detected at compile time. */
/* NOTE(review): interior lines elided in this dump (embedded numbering
   jumps); the switch statement, break/return statements and braces are
   partly missing below.  Each case picks which arguments of EXP carry
   the source length and the destination object size.  */
11783 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11790 case BUILT_IN_STRCPY_CHK:
11791 case BUILT_IN_STPCPY_CHK:
11792 /* For __strcat_chk the warning will be emitted only if overflowing
11793 by at least strlen (dest) + 1 bytes. */
11794 case BUILT_IN_STRCAT_CHK:
11795 len = CALL_EXPR_ARG (exp, 1);
11796 size = CALL_EXPR_ARG (exp, 2);
11799 case BUILT_IN_STRNCAT_CHK:
11800 case BUILT_IN_STRNCPY_CHK:
11801 len = CALL_EXPR_ARG (exp, 2);
11802 size = CALL_EXPR_ARG (exp, 3);
11804 case BUILT_IN_SNPRINTF_CHK:
11805 case BUILT_IN_VSNPRINTF_CHK:
11806 len = CALL_EXPR_ARG (exp, 1);
11807 size = CALL_EXPR_ARG (exp, 3);
11810 gcc_unreachable ();
/* No warning possible when the object size is unknown ((size_t)-1)
   or not a constant.  */
11816 if (! host_integerp (size, 1) || integer_all_onesp (size))
/* For the str*cpy/strcat cases LEN is the source string; use its
   compile-time strlen if available.  */
11821 len = c_strlen (len, 1);
11822 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11825 else if (fcode == BUILT_IN_STRNCAT_CHK)
11827 tree src = CALL_EXPR_ARG (exp, 1);
11828 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11830 src = c_strlen (src, 1);
11831 if (! src || ! host_integerp (src, 1))
/* Unknown source length: strncat may or may not overflow.  */
11833 warning (0, "%Kcall to %D might overflow destination buffer",
11834 exp, get_callee_fndecl (exp));
11837 else if (tree_int_cst_lt (src, size))
11840 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11843 warning (0, "%Kcall to %D will always overflow destination buffer",
11844 exp, get_callee_fndecl (exp));
11847 /* Emit warning if a buffer overflow is detected at compile time
11848 in __sprintf_chk/__vsprintf_chk calls. */
/* NOTE(review): interior lines elided in this dump (embedded numbering
   jumps); early-return statements and braces are partly missing.  */
11851 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11853 tree dest, size, len, fmt, flag;
11854 const char *fmt_str;
11855 int nargs = call_expr_nargs (exp);
11857 /* Verify the required arguments in the original call. */
11861 dest = CALL_EXPR_ARG (exp, 0);
11862 flag = CALL_EXPR_ARG (exp, 1);
11863 size = CALL_EXPR_ARG (exp, 2);
11864 fmt = CALL_EXPR_ARG (exp, 3);
/* Object size unknown or non-constant: nothing to check.  */
11866 if (! host_integerp (size, 1) || integer_all_onesp (size))
11869 /* Check whether the format is a literal string constant. */
11870 fmt_str = c_getstr (fmt);
11871 if (fmt_str == NULL)
11874 if (!init_target_chars ())
11877 /* If the format doesn't contain % args or %%, we know its size. */
11878 if (strchr (fmt_str, target_percent) == 0)
11879 len = build_int_cstu (size_type_node, strlen (fmt_str));
11880 /* If the format is "%s" and first ... argument is a string literal,
11882 else if (fcode == BUILT_IN_SPRINTF_CHK
11883 && strcmp (fmt_str, target_percent_s) == 0)
11889 arg = CALL_EXPR_ARG (exp, 4);
11890 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11893 len = c_strlen (arg, 1);
11894 if (!len || ! host_integerp (len, 1))
/* LEN here excludes the terminating NUL, so LEN == SIZE already
   overflows; warn unless LEN < SIZE.  */
11900 if (! tree_int_cst_lt (len, size))
11902 warning (0, "%Kcall to %D will always overflow destination buffer",
11903 exp, get_callee_fndecl (exp));
11907 /* Fold a call to __builtin_object_size with arguments PTR and OST,
/* NOTE(review): interior lines elided in this dump (embedded numbering
   jumps); NULL_TREE returns and the final overflow-checked return are
   partly missing below.  */
11911 fold_builtin_object_size (tree ptr, tree ost)
11913 tree ret = NULL_TREE;
11914 int object_size_type;
11916 if (!validate_arg (ptr, POINTER_TYPE)
11917 || !validate_arg (ost, INTEGER_TYPE)
/* OST must be a constant in 0..3, mirroring the expansion-time check
   in expand_builtin_object_size.  */
11922 if (TREE_CODE (ost) != INTEGER_CST
11923 || tree_int_cst_sgn (ost) < 0
11924 || compare_tree_int (ost, 3) > 0)
11927 object_size_type = tree_low_cst (ost, 0);
11929 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11930 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11931 and (size_t) 0 for types 2 and 3. */
11932 if (TREE_SIDE_EFFECTS (ptr))
11933 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11935 if (TREE_CODE (ptr) == ADDR_EXPR)
11936 ret = build_int_cstu (size_type_node,
11937 compute_builtin_object_size (ptr, object_size_type))
11939 else if (TREE_CODE (ptr) == SSA_NAME)
11941 unsigned HOST_WIDE_INT bytes;
11943 /* If object size is not known yet, delay folding until
11944 later. Maybe subsequent passes will help determining
11946 bytes = compute_builtin_object_size (ptr, object_size_type);
11947 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11949 ret = build_int_cstu (size_type_node, bytes);
/* Only return RET if the computed constant fits size_t.  */
11954 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11955 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11956 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11963 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11964 DEST, SRC, LEN, and SIZE are the arguments to the call.
11965 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11966 code of the builtin. If MAXLEN is not NULL, it is maximum length
11967 passed as third argument. */
/* NOTE(review): interior lines elided in this dump (embedded numbering
   jumps); NULL_TREE fallthrough returns, switch braces and null-FN
   checks are partly missing below.  */
11970 fold_builtin_memory_chk (tree fndecl,
11971 tree dest, tree src, tree len, tree size,
11972 tree maxlen, bool ignore,
11973 enum built_in_function fcode)
/* For memset SRC is the fill byte, hence INTEGER_TYPE.  */
11977 if (!validate_arg (dest, POINTER_TYPE)
11978 || !validate_arg (src,
11979 (fcode == BUILT_IN_MEMSET_CHK
11980 ? INTEGER_TYPE : POINTER_TYPE))
11981 || !validate_arg (len, INTEGER_TYPE)
11982 || !validate_arg (size, INTEGER_TYPE))
11985 /* If SRC and DEST are the same (and not volatile), return DEST
11986 (resp. DEST+LEN for __mempcpy_chk). */
11987 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11989 if (fcode != BUILT_IN_MEMPCPY_CHK)
11990 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11993 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11994 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
11998 if (! host_integerp (size, 1))
/* SIZE == (size_t)-1 means "object size unknown": skip straight to
   the unchecked replacement below.  */
12001 if (! integer_all_onesp (size))
12003 if (! host_integerp (len, 1))
12005 /* If LEN is not constant, try MAXLEN too.
12006 For MAXLEN only allow optimizing into non-_ocs function
12007 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12008 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12010 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12012 /* (void) __mempcpy_chk () can be optimized into
12013 (void) __memcpy_chk (). */
12014 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12018 return build_call_expr (fn, 4, dest, src, len, size);
/* Proven overflow (SIZE < known length): leave the _chk call alone.  */
12026 if (tree_int_cst_lt (size, maxlen))
12031 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12032 mem{cpy,pcpy,move,set} is available. */
12035 case BUILT_IN_MEMCPY_CHK:
12036 fn = built_in_decls[BUILT_IN_MEMCPY];
12038 case BUILT_IN_MEMPCPY_CHK:
12039 fn = built_in_decls[BUILT_IN_MEMPCPY];
12041 case BUILT_IN_MEMMOVE_CHK:
12042 fn = built_in_decls[BUILT_IN_MEMMOVE];
12044 case BUILT_IN_MEMSET_CHK:
12045 fn = built_in_decls[BUILT_IN_MEMSET];
12054 return build_call_expr (fn, 3, dest, src, len);
12057 /* Fold a call to the __st[rp]cpy_chk builtin.
12058 DEST, SRC, and SIZE are the arguments to the call.
12059 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12060 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12061 strings passed as second argument. */
/* NOTE(review): interior lines elided in this dump (embedded numbering
   jumps); NULL_TREE returns, braces and null-FN checks are partly
   missing below.  */
12064 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12065 tree maxlen, bool ignore,
12066 enum built_in_function fcode)
12070 if (!validate_arg (dest, POINTER_TYPE)
12071 || !validate_arg (src, POINTER_TYPE)
12072 || !validate_arg (size, INTEGER_TYPE))
12075 /* If SRC and DEST are the same (and not volatile), return DEST. */
12076 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12077 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12079 if (! host_integerp (size, 1))
12082 if (! integer_all_onesp (size))
/* Compile-time strlen of SRC, if any, drives the fit check.  */
12084 len = c_strlen (src, 1);
12085 if (! len || ! host_integerp (len, 1))
12087 /* If LEN is not constant, try MAXLEN too.
12088 For MAXLEN only allow optimizing into non-_ocs function
12089 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12090 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12092 if (fcode == BUILT_IN_STPCPY_CHK)
12097 /* If return value of __stpcpy_chk is ignored,
12098 optimize into __strcpy_chk. */
12099 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12103 return build_call_expr (fn, 3, dest, src, size);
12106 if (! len || TREE_SIDE_EFFECTS (len))
12109 /* If c_strlen returned something, but not a constant,
12110 transform __strcpy_chk into __memcpy_chk. */
12111 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
/* +1 accounts for the terminating NUL that strcpy copies.  */
12115 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12116 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12117 build_call_expr (fn, 4,
12118 dest, src, len, size));
/* Known length must fit SIZE or we keep the checking variant.  */
12124 if (! tree_int_cst_lt (maxlen, size))
12128 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12129 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12130 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12134 return build_call_expr (fn, 2, dest, src);
12137 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12138 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12139 length passed as third argument. */
/* NOTE(review): interior lines elided in this dump (embedded numbering
   jumps); the parameter list's MAXLEN line and NULL_TREE returns are
   partly missing below.  */
12142 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12147 if (!validate_arg (dest, POINTER_TYPE)
12148 || !validate_arg (src, POINTER_TYPE)
12149 || !validate_arg (len, INTEGER_TYPE)
12150 || !validate_arg (size, INTEGER_TYPE))
12153 if (! host_integerp (size, 1))
/* SIZE == (size_t)-1 means unknown object size: convert directly.  */
12156 if (! integer_all_onesp (size))
12158 if (! host_integerp (len, 1))
12160 /* If LEN is not constant, try MAXLEN too.
12161 For MAXLEN only allow optimizing into non-_ocs function
12162 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12163 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
/* Proven overflow: keep the checking variant.  */
12169 if (tree_int_cst_lt (size, maxlen))
12173 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12174 fn = built_in_decls[BUILT_IN_STRNCPY];
12178 return build_call_expr (fn, 3, dest, src, len);
12181 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12182 are the arguments to the call. */
/* NOTE(review): interior lines elided in this dump (embedded numbering
   jumps); NULL_TREE returns and the null-FN check are missing below.  */
12185 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12190 if (!validate_arg (dest, POINTER_TYPE)
12191 || !validate_arg (src, POINTER_TYPE)
12192 || !validate_arg (size, INTEGER_TYPE))
12195 p = c_getstr (src);
12196 /* If the SRC parameter is "", return DEST. */
12197 if (p && *p == '\0')
12198 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
/* Only convert when object size is unknown ((size_t)-1); any known
   size keeps the checking variant.  */
12200 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12203 /* If __builtin_strcat_chk is used, assume strcat is available. */
12204 fn = built_in_decls[BUILT_IN_STRCAT];
12208 return build_call_expr (fn, 2, dest, src);
12211 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12215 fold_builtin_strncat_chk (tree fndecl,
12216 tree dest, tree src, tree len, tree size)
12221 if (!validate_arg (dest, POINTER_TYPE)
12222 || !validate_arg (src, POINTER_TYPE)
12223 || !validate_arg (size, INTEGER_TYPE)
12224 || !validate_arg (size, INTEGER_TYPE))
12227 p = c_getstr (src);
12228 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12229 if (p && *p == '\0')
12230 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12231 else if (integer_zerop (len))
12232 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12234 if (! host_integerp (size, 1))
12237 if (! integer_all_onesp (size))
12239 tree src_len = c_strlen (src, 1);
12241 && host_integerp (src_len, 1)
12242 && host_integerp (len, 1)
12243 && ! tree_int_cst_lt (len, src_len))
12245 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12246 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12250 return build_call_expr (fn, 3, dest, src, size);
12255 /* If __builtin_strncat_chk is used, assume strncat is available. */
12256 fn = built_in_decls[BUILT_IN_STRNCAT];
12260 return build_call_expr (fn, 3, dest, src, len);
12263 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12264 a normal call should be emitted rather than expanding the function
12265 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
/* NOTE(review): interior lines elided in this dump (embedded numbering
   jumps); the nargs check, NULL_TREE returns and braces are partly
   missing below.  */
12268 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12270 tree dest, size, len, fn, fmt, flag;
12271 const char *fmt_str;
12272 int nargs = call_expr_nargs (exp);
12274 /* Verify the required arguments in the original call. */
12277 dest = CALL_EXPR_ARG (exp, 0);
12278 if (!validate_arg (dest, POINTER_TYPE))
12280 flag = CALL_EXPR_ARG (exp, 1);
12281 if (!validate_arg (flag, INTEGER_TYPE))
12283 size = CALL_EXPR_ARG (exp, 2);
12284 if (!validate_arg (size, INTEGER_TYPE))
12286 fmt = CALL_EXPR_ARG (exp, 3);
12287 if (!validate_arg (fmt, POINTER_TYPE))
12290 if (! host_integerp (size, 1))
12295 if (!init_target_chars ())
12298 /* Check whether the format is a literal string constant. */
12299 fmt_str = c_getstr (fmt);
12300 if (fmt_str != NULL)
12302 /* If the format doesn't contain % args or %%, we know the size. */
12303 if (strchr (fmt_str, target_percent) == 0)
/* For sprintf_chk only trust this when no extra args follow.  */
12305 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12306 len = build_int_cstu (size_type_node, strlen (fmt_str));
12308 /* If the format is "%s" and first ... argument is a string literal,
12309 we know the size too. */
12310 else if (fcode == BUILT_IN_SPRINTF_CHK
12311 && strcmp (fmt_str, target_percent_s) == 0)
12317 arg = CALL_EXPR_ARG (exp, 4);
12318 if (validate_arg (arg, POINTER_TYPE))
12320 len = c_strlen (arg, 1);
12321 if (! len || ! host_integerp (len, 1))
/* Known object size: require a known LEN strictly below SIZE.  */
12328 if (! integer_all_onesp (size))
12330 if (! len || ! tree_int_cst_lt (len, size))
12334 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12335 or if format doesn't contain % chars or is "%s". */
12336 if (! integer_zerop (flag))
12338 if (fmt_str == NULL)
12340 if (strchr (fmt_str, target_percent) != NULL
12341 && strcmp (fmt_str, target_percent_s))
12345 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12346 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12347 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF]
/* Drop the flag/size arguments (4 skipped) and call with dest+fmt.  */
12351 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12354 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12355 a normal call should be emitted rather than expanding the function
12356 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12357 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12358 passed as second argument. */
/* NOTE(review): interior lines elided in this dump (embedded numbering
   jumps); NULL_TREE returns and braces are partly missing below.  */
12361 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12362 enum built_in_function fcode)
12364 tree dest, size, len, fn, fmt, flag;
12365 const char *fmt_str;
12367 /* Verify the required arguments in the original call. */
12368 if (call_expr_nargs (exp) < 5)
12370 dest = CALL_EXPR_ARG (exp, 0);
12371 if (!validate_arg (dest, POINTER_TYPE))
12373 len = CALL_EXPR_ARG (exp, 1);
12374 if (!validate_arg (len, INTEGER_TYPE))
12376 flag = CALL_EXPR_ARG (exp, 2);
12377 if (!validate_arg (flag, INTEGER_TYPE))
12379 size = CALL_EXPR_ARG (exp, 3);
12380 if (!validate_arg (size, INTEGER_TYPE))
12382 fmt = CALL_EXPR_ARG (exp, 4);
12383 if (!validate_arg (fmt, POINTER_TYPE))
12386 if (! host_integerp (size, 1))
12389 if (! integer_all_onesp (size))
12391 if (! host_integerp (len, 1))
12393 /* If LEN is not constant, try MAXLEN too.
12394 For MAXLEN only allow optimizing into non-_ocs function
12395 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12396 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
/* Proven overflow: keep the checking variant.  */
12402 if (tree_int_cst_lt (size, maxlen))
12406 if (!init_target_chars ())
12409 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12410 or if format doesn't contain % chars or is "%s". */
12411 if (! integer_zerop (flag))
12413 fmt_str = c_getstr (fmt);
12414 if (fmt_str == NULL)
12416 if (strchr (fmt_str, target_percent) != NULL
12417 && strcmp (fmt_str, target_percent_s))
12421 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12423 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12424 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF]
/* Drop the flag/size arguments (5 skipped) and call dest/len/fmt.  */
12428 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12431 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12432 FMT and ARG are the arguments to the call; we don't fold cases with
12433 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12435 Return NULL_TREE if no simplification was possible, otherwise return the
12436 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12437 code of the function to be simplified. */
/* NOTE(review): interior lines elided in this dump (embedded numbering
   jumps); the IGNORE early-return, NULL_TREE returns and braces are
   partly missing below.  */
12440 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12441 enum built_in_function fcode)
12443 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12444 const char *fmt_str = NULL;
12446 /* If the return value is used, don't do the transformation. */
12450 /* Verify the required arguments in the original call. */
12451 if (!validate_arg (fmt, POINTER_TYPE))
12454 /* Check whether the format is a literal string constant. */
12455 fmt_str = c_getstr (fmt);
12456 if (fmt_str == NULL)
12459 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12461 /* If we're using an unlocked function, assume the other
12462 unlocked functions exist explicitly. */
12463 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12464 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12468 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12469 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12472 if (!init_target_chars ())
12475 if (strcmp (fmt_str, target_percent_s) == 0
12476 || strchr (fmt_str, target_percent) == NULL)
12480 if (strcmp (fmt_str, target_percent_s) == 0)
/* "%s" needs a real string argument; va_list variants can't use it.  */
12482 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12485 if (!arg || !validate_arg (arg, POINTER_TYPE))
12488 str = c_getstr (arg);
12494 /* The format specifier doesn't contain any '%' characters. */
12495 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12501 /* If the string was "", printf does nothing. */
12502 if (str[0] == '\0')
12503 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12505 /* If the string has length of 1, call putchar. */
12506 if (str[1] == '\0')
12508 /* Given printf("c"), (where c is any one character,)
12509 convert "c"[0] to an int and pass that to the replacement
12511 newarg = build_int_cst (NULL_TREE, str[0]);
12513 call = build_call_expr (fn_putchar, 1, newarg);
12517 /* If the string was "string\n", call puts("string"). */
12518 size_t len = strlen (str);
12519 if ((unsigned char)str[len - 1] == target_newline)
12521 /* Create a NUL-terminated string that's one char shorter
12522 than the original, stripping off the trailing '\n'. */
12523 char *newstr = XALLOCAVEC (char, len);
12524 memcpy (newstr, str, len - 1);
12525 newstr[len - 1] = 0;
12527 newarg = build_string_literal (len, newstr);
12529 call = build_call_expr (fn_puts, 1, newarg);
12532 /* We'd like to arrange to call fputs(string,stdout) here,
12533 but we need stdout and don't have a way to get it yet. */
12538 /* The other optimizations can be done only on the non-va_list variants. */
12539 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12542 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12543 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12545 if (!arg || !validate_arg (arg, POINTER_TYPE))
12548 call = build_call_expr (fn_puts, 1, arg);
12551 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12552 else if (strcmp (fmt_str, target_percent_c) == 0)
12554 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12557 call = build_call_expr (fn_putchar, 1, arg);
/* The replacement call's return type differs from printf's int.  */
12563 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12566 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12567 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12568 more than 3 arguments, and ARG may be null in the 2-argument case.
12570 Return NULL_TREE if no simplification was possible, otherwise return the
12571 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12572 code of the function to be simplified. */
/* NOTE(review): interior lines elided in this dump (embedded numbering
   jumps); the IGNORE early-return, NULL_TREE returns and braces are
   partly missing below.  */
12575 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12576 enum built_in_function fcode)
12578 tree fn_fputc, fn_fputs, call = NULL_TREE;
12579 const char *fmt_str = NULL;
12581 /* If the return value is used, don't do the transformation. */
12585 /* Verify the required arguments in the original call. */
12586 if (!validate_arg (fp, POINTER_TYPE))
12588 if (!validate_arg (fmt, POINTER_TYPE))
12591 /* Check whether the format is a literal string constant. */
12592 fmt_str = c_getstr (fmt);
12593 if (fmt_str == NULL)
12596 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12598 /* If we're using an unlocked function, assume the other
12599 unlocked functions exist explicitly. */
12600 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12601 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12605 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12606 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12609 if (!init_target_chars ())
12612 /* If the format doesn't contain % args or %%, use strcpy. */
12613 if (strchr (fmt_str, target_percent) == NULL)
/* va_list variants with extra args can't take this path.  */
12615 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12619 /* If the format specifier was "", fprintf does nothing. */
12620 if (fmt_str[0] == '\0')
12622 /* If FP has side-effects, just wait until gimplification is
12624 if (TREE_SIDE_EFFECTS (fp))
12627 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12630 /* When "string" doesn't contain %, replace all cases of
12631 fprintf (fp, string) with fputs (string, fp). The fputs
12632 builtin will take care of special cases like length == 1. */
12634 call = build_call_expr (fn_fputs, 2, fmt, fp);
12637 /* The other optimizations can be done only on the non-va_list variants. */
12638 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12641 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12642 else if (strcmp (fmt_str, target_percent_s) == 0)
12644 if (!arg || !validate_arg (arg, POINTER_TYPE))
12647 call = build_call_expr (fn_fputs, 2, arg, fp);
12650 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12651 else if (strcmp (fmt_str, target_percent_c) == 0)
12653 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12656 call = build_call_expr (fn_fputc, 2, arg, fp);
/* The replacement call's return type differs from fprintf's int.  */
12661 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12664 /* Initialize format string characters in the target charset. */
/* NOTE(review): interior lines elided in this dump (embedded numbering
   jumps); the "already initialized" guard and return statements are
   missing from view.  Fails (presumably returns false) when any of the
   required characters has no target-charset mapping.  */
12667 init_target_chars (void)
12672 target_newline = lang_hooks.to_target_charset ('\n');
12673 target_percent = lang_hooks.to_target_charset ('%');
12674 target_c = lang_hooks.to_target_charset ('c');
12675 target_s = lang_hooks.to_target_charset ('s');
12676 if (target_newline == 0 || target_percent == 0 || target_c == 0
/* Pre-build the "%c", "%s" and "%s\n" comparison strings in the
   target charset so folders can strcmp against them.  */
12680 target_percent_c[0] = target_percent;
12681 target_percent_c[1] = target_c;
12682 target_percent_c[2] = '\0';
12684 target_percent_s[0] = target_percent;
12685 target_percent_s[1] = target_s;
12686 target_percent_s[2] = '\0';
12688 target_percent_s_newline[0] = target_percent;
12689 target_percent_s_newline[1] = target_s;
12690 target_percent_s_newline[2] = target_newline;
12691 target_percent_s_newline[3] = '\0';
12698 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12699 and no overflow/underflow occurred. INEXACT is true if M was not
12700 exactly calculated. TYPE is the tree type for the result. This
12701 function assumes that you cleared the MPFR flags and then
12702 calculated M to see if anything subsequently set a flag prior to
12703 entering this function. Return NULL_TREE if any checks fail. */
/* NOTE(review): interior lines elided in this dump (embedded numbering
   jumps); the final NULL_TREE return and closing braces are missing
   from view.  */
12706 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12708 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12709 overflow/underflow occurred. If -frounding-math, proceed iff the
12710 result of calling FUNC was exact. */
12711 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12712 && (!flag_rounding_math || !inexact))
12714 REAL_VALUE_TYPE rr;
12716 real_from_mpfr (&rr, m, type, GMP_RNDN);
12717 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12718 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12719 but the mpft_t is not, then we underflowed in the
12721 if (real_isfinite (&rr)
12722 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12724 REAL_VALUE_TYPE rmode;
/* Round-trip through the target mode; only accept if lossless.  */
12726 real_convert (&rmode, TYPE_MODE (type), &rr);
12727 /* Proceed iff the specified mode can hold the value. */
12728 if (real_identical (&rmode, &rr))
12729 return build_real (type, rmode);
12735 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12736 FUNC on it and return the resulting value as a tree with type TYPE.
12737 If MIN and/or MAX are not NULL, then the supplied ARG must be
12738 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12739 acceptable values, otherwise they are not. The mpfr precision is
12740 set to the precision of TYPE. We assume that function FUNC returns
12741 zero if the result could be calculated exactly within the requested
/* NOTE(review): interior lines elided in this dump (embedded numbering
   jumps); the mpfr_clear call, closing braces and the final
   `return result;` are missing from view.  */
12745 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12746 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12749 tree result = NULL_TREE;
12753 /* To proceed, MPFR must exactly represent the target floating point
12754 format, which only happens when the target base equals two. */
12755 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12756 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12758 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
/* Enforce the optional domain bounds; INCLUSIVE picks >= / <= vs > / <.  */
12760 if (real_isfinite (ra)
12761 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12762 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12764 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
/* Clear MPFR exception flags before FUNC so do_mpfr_ckconv can tell
   whether FUNC overflowed/underflowed.  */
12768 mpfr_init2 (m, prec);
12769 mpfr_from_real (m, ra, GMP_RNDN);
12770 mpfr_clear_flags ();
12771 inexact = func (m, m, GMP_RNDN);
12772 result = do_mpfr_ckconv (m, type, inexact);
12780 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12781 FUNC on it and return the resulting value as a tree with type TYPE.
12782 The mpfr precision is set to the precision of TYPE. We assume that
12783 function FUNC returns zero if the result could be calculated
12784 exactly within the requested precision. */
/* NOTE(review): interior lines elided in this dump (embedded numbering
   jumps); closing braces and the final `return result;` are missing
   from view.  */
12787 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12788 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12790 tree result = NULL_TREE;
12795 /* To proceed, MPFR must exactly represent the target floating point
12796 format, which only happens when the target base equals two. */
12797 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12798 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12799 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12801 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12802 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12804 if (real_isfinite (ra1) && real_isfinite (ra2))
12806 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
/* Clear MPFR flags before FUNC so do_mpfr_ckconv can detect
   overflow/underflow set by the computation.  */
12810 mpfr_inits2 (prec, m1, m2, NULL);
12811 mpfr_from_real (m1, ra1, GMP_RNDN);
12812 mpfr_from_real (m2, ra2, GMP_RNDN);
12813 mpfr_clear_flags ();
12814 inexact = func (m1, m1, m2, GMP_RNDN);
12815 result = do_mpfr_ckconv (m1, type, inexact);
12816 mpfr_clears (m1, m2, NULL);
12823 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12824 FUNC on it and return the resulting value as a tree with type TYPE.
12825 The mpfr precision is set to the precision of TYPE. We assume that
12826 function FUNC returns zero if the result could be calculated
12827 exactly within the requested precision. */
/* NOTE(review): interior lines elided in this dump (embedded numbering
   jumps); closing braces and the final `return result;` are missing
   from view.  Same pattern as do_mpfr_arg2, extended to three
   constant operands (e.g. for fma-style folds).  */
12830 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12831 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12833 tree result = NULL_TREE;
12839 /* To proceed, MPFR must exactly represent the target floating point
12840 format, which only happens when the target base equals two. */
12841 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12842 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12843 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12844 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12846 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12847 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12848 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12850 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12852 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
/* Clear MPFR flags before FUNC so do_mpfr_ckconv can detect
   overflow/underflow set by the computation.  */
12856 mpfr_inits2 (prec, m1, m2, m3, NULL);
12857 mpfr_from_real (m1, ra1, GMP_RNDN);
12858 mpfr_from_real (m2, ra2, GMP_RNDN);
12859 mpfr_from_real (m3, ra3, GMP_RNDN);
12860 mpfr_clear_flags ();
12861 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12862 result = do_mpfr_ckconv (m1, type, inexact);
12863 mpfr_clears (m1, m2, m3, NULL);
12870 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12871 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12872 If ARG_SINP and ARG_COSP are NULL then the result is returned
12873 as a complex value.
12874 The type is taken from the type of ARG and is used for setting the
12875 precision of the calculation and results. */
12878 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12880 tree const type = TREE_TYPE (arg);
12881 tree result = NULL_TREE;
12885 /* To proceed, MPFR must exactly represent the target floating point
12886 format, which only happens when the target base equals two. */
12887 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12888 && TREE_CODE (arg) == REAL_CST
12889 && !TREE_OVERFLOW (arg))
12891 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12893 if (real_isfinite (ra))
12895 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12896 tree result_s, result_c;
12900 mpfr_inits2 (prec, m, ms, mc, NULL);
12901 mpfr_from_real (m, ra, GMP_RNDN);
12902 mpfr_clear_flags ();
12903 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12904 result_s = do_mpfr_ckconv (ms, type, inexact);
12905 result_c = do_mpfr_ckconv (mc, type, inexact);
12906 mpfr_clears (m, ms, mc, NULL);
12907 if (result_s && result_c)
12909 /* If we are to return in a complex value do so. */
12910 if (!arg_sinp && !arg_cosp)
12911 return build_complex (build_complex_type (type),
12912 result_c, result_s);
12914 /* Dereference the sin/cos pointer arguments. */
12915 arg_sinp = build_fold_indirect_ref (arg_sinp);
12916 arg_cosp = build_fold_indirect_ref (arg_cosp);
12917 /* Proceed if valid pointer type were passed in. */
12918 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12919 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12921 /* Set the values. */
12922 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12924 TREE_SIDE_EFFECTS (result_s) = 1;
12925 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12927 TREE_SIDE_EFFECTS (result_c) = 1;
12928 /* Combine the assignments into a compound expr. */
12929 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12930 result_s, result_c));
12938 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
12939 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12940 two-argument mpfr order N Bessel function FUNC on them and return
12941 the resulting value as a tree with type TYPE. The mpfr precision
12942 is set to the precision of TYPE. We assume that function FUNC
12943 returns zero if the result could be calculated exactly within the
12944 requested precision. */
12946 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12947 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12948 const REAL_VALUE_TYPE *min, bool inclusive)
12950 tree result = NULL_TREE;
12955 /* To proceed, MPFR must exactly represent the target floating point
12956 format, which only happens when the target base equals two. */
12957 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12958 && host_integerp (arg1, 0)
12959 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12961 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12962 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12965 && real_isfinite (ra)
12966 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12968 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12972 mpfr_init2 (m, prec);
12973 mpfr_from_real (m, ra, GMP_RNDN);
12974 mpfr_clear_flags ();
12975 inexact = func (m, n, m, GMP_RNDN);
12976 result = do_mpfr_ckconv (m, type, inexact);
12984 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12985 the pointer *(ARG_QUO) and return the result. The type is taken
12986 from the type of ARG0 and is used for setting the precision of the
12987 calculation and results. */
12990 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12992 tree const type = TREE_TYPE (arg0);
12993 tree result = NULL_TREE;
12998 /* To proceed, MPFR must exactly represent the target floating point
12999 format, which only happens when the target base equals two. */
13000 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13001 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13002 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13004 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13005 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13007 if (real_isfinite (ra0) && real_isfinite (ra1))
13009 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
13014 mpfr_inits2 (prec, m0, m1, NULL);
13015 mpfr_from_real (m0, ra0, GMP_RNDN);
13016 mpfr_from_real (m1, ra1, GMP_RNDN);
13017 mpfr_clear_flags ();
13018 mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
13019 /* Remquo is independent of the rounding mode, so pass
13020 inexact=0 to do_mpfr_ckconv(). */
13021 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13022 mpfr_clears (m0, m1, NULL);
13025 /* MPFR calculates quo in the host's long so it may
13026 return more bits in quo than the target int can hold
13027 if sizeof(host long) > sizeof(target int). This can
13028 happen even for native compilers in LP64 mode. In
13029 these cases, modulo the quo value with the largest
13030 number that the target int can hold while leaving one
13031 bit for the sign. */
13032 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13033 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13035 /* Dereference the quo pointer argument. */
13036 arg_quo = build_fold_indirect_ref (arg_quo);
13037 /* Proceed iff a valid pointer type was passed in. */
13038 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13040 /* Set the value. */
13041 tree result_quo = fold_build2 (MODIFY_EXPR,
13042 TREE_TYPE (arg_quo), arg_quo,
13043 build_int_cst (NULL, integer_quo));
13044 TREE_SIDE_EFFECTS (result_quo) = 1;
13045 /* Combine the quo assignment with the rem. */
13046 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13047 result_quo, result_rem));
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */
13064 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13066 tree result = NULL_TREE;
13070 /* To proceed, MPFR must exactly represent the target floating point
13071 format, which only happens when the target base equals two. Also
13072 verify ARG is a constant and that ARG_SG is an int pointer. */
13073 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13074 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13075 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13076 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13078 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13080 /* In addition to NaN and Inf, the argument cannot be zero or a
13081 negative integer. */
13082 if (real_isfinite (ra)
13083 && ra->cl != rvc_zero
13084 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13086 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
13091 mpfr_init2 (m, prec);
13092 mpfr_from_real (m, ra, GMP_RNDN);
13093 mpfr_clear_flags ();
13094 inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
13095 result_lg = do_mpfr_ckconv (m, type, inexact);
13101 /* Dereference the arg_sg pointer argument. */
13102 arg_sg = build_fold_indirect_ref (arg_sg);
13103 /* Assign the signgam value into *arg_sg. */
13104 result_sg = fold_build2 (MODIFY_EXPR,
13105 TREE_TYPE (arg_sg), arg_sg,
13106 build_int_cst (NULL, sg));
13107 TREE_SIDE_EFFECTS (result_sg) = 1;
13108 /* Combine the signgam assignment with the lgamma result. */
13109 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13110 result_sg, result_lg));