1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
24 #include "coretypes.h"
30 #include "tree-gimple.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
/* Default direction of padding for varargs on the stack: pad downward
   exactly when the target is big-endian.  Targets may pre-define this.  */
#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names[(int) END_BUILTINS] =
65 #include "builtins.def"
69 /* Setup an array of _DECL trees, make sure each element is
70 initialized to NULL_TREE. */
71 tree built_in_decls[(int) END_BUILTINS];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 It may be NULL_TREE when this is invalid (for instance runtime is not
74 required to implement the function call in all cases). */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
129 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
132 static rtx expand_builtin_bzero (tree);
133 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_alloca (tree, rtx);
139 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static rtx expand_builtin_fputs (tree, rtx, bool);
142 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
143 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_expect (tree, tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (tree);
151 static tree fold_builtin_inf (tree, int);
152 static tree fold_builtin_nan (tree, tree, int);
153 static tree rewrite_call_expr (tree, int, tree, int, ...);
154 static bool validate_arg (const_tree, enum tree_code code);
155 static bool integer_valued_real_p (tree);
156 static tree fold_trunc_transparent_mathfn (tree, tree);
157 static bool readonly_data_expr (tree);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_signbit (tree, rtx);
160 static tree fold_builtin_sqrt (tree, tree);
161 static tree fold_builtin_cbrt (tree, tree);
162 static tree fold_builtin_pow (tree, tree, tree, tree);
163 static tree fold_builtin_powi (tree, tree, tree, tree);
164 static tree fold_builtin_cos (tree, tree, tree);
165 static tree fold_builtin_cosh (tree, tree, tree);
166 static tree fold_builtin_tan (tree, tree);
167 static tree fold_builtin_trunc (tree, tree);
168 static tree fold_builtin_floor (tree, tree);
169 static tree fold_builtin_ceil (tree, tree);
170 static tree fold_builtin_round (tree, tree);
171 static tree fold_builtin_int_roundingfn (tree, tree);
172 static tree fold_builtin_bitop (tree, tree);
173 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
174 static tree fold_builtin_strchr (tree, tree, tree);
175 static tree fold_builtin_memchr (tree, tree, tree, tree);
176 static tree fold_builtin_memcmp (tree, tree, tree);
177 static tree fold_builtin_strcmp (tree, tree);
178 static tree fold_builtin_strncmp (tree, tree, tree);
179 static tree fold_builtin_signbit (tree, tree);
180 static tree fold_builtin_copysign (tree, tree, tree, tree);
181 static tree fold_builtin_isascii (tree);
182 static tree fold_builtin_toascii (tree);
183 static tree fold_builtin_isdigit (tree);
184 static tree fold_builtin_fabs (tree, tree);
185 static tree fold_builtin_abs (tree, tree);
186 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
188 static tree fold_builtin_n (tree, tree *, int, bool);
189 static tree fold_builtin_0 (tree, bool);
190 static tree fold_builtin_1 (tree, tree, bool);
191 static tree fold_builtin_2 (tree, tree, tree, bool);
192 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
193 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
194 static tree fold_builtin_varargs (tree, tree, bool);
196 static tree fold_builtin_strpbrk (tree, tree, tree);
197 static tree fold_builtin_strstr (tree, tree, tree);
198 static tree fold_builtin_strrchr (tree, tree, tree);
199 static tree fold_builtin_strcat (tree, tree);
200 static tree fold_builtin_strncat (tree, tree, tree);
201 static tree fold_builtin_strspn (tree, tree);
202 static tree fold_builtin_strcspn (tree, tree);
203 static tree fold_builtin_sprintf (tree, tree, tree, int);
205 static rtx expand_builtin_object_size (tree);
206 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
207 enum built_in_function);
208 static void maybe_emit_chk_warning (tree, enum built_in_function);
209 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
210 static tree fold_builtin_object_size (tree, tree);
211 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
212 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
213 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
214 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
215 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
216 enum built_in_function);
217 static bool init_target_chars (void);
/* Target-character-set values and small format-string fragments used
   when folding printf/fprintf/sprintf-style builtins.  Presumably
   filled in lazily by init_target_chars (declared above) -- TODO
   confirm against its definition, which is outside this chunk.  */
219 static unsigned HOST_WIDE_INT target_newline;
220 static unsigned HOST_WIDE_INT target_percent;
221 static unsigned HOST_WIDE_INT target_c;
222 static unsigned HOST_WIDE_INT target_s;
223 static char target_percent_c[3];
224 static char target_percent_s[3];
225 static char target_percent_s_newline[4];
226 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_arg2 (tree, tree, tree,
229 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
230 static tree do_mpfr_arg3 (tree, tree, tree, tree,
231 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
232 static tree do_mpfr_sincos (tree, tree, tree);
233 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
234 static tree do_mpfr_bessel_n (tree, tree, tree,
235 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
236 const REAL_VALUE_TYPE *, bool);
237 static tree do_mpfr_remquo (tree, tree, tree);
238 static tree do_mpfr_lgamma_r (tree, tree, tree);
241 /* Return true if NODE should be considered for inline expansion regardless
242 of the optimization level. This means whenever a function is invoked with
243 its "internal" name, which normally contains the prefix "__builtin". */
245 static bool called_as_built_in (tree node)
247 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
248 if (strncmp (name, "__builtin_", 10) == 0)
250 if (strncmp (name, "__sync_", 7) == 0)
255 /* Return the alignment in bits of EXP, a pointer valued expression.
256 But don't return more than MAX_ALIGN no matter what.
257 The alignment returned is, by default, the alignment of the thing that
258 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
260 Otherwise, look at the expression to see if we can do better, i.e., if the
261 expression is actually pointing at an object whose alignment is tighter. */
264 get_pointer_alignment (tree exp, unsigned int max_align)
266 unsigned int align, inner;
268 /* We rely on TER to compute accurate alignment information. */
269 if (!(optimize && flag_tree_ter))
272 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
275 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
276 align = MIN (align, max_align);
280 switch (TREE_CODE (exp))
283 exp = TREE_OPERAND (exp, 0);
284 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
287 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
288 align = MIN (inner, max_align);
291 case POINTER_PLUS_EXPR:
292 /* If sum of pointer + int, restrict our maximum alignment to that
293 imposed by the integer. If not, we can't do any better than
295 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
298 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
299 & (max_align / BITS_PER_UNIT - 1))
303 exp = TREE_OPERAND (exp, 0);
307 /* See what we are pointing at and look at its alignment. */
308 exp = TREE_OPERAND (exp, 0);
310 if (handled_component_p (exp))
312 HOST_WIDE_INT bitsize, bitpos;
314 enum machine_mode mode;
315 int unsignedp, volatilep;
317 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
318 &mode, &unsignedp, &volatilep, true);
320 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
321 if (offset && TREE_CODE (offset) == PLUS_EXPR
322 && host_integerp (TREE_OPERAND (offset, 1), 1))
324 /* Any overflow in calculating offset_bits won't change
327 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
331 inner = MIN (inner, (offset_bits & -offset_bits));
332 offset = TREE_OPERAND (offset, 0);
334 if (offset && TREE_CODE (offset) == MULT_EXPR
335 && host_integerp (TREE_OPERAND (offset, 1), 1))
337 /* Any overflow in calculating offset_factor won't change
339 unsigned offset_factor
340 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
344 inner = MIN (inner, (offset_factor & -offset_factor));
347 inner = MIN (inner, BITS_PER_UNIT);
350 align = MIN (inner, DECL_ALIGN (exp));
351 #ifdef CONSTANT_ALIGNMENT
352 else if (CONSTANT_CLASS_P (exp))
353 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
355 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
356 || TREE_CODE (exp) == INDIRECT_REF)
357 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
359 align = MIN (align, inner);
360 return MIN (align, max_align);
368 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
369 way, because it could contain a zero byte in the middle.
370 TREE_STRING_LENGTH is the size of the character array, not the string.
372 ONLY_VALUE should be nonzero if the result is not going to be emitted
373 into the instruction stream and zero if it is going to be expanded.
374 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
375 is returned, otherwise NULL, since
376 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
377 evaluate the side-effects.
379 The value returned is of type `ssizetype'.
381 Unfortunately, string_constant can't access the values of const char
382 arrays with initializers, so neither can we do so here. */
385 c_strlen (tree src, int only_value)
388 HOST_WIDE_INT offset;
393 if (TREE_CODE (src) == COND_EXPR
394 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
398 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
399 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
400 if (tree_int_cst_equal (len1, len2))
404 if (TREE_CODE (src) == COMPOUND_EXPR
405 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
406 return c_strlen (TREE_OPERAND (src, 1), only_value);
408 src = string_constant (src, &offset_node);
412 max = TREE_STRING_LENGTH (src) - 1;
413 ptr = TREE_STRING_POINTER (src);
415 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
417 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
418 compute the offset to the following null if we don't know where to
419 start searching for it. */
422 for (i = 0; i < max; i++)
426 /* We don't know the starting offset, but we do know that the string
427 has no internal zero bytes. We can assume that the offset falls
428 within the bounds of the string; otherwise, the programmer deserves
429 what he gets. Subtract the offset from the length of the string,
430 and return that. This would perhaps not be valid if we were dealing
431 with named arrays in addition to literal string constants. */
433 return size_diffop (size_int (max), offset_node);
436 /* We have a known offset into the string. Start searching there for
437 a null character if we can represent it as a single HOST_WIDE_INT. */
438 if (offset_node == 0)
440 else if (! host_integerp (offset_node, 0))
443 offset = tree_low_cst (offset_node, 0);
445 /* If the offset is known to be out of bounds, warn, and call strlen at
447 if (offset < 0 || offset > max)
449 /* Suppress multiple warnings for propagated constant strings. */
450 if (! TREE_NO_WARNING (src))
452 warning (0, "offset outside bounds of constant string");
453 TREE_NO_WARNING (src) = 1;
458 /* Use strlen to search for the first zero byte. Since any strings
459 constructed with build_string will have nulls appended, we win even
460 if we get handed something like (char[4])"abcd".
462 Since OFFSET is our starting index into the string, no further
463 calculation is needed. */
464 return ssize_int (strlen (ptr + offset));
467 /* Return a char pointer for a C string if it is a string constant
468 or sum of string constant and integer constant. */
475 src = string_constant (src, &offset_node);
479 if (offset_node == 0)
480 return TREE_STRING_POINTER (src);
481 else if (!host_integerp (offset_node, 1)
482 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
485 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
488 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
489 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
492 c_readstr (const char *str, enum machine_mode mode)
498 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
503 for (i = 0; i < GET_MODE_SIZE (mode); i++)
506 if (WORDS_BIG_ENDIAN)
507 j = GET_MODE_SIZE (mode) - i - 1;
508 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
509 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
510 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
512 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
515 ch = (unsigned char) str[i];
516 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
518 return immed_double_const (c[0], c[1], mode);
521 /* Cast a target constant CST to target CHAR and if that value fits into
522 host char type, return zero and put that value into variable pointed to by
526 target_char_cast (tree cst, char *p)
528 unsigned HOST_WIDE_INT val, hostval;
530 if (!host_integerp (cst, 1)
531 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
534 val = tree_low_cst (cst, 1);
535 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
536 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
539 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
540 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
549 /* Similar to save_expr, but assumes that arbitrary code is not executed
550 in between the multiple evaluations. In particular, we assume that a
551 non-addressable local variable will not be modified. */
554 builtin_save_expr (tree exp)
556 if (TREE_ADDRESSABLE (exp) == 0
557 && (TREE_CODE (exp) == PARM_DECL
558 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
561 return save_expr (exp);
564 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
565 times to get the address of either a higher stack frame, or a return
566 address located within it (depending on FNDECL_CODE). */
569 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
573 #ifdef INITIAL_FRAME_ADDRESS_RTX
574 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
578 /* For a zero count with __builtin_return_address, we don't care what
579 frame address we return, because target-specific definitions will
580 override us. Therefore frame pointer elimination is OK, and using
581 the soft frame pointer is OK.
583 For a nonzero count, or a zero count with __builtin_frame_address,
584 we require a stable offset from the current frame pointer to the
585 previous one, so we must use the hard frame pointer, and
586 we must disable frame pointer elimination. */
587 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
588 tem = frame_pointer_rtx;
591 tem = hard_frame_pointer_rtx;
593 /* Tell reload not to eliminate the frame pointer. */
594 crtl->accesses_prior_frames = 1;
598 /* Some machines need special handling before we can access
599 arbitrary frames. For example, on the SPARC, we must first flush
600 all register windows to the stack. */
601 #ifdef SETUP_FRAME_ADDRESSES
603 SETUP_FRAME_ADDRESSES ();
606 /* On the SPARC, the return address is not in the frame, it is in a
607 register. There is no way to access it off of the current frame
608 pointer, but it can be accessed off the previous frame pointer by
609 reading the value from the register window save area. */
610 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
611 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
615 /* Scan back COUNT frames to the specified frame. */
616 for (i = 0; i < count; i++)
618 /* Assume the dynamic chain pointer is in the word that the
619 frame address points to, unless otherwise specified. */
620 #ifdef DYNAMIC_CHAIN_ADDRESS
621 tem = DYNAMIC_CHAIN_ADDRESS (tem);
623 tem = memory_address (Pmode, tem);
624 tem = gen_frame_mem (Pmode, tem);
625 tem = copy_to_reg (tem);
628 /* For __builtin_frame_address, return what we've got. But, on
629 the SPARC for example, we may have to add a bias. */
630 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
631 #ifdef FRAME_ADDR_RTX
632 return FRAME_ADDR_RTX (tem);
637 /* For __builtin_return_address, get the return address from that frame. */
638 #ifdef RETURN_ADDR_RTX
639 tem = RETURN_ADDR_RTX (count, tem);
641 tem = memory_address (Pmode,
642 plus_constant (tem, GET_MODE_SIZE (Pmode)));
643 tem = gen_frame_mem (Pmode, tem);
648 /* Alias set used for setjmp buffer.  */
/* -1 means "not yet allocated"; the setjmp/longjmp expanders below
   create the alias set lazily via new_alias_set.  */
649 static alias_set_type setjmp_alias_set = -1;
651 /* Construct the leading half of a __builtin_setjmp call. Control will
652 return to RECEIVER_LABEL. This is also called directly by the SJLJ
653 exception handling code. */
656 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
658 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
662 if (setjmp_alias_set == -1)
663 setjmp_alias_set = new_alias_set ();
665 buf_addr = convert_memory_address (Pmode, buf_addr);
667 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
669 /* We store the frame pointer and the address of receiver_label in
670 the buffer and use the rest of it for the stack save area, which
671 is machine-dependent. */
673 mem = gen_rtx_MEM (Pmode, buf_addr);
674 set_mem_alias_set (mem, setjmp_alias_set);
675 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
677 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
678 set_mem_alias_set (mem, setjmp_alias_set);
680 emit_move_insn (validize_mem (mem),
681 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
683 stack_save = gen_rtx_MEM (sa_mode,
684 plus_constant (buf_addr,
685 2 * GET_MODE_SIZE (Pmode)));
686 set_mem_alias_set (stack_save, setjmp_alias_set);
687 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
689 /* If there is further processing to do, do it. */
690 #ifdef HAVE_builtin_setjmp_setup
691 if (HAVE_builtin_setjmp_setup)
692 emit_insn (gen_builtin_setjmp_setup (buf_addr));
695 /* Tell optimize_save_area_alloca that extra work is going to
696 need to go on during alloca. */
697 cfun->calls_setjmp = 1;
699 /* We have a nonlocal label. */
700 cfun->has_nonlocal_label = 1;
703 /* Construct the trailing part of a __builtin_setjmp call. This is
704 also called directly by the SJLJ exception handling code. */
707 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
709 /* Clobber the FP when we get here, so we have to make sure it's
710 marked as used by this function. */
711 emit_use (hard_frame_pointer_rtx);
713 /* Mark the static chain as clobbered here so life information
714 doesn't get messed up for it. */
715 emit_clobber (static_chain_rtx);
717 /* Now put in the code to restore the frame pointer, and argument
718 pointer, if needed. */
719 #ifdef HAVE_nonlocal_goto
720 if (! HAVE_nonlocal_goto)
723 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
724 /* This might change the hard frame pointer in ways that aren't
725 apparent to early optimization passes, so force a clobber. */
726 emit_clobber (hard_frame_pointer_rtx);
729 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
730 if (fixed_regs[ARG_POINTER_REGNUM])
732 #ifdef ELIMINABLE_REGS
734 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
736 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
737 if (elim_regs[i].from == ARG_POINTER_REGNUM
738 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
741 if (i == ARRAY_SIZE (elim_regs))
744 /* Now restore our arg pointer from the address at which it
745 was saved in our stack frame. */
746 emit_move_insn (virtual_incoming_args_rtx,
747 copy_to_reg (get_arg_pointer_save_area ()));
752 #ifdef HAVE_builtin_setjmp_receiver
753 if (HAVE_builtin_setjmp_receiver)
754 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
757 #ifdef HAVE_nonlocal_goto_receiver
758 if (HAVE_nonlocal_goto_receiver)
759 emit_insn (gen_nonlocal_goto_receiver ());
764 /* We must not allow the code we just generated to be reordered by
765 scheduling. Specifically, the update of the frame pointer must
766 happen immediately, not later. */
767 emit_insn (gen_blockage ());
770 /* __builtin_longjmp is passed a pointer to an array of five words (not
771 all will be used on all machines). It operates similarly to the C
772 library function of the same name, but is more efficient. Much of
773 the code below is copied from the handling of non-local gotos. */
776 expand_builtin_longjmp (rtx buf_addr, rtx value)
778 rtx fp, lab, stack, insn, last;
779 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
781 if (setjmp_alias_set == -1)
782 setjmp_alias_set = new_alias_set ();
784 buf_addr = convert_memory_address (Pmode, buf_addr);
786 buf_addr = force_reg (Pmode, buf_addr);
788 /* We used to store value in static_chain_rtx, but that fails if pointers
789 are smaller than integers. We instead require that the user must pass
790 a second argument of 1, because that is what builtin_setjmp will
791 return. This also makes EH slightly more efficient, since we are no
792 longer copying around a value that we don't care about. */
793 gcc_assert (value == const1_rtx);
795 last = get_last_insn ();
796 #ifdef HAVE_builtin_longjmp
797 if (HAVE_builtin_longjmp)
798 emit_insn (gen_builtin_longjmp (buf_addr));
802 fp = gen_rtx_MEM (Pmode, buf_addr);
803 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
804 GET_MODE_SIZE (Pmode)));
806 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
807 2 * GET_MODE_SIZE (Pmode)));
808 set_mem_alias_set (fp, setjmp_alias_set);
809 set_mem_alias_set (lab, setjmp_alias_set);
810 set_mem_alias_set (stack, setjmp_alias_set);
812 /* Pick up FP, label, and SP from the block and jump. This code is
813 from expand_goto in stmt.c; see there for detailed comments. */
814 #ifdef HAVE_nonlocal_goto
815 if (HAVE_nonlocal_goto)
816 /* We have to pass a value to the nonlocal_goto pattern that will
817 get copied into the static_chain pointer, but it does not matter
818 what that value is, because builtin_setjmp does not use it. */
819 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
823 lab = copy_to_reg (lab);
825 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
826 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
828 emit_move_insn (hard_frame_pointer_rtx, fp);
829 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
831 emit_use (hard_frame_pointer_rtx);
832 emit_use (stack_pointer_rtx);
833 emit_indirect_jump (lab);
837 /* Search backwards and mark the jump insn as a non-local goto.
838 Note that this precludes the use of __builtin_longjmp to a
839 __builtin_setjmp target in the same function. However, we've
840 already cautioned the user that these functions are for
841 internal exception handling use only. */
842 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
844 gcc_assert (insn != last);
848 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
852 else if (CALL_P (insn))
857 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
858 and the address of the save area. */
861 expand_builtin_nonlocal_goto (tree exp)
863 tree t_label, t_save_area;
864 rtx r_label, r_save_area, r_fp, r_sp, insn;
866 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
869 t_label = CALL_EXPR_ARG (exp, 0);
870 t_save_area = CALL_EXPR_ARG (exp, 1);
872 r_label = expand_normal (t_label);
873 r_label = convert_memory_address (Pmode, r_label);
874 r_save_area = expand_normal (t_save_area);
875 r_save_area = convert_memory_address (Pmode, r_save_area);
876 r_fp = gen_rtx_MEM (Pmode, r_save_area);
877 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
878 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
880 crtl->has_nonlocal_goto = 1;
882 #ifdef HAVE_nonlocal_goto
883 /* ??? We no longer need to pass the static chain value, afaik. */
884 if (HAVE_nonlocal_goto)
885 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
889 r_label = copy_to_reg (r_label);
891 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
892 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
894 /* Restore frame pointer for containing function.
895 This sets the actual hard register used for the frame pointer
896 to the location of the function's incoming static chain info.
897 The non-local goto handler will then adjust it to contain the
898 proper value and reload the argument pointer, if needed. */
899 emit_move_insn (hard_frame_pointer_rtx, r_fp);
900 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
902 /* USE of hard_frame_pointer_rtx added for consistency;
903 not clear if really needed. */
904 emit_use (hard_frame_pointer_rtx);
905 emit_use (stack_pointer_rtx);
907 /* If the architecture is using a GP register, we must
908 conservatively assume that the target function makes use of it.
909 The prologue of functions with nonlocal gotos must therefore
910 initialize the GP register to the appropriate value, and we
911 must then make sure that this value is live at the point
912 of the jump. (Note that this doesn't necessarily apply
913 to targets with a nonlocal_goto pattern; they are free
914 to implement it in their own way. Note also that this is
915 a no-op if the GP register is a global invariant.) */
916 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
917 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
918 emit_use (pic_offset_table_rtx);
920 emit_indirect_jump (r_label);
923 /* Search backwards to the jump insn and mark it as a
925 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
929 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
930 const0_rtx, REG_NOTES (insn));
933 else if (CALL_P (insn))
940 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
941 (not all will be used on all machines) that was passed to __builtin_setjmp.
942 It updates the stack pointer in that block to correspond to the current
946 expand_builtin_update_setjmp_buf (rtx buf_addr)
948 enum machine_mode sa_mode = Pmode;
952 #ifdef HAVE_save_stack_nonlocal
953 if (HAVE_save_stack_nonlocal)
954 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
956 #ifdef STACK_SAVEAREA_MODE
957 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
961 = gen_rtx_MEM (sa_mode,
964 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
968 emit_insn (gen_setjmp ());
971 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
974 /* Expand a call to __builtin_prefetch. For a target that does not support
975 data prefetch, evaluate the memory address argument in case it has side
/* effects (comment truncated in this extraction).  EXP is the CALL_EXPR.  */
979 expand_builtin_prefetch (tree exp)
981 tree arg0, arg1, arg2;
/* Argument 0 (the address) is mandatory and must be a pointer.  */
985 if (!validate_arglist (exp, POINTER_TYPE, 0))
988 arg0 = CALL_EXPR_ARG (exp, 0);
990 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
991 zero (read) and argument 2 (locality) defaults to 3 (high degree of
993 nargs = call_expr_nargs (exp);
995 arg1 = CALL_EXPR_ARG (exp, 1);
997 arg1 = integer_zero_node;
999 arg2 = CALL_EXPR_ARG (exp, 2);
1001 arg2 = build_int_cst (NULL_TREE, 3);
1003 /* Argument 0 is an address. */
1004 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1006 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1007 if (TREE_CODE (arg1) != INTEGER_CST)
1009 error ("second argument to %<__builtin_prefetch%> must be a constant");
1010 arg1 = integer_zero_node;
1012 op1 = expand_normal (arg1);
1013 /* Argument 1 must be either zero or one. */
1014 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1016 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1021 /* Argument 2 (locality) must be a compile-time constant int. */
1022 if (TREE_CODE (arg2) != INTEGER_CST)
1024 error ("third argument to %<__builtin_prefetch%> must be a constant");
1025 arg2 = integer_zero_node;
1027 op2 = expand_normal (arg2);
1028 /* Argument 2 must be 0, 1, 2, or 3. */
1029 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1031 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1035 #ifdef HAVE_prefetch
/* If the address doesn't satisfy the prefetch pattern's predicate (or is
   in the wrong mode), coerce it into a Pmode register first.  */
1038 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1040 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1041 || (GET_MODE (op0) != Pmode))
1043 op0 = convert_memory_address (Pmode, op0);
1044 op0 = force_reg (Pmode, op0);
1046 emit_insn (gen_prefetch (op0, op1, op2));
1050 /* Don't do anything with direct references to volatile memory, but
1051 generate code to handle other side effects. */
1052 if (!MEM_P (op0) && side_effects_p (op0))
1056 /* Get a MEM rtx for expression EXP which is the address of an operand
1057 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1058 the maximum length of the block of memory that might be accessed or
/* NULL if unknown (comment truncated in this extraction).  Several lines of
   this function are elided here; the visible statements are not contiguous.  */
1062 get_memory_rtx (tree exp, tree len)
1064 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1065 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1067 /* Get an expression we can use to find the attributes to assign to MEM.
1068 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1069 we can. First remove any nops. */
1070 while (CONVERT_EXPR_P (exp)
1071 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1072 exp = TREE_OPERAND (exp, 0);
1074 if (TREE_CODE (exp) == ADDR_EXPR)
1075 exp = TREE_OPERAND (exp, 0);
1076 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1077 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1081 /* Honor attributes derived from exp, except for the alias set
1082 (as builtin stringops may alias with anything) and the size
1083 (as stringops may access multiple array elements). */
1086 set_mem_attributes (mem, exp, 0);
1088 /* Allow the string and memory builtins to overflow from one
1089 field into another, see http://gcc.gnu.org/PR23561.
1090 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1091 memory accessed by the string or memory builtin will fit
1092 within the field. */
1093 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1095 tree mem_expr = MEM_EXPR (mem);
1096 HOST_WIDE_INT offset = -1, length = -1;
/* Strip array indexing and conversions to reach the innermost
   COMPONENT_REF.  */
1099 while (TREE_CODE (inner) == ARRAY_REF
1100 || CONVERT_EXPR_P (inner)
1101 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1102 || TREE_CODE (inner) == SAVE_EXPR)
1103 inner = TREE_OPERAND (inner, 0);
1105 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1107 if (MEM_OFFSET (mem)
1108 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1109 offset = INTVAL (MEM_OFFSET (mem));
/* LENGTH stays -1 (unknown) unless LEN is a host-representable constant.  */
1111 if (offset >= 0 && len && host_integerp (len, 0))
1112 length = tree_low_cst (len, 0);
1114 while (TREE_CODE (inner) == COMPONENT_REF)
1116 tree field = TREE_OPERAND (inner, 1);
1117 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1118 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1120 /* Bitfields are generally not byte-addressable. */
1121 gcc_assert (!DECL_BIT_FIELD (field)
1122 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1123 % BITS_PER_UNIT) == 0
1124 && host_integerp (DECL_SIZE (field), 0)
1125 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1126 % BITS_PER_UNIT) == 0));
1128 /* If we can prove that the memory starting at XEXP (mem, 0) and
1129 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1130 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1131 fields without DECL_SIZE_UNIT like flexible array members. */
1133 && DECL_SIZE_UNIT (field)
1134 && host_integerp (DECL_SIZE_UNIT (field), 0))
1137 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1140 && offset + length <= size)
1145 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1146 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1147 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
/* Walk outward one level of COMPONENT_REF and retry.  */
1155 mem_expr = TREE_OPERAND (mem_expr, 0);
1156 inner = TREE_OPERAND (inner, 0);
1159 if (mem_expr == NULL)
1161 if (mem_expr != MEM_EXPR (mem))
1163 set_mem_expr (mem, mem_expr);
1164 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
/* Stringops may alias anything and may span multiple objects: clear the
   alias set and drop the recorded size.  */
1167 set_mem_alias_set (mem, 0);
1168 set_mem_size (mem, NULL_RTX);
1174 /* Built-in functions to perform an untyped call and return. */
1176 /* For each register that may be used for calling a function, this
1177 gives a mode used to copy the register's value. VOIDmode indicates
1178 the register is not used for calling a function. If the machine
1179 has register windows, this gives only the outbound registers.
1180 INCOMING_REGNO gives the corresponding inbound register. */
1181 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1183 /* For each register that may be used for returning values, this gives
1184 a mode used to copy the register's value. VOIDmode indicates the
1185 register is not used for returning values. If the machine has
1186 register windows, this gives only the outbound registers.
1187 INCOMING_REGNO gives the corresponding inbound register. */
1188 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1190 /* For each register that may be used for calling a function, this
1191 gives the offset of that register into the block returned by
1192 __builtin_apply_args. 0 indicates that the register is not
1193 used for calling a function. */
/* These three tables are lazily filled in by apply_args_size /
   apply_result_size below and then read by the __builtin_apply family.  */
1194 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1196 /* Return the size required for the block returned by __builtin_apply_args,
1197 and initialize apply_args_mode. */
1200 apply_args_size (void)
/* Cached result; -1 means "not computed yet".  */
1202 static int size = -1;
1205 enum machine_mode mode;
1207 /* The values computed by this function never change. */
1210 /* The first value is the incoming arg-pointer. */
1211 size = GET_MODE_SIZE (Pmode);
1213 /* The second value is the structure value address unless this is
1214 passed as an "invisible" first argument. */
1215 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1216 size += GET_MODE_SIZE (Pmode);
1218 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1219 if (FUNCTION_ARG_REGNO_P (regno))
1221 mode = reg_raw_mode[regno];
1223 gcc_assert (mode != VOIDmode);
/* Round SIZE up to the register's natural alignment before
   recording its offset.  */
1225 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1226 if (size % align != 0)
1227 size = CEIL (size, align) * align;
1228 apply_args_reg_offset[regno] = size;
1229 size += GET_MODE_SIZE (mode);
1230 apply_args_mode[regno] = mode;
1234 apply_args_mode[regno] = VOIDmode;
1235 apply_args_reg_offset[regno] = 0;
1241 /* Return the size required for the block returned by __builtin_apply,
1242 and initialize apply_result_mode. */
1245 apply_result_size (void)
/* Cached result; -1 means "not computed yet".  */
1247 static int size = -1;
1249 enum machine_mode mode;
1251 /* The values computed by this function never change. */
1256 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1257 if (FUNCTION_VALUE_REGNO_P (regno))
1259 mode = reg_raw_mode[regno];
1261 gcc_assert (mode != VOIDmode);
1263 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1264 if (size % align != 0)
1265 size = CEIL (size, align) * align;
1266 size += GET_MODE_SIZE (mode);
1267 apply_result_mode[regno] = mode;
1270 apply_result_mode[regno] = VOIDmode;
1272 /* Allow targets that use untyped_call and untyped_return to override
1273 the size so that machine-specific information can be stored here. */
1274 #ifdef APPLY_RESULT_SIZE
1275 size = APPLY_RESULT_SIZE;
1281 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1282 /* Create a vector describing the result block RESULT. If SAVEP is true,
1283 the result block is used to save the values; otherwise it is used to
1284 restore the values. */
1287 result_vector (int savep, rtx result)
1289 int regno, size, align, nelts;
1290 enum machine_mode mode;
1292 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
/* Build one SET per live result register: reg->mem when saving,
   mem->reg when restoring, at the register's aligned offset.  */
1295 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1296 if ((mode = apply_result_mode[regno]) != VOIDmode)
1298 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1299 if (size % align != 0)
1300 size = CEIL (size, align) * align;
1301 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1302 mem = adjust_address (result, mode, size);
1303 savevec[nelts++] = (savep
1304 ? gen_rtx_SET (VOIDmode, mem, reg)
1305 : gen_rtx_SET (VOIDmode, reg, mem));
1306 size += GET_MODE_SIZE (mode);
1308 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1310 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1312 /* Save the state required to perform an untyped call with the same
1313 arguments as were passed to the current function. */
1316 expand_builtin_apply_args_1 (void)
1319 int size, align, regno;
1320 enum machine_mode mode;
1321 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1323 /* Create a block where the arg-pointer, structure value address,
1324 and argument registers can be saved. */
1325 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1327 /* Walk past the arg-pointer and structure value address. */
1328 size = GET_MODE_SIZE (Pmode);
1329 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1330 size += GET_MODE_SIZE (Pmode);
1332 /* Save each register used in calling a function to the block. */
1333 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1334 if ((mode = apply_args_mode[regno]) != VOIDmode)
1336 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1337 if (size % align != 0)
1338 size = CEIL (size, align) * align;
/* Use the inbound register number — we are saving what the caller
   passed in, which matters on register-window targets.  */
1340 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1342 emit_move_insn (adjust_address (registers, mode, size), tem);
1343 size += GET_MODE_SIZE (mode);
1346 /* Save the arg pointer to the block. */
1347 tem = copy_to_reg (virtual_incoming_args_rtx);
1348 #ifdef STACK_GROWS_DOWNWARD
1349 /* We need the pointer as the caller actually passed them to us, not
1350 as we might have pretended they were passed. Make sure it's a valid
1351 operand, as emit_move_insn isn't expected to handle a PLUS. */
1353 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1356 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1358 size = GET_MODE_SIZE (Pmode);
1360 /* Save the structure value address unless this is passed as an
1361 "invisible" first argument. */
1362 if (struct_incoming_value)
1364 emit_move_insn (adjust_address (registers, Pmode, size),
1365 copy_to_reg (struct_incoming_value))
1366 size += GET_MODE_SIZE (Pmode);
1369 /* Return the address of the block. */
1370 return copy_addr_to_reg (XEXP (registers, 0));
1373 /* __builtin_apply_args returns block of memory allocated on
1374 the stack into which is stored the arg pointer, structure
1375 value address, static chain, and all the registers that might
1376 possibly be used in performing a function call. The code is
1377 moved to the start of the function so the incoming values are
/* saved there (comment truncated in this extraction).  */
1381 expand_builtin_apply_args (void)
1383 /* Don't do __builtin_apply_args more than once in a function.
1384 Save the result of the first call and reuse it. */
1385 if (apply_args_value != 0)
1386 return apply_args_value;
1388 /* When this function is called, it means that registers must be
1389 saved on entry to this function. So we migrate the
1390 call to the first insn of this function. */
1395 temp = expand_builtin_apply_args_1 ();
1399 apply_args_value = temp;
1401 /* Put the insns after the NOTE that starts the function.
1402 If this is inside a start_sequence, make the outer-level insn
1403 chain current, so the code is placed at the start of the
1405 push_topmost_sequence ();
1406 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1407 pop_topmost_sequence ();
1412 /* Perform an untyped call and save the state required to perform an
1413 untyped return of whatever value was returned by the given function. */
/* NOTE(review): several lines of this function are elided in this
   extraction; the visible statements are not contiguous.  */
1416 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1418 int size, align, regno;
1419 enum machine_mode mode;
1420 rtx incoming_args, result, reg, dest, src, call_insn;
1421 rtx old_stack_level = 0;
1422 rtx call_fusage = 0;
1423 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1425 arguments = convert_memory_address (Pmode, arguments);
1427 /* Create a block where the return registers can be saved. */
1428 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1430 /* Fetch the arg pointer from the ARGUMENTS block. */
1431 incoming_args = gen_reg_rtx (Pmode);
1432 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1433 #ifndef STACK_GROWS_DOWNWARD
1434 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1435 incoming_args, 0, OPTAB_LIB_WIDEN);
1438 /* Push a new argument block and copy the arguments. Do not allow
1439 the (potential) memcpy call below to interfere with our stack
1441 do_pending_stack_adjust ();
1444 /* Save the stack with nonlocal if available. */
1445 #ifdef HAVE_save_stack_nonlocal
1446 if (HAVE_save_stack_nonlocal)
1447 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1450 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1452 /* Allocate a block of memory onto the stack and copy the memory
1453 arguments to the outgoing arguments address. */
1454 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1455 dest = virtual_outgoing_args_rtx;
1456 #ifndef STACK_GROWS_DOWNWARD
1457 if (GET_CODE (argsize) == CONST_INT)
1458 dest = plus_constant (dest, -INTVAL (argsize));
1460 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1462 dest = gen_rtx_MEM (BLKmode, dest);
1463 set_mem_align (dest, PARM_BOUNDARY);
1464 src = gen_rtx_MEM (BLKmode, incoming_args);
1465 set_mem_align (src, PARM_BOUNDARY);
1466 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1468 /* Refer to the argument block. */
1470 arguments = gen_rtx_MEM (BLKmode, arguments);
1471 set_mem_align (arguments, PARM_BOUNDARY);
1473 /* Walk past the arg-pointer and structure value address. */
1474 size = GET_MODE_SIZE (Pmode);
1476 size += GET_MODE_SIZE (Pmode);
1478 /* Restore each of the registers previously saved. Make USE insns
1479 for each of these registers for use in making the call. */
1480 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1481 if ((mode = apply_args_mode[regno]) != VOIDmode)
1483 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1484 if (size % align != 0)
1485 size = CEIL (size, align) * align;
1486 reg = gen_rtx_REG (mode, regno);
1487 emit_move_insn (reg, adjust_address (arguments, mode, size));
1488 use_reg (&call_fusage, reg);
1489 size += GET_MODE_SIZE (mode);
1492 /* Restore the structure value address unless this is passed as an
1493 "invisible" first argument. */
1494 size = GET_MODE_SIZE (Pmode);
1497 rtx value = gen_reg_rtx (Pmode);
1498 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1499 emit_move_insn (struct_value, value);
1500 if (REG_P (struct_value))
1501 use_reg (&call_fusage, struct_value);
1502 size += GET_MODE_SIZE (Pmode);
1505 /* All arguments and registers used for the call are set up by now! */
1506 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1508 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1509 and we don't want to load it into a register as an optimization,
1510 because prepare_call_address already did it if it should be done. */
1511 if (GET_CODE (function) != SYMBOL_REF)
1512 function = memory_address (FUNCTION_MODE, function);
1514 /* Generate the actual call instruction and save the return value. */
1515 #ifdef HAVE_untyped_call
1516 if (HAVE_untyped_call)
1517 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1518 result, result_vector (1, result)));
1521 #ifdef HAVE_call_value
1522 if (HAVE_call_value)
1526 /* Locate the unique return register. It is not possible to
1527 express a call that sets more than one return register using
1528 call_value; use untyped_call for that. In fact, untyped_call
1529 only needs to save the return registers in the given block. */
1530 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1531 if ((mode = apply_result_mode[regno]) != VOIDmode)
1533 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1535 valreg = gen_rtx_REG (mode, regno);
1538 emit_call_insn (GEN_CALL_VALUE (valreg,
1539 gen_rtx_MEM (FUNCTION_MODE, function),
1540 const0_rtx, NULL_RTX, const0_rtx));
1542 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1548 /* Find the CALL insn we just emitted, and attach the register usage
1550 call_insn = last_call_insn ();
1551 add_function_usage_to (call_insn, call_fusage);
1553 /* Restore the stack. */
1554 #ifdef HAVE_save_stack_nonlocal
1555 if (HAVE_save_stack_nonlocal)
1556 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1559 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1563 /* Return the address of the result block. */
1564 result = copy_addr_to_reg (XEXP (result, 0));
1565 return convert_memory_address (ptr_mode, result);
1568 /* Perform an untyped return. */
/* RESULT is the address of a block laid out by apply_result_size.  */
1571 expand_builtin_return (rtx result)
1573 int size, align, regno;
1574 enum machine_mode mode;
1576 rtx call_fusage = 0;
1578 result = convert_memory_address (Pmode, result);
/* Ensure apply_result_mode[] is initialized before we read it below.  */
1580 apply_result_size ();
1581 result = gen_rtx_MEM (BLKmode, result);
1583 #ifdef HAVE_untyped_return
1584 if (HAVE_untyped_return)
1586 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1592 /* Restore the return value and note that each value is used. */
1594 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1595 if ((mode = apply_result_mode[regno]) != VOIDmode)
1597 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1598 if (size % align != 0)
1599 size = CEIL (size, align) * align;
1600 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1601 emit_move_insn (reg, adjust_address (result, mode, size));
1603 push_to_sequence (call_fusage);
1605 call_fusage = get_insns ();
1607 size += GET_MODE_SIZE (mode);
1610 /* Put the USE insns before the return. */
1611 emit_insn (call_fusage);
1613 /* Return whatever values was restored by jumping directly to the end
1615 expand_naked_return ();
1618 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
/* Map a tree type code to the __builtin_classify_type enum value.
   NOTE(review): the UNION_TYPE case (original line 1637) is elided in this
   extraction.  */
1620 static enum type_class
1621 type_to_class (tree type)
1623 switch (TREE_CODE (type))
1625 case VOID_TYPE: return void_type_class;
1626 case INTEGER_TYPE: return integer_type_class;
1627 case ENUMERAL_TYPE: return enumeral_type_class;
1628 case BOOLEAN_TYPE: return boolean_type_class;
1629 case POINTER_TYPE: return pointer_type_class;
1630 case REFERENCE_TYPE: return reference_type_class;
1631 case OFFSET_TYPE: return offset_type_class;
1632 case REAL_TYPE: return real_type_class;
1633 case COMPLEX_TYPE: return complex_type_class;
1634 case FUNCTION_TYPE: return function_type_class;
1635 case METHOD_TYPE: return method_type_class;
1636 case RECORD_TYPE: return record_type_class;
1638 case QUAL_UNION_TYPE: return union_type_class;
1639 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1640 ? string_type_class : array_type_class);
1641 case LANG_TYPE: return lang_type_class;
1642 default: return no_type_class;
1646 /* Expand a call EXP to __builtin_classify_type. */
/* With no argument the result is no_type_class; otherwise classify the
   static type of the first argument.  */
1649 expand_builtin_classify_type (tree exp)
1651 if (call_expr_nargs (exp))
1652 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1653 return GEN_INT (no_type_class);
1656 /* This helper macro, meant to be used in mathfn_built_in below,
1657 determines which among a set of three builtin math functions is
1658 appropriate for a given type mode. The `F' and `L' cases are
1659 automatically generated from the `double' case. */
1660 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1661 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1662 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1663 fcodel = BUILT_IN_MATHFN##L ; break;
1664 /* Similar to above, but appends _R after any F/L suffix. */
/* Used for the reentrant variants such as lgamma_r.  */
1665 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1666 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1667 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1668 fcodel = BUILT_IN_MATHFN##L_R ; break;
1670 /* Return mathematic function equivalent to FN but operating directly
1671 on TYPE, if available. If IMPLICIT is true find the function in
1672 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1673 can't do the conversion, return zero. */
1676 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1678 tree const *const fn_arr
1679 = implicit ? implicit_built_in_decls : built_in_decls;
1680 enum built_in_function fcode, fcodef, fcodel;
/* Each CASE_MATHFN expands to the double/float/long-double triple for
   one math builtin; the switch sets fcode/fcodef/fcodel accordingly.  */
1684 CASE_MATHFN (BUILT_IN_ACOS)
1685 CASE_MATHFN (BUILT_IN_ACOSH)
1686 CASE_MATHFN (BUILT_IN_ASIN)
1687 CASE_MATHFN (BUILT_IN_ASINH)
1688 CASE_MATHFN (BUILT_IN_ATAN)
1689 CASE_MATHFN (BUILT_IN_ATAN2)
1690 CASE_MATHFN (BUILT_IN_ATANH)
1691 CASE_MATHFN (BUILT_IN_CBRT)
1692 CASE_MATHFN (BUILT_IN_CEIL)
1693 CASE_MATHFN (BUILT_IN_CEXPI)
1694 CASE_MATHFN (BUILT_IN_COPYSIGN)
1695 CASE_MATHFN (BUILT_IN_COS)
1696 CASE_MATHFN (BUILT_IN_COSH)
1697 CASE_MATHFN (BUILT_IN_DREM)
1698 CASE_MATHFN (BUILT_IN_ERF)
1699 CASE_MATHFN (BUILT_IN_ERFC)
1700 CASE_MATHFN (BUILT_IN_EXP)
1701 CASE_MATHFN (BUILT_IN_EXP10)
1702 CASE_MATHFN (BUILT_IN_EXP2)
1703 CASE_MATHFN (BUILT_IN_EXPM1)
1704 CASE_MATHFN (BUILT_IN_FABS)
1705 CASE_MATHFN (BUILT_IN_FDIM)
1706 CASE_MATHFN (BUILT_IN_FLOOR)
1707 CASE_MATHFN (BUILT_IN_FMA)
1708 CASE_MATHFN (BUILT_IN_FMAX)
1709 CASE_MATHFN (BUILT_IN_FMIN)
1710 CASE_MATHFN (BUILT_IN_FMOD)
1711 CASE_MATHFN (BUILT_IN_FREXP)
1712 CASE_MATHFN (BUILT_IN_GAMMA)
1713 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1714 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1715 CASE_MATHFN (BUILT_IN_HYPOT)
1716 CASE_MATHFN (BUILT_IN_ILOGB)
1717 CASE_MATHFN (BUILT_IN_INF)
1718 CASE_MATHFN (BUILT_IN_ISINF)
1719 CASE_MATHFN (BUILT_IN_J0)
1720 CASE_MATHFN (BUILT_IN_J1)
1721 CASE_MATHFN (BUILT_IN_JN)
1722 CASE_MATHFN (BUILT_IN_LCEIL)
1723 CASE_MATHFN (BUILT_IN_LDEXP)
1724 CASE_MATHFN (BUILT_IN_LFLOOR)
1725 CASE_MATHFN (BUILT_IN_LGAMMA)
1726 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1727 CASE_MATHFN (BUILT_IN_LLCEIL)
1728 CASE_MATHFN (BUILT_IN_LLFLOOR)
1729 CASE_MATHFN (BUILT_IN_LLRINT)
1730 CASE_MATHFN (BUILT_IN_LLROUND)
1731 CASE_MATHFN (BUILT_IN_LOG)
1732 CASE_MATHFN (BUILT_IN_LOG10)
1733 CASE_MATHFN (BUILT_IN_LOG1P)
1734 CASE_MATHFN (BUILT_IN_LOG2)
1735 CASE_MATHFN (BUILT_IN_LOGB)
1736 CASE_MATHFN (BUILT_IN_LRINT)
1737 CASE_MATHFN (BUILT_IN_LROUND)
1738 CASE_MATHFN (BUILT_IN_MODF)
1739 CASE_MATHFN (BUILT_IN_NAN)
1740 CASE_MATHFN (BUILT_IN_NANS)
1741 CASE_MATHFN (BUILT_IN_NEARBYINT)
1742 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1743 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1744 CASE_MATHFN (BUILT_IN_POW)
1745 CASE_MATHFN (BUILT_IN_POWI)
1746 CASE_MATHFN (BUILT_IN_POW10)
1747 CASE_MATHFN (BUILT_IN_REMAINDER)
1748 CASE_MATHFN (BUILT_IN_REMQUO)
1749 CASE_MATHFN (BUILT_IN_RINT)
1750 CASE_MATHFN (BUILT_IN_ROUND)
1751 CASE_MATHFN (BUILT_IN_SCALB)
1752 CASE_MATHFN (BUILT_IN_SCALBLN)
1753 CASE_MATHFN (BUILT_IN_SCALBN)
1754 CASE_MATHFN (BUILT_IN_SIGNBIT)
1755 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1756 CASE_MATHFN (BUILT_IN_SIN)
1757 CASE_MATHFN (BUILT_IN_SINCOS)
1758 CASE_MATHFN (BUILT_IN_SINH)
1759 CASE_MATHFN (BUILT_IN_SQRT)
1760 CASE_MATHFN (BUILT_IN_TAN)
1761 CASE_MATHFN (BUILT_IN_TANH)
1762 CASE_MATHFN (BUILT_IN_TGAMMA)
1763 CASE_MATHFN (BUILT_IN_TRUNC)
1764 CASE_MATHFN (BUILT_IN_Y0)
1765 CASE_MATHFN (BUILT_IN_Y1)
1766 CASE_MATHFN (BUILT_IN_YN)
/* Select the variant matching TYPE's main variant; unsupported types
   fall through (return-zero path elided in this extraction).  */
1772 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1773 return fn_arr[fcode];
1774 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1775 return fn_arr[fcodef];
1776 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1777 return fn_arr[fcodel];
1782 /* Like mathfn_built_in_1(), but always use the implicit array. */
1785 mathfn_built_in (tree type, enum built_in_function fn)
1787 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1790 /* If errno must be maintained, expand the RTL to check if the result,
1791 TARGET, of a built-in function call, EXP, is NaN, and if so set
/* errno to EDOM (comment truncated in this extraction).  */
1795 expand_errno_check (tree exp, rtx target)
1797 rtx lab = gen_label_rtx ();
1799 /* Test the result; if it is NaN, set errno=EDOM because
1800 the argument was not in the domain. */
/* A NaN compares unequal to itself, so TARGET == TARGET is false
   exactly when TARGET is NaN.  */
1801 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1805 /* If this built-in doesn't throw an exception, set errno directly. */
1806 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1808 #ifdef GEN_ERRNO_RTX
1809 rtx errno_rtx = GEN_ERRNO_RTX;
1812 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1814 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1820 /* Make sure the library call isn't expanded as a tail call. */
1821 CALL_EXPR_TAILCALL (exp) = 0;
1823 /* We can't set errno=EDOM directly; let the library call do it.
1824 Pop the arguments right away in case the call gets deleted. */
1826 expand_call (exp, target, 0);
1831 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1832 Return NULL_RTX if a normal call should be emitted rather than expanding
1833 the function in-line. EXP is the expression that is a call to the builtin
1834 function; if convenient, the result should be placed in TARGET.
1835 SUBTARGET may be used as the target for computing one of EXP's operands. */
/* NOTE(review): lines are elided in this extraction; the visible
   statements are not contiguous.  */
1838 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1840 optab builtin_optab;
1841 rtx op0, insns, before_call;
1842 tree fndecl = get_callee_fndecl (exp);
1843 enum machine_mode mode;
1844 bool errno_set = false;
1847 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1850 arg = CALL_EXPR_ARG (exp, 0);
/* Pick the optab for this builtin, and note whether the libm function
   may set errno (so we must emit the errno check below).  */
1852 switch (DECL_FUNCTION_CODE (fndecl))
1854 CASE_FLT_FN (BUILT_IN_SQRT):
1855 errno_set = ! tree_expr_nonnegative_p (arg);
1856 builtin_optab = sqrt_optab;
1858 CASE_FLT_FN (BUILT_IN_EXP):
1859 errno_set = true; builtin_optab = exp_optab; break;
1860 CASE_FLT_FN (BUILT_IN_EXP10):
1861 CASE_FLT_FN (BUILT_IN_POW10):
1862 errno_set = true; builtin_optab = exp10_optab; break;
1863 CASE_FLT_FN (BUILT_IN_EXP2):
1864 errno_set = true; builtin_optab = exp2_optab; break;
1865 CASE_FLT_FN (BUILT_IN_EXPM1):
1866 errno_set = true; builtin_optab = expm1_optab; break;
1867 CASE_FLT_FN (BUILT_IN_LOGB):
1868 errno_set = true; builtin_optab = logb_optab; break;
1869 CASE_FLT_FN (BUILT_IN_LOG):
1870 errno_set = true; builtin_optab = log_optab; break;
1871 CASE_FLT_FN (BUILT_IN_LOG10):
1872 errno_set = true; builtin_optab = log10_optab; break;
1873 CASE_FLT_FN (BUILT_IN_LOG2):
1874 errno_set = true; builtin_optab = log2_optab; break;
1875 CASE_FLT_FN (BUILT_IN_LOG1P):
1876 errno_set = true; builtin_optab = log1p_optab; break;
1877 CASE_FLT_FN (BUILT_IN_ASIN):
1878 builtin_optab = asin_optab; break;
1879 CASE_FLT_FN (BUILT_IN_ACOS):
1880 builtin_optab = acos_optab; break;
1881 CASE_FLT_FN (BUILT_IN_TAN):
1882 builtin_optab = tan_optab; break;
1883 CASE_FLT_FN (BUILT_IN_ATAN):
1884 builtin_optab = atan_optab; break;
1885 CASE_FLT_FN (BUILT_IN_FLOOR):
1886 builtin_optab = floor_optab; break;
1887 CASE_FLT_FN (BUILT_IN_CEIL):
1888 builtin_optab = ceil_optab; break;
1889 CASE_FLT_FN (BUILT_IN_TRUNC):
1890 builtin_optab = btrunc_optab; break;
1891 CASE_FLT_FN (BUILT_IN_ROUND):
1892 builtin_optab = round_optab; break;
1893 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1894 builtin_optab = nearbyint_optab;
1895 if (flag_trapping_math)
1897 /* Else fallthrough and expand as rint. */
1898 CASE_FLT_FN (BUILT_IN_RINT):
1899 builtin_optab = rint_optab; break;
1904 /* Make a suitable register to place result in. */
1905 mode = TYPE_MODE (TREE_TYPE (exp));
1907 if (! flag_errno_math || ! HONOR_NANS (mode))
1910 /* Before working hard, check whether the instruction is available. */
1911 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1913 target = gen_reg_rtx (mode);
1915 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1916 need to expand the argument again. This way, we will not perform
1917 side-effects more the once. */
1918 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1920 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1924 /* Compute into TARGET.
1925 Set TARGET to wherever the result comes back. */
1926 target = expand_unop (mode, builtin_optab, op0, target, 0);
1931 expand_errno_check (exp, target);
1933 /* Output the entire sequence. */
1934 insns = get_insns ();
1940 /* If we were unable to expand via the builtin, stop the sequence
1941 (without outputting the insns) and call to the library function
1942 with the stabilized argument list. */
1946 before_call = get_last_insn ();
1948 target = expand_call (exp, target, target == const0_rtx);
1950 /* If this is a sqrt operation and we don't care about errno, try to
1951 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1952 This allows the semantics of the libcall to be visible to the RTL
1954 if (builtin_optab == sqrt_optab && !errno_set)
1956 /* Search backwards through the insns emitted by expand_call looking
1957 for the instruction with the REG_RETVAL note. */
1958 rtx last = get_last_insn ();
1959 while (last != before_call)
1961 if (find_reg_note (last, REG_RETVAL, NULL))
1963 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1964 /* Check that the REQ_EQUAL note is an EXPR_LIST with
1965 two elements, i.e. symbol_ref(sqrt) and the operand. */
1967 && GET_CODE (note) == EXPR_LIST
1968 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1969 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1970 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX
1972 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1973 /* Check operand is a register with expected mode. */
1976 && GET_MODE (operand) == mode)
1978 /* Replace the REG_EQUAL note with a SQRT rtx. */
1979 rtx equiv = gen_rtx_SQRT (mode, operand);
1980 set_unique_reg_note (last, REG_EQUAL, equiv);
1985 last = PREV_INSN (last);
1992 /* Expand a call to the builtin binary math functions (pow and atan2).
1993 Return NULL_RTX if a normal call should be emitted rather than expanding the
1994 function in-line. EXP is the expression that is a call to the builtin
1995 function; if convenient, the result should be placed in TARGET.
1996 SUBTARGET may be used as the target for computing one of EXP's
/* operands (comment truncated in this extraction).  */
2000 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2002 optab builtin_optab;
2003 rtx op0, op1, insns;
2004 int op1_type = REAL_TYPE;
2005 tree fndecl = get_callee_fndecl (exp);
2007 enum machine_mode mode;
2008 bool errno_set = true;
/* ldexp/scalbn/scalbln take an integer second argument; everything
   else here takes two reals.  */
2010 switch (DECL_FUNCTION_CODE (fndecl))
2012 CASE_FLT_FN (BUILT_IN_SCALBN):
2013 CASE_FLT_FN (BUILT_IN_SCALBLN):
2014 CASE_FLT_FN (BUILT_IN_LDEXP):
2015 op1_type = INTEGER_TYPE;
2020 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2023 arg0 = CALL_EXPR_ARG (exp, 0);
2024 arg1 = CALL_EXPR_ARG (exp, 1);
2026 switch (DECL_FUNCTION_CODE (fndecl))
2028 CASE_FLT_FN (BUILT_IN_POW):
2029 builtin_optab = pow_optab; break;
2030 CASE_FLT_FN (BUILT_IN_ATAN2):
2031 builtin_optab = atan2_optab; break;
2032 CASE_FLT_FN (BUILT_IN_SCALB):
/* scalb semantics only match the optab for radix-2 formats.  */
2033 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2035 builtin_optab = scalb_optab; break;
2036 CASE_FLT_FN (BUILT_IN_SCALBN):
2037 CASE_FLT_FN (BUILT_IN_SCALBLN):
2038 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2040 /* Fall through... */
2041 CASE_FLT_FN (BUILT_IN_LDEXP):
2042 builtin_optab = ldexp_optab; break;
2043 CASE_FLT_FN (BUILT_IN_FMOD):
2044 builtin_optab = fmod_optab; break;
2045 CASE_FLT_FN (BUILT_IN_REMAINDER):
2046 CASE_FLT_FN (BUILT_IN_DREM):
2047 builtin_optab = remainder_optab; break;
2052 /* Make a suitable register to place result in. */
2053 mode = TYPE_MODE (TREE_TYPE (exp));
2055 /* Before working hard, check whether the instruction is available. */
2056 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2059 target = gen_reg_rtx (mode);
2061 if (! flag_errno_math || ! HONOR_NANS (mode))
2064 /* Always stabilize the argument list. */
2065 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2066 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2068 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2069 op1 = expand_normal (arg1);
2073 /* Compute into TARGET.
2074 Set TARGET to wherever the result comes back. */
2075 target = expand_binop (mode, builtin_optab, op0, op1,
2076 target, 0, OPTAB_DIRECT);
2078 /* If we were unable to expand via the builtin, stop the sequence
2079 (without outputting the insns) and call to the library function
2080 with the stabilized argument list. */
2084 return expand_call (exp, target, target == const0_rtx);
2088 expand_errno_check (exp, target);
2090 /* Output the entire sequence. */
2091 insns = get_insns ();
2098 /* Expand a call to the builtin sin and cos math functions.
2099 Return NULL_RTX if a normal call should be emitted rather than expanding the
2100 function in-line. EXP is the expression that is a call to the builtin
2101 function; if convenient, the result should be placed in TARGET.
2102 SUBTARGET may be used as the target for computing one of EXP's
2106 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
/* NOTE(review): this extract is missing several original lines (braces,
   declarations, returns); code kept byte-identical, comments only.  */
2108 optab builtin_optab;
2110 tree fndecl = get_callee_fndecl (exp);
2111 enum machine_mode mode;
/* Only a single real argument is handled here.  */
2114 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2117 arg = CALL_EXPR_ARG (exp, 0);
/* Prefer the combined sincos optab for both sin and cos.  */
2119 switch (DECL_FUNCTION_CODE (fndecl))
2121 CASE_FLT_FN (BUILT_IN_SIN):
2122 CASE_FLT_FN (BUILT_IN_COS):
2123 builtin_optab = sincos_optab; break;
2128 /* Make a suitable register to place result in. */
2129 mode = TYPE_MODE (TREE_TYPE (exp));
2131 /* Check if sincos insn is available, otherwise fallback
2132 to sin or cos insn. */
2133 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2134 switch (DECL_FUNCTION_CODE (fndecl))
2136 CASE_FLT_FN (BUILT_IN_SIN):
2137 builtin_optab = sin_optab; break;
2138 CASE_FLT_FN (BUILT_IN_COS):
2139 builtin_optab = cos_optab; break;
2144 /* Before working hard, check whether the instruction is available. */
2145 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2147 target = gen_reg_rtx (mode);
2149 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2150 need to expand the argument again. This way, we will not perform
2151 side-effects more the once. */
2152 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2154 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2158 /* Compute into TARGET.
2159 Set TARGET to wherever the result comes back. */
2160 if (builtin_optab == sincos_optab)
/* sincos produces two values; select which output slot receives TARGET
   depending on whether sin or cos was requested (the other is discarded).  */
2164 switch (DECL_FUNCTION_CODE (fndecl))
2166 CASE_FLT_FN (BUILT_IN_SIN):
2167 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2169 CASE_FLT_FN (BUILT_IN_COS):
2170 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2175 gcc_assert (result);
/* Plain sin/cos optab: a single-output unary expansion suffices.  */
2179 target = expand_unop (mode, builtin_optab, op0, target, 0);
2184 /* Output the entire sequence. */
2185 insns = get_insns ();
2191 /* If we were unable to expand via the builtin, stop the sequence
2192 (without outputting the insns) and call to the library function
2193 with the stabilized argument list. */
2197 target = expand_call (exp, target, target == const0_rtx);
2202 /* Expand a call to one of the builtin math functions that operate on
2203 floating point argument and output an integer result (ilogb, isinf,
2205 Return 0 if a normal call should be emitted rather than expanding the
2206 function in-line. EXP is the expression that is a call to the builtin
2207 function; if convenient, the result should be placed in TARGET.
2208 SUBTARGET may be used as the target for computing one of EXP's operands. */
2211 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
/* NOTE(review): some original lines are missing from this extract (braces,
   buf/r declarations, returns); code kept byte-identical, comments only.  */
2213 optab builtin_optab = 0;
2214 enum insn_code icode = CODE_FOR_nothing;
2216 tree fndecl = get_callee_fndecl (exp);
2217 enum machine_mode mode;
/* Only ilogb sets errno among the functions handled here.  */
2218 bool errno_set = false;
2221 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2224 arg = CALL_EXPR_ARG (exp, 0);
2226 switch (DECL_FUNCTION_CODE (fndecl))
2228 CASE_FLT_FN (BUILT_IN_ILOGB):
2229 errno_set = true; builtin_optab = ilogb_optab; break;
2230 CASE_FLT_FN (BUILT_IN_ISINF):
2231 builtin_optab = isinf_optab; break;
2232 case BUILT_IN_ISNORMAL:
2233 case BUILT_IN_ISFINITE:
2234 CASE_FLT_FN (BUILT_IN_FINITE):
2235 /* These builtins have no optabs (yet). */
2241 /* There's no easy way to detect the case we need to set EDOM. */
2242 if (flag_errno_math && errno_set)
2245 /* Optab mode depends on the mode of the input argument. */
2246 mode = TYPE_MODE (TREE_TYPE (arg));
/* builtin_optab may be 0 here; presumably the optab lookup is guarded —
   the guard line is missing from this extract, TODO confirm.  */
2249 icode = optab_handler (builtin_optab, mode)->insn_code;
2251 /* Before working hard, check whether the instruction is available. */
2252 if (icode != CODE_FOR_nothing)
2254 /* Make a suitable register to place result in. */
2256 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2257 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
/* The fresh pseudo must satisfy the insn's output predicate.  */
2259 gcc_assert (insn_data[icode].operand[0].predicate
2260 (target, GET_MODE (target)));
2262 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2263 need to expand the argument again. This way, we will not perform
2264 side-effects more the once. */
2265 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2267 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2269 if (mode != GET_MODE (op0))
2270 op0 = convert_to_mode (mode, op0, 0);
2272 /* Compute into TARGET.
2273 Set TARGET to wherever the result comes back. */
2274 emit_unop_insn (icode, target, op0, UNKNOWN);
2278 /* If there is no optab, try generic code. */
2279 switch (DECL_FUNCTION_CODE (fndecl))
2283 CASE_FLT_FN (BUILT_IN_ISINF):
2285 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2286 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2287 tree const type = TREE_TYPE (arg);
/* Build the largest finite value of MODE as a string, then a real.  */
2291 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2292 real_from_string (&r, buf);
2293 result = build_call_expr (isgr_fn, 2,
2294 fold_build1 (ABS_EXPR, type, arg),
2295 build_real (type, r));
2296 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2298 CASE_FLT_FN (BUILT_IN_FINITE):
2299 case BUILT_IN_ISFINITE:
2301 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2302 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2303 tree const type = TREE_TYPE (arg);
2307 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2308 real_from_string (&r, buf);
2309 result = build_call_expr (isle_fn, 2,
2310 fold_build1 (ABS_EXPR, type, arg),
2311 build_real (type, r));
2312 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2314 case BUILT_IN_ISNORMAL:
2316 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2317 islessequal(fabs(x),DBL_MAX). */
2318 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2319 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2320 tree const type = TREE_TYPE (arg);
2321 REAL_VALUE_TYPE rmax, rmin;
2324 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2325 real_from_string (&rmax, buf);
/* Smallest normalized value: 2**(emin-1) in hex-float notation.  */
2326 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2327 real_from_string (&rmin, buf);
/* Save fabs(arg) once since it is used by both comparisons below.  */
2328 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2329 result = build_call_expr (isle_fn, 2, arg,
2330 build_real (type, rmax));
2331 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2332 build_call_expr (isge_fn, 2, arg,
2333 build_real (type, rmin)));
2334 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
/* Fallback: emit a normal library call.  */
2340 target = expand_call (exp, target, target == const0_rtx);
2345 /* Expand a call to the builtin sincos math function.
2346 Return NULL_RTX if a normal call should be emitted rather than expanding the
2347 function in-line. EXP is the expression that is a call to the builtin
2351 expand_builtin_sincos (tree exp)
/* NOTE(review): a few original lines are missing from this extract
   (braces, result declaration, returns); code kept byte-identical.  */
2353 rtx op0, op1, op2, target1, target2;
2354 enum machine_mode mode;
2355 tree arg, sinp, cosp;
/* sincos (real, real *sinp, real *cosp).  */
2358 if (!validate_arglist (exp, REAL_TYPE,
2359 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2362 arg = CALL_EXPR_ARG (exp, 0);
2363 sinp = CALL_EXPR_ARG (exp, 1);
2364 cosp = CALL_EXPR_ARG (exp, 2);
2366 /* Make a suitable register to place result in. */
2367 mode = TYPE_MODE (TREE_TYPE (arg));
2369 /* Check if sincos insn is available, otherwise emit the call. */
2370 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2373 target1 = gen_reg_rtx (mode);
2374 target2 = gen_reg_rtx (mode);
2376 op0 = expand_normal (arg);
/* op1/op2 are the memory locations *sinp and *cosp.  */
2377 op1 = expand_normal (build_fold_indirect_ref (sinp));
2378 op2 = expand_normal (build_fold_indirect_ref (cosp));
2380 /* Compute into target1 and target2.
2381 Set TARGET to wherever the result comes back. */
2382 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2383 gcc_assert (result);
2385 /* Move target1 and target2 to the memory locations indicated
2387 emit_move_insn (op1, target1);
2388 emit_move_insn (op2, target2);
2393 /* Expand a call to the internal cexpi builtin to the sincos math function.
2394 EXP is the expression that is a call to the builtin function; if convenient,
2395 the result should be placed in TARGET. SUBTARGET may be used as the target
2396 for computing one of EXP's operands. */
2399 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
/* NOTE(review): original lines are missing from this extract (braces,
   declarations of op0/op1/op2/type, gcc_unreachable arms, returns);
   code kept byte-identical, comments only.  */
2401 tree fndecl = get_callee_fndecl (exp);
2403 enum machine_mode mode;
2406 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2409 arg = CALL_EXPR_ARG (exp, 0);
2410 type = TREE_TYPE (arg);
2411 mode = TYPE_MODE (TREE_TYPE (arg));
2413 /* Try expanding via a sincos optab, fall back to emitting a libcall
2414 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2415 is only generated from sincos, cexp or if we have either of them. */
2416 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2418 op1 = gen_reg_rtx (mode);
2419 op2 = gen_reg_rtx (mode);
2421 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2423 /* Compute into op1 and op2. */
2424 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2426 else if (TARGET_HAS_SINCOS)
2428 tree call, fn = NULL_TREE;
/* Pick the float/double/long double sincos decl matching the cexpi
   variant being expanded.  */
2432 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2433 fn = built_in_decls[BUILT_IN_SINCOSF];
2434 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2435 fn = built_in_decls[BUILT_IN_SINCOS];
2436 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2437 fn = built_in_decls[BUILT_IN_SINCOSL];
/* Create stack temporaries for the sin and cos results and pass
   their addresses (as trees) to the sincos call.  */
2441 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2442 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2443 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2444 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2445 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2446 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2448 /* Make sure not to fold the sincos call again. */
2449 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2450 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2451 call, 3, arg, top1, top2));
/* Final fallback: lower cexpi(x) to cexp(I*x).  */
2455 tree call, fn = NULL_TREE, narg;
2456 tree ctype = build_complex_type (type);
2458 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2459 fn = built_in_decls[BUILT_IN_CEXPF];
2460 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2461 fn = built_in_decls[BUILT_IN_CEXP];
2462 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2463 fn = built_in_decls[BUILT_IN_CEXPL];
2467 /* If we don't have a decl for cexp create one. This is the
2468 friendliest fallback if the user calls __builtin_cexpi
2469 without full target C99 function support. */
2470 if (fn == NULL_TREE)
2473 const char *name = NULL;
/* The name assignments ("cexpf"/"cexp"/"cexpl") are missing from
   this extract; presumably set per variant — TODO confirm.  */
2475 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2477 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2479 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2482 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2483 fn = build_fn_decl (name, fntype);
/* Build the complex argument 0 + arg*I.  */
2486 narg = fold_build2 (COMPLEX_EXPR, ctype,
2487 build_real (type, dconst0), arg);
2489 /* Make sure not to fold the cexp call again. */
2490 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2491 return expand_expr (build_call_nary (ctype, call, 1, narg),
2492 target, VOIDmode, EXPAND_NORMAL);
2495 /* Now build the proper return type. */
2496 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2497 make_tree (TREE_TYPE (arg), op2),
2498 make_tree (TREE_TYPE (arg), op1)),
2499 target, VOIDmode, EXPAND_NORMAL);
2502 /* Expand a call to one of the builtin rounding functions gcc defines
2503 as an extension (lfloor and lceil). As these are gcc extensions we
2504 do not need to worry about setting errno to EDOM.
2505 If expanding via optab fails, lower expression to (int)(floor(x)).
2506 EXP is the expression that is a call to the builtin function;
2507 if convenient, the result should be placed in TARGET. SUBTARGET may
2508 be used as the target for computing one of EXP's operands. */
2511 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
/* NOTE(review): original lines are missing from this extract (braces,
   gcc_unreachable arms, name string assignments, returns); code kept
   byte-identical, comments only.  */
2513 convert_optab builtin_optab;
2514 rtx op0, insns, tmp;
2515 tree fndecl = get_callee_fndecl (exp);
2516 enum built_in_function fallback_fn;
2517 tree fallback_fndecl;
2518 enum machine_mode mode;
2521 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2524 arg = CALL_EXPR_ARG (exp, 0);
/* Choose the conversion optab and the pure-FP builtin to fall back on.  */
2526 switch (DECL_FUNCTION_CODE (fndecl))
2528 CASE_FLT_FN (BUILT_IN_LCEIL):
2529 CASE_FLT_FN (BUILT_IN_LLCEIL):
2530 builtin_optab = lceil_optab;
2531 fallback_fn = BUILT_IN_CEIL;
2534 CASE_FLT_FN (BUILT_IN_LFLOOR):
2535 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2536 builtin_optab = lfloor_optab;
2537 fallback_fn = BUILT_IN_FLOOR;
2544 /* Make a suitable register to place result in. */
2545 mode = TYPE_MODE (TREE_TYPE (exp));
2547 target = gen_reg_rtx (mode);
2549 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2550 need to expand the argument again. This way, we will not perform
2551 side-effects more the once. */
2552 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2554 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2558 /* Compute into TARGET. */
2559 if (expand_sfix_optab (target, op0, builtin_optab))
2561 /* Output the entire sequence. */
2562 insns = get_insns ();
2568 /* If we were unable to expand via the builtin, stop the sequence
2569 (without outputting the insns). */
2572 /* Fall back to floating point rounding optab. */
2573 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2575 /* For non-C99 targets we may end up without a fallback fndecl here
2576 if the user called __builtin_lfloor directly. In this case emit
2577 a call to the floor/ceil variants nevertheless. This should result
2578 in the best user experience for not full C99 targets. */
2579 if (fallback_fndecl == NULL_TREE)
2582 const char *name = NULL;
/* The string assignments ("ceil", "ceilf", ... "floorl") are missing
   from this extract; each case presumably sets NAME — TODO confirm.  */
2584 switch (DECL_FUNCTION_CODE (fndecl))
2586 case BUILT_IN_LCEIL:
2587 case BUILT_IN_LLCEIL:
2590 case BUILT_IN_LCEILF:
2591 case BUILT_IN_LLCEILF:
2594 case BUILT_IN_LCEILL:
2595 case BUILT_IN_LLCEILL:
2598 case BUILT_IN_LFLOOR:
2599 case BUILT_IN_LLFLOOR:
2602 case BUILT_IN_LFLOORF:
2603 case BUILT_IN_LLFLOORF:
2606 case BUILT_IN_LFLOORL:
2607 case BUILT_IN_LLFLOORL:
/* Synthesize a decl "real fn (real)" by name for the fallback.  */
2614 fntype = build_function_type_list (TREE_TYPE (arg),
2615 TREE_TYPE (arg), NULL_TREE);
2616 fallback_fndecl = build_fn_decl (name, fntype);
2619 exp = build_call_expr (fallback_fndecl, 1, arg);
2621 tmp = expand_normal (exp);
2623 /* Truncate the result of floating point optab to integer
2624 via expand_fix (). */
2625 target = gen_reg_rtx (mode);
2626 expand_fix (target, tmp, 0);
2631 /* Expand a call to one of the builtin math functions doing integer
2633 Return 0 if a normal call should be emitted rather than expanding the
2634 function in-line. EXP is the expression that is a call to the builtin
2635 function; if convenient, the result should be placed in TARGET.
2636 SUBTARGET may be used as the target for computing one of EXP's operands. */
2639 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
/* NOTE(review): original lines are missing from this extract (braces,
   declarations of op0/insns/arg, returns); code kept byte-identical.  */
2641 convert_optab builtin_optab;
2643 tree fndecl = get_callee_fndecl (exp);
2645 enum machine_mode mode;
2647 /* There's no easy way to detect the case we need to set EDOM. */
/* lrint/lround can set errno, so bail out entirely under -fmath-errno.  */
2648 if (flag_errno_math)
2651 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2654 arg = CALL_EXPR_ARG (exp, 0);
2656 switch (DECL_FUNCTION_CODE (fndecl))
2658 CASE_FLT_FN (BUILT_IN_LRINT):
2659 CASE_FLT_FN (BUILT_IN_LLRINT):
2660 builtin_optab = lrint_optab; break;
2661 CASE_FLT_FN (BUILT_IN_LROUND):
2662 CASE_FLT_FN (BUILT_IN_LLROUND):
2663 builtin_optab = lround_optab; break;
2668 /* Make a suitable register to place result in. */
2669 mode = TYPE_MODE (TREE_TYPE (exp));
2671 target = gen_reg_rtx (mode);
2673 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2674 need to expand the argument again. This way, we will not perform
2675 side-effects more the once. */
2676 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2678 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2682 if (expand_sfix_optab (target, op0, builtin_optab))
2684 /* Output the entire sequence. */
2685 insns = get_insns ();
2691 /* If we were unable to expand via the builtin, stop the sequence
2692 (without outputting the insns) and call to the library function
2693 with the stabilized argument list. */
2696 target = expand_call (exp, target, target == const0_rtx);
2701 /* To evaluate powi(x,n), the floating point value x raised to the
2702 constant integer exponent n, we use a hybrid algorithm that
2703 combines the "window method" with look-up tables. For an
2704 introduction to exponentiation algorithms and "addition chains",
2705 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2706 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2707 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2708 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2710 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2711 multiplications to inline before calling the system library's pow
2712 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2713 so this default never requires calling pow, powf or powl. */
2715 #ifndef POWI_MAX_MULTS
2716 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
/* NOTE(review): the matching #endif line is missing from this extract.  */
2719 /* The size of the "optimal power tree" lookup table. All
2720 exponents less than this value are simply looked up in the
2721 powi_table below. This threshold is also used to size the
2722 cache of pseudo registers that hold intermediate results. */
2723 #define POWI_TABLE_SIZE 256
2725 /* The size, in bits of the window, used in the "window method"
2726 exponentiation algorithm. This is equivalent to a radix of
2727 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2728 #define POWI_WINDOW_SIZE 3
2730 /* The following table is an efficient representation of an
2731 "optimal power tree". For each value, i, the corresponding
2732 value, j, in the table states than an optimal evaluation
2733 sequence for calculating pow(x,i) can be found by evaluating
2734 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2735 100 integers is given in Knuth's "Seminumerical algorithms". */
2737 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2739 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2740 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2741 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2742 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2743 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2744 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2745 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2746 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2747 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2748 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2749 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2750 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2751 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2752 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2753 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2754 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2755 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2756 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2757 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2758 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2759 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2760 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2761 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2762 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2763 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2764 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2765 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2766 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2767 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2768 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2769 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2770 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2774 /* Return the number of multiplications required to calculate
2775 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2776 subroutine of powi_cost. CACHE is an array indicating
2777 which exponents have already been calculated. */
2780 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
/* NOTE(review): the cache-hit early return and the cache[n] marking lines
   are missing from this extract; code kept byte-identical.  */
2782 /* If we've already calculated this exponent, then this evaluation
2783 doesn't require any additional multiplications. */
/* Recurse on the optimal split recorded in powi_table, plus one multiply
   to combine the two sub-results.  */
2788 return powi_lookup_cost (n - powi_table[n], cache)
2789 + powi_lookup_cost (powi_table[n], cache) + 1;
2792 /* Return the number of multiplications required to calculate
2793 powi(x,n) for an arbitrary x, given the exponent N. This
2794 function needs to be kept in sync with expand_powi below. */
2797 powi_cost (HOST_WIDE_INT n)
/* NOTE(review): a few lines are missing from this extract (braces, the
   n == 0 early return, result initialization); code kept byte-identical.  */
2799 bool cache[POWI_TABLE_SIZE];
2800 unsigned HOST_WIDE_INT digit;
2801 unsigned HOST_WIDE_INT val;
2807 /* Ignore the reciprocal when calculating the cost. */
2808 val = (n < 0) ? -n : n;
2810 /* Initialize the exponent cache. */
2811 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
/* Window method: peel off POWI_WINDOW_SIZE bits at a time until the
   remaining exponent fits in the lookup table.  */
2816 while (val >= POWI_TABLE_SIZE)
2820 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2821 result += powi_lookup_cost (digit, cache)
2822 + POWI_WINDOW_SIZE + 1;
2823 val >>= POWI_WINDOW_SIZE;
2832 return result + powi_lookup_cost (val, cache);
2835 /* Recursive subroutine of expand_powi. This function takes the array,
2836 CACHE, of already calculated exponents and an exponent N and returns
2837 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2840 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
/* NOTE(review): lines are missing from this extract (braces, cache-hit
   return, odd/even handling for n >= POWI_TABLE_SIZE, op1 = op0 for the
   squaring case); code kept byte-identical, comments only.  */
2842 unsigned HOST_WIDE_INT digit;
/* Small exponents: split per the optimal power tree in powi_table.  */
2846 if (n < POWI_TABLE_SIZE)
2851 target = gen_reg_rtx (mode);
2854 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2855 op1 = expand_powi_1 (mode, powi_table[n], cache);
/* Large exponents with low bits set: strip a window of bits.  */
2859 target = gen_reg_rtx (mode);
2860 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2861 op0 = expand_powi_1 (mode, n - digit, cache);
2862 op1 = expand_powi_1 (mode, digit, cache);
/* Even exponents: square x**(n/2).  */
2866 target = gen_reg_rtx (mode);
2867 op0 = expand_powi_1 (mode, n >> 1, cache);
/* Combine the two factors with a single multiplication.  */
2871 result = expand_mult (mode, op0, op1, target, 0);
2872 if (result != target)
2873 emit_move_insn (target, result);
2877 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2878 floating point operand in mode MODE, and N is the exponent. This
2879 function needs to be kept in sync with powi_cost above. */
2882 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
/* NOTE(review): lines are missing from this extract (braces, the n == 0
   check, cache[1] = x seeding, the n < 0 guard before the reciprocal,
   final return); code kept byte-identical, comments only.  */
2884 unsigned HOST_WIDE_INT val;
2885 rtx cache[POWI_TABLE_SIZE];
/* x**0 is 1.0 regardless of x.  */
2889 return CONST1_RTX (mode);
2891 val = (n < 0) ? -n : n;
2893 memset (cache, 0, sizeof (cache));
2896 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2898 /* If the original exponent was negative, reciprocate the result. */
2900 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2901 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2906 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2907 a normal call should be emitted rather than expanding the function
2908 in-line. EXP is the expression that is a call to the builtin
2909 function; if convenient, the result should be placed in TARGET. */
2912 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
/* NOTE(review): original lines are missing throughout this extract (braces,
   declarations of arg0/arg1/fn/narg0/op/op2/n, several conditions, returns);
   code kept byte-identical, comments only.  */
2916 tree type = TREE_TYPE (exp);
2917 REAL_VALUE_TYPE cint, c, c2;
2920 enum machine_mode mode = TYPE_MODE (type);
2922 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2925 arg0 = CALL_EXPR_ARG (exp, 0);
2926 arg1 = CALL_EXPR_ARG (exp, 1);
/* Non-constant exponent: fall straight through to the optab expansion.  */
2928 if (TREE_CODE (arg1) != REAL_CST
2929 || TREE_OVERFLOW (arg1))
2930 return expand_builtin_mathfn_2 (exp, target, subtarget);
2932 /* Handle constant exponents. */
2934 /* For integer valued exponents we can expand to an optimal multiplication
2935 sequence using expand_powi. */
2936 c = TREE_REAL_CST (arg1);
2937 n = real_to_integer (&c);
2938 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
/* n in [-1, 2] is always safe to expand; larger n only with
   -funsafe-math-optimizations and an acceptable multiply count.  */
2939 if (real_identical (&c, &cint)
2940 && ((n >= -1 && n <= 2)
2941 || (flag_unsafe_math_optimizations
2943 && powi_cost (n) <= POWI_MAX_MULTS)))
2945 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2948 op = force_reg (mode, op);
2949 op = expand_powi (op, mode, n);
/* Stabilize arg0 so it is evaluated only once in the mixed sqrt/cbrt
   strategies below.  */
2954 narg0 = builtin_save_expr (arg0);
2956 /* If the exponent is not integer valued, check if it is half of an integer.
2957 In this case we can expand to sqrt (x) * x**(n/2). */
2958 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2959 if (fn != NULL_TREE)
2961 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2962 n = real_to_integer (&c2);
2963 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2964 if (real_identical (&c2, &cint)
2965 && ((flag_unsafe_math_optimizations
2967 && powi_cost (n/2) <= POWI_MAX_MULTS)
2970 tree call_expr = build_call_expr (fn, 1, narg0);
2971 /* Use expand_expr in case the newly built call expression
2972 was folded to a non-call. */
2973 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
/* Multiply sqrt(x) by the x**(n/2) power sequence.  */
2976 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2977 op2 = force_reg (mode, op2);
2978 op2 = expand_powi (op2, mode, abs (n / 2));
2979 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2980 0, OPTAB_LIB_WIDEN);
2981 /* If the original exponent was negative, reciprocate the
2984 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2985 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2991 /* Try if the exponent is a third of an integer. In this case
2992 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2993 different from pow (x, 1./3.) due to rounding and behavior
2994 with negative x we need to constrain this transformation to
2995 unsafe math and positive x or finite math. */
2996 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2998 && flag_unsafe_math_optimizations
2999 && (tree_expr_nonnegative_p (arg0)
3000 || !HONOR_NANS (mode)))
3002 REAL_VALUE_TYPE dconst3;
3003 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
/* Check that 3*c rounds to an integer and that dividing it back by 3
   reproduces c exactly, i.e. c really is n/3.  */
3004 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3005 real_round (&c2, mode, &c2);
3006 n = real_to_integer (&c2);
3007 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3008 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3009 real_convert (&c2, mode, &c2);
3010 if (real_identical (&c2, &c)
3012 && powi_cost (n/3) <= POWI_MAX_MULTS)
3015 tree call_expr = build_call_expr (fn, 1,narg0);
3016 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
/* n % 3 == 2 needs cbrt(x)**2: square the cbrt result.  */
3017 if (abs (n) % 3 == 2)
3018 op = expand_simple_binop (mode, MULT, op, op, op,
3019 0, OPTAB_LIB_WIDEN);
3022 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3023 op2 = force_reg (mode, op2);
3024 op2 = expand_powi (op2, mode, abs (n / 3));
3025 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3026 0, OPTAB_LIB_WIDEN);
3027 /* If the original exponent was negative, reciprocate the
3030 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3031 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3037 /* Fall back to optab expansion. */
3038 return expand_builtin_mathfn_2 (exp, target, subtarget);
3041 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3042 a normal call should be emitted rather than expanding the function
3043 in-line. EXP is the expression that is a call to the builtin
3044 function; if convenient, the result should be placed in TARGET. */
3047 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
/* NOTE(review): lines are missing from this extract (braces, declarations
   of arg0/arg1/op0/op1, some conditions, final return); code kept
   byte-identical, comments only.  */
3051 enum machine_mode mode;
3052 enum machine_mode mode2;
3054 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3057 arg0 = CALL_EXPR_ARG (exp, 0);
3058 arg1 = CALL_EXPR_ARG (exp, 1);
3059 mode = TYPE_MODE (TREE_TYPE (exp));
3061 /* Handle constant power. */
3063 if (TREE_CODE (arg1) == INTEGER_CST
3064 && !TREE_OVERFLOW (arg1))
3066 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3068 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3069 Otherwise, check the number of multiplications required. */
/* HIGH being 0 or -1 means the exponent fits in a HOST_WIDE_INT.  */
3070 if ((TREE_INT_CST_HIGH (arg1) == 0
3071 || TREE_INT_CST_HIGH (arg1) == -1)
3072 && ((n >= -1 && n <= 2)
3074 && powi_cost (n) <= POWI_MAX_MULTS)))
3076 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3077 op0 = force_reg (mode, op0);
3078 return expand_powi (op0, mode, n);
3082 /* Emit a libcall to libgcc. */
3084 /* Mode of the 2nd argument must match that of an int. */
3085 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3087 if (target == NULL_RTX)
3088 target = gen_reg_rtx (mode);
3090 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3091 if (GET_MODE (op0) != mode)
3092 op0 = convert_to_mode (mode, op0, 0);
3093 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3094 if (GET_MODE (op1) != mode2)
3095 op1 = convert_to_mode (mode2, op1, 0);
/* __powi* in libgcc is a pure function of its two operands (LCT_CONST).  */
3097 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3098 target, LCT_CONST, mode, 2,
3099 op0, mode, op1, mode2);
3104 /* Expand expression EXP which is a call to the strlen builtin. Return
3105 NULL_RTX if we failed the caller should emit a normal call, otherwise
3106 try to get the result in TARGET, if convenient. */
3109 expand_builtin_strlen (tree exp, rtx target,
3110 enum machine_mode target_mode)
/* NOTE(review): lines are missing from this extract (braces, declarations
   of len/align/pat, NULL_RTX returns, the POINTER_TYPE check body, the
   pat failure path, end_sequence); code kept byte-identical.  */
3112 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3118 tree src = CALL_EXPR_ARG (exp, 0);
3119 rtx result, src_reg, char_rtx, before_strlen;
3120 enum machine_mode insn_mode = target_mode, char_mode;
3121 enum insn_code icode = CODE_FOR_nothing;
3124 /* If the length can be computed at compile-time, return it. */
3125 len = c_strlen (src, 0);
3127 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3129 /* If the length can be computed at compile-time and is constant
3130 integer, but there are side-effects in src, evaluate
3131 src for side-effects, then return len.
3132 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3133 can be optimized into: i++; x = 3; */
3134 len = c_strlen (src, 1);
3135 if (len && TREE_CODE (len) == INTEGER_CST)
/* Expanding into const0_rtx discards the value but keeps side effects.  */
3137 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3138 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3141 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3143 /* If SRC is not a pointer type, don't do this operation inline. */
3147 /* Bail out if we can't compute strlen in the right mode. */
/* Walk wider and wider integer modes until a strlen insn exists.  */
3148 while (insn_mode != VOIDmode)
3150 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3151 if (icode != CODE_FOR_nothing)
3154 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3156 if (insn_mode == VOIDmode)
3159 /* Make a place to write the result of the instruction. */
3163 && GET_MODE (result) == insn_mode
3164 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3165 result = gen_reg_rtx (insn_mode);
3167 /* Make a place to hold the source address. We will not expand
3168 the actual source until we are sure that the expansion will
3169 not fail -- there are trees that cannot be expanded twice. */
3170 src_reg = gen_reg_rtx (Pmode);
3172 /* Mark the beginning of the strlen sequence so we can emit the
3173 source operand later. */
3174 before_strlen = get_last_insn ();
/* Operand 2 of the strlen pattern is the (zero) terminator character.  */
3176 char_rtx = const0_rtx;
3177 char_mode = insn_data[(int) icode].operand[2].mode;
3178 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3180 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3182 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3183 char_rtx, GEN_INT (align));
3188 /* Now that we are assured of success, expand the source. */
3190 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3192 emit_move_insn (src_reg, pat);
/* Splice the source-address computation in before the strlen insn.  */
3197 emit_insn_after (pat, before_strlen);
3199 emit_insn_before (pat, get_insns ());
3201 /* Return the value in the proper mode for this function. */
3202 if (GET_MODE (result) == target_mode)
3204 else if (target != 0)
3205 convert_move (target, result, 0);
3207 target = convert_to_mode (target_mode, result, 0);
3213 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3214 caller should emit a normal call, otherwise try to get the result
3215 in TARGET, if convenient (and in mode MODE if that's convenient). */
/* Relies entirely on tree-level folding (fold_builtin_strstr); no
   machine insn pattern is used for strstr.  Lines are elided in this
   extract (original numbering jumps).  */
3218 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3220 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3222 tree type = TREE_TYPE (exp);
3223 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3224 CALL_EXPR_ARG (exp, 1), type);
3226 return expand_expr (result, target, mode, EXPAND_NORMAL);
3231 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3232 caller should emit a normal call, otherwise try to get the result
3233 in TARGET, if convenient (and in mode MODE if that's convenient). */
/* Fold-only expansion, parallel to expand_builtin_strstr above.  */
3236 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3238 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3240 tree type = TREE_TYPE (exp);
3241 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3242 CALL_EXPR_ARG (exp, 1), type);
3244 return expand_expr (result, target, mode, EXPAND_NORMAL);
3246 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3251 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3252 caller should emit a normal call, otherwise try to get the result
3253 in TARGET, if convenient (and in mode MODE if that's convenient). */
/* Fold-only expansion, parallel to expand_builtin_strchr above.  */
3256 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3258 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3260 tree type = TREE_TYPE (exp);
3261 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3262 CALL_EXPR_ARG (exp, 1), type);
3264 return expand_expr (result, target, mode, EXPAND_NORMAL);
3269 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3270 caller should emit a normal call, otherwise try to get the result
3271 in TARGET, if convenient (and in mode MODE if that's convenient). */
/* Fold-only expansion, parallel to the strstr/strchr expanders above.  */
3274 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3276 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3278 tree type = TREE_TYPE (exp);
3279 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3280 CALL_EXPR_ARG (exp, 1), type);
3282 return expand_expr (result, target, mode, EXPAND_NORMAL);
3287 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3288 bytes from constant string DATA + OFFSET and return it as target
/* DATA is the NUL-terminated source string; the assert guarantees the
   read stays within the string including its terminator.  */
3292 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3293 enum machine_mode mode)
3295 const char *str = (const char *) data;
3297 gcc_assert (offset >= 0
3298 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3299 <= strlen (str) + 1));
3301 return c_readstr (str + offset, mode);
3304 /* Expand a call EXP to the memcpy builtin.
3305 Return NULL_RTX if we failed, the caller should emit a normal call,
3306 otherwise try to get the result in TARGET, if convenient (and in
3307 mode MODE if that's convenient). */
/* NOTE(review): interior lines are elided in this extract (numbering
   jumps); verify against full upstream source before changing.
   Strategy visible below: try tree-level folding first, then
   store_by_pieces for constant-string sources, then a block move.  */
3310 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3312 tree fndecl = get_callee_fndecl (exp);
3314 if (!validate_arglist (exp,
3315 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3319 tree dest = CALL_EXPR_ARG (exp, 0);
3320 tree src = CALL_EXPR_ARG (exp, 1);
3321 tree len = CALL_EXPR_ARG (exp, 2);
3322 const char *src_str;
3323 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3324 unsigned int dest_align
3325 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3326 rtx dest_mem, src_mem, dest_addr, len_rtx;
3327 tree result = fold_builtin_memory_op (dest, src, len,
3328 TREE_TYPE (TREE_TYPE (fndecl)),
3330 HOST_WIDE_INT expected_size = -1;
3331 unsigned int expected_align = 0;
/* Folding may yield a COMPOUND_EXPR; expand the side-effect halves
   for effect only, then expand the final value.  */
3335 while (TREE_CODE (result) == COMPOUND_EXPR)
3337 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3339 result = TREE_OPERAND (result, 1);
3341 return expand_expr (result, target, mode, EXPAND_NORMAL);
3344 /* If DEST is not a pointer type, call the normal function. */
3345 if (dest_align == 0)
3348 /* If either SRC is not a pointer type, don't do this
3349 operation in-line. */
3353 stringop_block_profile (exp, &expected_align, &expected_size);
3354 if (expected_align < dest_align)
3355 expected_align = dest_align;
3356 dest_mem = get_memory_rtx (dest, len);
3357 set_mem_align (dest_mem, dest_align);
3358 len_rtx = expand_normal (len);
3359 src_str = c_getstr (src);
3361 /* If SRC is a string constant and block move would be done
3362 by pieces, we can avoid loading the string from memory
3363 and only stored the computed constants. */
3365 && GET_CODE (len_rtx) == CONST_INT
3366 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3367 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3368 (void *) src_str, dest_align, false))
3370 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3371 builtin_memcpy_read_str,
3372 (void *) src_str, dest_align, false, 0);
3373 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3374 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3378 src_mem = get_memory_rtx (src, len);
3379 set_mem_align (src_mem, src_align);
3381 /* Copy word part most expediently. */
3382 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3383 CALL_EXPR_TAILCALL (exp)
3384 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3385 expected_align, expected_size);
3389 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3390 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3396 /* Expand a call EXP to the mempcpy builtin.
3397 Return NULL_RTX if we failed; the caller should emit a normal call,
3398 otherwise try to get the result in TARGET, if convenient (and in
3399 mode MODE if that's convenient). If ENDP is 0 return the
3400 destination pointer, if ENDP is 1 return the end pointer ala
3401 mempcpy, and if ENDP is 2 return the end pointer minus one ala
/* Thin wrapper: unpack the three CALL_EXPR arguments and delegate to
   expand_builtin_mempcpy_args with endp == 1 (mempcpy semantics).  */
3405 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3407 if (!validate_arglist (exp,
3408 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3412 tree dest = CALL_EXPR_ARG (exp, 0);
3413 tree src = CALL_EXPR_ARG (exp, 1);
3414 tree len = CALL_EXPR_ARG (exp, 2);
3415 return expand_builtin_mempcpy_args (dest, src, len,
3417 target, mode, /*endp=*/ 1);
3421 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3422 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3423 so that this can also be called without constructing an actual CALL_EXPR.
3424 TYPE is the return type of the call. The other arguments and return value
3425 are the same as for expand_builtin_mempcpy. */
/* NOTE(review): interior lines elided in this extract; verify control
   flow against full upstream source.  */
3428 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3429 rtx target, enum machine_mode mode, int endp)
3431 /* If return value is ignored, transform mempcpy into memcpy. */
3432 if (target == const0_rtx)
3434 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3439 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3440 target, mode, EXPAND_NORMAL);
3444 const char *src_str;
3445 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3446 unsigned int dest_align
3447 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3448 rtx dest_mem, src_mem, len_rtx;
3449 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
/* Expand COMPOUND_EXPR side effects, then the folded value.  */
3453 while (TREE_CODE (result) == COMPOUND_EXPR)
3455 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3457 result = TREE_OPERAND (result, 1);
3459 return expand_expr (result, target, mode, EXPAND_NORMAL);
3462 /* If either SRC or DEST is not a pointer type, don't do this
3463 operation in-line. */
3464 if (dest_align == 0 || src_align == 0)
3467 /* If LEN is not constant, call the normal function. */
3468 if (! host_integerp (len, 1))
3471 len_rtx = expand_normal (len);
3472 src_str = c_getstr (src);
3474 /* If SRC is a string constant and block move would be done
3475 by pieces, we can avoid loading the string from memory
3476 and only stored the computed constants. */
3478 && GET_CODE (len_rtx) == CONST_INT
3479 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3480 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3481 (void *) src_str, dest_align, false))
3483 dest_mem = get_memory_rtx (dest, len);
3484 set_mem_align (dest_mem, dest_align);
3485 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3486 builtin_memcpy_read_str,
3487 (void *) src_str, dest_align,
3489 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3490 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Otherwise fall back to move_by_pieces for a small constant LEN.  */
3494 if (GET_CODE (len_rtx) == CONST_INT
3495 && can_move_by_pieces (INTVAL (len_rtx),
3496 MIN (dest_align, src_align)))
3498 dest_mem = get_memory_rtx (dest, len);
3499 set_mem_align (dest_mem, dest_align);
3500 src_mem = get_memory_rtx (src, len);
3501 set_mem_align (src_mem, src_align);
3502 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3503 MIN (dest_align, src_align), endp);
3504 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3505 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3513 /* Expand expression EXP, which is a call to the memmove builtin. Return
3514 NULL_RTX if we failed; the caller should emit a normal call. */
/* Thin wrapper: unpack CALL_EXPR arguments and delegate to
   expand_builtin_memmove_args.  */
3517 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3519 if (!validate_arglist (exp,
3520 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3524 tree dest = CALL_EXPR_ARG (exp, 0);
3525 tree src = CALL_EXPR_ARG (exp, 1);
3526 tree len = CALL_EXPR_ARG (exp, 2);
3527 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3528 target, mode, ignore);
3532 /* Helper function to do the actual work for expand_builtin_memmove. The
3533 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3534 so that this can also be called without constructing an actual CALL_EXPR.
3535 TYPE is the return type of the call. The other arguments and return value
3536 are the same as for expand_builtin_memmove. */
/* Only tree-level folding (fold_builtin_memory_op with endp == 3,
   i.e. memmove semantics) is attempted; otherwise the caller emits a
   library call.  */
3539 expand_builtin_memmove_args (tree dest, tree src, tree len,
3540 tree type, rtx target, enum machine_mode mode,
3543 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3547 STRIP_TYPE_NOPS (result);
3548 while (TREE_CODE (result) == COMPOUND_EXPR)
3550 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3552 result = TREE_OPERAND (result, 1);
3554 return expand_expr (result, target, mode, EXPAND_NORMAL);
3557 /* Otherwise, call the normal function. */
3561 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3562 NULL_RTX if we failed the caller should emit a normal call. */
/* Note the argument swap: bcopy takes (src, dest, size) while memmove
   takes (dest, src, size).  */
3565 expand_builtin_bcopy (tree exp, int ignore)
3567 tree type = TREE_TYPE (exp);
3568 tree src, dest, size;
3570 if (!validate_arglist (exp,
3571 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3574 src = CALL_EXPR_ARG (exp, 0);
3575 dest = CALL_EXPR_ARG (exp, 1);
3576 size = CALL_EXPR_ARG (exp, 2);
3578 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3579 This is done this way so that if it isn't expanded inline, we fall
3580 back to calling bcopy instead of memmove. */
3581 return expand_builtin_memmove_args (dest, src,
3582 fold_convert (sizetype, size),
3583 type, const0_rtx, VOIDmode,
3588 # define HAVE_movstr 0
3589 # define CODE_FOR_movstr CODE_FOR_nothing
3592 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3593 we failed, the caller should emit a normal call, otherwise try to
3594 get the result in TARGET, if convenient. If ENDP is 0 return the
3595 destination pointer, if ENDP is 1 return the end pointer ala
3596 mempcpy, and if ENDP is 2 return the end pointer minus one ala
/* NOTE(review): interior lines elided in this extract (the branch
   structure around TARGET/ENDP is incomplete here); consult full
   upstream source.  */
3600 expand_movstr (tree dest, tree src, rtx target, int endp)
3606 const struct insn_data * data;
3611 dest_mem = get_memory_rtx (dest, NULL);
3612 src_mem = get_memory_rtx (src, NULL);
3615 target = force_reg (Pmode, XEXP (dest_mem, 0));
3616 dest_mem = replace_equiv_address (dest_mem, target);
3617 end = gen_reg_rtx (Pmode);
3621 if (target == 0 || target == const0_rtx)
3623 end = gen_reg_rtx (Pmode);
3631 data = insn_data + CODE_FOR_movstr;
3633 if (data->operand[0].mode != VOIDmode)
3634 end = gen_lowpart (data->operand[0].mode, end);
3636 insn = data->genfun (end, dest_mem, src_mem);
3642 /* movstr is supposed to set end to the address of the NUL
3643 terminator. If the caller requested a mempcpy-like return value,
3645 if (endp == 1 && target != const0_rtx)
3647 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3648 emit_move_insn (target, force_operand (tem, NULL_RTX));
3654 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3655 NULL_RTX if we failed the caller should emit a normal call, otherwise
3656 try to get the result in TARGET, if convenient (and in mode MODE if that's
/* Thin wrapper: unpack DEST/SRC and delegate to
   expand_builtin_strcpy_args.  */
3660 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3662 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3664 tree dest = CALL_EXPR_ARG (exp, 0);
3665 tree src = CALL_EXPR_ARG (exp, 1);
3666 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3671 /* Helper function to do the actual work for expand_builtin_strcpy. The
3672 arguments to the builtin_strcpy call DEST and SRC are broken out
3673 so that this can also be called without constructing an actual CALL_EXPR.
3674 The other arguments and return value are the same as for
3675 expand_builtin_strcpy. */
/* Try tree-level folding first; otherwise fall back to a target movstr
   insn via expand_movstr (endp == 0: return the destination pointer).  */
3678 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3679 rtx target, enum machine_mode mode)
3681 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3683 return expand_expr (result, target, mode, EXPAND_NORMAL);
3684 return expand_movstr (dest, src, target, /*endp=*/0);
3688 /* Expand a call EXP to the stpcpy builtin.
3689 Return NULL_RTX if we failed the caller should emit a normal call,
3690 otherwise try to get the result in TARGET, if convenient (and in
3691 mode MODE if that's convenient). */
/* NOTE(review): interior lines elided in this extract; verify against
   full upstream source before editing.  */
3694 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3698 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3701 dst = CALL_EXPR_ARG (exp, 0);
3702 src = CALL_EXPR_ARG (exp, 1);
3704 /* If return value is ignored, transform stpcpy into strcpy. */
3705 if (target == const0_rtx)
3707 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3711 return expand_expr (build_call_expr (fn, 2, dst, src),
3712 target, mode, EXPAND_NORMAL);
3719 /* Ensure we get an actual string whose length can be evaluated at
3720 compile-time, not an expression containing a string. This is
3721 because the latter will potentially produce pessimized code
3722 when used to produce the return value. */
3723 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3724 return expand_movstr (dst, src, target, /*endp=*/2);
3726 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3727 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3728 target, mode, /*endp=*/2);
/* mempcpy expansion failed; for a constant length, expand as strcpy
   and compute the end pointer as dst + strlen(src) ourselves.  */
3733 if (TREE_CODE (len) == INTEGER_CST)
3735 rtx len_rtx = expand_normal (len);
3737 if (GET_CODE (len_rtx) == CONST_INT)
3739 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3740 dst, src, target, mode);
3746 if (mode != VOIDmode)
3747 target = gen_reg_rtx (mode);
3749 target = gen_reg_rtx (GET_MODE (ret));
3751 if (GET_MODE (target) != GET_MODE (ret))
3752 ret = gen_lowpart (GET_MODE (target), ret);
3754 ret = plus_constant (ret, INTVAL (len_rtx));
3755 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3763 return expand_movstr (dst, src, target, /*endp=*/2);
3767 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3768 bytes from constant string DATA + OFFSET and return it as target
/* Unlike builtin_memcpy_read_str, offsets past the string's end are
   valid here (strncpy zero-pads); a line supplying the zero value is
   elided in this extract.  */
3772 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3773 enum machine_mode mode)
3775 const char *str = (const char *) data;
3777 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3780 return c_readstr (str + offset, mode);
3783 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3784 NULL_RTX if we failed the caller should emit a normal call. */
/* NOTE(review): interior lines elided in this extract; verify against
   full upstream source.  */
3787 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3789 tree fndecl = get_callee_fndecl (exp);
3791 if (validate_arglist (exp,
3792 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3794 tree dest = CALL_EXPR_ARG (exp, 0);
3795 tree src = CALL_EXPR_ARG (exp, 1);
3796 tree len = CALL_EXPR_ARG (exp, 2);
3797 tree slen = c_strlen (src, 1);
3798 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
/* Expand COMPOUND_EXPR side effects, then the folded value.  */
3802 while (TREE_CODE (result) == COMPOUND_EXPR)
3804 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3806 result = TREE_OPERAND (result, 1);
3808 return expand_expr (result, target, mode, EXPAND_NORMAL);
3811 /* We must be passed a constant len and src parameter. */
3812 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3815 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3817 /* We're required to pad with trailing zeros if the requested
3818 len is greater than strlen(s2)+1. In that case try to
3819 use store_by_pieces, if it fails, punt. */
3820 if (tree_int_cst_lt (slen, len))
3822 unsigned int dest_align
3823 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3824 const char *p = c_getstr (src);
3827 if (!p || dest_align == 0 || !host_integerp (len, 1)
3828 || !can_store_by_pieces (tree_low_cst (len, 1),
3829 builtin_strncpy_read_str,
3830 (void *) p, dest_align, false))
3833 dest_mem = get_memory_rtx (dest, len);
3834 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3835 builtin_strncpy_read_str,
3836 (void *) p, dest_align, false, 0);
3837 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3838 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3845 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3846 bytes from constant string DATA + OFFSET and return it as target
/* DATA points at a single fill byte; a mode-sized buffer is filled
   with that byte, so OFFSET is irrelevant (ATTRIBUTE_UNUSED).  */
3850 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3851 enum machine_mode mode)
3853 const char *c = (const char *) data;
3854 char *p = alloca (GET_MODE_SIZE (mode));
3856 memset (p, *c, GET_MODE_SIZE (mode));
3858 return c_readstr (p, mode);
3861 /* Callback routine for store_by_pieces. Return the RTL of a register
3862 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3863 char value given in the RTL register data. For example, if mode is
3864 4 bytes wide, return the RTL for 0x01010101*data. */
/* Builds the 0x0101...01 replication coefficient via c_readstr on an
   all-ones byte buffer, then multiplies it by the runtime byte value.  */
3867 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3868 enum machine_mode mode)
3874 size = GET_MODE_SIZE (mode);
3879 memset (p, 1, size);
3880 coeff = c_readstr (p, mode);
3882 target = convert_to_mode (mode, (rtx) data, 1);
3883 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3884 return force_reg (mode, target);
3887 /* Expand expression EXP, which is a call to the memset builtin. Return
3888 NULL_RTX if we failed the caller should emit a normal call, otherwise
3889 try to get the result in TARGET, if convenient (and in mode MODE if that's
/* Thin wrapper: unpack DEST/VAL/LEN and delegate to
   expand_builtin_memset_args.  */
3893 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3895 if (!validate_arglist (exp,
3896 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3900 tree dest = CALL_EXPR_ARG (exp, 0);
3901 tree val = CALL_EXPR_ARG (exp, 1);
3902 tree len = CALL_EXPR_ARG (exp, 2);
3903 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3907 /* Helper function to do the actual work for expand_builtin_memset. The
3908 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3909 so that this can also be called without constructing an actual CALL_EXPR.
3910 The other arguments and return value are the same as for
3911 expand_builtin_memset. */
/* NOTE(review): interior lines elided in this extract (several branch
   heads and labels are missing); verify against full upstream source.
   Visible strategy: store_by_pieces or setmem pattern for non-constant
   VAL, store_by_pieces/setmem/clear_storage for constant VAL, else
   re-emit the library call (memset or bzero).  */
3914 expand_builtin_memset_args (tree dest, tree val, tree len,
3915 rtx target, enum machine_mode mode, tree orig_exp)
3918 enum built_in_function fcode;
3920 unsigned int dest_align;
3921 rtx dest_mem, dest_addr, len_rtx;
3922 HOST_WIDE_INT expected_size = -1;
3923 unsigned int expected_align = 0;
3925 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3927 /* If DEST is not a pointer type, don't do this operation in-line. */
3928 if (dest_align == 0)
3931 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3932 if (expected_align < dest_align)
3933 expected_align = dest_align;
3935 /* If the LEN parameter is zero, return DEST. */
3936 if (integer_zerop (len))
3938 /* Evaluate and ignore VAL in case it has side-effects. */
3939 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3940 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3943 /* Stabilize the arguments in case we fail. */
3944 dest = builtin_save_expr (dest);
3945 val = builtin_save_expr (val);
3946 len = builtin_save_expr (len);
3948 len_rtx = expand_normal (len);
3949 dest_mem = get_memory_rtx (dest, len);
3951 if (TREE_CODE (val) != INTEGER_CST)
3955 val_rtx = expand_normal (val);
3956 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3959 /* Assume that we can memset by pieces if we can store
3960 * the coefficients by pieces (in the required modes).
3961 * We can't pass builtin_memset_gen_str as that emits RTL. */
3963 if (host_integerp (len, 1)
3964 && can_store_by_pieces (tree_low_cst (len, 1),
3965 builtin_memset_read_str, &c, dest_align,
3968 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3970 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3971 builtin_memset_gen_str, val_rtx, dest_align,
3974 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3975 dest_align, expected_align,
3979 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3980 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Constant VAL: reduce it to a host char C, then use the same
   store_by_pieces / setmem strategy with the constant byte.  */
3984 if (target_char_cast (val, &c))
3989 if (host_integerp (len, 1)
3990 && can_store_by_pieces (tree_low_cst (len, 1),
3991 builtin_memset_read_str, &c, dest_align,
3993 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3994 builtin_memset_read_str, &c, dest_align, true, 0);
3995 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3996 dest_align, expected_align,
4000 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4001 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Zero fill: let clear_storage_hints pick the best strategy.  */
4005 set_mem_align (dest_mem, dest_align);
4006 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4007 CALL_EXPR_TAILCALL (orig_exp)
4008 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4009 expected_align, expected_size);
4013 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4014 dest_addr = convert_memory_address (ptr_mode, dest_addr);
/* Fallback: rebuild the original memset/bzero call and expand it,
   preserving the tail-call flag.  */
4020 fndecl = get_callee_fndecl (orig_exp);
4021 fcode = DECL_FUNCTION_CODE (fndecl);
4022 if (fcode == BUILT_IN_MEMSET)
4023 fn = build_call_expr (fndecl, 3, dest, val, len);
4024 else if (fcode == BUILT_IN_BZERO)
4025 fn = build_call_expr (fndecl, 2, dest, len);
4028 if (TREE_CODE (fn) == CALL_EXPR)
4029 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4030 return expand_call (fn, target, target == const0_rtx);
4033 /* Expand expression EXP, which is a call to the bzero builtin. Return
4034 NULL_RTX if we failed the caller should emit a normal call. */
/* Delegates to the memset helper with VAL = 0 and the return value
   ignored (const0_rtx target).  */
4037 expand_builtin_bzero (tree exp)
4041 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4044 dest = CALL_EXPR_ARG (exp, 0);
4045 size = CALL_EXPR_ARG (exp, 1);
4047 /* New argument list transforming bzero(ptr x, int y) to
4048 memset(ptr x, int 0, size_t y). This is done this way
4049 so that if it isn't expanded inline, we fallback to
4050 calling bzero instead of memset. */
4052 return expand_builtin_memset_args (dest, integer_zero_node,
4053 fold_convert (sizetype, size),
4054 const0_rtx, VOIDmode, exp);
4057 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4058 caller should emit a normal call, otherwise try to get the result
4059 in TARGET, if convenient (and in mode MODE if that's convenient). */
/* Fold-only expansion, parallel to the strstr/strchr expanders above.  */
4062 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4064 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4065 INTEGER_TYPE, VOID_TYPE))
4067 tree type = TREE_TYPE (exp);
4068 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4069 CALL_EXPR_ARG (exp, 1),
4070 CALL_EXPR_ARG (exp, 2), type);
4072 return expand_expr (result, target, mode, EXPAND_NORMAL);
4077 /* Expand expression EXP, which is a call to the memcmp built-in function.
4078 Return NULL_RTX if we failed and the
4079 caller should emit a normal call, otherwise try to get the result in
4080 TARGET, if convenient (and in mode MODE, if that's convenient). */
/* NOTE(review): interior lines elided in this extract; verify against
   full upstream source.  Strategy: fold at tree level, else use the
   target's cmpmemsi/cmpstrnsi insn, else a memcmp libcall.  */
4083 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4085 if (!validate_arglist (exp,
4086 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4090 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4091 CALL_EXPR_ARG (exp, 1),
4092 CALL_EXPR_ARG (exp, 2));
4094 return expand_expr (result, target, mode, EXPAND_NORMAL);
4097 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4099 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4102 tree arg1 = CALL_EXPR_ARG (exp, 0);
4103 tree arg2 = CALL_EXPR_ARG (exp, 1);
4104 tree len = CALL_EXPR_ARG (exp, 2);
4107 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4109 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4110 enum machine_mode insn_mode;
4112 #ifdef HAVE_cmpmemsi
4114 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4117 #ifdef HAVE_cmpstrnsi
4119 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4124 /* If we don't have POINTER_TYPE, call the function. */
4125 if (arg1_align == 0 || arg2_align == 0)
4128 /* Make a place to write the result of the instruction. */
4131 && REG_P (result) && GET_MODE (result) == insn_mode
4132 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4133 result = gen_reg_rtx (insn_mode);
4135 arg1_rtx = get_memory_rtx (arg1, len);
4136 arg2_rtx = get_memory_rtx (arg2, len);
4137 arg3_rtx = expand_normal (len);
4139 /* Set MEM_SIZE as appropriate. */
4140 if (GET_CODE (arg3_rtx) == CONST_INT)
4142 set_mem_size (arg1_rtx, arg3_rtx);
4143 set_mem_size (arg2_rtx, arg3_rtx);
4146 #ifdef HAVE_cmpmemsi
4148 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4149 GEN_INT (MIN (arg1_align, arg2_align)));
4152 #ifdef HAVE_cmpstrnsi
4154 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4155 GEN_INT (MIN (arg1_align, arg2_align)));
/* No usable insn pattern: emit a memcmp libcall instead.  */
4163 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4164 TYPE_MODE (integer_type_node), 3,
4165 XEXP (arg1_rtx, 0), Pmode,
4166 XEXP (arg2_rtx, 0), Pmode,
4167 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4168 TYPE_UNSIGNED (sizetype)),
4169 TYPE_MODE (sizetype));
4171 /* Return the value in the proper mode for this function. */
4172 mode = TYPE_MODE (TREE_TYPE (exp));
4173 if (GET_MODE (result) == mode)
4175 else if (target != 0)
4177 convert_move (target, result, 0);
4181 return convert_to_mode (mode, result, 0);
4188 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4189 if we failed the caller should emit a normal call, otherwise try to get
4190 the result in TARGET, if convenient. */
/* NOTE(review): interior lines elided in this extract; verify against
   full upstream source.  Strategy: fold at tree level, else try the
   cmpstrsi insn, else cmpstrnsi with a known string length, else
   re-emit the call with stabilized arguments.  */
4193 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4195 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4199 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4200 CALL_EXPR_ARG (exp, 1));
4202 return expand_expr (result, target, mode, EXPAND_NORMAL);
4205 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4206 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4207 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4209 rtx arg1_rtx, arg2_rtx;
4210 rtx result, insn = NULL_RTX;
4212 tree arg1 = CALL_EXPR_ARG (exp, 0);
4213 tree arg2 = CALL_EXPR_ARG (exp, 1);
4216 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4218 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4220 /* If we don't have POINTER_TYPE, call the function. */
4221 if (arg1_align == 0 || arg2_align == 0)
4224 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4225 arg1 = builtin_save_expr (arg1);
4226 arg2 = builtin_save_expr (arg2);
4228 arg1_rtx = get_memory_rtx (arg1, NULL);
4229 arg2_rtx = get_memory_rtx (arg2, NULL);
4231 #ifdef HAVE_cmpstrsi
4232 /* Try to call cmpstrsi. */
4235 enum machine_mode insn_mode
4236 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4238 /* Make a place to write the result of the instruction. */
4241 && REG_P (result) && GET_MODE (result) == insn_mode
4242 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4243 result = gen_reg_rtx (insn_mode);
4245 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4246 GEN_INT (MIN (arg1_align, arg2_align)));
4249 #ifdef HAVE_cmpstrnsi
4250 /* Try to determine at least one length and call cmpstrnsi. */
4251 if (!insn && HAVE_cmpstrnsi)
4256 enum machine_mode insn_mode
4257 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4258 tree len1 = c_strlen (arg1, 1);
4259 tree len2 = c_strlen (arg2, 1);
4262 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4264 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4266 /* If we don't have a constant length for the first, use the length
4267 of the second, if we know it. We don't require a constant for
4268 this case; some cost analysis could be done if both are available
4269 but neither is constant. For now, assume they're equally cheap,
4270 unless one has side effects. If both strings have constant lengths,
4277 else if (TREE_SIDE_EFFECTS (len1))
4279 else if (TREE_SIDE_EFFECTS (len2))
4281 else if (TREE_CODE (len1) != INTEGER_CST)
4283 else if (TREE_CODE (len2) != INTEGER_CST)
4285 else if (tree_int_cst_lt (len1, len2))
4290 /* If both arguments have side effects, we cannot optimize. */
4291 if (!len || TREE_SIDE_EFFECTS (len))
4294 arg3_rtx = expand_normal (len);
4296 /* Make a place to write the result of the instruction. */
4299 && REG_P (result) && GET_MODE (result) == insn_mode
4300 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4301 result = gen_reg_rtx (insn_mode);
4303 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4304 GEN_INT (MIN (arg1_align, arg2_align)));
4312 /* Return the value in the proper mode for this function. */
4313 mode = TYPE_MODE (TREE_TYPE (exp));
4314 if (GET_MODE (result) == mode)
4317 return convert_to_mode (mode, result, 0);
4318 convert_move (target, result, 0);
4322 /* Expand the library call ourselves using a stabilized argument
4323 list to avoid re-evaluating the function's arguments twice. */
4324 #ifdef HAVE_cmpstrnsi
4327 fndecl = get_callee_fndecl (exp);
4328 fn = build_call_expr (fndecl, 2, arg1, arg2);
4329 if (TREE_CODE (fn) == CALL_EXPR)
4330 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4331 return expand_call (fn, target, target == const0_rtx);
4337 /* Expand expression EXP, which is a call to the strncmp builtin.  Return
4338 NULL_RTX if we failed; the caller should emit a normal call.  Otherwise try to get
4339 the result in TARGET, if convenient.  */
4342 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4344 if (!validate_arglist (exp,
4345 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
/* First try to fold the whole call away (e.g. both strings constant).  */
4349 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4350 CALL_EXPR_ARG (exp, 1),
4351 CALL_EXPR_ARG (exp, 2));
4353 return expand_expr (result, target, mode, EXPAND_NORMAL);
4356 /* If c_strlen can determine an expression for one of the string
4357 lengths, and it doesn't have side effects, then emit cmpstrnsi
4358 using length MIN(strlen(string)+1, arg3). */
4359 #ifdef HAVE_cmpstrnsi
4362 tree len, len1, len2;
4363 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4366 tree arg1 = CALL_EXPR_ARG (exp, 0);
4367 tree arg2 = CALL_EXPR_ARG (exp, 1);
4368 tree arg3 = CALL_EXPR_ARG (exp, 2);
4371 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4373 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4374 enum machine_mode insn_mode
4375 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4377 len1 = c_strlen (arg1, 1);
4378 len2 = c_strlen (arg2, 1);
/* Add one to each known length so the compare covers the terminating NUL.  */
4381 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4383 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4385 /* If we don't have a constant length for the first, use the length
4386 of the second, if we know it. We don't require a constant for
4387 this case; some cost analysis could be done if both are available
4388 but neither is constant. For now, assume they're equally cheap,
4389 unless one has side effects. If both strings have constant lengths,
4396 else if (TREE_SIDE_EFFECTS (len1))
4398 else if (TREE_SIDE_EFFECTS (len2))
4400 else if (TREE_CODE (len1) != INTEGER_CST)
4402 else if (TREE_CODE (len2) != INTEGER_CST)
4404 else if (tree_int_cst_lt (len1, len2))
4409 /* If both arguments have side effects, we cannot optimize. */
4410 if (!len || TREE_SIDE_EFFECTS (len))
4413 /* The actual new length parameter is MIN(len,arg3). */
4414 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4415 fold_convert (TREE_TYPE (len), arg3));
4417 /* If we don't have POINTER_TYPE, call the function. */
4418 if (arg1_align == 0 || arg2_align == 0)
4421 /* Make a place to write the result of the instruction.  Reuse TARGET
4422 only if it is a pseudo register of the insn's result mode.  */
4424 && REG_P (result) && GET_MODE (result) == insn_mode
4425 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4426 result = gen_reg_rtx (insn_mode);
4428 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4429 arg1 = builtin_save_expr (arg1);
4430 arg2 = builtin_save_expr (arg2);
4431 len = builtin_save_expr (len);
4433 arg1_rtx = get_memory_rtx (arg1, len);
4434 arg2_rtx = get_memory_rtx (arg2, len);
4435 arg3_rtx = expand_normal (len);
4436 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4437 GEN_INT (MIN (arg1_align, arg2_align)));
4442 /* Return the value in the proper mode for this function. */
4443 mode = TYPE_MODE (TREE_TYPE (exp));
4444 if (GET_MODE (result) == mode)
4447 return convert_to_mode (mode, result, 0);
4448 convert_move (target, result, 0);
4452 /* Expand the library call ourselves using a stabilized argument
4453 list to avoid re-evaluating the function's arguments twice. */
4454 fndecl = get_callee_fndecl (exp);
4455 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4456 if (TREE_CODE (fn) == CALL_EXPR)
4457 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4458 return expand_call (fn, target, target == const0_rtx);
4464 /* Expand expression EXP, which is a call to the strcat builtin.
4465 Return NULL_RTX if we failed; the caller should emit a normal call.
4466 Otherwise try to get the result in TARGET, if convenient.  */
4469 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4471 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4475 tree dst = CALL_EXPR_ARG (exp, 0);
4476 tree src = CALL_EXPR_ARG (exp, 1);
4477 const char *p = c_getstr (src);
4479 /* If the string length is zero, return the dst parameter. */
4480 if (p && *p == '\0')
4481 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4485 /* See if we can store by pieces into (dst + strlen(dst)),
4486 i.e. rewrite strcat (dst, src) as strcpy (dst + strlen (dst), src).  */
4486 tree newsrc, newdst,
4487 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4490 /* Stabilize the argument list so DST and SRC are evaluated only once.  */
4491 newsrc = builtin_save_expr (src);
4492 dst = builtin_save_expr (dst);
4496 /* Create strlen (dst). */
4497 newdst = build_call_expr (strlen_fn, 1, dst);
4498 /* Create (dst p+ strlen (dst)). */
4500 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4501 newdst = builtin_save_expr (newdst);
/* If the strcpy expansion fails, discard the RTL generated so far.  */
4503 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4505 end_sequence (); /* Stop sequence. */
4509 /* Output the entire sequence. */
4510 insns = get_insns ();
/* strcat returns its first argument.  */
4514 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4521 /* Expand expression EXP, which is a call to the strncat builtin.
4522 Return NULL_RTX if we failed; the caller should emit a normal call.
4523 Otherwise try to get the result in TARGET, if convenient.
4524 This expander only succeeds when the call can be folded to a tree.  */
4526 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4528 if (validate_arglist (exp,
4529 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4531 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4532 CALL_EXPR_ARG (exp, 1),
4533 CALL_EXPR_ARG (exp, 2));
4535 return expand_expr (result, target, mode, EXPAND_NORMAL);
4540 /* Expand expression EXP, which is a call to the strspn builtin.
4541 Return NULL_RTX if we failed; the caller should emit a normal call.
4542 Otherwise try to get the result in TARGET, if convenient.
4543 Only succeeds when the call folds to a tree (e.g. constant strings).  */
4545 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4547 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4549 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4550 CALL_EXPR_ARG (exp, 1));
4552 return expand_expr (result, target, mode, EXPAND_NORMAL);
4557 /* Expand expression EXP, which is a call to the strcspn builtin.
4558 Return NULL_RTX if we failed; the caller should emit a normal call.
4559 Otherwise try to get the result in TARGET, if convenient.
4560 Only succeeds when the call folds to a tree (e.g. constant strings).  */
4562 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4564 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4566 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4567 CALL_EXPR_ARG (exp, 1));
4569 return expand_expr (result, target, mode, EXPAND_NORMAL);
4574 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4575 if that's convenient.  The real work is delegated to the target hook;
4576 this wrapper caches the result and hoists the insns to function entry.  */
4578 expand_builtin_saveregs (void)
4582 /* Don't do __builtin_saveregs more than once in a function.
4583 Save the result of the first call and reuse it. */
4584 if (saveregs_value != 0)
4585 return saveregs_value;
4587 /* When this function is called, it means that registers must be
4588 saved on entry to this function. So we migrate the call to the
4589 first insn of this function. */
4593 /* Do whatever the machine needs done in this case. */
4594 val = targetm.calls.expand_builtin_saveregs ();
4599 saveregs_value = val;
4601 /* Put the insns after the NOTE that starts the function. If this
4602 is inside a start_sequence, make the outer-level insn chain current, so
4603 the code is placed at the start of the function. */
4604 push_topmost_sequence ();
4605 emit_insn_after (seq, entry_of_function ());
4606 pop_topmost_sequence ();
4611 /* __builtin_args_info (N) returns word N of the arg space info
4612 for the current function. The number and meanings of words
4613 is controlled by the definition of CUMULATIVE_ARGS. */
4616 expand_builtin_args_info (tree exp)
4618 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
/* View the cumulative-args structure as an array of ints so individual
   words can be indexed; the assert below checks that this view is exact.  */
4619 int *word_ptr = (int *) &crtl->args.info;
4621 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4623 if (call_expr_nargs (exp) != 0)
/* The argument must be a compile-time constant in range [0, nwords).  */
4625 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4626 error ("argument of %<__builtin_args_info%> must be constant")
4629 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4631 if (wordnum < 0 || wordnum >= nwords)
4632 error ("argument of %<__builtin_args_info%> out of range");
4634 return GEN_INT (word_ptr[wordnum]);
4638 error ("missing argument in %<__builtin_args_info%>");
4643 /* Expand a call to __builtin_next_arg: compute the address just past the
4644 last named argument, i.e. internal_arg_pointer + arg_offset.  */
4646 expand_builtin_next_arg (void)
4648 /* Checking arguments is already done in fold_builtin_next_arg
4649 that must be called before this function. */
4650 return expand_binop (ptr_mode, add_optab,
4651 crtl->args.internal_arg_pointer,
4652 crtl->args.arg_offset_rtx,
4653 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4656 /* Make it easier for the backends by protecting the valist argument
4657 from multiple evaluations.  If NEEDS_LVALUE, the result must remain
4658 usable as an lvalue.  */
4660 stabilize_va_list (tree valist, int needs_lvalue)
4662 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4664 if (TREE_SIDE_EFFECTS (valist))
4665 valist = save_expr (valist);
4667 /* For this case, the backends will be expecting a pointer to
4668 TREE_TYPE (va_list_type_node), but it's possible we've
4669 actually been given an array (an actual va_list_type_node).
4670 So fix it by taking the array's address with the right pointer type.  */
4671 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4673 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4674 valist = build_fold_addr_expr_with_type (valist, p1);
/* Non-array va_list: stabilize through a pointer, then deref back.  */
4683 if (! TREE_SIDE_EFFECTS (valist))
4686 pt = build_pointer_type (va_list_type_node);
4687 valist = fold_build1 (ADDR_EXPR, pt, valist);
/* Force save_expr below to actually wrap the address.  */
4688 TREE_SIDE_EFFECTS (valist) = 1;
4691 if (TREE_SIDE_EFFECTS (valist))
4692 valist = save_expr (valist);
4693 valist = build_fold_indirect_ref (valist);
4699 /* The "standard" definition of va_list is void*.  Targets with a richer
4700 va_list override the build_builtin_va_list hook instead.  */
4702 std_build_builtin_va_list (void)
4704 return ptr_type_node;
4707 /* The "standard" implementation of va_start: just assign `nextarg' to
4708 the va_list object VALIST.  */
4711 std_expand_builtin_va_start (tree valist, rtx nextarg)
4713 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4714 convert_move (va_r, nextarg, 0);
4717 /* Expand EXP, a call to __builtin_va_start.  Diagnoses a missing second
4718 argument, then dispatches to the target hook or the standard expander.  */
4720 expand_builtin_va_start (tree exp)
4725 if (call_expr_nargs (exp) < 2)
4727 error ("too few arguments to function %<va_start%>");
4731 if (fold_builtin_next_arg (exp, true))
4734 nextarg = expand_builtin_next_arg ();
4735 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4737 if (targetm.expand_builtin_va_start)
4738 targetm.expand_builtin_va_start (valist, nextarg);
4740 std_expand_builtin_va_start (valist, nextarg);
4745 /* The "standard" implementation of va_arg: read the value from the
4746 current (padded) address and increment by the (padded) size. */
4749 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4751 tree addr, t, type_size, rounded_size, valist_tmp;
4752 unsigned HOST_WIDE_INT align, boundary;
4755 #ifdef ARGS_GROW_DOWNWARD
4756 /* All of the alignment and movement below is for args-grow-up machines.
4757 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4758 implement their own specialized gimplify_va_arg_expr routines. */
/* Arguments passed by reference are fetched as a pointer and dereferenced
   at the end.  */
4762 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4764 type = build_pointer_type (type);
4766 align = PARM_BOUNDARY / BITS_PER_UNIT;
4767 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4769 /* Hoist the valist value into a temporary for the moment. */
4770 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4772 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4773 requires greater alignment, we must perform dynamic alignment:
4774 ap = (ap + boundary - 1) & -boundary.  */
4774 if (boundary > align
4775 && !integer_zerop (TYPE_SIZE (type)))
4777 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4778 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4779 valist_tmp, size_int (boundary - 1)));
4780 gimplify_and_add (t, pre_p);
4782 t = fold_convert (sizetype, valist_tmp);
4783 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4784 fold_convert (TREE_TYPE (valist),
4785 fold_build2 (BIT_AND_EXPR, sizetype, t,
4786 size_int (-boundary))));
4787 gimplify_and_add (t, pre_p);
4792 /* If the actual alignment is less than the alignment of the type,
4793 adjust the type accordingly so that we don't assume strict alignment
4794 when dereferencing the pointer. */
4795 boundary *= BITS_PER_UNIT;
4796 if (boundary < TYPE_ALIGN (type))
4798 type = build_variant_type_copy (type);
4799 TYPE_ALIGN (type) = boundary;
4802 /* Compute the rounded size of the type. */
4803 type_size = size_in_bytes (type);
4804 rounded_size = round_up (type_size, align);
4806 /* Reduce rounded_size so it's sharable with the postqueue. */
4807 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4811 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4813 /* Small args are padded downward: skip the leading padding so ADDR
4814 points at the value itself.  */
4814 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4815 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4816 size_binop (MINUS_EXPR, rounded_size, type_size));
4817 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4820 /* Compute new value for AP. */
4821 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4822 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4823 gimplify_and_add (t, pre_p);
4825 addr = fold_convert (build_pointer_type (type), addr);
/* For pass-by-reference arguments, an extra dereference is needed.  */
4828 addr = build_va_arg_indirect_ref (addr);
4830 return build_va_arg_indirect_ref (addr);
4833 /* Build an indirect-ref expression over the given TREE, which represents a
4834 piece of a va_arg() expansion. */
4836 build_va_arg_indirect_ref (tree addr)
4838 addr = build_fold_indirect_ref (addr);
4840 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4846 /* Return a dummy expression of type TYPE in order to keep going after an
4847 error: a dereference of a null pointer constant of that type.  */
4850 dummy_object (tree type)
4852 tree t = build_int_cst (build_pointer_type (type), 0);
4853 return build1 (INDIRECT_REF, type, t);
4856 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4857 builtin function, but a very special sort of operator. */
4859 enum gimplify_status
4860 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4862 tree promoted_type, want_va_type, have_va_type;
4863 tree valist = TREE_OPERAND (*expr_p, 0);
4864 tree type = TREE_TYPE (*expr_p);
4867 /* Verify that valist is of the proper type. */
4868 want_va_type = va_list_type_node;
4869 have_va_type = TREE_TYPE (valist);
4871 if (have_va_type == error_mark_node)
4874 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4876 /* If va_list is an array type, the argument may have decayed
4877 to a pointer type, e.g. by being passed to another function.
4878 In that case, unwrap both types so that we can compare the
4879 underlying records. */
4880 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4881 || POINTER_TYPE_P (have_va_type))
4883 want_va_type = TREE_TYPE (want_va_type);
4884 have_va_type = TREE_TYPE (have_va_type);
4888 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4890 error ("first argument to %<va_arg%> not of type %<va_list%>");
4894 /* Generate a diagnostic for requesting data of a type that cannot
4895 be passed through `...' due to type promotion at the call site. */
4896 else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
/* Only show the "pass promoted_type" hint once per compilation.  */
4899 static bool gave_help;
4901 /* Unfortunately, this is merely undefined, rather than a constraint
4902 violation, so we cannot make this an error. If this call is never
4903 executed, the program is still strictly conforming. */
4904 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4905 type, promoted_type);
4909 inform ("(so you should pass %qT not %qT to %<va_arg%>)",
4910 promoted_type, type);
4913 /* We can, however, treat "undefined" any way we please.
4914 Call abort to encourage the user to fix the program. */
4915 inform ("if this code is reached, the program will abort");
4916 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4917 append_to_statement_list (t, pre_p);
4919 /* This is dead code, but go ahead and finish so that the
4920 mode of the result comes out right. */
4921 *expr_p = dummy_object (type);
4926 /* Make it easier for the backends by protecting the valist argument
4927 from multiple evaluations. */
4928 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4930 /* For this case, the backends will be expecting a pointer to
4931 TREE_TYPE (va_list_type_node), but it's possible we've
4932 actually been given an array (an actual va_list_type_node).
4933 So fix it by taking the array's address.  */
4934 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4936 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4937 valist = build_fold_addr_expr_with_type (valist, p1);
4939 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4942 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4944 if (!targetm.gimplify_va_arg_expr)
4945 /* FIXME:Once most targets are converted we should merely
4946 assert this is non-null. */
4949 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4954 /* Expand EXP, a call to __builtin_va_end. */
4957 expand_builtin_va_end (tree exp)
4959 tree valist = CALL_EXPR_ARG (exp, 0);
4961 /* Evaluate for side effects, if needed.  I hate macros that don't
4962 do that.  va_end itself generates no code on any current target.  */
4963 if (TREE_SIDE_EFFECTS (valist))
4964 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4969 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4970 builtin rather than just as an assignment in stdarg.h because of the
4971 nastiness of array-type va_list types. */
4974 expand_builtin_va_copy (tree exp)
4978 dst = CALL_EXPR_ARG (exp, 0);
4979 src = CALL_EXPR_ARG (exp, 1);
4981 dst = stabilize_va_list (dst, 1);
4982 src = stabilize_va_list (src, 0);
/* Scalar va_list: a simple assignment suffices.  */
4984 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4986 t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4987 TREE_SIDE_EFFECTS (t) = 1;
4988 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Array va_list: copy the whole object with a block move.  */
4992 rtx dstb, srcb, size;
4994 /* Evaluate to pointers. */
4995 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4996 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4997 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4998 VOIDmode, EXPAND_NORMAL);
5000 dstb = convert_memory_address (Pmode, dstb);
5001 srcb = convert_memory_address (Pmode, srcb);
5003 /* "Dereference" to BLKmode memories. */
5004 dstb = gen_rtx_MEM (BLKmode, dstb);
5005 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5006 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
5007 srcb = gen_rtx_MEM (BLKmode, srcb);
5008 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5009 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
5012 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5018 /* Expand a call to one of the builtin functions __builtin_frame_address or
5019 __builtin_return_address.  FNDECL distinguishes which of the two.  */
5022 expand_builtin_frame_address (tree fndecl, tree exp)
5024 /* The argument must be a nonnegative integer constant.
5025 It counts the number of frames to scan up the stack.
5026 The value is the return address saved in that frame. */
5027 if (call_expr_nargs (exp) == 0)
5028 /* Warning about missing arg was already issued. */
5030 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5032 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5033 error ("invalid argument to %<__builtin_frame_address%>");
5035 error ("invalid argument to %<__builtin_return_address%>");
5041 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5042 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5044 /* Some ports cannot access arbitrary stack frames. */
5047 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5048 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5050 warning (0, "unsupported argument to %<__builtin_return_address%>");
5054 /* For __builtin_frame_address, return what we've got. */
5055 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
/* Copy non-constant results into a register before returning.  */
5059 && ! CONSTANT_P (tem))
5060 tem = copy_to_mode_reg (Pmode, tem);
5065 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5066 we failed and the caller should emit a normal call, otherwise try to get
5067 the result in TARGET, if convenient. */
5070 expand_builtin_alloca (tree exp, rtx target)
5075 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5076 should always expand to function calls. These can be intercepted
5077 by the mudflap runtime.  */
5081 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5084 /* Compute the argument. */
5085 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5087 /* Allocate the desired space.  The result is a stack address; convert
5088 it to ptr_mode, which is what the caller-visible pointer type uses.  */
5088 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5089 result = convert_memory_address (ptr_mode, result);
5094 /* Expand a call to a bswap builtin with argument ARG0. MODE
5095 is the mode to expand with.  Result goes to TARGET if convenient.  */
5098 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5100 enum machine_mode mode;
5104 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5107 arg = CALL_EXPR_ARG (exp, 0);
5108 mode = TYPE_MODE (TREE_TYPE (arg));
5109 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
/* expand_unop handles the byte-swap via bswap_optab.  */
5111 target = expand_unop (mode, bswap_optab, op0, target, 1);
5113 gcc_assert (target);
5115 return convert_to_mode (mode, target, 0);
5118 /* Expand a call to a unary builtin in EXP.
5119 Return NULL_RTX if a normal call should be emitted rather than expanding the
5120 function in-line. If convenient, the result should be placed in TARGET.
5121 SUBTARGET may be used as the target for computing one of EXP's operands. */
5124 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5125 rtx subtarget, optab op_optab)
5129 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5132 /* Compute the argument. */
5133 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5134 VOIDmode, EXPAND_NORMAL);
5135 /* Compute op, into TARGET if possible.
5136 Set TARGET to wherever the result comes back. */
5137 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5138 op_optab, op0, target, 1);
5139 gcc_assert (target);
/* Convert from the operand's mode to the mode the caller expects.  */
5141 return convert_to_mode (target_mode, target, 0);
5144 /* If the string passed to fputs is a constant and is one character
5145 long, we attempt to transform this call into __builtin_fputc().
5146 The folding itself is done by fold_builtin_fputs; UNLOCKED selects
5147 the _unlocked variants.  */
5148 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5150 /* Verify the arguments in the original call. */
5151 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5153 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5154 CALL_EXPR_ARG (exp, 1),
5155 (target == const0_rtx),
5156 unlocked, NULL_TREE);
5158 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5163 /* Expand a call to __builtin_expect. We just return our argument
5164 as the builtin_expect semantic should've been already executed by
5165 tree branch prediction pass. */
5168 expand_builtin_expect (tree exp, rtx target)
5172 if (call_expr_nargs (exp) < 2)
5174 arg = CALL_EXPR_ARG (exp, 0);
5175 c = CALL_EXPR_ARG (exp, 1)
5177 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5178 /* When guessing was done, the hints should be already stripped away. */
5179 gcc_assert (!flag_guess_branch_prob
5180 || optimize == 0 || errorcount || sorrycount);
/* Expand __builtin_trap: use the target's trap insn if it has one,
   otherwise fall back to calling abort.  */
5185 expand_builtin_trap (void)
5189 emit_insn (gen_trap ());
5192 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5196 /* Expand EXP, a call to fabs, fabsf or fabsl.
5197 Return NULL_RTX if a normal call should be emitted rather than expanding
5198 the function inline. If convenient, the result should be placed
5199 in TARGET. SUBTARGET may be used as the target for computing
5200 the operand.  */
5203 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5205 enum machine_mode mode;
5209 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5212 arg = CALL_EXPR_ARG (exp, 0);
/* Stabilize the argument; the saved form is written back into the call so
   it is evaluated only once.  */
5213 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5214 mode = TYPE_MODE (TREE_TYPE (arg));
5215 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5216 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5219 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5220 Return NULL if a normal call should be emitted rather than expanding the
5221 function inline. If convenient, the result should be placed in TARGET.
5222 SUBTARGET may be used as the target for computing the operand. */
5225 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5230 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5233 arg = CALL_EXPR_ARG (exp, 0);
5234 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5236 arg = CALL_EXPR_ARG (exp, 1);
5237 op1 = expand_normal (arg);
5239 return expand_copysign (op0, op1, target);
5242 /* Create a new constant string literal and return a char* pointer to it.
5243 The STRING_CST value is the LEN characters at STR. */
5245 build_string_literal (int len, const char *str)
5247 tree t, elem, index, type;
5249 t = build_string (len, str);
/* Element type is `const char'; the array type is const char[len].  */
5250 elem = build_type_variant (char_type_node, 1, 0);
5251 index = build_index_type (size_int (len - 1));
5252 type = build_array_type (elem, index);
5253 TREE_TYPE (t) = type;
5254 TREE_CONSTANT (t) = 1;
5255 TREE_READONLY (t) = 1;
5256 TREE_STATIC (t) = 1;
/* Return &literal[0] as a pointer-to-element, the form callers expect.  */
5258 type = build_pointer_type (elem);
5259 t = build1 (ADDR_EXPR, type,
5260 build4 (ARRAY_REF, elem,
5261 t, integer_zero_node, NULL_TREE, NULL_TREE));
5265 /* Expand EXP, a call to printf or printf_unlocked.
5266 Return NULL_RTX if a normal call should be emitted rather than transforming
5267 the function inline. If convenient, the result should be placed in
5268 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5269 call.  */
5271 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5274 /* If we're using an unlocked function, assume the other unlocked
5275 functions exist explicitly. */
5276 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5277 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5278 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5279 : implicit_built_in_decls[BUILT_IN_PUTS];
5280 const char *fmt_str;
5283 int nargs = call_expr_nargs (exp);
5285 /* If the return value is used, don't do the transformation:
5286 puts/putchar return values differ from printf's.  */
5286 if (target != const0_rtx)
5289 /* Verify the required arguments in the original call. */
5292 fmt = CALL_EXPR_ARG (exp, 0);
5293 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5296 /* Check whether the format is a literal string constant. */
5297 fmt_str = c_getstr (fmt);
5298 if (fmt_str == NULL)
5301 if (!init_target_chars ())
5304 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5305 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5308 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5311 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5313 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5314 else if (strcmp (fmt_str, target_percent_c) == 0)
5317 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5320 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5324 /* We can't handle anything else with % args or %% ... yet. */
5325 if (strchr (fmt_str, target_percent))
5331 /* If the format specifier was "", printf does nothing. */
5332 if (fmt_str[0] == '\0')
5334 /* If the format specifier has length of 1, call putchar. */
5335 if (fmt_str[1] == '\0')
5337 /* Given printf("c"), (where c is any one character,)
5338 convert "c"[0] to an int and pass that to the replacement
5339 function.  */
5340 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5342 fn = build_call_expr (fn_putchar, 1, arg);
5346 /* If the format specifier was "string\n", call puts("string"). */
5347 size_t len = strlen (fmt_str);
5348 if ((unsigned char)fmt_str[len - 1] == target_newline)
5350 /* Create a NUL-terminated string that's one char shorter
5351 than the original, stripping off the trailing '\n'. */
5352 char *newstr = alloca (len);
5353 memcpy (newstr, fmt_str, len - 1);
5354 newstr[len - 1] = 0;
5355 arg = build_string_literal (len, newstr);
5357 fn = build_call_expr (fn_puts, 1, arg);
5360 /* We'd like to arrange to call fputs(string,stdout) here,
5361 but we need stdout and don't have a way to get it yet. */
/* Propagate the tail-call flag to the replacement call.  */
5368 if (TREE_CODE (fn) == CALL_EXPR)
5369 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5370 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5373 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5374 Return NULL_RTX if a normal call should be emitted rather than transforming
5375 the function inline. If convenient, the result should be placed in
5376 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5377 call.  */
5379 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5382 /* If we're using an unlocked function, assume the other unlocked
5383 functions exist explicitly. */
5384 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5385 : implicit_built_in_decls[BUILT_IN_FPUTC];
5386 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5387 : implicit_built_in_decls[BUILT_IN_FPUTS];
5388 const char *fmt_str;
5391 int nargs = call_expr_nargs (exp);
5393 /* If the return value is used, don't do the transformation:
5394 fputs/fputc return values differ from fprintf's.  */
5394 if (target != const0_rtx)
5397 /* Verify the required arguments in the original call. */
5400 fp = CALL_EXPR_ARG (exp, 0);
5401 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5403 fmt = CALL_EXPR_ARG (exp, 1);
5404 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5407 /* Check whether the format is a literal string constant. */
5408 fmt_str = c_getstr (fmt);
5409 if (fmt_str == NULL)
5412 if (!init_target_chars ())
5415 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5416 if (strcmp (fmt_str, target_percent_s) == 0)
5419 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5421 arg = CALL_EXPR_ARG (exp, 2);
5423 fn = build_call_expr (fn_fputs, 2, arg, fp);
5425 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5426 else if (strcmp (fmt_str, target_percent_c) == 0)
5429 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5431 arg = CALL_EXPR_ARG (exp, 2);
5433 fn = build_call_expr (fn_fputc, 2, arg, fp);
5437 /* We can't handle anything else with % args or %% ... yet. */
5438 if (strchr (fmt_str, target_percent))
5444 /* If the format specifier was "", fprintf does nothing. */
5445 if (fmt_str[0] == '\0')
5447 /* Evaluate and ignore FILE* argument for side-effects. */
5448 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5452 /* When "string" doesn't contain %, replace all cases of
5453 fprintf(stream,string) with fputs(string,stream). The fputs
5454 builtin will take care of special cases like length == 1. */
5456 fn = build_call_expr (fn_fputs, 2, fmt, fp);
/* Propagate the tail-call flag to the replacement call.  */
5461 if (TREE_CODE (fn) == CALL_EXPR)
5462 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5463 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5466 /* Expand a call EXP to sprintf. Return NULL_RTX if
5467 a normal call should be emitted rather than expanding the function
5468 inline. If convenient, the result should be placed in TARGET with
5472 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5475 const char *fmt_str;
5476 int nargs = call_expr_nargs (exp);
5478 /* Verify the required arguments in the original call. */
5481 dest = CALL_EXPR_ARG (exp, 0);
5482 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5484 fmt = CALL_EXPR_ARG (exp, 0);
5485 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5488 /* Check whether the format is a literal string constant. */
5489 fmt_str = c_getstr (fmt);
5490 if (fmt_str == NULL)
5493 if (!init_target_chars ())
5496 /* If the format doesn't contain % args or %%, use strcpy. */
5497 if (strchr (fmt_str, target_percent) == 0)
5499 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5502 if ((nargs > 2) || ! fn)
5504 expand_expr (build_call_expr (fn, 2, dest, fmt),
5505 const0_rtx, VOIDmode, EXPAND_NORMAL);
5506 if (target == const0_rtx)
5508 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5509 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5511 /* If the format is "%s", use strcpy if the result isn't used. */
5512 else if (strcmp (fmt_str, target_percent_s) == 0)
5515 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5521 arg = CALL_EXPR_ARG (exp, 2);
5522 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5525 if (target != const0_rtx)
5527 len = c_strlen (arg, 1);
5528 if (! len || TREE_CODE (len) != INTEGER_CST)
5534 expand_expr (build_call_expr (fn, 2, dest, arg),
5535 const0_rtx, VOIDmode, EXPAND_NORMAL);
5537 if (target == const0_rtx)
5539 return expand_expr (len, target, mode, EXPAND_NORMAL);
5545 /* Expand a call to either the entry or exit function profiler.  */
5548 expand_builtin_profile_func (bool exitp)
/* DECL_RTL of the current FUNCTION_DECL is a MEM; its address operand
   is the function's address, which we pass to the profiling hook.  */
5552 this = DECL_RTL (current_function_decl);
5553 gcc_assert (MEM_P (this));
5554 this = XEXP (this, 0);
/* EXITP selects the exit-profiling libfunc, otherwise the entry one.  */
5557 which = profile_function_exit_libfunc;
5559 which = profile_function_entry_libfunc;
/* Emit the library call, passing the function address and the
   caller's return address (via the __builtin_return_address path).  */
5561 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5562 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5569 /* Expand a call to __builtin___clear_cache.  */
5572 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
/* Three configurations: no clear_cache insn but a CLEAR_INSN_CACHE
   macro (fall back to the libgcc call), neither (do nothing), or a
   clear_cache insn in the md file (expand it inline below).  */
5574 #ifndef HAVE_clear_cache
5575 #ifdef CLEAR_INSN_CACHE
5576 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5577 does something. Just do the default expansion to a call to
5581 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5582 does nothing. There is no need to call it. Do nothing. */
5584 #endif /* CLEAR_INSN_CACHE */
5586 /* We have a "clear_cache" insn, and it will handle everything. */
5588 rtx begin_rtx, end_rtx;
5589 enum insn_code icode;
5591 /* We must not expand to a library call. If we did, any
5592 fallback library function in libgcc that might contain a call to
5593 __builtin___clear_cache() would recurse infinitely. */
5594 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5596 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5600 if (HAVE_clear_cache)
5602 icode = CODE_FOR_clear_cache;
/* Expand each pointer argument, force it into Pmode, and coerce it
   to satisfy the insn's operand predicate.  */
5604 begin = CALL_EXPR_ARG (exp, 0);
5605 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5606 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5607 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5608 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5610 end = CALL_EXPR_ARG (exp, 1);
5611 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5612 end_rtx = convert_memory_address (Pmode, end_rtx);
5613 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5614 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5616 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5619 #endif /* HAVE_clear_cache */
5622 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5625 round_trampoline_addr (rtx tramp)
5627 rtx temp, addend, mask;
5629 /* If we don't need too much alignment, we'll have been guaranteed
5630 proper alignment by get_trampoline_type. */
5631 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5634 /* Round address up to desired boundary. */
/* Computes tramp = (tramp + align_bytes - 1) & -align_bytes in Pmode,
   where align_bytes = TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT.  */
5635 temp = gen_reg_rtx (Pmode)
5636 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5637 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5639 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5640 temp, 0, OPTAB_LIB_WIDEN);
5641 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5642 temp, 0, OPTAB_LIB_WIDEN);
/* Expand a call to __builtin_init_trampoline: copy the target's
   trampoline template (if any) to the buffer and fill in the target
   function address and static chain.  */
5648 expand_builtin_init_trampoline (tree exp)
5650 tree t_tramp, t_func, t_chain;
5651 rtx r_tramp, r_func, r_chain;
5652 #ifdef TRAMPOLINE_TEMPLATE
5656 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5657 POINTER_TYPE, VOID_TYPE))
/* Arguments: trampoline buffer, nested function, static chain.  */
5660 t_tramp = CALL_EXPR_ARG (exp, 0);
5661 t_func = CALL_EXPR_ARG (exp, 1);
5662 t_chain = CALL_EXPR_ARG (exp, 2);
5664 r_tramp = expand_normal (t_tramp);
5665 r_func = expand_normal (t_func);
5666 r_chain = expand_normal (t_chain);
5668 /* Generate insns to initialize the trampoline. */
5669 r_tramp = round_trampoline_addr (r_tramp);
/* Copy the static trampoline template into the (aligned) buffer,
   then let the target macro patch in the function and chain.  */
5670 #ifdef TRAMPOLINE_TEMPLATE
5671 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5672 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5673 emit_block_move (blktramp, assemble_trampoline_template (),
5674 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5676 trampolines_created = 1;
5677 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
/* Expand a call to __builtin_adjust_trampoline: round the trampoline
   address to TRAMPOLINE_ALIGNMENT and apply any target-specific
   address adjustment.  */
5683 expand_builtin_adjust_trampoline (tree exp)
5687 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5690 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5691 tramp = round_trampoline_addr (tramp);
5692 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5693 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5699 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5700 function. The function first checks whether the back end provides
5701 an insn to implement signbit for the respective mode. If not, it
5702 checks whether the floating point format of the value is such that
5703 the sign bit can be extracted. If that is not the case, the
5704 function returns NULL_RTX to indicate that a normal call should be
5705 emitted rather than expanding the function in-line. EXP is the
5706 expression that is a call to the builtin function; if convenient,
5707 the result should be placed in TARGET. */
5709 expand_builtin_signbit (tree exp, rtx target)
5711 const struct real_format *fmt;
5712 enum machine_mode fmode, imode, rmode;
5713 HOST_WIDE_INT hi, lo;
5716 enum insn_code icode;
5719 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
/* FMODE is the mode of the float argument, RMODE the mode of the
   (integer) result; FMT describes the float format's bit layout.  */
5722 arg = CALL_EXPR_ARG (exp, 0);
5723 fmode = TYPE_MODE (TREE_TYPE (arg));
5724 rmode = TYPE_MODE (TREE_TYPE (exp));
5725 fmt = REAL_MODE_FORMAT (fmode);
5727 arg = builtin_save_expr (arg);
5729 /* Expand the argument yielding a RTX expression. */
5730 temp = expand_normal (arg);
5732 /* Check if the back end provides an insn that handles signbit for the
5734 icode = signbit_optab->handlers [(int) fmode].insn_code;
5735 if (icode != CODE_FOR_nothing)
5737 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5738 emit_unop_insn (icode, target, temp, UNKNOWN);
5742 /* For floating point formats without a sign bit, implement signbit
5744 bitpos = fmt->signbit_ro;
5747 /* But we can't do this if the format supports signed zero. */
5748 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
/* No sign bit in the format: signbit(x) is simply x < 0.0.  */
5751 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5752 build_real (TREE_TYPE (arg), dconst0));
5753 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
/* Word-sized or smaller: view the float bits as an integer mode.  */
5756 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5758 imode = int_mode_for_mode (fmode);
5759 if (imode == BLKmode)
5761 temp = gen_lowpart (imode, temp);
/* Wider than a word: pick the word that contains the sign bit.  */
5766 /* Handle targets with different FP word orders. */
5767 if (FLOAT_WORDS_BIG_ENDIAN)
5768 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5770 word = bitpos / BITS_PER_WORD;
5771 temp = operand_subword_force (temp, word, fmode);
5772 bitpos = bitpos % BITS_PER_WORD;
5775 /* Force the intermediate word_mode (or narrower) result into a
5776 register. This avoids attempting to create paradoxical SUBREGs
5777 of floating point modes below. */
5778 temp = force_reg (imode, temp);
5780 /* If the bitpos is within the "result mode" lowpart, the operation
5781 can be implement with a single bitwise AND. Otherwise, we need
5782 a right shift and an AND. */
5784 if (bitpos < GET_MODE_BITSIZE (rmode))
/* Build the HOST_WIDE_INT pair (LO, HI) holding 1 << bitpos.  */
5786 if (bitpos < HOST_BITS_PER_WIDE_INT)
5789 lo = (HOST_WIDE_INT) 1 << bitpos;
5793 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5797 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5798 temp = gen_lowpart (rmode, temp);
5799 temp = expand_binop (rmode, and_optab, temp,
5800 immed_double_const (lo, hi, rmode),
5801 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5805 /* Perform a logical right shift to place the signbit in the least
5806 significant bit, then truncate the result to the desired mode
5807 and mask just this bit. */
5808 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5809 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5810 temp = gen_lowpart (rmode, temp);
5811 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5812 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5818 /* Expand fork or exec calls. TARGET is the desired target of the
5819 call. EXP is the call. FN is the
5820 identificator of the actual function. IGNORE is nonzero if the
5821 value is to be ignored. */
5824 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5829 /* If we are not profiling, just call the function. */
5830 if (!profile_arc_flag)
5833 /* Otherwise call the wrapper. This should be equivalent for the rest of
5834 compiler, so the code does not diverge, and the wrapper may run the
5835 code necessary for keeping the profiling sane. */
/* Map each builtin to its __gcov_* wrapper, which flushes/saves
   profile counters around the fork/exec.  */
5837 switch (DECL_FUNCTION_CODE (fn))
5840 id = get_identifier ("__gcov_fork");
5843 case BUILT_IN_EXECL:
5844 id = get_identifier ("__gcov_execl");
5847 case BUILT_IN_EXECV:
5848 id = get_identifier ("__gcov_execv");
5851 case BUILT_IN_EXECLP:
5852 id = get_identifier ("__gcov_execlp");
5855 case BUILT_IN_EXECLE:
5856 id = get_identifier ("__gcov_execle");
5859 case BUILT_IN_EXECVP:
5860 id = get_identifier ("__gcov_execvp");
5863 case BUILT_IN_EXECVE:
5864 id = get_identifier ("__gcov_execve");
/* Build an extern declaration for the wrapper with the same type as
   FN, then rewrite the call expression to target it.  */
5871 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5872 DECL_EXTERNAL (decl) = 1;
5873 TREE_PUBLIC (decl) = 1;
5874 DECL_ARTIFICIAL (decl) = 1;
5875 TREE_NOTHROW (decl) = 1;
5876 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5877 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5878 call = rewrite_call_expr (exp, 0, decl, 0);
5879 return expand_call (call, target, ignore);
5884 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5885 the pointer in these functions is void*, the tree optimizers may remove
5886 casts. The mode computed in expand_builtin isn't reliable either, due
5887 to __sync_bool_compare_and_swap.
5889 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5890 group of builtins. This gives us log2 of the mode size. */
5892 static inline enum machine_mode
5893 get_builtin_sync_mode (int fcode_diff)
5895 /* The size is not negotiable, so ask not to get BLKmode in return
5896 if the target indicates that a smaller size would be better. */
5897 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5900 /* Expand the memory expression LOC and return the appropriate memory operand
5901 for the builtin_sync operations. */
5904 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5908 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5910 /* Note that we explicitly do not want any alias information for this
5911 memory, so that we kill all other live memories. Otherwise we don't
5912 satisfy the full barrier semantics of the intrinsic. */
5913 mem = validize_mem (gen_rtx_MEM (mode, addr));
/* Mark the MEM volatile and in the memory-barrier alias set so no
   other memory access is moved across it.  */
5915 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5916 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5917 MEM_VOLATILE_P (mem) = 1;
5922 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5923 EXP is the CALL_EXPR. CODE is the rtx code
5924 that corresponds to the arithmetic or logical operation from the name;
5925 an exception here is that NOT actually means NAND. TARGET is an optional
5926 place for us to store the results; AFTER is true if this is the
5927 fetch_and_xxx form. IGNORE is true if we don't actually care about
5928 the result of the operation at all. */
5931 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5932 enum rtx_code code, bool after,
5933 rtx target, bool ignore)
5936 enum machine_mode old_mode;
5938 /* Expand the operands. */
5939 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5941 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5942 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5943 of CONST_INTs, where we know the old_mode only from the call argument. */
5944 old_mode = GET_MODE (val);
5945 if (old_mode == VOIDmode)
5946 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5947 val = convert_modes (mode, old_mode, val, 1);
/* When the result is unused, emit the bare atomic operation;
   otherwise emit the fetch-style form that also produces a value.  */
5950 return expand_sync_operation (mem, val, code);
5952 return expand_sync_fetch_operation (mem, val, code, after, target);
5955 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5956 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5957 true if this is the boolean form. TARGET is a place for us to store the
5958 results; this is NOT optional if IS_BOOL is true. */
5961 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5962 bool is_bool, rtx target)
5964 rtx old_val, new_val, mem;
5965 enum machine_mode old_mode;
5967 /* Expand the operands. */
5968 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
/* Argument 1 is the expected (comparison) value.  */
5971 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5972 mode, EXPAND_NORMAL);
5973 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5974 of CONST_INTs, where we know the old_mode only from the call argument. */
5975 old_mode = GET_MODE (old_val);
5976 if (old_mode == VOIDmode)
5977 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5978 old_val = convert_modes (mode, old_mode, old_val, 1);
/* Argument 2 is the replacement value; OLD_MODE is reused as a
   scratch for the same promotion fix-up.  */
5980 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5981 mode, EXPAND_NORMAL);
5982 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5983 of CONST_INTs, where we know the old_mode only from the call argument. */
5984 old_mode = GET_MODE (new_val);
5985 if (old_mode == VOIDmode)
5986 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5987 new_val = convert_modes (mode, old_mode, new_val, 1);
5990 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5992 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5995 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5996 general form is actually an atomic exchange, and some targets only
5997 support a reduced form with the second argument being a constant 1.
5998 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6002 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6006 enum machine_mode old_mode;
6008 /* Expand the operands. */
6009 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6010 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6011 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6012 of CONST_INTs, where we know the old_mode only from the call argument. */
6013 old_mode = GET_MODE (val);
6014 if (old_mode == VOIDmode)
6015 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6016 val = convert_modes (mode, old_mode, val, 1);
6018 return expand_sync_lock_test_and_set (mem, val, target);
6021 /* Expand the __sync_synchronize intrinsic. */
6024 expand_builtin_synchronize (void)
/* Prefer an explicit memory_barrier insn when the md file has one.  */
6028 #ifdef HAVE_memory_barrier
6029 if (HAVE_memory_barrier)
6031 emit_insn (gen_memory_barrier ());
6036 /* If no explicit memory barrier instruction is available, create an
6037 empty asm stmt with a memory clobber. */
6038 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6039 tree_cons (NULL, build_string (6, "memory"), NULL));
6040 ASM_VOLATILE_P (x) = 1;
6041 expand_asm_expr (x);
6044 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6047 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6049 enum insn_code icode;
/* Releasing a lock stores zero with release semantics.  */
6051 rtx val = const0_rtx;
6053 /* Expand the operands. */
6054 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6056 /* If there is an explicit operation in the md file, use it. */
6057 icode = sync_lock_release[mode];
6058 if (icode != CODE_FOR_nothing)
6060 if (!insn_data[icode].operand[1].predicate (val, mode))
6061 val = force_reg (mode, val);
6063 insn = GEN_FCN (icode) (mem, val);
6071 /* Otherwise we can implement this operation by emitting a barrier
6072 followed by a store of zero. */
6073 expand_builtin_synchronize ();
6074 emit_move_insn (mem, val);
6077 /* Expand an expression EXP that calls a built-in function,
6078 with result going to TARGET if that's convenient
6079 (and in mode MODE if that's convenient).
6080 SUBTARGET may be used as the target for computing one of EXP's operands.
6081 IGNORE is nonzero if the value is to be ignored. */
6084 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6087 tree fndecl = get_callee_fndecl (exp);
6088 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6089 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6091 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6092 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6094 /* When not optimizing, generate calls to library functions for a certain
6097 && !called_as_built_in (fndecl)
6098 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6099 && fcode != BUILT_IN_ALLOCA)
6100 return expand_call (exp, target, ignore);
6102 /* The built-in function expanders test for target == const0_rtx
6103 to determine whether the function's result will be ignored. */
6105 target = const0_rtx;
6107 /* If the result of a pure or const built-in function is ignored, and
6108 none of its arguments are volatile, we can avoid expanding the
6109 built-in call and just evaluate the arguments for side-effects. */
6110 if (target == const0_rtx
6111 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6113 bool volatilep = false;
6115 call_expr_arg_iterator iter;
6117 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6118 if (TREE_THIS_VOLATILE (arg))
6126 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6127 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6134 CASE_FLT_FN (BUILT_IN_FABS):
6135 target = expand_builtin_fabs (exp, target, subtarget);
6140 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6141 target = expand_builtin_copysign (exp, target, subtarget);
6146 /* Just do a normal library call if we were unable to fold
6148 CASE_FLT_FN (BUILT_IN_CABS):
6151 CASE_FLT_FN (BUILT_IN_EXP):
6152 CASE_FLT_FN (BUILT_IN_EXP10):
6153 CASE_FLT_FN (BUILT_IN_POW10):
6154 CASE_FLT_FN (BUILT_IN_EXP2):
6155 CASE_FLT_FN (BUILT_IN_EXPM1):
6156 CASE_FLT_FN (BUILT_IN_LOGB):
6157 CASE_FLT_FN (BUILT_IN_LOG):
6158 CASE_FLT_FN (BUILT_IN_LOG10):
6159 CASE_FLT_FN (BUILT_IN_LOG2):
6160 CASE_FLT_FN (BUILT_IN_LOG1P):
6161 CASE_FLT_FN (BUILT_IN_TAN):
6162 CASE_FLT_FN (BUILT_IN_ASIN):
6163 CASE_FLT_FN (BUILT_IN_ACOS):
6164 CASE_FLT_FN (BUILT_IN_ATAN):
6165 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6166 because of possible accuracy problems. */
6167 if (! flag_unsafe_math_optimizations)
6169 CASE_FLT_FN (BUILT_IN_SQRT):
6170 CASE_FLT_FN (BUILT_IN_FLOOR):
6171 CASE_FLT_FN (BUILT_IN_CEIL):
6172 CASE_FLT_FN (BUILT_IN_TRUNC):
6173 CASE_FLT_FN (BUILT_IN_ROUND):
6174 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6175 CASE_FLT_FN (BUILT_IN_RINT):
6176 target = expand_builtin_mathfn (exp, target, subtarget);
6181 CASE_FLT_FN (BUILT_IN_ILOGB):
6182 if (! flag_unsafe_math_optimizations)
6184 CASE_FLT_FN (BUILT_IN_ISINF):
6185 CASE_FLT_FN (BUILT_IN_FINITE):
6186 case BUILT_IN_ISFINITE:
6187 case BUILT_IN_ISNORMAL:
6188 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6193 CASE_FLT_FN (BUILT_IN_LCEIL):
6194 CASE_FLT_FN (BUILT_IN_LLCEIL):
6195 CASE_FLT_FN (BUILT_IN_LFLOOR):
6196 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6197 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6202 CASE_FLT_FN (BUILT_IN_LRINT):
6203 CASE_FLT_FN (BUILT_IN_LLRINT):
6204 CASE_FLT_FN (BUILT_IN_LROUND):
6205 CASE_FLT_FN (BUILT_IN_LLROUND):
6206 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6211 CASE_FLT_FN (BUILT_IN_POW):
6212 target = expand_builtin_pow (exp, target, subtarget);
6217 CASE_FLT_FN (BUILT_IN_POWI):
6218 target = expand_builtin_powi (exp, target, subtarget);
6223 CASE_FLT_FN (BUILT_IN_ATAN2):
6224 CASE_FLT_FN (BUILT_IN_LDEXP):
6225 CASE_FLT_FN (BUILT_IN_SCALB):
6226 CASE_FLT_FN (BUILT_IN_SCALBN):
6227 CASE_FLT_FN (BUILT_IN_SCALBLN):
6228 if (! flag_unsafe_math_optimizations)
6231 CASE_FLT_FN (BUILT_IN_FMOD):
6232 CASE_FLT_FN (BUILT_IN_REMAINDER):
6233 CASE_FLT_FN (BUILT_IN_DREM):
6234 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6239 CASE_FLT_FN (BUILT_IN_CEXPI):
6240 target = expand_builtin_cexpi (exp, target, subtarget);
6241 gcc_assert (target);
6244 CASE_FLT_FN (BUILT_IN_SIN):
6245 CASE_FLT_FN (BUILT_IN_COS):
6246 if (! flag_unsafe_math_optimizations)
6248 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6253 CASE_FLT_FN (BUILT_IN_SINCOS):
6254 if (! flag_unsafe_math_optimizations)
6256 target = expand_builtin_sincos (exp);
6261 case BUILT_IN_APPLY_ARGS:
6262 return expand_builtin_apply_args ();
6264 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6265 FUNCTION with a copy of the parameters described by
6266 ARGUMENTS, and ARGSIZE. It returns a block of memory
6267 allocated on the stack into which is stored all the registers
6268 that might possibly be used for returning the result of a
6269 function. ARGUMENTS is the value returned by
6270 __builtin_apply_args. ARGSIZE is the number of bytes of
6271 arguments that must be copied. ??? How should this value be
6272 computed? We'll also need a safe worst case value for varargs
6274 case BUILT_IN_APPLY:
6275 if (!validate_arglist (exp, POINTER_TYPE,
6276 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6277 && !validate_arglist (exp, REFERENCE_TYPE,
6278 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6284 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6285 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6286 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6288 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6291 /* __builtin_return (RESULT) causes the function to return the
6292 value described by RESULT. RESULT is address of the block of
6293 memory returned by __builtin_apply. */
6294 case BUILT_IN_RETURN:
6295 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6296 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6299 case BUILT_IN_SAVEREGS:
6300 return expand_builtin_saveregs ();
6302 case BUILT_IN_ARGS_INFO:
6303 return expand_builtin_args_info (exp);
6305 case BUILT_IN_VA_ARG_PACK:
6306 /* All valid uses of __builtin_va_arg_pack () are removed during
6308 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6311 case BUILT_IN_VA_ARG_PACK_LEN:
6312 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6314 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6317 /* Return the address of the first anonymous stack arg. */
6318 case BUILT_IN_NEXT_ARG:
6319 if (fold_builtin_next_arg (exp, false))
6321 return expand_builtin_next_arg ();
6323 case BUILT_IN_CLEAR_CACHE:
6324 target = expand_builtin___clear_cache (exp);
6329 case BUILT_IN_CLASSIFY_TYPE:
6330 return expand_builtin_classify_type (exp);
6332 case BUILT_IN_CONSTANT_P:
6335 case BUILT_IN_FRAME_ADDRESS:
6336 case BUILT_IN_RETURN_ADDRESS:
6337 return expand_builtin_frame_address (fndecl, exp);
6339 /* Returns the address of the area where the structure is returned.
6341 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6342 if (call_expr_nargs (exp) != 0
6343 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6344 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6347 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6349 case BUILT_IN_ALLOCA:
6350 target = expand_builtin_alloca (exp, target);
6355 case BUILT_IN_STACK_SAVE:
6356 return expand_stack_save ();
6358 case BUILT_IN_STACK_RESTORE:
6359 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6362 case BUILT_IN_BSWAP32:
6363 case BUILT_IN_BSWAP64:
6364 target = expand_builtin_bswap (exp, target, subtarget);
6370 CASE_INT_FN (BUILT_IN_FFS):
6371 case BUILT_IN_FFSIMAX:
6372 target = expand_builtin_unop (target_mode, exp, target,
6373 subtarget, ffs_optab);
6378 CASE_INT_FN (BUILT_IN_CLZ):
6379 case BUILT_IN_CLZIMAX:
6380 target = expand_builtin_unop (target_mode, exp, target,
6381 subtarget, clz_optab);
6386 CASE_INT_FN (BUILT_IN_CTZ):
6387 case BUILT_IN_CTZIMAX:
6388 target = expand_builtin_unop (target_mode, exp, target,
6389 subtarget, ctz_optab);
6394 CASE_INT_FN (BUILT_IN_POPCOUNT):
6395 case BUILT_IN_POPCOUNTIMAX:
6396 target = expand_builtin_unop (target_mode, exp, target,
6397 subtarget, popcount_optab);
6402 CASE_INT_FN (BUILT_IN_PARITY):
6403 case BUILT_IN_PARITYIMAX:
6404 target = expand_builtin_unop (target_mode, exp, target,
6405 subtarget, parity_optab);
6410 case BUILT_IN_STRLEN:
6411 target = expand_builtin_strlen (exp, target, target_mode);
6416 case BUILT_IN_STRCPY:
6417 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6422 case BUILT_IN_STRNCPY:
6423 target = expand_builtin_strncpy (exp, target, mode);
6428 case BUILT_IN_STPCPY:
6429 target = expand_builtin_stpcpy (exp, target, mode);
6434 case BUILT_IN_STRCAT:
6435 target = expand_builtin_strcat (fndecl, exp, target, mode);
6440 case BUILT_IN_STRNCAT:
6441 target = expand_builtin_strncat (exp, target, mode);
6446 case BUILT_IN_STRSPN:
6447 target = expand_builtin_strspn (exp, target, mode);
6452 case BUILT_IN_STRCSPN:
6453 target = expand_builtin_strcspn (exp, target, mode);
6458 case BUILT_IN_STRSTR:
6459 target = expand_builtin_strstr (exp, target, mode);
6464 case BUILT_IN_STRPBRK:
6465 target = expand_builtin_strpbrk (exp, target, mode);
6470 case BUILT_IN_INDEX:
6471 case BUILT_IN_STRCHR:
6472 target = expand_builtin_strchr (exp, target, mode);
6477 case BUILT_IN_RINDEX:
6478 case BUILT_IN_STRRCHR:
6479 target = expand_builtin_strrchr (exp, target, mode);
6484 case BUILT_IN_MEMCPY:
6485 target = expand_builtin_memcpy (exp, target, mode);
6490 case BUILT_IN_MEMPCPY:
6491 target = expand_builtin_mempcpy (exp, target, mode);
6496 case BUILT_IN_MEMMOVE:
6497 target = expand_builtin_memmove (exp, target, mode, ignore);
6502 case BUILT_IN_BCOPY:
6503 target = expand_builtin_bcopy (exp, ignore);
6508 case BUILT_IN_MEMSET:
6509 target = expand_builtin_memset (exp, target, mode);
6514 case BUILT_IN_BZERO:
6515 target = expand_builtin_bzero (exp);
6520 case BUILT_IN_STRCMP:
6521 target = expand_builtin_strcmp (exp, target, mode);
6526 case BUILT_IN_STRNCMP:
6527 target = expand_builtin_strncmp (exp, target, mode);
6532 case BUILT_IN_MEMCHR:
6533 target = expand_builtin_memchr (exp, target, mode);
6539 case BUILT_IN_MEMCMP:
6540 target = expand_builtin_memcmp (exp, target, mode);
6545 case BUILT_IN_SETJMP:
6546 /* This should have been lowered to the builtins below. */
6549 case BUILT_IN_SETJMP_SETUP:
6550 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6551 and the receiver label. */
6552 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6554 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6555 VOIDmode, EXPAND_NORMAL);
6556 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6557 rtx label_r = label_rtx (label);
6559 /* This is copied from the handling of non-local gotos. */
6560 expand_builtin_setjmp_setup (buf_addr, label_r);
6561 nonlocal_goto_handler_labels
6562 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6563 nonlocal_goto_handler_labels);
6564 /* ??? Do not let expand_label treat us as such since we would
6565 not want to be both on the list of non-local labels and on
6566 the list of forced labels. */
6567 FORCED_LABEL (label) = 0;
6572 case BUILT_IN_SETJMP_DISPATCHER:
6573 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6574 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6576 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6577 rtx label_r = label_rtx (label);
6579 /* Remove the dispatcher label from the list of non-local labels
6580 since the receiver labels have been added to it above. */
6581 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6586 case BUILT_IN_SETJMP_RECEIVER:
6587 /* __builtin_setjmp_receiver is passed the receiver label. */
6588 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6590 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6591 rtx label_r = label_rtx (label);
6593 expand_builtin_setjmp_receiver (label_r);
6598 /* __builtin_longjmp is passed a pointer to an array of five words.
6599 It's similar to the C library longjmp function but works with
6600 __builtin_setjmp above. */
6601 case BUILT_IN_LONGJMP:
6602 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6604 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6605 VOIDmode, EXPAND_NORMAL);
6606 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6608 if (value != const1_rtx)
6610 error ("%<__builtin_longjmp%> second argument must be 1");
6614 expand_builtin_longjmp (buf_addr, value);
6619 case BUILT_IN_NONLOCAL_GOTO:
6620 target = expand_builtin_nonlocal_goto (exp);
6625 /* This updates the setjmp buffer that is its argument with the value
6626 of the current stack pointer. */
6627 case BUILT_IN_UPDATE_SETJMP_BUF:
6628 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6631 = expand_normal (CALL_EXPR_ARG (exp, 0));
6633 expand_builtin_update_setjmp_buf (buf_addr);
6639 expand_builtin_trap ();
6642 case BUILT_IN_PRINTF:
6643 target = expand_builtin_printf (exp, target, mode, false);
6648 case BUILT_IN_PRINTF_UNLOCKED:
6649 target = expand_builtin_printf (exp, target, mode, true);
6654 case BUILT_IN_FPUTS:
6655 target = expand_builtin_fputs (exp, target, false);
6659 case BUILT_IN_FPUTS_UNLOCKED:
6660 target = expand_builtin_fputs (exp, target, true);
6665 case BUILT_IN_FPRINTF:
6666 target = expand_builtin_fprintf (exp, target, mode, false);
6671 case BUILT_IN_FPRINTF_UNLOCKED:
6672 target = expand_builtin_fprintf (exp, target, mode, true);
6677 case BUILT_IN_SPRINTF:
6678 target = expand_builtin_sprintf (exp, target, mode);
6683 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6684 case BUILT_IN_SIGNBITD32:
6685 case BUILT_IN_SIGNBITD64:
6686 case BUILT_IN_SIGNBITD128:
6687 target = expand_builtin_signbit (exp, target);
6692 /* Various hooks for the DWARF 2 __throw routine. */
6693 case BUILT_IN_UNWIND_INIT:
6694 expand_builtin_unwind_init ();
6696 case BUILT_IN_DWARF_CFA:
6697 return virtual_cfa_rtx;
6698 #ifdef DWARF2_UNWIND_INFO
6699 case BUILT_IN_DWARF_SP_COLUMN:
6700 return expand_builtin_dwarf_sp_column ();
6701 case BUILT_IN_INIT_DWARF_REG_SIZES:
6702 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6705 case BUILT_IN_FROB_RETURN_ADDR:
6706 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6707 case BUILT_IN_EXTRACT_RETURN_ADDR:
6708 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6709 case BUILT_IN_EH_RETURN:
6710 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6711 CALL_EXPR_ARG (exp, 1));
6713 #ifdef EH_RETURN_DATA_REGNO
6714 case BUILT_IN_EH_RETURN_DATA_REGNO:
6715 return expand_builtin_eh_return_data_regno (exp);
6717 case BUILT_IN_EXTEND_POINTER:
6718 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6720 case BUILT_IN_VA_START:
6721 return expand_builtin_va_start (exp);
6722 case BUILT_IN_VA_END:
6723 return expand_builtin_va_end (exp);
6724 case BUILT_IN_VA_COPY:
6725 return expand_builtin_va_copy (exp);
6726 case BUILT_IN_EXPECT:
6727 return expand_builtin_expect (exp, target);
6728 case BUILT_IN_PREFETCH:
6729 expand_builtin_prefetch (exp);
6732 case BUILT_IN_PROFILE_FUNC_ENTER:
6733 return expand_builtin_profile_func (false);
6734 case BUILT_IN_PROFILE_FUNC_EXIT:
6735 return expand_builtin_profile_func (true);
6737 case BUILT_IN_INIT_TRAMPOLINE:
6738 return expand_builtin_init_trampoline (exp);
6739 case BUILT_IN_ADJUST_TRAMPOLINE:
6740 return expand_builtin_adjust_trampoline (exp);
6743 case BUILT_IN_EXECL:
6744 case BUILT_IN_EXECV:
6745 case BUILT_IN_EXECLP:
6746 case BUILT_IN_EXECLE:
6747 case BUILT_IN_EXECVP:
6748 case BUILT_IN_EXECVE:
6749 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6754 case BUILT_IN_FETCH_AND_ADD_1:
6755 case BUILT_IN_FETCH_AND_ADD_2:
6756 case BUILT_IN_FETCH_AND_ADD_4:
6757 case BUILT_IN_FETCH_AND_ADD_8:
6758 case BUILT_IN_FETCH_AND_ADD_16:
6759 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6760 target = expand_builtin_sync_operation (mode, exp, PLUS,
6761 false, target, ignore);
6766 case BUILT_IN_FETCH_AND_SUB_1:
6767 case BUILT_IN_FETCH_AND_SUB_2:
6768 case BUILT_IN_FETCH_AND_SUB_4:
6769 case BUILT_IN_FETCH_AND_SUB_8:
6770 case BUILT_IN_FETCH_AND_SUB_16:
6771 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6772 target = expand_builtin_sync_operation (mode, exp, MINUS,
6773 false, target, ignore);
6778 case BUILT_IN_FETCH_AND_OR_1:
6779 case BUILT_IN_FETCH_AND_OR_2:
6780 case BUILT_IN_FETCH_AND_OR_4:
6781 case BUILT_IN_FETCH_AND_OR_8:
6782 case BUILT_IN_FETCH_AND_OR_16:
6783 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6784 target = expand_builtin_sync_operation (mode, exp, IOR,
6785 false, target, ignore);
6790 case BUILT_IN_FETCH_AND_AND_1:
6791 case BUILT_IN_FETCH_AND_AND_2:
6792 case BUILT_IN_FETCH_AND_AND_4:
6793 case BUILT_IN_FETCH_AND_AND_8:
6794 case BUILT_IN_FETCH_AND_AND_16:
6795 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6796 target = expand_builtin_sync_operation (mode, exp, AND,
6797 false, target, ignore);
6802 case BUILT_IN_FETCH_AND_XOR_1:
6803 case BUILT_IN_FETCH_AND_XOR_2:
6804 case BUILT_IN_FETCH_AND_XOR_4:
6805 case BUILT_IN_FETCH_AND_XOR_8:
6806 case BUILT_IN_FETCH_AND_XOR_16:
6807 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6808 target = expand_builtin_sync_operation (mode, exp, XOR,
6809 false, target, ignore);
6814 case BUILT_IN_FETCH_AND_NAND_1:
6815 case BUILT_IN_FETCH_AND_NAND_2:
6816 case BUILT_IN_FETCH_AND_NAND_4:
6817 case BUILT_IN_FETCH_AND_NAND_8:
6818 case BUILT_IN_FETCH_AND_NAND_16:
6819 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6820 target = expand_builtin_sync_operation (mode, exp, NOT,
6821 false, target, ignore);
6826 case BUILT_IN_ADD_AND_FETCH_1:
6827 case BUILT_IN_ADD_AND_FETCH_2:
6828 case BUILT_IN_ADD_AND_FETCH_4:
6829 case BUILT_IN_ADD_AND_FETCH_8:
6830 case BUILT_IN_ADD_AND_FETCH_16:
6831 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6832 target = expand_builtin_sync_operation (mode, exp, PLUS,
6833 true, target, ignore);
6838 case BUILT_IN_SUB_AND_FETCH_1:
6839 case BUILT_IN_SUB_AND_FETCH_2:
6840 case BUILT_IN_SUB_AND_FETCH_4:
6841 case BUILT_IN_SUB_AND_FETCH_8:
6842 case BUILT_IN_SUB_AND_FETCH_16:
6843 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6844 target = expand_builtin_sync_operation (mode, exp, MINUS,
6845 true, target, ignore);
6850 case BUILT_IN_OR_AND_FETCH_1:
6851 case BUILT_IN_OR_AND_FETCH_2:
6852 case BUILT_IN_OR_AND_FETCH_4:
6853 case BUILT_IN_OR_AND_FETCH_8:
6854 case BUILT_IN_OR_AND_FETCH_16:
6855 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6856 target = expand_builtin_sync_operation (mode, exp, IOR,
6857 true, target, ignore);
6862 case BUILT_IN_AND_AND_FETCH_1:
6863 case BUILT_IN_AND_AND_FETCH_2:
6864 case BUILT_IN_AND_AND_FETCH_4:
6865 case BUILT_IN_AND_AND_FETCH_8:
6866 case BUILT_IN_AND_AND_FETCH_16:
6867 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6868 target = expand_builtin_sync_operation (mode, exp, AND,
6869 true, target, ignore);
6874 case BUILT_IN_XOR_AND_FETCH_1:
6875 case BUILT_IN_XOR_AND_FETCH_2:
6876 case BUILT_IN_XOR_AND_FETCH_4:
6877 case BUILT_IN_XOR_AND_FETCH_8:
6878 case BUILT_IN_XOR_AND_FETCH_16:
6879 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6880 target = expand_builtin_sync_operation (mode, exp, XOR,
6881 true, target, ignore);
6886 case BUILT_IN_NAND_AND_FETCH_1:
6887 case BUILT_IN_NAND_AND_FETCH_2:
6888 case BUILT_IN_NAND_AND_FETCH_4:
6889 case BUILT_IN_NAND_AND_FETCH_8:
6890 case BUILT_IN_NAND_AND_FETCH_16:
6891 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6892 target = expand_builtin_sync_operation (mode, exp, NOT,
6893 true, target, ignore);
6898 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6899 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6900 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6901 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6902 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6903 if (mode == VOIDmode)
6904 mode = TYPE_MODE (boolean_type_node);
6905 if (!target || !register_operand (target, mode))
6906 target = gen_reg_rtx (mode);
6908 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6909 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6914 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6915 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6916 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6917 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6918 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6919 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6920 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6925 case BUILT_IN_LOCK_TEST_AND_SET_1:
6926 case BUILT_IN_LOCK_TEST_AND_SET_2:
6927 case BUILT_IN_LOCK_TEST_AND_SET_4:
6928 case BUILT_IN_LOCK_TEST_AND_SET_8:
6929 case BUILT_IN_LOCK_TEST_AND_SET_16:
6930 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6931 target = expand_builtin_lock_test_and_set (mode, exp, target);
6936 case BUILT_IN_LOCK_RELEASE_1:
6937 case BUILT_IN_LOCK_RELEASE_2:
6938 case BUILT_IN_LOCK_RELEASE_4:
6939 case BUILT_IN_LOCK_RELEASE_8:
6940 case BUILT_IN_LOCK_RELEASE_16:
6941 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6942 expand_builtin_lock_release (mode, exp);
6945 case BUILT_IN_SYNCHRONIZE:
6946 expand_builtin_synchronize ();
6949 case BUILT_IN_OBJECT_SIZE:
6950 return expand_builtin_object_size (exp);
6952 case BUILT_IN_MEMCPY_CHK:
6953 case BUILT_IN_MEMPCPY_CHK:
6954 case BUILT_IN_MEMMOVE_CHK:
6955 case BUILT_IN_MEMSET_CHK:
6956 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6961 case BUILT_IN_STRCPY_CHK:
6962 case BUILT_IN_STPCPY_CHK:
6963 case BUILT_IN_STRNCPY_CHK:
6964 case BUILT_IN_STRCAT_CHK:
6965 case BUILT_IN_STRNCAT_CHK:
6966 case BUILT_IN_SNPRINTF_CHK:
6967 case BUILT_IN_VSNPRINTF_CHK:
6968 maybe_emit_chk_warning (exp, fcode);
6971 case BUILT_IN_SPRINTF_CHK:
6972 case BUILT_IN_VSPRINTF_CHK:
6973 maybe_emit_sprintf_chk_warning (exp, fcode);
6976 default: /* just do library call, if unknown builtin */
6980 /* The switch statement above can drop through to cause the function
6981 to be called normally. */
6982 return expand_call (exp, target, ignore);
6985 /* Determine whether a tree node represents a call to a built-in
6986 function. If the tree T is a call to a built-in function with
6987 the right number of arguments of the appropriate types, return
6988 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6989 Otherwise the return value is END_BUILTINS. */
/* NOTE(review): some interior lines (braces) of this function are elided
   in this extract of the file.  */
6991 enum built_in_function
6992 builtin_mathfn_code (const_tree t)
6994 const_tree fndecl, arg, parmlist;
6995 const_tree argtype, parmtype;
6996 const_call_expr_arg_iterator iter;
/* Only a direct call -- a CALL_EXPR whose callee is an ADDR_EXPR -- can
   name a builtin.  */
6998 if (TREE_CODE (t) != CALL_EXPR
6999 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7000 return END_BUILTINS;
/* The callee must be a FUNCTION_DECL marked built-in; machine-specific
   (BUILT_IN_MD) builtins are explicitly rejected.  */
7002 fndecl = get_callee_fndecl (t);
7003 if (fndecl == NULL_TREE
7004 || TREE_CODE (fndecl) != FUNCTION_DECL
7005 || ! DECL_BUILT_IN (fndecl)
7006 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7007 return END_BUILTINS;
/* Walk the declared parameter types and the actual arguments in lockstep,
   requiring each pair to agree in type class.  */
7009 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7010 init_const_call_expr_arg_iterator (t, &iter);
7011 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7013 /* If a function doesn't take a variable number of arguments,
7014 the last element in the list will have type `void'. */
7015 parmtype = TREE_VALUE (parmlist);
7016 if (VOID_TYPE_P (parmtype))
/* End of fixed parameters: any surplus actual argument disqualifies
   the call.  */
7018 if (more_const_call_expr_args_p (&iter))
7019 return END_BUILTINS;
7020 return DECL_FUNCTION_CODE (fndecl);
/* Too few actual arguments for the declared parameter list.  */
7023 if (! more_const_call_expr_args_p (&iter))
7024 return END_BUILTINS;
7026 arg = next_const_call_expr_arg (&iter);
7027 argtype = TREE_TYPE (arg);
/* Parameter and argument must be in the same broad type class:
   scalar float, complex float, pointer, or integral.  */
7029 if (SCALAR_FLOAT_TYPE_P (parmtype))
7031 if (! SCALAR_FLOAT_TYPE_P (argtype))
7032 return END_BUILTINS;
7034 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7036 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7037 return END_BUILTINS;
7039 else if (POINTER_TYPE_P (parmtype))
7041 if (! POINTER_TYPE_P (argtype))
7042 return END_BUILTINS;
7044 else if (INTEGRAL_TYPE_P (parmtype))
7046 if (! INTEGRAL_TYPE_P (argtype))
7047 return END_BUILTINS;
7050 return END_BUILTINS;
7053 /* Variable-length argument list. */
7054 return DECL_FUNCTION_CODE (fndecl);
7057 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7058 evaluate to a constant. */
/* NOTE(review): some interior lines of this function are elided in this
   extract of the file.  */
7061 fold_builtin_constant_p (tree arg)
7063 /* We return 1 for a numeric type that's known to be a constant
7064 value at compile-time or for an aggregate type that's a
7065 literal constant. */
7068 /* If we know this is a constant, emit the constant of one. */
7069 if (CONSTANT_CLASS_P (arg)
7070 || (TREE_CODE (arg) == CONSTRUCTOR
7071 && TREE_CONSTANT (arg)))
7072 return integer_one_node;
/* The address of a string literal, or of element zero of one, is also a
   compile-time constant.  */
7073 if (TREE_CODE (arg) == ADDR_EXPR)
7075 tree op = TREE_OPERAND (arg, 0);
7076 if (TREE_CODE (op) == STRING_CST
7077 || (TREE_CODE (op) == ARRAY_REF
7078 && integer_zerop (TREE_OPERAND (op, 1))
7079 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7080 return integer_one_node;
7083 /* If this expression has side effects, show we don't know it to be a
7084 constant. Likewise if it's a pointer or aggregate type since in
7085 those case we only want literals, since those are only optimized
7086 when generating RTL, not later.
7087 And finally, if we are compiling an initializer, not code, we
7088 need to return a definite result now; there's not going to be any
7089 more optimization done. */
7090 if (TREE_SIDE_EFFECTS (arg)
7091 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7092 || POINTER_TYPE_P (TREE_TYPE (arg))
7094 || folding_initializer)
7095 return integer_zero_node;
7100 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7101 return it as a truthvalue. */
7104 build_builtin_expect_predicate (tree pred, tree expected)
7106 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7108 fn = built_in_decls[BUILT_IN_EXPECT];
7109 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7110 ret_type = TREE_TYPE (TREE_TYPE (fn));
7111 pred_type = TREE_VALUE (arg_types);
7112 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7114 pred = fold_convert (pred_type, pred);
7115 expected = fold_convert (expected_type, expected);
7116 call_expr = build_call_expr (fn, 2, pred, expected);
7118 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7119 build_int_cst (ret_type, 0));
7122 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7123 NULL_TREE if no simplification is possible. */
/* NOTE(review): some interior lines of this function are elided in this
   extract of the file.  */
7126 fold_builtin_expect (tree arg0, tree arg1)
7129 enum tree_code code;
7131 /* If this is a builtin_expect within a builtin_expect keep the
7132 inner one. See through a comparison against a constant. It
7133 might have been added to create a truthvalue. */
7135 if (COMPARISON_CLASS_P (inner)
7136 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7137 inner = TREE_OPERAND (inner, 0);
/* A nested __builtin_expect call: keep the inner call only.  */
7139 if (TREE_CODE (inner) == CALL_EXPR
7140 && (fndecl = get_callee_fndecl (inner))
7141 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7142 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7145 /* Distribute the expected value over short-circuiting operators.
7146 See through the cast from truthvalue_type_node to long. */
7148 while (TREE_CODE (inner) == NOP_EXPR
7149 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7150 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7151 inner = TREE_OPERAND (inner, 0);
/* For a && or || condition, push the expectation onto each operand and
   rebuild the short-circuit expression.  */
7153 code = TREE_CODE (inner);
7154 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7156 tree op0 = TREE_OPERAND (inner, 0);
7157 tree op1 = TREE_OPERAND (inner, 1);
7159 op0 = build_builtin_expect_predicate (op0, arg1);
7160 op1 = build_builtin_expect_predicate (op1, arg1);
7161 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7163 return fold_convert (TREE_TYPE (arg0), inner);
7166 /* If the argument isn't invariant then there's nothing else we can do. */
7167 if (!TREE_CONSTANT (arg0))
7170 /* If we expect that a comparison against the argument will fold to
7171 a constant return the constant. In practice, this means a true
7172 constant or the address of a non-weak symbol. */
/* Strip COMPONENT_REF/ARRAY_REF layers to find the underlying decl;
   weak symbols may resolve to address zero, so they are not usable.  */
7175 if (TREE_CODE (inner) == ADDR_EXPR)
7179 inner = TREE_OPERAND (inner, 0);
7181 while (TREE_CODE (inner) == COMPONENT_REF
7182 || TREE_CODE (inner) == ARRAY_REF);
7183 if (DECL_P (inner) && DECL_WEAK (inner))
7187 /* Otherwise, ARG0 already has the proper type for the return value. */
7191 /* Fold a call to __builtin_classify_type with argument ARG. */
7194 fold_builtin_classify_type (tree arg)
7197 return build_int_cst (NULL_TREE, no_type_class);
7199 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7202 /* Fold a call to __builtin_strlen with argument ARG. */
7205 fold_builtin_strlen (tree arg)
7207 if (!validate_arg (arg, POINTER_TYPE))
7211 tree len = c_strlen (arg, 0);
7215 /* Convert from the internal "sizetype" type to "size_t". */
7217 len = fold_convert (size_type_node, len);
7225 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7228 fold_builtin_inf (tree type, int warn)
7230 REAL_VALUE_TYPE real;
7232 /* __builtin_inff is intended to be usable to define INFINITY on all
7233 targets. If an infinity is not available, INFINITY expands "to a
7234 positive constant of type float that overflows at translation
7235 time", footnote "In this case, using INFINITY will violate the
7236 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7237 Thus we pedwarn to ensure this constraint violation is
7239 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7240 pedwarn ("target format does not support infinity");
7243 return build_real (type, real);
7246 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7249 fold_builtin_nan (tree arg, tree type, int quiet)
7251 REAL_VALUE_TYPE real;
7254 if (!validate_arg (arg, POINTER_TYPE))
7256 str = c_getstr (arg);
7260 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7263 return build_real (type, real);
7266 /* Return true if the floating point expression T has an integer value.
7267 We also allow +Inf, -Inf and NaN to be considered integer values. */
/* NOTE(review): the case labels of both switches below are elided in
   this extract of the file.  */
7270 integer_valued_real_p (tree t)
7272 switch (TREE_CODE (t))
/* Unary wrapper: integer-valued iff the operand is.  */
7279 return integer_valued_real_p (TREE_OPERAND (t, 0));
7284 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
/* Binary case: both operands must be integer valued.  */
7291 return integer_valued_real_p (TREE_OPERAND (t, 0))
7292 && integer_valued_real_p (TREE_OPERAND (t, 1));
/* Conditional: both selectable arms must be integer valued.  */
7295 return integer_valued_real_p (TREE_OPERAND (t, 1))
7296 && integer_valued_real_p (TREE_OPERAND (t, 2));
/* Real constant: ask the real-arithmetic layer directly.  */
7299 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
/* Conversion: from an integer type, always integer valued; from a real
   type, recurse on the converted operand.  */
7303 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7304 if (TREE_CODE (type) == INTEGER_TYPE)
7306 if (TREE_CODE (type) == REAL_TYPE)
7307 return integer_valued_real_p (TREE_OPERAND (t, 0));
/* Calls to integer-rounding math builtins always yield integer values;
   fmin/fmax do when both arguments are integer valued.  */
7312 switch (builtin_mathfn_code (t))
7314 CASE_FLT_FN (BUILT_IN_CEIL):
7315 CASE_FLT_FN (BUILT_IN_FLOOR):
7316 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7317 CASE_FLT_FN (BUILT_IN_RINT):
7318 CASE_FLT_FN (BUILT_IN_ROUND):
7319 CASE_FLT_FN (BUILT_IN_TRUNC):
7322 CASE_FLT_FN (BUILT_IN_FMIN):
7323 CASE_FLT_FN (BUILT_IN_FMAX):
7324 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7325 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7338 /* FNDECL is assumed to be a builtin where truncation can be propagated
7339 across (for instance floor((double)f) == (double)floorf (f)).
7340 Do the transformation for a call with argument ARG.
   Some interior lines of this function are elided in this extract. */
7343 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7345 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7347 if (!validate_arg (arg, REAL_TYPE))
7350 /* Integer rounding functions are idempotent. */
7351 if (fcode == builtin_mathfn_code (arg))
7354 /* If argument is already integer valued, and we don't need to worry
7355 about setting errno, there's no need to perform rounding. */
7356 if (! flag_errno_math && integer_valued_real_p (arg))
/* Narrow the call: strip widening float conversions from the argument
   and call the corresponding lower-precision builtin instead, then
   widen the result back to the original type.  */
7361 tree arg0 = strip_float_extensions (arg);
7362 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7363 tree newtype = TREE_TYPE (arg0);
7366 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7367 && (decl = mathfn_built_in (newtype, fcode)))
7368 return fold_convert (ftype,
7369 build_call_expr (decl, 1,
7370 fold_convert (newtype, arg0)));
7375 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7376 the argument, for instance lround((double)f) -> lroundf (f).
7377 Do the transformation for a call with argument ARG.
   Some interior lines of this function are elided in this extract. */
7380 fold_fixed_mathfn (tree fndecl, tree arg)
7382 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7384 if (!validate_arg (arg, REAL_TYPE))
7387 /* If argument is already integer valued, and we don't need to worry
7388 about setting errno, there's no need to perform rounding. */
7389 if (! flag_errno_math && integer_valued_real_p (arg))
7390 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
/* Narrow the call through widening float conversions, e.g.
   lround((double)f) -> lroundf (f).  */
7394 tree ftype = TREE_TYPE (arg);
7395 tree arg0 = strip_float_extensions (arg);
7396 tree newtype = TREE_TYPE (arg0);
7399 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7400 && (decl = mathfn_built_in (newtype, fcode)))
7401 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7404 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7405 sizeof (long long) == sizeof (long). */
7406 if (TYPE_PRECISION (long_long_integer_type_node)
7407 == TYPE_PRECISION (long_integer_type_node))
7409 tree newfn = NULL_TREE;
7412 CASE_FLT_FN (BUILT_IN_LLCEIL):
7413 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7416 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7417 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7420 CASE_FLT_FN (BUILT_IN_LLROUND):
7421 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7424 CASE_FLT_FN (BUILT_IN_LLRINT):
7425 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
/* Call the long variant and convert its result to the long long
   return type of the original builtin.  */
7434 tree newcall = build_call_expr(newfn, 1, arg);
7435 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7442 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7443 return type. Return NULL_TREE if no simplification can be made.
   Some interior lines of this function are elided in this extract. */
7446 fold_builtin_cabs (tree arg, tree type, tree fndecl)
/* The argument must be a complex value with a real component type.  */
7450 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7451 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7454 /* Calculate the result when the argument is a constant. */
7455 if (TREE_CODE (arg) == COMPLEX_CST
7456 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7460 if (TREE_CODE (arg) == COMPLEX_EXPR)
7462 tree real = TREE_OPERAND (arg, 0);
7463 tree imag = TREE_OPERAND (arg, 1);
7465 /* If either part is zero, cabs is fabs of the other. */
7466 if (real_zerop (real))
7467 return fold_build1 (ABS_EXPR, type, imag);
7468 if (real_zerop (imag))
7469 return fold_build1 (ABS_EXPR, type, real);
7471 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7472 if (flag_unsafe_math_optimizations
7473 && operand_equal_p (real, imag, OEP_PURE_SAME))
7475 const REAL_VALUE_TYPE sqrt2_trunc
7476 = real_value_truncate (TYPE_MODE (type),
7477 *get_real_const (rv_sqrt2));
7479 return fold_build2 (MULT_EXPR, type,
7480 fold_build1 (ABS_EXPR, type, real),
7481 build_real (type, sqrt2_trunc));
7485 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7486 if (TREE_CODE (arg) == NEGATE_EXPR
7487 || TREE_CODE (arg) == CONJ_EXPR)
7488 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
/* Expand cabs(z) inline as sqrt(re*re + im*im) when sqrt is available.  */
7490 /* Don't do this when optimizing for size. */
7491 if (flag_unsafe_math_optimizations
7492 && optimize && !optimize_size)
7494 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7496 if (sqrtfn != NULL_TREE)
7498 tree rpart, ipart, result;
/* Save the argument and both parts so they are evaluated once.  */
7500 arg = builtin_save_expr (arg);
7502 rpart = fold_build1 (REALPART_EXPR, type, arg);
7503 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7505 rpart = builtin_save_expr (rpart);
7506 ipart = builtin_save_expr (ipart);
7508 result = fold_build2 (PLUS_EXPR, type,
7509 fold_build2 (MULT_EXPR, type,
7511 fold_build2 (MULT_EXPR, type,
7514 return build_call_expr (sqrtfn, 1, result);
7521 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7522 Return NULL_TREE if no simplification can be made.
   Some interior lines of this function are elided in this extract. */
7525 fold_builtin_sqrt (tree arg, tree type)
7528 enum built_in_function fcode;
7531 if (!validate_arg (arg, REAL_TYPE))
7534 /* Calculate the result when the argument is a constant. */
7535 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7538 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7539 fcode = builtin_mathfn_code (arg);
7540 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7542 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7543 arg = fold_build2 (MULT_EXPR, type,
7544 CALL_EXPR_ARG (arg, 0),
7545 build_real (type, dconsthalf))
7546 return build_call_expr (expfn, 1, arg);
7549 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7550 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7552 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7556 tree arg0 = CALL_EXPR_ARG (arg, 0);
7558 /* The inner root was either sqrt or cbrt. */
7559 REAL_VALUE_TYPE dconstroot =
7560 BUILTIN_SQRT_P (fcode) ? dconsthalf : *get_real_const (rv_third);
7562 /* Adjust for the outer root. */
7563 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7564 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7565 tree_root = build_real (type, dconstroot);
7566 return build_call_expr (powfn, 2, arg0, tree_root);
7570 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7571 if (flag_unsafe_math_optimizations
7572 && (fcode == BUILT_IN_POW
7573 || fcode == BUILT_IN_POWF
7574 || fcode == BUILT_IN_POWL))
7576 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7577 tree arg0 = CALL_EXPR_ARG (arg, 0);
7578 tree arg1 = CALL_EXPR_ARG (arg, 1);
/* The base must be nonnegative for pow; wrap it in fabs otherwise.  */
7580 if (!tree_expr_nonnegative_p (arg0))
7581 arg0 = build1 (ABS_EXPR, type, arg0);
7582 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7583 build_real (type, dconsthalf));
7584 return build_call_expr (powfn, 2, arg0, narg1);
7590 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7591 Return NULL_TREE if no simplification can be made.
   Some interior lines of this function are elided in this extract. */
7594 fold_builtin_cbrt (tree arg, tree type)
7596 const enum built_in_function fcode = builtin_mathfn_code (arg);
7599 if (!validate_arg (arg, REAL_TYPE))
7602 /* Calculate the result when the argument is a constant. */
7603 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
/* The remaining transformations change rounding/accuracy and are only
   done under -funsafe-math-optimizations.  */
7606 if (flag_unsafe_math_optimizations)
7608 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7609 if (BUILTIN_EXPONENT_P (fcode))
7611 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7612 const REAL_VALUE_TYPE third_trunc =
7613 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_third));
7614 arg = fold_build2 (MULT_EXPR, type,
7615 CALL_EXPR_ARG (arg, 0),
7616 build_real (type, third_trunc));
7617 return build_call_expr (expfn, 1, arg);
7620 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7621 if (BUILTIN_SQRT_P (fcode))
7623 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7627 tree arg0 = CALL_EXPR_ARG (arg, 0);
7629 REAL_VALUE_TYPE dconstroot = *get_real_const (rv_third);
/* Halve 1/3 to get 1/6 by decrementing the exponent.  */
7631 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7632 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7633 tree_root = build_real (type, dconstroot);
7634 return build_call_expr (powfn, 2, arg0, tree_root);
7638 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7639 if (BUILTIN_CBRT_P (fcode))
7641 tree arg0 = CALL_EXPR_ARG (arg, 0);
7642 if (tree_expr_nonnegative_p (arg0))
7644 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7649 REAL_VALUE_TYPE dconstroot;
/* Compute (1/3) * (1/3) = 1/9 exactly in the real layer.  */
7651 real_arithmetic (&dconstroot, MULT_EXPR,
7652 get_real_const (rv_third),
7653 get_real_const (rv_third));
7654 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7655 tree_root = build_real (type, dconstroot);
7656 return build_call_expr (powfn, 2, arg0, tree_root);
7661 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7662 if (fcode == BUILT_IN_POW
7663 || fcode == BUILT_IN_POWF
7664 || fcode == BUILT_IN_POWL)
7666 tree arg00 = CALL_EXPR_ARG (arg, 0);
7667 tree arg01 = CALL_EXPR_ARG (arg, 1);
7668 if (tree_expr_nonnegative_p (arg00))
7670 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7671 const REAL_VALUE_TYPE dconstroot
7672 = real_value_truncate (TYPE_MODE (type),
7673 *get_real_const (rv_third));
7674 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7675 build_real (type, dconstroot));
7676 return build_call_expr (powfn, 2, arg00, narg01);
7683 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7684 TYPE is the type of the return value. Return NULL_TREE if no
7685 simplification can be made. */
7688 fold_builtin_cos (tree arg, tree type, tree fndecl)
7692 if (!validate_arg (arg, REAL_TYPE))
7695 /* Calculate the result when the argument is a constant. */
7696 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7699 /* Optimize cos(-x) into cos (x). */
7700 if ((narg = fold_strip_sign_ops (arg)))
7701 return build_call_expr (fndecl, 1, narg);
7706 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7707 Return NULL_TREE if no simplification can be made. */
7710 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7712 if (validate_arg (arg, REAL_TYPE))
7716 /* Calculate the result when the argument is a constant. */
7717 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7720 /* Optimize cosh(-x) into cosh (x). */
7721 if ((narg = fold_strip_sign_ops (arg)))
7722 return build_call_expr (fndecl, 1, narg);
7728 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7729 Return NULL_TREE if no simplification can be made. */
7732 fold_builtin_tan (tree arg, tree type)
7734 enum built_in_function fcode;
7737 if (!validate_arg (arg, REAL_TYPE))
7740 /* Calculate the result when the argument is a constant. */
7741 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7744 /* Optimize tan(atan(x)) = x. */
7745 fcode = builtin_mathfn_code (arg);
7746 if (flag_unsafe_math_optimizations
7747 && (fcode == BUILT_IN_ATAN
7748 || fcode == BUILT_IN_ATANF
7749 || fcode == BUILT_IN_ATANL))
7750 return CALL_EXPR_ARG (arg, 0);
7755 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7756 NULL_TREE if no simplification can be made.  ARG0 is the angle,
   ARG1 and ARG2 the sin and cos output pointers.
   Some interior lines of this function are elided in this extract. */
7759 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7764 if (!validate_arg (arg0, REAL_TYPE)
7765 || !validate_arg (arg1, POINTER_TYPE)
7766 || !validate_arg (arg2, POINTER_TYPE))
7769 type = TREE_TYPE (arg0);
7771 /* Calculate the result when the argument is a constant. */
7772 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7775 /* Canonicalize sincos to cexpi. */
7776 if (!TARGET_C99_FUNCTIONS)
7778 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
/* Evaluate cexpi once, then store sin into *ARG1 (the imaginary part)
   and cos into *ARG2 (the real part).  */
7782 call = build_call_expr (fn, 1, arg0);
7783 call = builtin_save_expr (call);
7785 return build2 (COMPOUND_EXPR, type,
7786 build2 (MODIFY_EXPR, void_type_node,
7787 build_fold_indirect_ref (arg1),
7788 build1 (IMAGPART_EXPR, type, call)),
7789 build2 (MODIFY_EXPR, void_type_node,
7790 build_fold_indirect_ref (arg2),
7791 build1 (REALPART_EXPR, type, call)));
7794 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7795 NULL_TREE if no simplification can be made.
   Some interior lines of this function are elided in this extract. */
7798 fold_builtin_cexp (tree arg0, tree type)
7801 tree realp, imagp, ifn;
7803 if (!validate_arg (arg0, COMPLEX_TYPE))
/* RTYPE is the scalar component type of the complex argument.  */
7806 rtype = TREE_TYPE (TREE_TYPE (arg0));
7808 /* In case we can figure out the real part of arg0 and it is constant zero
7810 if (!TARGET_C99_FUNCTIONS)
7812 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
/* cexp(0 + yi) is exactly cexpi(y).  */
7816 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7817 && real_zerop (realp))
7819 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7820 return build_call_expr (ifn, 1, narg);
7823 /* In case we can easily decompose real and imaginary parts split cexp
7824 to exp (r) * cexpi (i). */
7825 if (flag_unsafe_math_optimizations
7828 tree rfn, rcall, icall;
7830 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7834 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
/* Save both calls so each is evaluated exactly once, then build the
   complex result exp(r)*cos(i) + exp(r)*sin(i)*I.  */
7838 icall = build_call_expr (ifn, 1, imagp);
7839 icall = builtin_save_expr (icall);
7840 rcall = build_call_expr (rfn, 1, realp);
7841 rcall = builtin_save_expr (rcall);
7842 return fold_build2 (COMPLEX_EXPR, type,
7843 fold_build2 (MULT_EXPR, rtype,
7845 fold_build1 (REALPART_EXPR, rtype, icall)),
7846 fold_build2 (MULT_EXPR, rtype,
7848 fold_build1 (IMAGPART_EXPR, rtype, icall)));
7854 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7855 Return NULL_TREE if no simplification can be made. */
7858 fold_builtin_trunc (tree fndecl, tree arg)
7860 if (!validate_arg (arg, REAL_TYPE))
7863 /* Optimize trunc of constant value. */
7864 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7866 REAL_VALUE_TYPE r, x;
7867 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7869 x = TREE_REAL_CST (arg);
7870 real_trunc (&r, TYPE_MODE (type), &x);
7871 return build_real (type, r);
7874 return fold_trunc_transparent_mathfn (fndecl, arg);
7877 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7878 Return NULL_TREE if no simplification can be made. */
7881 fold_builtin_floor (tree fndecl, tree arg)
7883 if (!validate_arg (arg, REAL_TYPE))
7886 /* Optimize floor of constant value. */
7887 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7891 x = TREE_REAL_CST (arg);
7892 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7894 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7897 real_floor (&r, TYPE_MODE (type), &x);
7898 return build_real (type, r);
7902 /* Fold floor (x) where x is nonnegative to trunc (x). */
7903 if (tree_expr_nonnegative_p (arg))
7905 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7907 return build_call_expr (truncfn, 1, arg);
7910 return fold_trunc_transparent_mathfn (fndecl, arg);
7913 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7914 Return NULL_TREE if no simplification can be made. */
7917 fold_builtin_ceil (tree fndecl, tree arg)
7919 if (!validate_arg (arg, REAL_TYPE))
7922 /* Optimize ceil of constant value. */
7923 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7927 x = TREE_REAL_CST (arg);
7928 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7930 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7933 real_ceil (&r, TYPE_MODE (type), &x);
7934 return build_real (type, r);
7938 return fold_trunc_transparent_mathfn (fndecl, arg);
7941 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7942 Return NULL_TREE if no simplification can be made. */
7945 fold_builtin_round (tree fndecl, tree arg)
7947 if (!validate_arg (arg, REAL_TYPE))
7950 /* Optimize round of constant value. */
7951 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7955 x = TREE_REAL_CST (arg);
7956 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7958 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7961 real_round (&r, TYPE_MODE (type), &x);
7962 return build_real (type, r);
7966 return fold_trunc_transparent_mathfn (fndecl, arg);
7969 /* Fold function call to builtin lround, lroundf or lroundl (or the
7970 corresponding long long versions) and other rounding functions. ARG
7971 is the argument to the call. Return NULL_TREE if no simplification
   can be made.
   Some interior lines of this function are elided in this extract. */
7975 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7977 if (!validate_arg (arg, REAL_TYPE))
7980 /* Optimize lround of constant value. */
7981 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7983 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
/* Only finite values can be converted to an integer constant.  */
7985 if (real_isfinite (&x))
7987 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7988 tree ftype = TREE_TYPE (arg);
7989 unsigned HOST_WIDE_INT lo2;
7990 HOST_WIDE_INT hi, lo;
/* Round according to which builtin family is being folded.  */
7993 switch (DECL_FUNCTION_CODE (fndecl))
7995 CASE_FLT_FN (BUILT_IN_LFLOOR):
7996 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7997 real_floor (&r, TYPE_MODE (ftype), &x);
8000 CASE_FLT_FN (BUILT_IN_LCEIL):
8001 CASE_FLT_FN (BUILT_IN_LLCEIL):
8002 real_ceil (&r, TYPE_MODE (ftype), &x);
8005 CASE_FLT_FN (BUILT_IN_LROUND):
8006 CASE_FLT_FN (BUILT_IN_LLROUND):
8007 real_round (&r, TYPE_MODE (ftype), &x);
/* Emit the constant only when it fits in the integer return type.  */
8014 REAL_VALUE_TO_INT (&lo, &hi, r);
8015 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8016 return build_int_cst_wide (itype, lo2, hi);
8020 switch (DECL_FUNCTION_CODE (fndecl))
8022 CASE_FLT_FN (BUILT_IN_LFLOOR):
8023 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8024 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8025 if (tree_expr_nonnegative_p (arg))
8026 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
/* Fall back to narrowing the floating-point argument type.  */
8032 return fold_fixed_mathfn (fndecl, arg);
8035 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8036 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8037 the argument to the call. Return NULL_TREE if no simplification can
8041 fold_builtin_bitop (tree fndecl, tree arg)
8043 if (!validate_arg (arg, INTEGER_TYPE))
8046 /* Optimize for constant argument. */
8047 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8049 HOST_WIDE_INT hi, width, result;
8050 unsigned HOST_WIDE_INT lo;
8053 type = TREE_TYPE (arg);
8054 width = TYPE_PRECISION (type);
8055 lo = TREE_INT_CST_LOW (arg);
8057 /* Clear all the bits that are beyond the type's precision. */
8058 if (width > HOST_BITS_PER_WIDE_INT)
8060 hi = TREE_INT_CST_HIGH (arg);
8061 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8062 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8067 if (width < HOST_BITS_PER_WIDE_INT)
8068 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8071 switch (DECL_FUNCTION_CODE (fndecl))
8073 CASE_INT_FN (BUILT_IN_FFS):
/* ffs: lo & -lo isolates the lowest set bit; +1 gives the 1-based
   index (ffs(0) == 0 by definition).  */
8075 result = exact_log2 (lo & -lo) + 1;
8077 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8082 CASE_INT_FN (BUILT_IN_CLZ):
8084 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8086 result = width - floor_log2 (lo) - 1;
/* clz(0)/ctz(0) are only foldable when the target defines a value
   for the all-zero input.  */
8087 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8091 CASE_INT_FN (BUILT_IN_CTZ):
8093 result = exact_log2 (lo & -lo);
8095 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8096 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8100 CASE_INT_FN (BUILT_IN_POPCOUNT):
/* Kernighan's trick: each x &= x - 1 clears the lowest set bit.  */
8103 result++, lo &= lo - 1;
8105 result++, hi &= hi - 1;
8108 CASE_INT_FN (BUILT_IN_PARITY):
8111 result++, lo &= lo - 1;
8113 result++, hi &= hi - 1;
8121 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8127 /* Fold function call to builtin_bswap and the long and long long
8128 variants. Return NULL_TREE if no simplification can be made. */
8130 fold_builtin_bswap (tree fndecl, tree arg)
8132 if (! validate_arg (arg, INTEGER_TYPE))
8135 /* Optimize constant value. */
8136 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8138 HOST_WIDE_INT hi, width, r_hi = 0;
8139 unsigned HOST_WIDE_INT lo, r_lo = 0;
8142 type = TREE_TYPE (arg);
8143 width = TYPE_PRECISION (type);
8144 lo = TREE_INT_CST_LOW (arg);
8145 hi = TREE_INT_CST_HIGH (arg);
8147 switch (DECL_FUNCTION_CODE (fndecl))
8149 case BUILT_IN_BSWAP32:
8150 case BUILT_IN_BSWAP64:
/* Move each byte from bit position S to its mirrored position D,
   handling the split across the lo/hi HOST_WIDE_INT halves.  */
8154 for (s = 0; s < width; s += 8)
8156 int d = width - s - 8;
8157 unsigned HOST_WIDE_INT byte;
8159 if (s < HOST_BITS_PER_WIDE_INT)
8160 byte = (lo >> s) & 0xff;
8162 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8164 if (d < HOST_BITS_PER_WIDE_INT)
8167 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
/* Narrow result fits entirely in the low word; otherwise build a
   double-word integer constant.  */
8177 if (width < HOST_BITS_PER_WIDE_INT)
8178 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8180 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8186 /* Return true if EXPR is the real constant contained in VALUE. */
8189 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
/* Also accept a complex constant whose real part equals VALUE and
   whose imaginary part is zero.  */
8193 return ((TREE_CODE (expr) == REAL_CST
8194 && !TREE_OVERFLOW (expr)
8195 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8196 || (TREE_CODE (expr) == COMPLEX_CST
8197 && real_dconstp (TREE_REALPART (expr), value)
8198 && real_zerop (TREE_IMAGPART (expr))));
8201 /* A subroutine of fold_builtin to fold the various logarithmic
8202 functions. Return NULL_TREE if no simplification can me made.
8203 FUNC is the corresponding MPFR logarithm function. */
8206 fold_builtin_logarithm (tree fndecl, tree arg,
8207 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8209 if (validate_arg (arg, REAL_TYPE))
8211 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8213 const enum built_in_function fcode = builtin_mathfn_code (arg);
8215 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8216 instead we'll look for 'e' truncated to MODE. So only do
8217 this if flag_unsafe_math_optimizations is set. */
8218 if (flag_unsafe_math_optimizations && func == mpfr_log)
8220 const REAL_VALUE_TYPE e_truncated =
8221 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_e));
8222 if (real_dconstp (arg, &e_truncated))
8223 return build_real (type, dconst1);
8226 /* Calculate the result when the argument is a constant. */
8227 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8230 /* Special case, optimize logN(expN(x)) = x. */
8231 if (flag_unsafe_math_optimizations
8232 && ((func == mpfr_log
8233 && (fcode == BUILT_IN_EXP
8234 || fcode == BUILT_IN_EXPF
8235 || fcode == BUILT_IN_EXPL))
8236 || (func == mpfr_log2
8237 && (fcode == BUILT_IN_EXP2
8238 || fcode == BUILT_IN_EXP2F
8239 || fcode == BUILT_IN_EXP2L))
8240 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8241 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8243 /* Optimize logN(func()) for various exponential functions. We
8244 want to determine the value "x" and the power "exponent" in
8245 order to transform logN(x**exponent) into exponent*logN(x). */
8246 if (flag_unsafe_math_optimizations)
8248 tree exponent = 0, x = 0;
8252 CASE_FLT_FN (BUILT_IN_EXP):
8253 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8254 x = build_real (type,
8255 real_value_truncate (TYPE_MODE (type),
8256 *get_real_const (rv_e)));
8257 exponent = CALL_EXPR_ARG (arg, 0);
8259 CASE_FLT_FN (BUILT_IN_EXP2):
8260 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8261 x = build_real (type, dconst2);
8262 exponent = CALL_EXPR_ARG (arg, 0);
8264 CASE_FLT_FN (BUILT_IN_EXP10):
8265 CASE_FLT_FN (BUILT_IN_POW10):
8266 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8268 REAL_VALUE_TYPE dconst10;
8269 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8270 x = build_real (type, dconst10);
8272 exponent = CALL_EXPR_ARG (arg, 0);
8274 CASE_FLT_FN (BUILT_IN_SQRT):
8275 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8276 x = CALL_EXPR_ARG (arg, 0);
8277 exponent = build_real (type, dconsthalf);
8279 CASE_FLT_FN (BUILT_IN_CBRT):
8280 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8281 x = CALL_EXPR_ARG (arg, 0);
8282 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8283 *get_real_const (rv_third)));
8285 CASE_FLT_FN (BUILT_IN_POW):
8286 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8287 x = CALL_EXPR_ARG (arg, 0);
8288 exponent = CALL_EXPR_ARG (arg, 1);
8294 /* Now perform the optimization. */
/* Both x and exponent were set by one of the cases above; build
   exponent * logN(x).  */
8297 tree logfn = build_call_expr (fndecl, 1, x);
8298 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8306 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8307 NULL_TREE if no simplification can be made. */
8310 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8312 tree res, narg0, narg1;
8314 if (!validate_arg (arg0, REAL_TYPE)
8315 || !validate_arg (arg1, REAL_TYPE))
8318 /* Calculate the result when the argument is a constant. */
8319 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8322 /* If either argument to hypot has a negate or abs, strip that off.
8323 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8324 narg0 = fold_strip_sign_ops (arg0);
8325 narg1 = fold_strip_sign_ops (arg1);
/* Rebuild the call only if at least one argument was simplified.  */
8328 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8329 narg1 ? narg1 : arg1);
8332 /* If either argument is zero, hypot is fabs of the other. */
8333 if (real_zerop (arg0))
8334 return fold_build1 (ABS_EXPR, type, arg1)
8335 else if (real_zerop (arg1))
8336 return fold_build1 (ABS_EXPR, type, arg0);
8338 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8339 if (flag_unsafe_math_optimizations
8340 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8342 const REAL_VALUE_TYPE sqrt2_trunc
8343 = real_value_truncate (TYPE_MODE (type), *get_real_const (rv_sqrt2));
8344 return fold_build2 (MULT_EXPR, type,
8345 fold_build1 (ABS_EXPR, type, arg0),
8346 build_real (type, sqrt2_trunc));
8353 /* Fold a builtin function call to pow, powf, or powl. Return
8354 NULL_TREE if no simplification can be made. */
8356 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8360 if (!validate_arg (arg0, REAL_TYPE)
8361 || !validate_arg (arg1, REAL_TYPE))
8364 /* Calculate the result when the argument is a constant. */
8365 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8368 /* Optimize pow(1.0,y) = 1.0. */
8369 if (real_onep (arg0))
8370 return omit_one_operand (type, build_real (type, dconst1), arg1);
8372 if (TREE_CODE (arg1) == REAL_CST
8373 && !TREE_OVERFLOW (arg1))
8375 REAL_VALUE_TYPE cint;
8379 c = TREE_REAL_CST (arg1);
8381 /* Optimize pow(x,0.0) = 1.0. */
8382 if (REAL_VALUES_EQUAL (c, dconst0))
8383 return omit_one_operand (type, build_real (type, dconst1),
8386 /* Optimize pow(x,1.0) = x. */
8387 if (REAL_VALUES_EQUAL (c, dconst1))
8390 /* Optimize pow(x,-1.0) = 1.0/x. */
8391 if (REAL_VALUES_EQUAL (c, dconstm1))
8392 return fold_build2 (RDIV_EXPR, type,
8393 build_real (type, dconst1), arg0);
8395 /* Optimize pow(x,0.5) = sqrt(x). */
8396 if (flag_unsafe_math_optimizations
8397 && REAL_VALUES_EQUAL (c, dconsthalf))
8399 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8401 if (sqrtfn != NULL_TREE)
8402 return build_call_expr (sqrtfn, 1, arg0);
8405 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8406 if (flag_unsafe_math_optimizations)
/* Compare against 1/3 truncated to the target mode, since the
   exact value is not representable.  */
8408 const REAL_VALUE_TYPE dconstroot
8409 = real_value_truncate (TYPE_MODE (type),
8410 *get_real_const (rv_third))
8412 if (REAL_VALUES_EQUAL (c, dconstroot))
8414 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8415 if (cbrtfn != NULL_TREE)
8416 return build_call_expr (cbrtfn, 1, arg0);
8420 /* Check for an integer exponent. */
8421 n = real_to_integer (&c);
8422 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8423 if (real_identical (&c, &cint))
8425 /* Attempt to evaluate pow at compile-time. */
8426 if (TREE_CODE (arg0) == REAL_CST
8427 && !TREE_OVERFLOW (arg0))
8432 x = TREE_REAL_CST (arg0);
/* Accept an inexact compile-time result only under unsafe math.  */
8433 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8434 if (flag_unsafe_math_optimizations || !inexact)
8435 return build_real (type, x);
8438 /* Strip sign ops from even integer powers. */
8439 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8441 tree narg0 = fold_strip_sign_ops (arg0);
8443 return build_call_expr (fndecl, 2, narg0, arg1);
8448 if (flag_unsafe_math_optimizations)
8450 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8452 /* Optimize pow(expN(x),y) = expN(x*y). */
8453 if (BUILTIN_EXPONENT_P (fcode))
8455 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8456 tree arg = CALL_EXPR_ARG (arg0, 0);
8457 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8458 return build_call_expr (expfn, 1, arg);
8461 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8462 if (BUILTIN_SQRT_P (fcode))
8464 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8465 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8466 build_real (type, dconsthalf));
8467 return build_call_expr (fndecl, 2, narg0, narg1);
8470 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8471 if (BUILTIN_CBRT_P (fcode))
8473 tree arg = CALL_EXPR_ARG (arg0, 0);
8474 if (tree_expr_nonnegative_p (arg))
8476 const REAL_VALUE_TYPE dconstroot
8477 = real_value_truncate (TYPE_MODE (type),
8478 *get_real_const (rv_third));
8479 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8480 build_real (type, dconstroot));
8481 return build_call_expr (fndecl, 2, arg, narg1);
8485 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8486 if (fcode == BUILT_IN_POW
8487 || fcode == BUILT_IN_POWF
8488 || fcode == BUILT_IN_POWL)
8490 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8491 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8492 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8493 return build_call_expr (fndecl, 2, arg00, narg1);
8500 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8501 Return NULL_TREE if no simplification can be made. */
8503 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8504 tree arg0, tree arg1, tree type)
8506 if (!validate_arg (arg0, REAL_TYPE)
8507 || !validate_arg (arg1, INTEGER_TYPE))
8510 /* Optimize pow(1.0,y) = 1.0. */
8511 if (real_onep (arg0))
8512 return omit_one_operand (type, build_real (type, dconst1), arg1);
/* The integer exponent must fit in a HOST_WIDE_INT to fold.  */
8514 if (host_integerp (arg1, 0))
8516 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8518 /* Evaluate powi at compile-time. */
8519 if (TREE_CODE (arg0) == REAL_CST
8520 && !TREE_OVERFLOW (arg0))
8523 x = TREE_REAL_CST (arg0);
8524 real_powi (&x, TYPE_MODE (type), &x, c);
8525 return build_real (type, x);
8528 /* Optimize pow(x,0) = 1.0. */
8530 return omit_one_operand (type, build_real (type, dconst1),
8533 /* Optimize pow(x,1) = x. */
8537 /* Optimize pow(x,-1) = 1.0/x. */
8539 return fold_build2 (RDIV_EXPR, type,
8540 build_real (type, dconst1), arg0);
8546 /* A subroutine of fold_builtin to fold the various exponent
8547 functions. Return NULL_TREE if no simplification can be made.
8548 FUNC is the corresponding MPFR exponent function. */
8551 fold_builtin_exponent (tree fndecl, tree arg,
8552 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8554 if (validate_arg (arg, REAL_TYPE))
8556 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8559 /* Calculate the result when the argument is a constant. */
8560 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8563 /* Optimize expN(logN(x)) = x. */
8564 if (flag_unsafe_math_optimizations
/* Match the exp family against the corresponding log family by
   comparing the MPFR callback pointer with the inner call's code.  */
8566 const enum built_in_function fcode = builtin_mathfn_code (arg);
8568 if ((func == mpfr_exp
8569 && (fcode == BUILT_IN_LOG
8570 || fcode == BUILT_IN_LOGF
8571 || fcode == BUILT_IN_LOGL))
8572 || (func == mpfr_exp2
8573 && (fcode == BUILT_IN_LOG2
8574 || fcode == BUILT_IN_LOG2F
8575 || fcode == BUILT_IN_LOG2L))
8576 || (func == mpfr_exp10
8577 && (fcode == BUILT_IN_LOG10
8578 || fcode == BUILT_IN_LOG10F
8579 || fcode == BUILT_IN_LOG10L)))
8580 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8587 /* Return true if VAR is a VAR_DECL or a component thereof. */
8590 var_decl_component_p (tree var)
/* Strip COMPONENT_REF/ARRAY_REF etc. wrappers down to the base object.  */
8593 while (handled_component_p (inner))
8594 inner = TREE_OPERAND (inner, 0);
8595 return SSA_VAR_P (inner);
8598 /* Fold function call to builtin memset. Return
8599 NULL_TREE if no simplification can be made. */
8602 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8605 unsigned HOST_WIDE_INT length, cval;
8607 if (! validate_arg (dest, POINTER_TYPE)
8608 || ! validate_arg (c, INTEGER_TYPE)
8609 || ! validate_arg (len, INTEGER_TYPE))
8612 if (! host_integerp (len, 1))
8615 /* If the LEN parameter is zero, return DEST. */
8616 if (integer_zerop (len))
8617 return omit_one_operand (type, dest, c)
8619 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
/* Only fold when DEST is the address of a non-volatile scalar
   (integral or pointer) variable or component.  */
8624 if (TREE_CODE (var) != ADDR_EXPR)
8627 var = TREE_OPERAND (var, 0);
8628 if (TREE_THIS_VOLATILE (var))
8631 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8632 && !POINTER_TYPE_P (TREE_TYPE (var)))
8635 if (! var_decl_component_p (var))
/* LEN must cover the variable exactly and the destination must be
   sufficiently aligned for a single store.  */
8638 length = tree_low_cst (len, 1);
8639 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8640 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8644 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8647 if (integer_zerop (c))
/* Replicating the fill byte requires 8-bit bytes and a host wide
   int of at most 64 bits.  */
8651 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8654 cval = tree_low_cst (c, 1);
/* (cval << 31) << 1 avoids an undefined 32-bit shift when
   HOST_WIDE_INT is 32 bits wide.  */
8658 cval |= (cval << 31) << 1;
8661 ret = build_int_cst_type (TREE_TYPE (var), cval);
8662 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8666 return omit_one_operand (type, dest, ret);
8669 /* Fold function call to builtin memset. Return
8670 NULL_TREE if no simplification can be made. */
8673 fold_builtin_bzero (tree dest, tree size, bool ignore)
8675 if (! validate_arg (dest, POINTER_TYPE)
8676 || ! validate_arg (size, INTEGER_TYPE))
8682 /* New argument list transforming bzero(ptr x, int y) to
8683 memset(ptr x, int 0, size_t y). This is done this way
8684 so that if it isn't expanded inline, we fallback to
8685 calling bzero instead of memset. */
8687 return fold_builtin_memset (dest, integer_zero_node,
8688 fold_convert (sizetype, size),
8689 void_type_node, ignore);
8692 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8693 NULL_TREE if no simplification can be made.
8694 If ENDP is 0, return DEST (like memcpy).
8695 If ENDP is 1, return DEST+LEN (like mempcpy).
8696 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8697 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8701 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8703 tree destvar, srcvar, expr;
8705 if (! validate_arg (dest, POINTER_TYPE)
8706 || ! validate_arg (src, POINTER_TYPE)
8707 || ! validate_arg (len, INTEGER_TYPE))
8710 /* If the LEN parameter is zero, return DEST. */
8711 if (integer_zerop (len))
8712 return omit_one_operand (type, dest, src);
8714 /* If SRC and DEST are the same (and not volatile), return
8715 DEST{,+LEN,+LEN-1}. */
8716 if (operand_equal_p (src, dest, 0))
8720 tree srctype, desttype;
8723 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8724 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8726 /* Both DEST and SRC must be pointer types.
8727 ??? This is what old code did. Is the testing for pointer types
8730 If either SRC is readonly or length is 1, we can use memcpy. */
8731 if (dest_align && src_align
8732 && (readonly_data_expr (src)
8733 || (host_integerp (len, 1)
8734 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8735 tree_low_cst (len, 1)))))
8737 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8740 return build_call_expr (fn, 3, dest, src, len);
8745 if (!host_integerp (len, 0))
8748 This logic lose for arguments like (type *)malloc (sizeof (type)),
8749 since we strip the casts of up to VOID return value from malloc.
8750 Perhaps we ought to inherit type from non-VOID argument here? */
/* Fold the copy into a single scalar assignment when LEN equals the
   size of both pointed-to types exactly.  */
8753 srctype = TREE_TYPE (TREE_TYPE (src));
8754 desttype = TREE_TYPE (TREE_TYPE (dest));
8755 if (!srctype || !desttype
8756 || !TYPE_SIZE_UNIT (srctype)
8757 || !TYPE_SIZE_UNIT (desttype)
8758 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8759 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8760 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8761 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8764 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8765 < (int) TYPE_ALIGN (desttype)
8766 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8767 < (int) TYPE_ALIGN (srctype)))
/* DEST may be evaluated twice below (store and returned value), so
   wrap it in a SAVE_EXPR.  */
8771 dest = builtin_save_expr (dest);
8773 srcvar = build_fold_indirect_ref (src);
8774 if (TREE_THIS_VOLATILE (srcvar))
8776 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8778 /* With memcpy, it is possible to bypass aliasing rules, so without
8779 this check i.e. execute/20060930-2.c would be misoptimized, because
8780 it use conflicting alias set to hold argument for the memcpy call.
8781 This check is probably unnecessary with -fno-strict-aliasing.
8782 Similarly for destvar. See also PR29286. */
8783 if (!var_decl_component_p (srcvar)
8784 /* Accept: memcpy (*char_var, "test", 1); that simplify
8786 || is_gimple_min_invariant (srcvar)
8787 || readonly_data_expr (src))
8790 destvar = build_fold_indirect_ref (dest);
8791 if (TREE_THIS_VOLATILE (destvar))
8793 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8795 if (!var_decl_component_p (destvar))
/* Pick the conversion between source and destination types: none,
   a scalar fold_convert, or a VIEW_CONVERT_EXPR bit-reinterpretation.  */
8798 if (srctype == desttype
8799 || (gimple_in_ssa_p (cfun)
8800 && useless_type_conversion_p (desttype, srctype)))
8802 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8803 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8804 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8805 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8806 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8808 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8809 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
/* ENDP selects what to return: DEST itself, DEST+LEN, or DEST+LEN-1
   (see the header comment above).  */
8815 if (endp == 0 || endp == 3)
8816 return omit_one_operand (type, dest, expr);
8822 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8825 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8826 dest = fold_convert (type, dest);
8828 dest = omit_one_operand (type, dest, expr);
8832 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8833 If LEN is not NULL, it represents the length of the string to be
8834 copied. Return NULL_TREE if no simplification can be made. */
8837 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8841 if (!validate_arg (dest, POINTER_TYPE)
8842 || !validate_arg (src, POINTER_TYPE))
8845 /* If SRC and DEST are the same (and not volatile), return DEST. */
8846 if (operand_equal_p (src, dest, 0))
8847 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8852 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
/* Determine the source length; bail out if unknown or if computing
   it would duplicate side effects.  */
8858 len = c_strlen (src, 1);
8859 if (! len || TREE_SIDE_EFFECTS (len))
/* Transform strcpy into memcpy of length strlen(src) + 1 (to copy
   the terminating NUL as well).  */
8863 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8864 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8865 build_call_expr (fn, 3, dest, src, len));
8868 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8869 If SLEN is not NULL, it represents the length of the source string.
8870 Return NULL_TREE if no simplification can be made. */
8873 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8877 if (!validate_arg (dest, POINTER_TYPE)
8878 || !validate_arg (src, POINTER_TYPE)
8879 || !validate_arg (len, INTEGER_TYPE))
8882 /* If the LEN parameter is zero, return DEST. */
8883 if (integer_zerop (len))
8884 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8886 /* We can't compare slen with len as constants below if len is not a
8888 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8892 slen = c_strlen (src, 1);
8894 /* Now, we must be passed a constant src ptr parameter. */
8895 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
/* Account for the terminating NUL in the source length.  */
8898 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8900 /* We do not support simplification of this case, though we do
8901 support it when expanding trees into RTL. */
8902 /* FIXME: generate a call to __builtin_memset. */
8903 if (tree_int_cst_lt (slen, len))
8906 /* OK transform into builtin memcpy. */
8907 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8910 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8911 build_call_expr (fn, 3, dest, src, len));
8914 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8915 arguments to the call, and TYPE is its return type.
8916 Return NULL_TREE if no simplification can be made. */
8919 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8921 if (!validate_arg (arg1, POINTER_TYPE)
8922 || !validate_arg (arg2, INTEGER_TYPE)
8923 || !validate_arg (len, INTEGER_TYPE))
8929 if (TREE_CODE (arg2) != INTEGER_CST
8930 || !host_integerp (len, 1))
/* Fold at compile time only when the haystack is a string constant
   and LEN stays within it (including the NUL).  */
8933 p1 = c_getstr (arg1);
8934 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
/* Convert the searched-for value to the target character set.  */
8940 if (target_char_cast (arg2, &c))
8943 r = memchr (p1, c, tree_low_cst (len, 1));
/* Not found: the result is a null pointer.  */
8946 return build_int_cst (TREE_TYPE (arg1), 0);
/* Found: return ARG1 advanced by the match offset.  */
8948 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8950 return fold_convert (type, tem);
8956 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8957 Return NULL_TREE if no simplification can be made. */
8960 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8962 const char *p1, *p2;
8964 if (!validate_arg (arg1, POINTER_TYPE)
8965 || !validate_arg (arg2, POINTER_TYPE)
8966 || !validate_arg (len, INTEGER_TYPE))
8969 /* If the LEN parameter is zero, return zero. */
8970 if (integer_zerop (len))
8971 return omit_two_operands (integer_type_node, integer_zero_node,
8974 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8975 if (operand_equal_p (arg1, arg2, 0))
8976 return omit_one_operand (integer_type_node, integer_zero_node, len);
8978 p1 = c_getstr (arg1);
8979 p2 = c_getstr (arg2);
8981 /* If all arguments are constant, and the value of len is not greater
8982 than the lengths of arg1 and arg2, evaluate at compile-time. */
8983 if (host_integerp (len, 1) && p1 && p2
8984 && compare_tree_int (len, strlen (p1) + 1) <= 0
8985 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8987 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
/* Normalize the host memcmp result to exactly -1/0/1.  */
8990 return integer_one_node;
8992 return integer_minus_one_node;
8994 return integer_zero_node;
8997 /* If len parameter is one, return an expression corresponding to
8998 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8999 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9001 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9002 tree cst_uchar_ptr_node
9003 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9005 tree ind1 = fold_convert (integer_type_node,
9006 build1 (INDIRECT_REF, cst_uchar_node,
9007 fold_convert (cst_uchar_ptr_node,
9009 tree ind2 = fold_convert (integer_type_node,
9010 build1 (INDIRECT_REF, cst_uchar_node,
9011 fold_convert (cst_uchar_ptr_node,
9013 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9019 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9020 Return NULL_TREE if no simplification can be made. */
9023 fold_builtin_strcmp (tree arg1, tree arg2)
9025 const char *p1, *p2;
9027 if (!validate_arg (arg1, POINTER_TYPE)
9028 || !validate_arg (arg2, POINTER_TYPE))
9031 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9032 if (operand_equal_p (arg1, arg2, 0))
9033 return integer_zero_node;
9035 p1 = c_getstr (arg1);
9036 p2 = c_getstr (arg2);
/* Both strings constant: evaluate with the host strcmp and
   normalize the sign to -1/0/1.  */
9040 const int i = strcmp (p1, p2);
9042 return integer_minus_one_node;
9044 return integer_one_node;
9046 return integer_zero_node;
9049 /* If the second arg is "", return *(const unsigned char*)arg1. */
9050 if (p2 && *p2 == '\0')
9052 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9053 tree cst_uchar_ptr_node
9054 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9056 return fold_convert (integer_type_node,
9057 build1 (INDIRECT_REF, cst_uchar_node,
9058 fold_convert (cst_uchar_ptr_node,
9062 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9063 if (p1 && *p1 == '\0')
9065 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9066 tree cst_uchar_ptr_node
9067 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9069 tree temp = fold_convert (integer_type_node,
9070 build1 (INDIRECT_REF, cst_uchar_node,
9071 fold_convert (cst_uchar_ptr_node,
9073 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9079 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9080 Return NULL_TREE if no simplification can be made. */
9083 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9085 const char *p1, *p2;
9087 if (!validate_arg (arg1, POINTER_TYPE)
9088 || !validate_arg (arg2, POINTER_TYPE)
9089 || !validate_arg (len, INTEGER_TYPE))
9092 /* If the LEN parameter is zero, return zero. */
9093 if (integer_zerop (len))
9094 return omit_two_operands (integer_type_node, integer_zero_node,
9097 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9098 if (operand_equal_p (arg1, arg2, 0))
9099 return omit_one_operand (integer_type_node, integer_zero_node, len);
9101 p1 = c_getstr (arg1);
9102 p2 = c_getstr (arg2);
/* Both strings constant with a constant length: evaluate with the
   host strncmp and normalize the sign to -1/0/1.  */
9104 if (host_integerp (len, 1) && p1 && p2)
9106 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9108 return integer_one_node;
9110 return integer_minus_one_node;
9112 return integer_zero_node;
9115 /* If the second arg is "", and the length is greater than zero,
9116 return *(const unsigned char*)arg1. */
9117 if (p2 && *p2 == '\0'
9118 && TREE_CODE (len) == INTEGER_CST
9119 && tree_int_cst_sgn (len) == 1)
9121 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9122 tree cst_uchar_ptr_node
9123 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9125 return fold_convert (integer_type_node,
9126 build1 (INDIRECT_REF, cst_uchar_node,
9127 fold_convert (cst_uchar_ptr_node,
9131 /* If the first arg is "", and the length is greater than zero,
9132 return -*(const unsigned char*)arg2. */
9133 if (p1 && *p1 == '\0'
9134 && TREE_CODE (len) == INTEGER_CST
9135 && tree_int_cst_sgn (len) == 1)
9137 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9138 tree cst_uchar_ptr_node
9139 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9141 tree temp = fold_convert (integer_type_node,
9142 build1 (INDIRECT_REF, cst_uchar_node,
9143 fold_convert (cst_uchar_ptr_node,
9145 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9148 /* If len parameter is one, return an expression corresponding to
9149 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9150 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9152 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9153 tree cst_uchar_ptr_node
9154 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9156 tree ind1 = fold_convert (integer_type_node,
9157 build1 (INDIRECT_REF, cst_uchar_node,
9158 fold_convert (cst_uchar_ptr_node,
9160 tree ind2 = fold_convert (integer_type_node,
9161 build1 (INDIRECT_REF, cst_uchar_node,
9162 fold_convert (cst_uchar_ptr_node,
9164 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9170 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9171 ARG. Return NULL_TREE if no simplification can be made. */
9174 fold_builtin_signbit (tree arg, tree type)
9178 if (!validate_arg (arg, REAL_TYPE))
9181 /* If ARG is a compile-time constant, determine the result. */
9182 if (TREE_CODE (arg) == REAL_CST
9183 && !TREE_OVERFLOW (arg))
9187 c = TREE_REAL_CST (arg);
9188 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9189 return fold_convert (type, temp);
9192 /* If ARG is non-negative, the result is always zero. */
9193 if (tree_expr_nonnegative_p (arg))
9194 return omit_one_operand (type, integer_zero_node, arg);
9196 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9197 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9198 return fold_build2 (LT_EXPR, type, arg,
9199 build_real (TREE_TYPE (arg), dconst0));
9204 /* Fold function call to builtin copysign, copysignf or copysignl with
9205 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9209 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9213 if (!validate_arg (arg1, REAL_TYPE)
9214 || !validate_arg (arg2, REAL_TYPE))
9217 /* copysign(X,X) is X. */
9218 if (operand_equal_p (arg1, arg2, 0))
9219 return fold_convert (type, arg1);
9221 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9222 if (TREE_CODE (arg1) == REAL_CST
9223 && TREE_CODE (arg2) == REAL_CST
9224 && !TREE_OVERFLOW (arg1)
9225 && !TREE_OVERFLOW (arg2))
9227 REAL_VALUE_TYPE c1, c2;
9229 c1 = TREE_REAL_CST (arg1);
9230 c2 = TREE_REAL_CST (arg2);
9231 /* c1.sign := c2.sign. */
9232 real_copysign (&c1, &c2);
9233 return build_real (type, c1);
9236 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9237 Remember to evaluate Y for side-effects. */
9238 if (tree_expr_nonnegative_p (arg2))
9239 return omit_one_operand (type,
9240 fold_build1 (ABS_EXPR, type, arg1),
9243 /* Strip sign changing operations for the first argument. */
/* copysign overrides ARG1's sign anyway, so e.g. copysign(-x, y)
   folds to copysign(x, y).  */
9244 tem = fold_strip_sign_ops (arg1);
9246 return build_call_expr (fndecl, 2, tem, arg2);
9251 /* Fold a call to builtin isascii with argument ARG. */
9254 fold_builtin_isascii (tree arg)
9256 if (!validate_arg (arg, INTEGER_TYPE))
9260 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9261 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9262 build_int_cst (NULL_TREE,
9263 ~ (unsigned HOST_WIDE_INT) 0x7f));
9264 return fold_build2 (EQ_EXPR, integer_type_node,
9265 arg, integer_zero_node);
9269 /* Fold a call to builtin toascii with argument ARG. */
9272 fold_builtin_toascii (tree arg)
9274 if (!validate_arg (arg, INTEGER_TYPE))
9277 /* Transform toascii(c) -> (c & 0x7f). */
9278 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9279 build_int_cst (NULL_TREE, 0x7f));
9282 /* Fold a call to builtin isdigit with argument ARG. */
9285 fold_builtin_isdigit (tree arg)
9287 if (!validate_arg (arg, INTEGER_TYPE))
9291 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9292 /* According to the C standard, isdigit is unaffected by locale.
9293 However, it definitely is affected by the target character set. */
9294 unsigned HOST_WIDE_INT target_digit0
9295 = lang_hooks.to_target_charset ('0');
9297 if (target_digit0 == 0)
9300 arg = fold_convert (unsigned_type_node, arg);
9301 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9302 build_int_cst (unsigned_type_node, target_digit0));
9303 return fold_build2 (LE_EXPR, integer_type_node, arg,
9304 build_int_cst (unsigned_type_node, 9));
9308 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9311 fold_builtin_fabs (tree arg, tree type)
9313 if (!validate_arg (arg, REAL_TYPE))
9316 arg = fold_convert (type, arg);
9317 if (TREE_CODE (arg) == REAL_CST)
9318 return fold_abs_const (arg, type);
9319 return fold_build1 (ABS_EXPR, type, arg);
9322 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9325 fold_builtin_abs (tree arg, tree type)
9327 if (!validate_arg (arg, INTEGER_TYPE))
9330 arg = fold_convert (type, arg);
9331 if (TREE_CODE (arg) == INTEGER_CST)
9332 return fold_abs_const (arg, type);
9333 return fold_build1 (ABS_EXPR, type, arg);
9336 /* Fold a call to builtin fmin or fmax. */
9339 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9341 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9343 /* Calculate the result when the argument is a constant. */
9344 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9349 /* If either argument is NaN, return the other one. Avoid the
9350 transformation if we get (and honor) a signalling NaN. Using
9351 omit_one_operand() ensures we create a non-lvalue. */
9352 if (TREE_CODE (arg0) == REAL_CST
9353 && real_isnan (&TREE_REAL_CST (arg0))
9354 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9355 || ! TREE_REAL_CST (arg0).signalling))
9356 return omit_one_operand (type, arg1, arg0);
9357 if (TREE_CODE (arg1) == REAL_CST
9358 && real_isnan (&TREE_REAL_CST (arg1))
9359 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9360 || ! TREE_REAL_CST (arg1).signalling))
9361 return omit_one_operand (type, arg0, arg1);
9363 /* Transform fmin/fmax(x,x) -> x. */
9364 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9365 return omit_one_operand (type, arg0, arg1);
9367 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9368 functions to return the numeric arg if the other one is NaN.
9369 These tree codes don't honor that, so only transform if
9370 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9371 handled, so we don't have to worry about it either. */
9372 if (flag_finite_math_only)
9373 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9374 fold_convert (type, arg0),
9375 fold_convert (type, arg1));
9380 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9383 fold_builtin_carg (tree arg, tree type)
9385 if (validate_arg (arg, COMPLEX_TYPE))
9387 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9391 tree new_arg = builtin_save_expr (arg);
9392 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9393 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9394 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9401 /* Fold a call to builtin logb/ilogb. */
9404 fold_builtin_logb (tree arg, tree rettype)
9406 if (! validate_arg (arg, REAL_TYPE))
9411 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9413 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9419 /* If arg is Inf or NaN and we're logb, return it. */
9420 if (TREE_CODE (rettype) == REAL_TYPE)
9421 return fold_convert (rettype, arg);
9422 /* Fall through... */
9424 /* Zero may set errno and/or raise an exception for logb, also
9425 for ilogb we don't know FP_ILOGB0. */
9428 /* For normal numbers, proceed iff radix == 2. In GCC,
9429 normalized significands are in the range [0.5, 1.0). We
9430 want the exponent as if they were [1.0, 2.0) so get the
9431 exponent and subtract 1. */
9432 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9433 return fold_convert (rettype, build_int_cst (NULL_TREE,
9434 REAL_EXP (value)-1));
9442 /* Fold a call to builtin significand, if radix == 2. */
9445 fold_builtin_significand (tree arg, tree rettype)
9447 if (! validate_arg (arg, REAL_TYPE))
9452 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9454 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9461 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9462 return fold_convert (rettype, arg);
9464 /* For normal numbers, proceed iff radix == 2. */
9465 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9467 REAL_VALUE_TYPE result = *value;
9468 /* In GCC, normalized significands are in the range [0.5,
9469 1.0). We want them to be [1.0, 2.0) so set the
9471 SET_REAL_EXP (&result, 1);
9472 return build_real (rettype, result);
9481 /* Fold a call to builtin frexp, we can assume the base is 2. */
9484 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9486 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9491 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9494 arg1 = build_fold_indirect_ref (arg1);
9496 /* Proceed if a valid pointer type was passed in. */
9497 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9499 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9505 /* For +-0, return (*exp = 0, +-0). */
9506 exp = integer_zero_node;
9511 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9512 return omit_one_operand (rettype, arg0, arg1);
9515 /* Since the frexp function always expects base 2, and in
9516 GCC normalized significands are already in the range
9517 [0.5, 1.0), we have exactly what frexp wants. */
9518 REAL_VALUE_TYPE frac_rvt = *value;
9519 SET_REAL_EXP (&frac_rvt, 0);
9520 frac = build_real (rettype, frac_rvt);
9521 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9528 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9529 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9530 TREE_SIDE_EFFECTS (arg1) = 1;
9531 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9537 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9538 then we can assume the base is two. If it's false, then we have to
9539 check the mode of the TYPE parameter in certain cases. */
9542 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9544 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9549 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9550 if (real_zerop (arg0) || integer_zerop (arg1)
9551 || (TREE_CODE (arg0) == REAL_CST
9552 && !real_isfinite (&TREE_REAL_CST (arg0))))
9553 return omit_one_operand (type, arg0, arg1);
9555 /* If both arguments are constant, then try to evaluate it. */
9556 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9557 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9558 && host_integerp (arg1, 0))
9560 /* Bound the maximum adjustment to twice the range of the
9561 mode's valid exponents. Use abs to ensure the range is
9562 positive as a sanity check. */
9563 const long max_exp_adj = 2 *
9564 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9565 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9567 /* Get the user-requested adjustment. */
9568 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9570 /* The requested adjustment must be inside this range. This
9571 is a preliminary cap to avoid things like overflow, we
9572 may still fail to compute the result for other reasons. */
9573 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9575 REAL_VALUE_TYPE initial_result;
9577 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9579 /* Ensure we didn't overflow. */
9580 if (! real_isinf (&initial_result))
9582 const REAL_VALUE_TYPE trunc_result
9583 = real_value_truncate (TYPE_MODE (type), initial_result);
9585 /* Only proceed if the target mode can hold the
9587 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9588 return build_real (type, trunc_result);
9597 /* Fold a call to builtin modf. */
9600 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9602 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9607 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9610 arg1 = build_fold_indirect_ref (arg1);
9612 /* Proceed if a valid pointer type was passed in. */
9613 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9615 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9616 REAL_VALUE_TYPE trunc, frac;
9622 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9623 trunc = frac = *value;
9626 /* For +-Inf, return (*arg1 = arg0, +-0). */
9628 frac.sign = value->sign;
9632 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9633 real_trunc (&trunc, VOIDmode, value);
9634 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9635 /* If the original number was negative and already
9636 integral, then the fractional part is -0.0. */
9637 if (value->sign && frac.cl == rvc_zero)
9638 frac.sign = value->sign;
9642 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9643 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9644 build_real (rettype, trunc));
9645 TREE_SIDE_EFFECTS (arg1) = 1;
9646 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9647 build_real (rettype, frac));
9653 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9654 ARG is the argument for the call. */
9657 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9659 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9662 if (!validate_arg (arg, REAL_TYPE))
9665 switch (builtin_index)
9667 case BUILT_IN_ISINF:
9668 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9669 return omit_one_operand (type, integer_zero_node, arg);
9671 if (TREE_CODE (arg) == REAL_CST)
9673 r = TREE_REAL_CST (arg);
9674 if (real_isinf (&r))
9675 return real_compare (GT_EXPR, &r, &dconst0)
9676 ? integer_one_node : integer_minus_one_node;
9678 return integer_zero_node;
9683 case BUILT_IN_ISINF_SIGN:
9685 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9686 /* In a boolean context, GCC will fold the inner COND_EXPR to
9687 1. So e.g. "if (isinf_sign(x))" would be folded to just
9688 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9689 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9690 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9691 tree tmp = NULL_TREE;
9693 arg = builtin_save_expr (arg);
9695 if (signbit_fn && isinf_fn)
9697 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
9698 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
9700 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
9701 signbit_call, integer_zero_node);
9702 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
9703 isinf_call, integer_zero_node);
9705 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
9706 integer_minus_one_node, integer_one_node);
9707 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
9714 case BUILT_IN_ISFINITE:
9715 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9716 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9717 return omit_one_operand (type, integer_one_node, arg);
9719 if (TREE_CODE (arg) == REAL_CST)
9721 r = TREE_REAL_CST (arg);
9722 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9727 case BUILT_IN_ISNAN:
9728 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9729 return omit_one_operand (type, integer_zero_node, arg);
9731 if (TREE_CODE (arg) == REAL_CST)
9733 r = TREE_REAL_CST (arg);
9734 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9737 arg = builtin_save_expr (arg);
9738 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9745 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9746 This builtin will generate code to return the appropriate floating
9747 point classification depending on the value of the floating point
9748 number passed in. The possible return values must be supplied as
9749 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9750 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9751 one floating point argument which is "type generic". */
9754 fold_builtin_fpclassify (tree exp)
9756 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9757 arg, type, res, tmp;
9758 enum machine_mode mode;
9762 /* Verify the required arguments in the original call. */
9763 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9764 INTEGER_TYPE, INTEGER_TYPE,
9765 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9768 fp_nan = CALL_EXPR_ARG (exp, 0);
9769 fp_infinite = CALL_EXPR_ARG (exp, 1);
9770 fp_normal = CALL_EXPR_ARG (exp, 2);
9771 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9772 fp_zero = CALL_EXPR_ARG (exp, 4);
9773 arg = CALL_EXPR_ARG (exp, 5);
9774 type = TREE_TYPE (arg);
9775 mode = TYPE_MODE (type);
9776 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
9780 (fabs(x) == Inf ? FP_INFINITE :
9781 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9782 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9784 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9785 build_real (type, dconst0));
9786 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
9788 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9789 real_from_string (&r, buf);
9790 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
9791 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
9793 if (HONOR_INFINITIES (mode))
9796 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9797 build_real (type, r));
9798 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
9801 if (HONOR_NANS (mode))
9803 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
9804 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
9810 /* Fold a call to an unordered comparison function such as
9811 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9812 being called and ARG0 and ARG1 are the arguments for the call.
9813 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9814 the opposite of the desired result. UNORDERED_CODE is used
9815 for modes that can hold NaNs and ORDERED_CODE is used for
9819 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9820 enum tree_code unordered_code,
9821 enum tree_code ordered_code)
9823 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9824 enum tree_code code;
9826 enum tree_code code0, code1;
9827 tree cmp_type = NULL_TREE;
9829 type0 = TREE_TYPE (arg0);
9830 type1 = TREE_TYPE (arg1);
9832 code0 = TREE_CODE (type0);
9833 code1 = TREE_CODE (type1);
9835 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9836 /* Choose the wider of two real types. */
9837 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9839 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9841 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9844 arg0 = fold_convert (cmp_type, arg0);
9845 arg1 = fold_convert (cmp_type, arg1);
9847 if (unordered_code == UNORDERED_EXPR)
9849 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9850 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9851 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9854 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9856 return fold_build1 (TRUTH_NOT_EXPR, type,
9857 fold_build2 (code, type, arg0, arg1));
9860 /* Fold a call to built-in function FNDECL with 0 arguments.
9861 IGNORE is true if the result of the function call is ignored. This
9862 function returns NULL_TREE if no simplification was possible. */
9865 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9867 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9868 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9871 CASE_FLT_FN (BUILT_IN_INF):
9872 case BUILT_IN_INFD32:
9873 case BUILT_IN_INFD64:
9874 case BUILT_IN_INFD128:
9875 return fold_builtin_inf (type, true);
9877 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9878 return fold_builtin_inf (type, false);
9880 case BUILT_IN_CLASSIFY_TYPE:
9881 return fold_builtin_classify_type (NULL_TREE);
9889 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9890 IGNORE is true if the result of the function call is ignored. This
9891 function returns NULL_TREE if no simplification was possible. */
9894 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9896 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9897 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9901 case BUILT_IN_CONSTANT_P:
9903 tree val = fold_builtin_constant_p (arg0);
9905 /* Gimplification will pull the CALL_EXPR for the builtin out of
9906 an if condition. When not optimizing, we'll not CSE it back.
9907 To avoid link error types of regressions, return false now. */
9908 if (!val && !optimize)
9909 val = integer_zero_node;
9914 case BUILT_IN_CLASSIFY_TYPE:
9915 return fold_builtin_classify_type (arg0);
9917 case BUILT_IN_STRLEN:
9918 return fold_builtin_strlen (arg0);
9920 CASE_FLT_FN (BUILT_IN_FABS):
9921 return fold_builtin_fabs (arg0, type);
9925 case BUILT_IN_LLABS:
9926 case BUILT_IN_IMAXABS:
9927 return fold_builtin_abs (arg0, type);
9929 CASE_FLT_FN (BUILT_IN_CONJ):
9930 if (validate_arg (arg0, COMPLEX_TYPE))
9931 return fold_build1 (CONJ_EXPR, type, arg0);
9934 CASE_FLT_FN (BUILT_IN_CREAL):
9935 if (validate_arg (arg0, COMPLEX_TYPE))
9936 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
9939 CASE_FLT_FN (BUILT_IN_CIMAG):
9940 if (validate_arg (arg0, COMPLEX_TYPE))
9941 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9944 CASE_FLT_FN (BUILT_IN_CCOS):
9945 CASE_FLT_FN (BUILT_IN_CCOSH):
9946 /* These functions are "even", i.e. f(x) == f(-x). */
9947 if (validate_arg (arg0, COMPLEX_TYPE))
9949 tree narg = fold_strip_sign_ops (arg0);
9951 return build_call_expr (fndecl, 1, narg);
9955 CASE_FLT_FN (BUILT_IN_CABS):
9956 return fold_builtin_cabs (arg0, type, fndecl);
9958 CASE_FLT_FN (BUILT_IN_CARG):
9959 return fold_builtin_carg (arg0, type);
9961 CASE_FLT_FN (BUILT_IN_SQRT):
9962 return fold_builtin_sqrt (arg0, type);
9964 CASE_FLT_FN (BUILT_IN_CBRT):
9965 return fold_builtin_cbrt (arg0, type);
9967 CASE_FLT_FN (BUILT_IN_ASIN):
9968 if (validate_arg (arg0, REAL_TYPE))
9969 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9970 &dconstm1, &dconst1, true);
9973 CASE_FLT_FN (BUILT_IN_ACOS):
9974 if (validate_arg (arg0, REAL_TYPE))
9975 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9976 &dconstm1, &dconst1, true);
9979 CASE_FLT_FN (BUILT_IN_ATAN):
9980 if (validate_arg (arg0, REAL_TYPE))
9981 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9984 CASE_FLT_FN (BUILT_IN_ASINH):
9985 if (validate_arg (arg0, REAL_TYPE))
9986 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9989 CASE_FLT_FN (BUILT_IN_ACOSH):
9990 if (validate_arg (arg0, REAL_TYPE))
9991 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9992 &dconst1, NULL, true);
9995 CASE_FLT_FN (BUILT_IN_ATANH):
9996 if (validate_arg (arg0, REAL_TYPE))
9997 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9998 &dconstm1, &dconst1, false);
10001 CASE_FLT_FN (BUILT_IN_SIN):
10002 if (validate_arg (arg0, REAL_TYPE))
10003 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10006 CASE_FLT_FN (BUILT_IN_COS):
10007 return fold_builtin_cos (arg0, type, fndecl);
10010 CASE_FLT_FN (BUILT_IN_TAN):
10011 return fold_builtin_tan (arg0, type);
10013 CASE_FLT_FN (BUILT_IN_CEXP):
10014 return fold_builtin_cexp (arg0, type);
10016 CASE_FLT_FN (BUILT_IN_CEXPI):
10017 if (validate_arg (arg0, REAL_TYPE))
10018 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10021 CASE_FLT_FN (BUILT_IN_SINH):
10022 if (validate_arg (arg0, REAL_TYPE))
10023 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10026 CASE_FLT_FN (BUILT_IN_COSH):
10027 return fold_builtin_cosh (arg0, type, fndecl);
10029 CASE_FLT_FN (BUILT_IN_TANH):
10030 if (validate_arg (arg0, REAL_TYPE))
10031 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10034 CASE_FLT_FN (BUILT_IN_ERF):
10035 if (validate_arg (arg0, REAL_TYPE))
10036 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10039 CASE_FLT_FN (BUILT_IN_ERFC):
10040 if (validate_arg (arg0, REAL_TYPE))
10041 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10044 CASE_FLT_FN (BUILT_IN_TGAMMA):
10045 if (validate_arg (arg0, REAL_TYPE))
10046 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10049 CASE_FLT_FN (BUILT_IN_EXP):
10050 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10052 CASE_FLT_FN (BUILT_IN_EXP2):
10053 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10055 CASE_FLT_FN (BUILT_IN_EXP10):
10056 CASE_FLT_FN (BUILT_IN_POW10):
10057 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10059 CASE_FLT_FN (BUILT_IN_EXPM1):
10060 if (validate_arg (arg0, REAL_TYPE))
10061 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10064 CASE_FLT_FN (BUILT_IN_LOG):
10065 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10067 CASE_FLT_FN (BUILT_IN_LOG2):
10068 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10070 CASE_FLT_FN (BUILT_IN_LOG10):
10071 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10073 CASE_FLT_FN (BUILT_IN_LOG1P):
10074 if (validate_arg (arg0, REAL_TYPE))
10075 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10076 &dconstm1, NULL, false);
10079 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10080 CASE_FLT_FN (BUILT_IN_J0):
10081 if (validate_arg (arg0, REAL_TYPE))
10082 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10086 CASE_FLT_FN (BUILT_IN_J1):
10087 if (validate_arg (arg0, REAL_TYPE))
10088 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10092 CASE_FLT_FN (BUILT_IN_Y0):
10093 if (validate_arg (arg0, REAL_TYPE))
10094 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10095 &dconst0, NULL, false);
10098 CASE_FLT_FN (BUILT_IN_Y1):
10099 if (validate_arg (arg0, REAL_TYPE))
10100 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10101 &dconst0, NULL, false);
10105 CASE_FLT_FN (BUILT_IN_NAN):
10106 case BUILT_IN_NAND32:
10107 case BUILT_IN_NAND64:
10108 case BUILT_IN_NAND128:
10109 return fold_builtin_nan (arg0, type, true);
10111 CASE_FLT_FN (BUILT_IN_NANS):
10112 return fold_builtin_nan (arg0, type, false);
10114 CASE_FLT_FN (BUILT_IN_FLOOR):
10115 return fold_builtin_floor (fndecl, arg0);
10117 CASE_FLT_FN (BUILT_IN_CEIL):
10118 return fold_builtin_ceil (fndecl, arg0);
10120 CASE_FLT_FN (BUILT_IN_TRUNC):
10121 return fold_builtin_trunc (fndecl, arg0);
10123 CASE_FLT_FN (BUILT_IN_ROUND):
10124 return fold_builtin_round (fndecl, arg0);
10126 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10127 CASE_FLT_FN (BUILT_IN_RINT):
10128 return fold_trunc_transparent_mathfn (fndecl, arg0);
10130 CASE_FLT_FN (BUILT_IN_LCEIL):
10131 CASE_FLT_FN (BUILT_IN_LLCEIL):
10132 CASE_FLT_FN (BUILT_IN_LFLOOR):
10133 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10134 CASE_FLT_FN (BUILT_IN_LROUND):
10135 CASE_FLT_FN (BUILT_IN_LLROUND):
10136 return fold_builtin_int_roundingfn (fndecl, arg0);
10138 CASE_FLT_FN (BUILT_IN_LRINT):
10139 CASE_FLT_FN (BUILT_IN_LLRINT):
10140 return fold_fixed_mathfn (fndecl, arg0);
10142 case BUILT_IN_BSWAP32:
10143 case BUILT_IN_BSWAP64:
10144 return fold_builtin_bswap (fndecl, arg0);
10146 CASE_INT_FN (BUILT_IN_FFS):
10147 CASE_INT_FN (BUILT_IN_CLZ):
10148 CASE_INT_FN (BUILT_IN_CTZ):
10149 CASE_INT_FN (BUILT_IN_POPCOUNT):
10150 CASE_INT_FN (BUILT_IN_PARITY):
10151 return fold_builtin_bitop (fndecl, arg0);
10153 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10154 return fold_builtin_signbit (arg0, type);
10156 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10157 return fold_builtin_significand (arg0, type);
10159 CASE_FLT_FN (BUILT_IN_ILOGB):
10160 CASE_FLT_FN (BUILT_IN_LOGB):
10161 return fold_builtin_logb (arg0, type);
10163 case BUILT_IN_ISASCII:
10164 return fold_builtin_isascii (arg0);
10166 case BUILT_IN_TOASCII:
10167 return fold_builtin_toascii (arg0);
10169 case BUILT_IN_ISDIGIT:
10170 return fold_builtin_isdigit (arg0);
10172 CASE_FLT_FN (BUILT_IN_FINITE):
10173 case BUILT_IN_FINITED32:
10174 case BUILT_IN_FINITED64:
10175 case BUILT_IN_FINITED128:
10176 case BUILT_IN_ISFINITE:
10177 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10179 CASE_FLT_FN (BUILT_IN_ISINF):
10180 case BUILT_IN_ISINFD32:
10181 case BUILT_IN_ISINFD64:
10182 case BUILT_IN_ISINFD128:
10183 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10185 case BUILT_IN_ISINF_SIGN:
10186 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10188 CASE_FLT_FN (BUILT_IN_ISNAN):
10189 case BUILT_IN_ISNAND32:
10190 case BUILT_IN_ISNAND64:
10191 case BUILT_IN_ISNAND128:
10192 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10194 case BUILT_IN_PRINTF:
10195 case BUILT_IN_PRINTF_UNLOCKED:
10196 case BUILT_IN_VPRINTF:
10197 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10207 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10208 IGNORE is true if the result of the function call is ignored. This
10209 function returns NULL_TREE if no simplification was possible. */
10212 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10214 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10215 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10219 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10220 CASE_FLT_FN (BUILT_IN_JN):
10221 if (validate_arg (arg0, INTEGER_TYPE)
10222 && validate_arg (arg1, REAL_TYPE))
10223 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10226 CASE_FLT_FN (BUILT_IN_YN):
10227 if (validate_arg (arg0, INTEGER_TYPE)
10228 && validate_arg (arg1, REAL_TYPE))
10229 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10233 CASE_FLT_FN (BUILT_IN_DREM):
10234 CASE_FLT_FN (BUILT_IN_REMAINDER):
10235 if (validate_arg (arg0, REAL_TYPE)
10236 && validate_arg(arg1, REAL_TYPE))
10237 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10240 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10241 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10242 if (validate_arg (arg0, REAL_TYPE)
10243 && validate_arg(arg1, POINTER_TYPE))
10244 return do_mpfr_lgamma_r (arg0, arg1, type);
10248 CASE_FLT_FN (BUILT_IN_ATAN2):
10249 if (validate_arg (arg0, REAL_TYPE)
10250 && validate_arg(arg1, REAL_TYPE))
10251 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10254 CASE_FLT_FN (BUILT_IN_FDIM):
10255 if (validate_arg (arg0, REAL_TYPE)
10256 && validate_arg(arg1, REAL_TYPE))
10257 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10260 CASE_FLT_FN (BUILT_IN_HYPOT):
10261 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10263 CASE_FLT_FN (BUILT_IN_LDEXP):
10264 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10265 CASE_FLT_FN (BUILT_IN_SCALBN):
10266 CASE_FLT_FN (BUILT_IN_SCALBLN):
10267 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10269 CASE_FLT_FN (BUILT_IN_FREXP):
10270 return fold_builtin_frexp (arg0, arg1, type);
10272 CASE_FLT_FN (BUILT_IN_MODF):
10273 return fold_builtin_modf (arg0, arg1, type);
10275 case BUILT_IN_BZERO:
10276 return fold_builtin_bzero (arg0, arg1, ignore);
10278 case BUILT_IN_FPUTS:
10279 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10281 case BUILT_IN_FPUTS_UNLOCKED:
10282 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10284 case BUILT_IN_STRSTR:
10285 return fold_builtin_strstr (arg0, arg1, type);
10287 case BUILT_IN_STRCAT:
10288 return fold_builtin_strcat (arg0, arg1);
10290 case BUILT_IN_STRSPN:
10291 return fold_builtin_strspn (arg0, arg1);
10293 case BUILT_IN_STRCSPN:
10294 return fold_builtin_strcspn (arg0, arg1);
10296 case BUILT_IN_STRCHR:
10297 case BUILT_IN_INDEX:
10298 return fold_builtin_strchr (arg0, arg1, type);
10300 case BUILT_IN_STRRCHR:
10301 case BUILT_IN_RINDEX:
10302 return fold_builtin_strrchr (arg0, arg1, type);
10304 case BUILT_IN_STRCPY:
10305 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10307 case BUILT_IN_STRCMP:
10308 return fold_builtin_strcmp (arg0, arg1);
10310 case BUILT_IN_STRPBRK:
10311 return fold_builtin_strpbrk (arg0, arg1, type);
10313 case BUILT_IN_EXPECT:
10314 return fold_builtin_expect (arg0, arg1);
10316 CASE_FLT_FN (BUILT_IN_POW):
10317 return fold_builtin_pow (fndecl, arg0, arg1, type);
10319 CASE_FLT_FN (BUILT_IN_POWI):
10320 return fold_builtin_powi (fndecl, arg0, arg1, type);
10322 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10323 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10325 CASE_FLT_FN (BUILT_IN_FMIN):
10326 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10328 CASE_FLT_FN (BUILT_IN_FMAX):
10329 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10331 case BUILT_IN_ISGREATER:
10332 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10333 case BUILT_IN_ISGREATEREQUAL:
10334 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10335 case BUILT_IN_ISLESS:
10336 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10337 case BUILT_IN_ISLESSEQUAL:
10338 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10339 case BUILT_IN_ISLESSGREATER:
10340 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10341 case BUILT_IN_ISUNORDERED:
10342 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10345 /* We do the folding for va_start in the expander. */
10346 case BUILT_IN_VA_START:
10349 case BUILT_IN_SPRINTF:
10350 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10352 case BUILT_IN_OBJECT_SIZE:
10353 return fold_builtin_object_size (arg0, arg1);
10355 case BUILT_IN_PRINTF:
10356 case BUILT_IN_PRINTF_UNLOCKED:
10357 case BUILT_IN_VPRINTF:
10358 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10360 case BUILT_IN_PRINTF_CHK:
10361 case BUILT_IN_VPRINTF_CHK:
10362 if (!validate_arg (arg0, INTEGER_TYPE)
10363 || TREE_SIDE_EFFECTS (arg0))
10366 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10369 case BUILT_IN_FPRINTF:
10370 case BUILT_IN_FPRINTF_UNLOCKED:
10371 case BUILT_IN_VFPRINTF:
10372 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10381 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10382 and ARG2. IGNORE is true if the result of the function call is ignored.
10383 This function returns NULL_TREE if no simplification was possible. */
/* NOTE(review): this listing is a sampled excerpt; the switch header,
   break statements, default case and closing braces are elided.  */
10386 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10388 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10389 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* Dispatch on the builtin's function code; each case delegates to a
   dedicated fold_builtin_* helper.  */
10393 CASE_FLT_FN (BUILT_IN_SINCOS):
10394 return fold_builtin_sincos (arg0, arg1, arg2);
/* Constant-fold fma via MPFR when all three arguments are real.  */
10396 CASE_FLT_FN (BUILT_IN_FMA):
10397 if (validate_arg (arg0, REAL_TYPE)
10398 && validate_arg(arg1, REAL_TYPE)
10399 && validate_arg(arg2, REAL_TYPE))
10400 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
/* mpfr_remquo is only available from MPFR 2.3.0 onward.  */
10403 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10404 CASE_FLT_FN (BUILT_IN_REMQUO):
10405 if (validate_arg (arg0, REAL_TYPE)
10406 && validate_arg(arg1, REAL_TYPE)
10407 && validate_arg(arg2, POINTER_TYPE))
10408 return do_mpfr_remquo (arg0, arg1, arg2);
10412 case BUILT_IN_MEMSET:
10413 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
/* bcopy (src, dst, len) has src and dst swapped relative to memmove;
   endp==3 selects the overlap-safe (memmove-style) fold.  */
10415 case BUILT_IN_BCOPY:
10416 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10418 case BUILT_IN_MEMCPY:
10419 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
/* endp==1: mempcpy's result points past the copied bytes.  */
10421 case BUILT_IN_MEMPCPY:
10422 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10424 case BUILT_IN_MEMMOVE:
10425 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10427 case BUILT_IN_STRNCAT:
10428 return fold_builtin_strncat (arg0, arg1, arg2);
10430 case BUILT_IN_STRNCPY:
10431 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10433 case BUILT_IN_STRNCMP:
10434 return fold_builtin_strncmp (arg0, arg1, arg2);
10436 case BUILT_IN_MEMCHR:
10437 return fold_builtin_memchr (arg0, arg1, arg2, type);
10439 case BUILT_IN_BCMP:
10440 case BUILT_IN_MEMCMP:
10441 return fold_builtin_memcmp (arg0, arg1, arg2);;
10443 case BUILT_IN_SPRINTF:
10444 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10446 case BUILT_IN_STRCPY_CHK:
10447 case BUILT_IN_STPCPY_CHK:
10448 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10451 case BUILT_IN_STRCAT_CHK:
10452 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
/* For the checked printf variants, the leading flag argument must be a
   side-effect-free integer before we can fold; the fold then uses the
   remaining arguments only.  */
10454 case BUILT_IN_PRINTF_CHK:
10455 case BUILT_IN_VPRINTF_CHK:
10456 if (!validate_arg (arg0, INTEGER_TYPE)
10457 || TREE_SIDE_EFFECTS (arg0))
10460 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10463 case BUILT_IN_FPRINTF:
10464 case BUILT_IN_FPRINTF_UNLOCKED:
10465 case BUILT_IN_VFPRINTF:
10466 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
/* Here the flag is the second argument (after the stream).  */
10468 case BUILT_IN_FPRINTF_CHK:
10469 case BUILT_IN_VFPRINTF_CHK:
10470 if (!validate_arg (arg1, INTEGER_TYPE)
10471 || TREE_SIDE_EFFECTS (arg1))
10474 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10483 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10484 ARG2, and ARG3. IGNORE is true if the result of the function call is
10485 ignored. This function returns NULL_TREE if no simplification was
/* NOTE(review): sampled excerpt — switch header, breaks and closing
   braces are elided from this listing.  */
10489 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10492 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* The checked memory builtins all take (dst, src/val, len, objsize).  */
10496 case BUILT_IN_MEMCPY_CHK:
10497 case BUILT_IN_MEMPCPY_CHK:
10498 case BUILT_IN_MEMMOVE_CHK:
10499 case BUILT_IN_MEMSET_CHK:
10500 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10502 DECL_FUNCTION_CODE (fndecl));
10504 case BUILT_IN_STRNCPY_CHK:
10505 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10507 case BUILT_IN_STRNCAT_CHK:
10508 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
/* The flag (second argument) must be a side-effect-free integer before
   the checked fprintf variants can be folded.  */
10510 case BUILT_IN_FPRINTF_CHK:
10511 case BUILT_IN_VFPRINTF_CHK:
10512 if (!validate_arg (arg1, INTEGER_TYPE)
10513 || TREE_SIDE_EFFECTS (arg1))
10516 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10526 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10527 arguments, where NARGS <= 4. IGNORE is true if the result of the
10528 function call is ignored. This function returns NULL_TREE if no
10529 simplification was possible. Note that this only folds builtins with
10530 fixed argument patterns. Foldings that do varargs-to-varargs
10531 transformations, or that match calls with more than 4 arguments,
10532 need to be handled with fold_builtin_varargs instead. */
10534 #define MAX_ARGS_TO_FOLD_BUILTIN 4
/* NOTE(review): sampled excerpt — the switch on NARGS and surrounding
   braces are elided; each line below is one arm of that dispatch.  */
10537 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10539 tree ret = NULL_TREE;
10544 ret = fold_builtin_0 (fndecl, ignore);
10547 ret = fold_builtin_1 (fndecl, args[0], ignore);
10550 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10553 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10556 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
/* Wrap the folded result in a NOP_EXPR and set TREE_NO_WARNING so that
   removing the original call does not trigger spurious warnings.  */
10564 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10565 TREE_NO_WARNING (ret) = 1;
10571 /* Builtins with folding operations that operate on "..." arguments
10572 need special handling; we need to store the arguments in a convenient
10573 data structure before attempting any folding. Fortunately there are
10574 only a few builtins that fall into this category. FNDECL is the
10575 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10576 result of the function call is ignored. */
10579 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10581 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10582 tree ret = NULL_TREE;
/* Only a handful of variadic builtins have folds; dispatch on the
   function code (switch header elided in this listing).  */
10586 case BUILT_IN_SPRINTF_CHK:
10587 case BUILT_IN_VSPRINTF_CHK:
10588 ret = fold_builtin_sprintf_chk (exp, fcode);
10591 case BUILT_IN_SNPRINTF_CHK:
10592 case BUILT_IN_VSNPRINTF_CHK:
10593 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10596 case BUILT_IN_FPCLASSIFY:
10597 ret = fold_builtin_fpclassify (exp);
/* As in fold_builtin_n: NOP-wrap and mark no-warning so the removal of
   the call does not produce "statement without effect" diagnostics.  */
10605 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10606 TREE_NO_WARNING (ret) = 1;
10612 /* A wrapper function for builtin folding that prevents warnings for
10613 "statement without effect" and the like, caused by removing the
10614 call node earlier than the warning is generated. */
10617 fold_call_expr (tree exp, bool ignore)
10619 tree ret = NULL_TREE;
10620 tree fndecl = get_callee_fndecl (exp);
/* Only attempt folding for genuine builtin FUNCTION_DECLs.  */
10622 && TREE_CODE (fndecl) == FUNCTION_DECL
10623 && DECL_BUILT_IN (fndecl)
10624 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10625 yet. Defer folding until we see all the arguments
10626 (after inlining). */
10627 && !CALL_EXPR_VA_ARG_PACK (exp))
10629 int nargs = call_expr_nargs (exp);
10631 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10632 instead last argument is __builtin_va_arg_pack (). Defer folding
10633 even in that case, until arguments are finalized. */
10634 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10636 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10638 && TREE_CODE (fndecl2) == FUNCTION_DECL
10639 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10640 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10644 /* FIXME: Don't use a list in this interface. */
/* Machine-specific builtins are folded by the target hook.  */
10645 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10646 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
/* Fixed-arity folds go through fold_builtin_n; anything larger is a
   varargs-style builtin.  */
10649 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10651 tree *args = CALL_EXPR_ARGP (exp);
10652 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10655 ret = fold_builtin_varargs (fndecl, exp, ignore);
10658 /* Propagate location information from original call to
10659 expansion of builtin. Otherwise things like
10660 maybe_emit_chk_warning, that operate on the expansion
10661 of a builtin, will use the wrong location information. */
10662 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10664 tree realret = ret;
/* Look through the no-warning NOP wrapper added by the folders.  */
10665 if (TREE_CODE (ret) == NOP_EXPR)
10666 realret = TREE_OPERAND (ret, 0);
10667 if (CAN_HAVE_LOCATION_P (realret)
10668 && !EXPR_HAS_LOCATION (realret))
10669 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10679 /* Conveniently construct a function call expression. FNDECL names the
10680 function to be called and ARGLIST is a TREE_LIST of arguments. */
10683 build_function_call_expr (tree fndecl, tree arglist)
10685 tree fntype = TREE_TYPE (fndecl);
10686 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10687 int n = list_length (arglist);
/* Copy the TREE_LIST into a flat stack array for the array-based API.  */
10688 tree *argarray = (tree *) alloca (n * sizeof (tree));
10691 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10692 argarray[i] = TREE_VALUE (arglist);
10693 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10696 /* Conveniently construct a function call expression. FNDECL names the
10697 function to be called, N is the number of arguments, and the "..."
10698 parameters are the argument expressions. */
10701 build_call_expr (tree fndecl, int n, ...)
10704 tree fntype = TREE_TYPE (fndecl);
10705 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10706 tree *argarray = (tree *) alloca (n * sizeof (tree));
/* Collect the N variadic tree arguments into the stack array
   (va_start/va_end lines are elided from this listing).  */
10710 for (i = 0; i < n; i++)
10711 argarray[i] = va_arg (ap, tree);
10713 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10716 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10717 N arguments are passed in the array ARGARRAY. */
10720 fold_builtin_call_array (tree type,
10725 tree ret = NULL_TREE;
/* Only an ADDR_EXPR of a builtin FUNCTION_DECL can be folded; any other
   callee falls through to a plain CALL_EXPR at the bottom.  */
10729 if (TREE_CODE (fn) == ADDR_EXPR)
10731 tree fndecl = TREE_OPERAND (fn, 0);
10732 if (TREE_CODE (fndecl) == FUNCTION_DECL
10733 && DECL_BUILT_IN (fndecl))
10735 /* If last argument is __builtin_va_arg_pack (), arguments to this
10736 function are not finalized yet. Defer folding until they are. */
10737 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10739 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10741 && TREE_CODE (fndecl2) == FUNCTION_DECL
10742 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10743 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10744 return build_call_array (type, fn, n, argarray);
/* Target builtins take a TREE_LIST; rebuild one from the array
   (consed in reverse so the list ends up in argument order).  */
10746 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10748 tree arglist = NULL_TREE;
10749 for (i = n - 1; i >= 0; i--)
10750 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10751 ret = targetm.fold_builtin (fndecl, arglist, false);
10755 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10757 /* First try the transformations that don't require consing up
10759 ret = fold_builtin_n (fndecl, argarray, n, false);
10764 /* If we got this far, we need to build an exp. */
10765 exp = build_call_array (type, fn, n, argarray);
10766 ret = fold_builtin_varargs (fndecl, exp, false);
10767 return ret ? ret : exp;
10771 return build_call_array (type, fn, n, argarray);
10774 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10775 along with N new arguments specified as the "..." parameters. SKIP
10776 is the number of arguments in EXP to be omitted. This function is used
10777 to do varargs-to-varargs transformations. */
10780 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10782 int oldnargs = call_expr_nargs (exp);
10783 int nargs = oldnargs - skip + n;
10784 tree fntype = TREE_TYPE (fndecl);
10785 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
/* When new arguments are supplied, build a fresh buffer: the N new
   arguments first, then the surviving tail of EXP's arguments.  */
10793 buffer = alloca (nargs * sizeof (tree));
10795 for (i = 0; i < n; i++)
10796 buffer[i] = va_arg (ap, tree);
10798 for (j = skip; j < oldnargs; j++, i++)
10799 buffer[i] = CALL_EXPR_ARG (exp, j);
/* Otherwise (n == 0 path, guard elided here) we can point directly into
   EXP's argument array past the skipped entries.  */
10802 buffer = CALL_EXPR_ARGP (exp) + skip;
10804 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10807 /* Validate a single argument ARG against a tree code CODE representing
10811 validate_arg (const_tree arg, enum tree_code code)
/* POINTER_TYPE and INTEGER_TYPE are matched loosely via the *_TYPE_P
   predicates (so e.g. any integral type satisfies INTEGER_TYPE);
   every other code must match the argument's type code exactly.  */
10815 else if (code == POINTER_TYPE)
10816 return POINTER_TYPE_P (TREE_TYPE (arg));
10817 else if (code == INTEGER_TYPE)
10818 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10819 return code == TREE_CODE (TREE_TYPE (arg));
10822 /* This function validates the types of a function call argument list
10823 against a specified list of tree_codes. If the last specifier is a 0,
10824 that represents an ellipses, otherwise the last specifier must be a
10828 validate_arglist (const_tree callexpr, ...)
10830 enum tree_code code;
10833 const_call_expr_arg_iterator iter;
10836 va_start (ap, callexpr);
10837 init_const_call_expr_arg_iterator (callexpr, &iter);
/* Walk the variadic specifier list in parallel with the call's
   arguments (the enclosing loop/switch is elided in this listing).  */
10841 code = va_arg (ap, enum tree_code);
10845 /* This signifies an ellipses, any further arguments are all ok. */
10849 /* This signifies an endlink, if no arguments remain, return
10850 true, otherwise return false. */
10851 res = !more_const_call_expr_args_p (&iter);
10854 /* If no parameters remain or the parameter's code does not
10855 match the specified code, return false. Otherwise continue
10856 checking any remaining arguments. */
10857 arg = next_const_call_expr_arg (&iter);
10858 if (!validate_arg (arg, code))
10865 /* We need gotos here since we can only have one VA_CLOSE in a
10873 /* Default target-specific builtin expander that does nothing. */
/* All parameters are deliberately unused; targets without md builtins
   install this as their expand hook (body elided in this listing).  */
10876 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10877 rtx target ATTRIBUTE_UNUSED,
10878 rtx subtarget ATTRIBUTE_UNUSED,
10879 enum machine_mode mode ATTRIBUTE_UNUSED,
10880 int ignore ATTRIBUTE_UNUSED)
10885 /* Returns true is EXP represents data that would potentially reside
10886 in a readonly section. */
10889 readonly_data_expr (tree exp)
/* Only the address of an object can name read-only data.  */
10893 if (TREE_CODE (exp) != ADDR_EXPR)
10896 exp = get_base_address (TREE_OPERAND (exp, 0));
10900 /* Make sure we call decl_readonly_section only for trees it
10901 can handle (since it returns true for everything it doesn't
10903 if (TREE_CODE (exp) == STRING_CST
10904 || TREE_CODE (exp) == CONSTRUCTOR
10905 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10906 return decl_readonly_section (exp, 0);
10911 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10912 to the call, and TYPE is its return type.
10914 Return NULL_TREE if no simplification was possible, otherwise return the
10915 simplified form of the call as a tree.
10917 The simplified form may be a constant or other expression which
10918 computes the same value, but in a more efficient manner (including
10919 calls to other builtin functions).
10921 The call may contain arguments which need to be evaluated, but
10922 which are not useful to determine the result of the call. In
10923 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10924 COMPOUND_EXPR will be an argument which must be evaluated.
10925 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10926 COMPOUND_EXPR in the chain will contain the tree for the simplified
10927 form of the builtin function call. */
10930 fold_builtin_strstr (tree s1, tree s2, tree type)
10932 if (!validate_arg (s1, POINTER_TYPE)
10933 || !validate_arg (s2, POINTER_TYPE))
10938 const char *p1, *p2;
10940 p2 = c_getstr (s2);
10944 p1 = c_getstr (s1);
/* Both strings constant: evaluate strstr at compile time.  */
10947 const char *r = strstr (p1, p2);
/* No match: fold to a null pointer of s1's type.  */
10951 return build_int_cst (TREE_TYPE (s1), 0);
10953 /* Return an offset into the constant string argument. */
10954 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10955 s1, size_int (r - p1));
10956 return fold_convert (type, tem);
10959 /* The argument is const char *, and the result is char *, so we need
10960 a type conversion here to avoid a warning. */
10962 return fold_convert (type, s1);
10967 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10971 /* New argument list transforming strstr(s1, s2) to
10972 strchr(s1, s2[0]). */
10973 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10977 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10978 the call, and TYPE is its return type.
10980 Return NULL_TREE if no simplification was possible, otherwise return the
10981 simplified form of the call as a tree.
10983 The simplified form may be a constant or other expression which
10984 computes the same value, but in a more efficient manner (including
10985 calls to other builtin functions).
10987 The call may contain arguments which need to be evaluated, but
10988 which are not useful to determine the result of the call. In
10989 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10990 COMPOUND_EXPR will be an argument which must be evaluated.
10991 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10992 COMPOUND_EXPR in the chain will contain the tree for the simplified
10993 form of the builtin function call. */
10996 fold_builtin_strchr (tree s1, tree s2, tree type)
10998 if (!validate_arg (s1, POINTER_TYPE)
10999 || !validate_arg (s2, INTEGER_TYPE))
/* Only fold when both the string and the character are constants.  */
11005 if (TREE_CODE (s2) != INTEGER_CST)
11008 p1 = c_getstr (s1);
/* target_char_cast fails for characters not representable on the
   target; give up in that case.  */
11015 if (target_char_cast (s2, &c))
11018 r = strchr (p1, c);
/* Character not found: fold to a null pointer of s1's type.  */
11021 return build_int_cst (TREE_TYPE (s1), 0);
11023 /* Return an offset into the constant string argument. */
11024 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11025 s1, size_int (r - p1));
11026 return fold_convert (type, tem);
11032 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11033 the call, and TYPE is its return type.
11035 Return NULL_TREE if no simplification was possible, otherwise return the
11036 simplified form of the call as a tree.
11038 The simplified form may be a constant or other expression which
11039 computes the same value, but in a more efficient manner (including
11040 calls to other builtin functions).
11042 The call may contain arguments which need to be evaluated, but
11043 which are not useful to determine the result of the call. In
11044 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11045 COMPOUND_EXPR will be an argument which must be evaluated.
11046 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11047 COMPOUND_EXPR in the chain will contain the tree for the simplified
11048 form of the builtin function call. */
11051 fold_builtin_strrchr (tree s1, tree s2, tree type)
11053 if (!validate_arg (s1, POINTER_TYPE)
11054 || !validate_arg (s2, INTEGER_TYPE))
11061 if (TREE_CODE (s2) != INTEGER_CST)
11064 p1 = c_getstr (s1);
/* Constant string and constant character: do strrchr at compile time.  */
11071 if (target_char_cast (s2, &c))
11074 r = strrchr (p1, c);
11077 return build_int_cst (TREE_TYPE (s1), 0);
11079 /* Return an offset into the constant string argument. */
11080 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11081 s1, size_int (r - p1));
11082 return fold_convert (type, tem);
/* Non-constant string: only the s2 == '\0' case can be simplified,
   by rewriting to strchr (which searches forward for the NUL).  */
11085 if (! integer_zerop (s2))
11088 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11092 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11093 return build_call_expr (fn, 2, s1, s2);
11097 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11098 to the call, and TYPE is its return type.
11100 Return NULL_TREE if no simplification was possible, otherwise return the
11101 simplified form of the call as a tree.
11103 The simplified form may be a constant or other expression which
11104 computes the same value, but in a more efficient manner (including
11105 calls to other builtin functions).
11107 The call may contain arguments which need to be evaluated, but
11108 which are not useful to determine the result of the call. In
11109 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11110 COMPOUND_EXPR will be an argument which must be evaluated.
11111 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11112 COMPOUND_EXPR in the chain will contain the tree for the simplified
11113 form of the builtin function call. */
11116 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11118 if (!validate_arg (s1, POINTER_TYPE)
11119 || !validate_arg (s2, POINTER_TYPE))
11124 const char *p1, *p2;
11126 p2 = c_getstr (s2);
11130 p1 = c_getstr (s1);
/* Both strings constant: evaluate strpbrk at compile time.  */
11133 const char *r = strpbrk (p1, p2);
11137 return build_int_cst (TREE_TYPE (s1), 0);
11139 /* Return an offset into the constant string argument. */
11140 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11141 s1, size_int (r - p1));
11142 return fold_convert (type, tem);
11146 /* strpbrk(x, "") == NULL.
11147 Evaluate and ignore s1 in case it had side-effects. */
11148 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
/* Accept set of more than one character: no cheaper equivalent.  */
11151 return NULL_TREE; /* Really call strpbrk. */
11153 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11157 /* New argument list transforming strpbrk(s1, s2) to
11158 strchr(s1, s2[0]). */
11159 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11163 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11166 Return NULL_TREE if no simplification was possible, otherwise return the
11167 simplified form of the call as a tree.
11169 The simplified form may be a constant or other expression which
11170 computes the same value, but in a more efficient manner (including
11171 calls to other builtin functions).
11173 The call may contain arguments which need to be evaluated, but
11174 which are not useful to determine the result of the call. In
11175 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11176 COMPOUND_EXPR will be an argument which must be evaluated.
11177 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11178 COMPOUND_EXPR in the chain will contain the tree for the simplified
11179 form of the builtin function call. */
11182 fold_builtin_strcat (tree dst, tree src)
11184 if (!validate_arg (dst, POINTER_TYPE)
11185 || !validate_arg (src, POINTER_TYPE))
11189 const char *p = c_getstr (src);
11191 /* If the string length is zero, return the dst parameter. */
11192 if (p && *p == '\0')
11199 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11200 arguments to the call.
11202 Return NULL_TREE if no simplification was possible, otherwise return the
11203 simplified form of the call as a tree.
11205 The simplified form may be a constant or other expression which
11206 computes the same value, but in a more efficient manner (including
11207 calls to other builtin functions).
11209 The call may contain arguments which need to be evaluated, but
11210 which are not useful to determine the result of the call. In
11211 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11212 COMPOUND_EXPR will be an argument which must be evaluated.
11213 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11214 COMPOUND_EXPR in the chain will contain the tree for the simplified
11215 form of the builtin function call. */
11218 fold_builtin_strncat (tree dst, tree src, tree len)
11220 if (!validate_arg (dst, POINTER_TYPE)
11221 || !validate_arg (src, POINTER_TYPE)
11222 || !validate_arg (len, INTEGER_TYPE))
11226 const char *p = c_getstr (src);
11228 /* If the requested length is zero, or the src parameter string
11229 length is zero, return the dst parameter. */
/* omit_two_operands keeps src and len in the result so any of their
   side effects are still evaluated.  */
11230 if (integer_zerop (len) || (p && *p == '\0'))
11231 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11233 /* If the requested len is greater than or equal to the string
11234 length, call strcat. */
11235 if (TREE_CODE (len) == INTEGER_CST && p
11236 && compare_tree_int (len, strlen (p)) >= 0)
11238 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11240 /* If the replacement _DECL isn't initialized, don't do the
11245 return build_call_expr (fn, 2, dst, src);
11251 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11254 Return NULL_TREE if no simplification was possible, otherwise return the
11255 simplified form of the call as a tree.
11257 The simplified form may be a constant or other expression which
11258 computes the same value, but in a more efficient manner (including
11259 calls to other builtin functions).
11261 The call may contain arguments which need to be evaluated, but
11262 which are not useful to determine the result of the call. In
11263 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11264 COMPOUND_EXPR will be an argument which must be evaluated.
11265 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11266 COMPOUND_EXPR in the chain will contain the tree for the simplified
11267 form of the builtin function call. */
11270 fold_builtin_strspn (tree s1, tree s2)
11272 if (!validate_arg (s1, POINTER_TYPE)
11273 || !validate_arg (s2, POINTER_TYPE))
11277 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11279 /* If both arguments are constants, evaluate at compile-time. */
11282 const size_t r = strspn (p1, p2);
11283 return size_int (r);
11286 /* If either argument is "", return NULL_TREE. */
/* An empty string on either side makes the span length 0.  */
11287 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11288 /* Evaluate and ignore both arguments in case either one has
11290 return omit_two_operands (integer_type_node, integer_zero_node,
11296 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11299 Return NULL_TREE if no simplification was possible, otherwise return the
11300 simplified form of the call as a tree.
11302 The simplified form may be a constant or other expression which
11303 computes the same value, but in a more efficient manner (including
11304 calls to other builtin functions).
11306 The call may contain arguments which need to be evaluated, but
11307 which are not useful to determine the result of the call. In
11308 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11309 COMPOUND_EXPR will be an argument which must be evaluated.
11310 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11311 COMPOUND_EXPR in the chain will contain the tree for the simplified
11312 form of the builtin function call. */
11315 fold_builtin_strcspn (tree s1, tree s2)
11317 if (!validate_arg (s1, POINTER_TYPE)
11318 || !validate_arg (s2, POINTER_TYPE))
11322 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11324 /* If both arguments are constants, evaluate at compile-time. */
11327 const size_t r = strcspn (p1, p2);
11328 return size_int (r);
11331 /* If the first argument is "", return NULL_TREE. */
11332 if (p1 && *p1 == '\0')
11334 /* Evaluate and ignore argument s2 in case it has
11336 return omit_one_operand (integer_type_node,
11337 integer_zero_node, s2);
11340 /* If the second argument is "", return __builtin_strlen(s1). */
11341 if (p2 && *p2 == '\0')
11343 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11345 /* If the replacement _DECL isn't initialized, don't do the
11350 return build_call_expr (fn, 1, s1);
11356 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11357 to the call. IGNORE is true if the value returned
11358 by the builtin will be ignored. UNLOCKED is true is true if this
11359 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11360 the known length of the string. Return NULL_TREE if no simplification
11364 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11366 /* If we're using an unlocked function, assume the other unlocked
11367 functions exist explicitly. */
11368 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11369 : implicit_built_in_decls[BUILT_IN_FPUTC]
11370 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11371 : implicit_built_in_decls[BUILT_IN_FWRITE];
11373 /* If the return value is used, don't do the transformation. */
11377 /* Verify the arguments in the original call. */
11378 if (!validate_arg (arg0, POINTER_TYPE)
11379 || !validate_arg (arg1, POINTER_TYPE))
11383 len = c_strlen (arg0, 0);
11385 /* Get the length of the string passed to fputs. If the length
11386 can't be determined, punt. */
11388 || TREE_CODE (len) != INTEGER_CST)
/* Three-way split on the string length: 0, 1, or >1.  */
11391 switch (compare_tree_int (len, 1))
11393 case -1: /* length is 0, delete the call entirely . */
11394 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11396 case 0: /* length is 1, call fputc. */
11398 const char *p = c_getstr (arg0);
11403 return build_call_expr (fn_fputc, 2,
11404 build_int_cst (NULL_TREE, p[0]), arg1);
11410 case 1: /* length is greater than 1, call fwrite. */
11412 /* If optimizing for size keep fputs. */
11415 /* New argument list transforming fputs(string, stream) to
11416 fwrite(string, 1, len, stream). */
11418 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11423 gcc_unreachable ();
11428 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11429 produced. False otherwise. This is done so that we don't output the error
11430 or warning twice or three times. */
11432 fold_builtin_next_arg (tree exp, bool va_start_p)
11434 tree fntype = TREE_TYPE (current_function_decl);
11435 int nargs = call_expr_nargs (exp);
/* va_start is only legal in a function whose last parameter is "...":
   a fixed-args function's arg-type list ends in void_type_node.  */
11438 if (TYPE_ARG_TYPES (fntype) == 0
11439 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11440 == void_type_node))
11442 error ("%<va_start%> used in function with fixed args");
11448 if (va_start_p && (nargs != 2))
11450 error ("wrong number of arguments to function %<va_start%>");
11453 arg = CALL_EXPR_ARG (exp, 1);
11455 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11456 when we checked the arguments and if needed issued a warning. */
11461 /* Evidently an out of date version of <stdarg.h>; can't validate
11462 va_start's second argument, but can still work as intended. */
11463 warning (0, "%<__builtin_next_arg%> called without an argument")
11466 else if (nargs > 1)
11468 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11471 arg = CALL_EXPR_ARG (exp, 0);
11474 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11475 or __builtin_next_arg (0) the first time we see it, after checking
11476 the arguments and if needed issuing a warning. */
11477 if (!integer_zerop (arg))
11479 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11481 /* Strip off all nops for the sake of the comparison. This
11482 is not quite the same as STRIP_NOPS. It does more.
11483 We must also strip off INDIRECT_EXPR for C++ reference
11485 while (CONVERT_EXPR_P (arg)
11486 || TREE_CODE (arg) == INDIRECT_REF)
11487 arg = TREE_OPERAND (arg, 0);
11488 if (arg != last_parm)
11490 /* FIXME: Sometimes with the tree optimizers we can get the
11491 not the last argument even though the user used the last
11492 argument. We just warn and set the arg to be the last
11493 argument so that we will get wrong-code because of
11495 warning (0, "second parameter of %<va_start%> not last named argument");
11497 /* We want to verify the second parameter just once before the tree
11498 optimizers are run and then avoid keeping it in the tree,
11499 as otherwise we could warn even for correct code like:
11500 void foo (int i, ...)
11501 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11503 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11505 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11511 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11512 ORIG may be null if this is a 2-argument call. We don't attempt to
11513 simplify calls with more than 3 arguments.
11515 Return NULL_TREE if no simplification was possible, otherwise return the
11516 simplified form of the call as a tree. If IGNORED is true, it means that
11517 the caller does not use the returned value of the function. */
11520 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11523 const char *fmt_str = NULL;
11525 /* Verify the required arguments in the original call. We deal with two
11526 types of sprintf() calls: 'sprintf (str, fmt)' and
11527 'sprintf (dest, "%s", orig)'. */
11528 if (!validate_arg (dest, POINTER_TYPE)
11529 || !validate_arg (fmt, POINTER_TYPE))
11531 if (orig && !validate_arg (orig, POINTER_TYPE))
11534 /* Check whether the format is a literal string constant. */
11535 fmt_str = c_getstr (fmt);
11536 if (fmt_str == NULL)
11540 retval = NULL_TREE;
11542 if (!init_target_chars ())
11545 /* If the format doesn't contain % args or %%, use strcpy. */
11546 if (strchr (fmt_str, target_percent) == NULL)
11548 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11553 /* Don't optimize sprintf (buf, "abc", ptr++). */
11557 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11558 'format' is known to contain no % formats. */
11559 call = build_call_expr (fn, 2, dest, fmt);
/* The sprintf return value is the number of characters written.  */
11561 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11564 /* If the format is "%s", use strcpy if the result isn't used. */
11565 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11568 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11573 /* Don't crash on sprintf (str1, "%s"). */
11577 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
/* Only constant-length strings let us also supply the return value.  */
11580 retval = c_strlen (orig, 1);
11581 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11584 call = build_call_expr (fn, 2, dest, orig);
/* When both pieces exist, chain them: evaluate the strcpy call for its
   side effect, then yield the computed length as the result.  */
11587 if (call && retval)
11589 retval = fold_convert
11590 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11592 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11598 /* Expand a call EXP to __builtin_object_size. */
11601 expand_builtin_object_size (tree exp)
11604 int object_size_type;
11605 tree fndecl = get_callee_fndecl (exp);
11607 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11609 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11611 expand_builtin_trap ();
11615 ost = CALL_EXPR_ARG (exp, 1);
/* The second argument selects the object-size mode and must be a
   literal constant in the range 0..3.  */
11618 if (TREE_CODE (ost) != INTEGER_CST
11619 || tree_int_cst_sgn (ost) < 0
11620 || compare_tree_int (ost, 3) > 0)
11622 error ("%Klast argument of %D is not integer constant between 0 and 3",
11624 expand_builtin_trap ();
11628 object_size_type = tree_low_cst (ost, 0);
/* Size still unknown at expansion time: modes 0/1 yield (size_t) -1,
   modes 2/3 yield 0, per the __builtin_object_size contract.  */
11630 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11633 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11634 FCODE is the BUILT_IN_* to use.
11635 Return NULL_RTX if we failed; the caller should emit a normal call,
11636 otherwise try to get the result in TARGET, if convenient (and in
11637 mode MODE if that's convenient). */
11640 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11641 enum built_in_function fcode)
11643 tree dest, src, len, size;
11645 if (!validate_arglist (exp,
11647 fcode == BUILT_IN_MEMSET_CHK
11648 ? INTEGER_TYPE : POINTER_TYPE,
11649 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11652 dest = CALL_EXPR_ARG (exp, 0);
11653 src = CALL_EXPR_ARG (exp, 1);
11654 len = CALL_EXPR_ARG (exp, 2);
11655 size = CALL_EXPR_ARG (exp, 3);
/* Without a compile-time constant object size nothing can be proven.  */
11657 if (! host_integerp (size, 1))
11660 if (host_integerp (len, 1) || integer_all_onesp (size))
/* Constant LEN exceeding the known object size: certain overflow.
   Warn here and let the runtime checking call be emitted.  */
11664 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11666 warning (0, "%Kcall to %D will always overflow destination buffer",
11667 exp, get_callee_fndecl (exp));
11672 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11673 mem{cpy,pcpy,move,set} is available. */
11676 case BUILT_IN_MEMCPY_CHK:
11677 fn = built_in_decls[BUILT_IN_MEMCPY];
11679 case BUILT_IN_MEMPCPY_CHK:
11680 fn = built_in_decls[BUILT_IN_MEMPCPY];
11682 case BUILT_IN_MEMMOVE_CHK:
11683 fn = built_in_decls[BUILT_IN_MEMMOVE];
11685 case BUILT_IN_MEMSET_CHK:
11686 fn = built_in_decls[BUILT_IN_MEMSET];
11695 fn = build_call_expr (fn, 3, dest, src, len);
11696 STRIP_TYPE_NOPS (fn);
/* Peel off COMPOUND_EXPR wrappers (evaluating their side effects) so
   the tail-call flag lands on the actual CALL_EXPR.  */
11697 while (TREE_CODE (fn) == COMPOUND_EXPR)
11699 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11701 fn = TREE_OPERAND (fn, 1);
11703 if (TREE_CODE (fn) == CALL_EXPR)
11704 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11705 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11707 else if (fcode == BUILT_IN_MEMSET_CHK)
11711 unsigned int dest_align
11712 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11714 /* If DEST is not a pointer type, call the normal function. */
11715 if (dest_align == 0)
11718 /* If SRC and DEST are the same (and not volatile), do nothing. */
11719 if (operand_equal_p (src, dest, 0))
11723 if (fcode != BUILT_IN_MEMPCPY_CHK)
11725 /* Evaluate and ignore LEN in case it has side-effects. */
11726 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11727 return expand_expr (dest, target, mode, EXPAND_NORMAL);
/* __mempcpy_chk of SRC == DEST still returns DEST + LEN.  */
11730 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11731 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11734 /* __memmove_chk special case. */
11735 if (fcode == BUILT_IN_MEMMOVE_CHK)
11737 unsigned int src_align
11738 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11740 if (src_align == 0)
11743 /* If src is categorized for a readonly section we can use
11744 normal __memcpy_chk. */
11745 if (readonly_data_expr (src))
11747 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11750 fn = build_call_expr (fn, 4, dest, src, len, size);
11751 STRIP_TYPE_NOPS (fn);
11752 while (TREE_CODE (fn) == COMPOUND_EXPR)
11754 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11756 fn = TREE_OPERAND (fn, 1);
11758 if (TREE_CODE (fn) == CALL_EXPR)
11759 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11760 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11767 /* Emit warning if a buffer overflow is detected at compile time. */
11770 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
/* Pick out the length-like argument and the object-size argument;
   their positions differ among the _chk builtins.  */
11777 case BUILT_IN_STRCPY_CHK:
11778 case BUILT_IN_STPCPY_CHK:
11779 /* For __strcat_chk the warning will be emitted only if overflowing
11780 by at least strlen (dest) + 1 bytes. */
11781 case BUILT_IN_STRCAT_CHK:
11782 len = CALL_EXPR_ARG (exp, 1);
11783 size = CALL_EXPR_ARG (exp, 2);
11786 case BUILT_IN_STRNCAT_CHK:
11787 case BUILT_IN_STRNCPY_CHK:
11788 len = CALL_EXPR_ARG (exp, 2);
11789 size = CALL_EXPR_ARG (exp, 3);
11791 case BUILT_IN_SNPRINTF_CHK:
11792 case BUILT_IN_VSNPRINTF_CHK:
11793 len = CALL_EXPR_ARG (exp, 1);
11794 size = CALL_EXPR_ARG (exp, 3);
11797 gcc_unreachable ();
/* Unknown object size -- (size_t) -1 -- means nothing to check.  */
11803 if (! host_integerp (size, 1) || integer_all_onesp (size))
/* For the copy/cat family LEN is really the source string; replace it
   with its compile-time length when that is known.  */
11808 len = c_strlen (len, 1);
11809 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11812 else if (fcode == BUILT_IN_STRNCAT_CHK)
11814 tree src = CALL_EXPR_ARG (exp, 1);
11815 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11817 src = c_strlen (src, 1);
11818 if (! src || ! host_integerp (src, 1))
/* Source length not provable: only a "might overflow" warning.  */
11820 warning (0, "%Kcall to %D might overflow destination buffer",
11821 exp, get_callee_fndecl (exp));
11824 else if (tree_int_cst_lt (src, size))
11827 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11830 warning (0, "%Kcall to %D will always overflow destination buffer",
11831 exp, get_callee_fndecl (exp));
11834 /* Emit warning if a buffer overflow is detected at compile time
11835 in __sprintf_chk/__vsprintf_chk calls. */
11838 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11840 tree dest, size, len, fmt, flag;
11841 const char *fmt_str;
11842 int nargs = call_expr_nargs (exp);
11844 /* Verify the required arguments in the original call. */
11848 dest = CALL_EXPR_ARG (exp, 0);
11849 flag = CALL_EXPR_ARG (exp, 1);
11850 size = CALL_EXPR_ARG (exp, 2);
11851 fmt = CALL_EXPR_ARG (exp, 3);
/* (size_t) -1 means the destination size is unknown; don't warn.  */
11853 if (! host_integerp (size, 1) || integer_all_onesp (size))
11856 /* Check whether the format is a literal string constant. */
11857 fmt_str = c_getstr (fmt);
11858 if (fmt_str == NULL)
11861 if (!init_target_chars ())
11864 /* If the format doesn't contain % args or %%, we know its size. */
11865 if (strchr (fmt_str, target_percent) == 0)
11866 len = build_int_cstu (size_type_node, strlen (fmt_str))
11867 /* If the format is "%s" and first ... argument is a string literal,
11869 else if (fcode == BUILT_IN_SPRINTF_CHK
11870 && strcmp (fmt_str, target_percent_s) == 0)
11876 arg = CALL_EXPR_ARG (exp, 4);
11877 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11880 len = c_strlen (arg, 1);
11881 if (!len || ! host_integerp (len, 1))
/* Output needs LEN + 1 bytes including the NUL, so LEN >= SIZE is a
   guaranteed overflow.  */
11887 if (! tree_int_cst_lt (len, size))
11889 warning (0, "%Kcall to %D will always overflow destination buffer",
11890 exp, get_callee_fndecl (exp));
11894 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11898 fold_builtin_object_size (tree ptr, tree ost)
11900 tree ret = NULL_TREE;
11901 int object_size_type;
11903 if (!validate_arg (ptr, POINTER_TYPE)
11904 || !validate_arg (ost, INTEGER_TYPE))
/* OST must be a literal 0..3 selecting the object-size mode.  */
11909 if (TREE_CODE (ost) != INTEGER_CST
11910 || tree_int_cst_sgn (ost) < 0
11911 || compare_tree_int (ost, 3) > 0)
11914 object_size_type = tree_low_cst (ost, 0);
11916 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11917 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11918 and (size_t) 0 for types 2 and 3. */
11919 if (TREE_SIDE_EFFECTS (ptr))
11920 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11922 if (TREE_CODE (ptr) == ADDR_EXPR)
11923 ret = build_int_cstu (size_type_node,
11924 compute_builtin_object_size (ptr, object_size_type));
11926 else if (TREE_CODE (ptr) == SSA_NAME)
11928 unsigned HOST_WIDE_INT bytes;
11930 /* If object size is not known yet, delay folding until
11931 later. Maybe subsequent passes will help determining
11933 bytes = compute_builtin_object_size (ptr, object_size_type);
11934 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11936 ret = build_int_cstu (size_type_node, bytes);
/* Verify the computed constant actually fits in size_t before it
   is handed back to the caller.  */
11941 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11942 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11943 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11950 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11951 DEST, SRC, LEN, and SIZE are the arguments to the call.
11952 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11953 code of the builtin. If MAXLEN is not NULL, it is maximum length
11954 passed as third argument. */
11957 fold_builtin_memory_chk (tree fndecl,
11958 tree dest, tree src, tree len, tree size,
11959 tree maxlen, bool ignore,
11960 enum built_in_function fcode)
/* For __memset_chk the second argument is the fill value (integer),
   for the copy/move variants it is a source pointer.  */
11964 if (!validate_arg (dest, POINTER_TYPE)
11965 || !validate_arg (src,
11966 (fcode == BUILT_IN_MEMSET_CHK
11967 ? INTEGER_TYPE : POINTER_TYPE))
11968 || !validate_arg (len, INTEGER_TYPE)
11969 || !validate_arg (size, INTEGER_TYPE))
11972 /* If SRC and DEST are the same (and not volatile), return DEST
11973 (resp. DEST+LEN for __mempcpy_chk). */
11974 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11976 if (fcode != BUILT_IN_MEMPCPY_CHK)
11977 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11980 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11981 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
/* Unknown object size: keep the checking call as-is.  */
11985 if (! host_integerp (size, 1))
11988 if (! integer_all_onesp (size))
11990 if (! host_integerp (len, 1))
11992 /* If LEN is not constant, try MAXLEN too.
11993 For MAXLEN only allow optimizing into non-_ocs function
11994 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11995 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11997 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11999 /* (void) __mempcpy_chk () can be optimized into
12000 (void) __memcpy_chk (). */
12001 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12005 return build_call_expr (fn, 4, dest, src, len, size);
12013 if (tree_int_cst_lt (size, maxlen))
12018 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12019 mem{cpy,pcpy,move,set} is available. */
12022 case BUILT_IN_MEMCPY_CHK:
12023 fn = built_in_decls[BUILT_IN_MEMCPY];
12025 case BUILT_IN_MEMPCPY_CHK:
12026 fn = built_in_decls[BUILT_IN_MEMPCPY];
12028 case BUILT_IN_MEMMOVE_CHK:
12029 fn = built_in_decls[BUILT_IN_MEMMOVE];
12031 case BUILT_IN_MEMSET_CHK:
12032 fn = built_in_decls[BUILT_IN_MEMSET];
12041 return build_call_expr (fn, 3, dest, src, len);
12044 /* Fold a call to the __st[rp]cpy_chk builtin.
12045 DEST, SRC, and SIZE are the arguments to the call.
12046 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12047 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12048 strings passed as second argument. */
12051 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12052 tree maxlen, bool ignore,
12053 enum built_in_function fcode)
12057 if (!validate_arg (dest, POINTER_TYPE)
12058 || !validate_arg (src, POINTER_TYPE)
12059 || !validate_arg (size, INTEGER_TYPE))
12062 /* If SRC and DEST are the same (and not volatile), return DEST. */
12063 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12064 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12066 if (! host_integerp (size, 1))
12069 if (! integer_all_onesp (size))
/* LEN is strlen (SRC) when that is computable at compile time.  */
12071 len = c_strlen (src, 1);
12072 if (! len || ! host_integerp (len, 1))
12074 /* If LEN is not constant, try MAXLEN too.
12075 For MAXLEN only allow optimizing into non-_ocs function
12076 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12077 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12079 if (fcode == BUILT_IN_STPCPY_CHK)
12084 /* If return value of __stpcpy_chk is ignored,
12085 optimize into __strcpy_chk. */
12086 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12090 return build_call_expr (fn, 3, dest, src, size);
12093 if (! len || TREE_SIDE_EFFECTS (len))
12096 /* If c_strlen returned something, but not a constant,
12097 transform __strcpy_chk into __memcpy_chk. */
12098 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
/* Copy LEN + 1 bytes so the terminating NUL is included.  */
12102 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12103 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12104 build_call_expr (fn, 4,
12105 dest, src, len, size));
12111 if (! tree_int_cst_lt (maxlen, size))
12115 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12116 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12117 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12121 return build_call_expr (fn, 2, dest, src);
12124 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12125 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12126 length passed as third argument. */
12129 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12134 if (!validate_arg (dest, POINTER_TYPE)
12135 || !validate_arg (src, POINTER_TYPE)
12136 || !validate_arg (len, INTEGER_TYPE)
12137 || !validate_arg (size, INTEGER_TYPE))
/* With an unknown destination size nothing can be proven; keep the
   checking variant.  */
12140 if (! host_integerp (size, 1))
12143 if (! integer_all_onesp (size))
12145 if (! host_integerp (len, 1))
12147 /* If LEN is not constant, try MAXLEN too.
12148 For MAXLEN only allow optimizing into non-_ocs function
12149 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12150 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12156 if (tree_int_cst_lt (size, maxlen))
12160 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12161 fn = built_in_decls[BUILT_IN_STRNCPY];
12165 return build_call_expr (fn, 3, dest, src, len);
12168 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12169 are the arguments to the call. */
12172 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12177 if (!validate_arg (dest, POINTER_TYPE)
12178 || !validate_arg (src, POINTER_TYPE)
12179 || !validate_arg (size, INTEGER_TYPE))
12182 p = c_getstr (src);
12183 /* If the SRC parameter is "", return DEST. */
12184 if (p && *p == '\0')
12185 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
/* Only drop the check when SIZE is the constant (size_t) -1,
   i.e. the object size is explicitly unchecked.  */
12187 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12190 /* If __builtin_strcat_chk is used, assume strcat is available. */
12191 fn = built_in_decls[BUILT_IN_STRCAT];
12195 return build_call_expr (fn, 2, dest, src);
12198 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN and SIZE.  FNDECL is the builtin's own declaration, used for its
   return type.  Returns the simplified call as a tree, or NULL_TREE
   when no simplification applies.  */
12202 fold_builtin_strncat_chk (tree fndecl,
12203 tree dest, tree src, tree len, tree size)
/* Bug fix: the original validated SIZE twice and never validated LEN,
   although the arguments are (dest, src, len, size).  Validate LEN in
   the third position.  */
12208 if (!validate_arg (dest, POINTER_TYPE)
12209 || !validate_arg (src, POINTER_TYPE)
12210 || !validate_arg (len, INTEGER_TYPE)
12211 || !validate_arg (size, INTEGER_TYPE))
12214 p = c_getstr (src);
12215 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12216 if (p && *p == '\0')
12217 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12218 else if (integer_zerop (len))
12219 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
/* Without a constant object size nothing further can be proven.  */
12221 if (! host_integerp (size, 1))
12224 if (! integer_all_onesp (size))
12226 tree src_len = c_strlen (src, 1);
12228 && host_integerp (src_len, 1)
12229 && host_integerp (len, 1)
12230 && ! tree_int_cst_lt (len, src_len))
12232 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12233 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12237 return build_call_expr (fn, 3, dest, src, size);
12242 /* If __builtin_strncat_chk is used, assume strncat is available. */
12243 fn = built_in_decls[BUILT_IN_STRNCAT];
12247 return build_call_expr (fn, 3, dest, src, len);
12250 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12251 a normal call should be emitted rather than expanding the function
12252 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12255 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12257 tree dest, size, len, fn, fmt, flag;
12258 const char *fmt_str;
12259 int nargs = call_expr_nargs (exp);
12261 /* Verify the required arguments in the original call. */
12264 dest = CALL_EXPR_ARG (exp, 0);
12265 if (!validate_arg (dest, POINTER_TYPE))
12267 flag = CALL_EXPR_ARG (exp, 1);
12268 if (!validate_arg (flag, INTEGER_TYPE))
12270 size = CALL_EXPR_ARG (exp, 2);
12271 if (!validate_arg (size, INTEGER_TYPE))
12273 fmt = CALL_EXPR_ARG (exp, 3);
12274 if (!validate_arg (fmt, POINTER_TYPE))
12277 if (! host_integerp (size, 1))
12282 if (!init_target_chars ())
12285 /* Check whether the format is a literal string constant. */
12286 fmt_str = c_getstr (fmt);
12287 if (fmt_str != NULL)
12289 /* If the format doesn't contain % args or %%, we know the size. */
12290 if (strchr (fmt_str, target_percent) == 0)
12292 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12293 len = build_int_cstu (size_type_node, strlen (fmt_str));
12295 /* If the format is "%s" and first ... argument is a string literal,
12296 we know the size too. */
12297 else if (fcode == BUILT_IN_SPRINTF_CHK
12298 && strcmp (fmt_str, target_percent_s) == 0)
12304 arg = CALL_EXPR_ARG (exp, 4);
12305 if (validate_arg (arg, POINTER_TYPE))
12307 len = c_strlen (arg, 1);
12308 if (! len || ! host_integerp (len, 1))
/* With a known object size, only fold when the output provably
   fits, i.e. LEN is known and LEN < SIZE.  */
12315 if (! integer_all_onesp (size))
12317 if (! len || ! tree_int_cst_lt (len, size))
12321 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12322 or if format doesn't contain % chars or is "%s". */
12323 if (! integer_zerop (flag))
12325 if (fmt_str == NULL)
12327 if (strchr (fmt_str, target_percent) != NULL
12328 && strcmp (fmt_str, target_percent_s))
12332 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12333 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12334 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
/* Rebuild the call keeping dest and fmt, dropping flag and size.  */
12338 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12341 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12342 a normal call should be emitted rather than expanding the function
12343 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12344 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12345 passed as second argument. */
12348 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12349 enum built_in_function fcode)
12351 tree dest, size, len, fn, fmt, flag;
12352 const char *fmt_str;
12354 /* Verify the required arguments in the original call. */
12355 if (call_expr_nargs (exp) < 5)
12357 dest = CALL_EXPR_ARG (exp, 0);
12358 if (!validate_arg (dest, POINTER_TYPE))
12360 len = CALL_EXPR_ARG (exp, 1);
12361 if (!validate_arg (len, INTEGER_TYPE))
12363 flag = CALL_EXPR_ARG (exp, 2);
12364 if (!validate_arg (flag, INTEGER_TYPE))
12366 size = CALL_EXPR_ARG (exp, 3);
12367 if (!validate_arg (size, INTEGER_TYPE))
12369 fmt = CALL_EXPR_ARG (exp, 4);
12370 if (!validate_arg (fmt, POINTER_TYPE))
/* Without a constant object size nothing can be proven.  */
12373 if (! host_integerp (size, 1))
12376 if (! integer_all_onesp (size))
12378 if (! host_integerp (len, 1))
12380 /* If LEN is not constant, try MAXLEN too.
12381 For MAXLEN only allow optimizing into non-_ocs function
12382 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12383 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12389 if (tree_int_cst_lt (size, maxlen))
12393 if (!init_target_chars ())
12396 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12397 or if format doesn't contain % chars or is "%s". */
12398 if (! integer_zerop (flag))
12400 fmt_str = c_getstr (fmt);
12401 if (fmt_str == NULL)
12403 if (strchr (fmt_str, target_percent) != NULL
12404 && strcmp (fmt_str, target_percent_s))
12408 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12410 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12411 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
/* Rebuild the call keeping dest, len and fmt, dropping flag and size.  */
12415 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12418 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12419 FMT and ARG are the arguments to the call; we don't fold cases with
12420 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12422 Return NULL_TREE if no simplification was possible, otherwise return the
12423 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12424 code of the function to be simplified. */
12427 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12428 enum built_in_function fcode)
12430 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12431 const char *fmt_str = NULL;
12433 /* If the return value is used, don't do the transformation. */
12437 /* Verify the required arguments in the original call. */
12438 if (!validate_arg (fmt, POINTER_TYPE))
12441 /* Check whether the format is a literal string constant. */
12442 fmt_str = c_getstr (fmt);
12443 if (fmt_str == NULL)
12446 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12448 /* If we're using an unlocked function, assume the other
12449 unlocked functions exist explicitly. */
12450 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12451 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12455 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12456 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12459 if (!init_target_chars ())
12462 if (strcmp (fmt_str, target_percent_s) == 0
12463 || strchr (fmt_str, target_percent) == NULL)
12467 if (strcmp (fmt_str, target_percent_s) == 0)
/* "%s" has a mandatory string argument; va_list variants cannot
   inspect it, so skip them.  */
12469 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12472 if (!arg || !validate_arg (arg, POINTER_TYPE))
12475 str = c_getstr (arg);
12481 /* The format specifier doesn't contain any '%' characters. */
12482 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12488 /* If the string was "", printf does nothing. */
12489 if (str[0] == '\0')
12490 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12492 /* If the string has length of 1, call putchar. */
12493 if (str[1] == '\0')
12495 /* Given printf("c"), (where c is any one character,)
12496 convert "c"[0] to an int and pass that to the replacement
12498 newarg = build_int_cst (NULL_TREE, str[0]);
12500 call = build_call_expr (fn_putchar, 1, newarg);
12504 /* If the string was "string\n", call puts("string"). */
12505 size_t len = strlen (str);
/* Compare in the target charset -- the newline may differ from
   the host's '\n'.  */
12506 if ((unsigned char)str[len - 1] == target_newline)
12508 /* Create a NUL-terminated string that's one char shorter
12509 than the original, stripping off the trailing '\n'. */
12510 char *newstr = alloca (len);
12511 memcpy (newstr, str, len - 1);
12512 newstr[len - 1] = 0;
12514 newarg = build_string_literal (len, newstr);
12516 call = build_call_expr (fn_puts, 1, newarg);
12519 /* We'd like to arrange to call fputs(string,stdout) here,
12520 but we need stdout and don't have a way to get it yet. */
12525 /* The other optimizations can be done only on the non-va_list variants. */
12526 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12529 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12530 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12532 if (!arg || !validate_arg (arg, POINTER_TYPE))
12535 call = build_call_expr (fn_puts, 1, arg);
12538 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12539 else if (strcmp (fmt_str, target_percent_c) == 0)
12541 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12544 call = build_call_expr (fn_putchar, 1, arg);
12550 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12553 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12554 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12555 more than 3 arguments, and ARG may be null in the 2-argument case.
12557 Return NULL_TREE if no simplification was possible, otherwise return the
12558 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12559 code of the function to be simplified. */
12562 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12563 enum built_in_function fcode)
12565 tree fn_fputc, fn_fputs, call = NULL_TREE;
12566 const char *fmt_str = NULL;
12568 /* If the return value is used, don't do the transformation. */
12572 /* Verify the required arguments in the original call. */
12573 if (!validate_arg (fp, POINTER_TYPE))
12575 if (!validate_arg (fmt, POINTER_TYPE))
12578 /* Check whether the format is a literal string constant. */
12579 fmt_str = c_getstr (fmt);
12580 if (fmt_str == NULL)
12583 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12585 /* If we're using an unlocked function, assume the other
12586 unlocked functions exist explicitly. */
12587 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12588 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12592 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12593 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12596 if (!init_target_chars ())
12599 /* If the format doesn't contain % args or %%, use strcpy. */
12600 if (strchr (fmt_str, target_percent) == NULL)
12602 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12606 /* If the format specifier was "", fprintf does nothing. */
12607 if (fmt_str[0] == '\0')
12609 /* If FP has side-effects, just wait until gimplification is
12611 if (TREE_SIDE_EFFECTS (fp))
/* fprintf (fp, "") returns 0 characters written.  */
12614 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12617 /* When "string" doesn't contain %, replace all cases of
12618 fprintf (fp, string) with fputs (string, fp). The fputs
12619 builtin will take care of special cases like length == 1. */
12621 call = build_call_expr (fn_fputs, 2, fmt, fp);
12624 /* The other optimizations can be done only on the non-va_list variants. */
12625 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12628 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12629 else if (strcmp (fmt_str, target_percent_s) == 0)
12631 if (!arg || !validate_arg (arg, POINTER_TYPE))
12634 call = build_call_expr (fn_fputs, 2, arg, fp);
12637 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12638 else if (strcmp (fmt_str, target_percent_c) == 0)
12640 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12643 call = build_call_expr (fn_fputc, 2, arg, fp);
/* Convert the replacement's result to fprintf's return type (int).  */
12648 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12651 /* Initialize format string characters in the target charset. */
12654 init_target_chars (void)
12659 target_newline = lang_hooks.to_target_charset ('\n');
12660 target_percent = lang_hooks.to_target_charset ('%');
12661 target_c = lang_hooks.to_target_charset ('c');
12662 target_s = lang_hooks.to_target_charset ('s');
/* A zero result means the language hook could not map the character
   into the target charset; in that case no folding is possible.  */
12663 if (target_newline == 0 || target_percent == 0 || target_c == 0
/* Precompute the "%c", "%s" and "%s\n" strings in the target charset
   for the printf/sprintf folders above.  */
12667 target_percent_c[0] = target_percent;
12668 target_percent_c[1] = target_c;
12669 target_percent_c[2] = '\0';
12671 target_percent_s[0] = target_percent;
12672 target_percent_s[1] = target_s;
12673 target_percent_s[2] = '\0';
12675 target_percent_s_newline[0] = target_percent;
12676 target_percent_s_newline[1] = target_s;
12677 target_percent_s_newline[2] = target_newline;
12678 target_percent_s_newline[3] = '\0';
12685 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12686 and no overflow/underflow occurred. INEXACT is true if M was not
12687 exactly calculated. TYPE is the tree type for the result. This
12688 function assumes that you cleared the MPFR flags and then
12689 calculated M to see if anything subsequently set a flag prior to
12690 entering this function. Return NULL_TREE if any checks fail. */
12693 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12695 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12696 overflow/underflow occurred. If -frounding-math, proceed iff the
12697 result of calling FUNC was exact. */
12698 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12699 && (!flag_rounding_math || !inexact))
12701 REAL_VALUE_TYPE rr;
12703 real_from_mpfr (&rr, m, type, GMP_RNDN);
12704 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12705 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12706 but the mpft_t is not, then we underflowed in the
12708 if (real_isfinite (&rr)
12709 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12711 REAL_VALUE_TYPE rmode;
/* Round-trip through TYPE's machine mode to verify the value is
   exactly representable there.  */
12713 real_convert (&rmode, TYPE_MODE (type), &rr);
12714 /* Proceed iff the specified mode can hold the value. */
12715 if (real_identical (&rmode, &rr))
12716 return build_real (type, rmode);
12722 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12723 FUNC on it and return the resulting value as a tree with type TYPE.
12724 If MIN and/or MAX are not NULL, then the supplied ARG must be
12725 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12726 acceptable values, otherwise they are not. The mpfr precision is
12727 set to the precision of TYPE. We assume that function FUNC returns
12728 zero if the result could be calculated exactly within the requested
12732 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12733 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12736 tree result = NULL_TREE;
12740 /* To proceed, MPFR must exactly represent the target floating point
12741 format, which only happens when the target base equals two. */
12742 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12743 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12745 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
/* Enforce the optional [MIN, MAX] domain restriction; INCLUSIVE
   chooses between >=/<= and strict >/< comparisons.  */
12747 if (real_isfinite (ra)
12748 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12749 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12751 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
/* Evaluate FUNC at TYPE's precision; do_mpfr_ckconv then rejects
   any inexact/overflowed result per the flags cleared here.  */
12755 mpfr_init2 (m, prec);
12756 mpfr_from_real (m, ra, GMP_RNDN);
12757 mpfr_clear_flags ();
12758 inexact = func (m, m, GMP_RNDN);
12759 result = do_mpfr_ckconv (m, type, inexact);
12767 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12768 FUNC on it and return the resulting value as a tree with type TYPE.
12769 The mpfr precision is set to the precision of TYPE. We assume that
12770 function FUNC returns zero if the result could be calculated
12771 exactly within the requested precision. */
12774 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12775 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12777 tree result = NULL_TREE;
12782 /* To proceed, MPFR must exactly represent the target floating point
12783 format, which only happens when the target base equals two. */
12784 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12785 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12786 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12788 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12789 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12791 if (real_isfinite (ra1) && real_isfinite (ra2))
12793 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
/* Evaluate FUNC (m1, m2) at TYPE's precision with flags cleared so
   do_mpfr_ckconv can verify exactness afterwards.  */
12797 mpfr_inits2 (prec, m1, m2, NULL);
12798 mpfr_from_real (m1, ra1, GMP_RNDN);
12799 mpfr_from_real (m2, ra2, GMP_RNDN);
12800 mpfr_clear_flags ();
12801 inexact = func (m1, m1, m2, GMP_RNDN);
12802 result = do_mpfr_ckconv (m1, type, inexact);
12803 mpfr_clears (m1, m2, NULL);
12810 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12811 FUNC on it and return the resulting value as a tree with type TYPE.
12812 The mpfr precision is set to the precision of TYPE. We assume that
12813 function FUNC returns zero if the result could be calculated
12814 exactly within the requested precision. */
12817 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12818 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12820 tree result = NULL_TREE;
12826 /* To proceed, MPFR must exactly represent the target floating point
12827 format, which only happens when the target base equals two. */
12828 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12829 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12830 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12831 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12833 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12834 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12835 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12837 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12839 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
/* Evaluate FUNC (m1, m2, m3) at TYPE's precision with flags cleared
   so do_mpfr_ckconv can verify exactness afterwards.  */
12843 mpfr_inits2 (prec, m1, m2, m3, NULL);
12844 mpfr_from_real (m1, ra1, GMP_RNDN);
12845 mpfr_from_real (m2, ra2, GMP_RNDN);
12846 mpfr_from_real (m3, ra3, GMP_RNDN);
12847 mpfr_clear_flags ();
12848 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12849 result = do_mpfr_ckconv (m1, type, inexact);
12850 mpfr_clears (m1, m2, m3, NULL);
12857 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12858 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12859 If ARG_SINP and ARG_COSP are NULL then the result is returned
12860 as a complex value.
12861 The type is taken from the type of ARG and is used for setting the
12862 precision of the calculation and results. */
/* NOTE(review): line-sampled extract.  The "static tree" line, braces,
   the declarations of INEXACT and the mpfr_t locals M/MS/MC, the RHS
   of the two MODIFY_EXPRs (original lines 12910/12913), and the final
   "return result;" are elided -- consult the full file.  */
12865 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12867 tree const type = TREE_TYPE (arg);
12868 tree result = NULL_TREE;
12872 /* To proceed, MPFR must exactly represent the target floating point
12873 format, which only happens when the target base equals two. */
12874 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12875 && TREE_CODE (arg) == REAL_CST
12876 && !TREE_OVERFLOW (arg))
12878 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
/* Only finite arguments fold; NaN/Inf are left to the runtime.  */
12880 if (real_isfinite (ra))
12882 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12883 tree result_s, result_c;
/* One input value M, two outputs MS (sin) and MC (cos), all at the
   target type's precision.  */
12887 mpfr_inits2 (prec, m, ms, mc, NULL);
12888 mpfr_from_real (m, ra, GMP_RNDN);
12889 mpfr_clear_flags ();
/* mpfr_sin_cos computes both results in one call; its return value
   encodes inexactness for the pair.  */
12890 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12891 result_s = do_mpfr_ckconv (ms, type, inexact);
12892 result_c = do_mpfr_ckconv (mc, type, inexact);
12893 mpfr_clears (m, ms, mc, NULL);
/* Proceed only if BOTH conversions produced exact trees.  */
12894 if (result_s && result_c)
12896 /* If we are to return in a complex value do so. */
/* Argument order gives cos as the real part and sin as the
   imaginary part, i.e. cos(x) + i*sin(x) -- presumably for
   folding cexpi-style builtins; confirm with callers.  */
12897 if (!arg_sinp && !arg_cosp)
12898 return build_complex (build_complex_type (type),
12899 result_c, result_s);
12901 /* Dereference the sin/cos pointer arguments. */
12902 arg_sinp = build_fold_indirect_ref (arg_sinp);
12903 arg_cosp = build_fold_indirect_ref (arg_cosp);
12904 /* Proceed if valid pointer type were passed in. */
12905 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12906 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12908 /* Set the values. */
12909 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
/* Mark the assignment as having side effects so it is not
   optimized away as a pure expression.  */
12911 TREE_SIDE_EFFECTS (result_s) = 1;
12912 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12914 TREE_SIDE_EFFECTS (result_c) = 1;
12915 /* Combine the assignments into a compound expr. */
12916 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12917 result_s, result_c));
12925 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
12926 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12927 two-argument mpfr order N Bessel function FUNC on them and return
12928 the resulting value as a tree with type TYPE. The mpfr precision
12929 is set to the precision of TYPE. We assume that function FUNC
12930 returns zero if the result could be calculated exactly within the
12931 requested precision. */
/* NOTE(review): line-sampled extract.  The "static tree" line, braces,
   INEXACT and mpfr_t M declarations, the opening of the inner "if"
   condition (only its "&& real_isfinite ..." continuation is visible;
   presumably a check that N fits in a host long -- confirm), and the
   trailing "return result;" are elided.  */
12933 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12934 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
/* MIN/INCLUSIVE optionally restrict the domain of ARG2: when MIN is
   non-NULL, folding requires ra >= *min (inclusive) or ra > *min.  */
12935 const REAL_VALUE_TYPE *min, bool inclusive)
12937 tree result = NULL_TREE;
12942 /* To proceed, MPFR must exactly represent the target floating point
12943 format, which only happens when the target base equals two. */
12944 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12945 && host_integerp (arg1, 0)
12946 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
/* The Bessel order comes from the integer constant ARG1.  */
12948 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12949 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12952 && real_isfinite (ra)
12953 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12955 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12959 mpfr_init2 (m, prec);
12960 mpfr_from_real (m, ra, GMP_RNDN);
/* Reset MPFR exception flags so downstream overflow/underflow checks
   see only this computation -- confirm against do_mpfr_ckconv.  */
12961 mpfr_clear_flags ();
/* In-place: M is destination and real operand; N is the order.  */
12962 inexact = func (m, n, m, GMP_RNDN);
12963 result = do_mpfr_ckconv (m, type, inexact);
12971 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12972 the pointer *(ARG_QUO) and return the result. The type is taken
12973 from the type of ARG0 and is used for setting the precision of the
12974 calculation and results. */
/* NOTE(review): line-sampled extract.  The "static tree" line, braces,
   the declarations of RESULT_REM, INTEGER_QUO (a host long, judging by
   the cast below) and the mpfr_t locals M0/M1, the guard around the
   quo handling (original line ~13011, presumably "if (result_rem)"),
   and the final "return result;" are elided -- check the full file.  */
12977 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12979 tree const type = TREE_TYPE (arg0);
12980 tree result = NULL_TREE;
12985 /* To proceed, MPFR must exactly represent the target floating point
12986 format, which only happens when the target base equals two. */
12987 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12988 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12989 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12991 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12992 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
/* Fold only for finite operands; NaN/Inf go to the runtime.  */
12994 if (real_isfinite (ra0) && real_isfinite (ra1))
12996 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
13001 mpfr_inits2 (prec, m0, m1, NULL);
13002 mpfr_from_real (m0, ra0, GMP_RNDN);
13003 mpfr_from_real (m1, ra1, GMP_RNDN);
13004 mpfr_clear_flags ();
/* In-place: M0 receives the remainder; INTEGER_QUO receives the
   low-order quotient bits.  */
13005 mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
13006 /* Remquo is independent of the rounding mode, so pass
13007 inexact=0 to do_mpfr_ckconv(). */
13008 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13009 mpfr_clears (m0, m1, NULL);
13012 /* MPFR calculates quo in the host's long so it may
13013 return more bits in quo than the target int can hold
13014 if sizeof(host long) > sizeof(target int). This can
13015 happen even for native compilers in LP64 mode. In
13016 these cases, modulo the quo value with the largest
13017 number that the target int can hold while leaving one
13018 bit for the sign. */
/* The shift is safe here because this branch only runs when the
   host long is wider than the target int, so INT_TYPE_SIZE - 1 is
   strictly less than the width of unsigned long.  */
13019 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13020 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13022 /* Dereference the quo pointer argument. */
13023 arg_quo = build_fold_indirect_ref (arg_quo);
13024 /* Proceed iff a valid pointer type was passed in. */
13025 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13027 /* Set the value. */
13028 tree result_quo = fold_build2 (MODIFY_EXPR,
13029 TREE_TYPE (arg_quo), arg_quo,
13030 build_int_cst (NULL, integer_quo));
/* Mark the store so it cannot be discarded as side-effect free.  */
13031 TREE_SIDE_EFFECTS (result_quo) = 1;
13032 /* Combine the quo assignment with the rem. */
13033 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13034 result_quo, result_rem));
13042 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13043 resulting value as a tree with type TYPE. The mpfr precision is
13044 set to the precision of TYPE. We assume that this mpfr function
13045 returns zero if the result could be calculated exactly within the
13046 requested precision. In addition, the integer pointer represented
13047 by ARG_SG will be dereferenced and set to the appropriate signgam
13051 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13053 tree result = NULL_TREE;
13057 /* To proceed, MPFR must exactly represent the target floating point
13058 format, which only happens when the target base equals two. Also
13059 verify ARG is a constant and that ARG_SG is an int pointer. */
13060 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13061 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13062 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13063 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13065 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13067 /* In addition to NaN and Inf, the argument cannot be zero or a
13068 negative integer. */
13069 if (real_isfinite (ra)
13070 && ra->cl != rvc_zero
13071 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13073 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
13078 mpfr_init2 (m, prec);
13079 mpfr_from_real (m, ra, GMP_RNDN);
13080 mpfr_clear_flags ();
13081 inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
13082 result_lg = do_mpfr_ckconv (m, type, inexact);
13088 /* Dereference the arg_sg pointer argument. */
13089 arg_sg = build_fold_indirect_ref (arg_sg);
13090 /* Assign the signgam value into *arg_sg. */
13091 result_sg = fold_build2 (MODIFY_EXPR,
13092 TREE_TYPE (arg_sg), arg_sg,
13093 build_int_cst (NULL, sg));
13094 TREE_SIDE_EFFECTS (result_sg) = 1;
13095 /* Combine the signgam assignment with the lgamma result. */
13096 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13097 result_sg, result_lg));