1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
30 #include "tree-gimple.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names[(int) END_BUILTINS] =
65 #include "builtins.def"
69 /* Setup an array of _DECL trees, make sure each element is
70 initialized to NULL_TREE. */
71 tree built_in_decls[(int) END_BUILTINS];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 It may be NULL_TREE when this is invalid (for instance runtime is not
74 required to implement the function call in all cases). */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
129 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
132 static rtx expand_builtin_bzero (tree);
133 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_alloca (tree, rtx);
139 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static rtx expand_builtin_fputs (tree, rtx, bool);
142 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
143 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_expect (tree, tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (tree);
151 static tree fold_builtin_inf (tree, int);
152 static tree fold_builtin_nan (tree, tree, int);
153 static tree rewrite_call_expr (tree, int, tree, int, ...);
154 static bool validate_arg (const_tree, enum tree_code code);
155 static bool integer_valued_real_p (tree);
156 static tree fold_trunc_transparent_mathfn (tree, tree);
157 static bool readonly_data_expr (tree);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_signbit (tree, rtx);
160 static tree fold_builtin_sqrt (tree, tree);
161 static tree fold_builtin_cbrt (tree, tree);
162 static tree fold_builtin_pow (tree, tree, tree, tree);
163 static tree fold_builtin_powi (tree, tree, tree, tree);
164 static tree fold_builtin_cos (tree, tree, tree);
165 static tree fold_builtin_cosh (tree, tree, tree);
166 static tree fold_builtin_tan (tree, tree);
167 static tree fold_builtin_trunc (tree, tree);
168 static tree fold_builtin_floor (tree, tree);
169 static tree fold_builtin_ceil (tree, tree);
170 static tree fold_builtin_round (tree, tree);
171 static tree fold_builtin_int_roundingfn (tree, tree);
172 static tree fold_builtin_bitop (tree, tree);
173 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
174 static tree fold_builtin_strchr (tree, tree, tree);
175 static tree fold_builtin_memchr (tree, tree, tree, tree);
176 static tree fold_builtin_memcmp (tree, tree, tree);
177 static tree fold_builtin_strcmp (tree, tree);
178 static tree fold_builtin_strncmp (tree, tree, tree);
179 static tree fold_builtin_signbit (tree, tree);
180 static tree fold_builtin_copysign (tree, tree, tree, tree);
181 static tree fold_builtin_isascii (tree);
182 static tree fold_builtin_toascii (tree);
183 static tree fold_builtin_isdigit (tree);
184 static tree fold_builtin_fabs (tree, tree);
185 static tree fold_builtin_abs (tree, tree);
186 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
188 static tree fold_builtin_n (tree, tree *, int, bool);
189 static tree fold_builtin_0 (tree, bool);
190 static tree fold_builtin_1 (tree, tree, bool);
191 static tree fold_builtin_2 (tree, tree, tree, bool);
192 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
193 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
194 static tree fold_builtin_varargs (tree, tree, bool);
196 static tree fold_builtin_strpbrk (tree, tree, tree);
197 static tree fold_builtin_strstr (tree, tree, tree);
198 static tree fold_builtin_strrchr (tree, tree, tree);
199 static tree fold_builtin_strcat (tree, tree);
200 static tree fold_builtin_strncat (tree, tree, tree);
201 static tree fold_builtin_strspn (tree, tree);
202 static tree fold_builtin_strcspn (tree, tree);
203 static tree fold_builtin_sprintf (tree, tree, tree, int);
205 static rtx expand_builtin_object_size (tree);
206 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
207 enum built_in_function);
208 static void maybe_emit_chk_warning (tree, enum built_in_function);
209 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
210 static tree fold_builtin_object_size (tree, tree);
211 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
212 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
213 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
214 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
215 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
216 enum built_in_function);
217 static bool init_target_chars (void);
219 static unsigned HOST_WIDE_INT target_newline;
220 static unsigned HOST_WIDE_INT target_percent;
221 static unsigned HOST_WIDE_INT target_c;
222 static unsigned HOST_WIDE_INT target_s;
223 static char target_percent_c[3];
224 static char target_percent_s[3];
225 static char target_percent_s_newline[4];
226 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_arg2 (tree, tree, tree,
229 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
230 static tree do_mpfr_arg3 (tree, tree, tree, tree,
231 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
232 static tree do_mpfr_sincos (tree, tree, tree);
233 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
234 static tree do_mpfr_bessel_n (tree, tree, tree,
235 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
236 const REAL_VALUE_TYPE *, bool);
237 static tree do_mpfr_remquo (tree, tree, tree);
238 static tree do_mpfr_lgamma_r (tree, tree, tree);
241 /* Return true if NODE should be considered for inline expansion regardless
242 of the optimization level. This means whenever a function is invoked with
243 its "internal" name, which normally contains the prefix "__builtin". */
245 static bool called_as_built_in (tree node)
247 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
248 if (strncmp (name, "__builtin_", 10) == 0)
250 if (strncmp (name, "__sync_", 7) == 0)
255 /* Return the alignment in bits of EXP, a pointer valued expression.
256 But don't return more than MAX_ALIGN no matter what.
257 The alignment returned is, by default, the alignment of the thing that
258 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
260 Otherwise, look at the expression to see if we can do better, i.e., if the
261 expression is actually pointing at an object whose alignment is tighter. */
264 get_pointer_alignment (tree exp, unsigned int max_align)
266 unsigned int align, inner;
268 /* We rely on TER to compute accurate alignment information. */
269 if (!(optimize && flag_tree_ter))
272 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
275 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
276 align = MIN (align, max_align);
280 switch (TREE_CODE (exp))
284 case NON_LVALUE_EXPR:
285 exp = TREE_OPERAND (exp, 0);
286 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
289 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
290 align = MIN (inner, max_align);
293 case POINTER_PLUS_EXPR:
294 /* If sum of pointer + int, restrict our maximum alignment to that
295 imposed by the integer. If not, we can't do any better than
297 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
300 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
301 & (max_align / BITS_PER_UNIT - 1))
305 exp = TREE_OPERAND (exp, 0);
309 /* See what we are pointing at and look at its alignment. */
310 exp = TREE_OPERAND (exp, 0);
312 if (handled_component_p (exp))
314 HOST_WIDE_INT bitsize, bitpos;
316 enum machine_mode mode;
317 int unsignedp, volatilep;
319 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
320 &mode, &unsignedp, &volatilep, true);
322 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
323 if (offset && TREE_CODE (offset) == PLUS_EXPR
324 && host_integerp (TREE_OPERAND (offset, 1), 1))
326 /* Any overflow in calculating offset_bits won't change
329 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
333 inner = MIN (inner, (offset_bits & -offset_bits));
334 offset = TREE_OPERAND (offset, 0);
336 if (offset && TREE_CODE (offset) == MULT_EXPR
337 && host_integerp (TREE_OPERAND (offset, 1), 1))
339 /* Any overflow in calculating offset_factor won't change
341 unsigned offset_factor
342 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
346 inner = MIN (inner, (offset_factor & -offset_factor));
349 inner = MIN (inner, BITS_PER_UNIT);
352 align = MIN (inner, DECL_ALIGN (exp));
353 #ifdef CONSTANT_ALIGNMENT
354 else if (CONSTANT_CLASS_P (exp))
355 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
357 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
358 || TREE_CODE (exp) == INDIRECT_REF)
359 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
361 align = MIN (align, inner);
362 return MIN (align, max_align);
370 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
371 way, because it could contain a zero byte in the middle.
372 TREE_STRING_LENGTH is the size of the character array, not the string.
374 ONLY_VALUE should be nonzero if the result is not going to be emitted
375 into the instruction stream and zero if it is going to be expanded.
376 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
377 is returned, otherwise NULL, since
378 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
379 evaluate the side-effects.
381 The value returned is of type `ssizetype'.
383 Unfortunately, string_constant can't access the values of const char
384 arrays with initializers, so neither can we do so here. */
387 c_strlen (tree src, int only_value)
390 HOST_WIDE_INT offset;
395 if (TREE_CODE (src) == COND_EXPR
396 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
400 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
401 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
402 if (tree_int_cst_equal (len1, len2))
406 if (TREE_CODE (src) == COMPOUND_EXPR
407 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
408 return c_strlen (TREE_OPERAND (src, 1), only_value);
410 src = string_constant (src, &offset_node);
414 max = TREE_STRING_LENGTH (src) - 1;
415 ptr = TREE_STRING_POINTER (src);
417 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
419 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
420 compute the offset to the following null if we don't know where to
421 start searching for it. */
424 for (i = 0; i < max; i++)
428 /* We don't know the starting offset, but we do know that the string
429 has no internal zero bytes. We can assume that the offset falls
430 within the bounds of the string; otherwise, the programmer deserves
431 what he gets. Subtract the offset from the length of the string,
432 and return that. This would perhaps not be valid if we were dealing
433 with named arrays in addition to literal string constants. */
435 return size_diffop (size_int (max), offset_node);
438 /* We have a known offset into the string. Start searching there for
439 a null character if we can represent it as a single HOST_WIDE_INT. */
440 if (offset_node == 0)
442 else if (! host_integerp (offset_node, 0))
445 offset = tree_low_cst (offset_node, 0);
447 /* If the offset is known to be out of bounds, warn, and call strlen at
449 if (offset < 0 || offset > max)
451 warning (0, "offset outside bounds of constant string");
455 /* Use strlen to search for the first zero byte. Since any strings
456 constructed with build_string will have nulls appended, we win even
457 if we get handed something like (char[4])"abcd".
459 Since OFFSET is our starting index into the string, no further
460 calculation is needed. */
461 return ssize_int (strlen (ptr + offset));
464 /* Return a char pointer for a C string if it is a string constant
465 or sum of string constant and integer constant. */
472 src = string_constant (src, &offset_node);
476 if (offset_node == 0)
477 return TREE_STRING_POINTER (src);
478 else if (!host_integerp (offset_node, 1)
479 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
482 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
485 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
486 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
489 c_readstr (const char *str, enum machine_mode mode)
495 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
500 for (i = 0; i < GET_MODE_SIZE (mode); i++)
503 if (WORDS_BIG_ENDIAN)
504 j = GET_MODE_SIZE (mode) - i - 1;
505 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
506 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
507 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
509 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
512 ch = (unsigned char) str[i];
513 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
515 return immed_double_const (c[0], c[1], mode);
518 /* Cast a target constant CST to target CHAR and if that value fits into
519 host char type, return zero and put that value into variable pointed to by
523 target_char_cast (tree cst, char *p)
525 unsigned HOST_WIDE_INT val, hostval;
527 if (!host_integerp (cst, 1)
528 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
531 val = tree_low_cst (cst, 1);
532 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
533 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
536 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
537 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
546 /* Similar to save_expr, but assumes that arbitrary code is not executed
547 in between the multiple evaluations. In particular, we assume that a
548 non-addressable local variable will not be modified. */
551 builtin_save_expr (tree exp)
553 if (TREE_ADDRESSABLE (exp) == 0
554 && (TREE_CODE (exp) == PARM_DECL
555 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
558 return save_expr (exp);
561 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
562 times to get the address of either a higher stack frame, or a return
563 address located within it (depending on FNDECL_CODE). */
566 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
570 #ifdef INITIAL_FRAME_ADDRESS_RTX
571 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
575 /* For a zero count with __builtin_return_address, we don't care what
576 frame address we return, because target-specific definitions will
577 override us. Therefore frame pointer elimination is OK, and using
578 the soft frame pointer is OK.
580 For a nonzero count, or a zero count with __builtin_frame_address,
581 we require a stable offset from the current frame pointer to the
582 previous one, so we must use the hard frame pointer, and
583 we must disable frame pointer elimination. */
584 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
585 tem = frame_pointer_rtx;
588 tem = hard_frame_pointer_rtx;
590 /* Tell reload not to eliminate the frame pointer. */
591 current_function_accesses_prior_frames = 1;
595 /* Some machines need special handling before we can access
596 arbitrary frames. For example, on the SPARC, we must first flush
597 all register windows to the stack. */
598 #ifdef SETUP_FRAME_ADDRESSES
600 SETUP_FRAME_ADDRESSES ();
603 /* On the SPARC, the return address is not in the frame, it is in a
604 register. There is no way to access it off of the current frame
605 pointer, but it can be accessed off the previous frame pointer by
606 reading the value from the register window save area. */
607 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
608 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
612 /* Scan back COUNT frames to the specified frame. */
613 for (i = 0; i < count; i++)
615 /* Assume the dynamic chain pointer is in the word that the
616 frame address points to, unless otherwise specified. */
617 #ifdef DYNAMIC_CHAIN_ADDRESS
618 tem = DYNAMIC_CHAIN_ADDRESS (tem);
620 tem = memory_address (Pmode, tem);
621 tem = gen_frame_mem (Pmode, tem);
622 tem = copy_to_reg (tem);
625 /* For __builtin_frame_address, return what we've got. But, on
626 the SPARC for example, we may have to add a bias. */
627 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
628 #ifdef FRAME_ADDR_RTX
629 return FRAME_ADDR_RTX (tem);
634 /* For __builtin_return_address, get the return address from that frame. */
635 #ifdef RETURN_ADDR_RTX
636 tem = RETURN_ADDR_RTX (count, tem);
638 tem = memory_address (Pmode,
639 plus_constant (tem, GET_MODE_SIZE (Pmode)));
640 tem = gen_frame_mem (Pmode, tem);
645 /* Alias set used for setjmp buffer. */
646 static alias_set_type setjmp_alias_set = -1;
648 /* Construct the leading half of a __builtin_setjmp call. Control will
649 return to RECEIVER_LABEL. This is also called directly by the SJLJ
650 exception handling code. */
653 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
655 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
659 if (setjmp_alias_set == -1)
660 setjmp_alias_set = new_alias_set ();
662 buf_addr = convert_memory_address (Pmode, buf_addr);
664 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
666 /* We store the frame pointer and the address of receiver_label in
667 the buffer and use the rest of it for the stack save area, which
668 is machine-dependent. */
670 mem = gen_rtx_MEM (Pmode, buf_addr);
671 set_mem_alias_set (mem, setjmp_alias_set);
672 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
674 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
675 set_mem_alias_set (mem, setjmp_alias_set);
677 emit_move_insn (validize_mem (mem),
678 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
680 stack_save = gen_rtx_MEM (sa_mode,
681 plus_constant (buf_addr,
682 2 * GET_MODE_SIZE (Pmode)));
683 set_mem_alias_set (stack_save, setjmp_alias_set);
684 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
686 /* If there is further processing to do, do it. */
687 #ifdef HAVE_builtin_setjmp_setup
688 if (HAVE_builtin_setjmp_setup)
689 emit_insn (gen_builtin_setjmp_setup (buf_addr));
692 /* Tell optimize_save_area_alloca that extra work is going to
693 need to go on during alloca. */
694 current_function_calls_setjmp = 1;
696 /* We have a nonlocal label. */
697 current_function_has_nonlocal_label = 1;
700 /* Construct the trailing part of a __builtin_setjmp call. This is
701 also called directly by the SJLJ exception handling code. */
704 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
706 /* Clobber the FP when we get here, so we have to make sure it's
707 marked as used by this function. */
708 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
710 /* Mark the static chain as clobbered here so life information
711 doesn't get messed up for it. */
712 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
714 /* Now put in the code to restore the frame pointer, and argument
715 pointer, if needed. */
716 #ifdef HAVE_nonlocal_goto
717 if (! HAVE_nonlocal_goto)
720 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
721 /* This might change the hard frame pointer in ways that aren't
722 apparent to early optimization passes, so force a clobber. */
723 emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
726 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
727 if (fixed_regs[ARG_POINTER_REGNUM])
729 #ifdef ELIMINABLE_REGS
731 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
733 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
734 if (elim_regs[i].from == ARG_POINTER_REGNUM
735 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
738 if (i == ARRAY_SIZE (elim_regs))
741 /* Now restore our arg pointer from the address at which it
742 was saved in our stack frame. */
743 emit_move_insn (virtual_incoming_args_rtx,
744 copy_to_reg (get_arg_pointer_save_area ()));
749 #ifdef HAVE_builtin_setjmp_receiver
750 if (HAVE_builtin_setjmp_receiver)
751 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
754 #ifdef HAVE_nonlocal_goto_receiver
755 if (HAVE_nonlocal_goto_receiver)
756 emit_insn (gen_nonlocal_goto_receiver ());
761 /* We must not allow the code we just generated to be reordered by
762 scheduling. Specifically, the update of the frame pointer must
763 happen immediately, not later. */
764 emit_insn (gen_blockage ());
767 /* __builtin_longjmp is passed a pointer to an array of five words (not
768 all will be used on all machines). It operates similarly to the C
769 library function of the same name, but is more efficient. Much of
770 the code below is copied from the handling of non-local gotos. */
773 expand_builtin_longjmp (rtx buf_addr, rtx value)
775 rtx fp, lab, stack, insn, last;
776 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
778 if (setjmp_alias_set == -1)
779 setjmp_alias_set = new_alias_set ();
781 buf_addr = convert_memory_address (Pmode, buf_addr);
783 buf_addr = force_reg (Pmode, buf_addr);
785 /* We used to store value in static_chain_rtx, but that fails if pointers
786 are smaller than integers. We instead require that the user must pass
787 a second argument of 1, because that is what builtin_setjmp will
788 return. This also makes EH slightly more efficient, since we are no
789 longer copying around a value that we don't care about. */
790 gcc_assert (value == const1_rtx);
792 last = get_last_insn ();
793 #ifdef HAVE_builtin_longjmp
794 if (HAVE_builtin_longjmp)
795 emit_insn (gen_builtin_longjmp (buf_addr));
799 fp = gen_rtx_MEM (Pmode, buf_addr);
800 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
801 GET_MODE_SIZE (Pmode)));
803 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
804 2 * GET_MODE_SIZE (Pmode)));
805 set_mem_alias_set (fp, setjmp_alias_set);
806 set_mem_alias_set (lab, setjmp_alias_set);
807 set_mem_alias_set (stack, setjmp_alias_set);
809 /* Pick up FP, label, and SP from the block and jump. This code is
810 from expand_goto in stmt.c; see there for detailed comments. */
811 #ifdef HAVE_nonlocal_goto
812 if (HAVE_nonlocal_goto)
813 /* We have to pass a value to the nonlocal_goto pattern that will
814 get copied into the static_chain pointer, but it does not matter
815 what that value is, because builtin_setjmp does not use it. */
816 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
820 lab = copy_to_reg (lab);
822 emit_insn (gen_rtx_CLOBBER (VOIDmode,
823 gen_rtx_MEM (BLKmode,
824 gen_rtx_SCRATCH (VOIDmode))));
825 emit_insn (gen_rtx_CLOBBER (VOIDmode,
826 gen_rtx_MEM (BLKmode,
827 hard_frame_pointer_rtx)));
829 emit_move_insn (hard_frame_pointer_rtx, fp);
830 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
832 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
833 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
834 emit_indirect_jump (lab);
838 /* Search backwards and mark the jump insn as a non-local goto.
839 Note that this precludes the use of __builtin_longjmp to a
840 __builtin_setjmp target in the same function. However, we've
841 already cautioned the user that these functions are for
842 internal exception handling use only. */
843 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
845 gcc_assert (insn != last);
849 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
853 else if (CALL_P (insn))
858 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
859 and the address of the save area. */
862 expand_builtin_nonlocal_goto (tree exp)
864 tree t_label, t_save_area;
865 rtx r_label, r_save_area, r_fp, r_sp, insn;
867 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
870 t_label = CALL_EXPR_ARG (exp, 0);
871 t_save_area = CALL_EXPR_ARG (exp, 1);
873 r_label = expand_normal (t_label);
874 r_label = convert_memory_address (Pmode, r_label);
875 r_save_area = expand_normal (t_save_area);
876 r_save_area = convert_memory_address (Pmode, r_save_area);
877 r_fp = gen_rtx_MEM (Pmode, r_save_area);
878 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
879 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
881 current_function_has_nonlocal_goto = 1;
883 #ifdef HAVE_nonlocal_goto
884 /* ??? We no longer need to pass the static chain value, afaik. */
885 if (HAVE_nonlocal_goto)
886 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
890 r_label = copy_to_reg (r_label);
892 emit_insn (gen_rtx_CLOBBER (VOIDmode,
893 gen_rtx_MEM (BLKmode,
894 gen_rtx_SCRATCH (VOIDmode))));
896 emit_insn (gen_rtx_CLOBBER (VOIDmode,
897 gen_rtx_MEM (BLKmode,
898 hard_frame_pointer_rtx)));
900 /* Restore frame pointer for containing function.
901 This sets the actual hard register used for the frame pointer
902 to the location of the function's incoming static chain info.
903 The non-local goto handler will then adjust it to contain the
904 proper value and reload the argument pointer, if needed. */
905 emit_move_insn (hard_frame_pointer_rtx, r_fp);
906 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
908 /* USE of hard_frame_pointer_rtx added for consistency;
909 not clear if really needed. */
910 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
911 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
913 /* If the architecture is using a GP register, we must
914 conservatively assume that the target function makes use of it.
915 The prologue of functions with nonlocal gotos must therefore
916 initialize the GP register to the appropriate value, and we
917 must then make sure that this value is live at the point
918 of the jump. (Note that this doesn't necessarily apply
919 to targets with a nonlocal_goto pattern; they are free
920 to implement it in their own way. Note also that this is
921 a no-op if the GP register is a global invariant.) */
922 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
923 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
924 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
926 emit_indirect_jump (r_label);
929 /* Search backwards to the jump insn and mark it as a
931 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
935 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
936 const0_rtx, REG_NOTES (insn));
939 else if (CALL_P (insn))
946 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
947 (not all will be used on all machines) that was passed to __builtin_setjmp.
948 It updates the stack pointer in that block to correspond to the current
952 expand_builtin_update_setjmp_buf (rtx buf_addr)
954 enum machine_mode sa_mode = Pmode;
958 #ifdef HAVE_save_stack_nonlocal
959 if (HAVE_save_stack_nonlocal)
960 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
962 #ifdef STACK_SAVEAREA_MODE
963 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
967 = gen_rtx_MEM (sa_mode,
970 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
974 emit_insn (gen_setjmp ());
977 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
980 /* Expand a call to __builtin_prefetch. For a target that does not support
981 data prefetch, evaluate the memory address argument in case it has side
985 expand_builtin_prefetch (tree exp)
987 tree arg0, arg1, arg2;
991 if (!validate_arglist (exp, POINTER_TYPE, 0))
994 arg0 = CALL_EXPR_ARG (exp, 0);
996 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
997 zero (read) and argument 2 (locality) defaults to 3 (high degree of
999 nargs = call_expr_nargs (exp);
1001 arg1 = CALL_EXPR_ARG (exp, 1);
1003 arg1 = integer_zero_node;
1005 arg2 = CALL_EXPR_ARG (exp, 2);
1007 arg2 = build_int_cst (NULL_TREE, 3);
1009 /* Argument 0 is an address. */
1010 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1012 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1013 if (TREE_CODE (arg1) != INTEGER_CST)
1015 error ("second argument to %<__builtin_prefetch%> must be a constant");
1016 arg1 = integer_zero_node;
1018 op1 = expand_normal (arg1);
1019 /* Argument 1 must be either zero or one. */
1020 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1022 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1027 /* Argument 2 (locality) must be a compile-time constant int. */
1028 if (TREE_CODE (arg2) != INTEGER_CST)
1030 error ("third argument to %<__builtin_prefetch%> must be a constant");
1031 arg2 = integer_zero_node;
1033 op2 = expand_normal (arg2);
1034 /* Argument 2 must be 0, 1, 2, or 3. */
1035 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1037 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1041 #ifdef HAVE_prefetch
1044 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1046 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1047 || (GET_MODE (op0) != Pmode))
1049 op0 = convert_memory_address (Pmode, op0);
1050 op0 = force_reg (Pmode, op0);
1052 emit_insn (gen_prefetch (op0, op1, op2));
1056 /* Don't do anything with direct references to volatile memory, but
1057 generate code to handle other side effects. */
1058 if (!MEM_P (op0) && side_effects_p (op0))
1062 /* Get a MEM rtx for expression EXP which is the address of an operand
1063 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1064 the maximum length of the block of memory that might be accessed or
1068 get_memory_rtx (tree exp, tree len)
1070 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1071 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1073 /* Get an expression we can use to find the attributes to assign to MEM.
1074 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1075 we can. First remove any nops. */
1076 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
1077 || TREE_CODE (exp) == NON_LVALUE_EXPR)
1078 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1079 exp = TREE_OPERAND (exp, 0);
1081 if (TREE_CODE (exp) == ADDR_EXPR)
1082 exp = TREE_OPERAND (exp, 0);
1083 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1084 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1088 /* Honor attributes derived from exp, except for the alias set
1089 (as builtin stringops may alias with anything) and the size
1090 (as stringops may access multiple array elements). */
1093 set_mem_attributes (mem, exp, 0);
1095 /* Allow the string and memory builtins to overflow from one
1096 field into another, see http://gcc.gnu.org/PR23561.
1097 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1098 memory accessed by the string or memory builtin will fit
1099 within the field. */
1100 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1102 tree mem_expr = MEM_EXPR (mem);
1103 HOST_WIDE_INT offset = -1, length = -1;
1106 while (TREE_CODE (inner) == ARRAY_REF
1107 || TREE_CODE (inner) == NOP_EXPR
1108 || TREE_CODE (inner) == CONVERT_EXPR
1109 || TREE_CODE (inner) == NON_LVALUE_EXPR
1110 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1111 || TREE_CODE (inner) == SAVE_EXPR)
1112 inner = TREE_OPERAND (inner, 0);
1114 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1116 if (MEM_OFFSET (mem)
1117 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1118 offset = INTVAL (MEM_OFFSET (mem));
1120 if (offset >= 0 && len && host_integerp (len, 0))
1121 length = tree_low_cst (len, 0);
1123 while (TREE_CODE (inner) == COMPONENT_REF)
1125 tree field = TREE_OPERAND (inner, 1);
1126 gcc_assert (! DECL_BIT_FIELD (field));
1127 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1128 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1131 && TYPE_SIZE_UNIT (TREE_TYPE (inner))
1132 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
1135 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
1136 /* If we can prove the memory starting at XEXP (mem, 0)
1137 and ending at XEXP (mem, 0) + LENGTH will fit into
1138 this field, we can keep that COMPONENT_REF in MEM_EXPR. */
1141 && offset + length <= size)
1146 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1147 offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
1148 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1156 mem_expr = TREE_OPERAND (mem_expr, 0);
1157 inner = TREE_OPERAND (inner, 0);
1160 if (mem_expr == NULL)
1162 if (mem_expr != MEM_EXPR (mem))
1164 set_mem_expr (mem, mem_expr);
1165 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1168 set_mem_alias_set (mem, 0);
1169 set_mem_size (mem, NULL_RTX);
1175 /* Built-in functions to perform an untyped call and return. */
1177 /* For each register that may be used for calling a function, this
1178 gives a mode used to copy the register's value. VOIDmode indicates
1179 the register is not used for calling a function. If the machine
1180 has register windows, this gives only the outbound registers.
1181 INCOMING_REGNO gives the corresponding inbound register. */
1182 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1184 /* For each register that may be used for returning values, this gives
1185 a mode used to copy the register's value. VOIDmode indicates the
1186 register is not used for returning values. If the machine has
1187 register windows, this gives only the outbound registers.
1188 INCOMING_REGNO gives the corresponding inbound register. */
1189 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1191 /* For each register that may be used for calling a function, this
1192 gives the offset of that register into the block returned by
1193 __builtin_apply_args. 0 indicates that the register is not
1194 used for calling a function. */
1195 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1197 /* Return the size required for the block returned by __builtin_apply_args,
1198 and initialize apply_args_mode. */
1201 apply_args_size (void)
1203 static int size = -1;
1206 enum machine_mode mode;
1208 /* The values computed by this function never change. */
1211 /* The first value is the incoming arg-pointer. */
1212 size = GET_MODE_SIZE (Pmode);
1214 /* The second value is the structure value address unless this is
1215 passed as an "invisible" first argument. */
1216 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1217 size += GET_MODE_SIZE (Pmode);
1219 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1220 if (FUNCTION_ARG_REGNO_P (regno))
1222 mode = reg_raw_mode[regno];
1224 gcc_assert (mode != VOIDmode);
1226 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1227 if (size % align != 0)
1228 size = CEIL (size, align) * align;
1229 apply_args_reg_offset[regno] = size;
1230 size += GET_MODE_SIZE (mode);
1231 apply_args_mode[regno] = mode;
1235 apply_args_mode[regno] = VOIDmode;
1236 apply_args_reg_offset[regno] = 0;
1242 /* Return the size required for the block returned by __builtin_apply,
1243 and initialize apply_result_mode. */
1246 apply_result_size (void)
1248 static int size = -1;
1250 enum machine_mode mode;
1252 /* The values computed by this function never change. */
1257 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1258 if (FUNCTION_VALUE_REGNO_P (regno))
1260 mode = reg_raw_mode[regno];
1262 gcc_assert (mode != VOIDmode);
1264 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1265 if (size % align != 0)
1266 size = CEIL (size, align) * align;
1267 size += GET_MODE_SIZE (mode);
1268 apply_result_mode[regno] = mode;
1271 apply_result_mode[regno] = VOIDmode;
1273 /* Allow targets that use untyped_call and untyped_return to override
1274 the size so that machine-specific information can be stored here. */
1275 #ifdef APPLY_RESULT_SIZE
1276 size = APPLY_RESULT_SIZE;
1282 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1283 /* Create a vector describing the result block RESULT. If SAVEP is true,
1284 the result block is used to save the values; otherwise it is used to
1285 restore the values. */
1288 result_vector (int savep, rtx result)
1290 int regno, size, align, nelts;
1291 enum machine_mode mode;
1293 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1296 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1297 if ((mode = apply_result_mode[regno]) != VOIDmode)
1299 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1300 if (size % align != 0)
1301 size = CEIL (size, align) * align;
1302 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1303 mem = adjust_address (result, mode, size);
1304 savevec[nelts++] = (savep
1305 ? gen_rtx_SET (VOIDmode, mem, reg)
1306 : gen_rtx_SET (VOIDmode, reg, mem));
1307 size += GET_MODE_SIZE (mode);
1309 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1311 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1313 /* Save the state required to perform an untyped call with the same
1314 arguments as were passed to the current function. */
1317 expand_builtin_apply_args_1 (void)
1320 int size, align, regno;
1321 enum machine_mode mode;
1322 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1324 /* Create a block where the arg-pointer, structure value address,
1325 and argument registers can be saved. */
1326 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1328 /* Walk past the arg-pointer and structure value address. */
1329 size = GET_MODE_SIZE (Pmode);
1330 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1331 size += GET_MODE_SIZE (Pmode);
1333 /* Save each register used in calling a function to the block. */
1334 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1335 if ((mode = apply_args_mode[regno]) != VOIDmode)
1337 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1338 if (size % align != 0)
1339 size = CEIL (size, align) * align;
1341 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1343 emit_move_insn (adjust_address (registers, mode, size), tem);
1344 size += GET_MODE_SIZE (mode);
1347 /* Save the arg pointer to the block. */
1348 tem = copy_to_reg (virtual_incoming_args_rtx);
1349 #ifdef STACK_GROWS_DOWNWARD
1350 /* We need the pointer as the caller actually passed them to us, not
1351 as we might have pretended they were passed. Make sure it's a valid
1352 operand, as emit_move_insn isn't expected to handle a PLUS. */
1354 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1357 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1359 size = GET_MODE_SIZE (Pmode);
1361 /* Save the structure value address unless this is passed as an
1362 "invisible" first argument. */
1363 if (struct_incoming_value)
1365 emit_move_insn (adjust_address (registers, Pmode, size),
1366 copy_to_reg (struct_incoming_value));
1367 size += GET_MODE_SIZE (Pmode);
1370 /* Return the address of the block. */
1371 return copy_addr_to_reg (XEXP (registers, 0));
1374 /* __builtin_apply_args returns block of memory allocated on
1375 the stack into which is stored the arg pointer, structure
1376 value address, static chain, and all the registers that might
1377 possibly be used in performing a function call. The code is
1378 moved to the start of the function so the incoming values are
1382 expand_builtin_apply_args (void)
1384 /* Don't do __builtin_apply_args more than once in a function.
1385 Save the result of the first call and reuse it. */
1386 if (apply_args_value != 0)
1387 return apply_args_value;
1389 /* When this function is called, it means that registers must be
1390 saved on entry to this function. So we migrate the
1391 call to the first insn of this function. */
1396 temp = expand_builtin_apply_args_1 ();
1400 apply_args_value = temp;
1402 /* Put the insns after the NOTE that starts the function.
1403 If this is inside a start_sequence, make the outer-level insn
1404 chain current, so the code is placed at the start of the
1406 push_topmost_sequence ();
1407 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1408 pop_topmost_sequence ();
1413 /* Perform an untyped call and save the state required to perform an
1414 untyped return of whatever value was returned by the given function. */
1417 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1419 int size, align, regno;
1420 enum machine_mode mode;
1421 rtx incoming_args, result, reg, dest, src, call_insn;
1422 rtx old_stack_level = 0;
1423 rtx call_fusage = 0;
1424 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1426 arguments = convert_memory_address (Pmode, arguments);
1428 /* Create a block where the return registers can be saved. */
1429 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1431 /* Fetch the arg pointer from the ARGUMENTS block. */
1432 incoming_args = gen_reg_rtx (Pmode);
1433 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1434 #ifndef STACK_GROWS_DOWNWARD
1435 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1436 incoming_args, 0, OPTAB_LIB_WIDEN);
1439 /* Push a new argument block and copy the arguments. Do not allow
1440 the (potential) memcpy call below to interfere with our stack
1442 do_pending_stack_adjust ();
1445 /* Save the stack with nonlocal if available. */
1446 #ifdef HAVE_save_stack_nonlocal
1447 if (HAVE_save_stack_nonlocal)
1448 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1451 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1453 /* Allocate a block of memory onto the stack and copy the memory
1454 arguments to the outgoing arguments address. */
1455 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1456 dest = virtual_outgoing_args_rtx;
1457 #ifndef STACK_GROWS_DOWNWARD
1458 if (GET_CODE (argsize) == CONST_INT)
1459 dest = plus_constant (dest, -INTVAL (argsize));
1461 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1463 dest = gen_rtx_MEM (BLKmode, dest);
1464 set_mem_align (dest, PARM_BOUNDARY);
1465 src = gen_rtx_MEM (BLKmode, incoming_args);
1466 set_mem_align (src, PARM_BOUNDARY);
1467 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1469 /* Refer to the argument block. */
1471 arguments = gen_rtx_MEM (BLKmode, arguments);
1472 set_mem_align (arguments, PARM_BOUNDARY);
1474 /* Walk past the arg-pointer and structure value address. */
1475 size = GET_MODE_SIZE (Pmode);
1477 size += GET_MODE_SIZE (Pmode);
1479 /* Restore each of the registers previously saved. Make USE insns
1480 for each of these registers for use in making the call. */
1481 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1482 if ((mode = apply_args_mode[regno]) != VOIDmode)
1484 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1485 if (size % align != 0)
1486 size = CEIL (size, align) * align;
1487 reg = gen_rtx_REG (mode, regno);
1488 emit_move_insn (reg, adjust_address (arguments, mode, size));
1489 use_reg (&call_fusage, reg);
1490 size += GET_MODE_SIZE (mode);
1493 /* Restore the structure value address unless this is passed as an
1494 "invisible" first argument. */
1495 size = GET_MODE_SIZE (Pmode);
1498 rtx value = gen_reg_rtx (Pmode);
1499 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1500 emit_move_insn (struct_value, value);
1501 if (REG_P (struct_value))
1502 use_reg (&call_fusage, struct_value);
1503 size += GET_MODE_SIZE (Pmode);
1506 /* All arguments and registers used for the call are set up by now! */
1507 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1509 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1510 and we don't want to load it into a register as an optimization,
1511 because prepare_call_address already did it if it should be done. */
1512 if (GET_CODE (function) != SYMBOL_REF)
1513 function = memory_address (FUNCTION_MODE, function);
1515 /* Generate the actual call instruction and save the return value. */
1516 #ifdef HAVE_untyped_call
1517 if (HAVE_untyped_call)
1518 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1519 result, result_vector (1, result)));
1522 #ifdef HAVE_call_value
1523 if (HAVE_call_value)
1527 /* Locate the unique return register. It is not possible to
1528 express a call that sets more than one return register using
1529 call_value; use untyped_call for that. In fact, untyped_call
1530 only needs to save the return registers in the given block. */
1531 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1532 if ((mode = apply_result_mode[regno]) != VOIDmode)
1534 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1536 valreg = gen_rtx_REG (mode, regno);
1539 emit_call_insn (GEN_CALL_VALUE (valreg,
1540 gen_rtx_MEM (FUNCTION_MODE, function),
1541 const0_rtx, NULL_RTX, const0_rtx));
1543 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1549 /* Find the CALL insn we just emitted, and attach the register usage
1551 call_insn = last_call_insn ();
1552 add_function_usage_to (call_insn, call_fusage);
1554 /* Restore the stack. */
1555 #ifdef HAVE_save_stack_nonlocal
1556 if (HAVE_save_stack_nonlocal)
1557 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1560 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1564 /* Return the address of the result block. */
1565 result = copy_addr_to_reg (XEXP (result, 0));
1566 return convert_memory_address (ptr_mode, result);
1569 /* Perform an untyped return. */
1572 expand_builtin_return (rtx result)
1574 int size, align, regno;
1575 enum machine_mode mode;
1577 rtx call_fusage = 0;
1579 result = convert_memory_address (Pmode, result);
1581 apply_result_size ();
1582 result = gen_rtx_MEM (BLKmode, result);
1584 #ifdef HAVE_untyped_return
1585 if (HAVE_untyped_return)
1587 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1593 /* Restore the return value and note that each value is used. */
1595 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1596 if ((mode = apply_result_mode[regno]) != VOIDmode)
1598 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1599 if (size % align != 0)
1600 size = CEIL (size, align) * align;
1601 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1602 emit_move_insn (reg, adjust_address (result, mode, size));
1604 push_to_sequence (call_fusage);
1605 emit_insn (gen_rtx_USE (VOIDmode, reg));
1606 call_fusage = get_insns ();
1608 size += GET_MODE_SIZE (mode);
1611 /* Put the USE insns before the return. */
1612 emit_insn (call_fusage);
1614 /* Return whatever values was restored by jumping directly to the end
1616 expand_naked_return ();
1619 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1621 static enum type_class
1622 type_to_class (tree type)
1624 switch (TREE_CODE (type))
1626 case VOID_TYPE: return void_type_class;
1627 case INTEGER_TYPE: return integer_type_class;
1628 case ENUMERAL_TYPE: return enumeral_type_class;
1629 case BOOLEAN_TYPE: return boolean_type_class;
1630 case POINTER_TYPE: return pointer_type_class;
1631 case REFERENCE_TYPE: return reference_type_class;
1632 case OFFSET_TYPE: return offset_type_class;
1633 case REAL_TYPE: return real_type_class;
1634 case COMPLEX_TYPE: return complex_type_class;
1635 case FUNCTION_TYPE: return function_type_class;
1636 case METHOD_TYPE: return method_type_class;
1637 case RECORD_TYPE: return record_type_class;
1639 case QUAL_UNION_TYPE: return union_type_class;
1640 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1641 ? string_type_class : array_type_class);
1642 case LANG_TYPE: return lang_type_class;
1643 default: return no_type_class;
1647 /* Expand a call EXP to __builtin_classify_type. */
1650 expand_builtin_classify_type (tree exp)
1652 if (call_expr_nargs (exp))
1653 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1654 return GEN_INT (no_type_class);
1657 /* This helper macro, meant to be used in mathfn_built_in below,
1658 determines which among a set of three builtin math functions is
1659 appropriate for a given type mode. The `F' and `L' cases are
1660 automatically generated from the `double' case. */
1661 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1662 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1663 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1664 fcodel = BUILT_IN_MATHFN##L ; break;
1665 /* Similar to above, but appends _R after any F/L suffix. */
1666 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1667 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1668 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1669 fcodel = BUILT_IN_MATHFN##L_R ; break;
1671 /* Return mathematic function equivalent to FN but operating directly
1672 on TYPE, if available. If we can't do the conversion, return zero. */
1674 mathfn_built_in (tree type, enum built_in_function fn)
1676 enum built_in_function fcode, fcodef, fcodel;
1680 CASE_MATHFN (BUILT_IN_ACOS)
1681 CASE_MATHFN (BUILT_IN_ACOSH)
1682 CASE_MATHFN (BUILT_IN_ASIN)
1683 CASE_MATHFN (BUILT_IN_ASINH)
1684 CASE_MATHFN (BUILT_IN_ATAN)
1685 CASE_MATHFN (BUILT_IN_ATAN2)
1686 CASE_MATHFN (BUILT_IN_ATANH)
1687 CASE_MATHFN (BUILT_IN_CBRT)
1688 CASE_MATHFN (BUILT_IN_CEIL)
1689 CASE_MATHFN (BUILT_IN_CEXPI)
1690 CASE_MATHFN (BUILT_IN_COPYSIGN)
1691 CASE_MATHFN (BUILT_IN_COS)
1692 CASE_MATHFN (BUILT_IN_COSH)
1693 CASE_MATHFN (BUILT_IN_DREM)
1694 CASE_MATHFN (BUILT_IN_ERF)
1695 CASE_MATHFN (BUILT_IN_ERFC)
1696 CASE_MATHFN (BUILT_IN_EXP)
1697 CASE_MATHFN (BUILT_IN_EXP10)
1698 CASE_MATHFN (BUILT_IN_EXP2)
1699 CASE_MATHFN (BUILT_IN_EXPM1)
1700 CASE_MATHFN (BUILT_IN_FABS)
1701 CASE_MATHFN (BUILT_IN_FDIM)
1702 CASE_MATHFN (BUILT_IN_FLOOR)
1703 CASE_MATHFN (BUILT_IN_FMA)
1704 CASE_MATHFN (BUILT_IN_FMAX)
1705 CASE_MATHFN (BUILT_IN_FMIN)
1706 CASE_MATHFN (BUILT_IN_FMOD)
1707 CASE_MATHFN (BUILT_IN_FREXP)
1708 CASE_MATHFN (BUILT_IN_GAMMA)
1709 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1710 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1711 CASE_MATHFN (BUILT_IN_HYPOT)
1712 CASE_MATHFN (BUILT_IN_ILOGB)
1713 CASE_MATHFN (BUILT_IN_INF)
1714 CASE_MATHFN (BUILT_IN_ISINF)
1715 CASE_MATHFN (BUILT_IN_J0)
1716 CASE_MATHFN (BUILT_IN_J1)
1717 CASE_MATHFN (BUILT_IN_JN)
1718 CASE_MATHFN (BUILT_IN_LCEIL)
1719 CASE_MATHFN (BUILT_IN_LDEXP)
1720 CASE_MATHFN (BUILT_IN_LFLOOR)
1721 CASE_MATHFN (BUILT_IN_LGAMMA)
1722 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1723 CASE_MATHFN (BUILT_IN_LLCEIL)
1724 CASE_MATHFN (BUILT_IN_LLFLOOR)
1725 CASE_MATHFN (BUILT_IN_LLRINT)
1726 CASE_MATHFN (BUILT_IN_LLROUND)
1727 CASE_MATHFN (BUILT_IN_LOG)
1728 CASE_MATHFN (BUILT_IN_LOG10)
1729 CASE_MATHFN (BUILT_IN_LOG1P)
1730 CASE_MATHFN (BUILT_IN_LOG2)
1731 CASE_MATHFN (BUILT_IN_LOGB)
1732 CASE_MATHFN (BUILT_IN_LRINT)
1733 CASE_MATHFN (BUILT_IN_LROUND)
1734 CASE_MATHFN (BUILT_IN_MODF)
1735 CASE_MATHFN (BUILT_IN_NAN)
1736 CASE_MATHFN (BUILT_IN_NANS)
1737 CASE_MATHFN (BUILT_IN_NEARBYINT)
1738 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1739 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1740 CASE_MATHFN (BUILT_IN_POW)
1741 CASE_MATHFN (BUILT_IN_POWI)
1742 CASE_MATHFN (BUILT_IN_POW10)
1743 CASE_MATHFN (BUILT_IN_REMAINDER)
1744 CASE_MATHFN (BUILT_IN_REMQUO)
1745 CASE_MATHFN (BUILT_IN_RINT)
1746 CASE_MATHFN (BUILT_IN_ROUND)
1747 CASE_MATHFN (BUILT_IN_SCALB)
1748 CASE_MATHFN (BUILT_IN_SCALBLN)
1749 CASE_MATHFN (BUILT_IN_SCALBN)
1750 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1751 CASE_MATHFN (BUILT_IN_SIN)
1752 CASE_MATHFN (BUILT_IN_SINCOS)
1753 CASE_MATHFN (BUILT_IN_SINH)
1754 CASE_MATHFN (BUILT_IN_SQRT)
1755 CASE_MATHFN (BUILT_IN_TAN)
1756 CASE_MATHFN (BUILT_IN_TANH)
1757 CASE_MATHFN (BUILT_IN_TGAMMA)
1758 CASE_MATHFN (BUILT_IN_TRUNC)
1759 CASE_MATHFN (BUILT_IN_Y0)
1760 CASE_MATHFN (BUILT_IN_Y1)
1761 CASE_MATHFN (BUILT_IN_YN)
1767 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1768 return implicit_built_in_decls[fcode];
1769 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1770 return implicit_built_in_decls[fcodef];
1771 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1772 return implicit_built_in_decls[fcodel];
1777 /* If errno must be maintained, expand the RTL to check if the result,
1778 TARGET, of a built-in function call, EXP, is NaN, and if so set
1782 expand_errno_check (tree exp, rtx target)
1784 rtx lab = gen_label_rtx ();
1786 /* Test the result; if it is NaN, set errno=EDOM because
1787 the argument was not in the domain. */
1788 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1792 /* If this built-in doesn't throw an exception, set errno directly. */
1793 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1795 #ifdef GEN_ERRNO_RTX
1796 rtx errno_rtx = GEN_ERRNO_RTX;
1799 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1801 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1807 /* Make sure the library call isn't expanded as a tail call. */
1808 CALL_EXPR_TAILCALL (exp) = 0;
1810 /* We can't set errno=EDOM directly; let the library call do it.
1811 Pop the arguments right away in case the call gets deleted. */
1813 expand_call (exp, target, 0);
1818 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1819 Return NULL_RTX if a normal call should be emitted rather than expanding
1820 the function in-line. EXP is the expression that is a call to the builtin
1821 function; if convenient, the result should be placed in TARGET.
1822 SUBTARGET may be used as the target for computing one of EXP's operands. */
1825 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1827 optab builtin_optab;
1828 rtx op0, insns, before_call;
1829 tree fndecl = get_callee_fndecl (exp);
1830 enum machine_mode mode;
1831 bool errno_set = false;
1834 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1837 arg = CALL_EXPR_ARG (exp, 0);
1839 switch (DECL_FUNCTION_CODE (fndecl))
1841 CASE_FLT_FN (BUILT_IN_SQRT):
1842 errno_set = ! tree_expr_nonnegative_p (arg);
1843 builtin_optab = sqrt_optab;
1845 CASE_FLT_FN (BUILT_IN_EXP):
1846 errno_set = true; builtin_optab = exp_optab; break;
1847 CASE_FLT_FN (BUILT_IN_EXP10):
1848 CASE_FLT_FN (BUILT_IN_POW10):
1849 errno_set = true; builtin_optab = exp10_optab; break;
1850 CASE_FLT_FN (BUILT_IN_EXP2):
1851 errno_set = true; builtin_optab = exp2_optab; break;
1852 CASE_FLT_FN (BUILT_IN_EXPM1):
1853 errno_set = true; builtin_optab = expm1_optab; break;
1854 CASE_FLT_FN (BUILT_IN_LOGB):
1855 errno_set = true; builtin_optab = logb_optab; break;
1856 CASE_FLT_FN (BUILT_IN_LOG):
1857 errno_set = true; builtin_optab = log_optab; break;
1858 CASE_FLT_FN (BUILT_IN_LOG10):
1859 errno_set = true; builtin_optab = log10_optab; break;
1860 CASE_FLT_FN (BUILT_IN_LOG2):
1861 errno_set = true; builtin_optab = log2_optab; break;
1862 CASE_FLT_FN (BUILT_IN_LOG1P):
1863 errno_set = true; builtin_optab = log1p_optab; break;
1864 CASE_FLT_FN (BUILT_IN_ASIN):
1865 builtin_optab = asin_optab; break;
1866 CASE_FLT_FN (BUILT_IN_ACOS):
1867 builtin_optab = acos_optab; break;
1868 CASE_FLT_FN (BUILT_IN_TAN):
1869 builtin_optab = tan_optab; break;
1870 CASE_FLT_FN (BUILT_IN_ATAN):
1871 builtin_optab = atan_optab; break;
1872 CASE_FLT_FN (BUILT_IN_FLOOR):
1873 builtin_optab = floor_optab; break;
1874 CASE_FLT_FN (BUILT_IN_CEIL):
1875 builtin_optab = ceil_optab; break;
1876 CASE_FLT_FN (BUILT_IN_TRUNC):
1877 builtin_optab = btrunc_optab; break;
1878 CASE_FLT_FN (BUILT_IN_ROUND):
1879 builtin_optab = round_optab; break;
1880 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1881 builtin_optab = nearbyint_optab;
1882 if (flag_trapping_math)
1884 /* Else fallthrough and expand as rint. */
1885 CASE_FLT_FN (BUILT_IN_RINT):
1886 builtin_optab = rint_optab; break;
1891 /* Make a suitable register to place result in. */
1892 mode = TYPE_MODE (TREE_TYPE (exp));
1894 if (! flag_errno_math || ! HONOR_NANS (mode))
1897 /* Before working hard, check whether the instruction is available. */
1898 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1900 target = gen_reg_rtx (mode);
1902 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1903 need to expand the argument again. This way, we will not perform
1904 side-effects more the once. */
1905 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1907 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1911 /* Compute into TARGET.
1912 Set TARGET to wherever the result comes back. */
1913 target = expand_unop (mode, builtin_optab, op0, target, 0);
1918 expand_errno_check (exp, target);
1920 /* Output the entire sequence. */
1921 insns = get_insns ();
1927 /* If we were unable to expand via the builtin, stop the sequence
1928 (without outputting the insns) and call to the library function
1929 with the stabilized argument list. */
1933 before_call = get_last_insn ();
1935 target = expand_call (exp, target, target == const0_rtx);
1937 /* If this is a sqrt operation and we don't care about errno, try to
1938 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1939 This allows the semantics of the libcall to be visible to the RTL
1941 if (builtin_optab == sqrt_optab && !errno_set)
1943 /* Search backwards through the insns emitted by expand_call looking
1944 for the instruction with the REG_RETVAL note. */
1945 rtx last = get_last_insn ();
1946 while (last != before_call)
1948 if (find_reg_note (last, REG_RETVAL, NULL))
1950 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1951 /* Check that the REQ_EQUAL note is an EXPR_LIST with
1952 two elements, i.e. symbol_ref(sqrt) and the operand. */
1954 && GET_CODE (note) == EXPR_LIST
1955 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1956 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1957 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1959 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1960 /* Check operand is a register with expected mode. */
1963 && GET_MODE (operand) == mode)
1965 /* Replace the REG_EQUAL note with a SQRT rtx. */
1966 rtx equiv = gen_rtx_SQRT (mode, operand);
1967 set_unique_reg_note (last, REG_EQUAL, equiv);
1972 last = PREV_INSN (last);
1979 /* Expand a call to the builtin binary math functions (pow and atan2).
1980 Return NULL_RTX if a normal call should be emitted rather than expanding the
1981 function in-line. EXP is the expression that is a call to the builtin
1982 function; if convenient, the result should be placed in TARGET.
1983 SUBTARGET may be used as the target for computing one of EXP's
1987 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1989 optab builtin_optab;
1990 rtx op0, op1, insns;
1991 int op1_type = REAL_TYPE;
1992 tree fndecl = get_callee_fndecl (exp);
1994 enum machine_mode mode;
1995 bool errno_set = true;
1997 switch (DECL_FUNCTION_CODE (fndecl))
1999 CASE_FLT_FN (BUILT_IN_SCALBN):
2000 CASE_FLT_FN (BUILT_IN_SCALBLN):
2001 CASE_FLT_FN (BUILT_IN_LDEXP):
2002 op1_type = INTEGER_TYPE;
2007 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2010 arg0 = CALL_EXPR_ARG (exp, 0);
2011 arg1 = CALL_EXPR_ARG (exp, 1);
2013 switch (DECL_FUNCTION_CODE (fndecl))
2015 CASE_FLT_FN (BUILT_IN_POW):
2016 builtin_optab = pow_optab; break;
2017 CASE_FLT_FN (BUILT_IN_ATAN2):
2018 builtin_optab = atan2_optab; break;
2019 CASE_FLT_FN (BUILT_IN_SCALB):
2020 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2022 builtin_optab = scalb_optab; break;
2023 CASE_FLT_FN (BUILT_IN_SCALBN):
2024 CASE_FLT_FN (BUILT_IN_SCALBLN):
2025 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2027 /* Fall through... */
2028 CASE_FLT_FN (BUILT_IN_LDEXP):
2029 builtin_optab = ldexp_optab; break;
2030 CASE_FLT_FN (BUILT_IN_FMOD):
2031 builtin_optab = fmod_optab; break;
2032 CASE_FLT_FN (BUILT_IN_REMAINDER):
2033 CASE_FLT_FN (BUILT_IN_DREM):
2034 builtin_optab = remainder_optab; break;
2039 /* Make a suitable register to place result in. */
2040 mode = TYPE_MODE (TREE_TYPE (exp));
2042 /* Before working hard, check whether the instruction is available. */
2043 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2046 target = gen_reg_rtx (mode);
2048 if (! flag_errno_math || ! HONOR_NANS (mode))
2051 /* Always stabilize the argument list. */
2052 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2053 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2055 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2056 op1 = expand_normal (arg1);
2060 /* Compute into TARGET.
2061 Set TARGET to wherever the result comes back. */
2062 target = expand_binop (mode, builtin_optab, op0, op1,
2063 target, 0, OPTAB_DIRECT);
2065 /* If we were unable to expand via the builtin, stop the sequence
2066 (without outputting the insns) and call to the library function
2067 with the stabilized argument list. */
2071 return expand_call (exp, target, target == const0_rtx);
2075 expand_errno_check (exp, target);
2077 /* Output the entire sequence. */
2078 insns = get_insns ();
2085 /* Expand a call to the builtin sin and cos math functions.
2086 Return NULL_RTX if a normal call should be emitted rather than expanding the
2087 function in-line. EXP is the expression that is a call to the builtin
2088 function; if convenient, the result should be placed in TARGET.
2089 SUBTARGET may be used as the target for computing one of EXP's
2093 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2095 optab builtin_optab;
2097 tree fndecl = get_callee_fndecl (exp);
2098 enum machine_mode mode;
2101 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2104 arg = CALL_EXPR_ARG (exp, 0);
2106 switch (DECL_FUNCTION_CODE (fndecl))
2108 CASE_FLT_FN (BUILT_IN_SIN):
2109 CASE_FLT_FN (BUILT_IN_COS):
2110 builtin_optab = sincos_optab; break;
2115 /* Make a suitable register to place result in. */
2116 mode = TYPE_MODE (TREE_TYPE (exp));
2118 /* Check if sincos insn is available, otherwise fallback
2119 to sin or cos insn. */
2120 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2121 switch (DECL_FUNCTION_CODE (fndecl))
2123 CASE_FLT_FN (BUILT_IN_SIN):
2124 builtin_optab = sin_optab; break;
2125 CASE_FLT_FN (BUILT_IN_COS):
2126 builtin_optab = cos_optab; break;
2131 /* Before working hard, check whether the instruction is available. */
2132 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2134 target = gen_reg_rtx (mode);
2136 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2137 need to expand the argument again. This way, we will not perform
2138 side-effects more the once. */
2139 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2141 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2145 /* Compute into TARGET.
2146 Set TARGET to wherever the result comes back. */
2147 if (builtin_optab == sincos_optab)
2151 switch (DECL_FUNCTION_CODE (fndecl))
2153 CASE_FLT_FN (BUILT_IN_SIN):
2154 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2156 CASE_FLT_FN (BUILT_IN_COS):
2157 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2162 gcc_assert (result);
2166 target = expand_unop (mode, builtin_optab, op0, target, 0);
2171 /* Output the entire sequence. */
2172 insns = get_insns ();
2178 /* If we were unable to expand via the builtin, stop the sequence
2179 (without outputting the insns) and call to the library function
2180 with the stabilized argument list. */
2184 target = expand_call (exp, target, target == const0_rtx);
2189 /* Expand a call to one of the builtin math functions that operate on
2190 floating point argument and output an integer result (ilogb, isinf,
2192 Return 0 if a normal call should be emitted rather than expanding the
2193 function in-line. EXP is the expression that is a call to the builtin
2194 function; if convenient, the result should be placed in TARGET.
2195 SUBTARGET may be used as the target for computing one of EXP's operands. */
2198 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2200 optab builtin_optab = 0;
2201 enum insn_code icode = CODE_FOR_nothing;
2203 tree fndecl = get_callee_fndecl (exp);
2204 enum machine_mode mode;
2205 bool errno_set = false;
2208 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2211 arg = CALL_EXPR_ARG (exp, 0);
2213 switch (DECL_FUNCTION_CODE (fndecl))
2215 CASE_FLT_FN (BUILT_IN_ILOGB):
2216 errno_set = true; builtin_optab = ilogb_optab; break;
2217 CASE_FLT_FN (BUILT_IN_ISINF):
2218 builtin_optab = isinf_optab; break;
2219 case BUILT_IN_ISNORMAL:
2220 case BUILT_IN_ISFINITE:
2221 CASE_FLT_FN (BUILT_IN_FINITE):
2222 /* These builtins have no optabs (yet). */
2228 /* There's no easy way to detect the case we need to set EDOM. */
2229 if (flag_errno_math && errno_set)
2232 /* Optab mode depends on the mode of the input argument. */
2233 mode = TYPE_MODE (TREE_TYPE (arg));
2236 icode = optab_handler (builtin_optab, mode)->insn_code;
2238 /* Before working hard, check whether the instruction is available. */
2239 if (icode != CODE_FOR_nothing)
2241 /* Make a suitable register to place result in. */
2243 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2244 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2246 gcc_assert (insn_data[icode].operand[0].predicate
2247 (target, GET_MODE (target)));
2249 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2250 need to expand the argument again. This way, we will not perform
2251 side-effects more the once. */
2252 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2254 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2256 if (mode != GET_MODE (op0))
2257 op0 = convert_to_mode (mode, op0, 0);
2259 /* Compute into TARGET.
2260 Set TARGET to wherever the result comes back. */
2261 emit_unop_insn (icode, target, op0, UNKNOWN);
2265 /* If there is no optab, try generic code. */
2266 switch (DECL_FUNCTION_CODE (fndecl))
2270 CASE_FLT_FN (BUILT_IN_ISINF):
2272 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2273 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2274 tree const type = TREE_TYPE (arg);
2278 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2279 real_from_string (&r, buf);
2280 result = build_call_expr (isgr_fn, 2,
2281 fold_build1 (ABS_EXPR, type, arg),
2282 build_real (type, r));
2283 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2285 CASE_FLT_FN (BUILT_IN_FINITE):
2286 case BUILT_IN_ISFINITE:
2288 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2289 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2290 tree const type = TREE_TYPE (arg);
2294 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2295 real_from_string (&r, buf);
2296 result = build_call_expr (isle_fn, 2,
2297 fold_build1 (ABS_EXPR, type, arg),
2298 build_real (type, r));
2299 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2301 case BUILT_IN_ISNORMAL:
2303 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2304 islessequal(fabs(x),DBL_MAX). */
2305 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2306 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2307 tree const type = TREE_TYPE (arg);
2308 REAL_VALUE_TYPE rmax, rmin;
2311 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2312 real_from_string (&rmax, buf);
2313 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2314 real_from_string (&rmin, buf);
2315 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2316 result = build_call_expr (isle_fn, 2, arg,
2317 build_real (type, rmax));
2318 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2319 build_call_expr (isge_fn, 2, arg,
2320 build_real (type, rmin)));
2321 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2327 target = expand_call (exp, target, target == const0_rtx);
2332 /* Expand a call to the builtin sincos math function.
2333 Return NULL_RTX if a normal call should be emitted rather than expanding the
2334 function in-line. EXP is the expression that is a call to the builtin
2338 expand_builtin_sincos (tree exp)
2340 rtx op0, op1, op2, target1, target2;
2341 enum machine_mode mode;
2342 tree arg, sinp, cosp;
2345 if (!validate_arglist (exp, REAL_TYPE,
2346 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2349 arg = CALL_EXPR_ARG (exp, 0);
2350 sinp = CALL_EXPR_ARG (exp, 1);
2351 cosp = CALL_EXPR_ARG (exp, 2);
2353 /* Make a suitable register to place result in. */
2354 mode = TYPE_MODE (TREE_TYPE (arg));
2356 /* Check if sincos insn is available, otherwise emit the call. */
2357 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2360 target1 = gen_reg_rtx (mode);
2361 target2 = gen_reg_rtx (mode);
2363 op0 = expand_normal (arg);
2364 op1 = expand_normal (build_fold_indirect_ref (sinp));
2365 op2 = expand_normal (build_fold_indirect_ref (cosp));
2367 /* Compute into target1 and target2.
2368 Set TARGET to wherever the result comes back. */
2369 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2370 gcc_assert (result);
2372 /* Move target1 and target2 to the memory locations indicated
2374 emit_move_insn (op1, target1);
2375 emit_move_insn (op2, target2);
2380 /* Expand a call to the internal cexpi builtin to the sincos math function.
2381 EXP is the expression that is a call to the builtin function; if convenient,
2382 the result should be placed in TARGET. SUBTARGET may be used as the target
2383 for computing one of EXP's operands. */
2386 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2388 tree fndecl = get_callee_fndecl (exp);
2390 enum machine_mode mode;
2393 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2396 arg = CALL_EXPR_ARG (exp, 0);
2397 type = TREE_TYPE (arg);
2398 mode = TYPE_MODE (TREE_TYPE (arg));
2400 /* Try expanding via a sincos optab, fall back to emitting a libcall
2401 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2402 is only generated from sincos, cexp or if we have either of them. */
2403 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2405 op1 = gen_reg_rtx (mode);
2406 op2 = gen_reg_rtx (mode);
2408 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2410 /* Compute into op1 and op2. */
2411 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2413 else if (TARGET_HAS_SINCOS)
2415 tree call, fn = NULL_TREE;
2419 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2420 fn = built_in_decls[BUILT_IN_SINCOSF];
2421 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2422 fn = built_in_decls[BUILT_IN_SINCOS];
2423 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2424 fn = built_in_decls[BUILT_IN_SINCOSL];
2428 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2429 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2430 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2431 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2432 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2433 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2435 /* Make sure not to fold the sincos call again. */
2436 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2437 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2438 call, 3, arg, top1, top2));
2442 tree call, fn = NULL_TREE, narg;
2443 tree ctype = build_complex_type (type);
2445 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2446 fn = built_in_decls[BUILT_IN_CEXPF];
2447 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2448 fn = built_in_decls[BUILT_IN_CEXP];
2449 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2450 fn = built_in_decls[BUILT_IN_CEXPL];
2454 /* If we don't have a decl for cexp create one. This is the
2455 friendliest fallback if the user calls __builtin_cexpi
2456 without full target C99 function support. */
2457 if (fn == NULL_TREE)
2460 const char *name = NULL;
2462 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2464 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2466 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2469 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2470 fn = build_fn_decl (name, fntype);
2473 narg = fold_build2 (COMPLEX_EXPR, ctype,
2474 build_real (type, dconst0), arg);
2476 /* Make sure not to fold the cexp call again. */
2477 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2478 return expand_expr (build_call_nary (ctype, call, 1, narg),
2479 target, VOIDmode, EXPAND_NORMAL);
2482 /* Now build the proper return type. */
2483 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2484 make_tree (TREE_TYPE (arg), op2),
2485 make_tree (TREE_TYPE (arg), op1)),
2486 target, VOIDmode, EXPAND_NORMAL);
2489 /* Expand a call to one of the builtin rounding functions gcc defines
2490 as an extension (lfloor and lceil). As these are gcc extensions we
2491 do not need to worry about setting errno to EDOM.
2492 If expanding via optab fails, lower expression to (int)(floor(x)).
2493 EXP is the expression that is a call to the builtin function;
2494 if convenient, the result should be placed in TARGET. SUBTARGET may
2495 be used as the target for computing one of EXP's operands. */
2498 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2500 convert_optab builtin_optab;
2501 rtx op0, insns, tmp;
2502 tree fndecl = get_callee_fndecl (exp);
2503 enum built_in_function fallback_fn;
2504 tree fallback_fndecl;
2505 enum machine_mode mode;
2508 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2511 arg = CALL_EXPR_ARG (exp, 0);
2513 switch (DECL_FUNCTION_CODE (fndecl))
2515 CASE_FLT_FN (BUILT_IN_LCEIL):
2516 CASE_FLT_FN (BUILT_IN_LLCEIL):
2517 builtin_optab = lceil_optab;
2518 fallback_fn = BUILT_IN_CEIL;
2521 CASE_FLT_FN (BUILT_IN_LFLOOR):
2522 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2523 builtin_optab = lfloor_optab;
2524 fallback_fn = BUILT_IN_FLOOR;
2531 /* Make a suitable register to place result in. */
2532 mode = TYPE_MODE (TREE_TYPE (exp));
2534 target = gen_reg_rtx (mode);
2536 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2537 need to expand the argument again. This way, we will not perform
2538 side-effects more the once. */
2539 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2541 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2545 /* Compute into TARGET. */
2546 if (expand_sfix_optab (target, op0, builtin_optab))
2548 /* Output the entire sequence. */
2549 insns = get_insns ();
2555 /* If we were unable to expand via the builtin, stop the sequence
2556 (without outputting the insns). */
2559 /* Fall back to floating point rounding optab. */
2560 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2562 /* For non-C99 targets we may end up without a fallback fndecl here
2563 if the user called __builtin_lfloor directly. In this case emit
2564 a call to the floor/ceil variants nevertheless. This should result
2565 in the best user experience for not full C99 targets. */
2566 if (fallback_fndecl == NULL_TREE)
2569 const char *name = NULL;
2571 switch (DECL_FUNCTION_CODE (fndecl))
2573 case BUILT_IN_LCEIL:
2574 case BUILT_IN_LLCEIL:
2577 case BUILT_IN_LCEILF:
2578 case BUILT_IN_LLCEILF:
2581 case BUILT_IN_LCEILL:
2582 case BUILT_IN_LLCEILL:
2585 case BUILT_IN_LFLOOR:
2586 case BUILT_IN_LLFLOOR:
2589 case BUILT_IN_LFLOORF:
2590 case BUILT_IN_LLFLOORF:
2593 case BUILT_IN_LFLOORL:
2594 case BUILT_IN_LLFLOORL:
2601 fntype = build_function_type_list (TREE_TYPE (arg),
2602 TREE_TYPE (arg), NULL_TREE);
2603 fallback_fndecl = build_fn_decl (name, fntype);
2606 exp = build_call_expr (fallback_fndecl, 1, arg);
2608 tmp = expand_normal (exp);
2610 /* Truncate the result of floating point optab to integer
2611 via expand_fix (). */
2612 target = gen_reg_rtx (mode);
2613 expand_fix (target, tmp, 0);
2618 /* Expand a call to one of the builtin math functions doing integer
2620 Return 0 if a normal call should be emitted rather than expanding the
2621 function in-line. EXP is the expression that is a call to the builtin
2622 function; if convenient, the result should be placed in TARGET.
2623 SUBTARGET may be used as the target for computing one of EXP's operands. */
2626 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2628 convert_optab builtin_optab;
2630 tree fndecl = get_callee_fndecl (exp);
2632 enum machine_mode mode;
2634 /* There's no easy way to detect the case we need to set EDOM. */
2635 if (flag_errno_math)
2638 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2641 arg = CALL_EXPR_ARG (exp, 0);
2643 switch (DECL_FUNCTION_CODE (fndecl))
2645 CASE_FLT_FN (BUILT_IN_LRINT):
2646 CASE_FLT_FN (BUILT_IN_LLRINT):
2647 builtin_optab = lrint_optab; break;
2648 CASE_FLT_FN (BUILT_IN_LROUND):
2649 CASE_FLT_FN (BUILT_IN_LLROUND):
2650 builtin_optab = lround_optab; break;
2655 /* Make a suitable register to place result in. */
2656 mode = TYPE_MODE (TREE_TYPE (exp));
2658 target = gen_reg_rtx (mode);
2660 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2661 need to expand the argument again. This way, we will not perform
2662 side-effects more the once. */
2663 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2665 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2669 if (expand_sfix_optab (target, op0, builtin_optab))
2671 /* Output the entire sequence. */
2672 insns = get_insns ();
2678 /* If we were unable to expand via the builtin, stop the sequence
2679 (without outputting the insns) and call to the library function
2680 with the stabilized argument list. */
2683 target = expand_call (exp, target, target == const0_rtx);
2688 /* To evaluate powi(x,n), the floating point value x raised to the
2689 constant integer exponent n, we use a hybrid algorithm that
2690 combines the "window method" with look-up tables. For an
2691 introduction to exponentiation algorithms and "addition chains",
2692 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2693 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2694 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2695 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2697 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2698 multiplications to inline before calling the system library's pow
2699 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2700 so this default never requires calling pow, powf or powl. */
2702 #ifndef POWI_MAX_MULTS
2703 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2706 /* The size of the "optimal power tree" lookup table. All
2707 exponents less than this value are simply looked up in the
2708 powi_table below. This threshold is also used to size the
2709 cache of pseudo registers that hold intermediate results. */
2710 #define POWI_TABLE_SIZE 256
2712 /* The size, in bits of the window, used in the "window method"
2713 exponentiation algorithm. This is equivalent to a radix of
2714 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2715 #define POWI_WINDOW_SIZE 3
2717 /* The following table is an efficient representation of an
2718 "optimal power tree". For each value, i, the corresponding
2719 value, j, in the table states than an optimal evaluation
2720 sequence for calculating pow(x,i) can be found by evaluating
2721 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2722 100 integers is given in Knuth's "Seminumerical algorithms". */
2724 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2726 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2727 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2728 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2729 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2730 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2731 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2732 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2733 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2734 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2735 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2736 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2737 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2738 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2739 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2740 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2741 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2742 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2743 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2744 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2745 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2746 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2747 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2748 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2749 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2750 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2751 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2752 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2753 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2754 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2755 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2756 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2757 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2761 /* Return the number of multiplications required to calculate
2762 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2763 subroutine of powi_cost. CACHE is an array indicating
2764 which exponents have already been calculated. */
2767 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2769 /* If we've already calculated this exponent, then this evaluation
2770 doesn't require any additional multiplications. */
2775 return powi_lookup_cost (n - powi_table[n], cache)
2776 + powi_lookup_cost (powi_table[n], cache) + 1;
2779 /* Return the number of multiplications required to calculate
2780 powi(x,n) for an arbitrary x, given the exponent N. This
2781 function needs to be kept in sync with expand_powi below. */
2784 powi_cost (HOST_WIDE_INT n)
2786 bool cache[POWI_TABLE_SIZE];
2787 unsigned HOST_WIDE_INT digit;
2788 unsigned HOST_WIDE_INT val;
2794 /* Ignore the reciprocal when calculating the cost. */
2795 val = (n < 0) ? -n : n;
2797 /* Initialize the exponent cache. */
2798 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2803 while (val >= POWI_TABLE_SIZE)
2807 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2808 result += powi_lookup_cost (digit, cache)
2809 + POWI_WINDOW_SIZE + 1;
2810 val >>= POWI_WINDOW_SIZE;
2819 return result + powi_lookup_cost (val, cache);
2822 /* Recursive subroutine of expand_powi. This function takes the array,
2823 CACHE, of already calculated exponents and an exponent N and returns
2824 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2827 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2829 unsigned HOST_WIDE_INT digit;
2833 if (n < POWI_TABLE_SIZE)
2838 target = gen_reg_rtx (mode);
2841 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2842 op1 = expand_powi_1 (mode, powi_table[n], cache);
2846 target = gen_reg_rtx (mode);
2847 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2848 op0 = expand_powi_1 (mode, n - digit, cache);
2849 op1 = expand_powi_1 (mode, digit, cache);
2853 target = gen_reg_rtx (mode);
2854 op0 = expand_powi_1 (mode, n >> 1, cache);
2858 result = expand_mult (mode, op0, op1, target, 0);
2859 if (result != target)
2860 emit_move_insn (target, result);
2864 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2865 floating point operand in mode MODE, and N is the exponent. This
2866 function needs to be kept in sync with powi_cost above. */
2869 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2871 unsigned HOST_WIDE_INT val;
2872 rtx cache[POWI_TABLE_SIZE];
2876 return CONST1_RTX (mode);
2878 val = (n < 0) ? -n : n;
2880 memset (cache, 0, sizeof (cache));
2883 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2885 /* If the original exponent was negative, reciprocate the result. */
2887 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2888 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2893 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2894 a normal call should be emitted rather than expanding the function
2895 in-line. EXP is the expression that is a call to the builtin
2896 function; if convenient, the result should be placed in TARGET. */
2899 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2903 tree type = TREE_TYPE (exp);
2904 REAL_VALUE_TYPE cint, c, c2;
2907 enum machine_mode mode = TYPE_MODE (type);
2909 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2912 arg0 = CALL_EXPR_ARG (exp, 0);
2913 arg1 = CALL_EXPR_ARG (exp, 1);
2915 if (TREE_CODE (arg1) != REAL_CST
2916 || TREE_OVERFLOW (arg1))
2917 return expand_builtin_mathfn_2 (exp, target, subtarget);
2919 /* Handle constant exponents. */
2921 /* For integer valued exponents we can expand to an optimal multiplication
2922 sequence using expand_powi. */
2923 c = TREE_REAL_CST (arg1);
2924 n = real_to_integer (&c);
2925 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2926 if (real_identical (&c, &cint)
2927 && ((n >= -1 && n <= 2)
2928 || (flag_unsafe_math_optimizations
2930 && powi_cost (n) <= POWI_MAX_MULTS)))
2932 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2935 op = force_reg (mode, op);
2936 op = expand_powi (op, mode, n);
2941 narg0 = builtin_save_expr (arg0);
2943 /* If the exponent is not integer valued, check if it is half of an integer.
2944 In this case we can expand to sqrt (x) * x**(n/2). */
2945 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2946 if (fn != NULL_TREE)
2948 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2949 n = real_to_integer (&c2);
2950 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2951 if (real_identical (&c2, &cint)
2952 && ((flag_unsafe_math_optimizations
2954 && powi_cost (n/2) <= POWI_MAX_MULTS)
2957 tree call_expr = build_call_expr (fn, 1, narg0);
2958 /* Use expand_expr in case the newly built call expression
2959 was folded to a non-call. */
2960 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2963 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2964 op2 = force_reg (mode, op2);
2965 op2 = expand_powi (op2, mode, abs (n / 2));
2966 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2967 0, OPTAB_LIB_WIDEN);
2968 /* If the original exponent was negative, reciprocate the
2971 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2972 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2978 /* Try if the exponent is a third of an integer. In this case
2979 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2980 different from pow (x, 1./3.) due to rounding and behavior
2981 with negative x we need to constrain this transformation to
2982 unsafe math and positive x or finite math. */
2983 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2985 && flag_unsafe_math_optimizations
2986 && (tree_expr_nonnegative_p (arg0)
2987 || !HONOR_NANS (mode)))
2989 REAL_VALUE_TYPE dconst3;
2990 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
2991 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2992 real_round (&c2, mode, &c2);
2993 n = real_to_integer (&c2);
2994 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2995 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
2996 real_convert (&c2, mode, &c2);
2997 if (real_identical (&c2, &c)
2999 && powi_cost (n/3) <= POWI_MAX_MULTS)
3002 tree call_expr = build_call_expr (fn, 1,narg0);
3003 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3004 if (abs (n) % 3 == 2)
3005 op = expand_simple_binop (mode, MULT, op, op, op,
3006 0, OPTAB_LIB_WIDEN);
3009 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3010 op2 = force_reg (mode, op2);
3011 op2 = expand_powi (op2, mode, abs (n / 3));
3012 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3013 0, OPTAB_LIB_WIDEN);
3014 /* If the original exponent was negative, reciprocate the
3017 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3018 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3024 /* Fall back to optab expansion. */
3025 return expand_builtin_mathfn_2 (exp, target, subtarget);
3028 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3029 a normal call should be emitted rather than expanding the function
3030 in-line. EXP is the expression that is a call to the builtin
3031 function; if convenient, the result should be placed in TARGET. */
3034 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3038 enum machine_mode mode;
3039 enum machine_mode mode2;
3041 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3044 arg0 = CALL_EXPR_ARG (exp, 0);
3045 arg1 = CALL_EXPR_ARG (exp, 1);
3046 mode = TYPE_MODE (TREE_TYPE (exp));
3048 /* Handle constant power. */
3050 if (TREE_CODE (arg1) == INTEGER_CST
3051 && !TREE_OVERFLOW (arg1))
3053 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3055 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3056 Otherwise, check the number of multiplications required. */
3057 if ((TREE_INT_CST_HIGH (arg1) == 0
3058 || TREE_INT_CST_HIGH (arg1) == -1)
3059 && ((n >= -1 && n <= 2)
3061 && powi_cost (n) <= POWI_MAX_MULTS)))
3063 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3064 op0 = force_reg (mode, op0);
3065 return expand_powi (op0, mode, n);
3069 /* Emit a libcall to libgcc. */
3071 /* Mode of the 2nd argument must match that of an int. */
3072 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3074 if (target == NULL_RTX)
3075 target = gen_reg_rtx (mode);
3077 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3078 if (GET_MODE (op0) != mode)
3079 op0 = convert_to_mode (mode, op0, 0);
3080 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3081 if (GET_MODE (op1) != mode2)
3082 op1 = convert_to_mode (mode2, op1, 0);
3084 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3085 target, LCT_CONST_MAKE_BLOCK, mode, 2,
3086 op0, mode, op1, mode2);
3091 /* Expand expression EXP which is a call to the strlen builtin. Return
3092 NULL_RTX if we failed the caller should emit a normal call, otherwise
3093 try to get the result in TARGET, if convenient. */
3096 expand_builtin_strlen (tree exp, rtx target,
3097 enum machine_mode target_mode)
3099 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3105 tree src = CALL_EXPR_ARG (exp, 0);
3106 rtx result, src_reg, char_rtx, before_strlen;
3107 enum machine_mode insn_mode = target_mode, char_mode;
3108 enum insn_code icode = CODE_FOR_nothing;
3111 /* If the length can be computed at compile-time, return it. */
3112 len = c_strlen (src, 0);
3114 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3116 /* If the length can be computed at compile-time and is constant
3117 integer, but there are side-effects in src, evaluate
3118 src for side-effects, then return len.
3119 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3120 can be optimized into: i++; x = 3; */
3121 len = c_strlen (src, 1);
3122 if (len && TREE_CODE (len) == INTEGER_CST)
3124 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3125 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3128 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3130 /* If SRC is not a pointer type, don't do this operation inline. */
3134 /* Bail out if we can't compute strlen in the right mode. */
3135 while (insn_mode != VOIDmode)
3137 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3138 if (icode != CODE_FOR_nothing)
3141 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3143 if (insn_mode == VOIDmode)
3146 /* Make a place to write the result of the instruction. */
3150 && GET_MODE (result) == insn_mode
3151 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3152 result = gen_reg_rtx (insn_mode);
3154 /* Make a place to hold the source address. We will not expand
3155 the actual source until we are sure that the expansion will
3156 not fail -- there are trees that cannot be expanded twice. */
3157 src_reg = gen_reg_rtx (Pmode);
3159 /* Mark the beginning of the strlen sequence so we can emit the
3160 source operand later. */
3161 before_strlen = get_last_insn ();
3163 char_rtx = const0_rtx;
3164 char_mode = insn_data[(int) icode].operand[2].mode;
3165 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3167 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3169 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3170 char_rtx, GEN_INT (align));
3175 /* Now that we are assured of success, expand the source. */
3177 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3179 emit_move_insn (src_reg, pat);
3184 emit_insn_after (pat, before_strlen);
3186 emit_insn_before (pat, get_insns ());
3188 /* Return the value in the proper mode for this function. */
3189 if (GET_MODE (result) == target_mode)
3191 else if (target != 0)
3192 convert_move (target, result, 0);
3194 target = convert_to_mode (target_mode, result, 0);
3200 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3201 caller should emit a normal call, otherwise try to get the result
3202 in TARGET, if convenient (and in mode MODE if that's convenient). */
3205 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3207 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3209 tree type = TREE_TYPE (exp);
3210 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3211 CALL_EXPR_ARG (exp, 1), type);
3213 return expand_expr (result, target, mode, EXPAND_NORMAL);
3218 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3219 caller should emit a normal call, otherwise try to get the result
3220 in TARGET, if convenient (and in mode MODE if that's convenient). */
3223 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3225 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3227 tree type = TREE_TYPE (exp);
3228 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3229 CALL_EXPR_ARG (exp, 1), type);
3231 return expand_expr (result, target, mode, EXPAND_NORMAL);
3233 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3238 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3239 caller should emit a normal call, otherwise try to get the result
3240 in TARGET, if convenient (and in mode MODE if that's convenient). */
3243 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3245 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3247 tree type = TREE_TYPE (exp);
3248 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3249 CALL_EXPR_ARG (exp, 1), type);
3251 return expand_expr (result, target, mode, EXPAND_NORMAL);
3256 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3257 caller should emit a normal call, otherwise try to get the result
3258 in TARGET, if convenient (and in mode MODE if that's convenient). */
3261 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3263 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3265 tree type = TREE_TYPE (exp);
3266 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3267 CALL_EXPR_ARG (exp, 1), type);
3269 return expand_expr (result, target, mode, EXPAND_NORMAL);
3274 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3275 bytes from constant string DATA + OFFSET and return it as target
3279 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3280 enum machine_mode mode)
3282 const char *str = (const char *) data;
3284 gcc_assert (offset >= 0
3285 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3286 <= strlen (str) + 1));
3288 return c_readstr (str + offset, mode);
3291 /* Expand a call EXP to the memcpy builtin.
3292 Return NULL_RTX if we failed, the caller should emit a normal call,
3293 otherwise try to get the result in TARGET, if convenient (and in
3294 mode MODE if that's convenient). */
3297 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3299 tree fndecl = get_callee_fndecl (exp);
3301 if (!validate_arglist (exp,
3302 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3306 tree dest = CALL_EXPR_ARG (exp, 0);
3307 tree src = CALL_EXPR_ARG (exp, 1);
3308 tree len = CALL_EXPR_ARG (exp, 2);
3309 const char *src_str;
3310 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3311 unsigned int dest_align
3312 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3313 rtx dest_mem, src_mem, dest_addr, len_rtx;
3314 tree result = fold_builtin_memory_op (dest, src, len,
3315 TREE_TYPE (TREE_TYPE (fndecl)),
3317 HOST_WIDE_INT expected_size = -1;
3318 unsigned int expected_align = 0;
3322 while (TREE_CODE (result) == COMPOUND_EXPR)
3324 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3326 result = TREE_OPERAND (result, 1);
3328 return expand_expr (result, target, mode, EXPAND_NORMAL);
3331 /* If DEST is not a pointer type, call the normal function. */
3332 if (dest_align == 0)
3335 /* If either SRC is not a pointer type, don't do this
3336 operation in-line. */
3340 stringop_block_profile (exp, &expected_align, &expected_size);
3341 if (expected_align < dest_align)
3342 expected_align = dest_align;
3343 dest_mem = get_memory_rtx (dest, len);
3344 set_mem_align (dest_mem, dest_align);
3345 len_rtx = expand_normal (len);
3346 src_str = c_getstr (src);
3348 /* If SRC is a string constant and block move would be done
3349 by pieces, we can avoid loading the string from memory
3350 and only stored the computed constants. */
3352 && GET_CODE (len_rtx) == CONST_INT
3353 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3354 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3355 (void *) src_str, dest_align, false))
3357 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3358 builtin_memcpy_read_str,
3359 (void *) src_str, dest_align, false, 0);
3360 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3361 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3365 src_mem = get_memory_rtx (src, len);
3366 set_mem_align (src_mem, src_align);
3368 /* Copy word part most expediently. */
3369 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3370 CALL_EXPR_TAILCALL (exp)
3371 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3372 expected_align, expected_size);
3376 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3377 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3383 /* Expand a call EXP to the mempcpy builtin.
3384 Return NULL_RTX if we failed; the caller should emit a normal call,
3385 otherwise try to get the result in TARGET, if convenient (and in
3386 mode MODE if that's convenient). If ENDP is 0 return the
3387 destination pointer, if ENDP is 1 return the end pointer ala
3388 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3392 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3394 if (!validate_arglist (exp,
3395 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3399 tree dest = CALL_EXPR_ARG (exp, 0);
3400 tree src = CALL_EXPR_ARG (exp, 1);
3401 tree len = CALL_EXPR_ARG (exp, 2);
3402 return expand_builtin_mempcpy_args (dest, src, len,
3404 target, mode, /*endp=*/ 1);
3408 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3409 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3410 so that this can also be called without constructing an actual CALL_EXPR.
3411 TYPE is the return type of the call. The other arguments and return value
3412 are the same as for expand_builtin_mempcpy. */
3415 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3416 rtx target, enum machine_mode mode, int endp)
3418 /* If return value is ignored, transform mempcpy into memcpy. */
3419 if (target == const0_rtx)
3421 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3426 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3427 target, mode, EXPAND_NORMAL);
3431 const char *src_str;
3432 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3433 unsigned int dest_align
3434 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3435 rtx dest_mem, src_mem, len_rtx;
3436 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3440 while (TREE_CODE (result) == COMPOUND_EXPR)
3442 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3444 result = TREE_OPERAND (result, 1);
3446 return expand_expr (result, target, mode, EXPAND_NORMAL);
3449 /* If either SRC or DEST is not a pointer type, don't do this
3450 operation in-line. */
3451 if (dest_align == 0 || src_align == 0)
3454 /* If LEN is not constant, call the normal function. */
3455 if (! host_integerp (len, 1))
3458 len_rtx = expand_normal (len);
3459 src_str = c_getstr (src);
3461 /* If SRC is a string constant and block move would be done
3462 by pieces, we can avoid loading the string from memory
3463 and only stored the computed constants. */
3465 && GET_CODE (len_rtx) == CONST_INT
3466 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3467 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3468 (void *) src_str, dest_align, false))
3470 dest_mem = get_memory_rtx (dest, len);
3471 set_mem_align (dest_mem, dest_align);
3472 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3473 builtin_memcpy_read_str,
3474 (void *) src_str, dest_align,
3476 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3477 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3481 if (GET_CODE (len_rtx) == CONST_INT
3482 && can_move_by_pieces (INTVAL (len_rtx),
3483 MIN (dest_align, src_align)))
3485 dest_mem = get_memory_rtx (dest, len);
3486 set_mem_align (dest_mem, dest_align);
3487 src_mem = get_memory_rtx (src, len);
3488 set_mem_align (src_mem, src_align);
3489 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3490 MIN (dest_align, src_align), endp);
3491 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3492 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3500 /* Expand expression EXP, which is a call to the memmove builtin. Return
3501 NULL_RTX if we failed; the caller should emit a normal call. */
3504 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3506 if (!validate_arglist (exp,
3507 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3511 tree dest = CALL_EXPR_ARG (exp, 0);
3512 tree src = CALL_EXPR_ARG (exp, 1);
3513 tree len = CALL_EXPR_ARG (exp, 2);
3514 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3515 target, mode, ignore);
3519 /* Helper function to do the actual work for expand_builtin_memmove. The
3520 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3521 so that this can also be called without constructing an actual CALL_EXPR.
3522 TYPE is the return type of the call. The other arguments and return value
3523 are the same as for expand_builtin_memmove. */
3526 expand_builtin_memmove_args (tree dest, tree src, tree len,
3527 tree type, rtx target, enum machine_mode mode,
3530 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3534 STRIP_TYPE_NOPS (result);
3535 while (TREE_CODE (result) == COMPOUND_EXPR)
3537 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3539 result = TREE_OPERAND (result, 1);
3541 return expand_expr (result, target, mode, EXPAND_NORMAL);
3544 /* Otherwise, call the normal function. */
3548 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3549 NULL_RTX if we failed the caller should emit a normal call. */
3552 expand_builtin_bcopy (tree exp, int ignore)
3554 tree type = TREE_TYPE (exp);
3555 tree src, dest, size;
3557 if (!validate_arglist (exp,
3558 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3561 src = CALL_EXPR_ARG (exp, 0);
3562 dest = CALL_EXPR_ARG (exp, 1);
3563 size = CALL_EXPR_ARG (exp, 2);
3565 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3566 This is done this way so that if it isn't expanded inline, we fall
3567 back to calling bcopy instead of memmove. */
3568 return expand_builtin_memmove_args (dest, src,
3569 fold_convert (sizetype, size),
3570 type, const0_rtx, VOIDmode,
3575 # define HAVE_movstr 0
3576 # define CODE_FOR_movstr CODE_FOR_nothing
3579 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3580 we failed, the caller should emit a normal call, otherwise try to
3581 get the result in TARGET, if convenient. If ENDP is 0 return the
3582 destination pointer, if ENDP is 1 return the end pointer ala
3583 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3587 expand_movstr (tree dest, tree src, rtx target, int endp)
3593 const struct insn_data * data;
3598 dest_mem = get_memory_rtx (dest, NULL);
3599 src_mem = get_memory_rtx (src, NULL);
3602 target = force_reg (Pmode, XEXP (dest_mem, 0));
3603 dest_mem = replace_equiv_address (dest_mem, target);
3604 end = gen_reg_rtx (Pmode);
3608 if (target == 0 || target == const0_rtx)
3610 end = gen_reg_rtx (Pmode);
3618 data = insn_data + CODE_FOR_movstr;
3620 if (data->operand[0].mode != VOIDmode)
3621 end = gen_lowpart (data->operand[0].mode, end);
3623 insn = data->genfun (end, dest_mem, src_mem);
3629 /* movstr is supposed to set end to the address of the NUL
3630 terminator. If the caller requested a mempcpy-like return value,
3632 if (endp == 1 && target != const0_rtx)
3634 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3635 emit_move_insn (target, force_operand (tem, NULL_RTX));
3641 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3642 NULL_RTX if we failed the caller should emit a normal call, otherwise
3643 try to get the result in TARGET, if convenient (and in mode MODE if that's
3647 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3649 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3651 tree dest = CALL_EXPR_ARG (exp, 0);
3652 tree src = CALL_EXPR_ARG (exp, 1);
3653 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3658 /* Helper function to do the actual work for expand_builtin_strcpy. The
3659 arguments to the builtin_strcpy call DEST and SRC are broken out
3660 so that this can also be called without constructing an actual CALL_EXPR.
3661 The other arguments and return value are the same as for
3662 expand_builtin_strcpy. */
3665 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3666 rtx target, enum machine_mode mode)
3668 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3670 return expand_expr (result, target, mode, EXPAND_NORMAL);
3671 return expand_movstr (dest, src, target, /*endp=*/0);
3675 /* Expand a call EXP to the stpcpy builtin.
3676 Return NULL_RTX if we failed the caller should emit a normal call,
3677 otherwise try to get the result in TARGET, if convenient (and in
3678 mode MODE if that's convenient). */
3681 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3685 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3688 dst = CALL_EXPR_ARG (exp, 0);
3689 src = CALL_EXPR_ARG (exp, 1);
3691 /* If return value is ignored, transform stpcpy into strcpy. */
3692 if (target == const0_rtx)
3694 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3698 return expand_expr (build_call_expr (fn, 2, dst, src),
3699 target, mode, EXPAND_NORMAL);
3706 /* Ensure we get an actual string whose length can be evaluated at
3707 compile-time, not an expression containing a string. This is
3708 because the latter will potentially produce pessimized code
3709 when used to produce the return value. */
3710 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3711 return expand_movstr (dst, src, target, /*endp=*/2);
3713 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3714 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3715 target, mode, /*endp=*/2);
3720 if (TREE_CODE (len) == INTEGER_CST)
3722 rtx len_rtx = expand_normal (len);
3724 if (GET_CODE (len_rtx) == CONST_INT)
3726 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3727 dst, src, target, mode);
3733 if (mode != VOIDmode)
3734 target = gen_reg_rtx (mode);
3736 target = gen_reg_rtx (GET_MODE (ret));
3738 if (GET_MODE (target) != GET_MODE (ret))
3739 ret = gen_lowpart (GET_MODE (target), ret);
3741 ret = plus_constant (ret, INTVAL (len_rtx));
3742 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3750 return expand_movstr (dst, src, target, /*endp=*/2);
3754 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3755 bytes from constant string DATA + OFFSET and return it as target
3759 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3760 enum machine_mode mode)
3762 const char *str = (const char *) data;
3764 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3767 return c_readstr (str + offset, mode);
3770 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3771 NULL_RTX if we failed the caller should emit a normal call. */
3774 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3776 tree fndecl = get_callee_fndecl (exp);
3778 if (validate_arglist (exp,
3779 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3781 tree dest = CALL_EXPR_ARG (exp, 0);
3782 tree src = CALL_EXPR_ARG (exp, 1);
3783 tree len = CALL_EXPR_ARG (exp, 2);
3784 tree slen = c_strlen (src, 1);
3785 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3789 while (TREE_CODE (result) == COMPOUND_EXPR)
3791 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3793 result = TREE_OPERAND (result, 1);
3795 return expand_expr (result, target, mode, EXPAND_NORMAL);
3798 /* We must be passed a constant len and src parameter. */
3799 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3802 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3804 /* We're required to pad with trailing zeros if the requested
3805 len is greater than strlen(s2)+1. In that case try to
3806 use store_by_pieces, if it fails, punt. */
3807 if (tree_int_cst_lt (slen, len))
3809 unsigned int dest_align
3810 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3811 const char *p = c_getstr (src);
3814 if (!p || dest_align == 0 || !host_integerp (len, 1)
3815 || !can_store_by_pieces (tree_low_cst (len, 1),
3816 builtin_strncpy_read_str,
3817 (void *) p, dest_align, false))
3820 dest_mem = get_memory_rtx (dest, len);
3821 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3822 builtin_strncpy_read_str,
3823 (void *) p, dest_align, false, 0);
3824 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3825 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3832 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3833 bytes from constant string DATA + OFFSET and return it as target
3837 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3838 enum machine_mode mode)
3840 const char *c = (const char *) data;
3841 char *p = alloca (GET_MODE_SIZE (mode));
3843 memset (p, *c, GET_MODE_SIZE (mode));
3845 return c_readstr (p, mode);
3848 /* Callback routine for store_by_pieces. Return the RTL of a register
3849 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3850 char value given in the RTL register data. For example, if mode is
3851 4 bytes wide, return the RTL for 0x01010101*data. */
3854 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3855 enum machine_mode mode)
3861 size = GET_MODE_SIZE (mode);
3866 memset (p, 1, size);
3867 coeff = c_readstr (p, mode);
3869 target = convert_to_mode (mode, (rtx) data, 1);
3870 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3871 return force_reg (mode, target);
3874 /* Expand expression EXP, which is a call to the memset builtin. Return
3875 NULL_RTX if we failed the caller should emit a normal call, otherwise
3876 try to get the result in TARGET, if convenient (and in mode MODE if that's
3880 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3882 if (!validate_arglist (exp,
3883 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3887 tree dest = CALL_EXPR_ARG (exp, 0);
3888 tree val = CALL_EXPR_ARG (exp, 1);
3889 tree len = CALL_EXPR_ARG (exp, 2);
3890 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3894 /* Helper function to do the actual work for expand_builtin_memset. The
3895 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3896 so that this can also be called without constructing an actual CALL_EXPR.
3897 The other arguments and return value are the same as for
3898 expand_builtin_memset. */
3901 expand_builtin_memset_args (tree dest, tree val, tree len,
3902 rtx target, enum machine_mode mode, tree orig_exp)
3905 enum built_in_function fcode;
3907 unsigned int dest_align;
3908 rtx dest_mem, dest_addr, len_rtx;
3909 HOST_WIDE_INT expected_size = -1;
3910 unsigned int expected_align = 0;
3912 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3914 /* If DEST is not a pointer type, don't do this operation in-line. */
3915 if (dest_align == 0)
3918 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3919 if (expected_align < dest_align)
3920 expected_align = dest_align;
3922 /* If the LEN parameter is zero, return DEST. */
3923 if (integer_zerop (len))
3925 /* Evaluate and ignore VAL in case it has side-effects. */
3926 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3927 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3930 /* Stabilize the arguments in case we fail. */
3931 dest = builtin_save_expr (dest);
3932 val = builtin_save_expr (val);
3933 len = builtin_save_expr (len);
3935 len_rtx = expand_normal (len);
3936 dest_mem = get_memory_rtx (dest, len);
3938 if (TREE_CODE (val) != INTEGER_CST)
3942 val_rtx = expand_normal (val);
3943 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3946 /* Assume that we can memset by pieces if we can store
3947 * the coefficients by pieces (in the required modes).
3948 * We can't pass builtin_memset_gen_str as that emits RTL. */
3950 if (host_integerp (len, 1)
3951 && can_store_by_pieces (tree_low_cst (len, 1),
3952 builtin_memset_read_str, &c, dest_align,
3955 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3957 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3958 builtin_memset_gen_str, val_rtx, dest_align,
3961 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3962 dest_align, expected_align,
3966 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3967 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3971 if (target_char_cast (val, &c))
3976 if (host_integerp (len, 1)
3977 && can_store_by_pieces (tree_low_cst (len, 1),
3978 builtin_memset_read_str, &c, dest_align,
3980 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3981 builtin_memset_read_str, &c, dest_align, true, 0);
3982 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3983 dest_align, expected_align,
3987 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3988 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3992 set_mem_align (dest_mem, dest_align);
3993 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3994 CALL_EXPR_TAILCALL (orig_exp)
3995 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3996 expected_align, expected_size);
4000 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4001 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4007 fndecl = get_callee_fndecl (orig_exp);
4008 fcode = DECL_FUNCTION_CODE (fndecl);
4009 if (fcode == BUILT_IN_MEMSET)
4010 fn = build_call_expr (fndecl, 3, dest, val, len);
4011 else if (fcode == BUILT_IN_BZERO)
4012 fn = build_call_expr (fndecl, 2, dest, len);
4015 if (TREE_CODE (fn) == CALL_EXPR)
4016 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4017 return expand_call (fn, target, target == const0_rtx);
4020 /* Expand expression EXP, which is a call to the bzero builtin. Return
4021 NULL_RTX if we failed the caller should emit a normal call. */
4024 expand_builtin_bzero (tree exp)
4028 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4031 dest = CALL_EXPR_ARG (exp, 0);
4032 size = CALL_EXPR_ARG (exp, 1);
4034 /* New argument list transforming bzero(ptr x, int y) to
4035 memset(ptr x, int 0, size_t y). This is done this way
4036 so that if it isn't expanded inline, we fallback to
4037 calling bzero instead of memset. */
4039 return expand_builtin_memset_args (dest, integer_zero_node,
4040 fold_convert (sizetype, size),
4041 const0_rtx, VOIDmode, exp);
4044 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4045 caller should emit a normal call, otherwise try to get the result
4046 in TARGET, if convenient (and in mode MODE if that's convenient). */
4049 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4051 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4052 INTEGER_TYPE, VOID_TYPE))
4054 tree type = TREE_TYPE (exp);
4055 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4056 CALL_EXPR_ARG (exp, 1),
4057 CALL_EXPR_ARG (exp, 2), type);
4059 return expand_expr (result, target, mode, EXPAND_NORMAL);
4064 /* Expand expression EXP, which is a call to the memcmp built-in function.
4065 Return NULL_RTX if we failed and the
4066 caller should emit a normal call, otherwise try to get the result in
4067 TARGET, if convenient (and in mode MODE, if that's convenient). */
4070 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4072 if (!validate_arglist (exp,
4073 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4077 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4078 CALL_EXPR_ARG (exp, 1),
4079 CALL_EXPR_ARG (exp, 2));
4081 return expand_expr (result, target, mode, EXPAND_NORMAL);
4084 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4086 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4089 tree arg1 = CALL_EXPR_ARG (exp, 0);
4090 tree arg2 = CALL_EXPR_ARG (exp, 1);
4091 tree len = CALL_EXPR_ARG (exp, 2);
4094 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4096 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4097 enum machine_mode insn_mode;
4099 #ifdef HAVE_cmpmemsi
4101 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4104 #ifdef HAVE_cmpstrnsi
4106 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4111 /* If we don't have POINTER_TYPE, call the function. */
4112 if (arg1_align == 0 || arg2_align == 0)
4115 /* Make a place to write the result of the instruction. */
4118 && REG_P (result) && GET_MODE (result) == insn_mode
4119 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4120 result = gen_reg_rtx (insn_mode);
4122 arg1_rtx = get_memory_rtx (arg1, len);
4123 arg2_rtx = get_memory_rtx (arg2, len);
4124 arg3_rtx = expand_normal (len);
4126 /* Set MEM_SIZE as appropriate. */
4127 if (GET_CODE (arg3_rtx) == CONST_INT)
4129 set_mem_size (arg1_rtx, arg3_rtx);
4130 set_mem_size (arg2_rtx, arg3_rtx);
4133 #ifdef HAVE_cmpmemsi
4135 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4136 GEN_INT (MIN (arg1_align, arg2_align)));
4139 #ifdef HAVE_cmpstrnsi
4141 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4142 GEN_INT (MIN (arg1_align, arg2_align)));
4150 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
4151 TYPE_MODE (integer_type_node), 3,
4152 XEXP (arg1_rtx, 0), Pmode,
4153 XEXP (arg2_rtx, 0), Pmode,
4154 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4155 TYPE_UNSIGNED (sizetype)),
4156 TYPE_MODE (sizetype));
4158 /* Return the value in the proper mode for this function. */
4159 mode = TYPE_MODE (TREE_TYPE (exp));
4160 if (GET_MODE (result) == mode)
4162 else if (target != 0)
4164 convert_move (target, result, 0);
4168 return convert_to_mode (mode, result, 0);
4175 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4176 if we failed the caller should emit a normal call, otherwise try to get
4177 the result in TARGET, if convenient. */
4180 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4182 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4186 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4187 CALL_EXPR_ARG (exp, 1));
4189 return expand_expr (result, target, mode, EXPAND_NORMAL);
4192 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4193 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4194 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4196 rtx arg1_rtx, arg2_rtx;
4197 rtx result, insn = NULL_RTX;
4199 tree arg1 = CALL_EXPR_ARG (exp, 0);
4200 tree arg2 = CALL_EXPR_ARG (exp, 1);
4203 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4205 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4207 /* If we don't have POINTER_TYPE, call the function. */
4208 if (arg1_align == 0 || arg2_align == 0)
4211 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4212 arg1 = builtin_save_expr (arg1);
4213 arg2 = builtin_save_expr (arg2);
4215 arg1_rtx = get_memory_rtx (arg1, NULL);
4216 arg2_rtx = get_memory_rtx (arg2, NULL);
4218 #ifdef HAVE_cmpstrsi
4219 /* Try to call cmpstrsi. */
4222 enum machine_mode insn_mode
4223 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4225 /* Make a place to write the result of the instruction. */
4228 && REG_P (result) && GET_MODE (result) == insn_mode
4229 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4230 result = gen_reg_rtx (insn_mode);
4232 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4233 GEN_INT (MIN (arg1_align, arg2_align)));
4236 #ifdef HAVE_cmpstrnsi
4237 /* Try to determine at least one length and call cmpstrnsi. */
4238 if (!insn && HAVE_cmpstrnsi)
4243 enum machine_mode insn_mode
4244 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4245 tree len1 = c_strlen (arg1, 1);
4246 tree len2 = c_strlen (arg2, 1);
4249 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4251 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4253 /* If we don't have a constant length for the first, use the length
4254 of the second, if we know it. We don't require a constant for
4255 this case; some cost analysis could be done if both are available
4256 but neither is constant. For now, assume they're equally cheap,
4257 unless one has side effects. If both strings have constant lengths,
4264 else if (TREE_SIDE_EFFECTS (len1))
4266 else if (TREE_SIDE_EFFECTS (len2))
4268 else if (TREE_CODE (len1) != INTEGER_CST)
4270 else if (TREE_CODE (len2) != INTEGER_CST)
4272 else if (tree_int_cst_lt (len1, len2))
4277 /* If both arguments have side effects, we cannot optimize. */
4278 if (!len || TREE_SIDE_EFFECTS (len))
4281 arg3_rtx = expand_normal (len);
4283 /* Make a place to write the result of the instruction. */
4286 && REG_P (result) && GET_MODE (result) == insn_mode
4287 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4288 result = gen_reg_rtx (insn_mode);
4290 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4291 GEN_INT (MIN (arg1_align, arg2_align)));
4299 /* Return the value in the proper mode for this function. */
4300 mode = TYPE_MODE (TREE_TYPE (exp));
4301 if (GET_MODE (result) == mode)
4304 return convert_to_mode (mode, result, 0);
4305 convert_move (target, result, 0);
4309 /* Expand the library call ourselves using a stabilized argument
4310 list to avoid re-evaluating the function's arguments twice. */
4311 #ifdef HAVE_cmpstrnsi
4314 fndecl = get_callee_fndecl (exp);
4315 fn = build_call_expr (fndecl, 2, arg1, arg2);
4316 if (TREE_CODE (fn) == CALL_EXPR)
4317 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4318 return expand_call (fn, target, target == const0_rtx);
4324 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4325 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4326 the result in TARGET, if convenient. */
4329 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4331 if (!validate_arglist (exp,
4332 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4336 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4337 CALL_EXPR_ARG (exp, 1),
4338 CALL_EXPR_ARG (exp, 2));
4340 return expand_expr (result, target, mode, EXPAND_NORMAL);
4343 /* If c_strlen can determine an expression for one of the string
4344 lengths, and it doesn't have side effects, then emit cmpstrnsi
4345 using length MIN(strlen(string)+1, arg3). */
4346 #ifdef HAVE_cmpstrnsi
4349 tree len, len1, len2;
4350 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4353 tree arg1 = CALL_EXPR_ARG (exp, 0);
4354 tree arg2 = CALL_EXPR_ARG (exp, 1);
4355 tree arg3 = CALL_EXPR_ARG (exp, 2);
4358 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4360 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4361 enum machine_mode insn_mode
4362 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4364 len1 = c_strlen (arg1, 1);
4365 len2 = c_strlen (arg2, 1);
4368 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4370 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4372 /* If we don't have a constant length for the first, use the length
4373 of the second, if we know it. We don't require a constant for
4374 this case; some cost analysis could be done if both are available
4375 but neither is constant. For now, assume they're equally cheap,
4376 unless one has side effects. If both strings have constant lengths,
4383 else if (TREE_SIDE_EFFECTS (len1))
4385 else if (TREE_SIDE_EFFECTS (len2))
4387 else if (TREE_CODE (len1) != INTEGER_CST)
4389 else if (TREE_CODE (len2) != INTEGER_CST)
4391 else if (tree_int_cst_lt (len1, len2))
4396 /* If both arguments have side effects, we cannot optimize. */
4397 if (!len || TREE_SIDE_EFFECTS (len))
4400 /* The actual new length parameter is MIN(len,arg3). */
4401 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4402 fold_convert (TREE_TYPE (len), arg3));
4404 /* If we don't have POINTER_TYPE, call the function. */
4405 if (arg1_align == 0 || arg2_align == 0)
4408 /* Make a place to write the result of the instruction. */
4411 && REG_P (result) && GET_MODE (result) == insn_mode
4412 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4413 result = gen_reg_rtx (insn_mode);
4415 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4416 arg1 = builtin_save_expr (arg1);
4417 arg2 = builtin_save_expr (arg2);
4418 len = builtin_save_expr (len);
4420 arg1_rtx = get_memory_rtx (arg1, len);
4421 arg2_rtx = get_memory_rtx (arg2, len);
4422 arg3_rtx = expand_normal (len);
4423 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4424 GEN_INT (MIN (arg1_align, arg2_align)));
4429 /* Return the value in the proper mode for this function. */
4430 mode = TYPE_MODE (TREE_TYPE (exp));
4431 if (GET_MODE (result) == mode)
4434 return convert_to_mode (mode, result, 0);
4435 convert_move (target, result, 0);
4439 /* Expand the library call ourselves using a stabilized argument
4440 list to avoid re-evaluating the function's arguments twice. */
4441 fndecl = get_callee_fndecl (exp);
4442 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4443 if (TREE_CODE (fn) == CALL_EXPR)
4444 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4445 return expand_call (fn, target, target == const0_rtx);
4451 /* Expand expression EXP, which is a call to the strcat builtin.
4452 Return NULL_RTX if we failed the caller should emit a normal call,
4453 otherwise try to get the result in TARGET, if convenient. */
4456 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4458 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4462 tree dst = CALL_EXPR_ARG (exp, 0);
4463 tree src = CALL_EXPR_ARG (exp, 1);
4464 const char *p = c_getstr (src);
4466 /* If the string length is zero, return the dst parameter. */
4467 if (p && *p == '\0')
4468 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4472 /* See if we can store by pieces into (dst + strlen(dst)). */
4473 tree newsrc, newdst,
4474 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4477 /* Stabilize the argument list. */
4478 newsrc = builtin_save_expr (src);
4479 dst = builtin_save_expr (dst);
4483 /* Create strlen (dst). */
4484 newdst = build_call_expr (strlen_fn, 1, dst);
4485 /* Create (dst p+ strlen (dst)). */
4487 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4488 newdst = builtin_save_expr (newdst);
4490 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4492 end_sequence (); /* Stop sequence. */
4496 /* Output the entire sequence. */
4497 insns = get_insns ();
4501 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4508 /* Expand expression EXP, which is a call to the strncat builtin.
4509 Return NULL_RTX if we failed the caller should emit a normal call,
4510 otherwise try to get the result in TARGET, if convenient. */
4513 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4515 if (validate_arglist (exp,
4516 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4518 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4519 CALL_EXPR_ARG (exp, 1),
4520 CALL_EXPR_ARG (exp, 2));
4522 return expand_expr (result, target, mode, EXPAND_NORMAL);
4527 /* Expand expression EXP, which is a call to the strspn builtin.
4528 Return NULL_RTX if we failed the caller should emit a normal call,
4529 otherwise try to get the result in TARGET, if convenient. */
4532 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4534 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4536 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4537 CALL_EXPR_ARG (exp, 1));
4539 return expand_expr (result, target, mode, EXPAND_NORMAL);
4544 /* Expand expression EXP, which is a call to the strcspn builtin.
4545 Return NULL_RTX if we failed the caller should emit a normal call,
4546 otherwise try to get the result in TARGET, if convenient. */
4549 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4551 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4553 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4554 CALL_EXPR_ARG (exp, 1));
4556 return expand_expr (result, target, mode, EXPAND_NORMAL);
4561 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4562 if that's convenient. */
4565 expand_builtin_saveregs (void)
4569 /* Don't do __builtin_saveregs more than once in a function.
4570 Save the result of the first call and reuse it. */
4571 if (saveregs_value != 0)
4572 return saveregs_value;
4574 /* When this function is called, it means that registers must be
4575 saved on entry to this function. So we migrate the call to the
4576 first insn of this function. */
4580 /* Do whatever the machine needs done in this case. */
4581 val = targetm.calls.expand_builtin_saveregs ();
4586 saveregs_value = val;
4588 /* Put the insns after the NOTE that starts the function. If this
4589 is inside a start_sequence, make the outer-level insn chain current, so
4590 the code is placed at the start of the function. */
4591 push_topmost_sequence ();
4592 emit_insn_after (seq, entry_of_function ());
4593 pop_topmost_sequence ();
4598 /* __builtin_args_info (N) returns word N of the arg space info
4599 for the current function. The number and meanings of words
4600 is controlled by the definition of CUMULATIVE_ARGS. */
4603 expand_builtin_args_info (tree exp)
4605 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4606 int *word_ptr = (int *) &crtl->args.info;
4608 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4610 if (call_expr_nargs (exp) != 0)
4612 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4613 error ("argument of %<__builtin_args_info%> must be constant");
4616 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4618 if (wordnum < 0 || wordnum >= nwords)
4619 error ("argument of %<__builtin_args_info%> out of range");
4621 return GEN_INT (word_ptr[wordnum]);
4625 error ("missing argument in %<__builtin_args_info%>");
4630 /* Expand a call to __builtin_next_arg. */
4633 expand_builtin_next_arg (void)
4635 /* Checking arguments is already done in fold_builtin_next_arg
4636 that must be called before this function. */
4637 return expand_binop (ptr_mode, add_optab,
4638 crtl->args.internal_arg_pointer,
4639 crtl->args.arg_offset_rtx,
4640 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4643 /* Make it easier for the backends by protecting the valist argument
4644 from multiple evaluations. */
4647 stabilize_va_list (tree valist, int needs_lvalue)
4649 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4651 if (TREE_SIDE_EFFECTS (valist))
4652 valist = save_expr (valist);
4654 /* For this case, the backends will be expecting a pointer to
4655 TREE_TYPE (va_list_type_node), but it's possible we've
4656 actually been given an array (an actual va_list_type_node).
4658 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4660 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4661 valist = build_fold_addr_expr_with_type (valist, p1);
4670 if (! TREE_SIDE_EFFECTS (valist))
4673 pt = build_pointer_type (va_list_type_node);
4674 valist = fold_build1 (ADDR_EXPR, pt, valist);
4675 TREE_SIDE_EFFECTS (valist) = 1;
4678 if (TREE_SIDE_EFFECTS (valist))
4679 valist = save_expr (valist);
4680 valist = build_fold_indirect_ref (valist);
4686 /* The "standard" definition of va_list is void*. */
4689 std_build_builtin_va_list (void)
4691 return ptr_type_node;
4694 /* The "standard" implementation of va_start: just assign `nextarg' to
4698 std_expand_builtin_va_start (tree valist, rtx nextarg)
4700 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4701 convert_move (va_r, nextarg, 0);
4704 /* Expand EXP, a call to __builtin_va_start. */
4707 expand_builtin_va_start (tree exp)
4712 if (call_expr_nargs (exp) < 2)
4714 error ("too few arguments to function %<va_start%>");
4718 if (fold_builtin_next_arg (exp, true))
4721 nextarg = expand_builtin_next_arg ();
4722 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4724 if (targetm.expand_builtin_va_start)
4725 targetm.expand_builtin_va_start (valist, nextarg);
4727 std_expand_builtin_va_start (valist, nextarg);
4732 /* The "standard" implementation of va_arg: read the value from the
4733 current (padded) address and increment by the (padded) size. */
4736 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4738 tree addr, t, type_size, rounded_size, valist_tmp;
4739 unsigned HOST_WIDE_INT align, boundary;
4742 #ifdef ARGS_GROW_DOWNWARD
4743 /* All of the alignment and movement below is for args-grow-up machines.
4744 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4745 implement their own specialized gimplify_va_arg_expr routines. */
4749 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4751 type = build_pointer_type (type);
4753 align = PARM_BOUNDARY / BITS_PER_UNIT;
4754 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4756 /* Hoist the valist value into a temporary for the moment. */
4757 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4759 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4760 requires greater alignment, we must perform dynamic alignment. */
4761 if (boundary > align
4762 && !integer_zerop (TYPE_SIZE (type)))
4764 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4765 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4766 valist_tmp, size_int (boundary - 1)));
4767 gimplify_and_add (t, pre_p);
4769 t = fold_convert (sizetype, valist_tmp);
4770 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4771 fold_convert (TREE_TYPE (valist),
4772 fold_build2 (BIT_AND_EXPR, sizetype, t,
4773 size_int (-boundary))));
4774 gimplify_and_add (t, pre_p);
4779 /* If the actual alignment is less than the alignment of the type,
4780 adjust the type accordingly so that we don't assume strict alignment
4781 when deferencing the pointer. */
4782 boundary *= BITS_PER_UNIT;
4783 if (boundary < TYPE_ALIGN (type))
4785 type = build_variant_type_copy (type);
4786 TYPE_ALIGN (type) = boundary;
4789 /* Compute the rounded size of the type. */
4790 type_size = size_in_bytes (type);
4791 rounded_size = round_up (type_size, align);
4793 /* Reduce rounded_size so it's sharable with the postqueue. */
4794 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4798 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4800 /* Small args are padded downward. */
4801 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4802 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4803 size_binop (MINUS_EXPR, rounded_size, type_size));
4804 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4807 /* Compute new value for AP. */
4808 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4809 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4810 gimplify_and_add (t, pre_p);
4812 addr = fold_convert (build_pointer_type (type), addr);
4815 addr = build_va_arg_indirect_ref (addr);
4817 return build_va_arg_indirect_ref (addr);
4820 /* Build an indirect-ref expression over the given TREE, which represents a
4821 piece of a va_arg() expansion. */
4823 build_va_arg_indirect_ref (tree addr)
4825 addr = build_fold_indirect_ref (addr);
4827 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4833 /* Return a dummy expression of type TYPE in order to keep going after an
4837 dummy_object (tree type)
4839 tree t = build_int_cst (build_pointer_type (type), 0);
4840 return build1 (INDIRECT_REF, type, t);
4843 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4844 builtin function, but a very special sort of operator. */
4846 enum gimplify_status
4847 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4849 tree promoted_type, want_va_type, have_va_type;
4850 tree valist = TREE_OPERAND (*expr_p, 0);
4851 tree type = TREE_TYPE (*expr_p);
4854 /* Verify that valist is of the proper type. */
4855 want_va_type = va_list_type_node;
4856 have_va_type = TREE_TYPE (valist);
4858 if (have_va_type == error_mark_node)
4861 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4863 /* If va_list is an array type, the argument may have decayed
4864 to a pointer type, e.g. by being passed to another function.
4865 In that case, unwrap both types so that we can compare the
4866 underlying records. */
4867 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4868 || POINTER_TYPE_P (have_va_type))
4870 want_va_type = TREE_TYPE (want_va_type);
4871 have_va_type = TREE_TYPE (have_va_type);
4875 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4877 error ("first argument to %<va_arg%> not of type %<va_list%>");
4881 /* Generate a diagnostic for requesting data of a type that cannot
4882 be passed through `...' due to type promotion at the call site. */
4883 else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4886 static bool gave_help;
4888 /* Unfortunately, this is merely undefined, rather than a constraint
4889 violation, so we cannot make this an error. If this call is never
4890 executed, the program is still strictly conforming. */
4891 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4892 type, promoted_type);
4896 inform ("(so you should pass %qT not %qT to %<va_arg%>)",
4897 promoted_type, type);
4900 /* We can, however, treat "undefined" any way we please.
4901 Call abort to encourage the user to fix the program. */
4902 inform ("if this code is reached, the program will abort");
4903 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4904 append_to_statement_list (t, pre_p);
4906 /* This is dead code, but go ahead and finish so that the
4907 mode of the result comes out right. */
4908 *expr_p = dummy_object (type);
4913 /* Make it easier for the backends by protecting the valist argument
4914 from multiple evaluations. */
4915 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4917 /* For this case, the backends will be expecting a pointer to
4918 TREE_TYPE (va_list_type_node), but it's possible we've
4919 actually been given an array (an actual va_list_type_node).
4921 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4923 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4924 valist = build_fold_addr_expr_with_type (valist, p1);
4926 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4929 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4931 if (!targetm.gimplify_va_arg_expr)
4932 /* FIXME:Once most targets are converted we should merely
4933 assert this is non-null. */
4936 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4941 /* Expand EXP, a call to __builtin_va_end. */
4944 expand_builtin_va_end (tree exp)
4946 tree valist = CALL_EXPR_ARG (exp, 0);
4948 /* Evaluate for side effects, if needed. I hate macros that don't
4950 if (TREE_SIDE_EFFECTS (valist))
4951 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4956 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4957 builtin rather than just as an assignment in stdarg.h because of the
4958 nastiness of array-type va_list types. */
4961 expand_builtin_va_copy (tree exp)
4965 dst = CALL_EXPR_ARG (exp, 0);
4966 src = CALL_EXPR_ARG (exp, 1);
4968 dst = stabilize_va_list (dst, 1);
4969 src = stabilize_va_list (src, 0);
4971 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4973 t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4974 TREE_SIDE_EFFECTS (t) = 1;
4975 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4979 rtx dstb, srcb, size;
4981 /* Evaluate to pointers. */
4982 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4983 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4984 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4985 VOIDmode, EXPAND_NORMAL);
4987 dstb = convert_memory_address (Pmode, dstb);
4988 srcb = convert_memory_address (Pmode, srcb);
4990 /* "Dereference" to BLKmode memories. */
4991 dstb = gen_rtx_MEM (BLKmode, dstb);
4992 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4993 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
4994 srcb = gen_rtx_MEM (BLKmode, srcb);
4995 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4996 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
4999 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5005 /* Expand a call to one of the builtin functions __builtin_frame_address or
5006 __builtin_return_address. */
5009 expand_builtin_frame_address (tree fndecl, tree exp)
5011 /* The argument must be a nonnegative integer constant.
5012 It counts the number of frames to scan up the stack.
5013 The value is the return address saved in that frame. */
5014 if (call_expr_nargs (exp) == 0)
5015 /* Warning about missing arg was already issued. */
5017 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5019 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5020 error ("invalid argument to %<__builtin_frame_address%>");
5022 error ("invalid argument to %<__builtin_return_address%>");
5028 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5029 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5031 /* Some ports cannot access arbitrary stack frames. */
5034 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5035 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5037 warning (0, "unsupported argument to %<__builtin_return_address%>");
5041 /* For __builtin_frame_address, return what we've got. */
5042 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5046 && ! CONSTANT_P (tem))
5047 tem = copy_to_mode_reg (Pmode, tem);
5052 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5053 we failed and the caller should emit a normal call, otherwise try to get
5054 the result in TARGET, if convenient. */
5057 expand_builtin_alloca (tree exp, rtx target)
5062 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5063 should always expand to function calls. These can be intercepted
5068 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5071 /* Compute the argument. */
5072 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5074 /* Allocate the desired space. */
5075 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5076 result = convert_memory_address (ptr_mode, result);
5081 /* Expand a call to a bswap builtin with argument ARG0. MODE
5082 is the mode to expand with. */
5085 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5087 enum machine_mode mode;
5091 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5094 arg = CALL_EXPR_ARG (exp, 0);
5095 mode = TYPE_MODE (TREE_TYPE (arg));
5096 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5098 target = expand_unop (mode, bswap_optab, op0, target, 1);
5100 gcc_assert (target);
5102 return convert_to_mode (mode, target, 0);
5105 /* Expand a call to a unary builtin in EXP.
5106 Return NULL_RTX if a normal call should be emitted rather than expanding the
5107 function in-line. If convenient, the result should be placed in TARGET.
5108 SUBTARGET may be used as the target for computing one of EXP's operands. */
5111 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5112 rtx subtarget, optab op_optab)
5116 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5119 /* Compute the argument. */
5120 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5121 VOIDmode, EXPAND_NORMAL);
5122 /* Compute op, into TARGET if possible.
5123 Set TARGET to wherever the result comes back. */
5124 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5125 op_optab, op0, target, 1);
5126 gcc_assert (target);
5128 return convert_to_mode (target_mode, target, 0);
5131 /* If the string passed to fputs is a constant and is one character
5132 long, we attempt to transform this call into __builtin_fputc(). */
5135 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5137 /* Verify the arguments in the original call. */
5138 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5140 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5141 CALL_EXPR_ARG (exp, 1),
5142 (target == const0_rtx),
5143 unlocked, NULL_TREE);
5145 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5150 /* Expand a call to __builtin_expect. We just return our argument
5151 as the builtin_expect semantic should've been already executed by
5152 tree branch prediction pass. */
5155 expand_builtin_expect (tree exp, rtx target)
5159 if (call_expr_nargs (exp) < 2)
5161 arg = CALL_EXPR_ARG (exp, 0);
5162 c = CALL_EXPR_ARG (exp, 1);
5164 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5165 /* When guessing was done, the hints should be already stripped away. */
5166 gcc_assert (!flag_guess_branch_prob
5167 || optimize == 0 || errorcount || sorrycount);
5172 expand_builtin_trap (void)
5176 emit_insn (gen_trap ());
5179 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5183 /* Expand EXP, a call to fabs, fabsf or fabsl.
5184 Return NULL_RTX if a normal call should be emitted rather than expanding
5185 the function inline. If convenient, the result should be placed
5186 in TARGET. SUBTARGET may be used as the target for computing
5190 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5192 enum machine_mode mode;
5196 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5199 arg = CALL_EXPR_ARG (exp, 0);
5200 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5201 mode = TYPE_MODE (TREE_TYPE (arg));
5202 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5203 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5206 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5207 Return NULL is a normal call should be emitted rather than expanding the
5208 function inline. If convenient, the result should be placed in TARGET.
5209 SUBTARGET may be used as the target for computing the operand. */
5212 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5217 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5220 arg = CALL_EXPR_ARG (exp, 0);
5221 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5223 arg = CALL_EXPR_ARG (exp, 1);
5224 op1 = expand_normal (arg);
5226 return expand_copysign (op0, op1, target);
5229 /* Create a new constant string literal and return a char* pointer to it.
5230 The STRING_CST value is the LEN characters at STR. */
5232 build_string_literal (int len, const char *str)
5234 tree t, elem, index, type;
5236 t = build_string (len, str);
5237 elem = build_type_variant (char_type_node, 1, 0);
5238 index = build_index_type (build_int_cst (NULL_TREE, len - 1));
5239 type = build_array_type (elem, index);
5240 TREE_TYPE (t) = type;
5241 TREE_CONSTANT (t) = 1;
5242 TREE_INVARIANT (t) = 1;
5243 TREE_READONLY (t) = 1;
5244 TREE_STATIC (t) = 1;
5246 type = build_pointer_type (type);
5247 t = build1 (ADDR_EXPR, type, t);
5249 type = build_pointer_type (elem);
5250 t = build1 (NOP_EXPR, type, t);
5254 /* Expand EXP, a call to printf or printf_unlocked.
5255 Return NULL_RTX if a normal call should be emitted rather than transforming
5256 the function inline. If convenient, the result should be placed in
5257 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5260 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5263 /* If we're using an unlocked function, assume the other unlocked
5264 functions exist explicitly. */
5265 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5266 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5267 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5268 : implicit_built_in_decls[BUILT_IN_PUTS];
5269 const char *fmt_str;
5272 int nargs = call_expr_nargs (exp);
5274 /* If the return value is used, don't do the transformation. */
5275 if (target != const0_rtx)
5278 /* Verify the required arguments in the original call. */
5281 fmt = CALL_EXPR_ARG (exp, 0);
5282 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5285 /* Check whether the format is a literal string constant. */
5286 fmt_str = c_getstr (fmt);
5287 if (fmt_str == NULL)
5290 if (!init_target_chars ())
5293 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5294 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5297 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5300 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5302 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5303 else if (strcmp (fmt_str, target_percent_c) == 0)
5306 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5309 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5313 /* We can't handle anything else with % args or %% ... yet. */
5314 if (strchr (fmt_str, target_percent))
5320 /* If the format specifier was "", printf does nothing. */
5321 if (fmt_str[0] == '\0')
5323 /* If the format specifier has length of 1, call putchar. */
5324 if (fmt_str[1] == '\0')
5326 /* Given printf("c"), (where c is any one character,)
5327 convert "c"[0] to an int and pass that to the replacement
5329 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5331 fn = build_call_expr (fn_putchar, 1, arg);
5335 /* If the format specifier was "string\n", call puts("string"). */
5336 size_t len = strlen (fmt_str);
5337 if ((unsigned char)fmt_str[len - 1] == target_newline)
5339 /* Create a NUL-terminated string that's one char shorter
5340 than the original, stripping off the trailing '\n'. */
5341 char *newstr = alloca (len);
5342 memcpy (newstr, fmt_str, len - 1);
5343 newstr[len - 1] = 0;
5344 arg = build_string_literal (len, newstr);
5346 fn = build_call_expr (fn_puts, 1, arg);
5349 /* We'd like to arrange to call fputs(string,stdout) here,
5350 but we need stdout and don't have a way to get it yet. */
5357 if (TREE_CODE (fn) == CALL_EXPR)
5358 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5359 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5362 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5363 Return NULL_RTX if a normal call should be emitted rather than transforming
5364 the function inline. If convenient, the result should be placed in
5365 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5368 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5371 /* If we're using an unlocked function, assume the other unlocked
5372 functions exist explicitly. */
5373 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5374 : implicit_built_in_decls[BUILT_IN_FPUTC];
5375 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5376 : implicit_built_in_decls[BUILT_IN_FPUTS];
5377 const char *fmt_str;
5380 int nargs = call_expr_nargs (exp);
5382 /* If the return value is used, don't do the transformation. */
5383 if (target != const0_rtx)
5386 /* Verify the required arguments in the original call. */
5389 fp = CALL_EXPR_ARG (exp, 0);
5390 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5392 fmt = CALL_EXPR_ARG (exp, 1);
5393 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5396 /* Check whether the format is a literal string constant. */
5397 fmt_str = c_getstr (fmt);
5398 if (fmt_str == NULL)
5401 if (!init_target_chars ())
5404 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5405 if (strcmp (fmt_str, target_percent_s) == 0)
5408 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5410 arg = CALL_EXPR_ARG (exp, 2);
5412 fn = build_call_expr (fn_fputs, 2, arg, fp);
5414 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5415 else if (strcmp (fmt_str, target_percent_c) == 0)
5418 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5420 arg = CALL_EXPR_ARG (exp, 2);
5422 fn = build_call_expr (fn_fputc, 2, arg, fp);
5426 /* We can't handle anything else with % args or %% ... yet. */
5427 if (strchr (fmt_str, target_percent))
5433 /* If the format specifier was "", fprintf does nothing. */
5434 if (fmt_str[0] == '\0')
5436 /* Evaluate and ignore FILE* argument for side-effects. */
5437 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5441 /* When "string" doesn't contain %, replace all cases of
5442 fprintf(stream,string) with fputs(string,stream). The fputs
5443 builtin will take care of special cases like length == 1. */
5445 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5450 if (TREE_CODE (fn) == CALL_EXPR)
5451 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5452 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5455 /* Expand a call EXP to sprintf. Return NULL_RTX if
5456 a normal call should be emitted rather than expanding the function
5457 inline. If convenient, the result should be placed in TARGET with
5461 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5464 const char *fmt_str;
5465 int nargs = call_expr_nargs (exp);
5467 /* Verify the required arguments in the original call. */
5470 dest = CALL_EXPR_ARG (exp, 0);
5471 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5473 fmt = CALL_EXPR_ARG (exp, 0);
5474 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5477 /* Check whether the format is a literal string constant. */
5478 fmt_str = c_getstr (fmt);
5479 if (fmt_str == NULL)
5482 if (!init_target_chars ())
5485 /* If the format doesn't contain % args or %%, use strcpy. */
5486 if (strchr (fmt_str, target_percent) == 0)
5488 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5491 if ((nargs > 2) || ! fn)
5493 expand_expr (build_call_expr (fn, 2, dest, fmt),
5494 const0_rtx, VOIDmode, EXPAND_NORMAL);
5495 if (target == const0_rtx)
5497 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5498 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5500 /* If the format is "%s", use strcpy if the result isn't used. */
5501 else if (strcmp (fmt_str, target_percent_s) == 0)
5504 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5510 arg = CALL_EXPR_ARG (exp, 2);
5511 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5514 if (target != const0_rtx)
5516 len = c_strlen (arg, 1);
5517 if (! len || TREE_CODE (len) != INTEGER_CST)
5523 expand_expr (build_call_expr (fn, 2, dest, arg),
5524 const0_rtx, VOIDmode, EXPAND_NORMAL);
5526 if (target == const0_rtx)
5528 return expand_expr (len, target, mode, EXPAND_NORMAL);
5534 /* Expand a call to either the entry or exit function profiler. */
5537 expand_builtin_profile_func (bool exitp)
5541 this = DECL_RTL (current_function_decl);
5542 gcc_assert (MEM_P (this));
5543 this = XEXP (this, 0);
5546 which = profile_function_exit_libfunc;
5548 which = profile_function_entry_libfunc;
5550 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5551 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5558 /* Expand a call to __builtin___clear_cache. */
5561 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5563 #ifndef HAVE_clear_cache
5564 #ifdef CLEAR_INSN_CACHE
5565 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5566 does something. Just do the default expansion to a call to
5570 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5571 does nothing. There is no need to call it. Do nothing. */
5573 #endif /* CLEAR_INSN_CACHE */
5575 /* We have a "clear_cache" insn, and it will handle everything. */
5577 rtx begin_rtx, end_rtx;
5578 enum insn_code icode;
5580 /* We must not expand to a library call. If we did, any
5581 fallback library function in libgcc that might contain a call to
5582 __builtin___clear_cache() would recurse infinitely. */
5583 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5585 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5589 if (HAVE_clear_cache)
5591 icode = CODE_FOR_clear_cache;
5593 begin = CALL_EXPR_ARG (exp, 0);
5594 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5595 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5596 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5597 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5599 end = CALL_EXPR_ARG (exp, 1);
5600 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5601 end_rtx = convert_memory_address (Pmode, end_rtx);
5602 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5603 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5605 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5608 #endif /* HAVE_clear_cache */
5611 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5614 round_trampoline_addr (rtx tramp)
5616 rtx temp, addend, mask;
5618 /* If we don't need too much alignment, we'll have been guaranteed
5619 proper alignment by get_trampoline_type. */
5620 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5623 /* Round address up to desired boundary. */
5624 temp = gen_reg_rtx (Pmode);
5625 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5626 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5628 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5629 temp, 0, OPTAB_LIB_WIDEN);
5630 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5631 temp, 0, OPTAB_LIB_WIDEN);
5637 expand_builtin_init_trampoline (tree exp)
5639 tree t_tramp, t_func, t_chain;
5640 rtx r_tramp, r_func, r_chain;
5641 #ifdef TRAMPOLINE_TEMPLATE
5645 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5646 POINTER_TYPE, VOID_TYPE))
5649 t_tramp = CALL_EXPR_ARG (exp, 0);
5650 t_func = CALL_EXPR_ARG (exp, 1);
5651 t_chain = CALL_EXPR_ARG (exp, 2);
5653 r_tramp = expand_normal (t_tramp);
5654 r_func = expand_normal (t_func);
5655 r_chain = expand_normal (t_chain);
5657 /* Generate insns to initialize the trampoline. */
5658 r_tramp = round_trampoline_addr (r_tramp);
5659 #ifdef TRAMPOLINE_TEMPLATE
5660 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5661 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5662 emit_block_move (blktramp, assemble_trampoline_template (),
5663 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5665 trampolines_created = 1;
5666 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5672 expand_builtin_adjust_trampoline (tree exp)
5676 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5679 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5680 tramp = round_trampoline_addr (tramp);
5681 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5682 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5688 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5689 function. The function first checks whether the back end provides
5690 an insn to implement signbit for the respective mode. If not, it
5691 checks whether the floating point format of the value is such that
5692 the sign bit can be extracted. If that is not the case, the
5693 function returns NULL_RTX to indicate that a normal call should be
5694 emitted rather than expanding the function in-line. EXP is the
5695 expression that is a call to the builtin function; if convenient,
5696 the result should be placed in TARGET. */
5698 expand_builtin_signbit (tree exp, rtx target)
5700 const struct real_format *fmt;
5701 enum machine_mode fmode, imode, rmode;
5702 HOST_WIDE_INT hi, lo;
5705 enum insn_code icode;
5708 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5711 arg = CALL_EXPR_ARG (exp, 0);
5712 fmode = TYPE_MODE (TREE_TYPE (arg));
5713 rmode = TYPE_MODE (TREE_TYPE (exp));
5714 fmt = REAL_MODE_FORMAT (fmode);
5716 arg = builtin_save_expr (arg);
5718 /* Expand the argument yielding a RTX expression. */
5719 temp = expand_normal (arg);
5721 /* Check if the back end provides an insn that handles signbit for the
5723 icode = signbit_optab->handlers [(int) fmode].insn_code;
5724 if (icode != CODE_FOR_nothing)
5726 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5727 emit_unop_insn (icode, target, temp, UNKNOWN);
5731 /* For floating point formats without a sign bit, implement signbit
5733 bitpos = fmt->signbit_ro;
5736 /* But we can't do this if the format supports signed zero. */
5737 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5740 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5741 build_real (TREE_TYPE (arg), dconst0));
5742 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5745 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5747 imode = int_mode_for_mode (fmode);
5748 if (imode == BLKmode)
5750 temp = gen_lowpart (imode, temp);
5755 /* Handle targets with different FP word orders. */
5756 if (FLOAT_WORDS_BIG_ENDIAN)
5757 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5759 word = bitpos / BITS_PER_WORD;
5760 temp = operand_subword_force (temp, word, fmode);
5761 bitpos = bitpos % BITS_PER_WORD;
5764 /* Force the intermediate word_mode (or narrower) result into a
5765 register. This avoids attempting to create paradoxical SUBREGs
5766 of floating point modes below. */
5767 temp = force_reg (imode, temp);
5769 /* If the bitpos is within the "result mode" lowpart, the operation
5770 can be implement with a single bitwise AND. Otherwise, we need
5771 a right shift and an AND. */
5773 if (bitpos < GET_MODE_BITSIZE (rmode))
5775 if (bitpos < HOST_BITS_PER_WIDE_INT)
5778 lo = (HOST_WIDE_INT) 1 << bitpos;
5782 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5787 temp = gen_lowpart (rmode, temp);
5788 temp = expand_binop (rmode, and_optab, temp,
5789 immed_double_const (lo, hi, rmode),
5790 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5794 /* Perform a logical right shift to place the signbit in the least
5795 significant bit, then truncate the result to the desired mode
5796 and mask just this bit. */
5797 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5798 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5799 temp = gen_lowpart (rmode, temp);
5800 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5801 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5807 /* Expand fork or exec calls. TARGET is the desired target of the
5808 call. EXP is the call. FN is the
5809 identificator of the actual function. IGNORE is nonzero if the
5810 value is to be ignored. */
5813 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5818 /* If we are not profiling, just call the function. */
5819 if (!profile_arc_flag)
5822 /* Otherwise call the wrapper. This should be equivalent for the rest of
5823 compiler, so the code does not diverge, and the wrapper may run the
5824 code necessary for keeping the profiling sane. */
5826 switch (DECL_FUNCTION_CODE (fn))
5829 id = get_identifier ("__gcov_fork");
5832 case BUILT_IN_EXECL:
5833 id = get_identifier ("__gcov_execl");
5836 case BUILT_IN_EXECV:
5837 id = get_identifier ("__gcov_execv");
5840 case BUILT_IN_EXECLP:
5841 id = get_identifier ("__gcov_execlp");
5844 case BUILT_IN_EXECLE:
5845 id = get_identifier ("__gcov_execle");
5848 case BUILT_IN_EXECVP:
5849 id = get_identifier ("__gcov_execvp");
5852 case BUILT_IN_EXECVE:
5853 id = get_identifier ("__gcov_execve");
5860 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5861 DECL_EXTERNAL (decl) = 1;
5862 TREE_PUBLIC (decl) = 1;
5863 DECL_ARTIFICIAL (decl) = 1;
5864 TREE_NOTHROW (decl) = 1;
5865 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5866 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5867 call = rewrite_call_expr (exp, 0, decl, 0);
5868 return expand_call (call, target, ignore);
5873 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5874 the pointer in these functions is void*, the tree optimizers may remove
5875 casts. The mode computed in expand_builtin isn't reliable either, due
5876 to __sync_bool_compare_and_swap.
5878 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5879 group of builtins. This gives us log2 of the mode size. */
5881 static inline enum machine_mode
5882 get_builtin_sync_mode (int fcode_diff)
5884 /* The size is not negotiable, so ask not to get BLKmode in return
5885 if the target indicates that a smaller size would be better. */
5886 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5889 /* Expand the memory expression LOC and return the appropriate memory operand
5890 for the builtin_sync operations. */
5893 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5897 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5899 /* Note that we explicitly do not want any alias information for this
5900 memory, so that we kill all other live memories. Otherwise we don't
5901 satisfy the full barrier semantics of the intrinsic. */
5902 mem = validize_mem (gen_rtx_MEM (mode, addr));
5904 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5905 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5906 MEM_VOLATILE_P (mem) = 1;
5911 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5912 EXP is the CALL_EXPR. CODE is the rtx code
5913 that corresponds to the arithmetic or logical operation from the name;
5914 an exception here is that NOT actually means NAND. TARGET is an optional
5915 place for us to store the results; AFTER is true if this is the
5916 fetch_and_xxx form. IGNORE is true if we don't actually care about
5917 the result of the operation at all. */
5920 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5921 enum rtx_code code, bool after,
5922 rtx target, bool ignore)
5925 enum machine_mode old_mode;
5927 /* Expand the operands. */
5928 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5930 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5931 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5932 of CONST_INTs, where we know the old_mode only from the call argument. */
5933 old_mode = GET_MODE (val);
5934 if (old_mode == VOIDmode)
5935 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5936 val = convert_modes (mode, old_mode, val, 1);
5939 return expand_sync_operation (mem, val, code);
5941 return expand_sync_fetch_operation (mem, val, code, after, target);
5944 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5945 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5946 true if this is the boolean form. TARGET is a place for us to store the
5947 results; this is NOT optional if IS_BOOL is true. */
5950 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5951 bool is_bool, rtx target)
5953 rtx old_val, new_val, mem;
5954 enum machine_mode old_mode;
5956 /* Expand the operands. */
5957 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5960 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5961 mode, EXPAND_NORMAL);
5962 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5963 of CONST_INTs, where we know the old_mode only from the call argument. */
5964 old_mode = GET_MODE (old_val);
5965 if (old_mode == VOIDmode)
5966 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5967 old_val = convert_modes (mode, old_mode, old_val, 1);
5969 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5970 mode, EXPAND_NORMAL);
5971 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5972 of CONST_INTs, where we know the old_mode only from the call argument. */
5973 old_mode = GET_MODE (new_val);
5974 if (old_mode == VOIDmode)
5975 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5976 new_val = convert_modes (mode, old_mode, new_val, 1);
5979 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5981 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5984 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5985 general form is actually an atomic exchange, and some targets only
5986 support a reduced form with the second argument being a constant 1.
5987 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5991 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5995 enum machine_mode old_mode;
5997 /* Expand the operands. */
5998 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5999 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6000 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6001 of CONST_INTs, where we know the old_mode only from the call argument. */
6002 old_mode = GET_MODE (val);
6003 if (old_mode == VOIDmode)
6004 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6005 val = convert_modes (mode, old_mode, val, 1);
6007 return expand_sync_lock_test_and_set (mem, val, target);
6010 /* Expand the __sync_synchronize intrinsic. */
6013 expand_builtin_synchronize (void)
6017 #ifdef HAVE_memory_barrier
6018 if (HAVE_memory_barrier)
6020 emit_insn (gen_memory_barrier ());
6025 /* If no explicit memory barrier instruction is available, create an
6026 empty asm stmt with a memory clobber. */
6027 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6028 tree_cons (NULL, build_string (6, "memory"), NULL));
6029 ASM_VOLATILE_P (x) = 1;
6030 expand_asm_expr (x);
6033 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6036 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6038 enum insn_code icode;
6040 rtx val = const0_rtx;
6042 /* Expand the operands. */
6043 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6045 /* If there is an explicit operation in the md file, use it. */
6046 icode = sync_lock_release[mode];
6047 if (icode != CODE_FOR_nothing)
6049 if (!insn_data[icode].operand[1].predicate (val, mode))
6050 val = force_reg (mode, val);
6052 insn = GEN_FCN (icode) (mem, val);
6060 /* Otherwise we can implement this operation by emitting a barrier
6061 followed by a store of zero. */
6062 expand_builtin_synchronize ();
6063 emit_move_insn (mem, val);
6066 /* Expand an expression EXP that calls a built-in function,
6067 with result going to TARGET if that's convenient
6068 (and in mode MODE if that's convenient).
6069 SUBTARGET may be used as the target for computing one of EXP's operands.
6070 IGNORE is nonzero if the value is to be ignored. */
6073 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6076 tree fndecl = get_callee_fndecl (exp);
6077 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6078 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6080 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6081 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6083 /* When not optimizing, generate calls to library functions for a certain
6086 && !called_as_built_in (fndecl)
6087 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6088 && fcode != BUILT_IN_ALLOCA)
6089 return expand_call (exp, target, ignore);
6091 /* The built-in function expanders test for target == const0_rtx
6092 to determine whether the function's result will be ignored. */
6094 target = const0_rtx;
6096 /* If the result of a pure or const built-in function is ignored, and
6097 none of its arguments are volatile, we can avoid expanding the
6098 built-in call and just evaluate the arguments for side-effects. */
6099 if (target == const0_rtx
6100 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
6102 bool volatilep = false;
6104 call_expr_arg_iterator iter;
6106 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6107 if (TREE_THIS_VOLATILE (arg))
6115 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6116 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6123 CASE_FLT_FN (BUILT_IN_FABS):
6124 target = expand_builtin_fabs (exp, target, subtarget);
6129 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6130 target = expand_builtin_copysign (exp, target, subtarget);
6135 /* Just do a normal library call if we were unable to fold
6137 CASE_FLT_FN (BUILT_IN_CABS):
6140 CASE_FLT_FN (BUILT_IN_EXP):
6141 CASE_FLT_FN (BUILT_IN_EXP10):
6142 CASE_FLT_FN (BUILT_IN_POW10):
6143 CASE_FLT_FN (BUILT_IN_EXP2):
6144 CASE_FLT_FN (BUILT_IN_EXPM1):
6145 CASE_FLT_FN (BUILT_IN_LOGB):
6146 CASE_FLT_FN (BUILT_IN_LOG):
6147 CASE_FLT_FN (BUILT_IN_LOG10):
6148 CASE_FLT_FN (BUILT_IN_LOG2):
6149 CASE_FLT_FN (BUILT_IN_LOG1P):
6150 CASE_FLT_FN (BUILT_IN_TAN):
6151 CASE_FLT_FN (BUILT_IN_ASIN):
6152 CASE_FLT_FN (BUILT_IN_ACOS):
6153 CASE_FLT_FN (BUILT_IN_ATAN):
6154 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6155 because of possible accuracy problems. */
6156 if (! flag_unsafe_math_optimizations)
6158 CASE_FLT_FN (BUILT_IN_SQRT):
6159 CASE_FLT_FN (BUILT_IN_FLOOR):
6160 CASE_FLT_FN (BUILT_IN_CEIL):
6161 CASE_FLT_FN (BUILT_IN_TRUNC):
6162 CASE_FLT_FN (BUILT_IN_ROUND):
6163 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6164 CASE_FLT_FN (BUILT_IN_RINT):
6165 target = expand_builtin_mathfn (exp, target, subtarget);
6170 CASE_FLT_FN (BUILT_IN_ILOGB):
6171 if (! flag_unsafe_math_optimizations)
6173 CASE_FLT_FN (BUILT_IN_ISINF):
6174 CASE_FLT_FN (BUILT_IN_FINITE):
6175 case BUILT_IN_ISFINITE:
6176 case BUILT_IN_ISNORMAL:
6177 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6182 CASE_FLT_FN (BUILT_IN_LCEIL):
6183 CASE_FLT_FN (BUILT_IN_LLCEIL):
6184 CASE_FLT_FN (BUILT_IN_LFLOOR):
6185 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6186 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6191 CASE_FLT_FN (BUILT_IN_LRINT):
6192 CASE_FLT_FN (BUILT_IN_LLRINT):
6193 CASE_FLT_FN (BUILT_IN_LROUND):
6194 CASE_FLT_FN (BUILT_IN_LLROUND):
6195 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6200 CASE_FLT_FN (BUILT_IN_POW):
6201 target = expand_builtin_pow (exp, target, subtarget);
6206 CASE_FLT_FN (BUILT_IN_POWI):
6207 target = expand_builtin_powi (exp, target, subtarget);
6212 CASE_FLT_FN (BUILT_IN_ATAN2):
6213 CASE_FLT_FN (BUILT_IN_LDEXP):
6214 CASE_FLT_FN (BUILT_IN_SCALB):
6215 CASE_FLT_FN (BUILT_IN_SCALBN):
6216 CASE_FLT_FN (BUILT_IN_SCALBLN):
6217 if (! flag_unsafe_math_optimizations)
6220 CASE_FLT_FN (BUILT_IN_FMOD):
6221 CASE_FLT_FN (BUILT_IN_REMAINDER):
6222 CASE_FLT_FN (BUILT_IN_DREM):
6223 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6228 CASE_FLT_FN (BUILT_IN_CEXPI):
6229 target = expand_builtin_cexpi (exp, target, subtarget);
6230 gcc_assert (target);
6233 CASE_FLT_FN (BUILT_IN_SIN):
6234 CASE_FLT_FN (BUILT_IN_COS):
6235 if (! flag_unsafe_math_optimizations)
6237 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6242 CASE_FLT_FN (BUILT_IN_SINCOS):
6243 if (! flag_unsafe_math_optimizations)
6245 target = expand_builtin_sincos (exp);
6250 case BUILT_IN_APPLY_ARGS:
6251 return expand_builtin_apply_args ();
6253 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6254 FUNCTION with a copy of the parameters described by
6255 ARGUMENTS, and ARGSIZE. It returns a block of memory
6256 allocated on the stack into which is stored all the registers
6257 that might possibly be used for returning the result of a
6258 function. ARGUMENTS is the value returned by
6259 __builtin_apply_args. ARGSIZE is the number of bytes of
6260 arguments that must be copied. ??? How should this value be
6261 computed? We'll also need a safe worst case value for varargs
6263 case BUILT_IN_APPLY:
6264 if (!validate_arglist (exp, POINTER_TYPE,
6265 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6266 && !validate_arglist (exp, REFERENCE_TYPE,
6267 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6273 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6274 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6275 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6277 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6280 /* __builtin_return (RESULT) causes the function to return the
6281 value described by RESULT. RESULT is address of the block of
6282 memory returned by __builtin_apply. */
6283 case BUILT_IN_RETURN:
6284 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6285 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6288 case BUILT_IN_SAVEREGS:
6289 return expand_builtin_saveregs ();
6291 case BUILT_IN_ARGS_INFO:
6292 return expand_builtin_args_info (exp);
6294 case BUILT_IN_VA_ARG_PACK:
6295 /* All valid uses of __builtin_va_arg_pack () are removed during
6297 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6300 case BUILT_IN_VA_ARG_PACK_LEN:
6301 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6303 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6306 /* Return the address of the first anonymous stack arg. */
6307 case BUILT_IN_NEXT_ARG:
6308 if (fold_builtin_next_arg (exp, false))
6310 return expand_builtin_next_arg ();
6312 case BUILT_IN_CLEAR_CACHE:
6313 target = expand_builtin___clear_cache (exp);
6318 case BUILT_IN_CLASSIFY_TYPE:
6319 return expand_builtin_classify_type (exp);
6321 case BUILT_IN_CONSTANT_P:
6324 case BUILT_IN_FRAME_ADDRESS:
6325 case BUILT_IN_RETURN_ADDRESS:
6326 return expand_builtin_frame_address (fndecl, exp);
6328 /* Returns the address of the area where the structure is returned.
6330 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6331 if (call_expr_nargs (exp) != 0
6332 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6333 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6336 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6338 case BUILT_IN_ALLOCA:
6339 target = expand_builtin_alloca (exp, target);
6344 case BUILT_IN_STACK_SAVE:
6345 return expand_stack_save ();
6347 case BUILT_IN_STACK_RESTORE:
6348 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6351 case BUILT_IN_BSWAP32:
6352 case BUILT_IN_BSWAP64:
6353 target = expand_builtin_bswap (exp, target, subtarget);
6359 CASE_INT_FN (BUILT_IN_FFS):
6360 case BUILT_IN_FFSIMAX:
6361 target = expand_builtin_unop (target_mode, exp, target,
6362 subtarget, ffs_optab);
6367 CASE_INT_FN (BUILT_IN_CLZ):
6368 case BUILT_IN_CLZIMAX:
6369 target = expand_builtin_unop (target_mode, exp, target,
6370 subtarget, clz_optab);
6375 CASE_INT_FN (BUILT_IN_CTZ):
6376 case BUILT_IN_CTZIMAX:
6377 target = expand_builtin_unop (target_mode, exp, target,
6378 subtarget, ctz_optab);
6383 CASE_INT_FN (BUILT_IN_POPCOUNT):
6384 case BUILT_IN_POPCOUNTIMAX:
6385 target = expand_builtin_unop (target_mode, exp, target,
6386 subtarget, popcount_optab);
6391 CASE_INT_FN (BUILT_IN_PARITY):
6392 case BUILT_IN_PARITYIMAX:
6393 target = expand_builtin_unop (target_mode, exp, target,
6394 subtarget, parity_optab);
6399 case BUILT_IN_STRLEN:
6400 target = expand_builtin_strlen (exp, target, target_mode);
6405 case BUILT_IN_STRCPY:
6406 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6411 case BUILT_IN_STRNCPY:
6412 target = expand_builtin_strncpy (exp, target, mode);
6417 case BUILT_IN_STPCPY:
6418 target = expand_builtin_stpcpy (exp, target, mode);
6423 case BUILT_IN_STRCAT:
6424 target = expand_builtin_strcat (fndecl, exp, target, mode);
6429 case BUILT_IN_STRNCAT:
6430 target = expand_builtin_strncat (exp, target, mode);
6435 case BUILT_IN_STRSPN:
6436 target = expand_builtin_strspn (exp, target, mode);
6441 case BUILT_IN_STRCSPN:
6442 target = expand_builtin_strcspn (exp, target, mode);
6447 case BUILT_IN_STRSTR:
6448 target = expand_builtin_strstr (exp, target, mode);
6453 case BUILT_IN_STRPBRK:
6454 target = expand_builtin_strpbrk (exp, target, mode);
6459 case BUILT_IN_INDEX:
6460 case BUILT_IN_STRCHR:
6461 target = expand_builtin_strchr (exp, target, mode);
6466 case BUILT_IN_RINDEX:
6467 case BUILT_IN_STRRCHR:
6468 target = expand_builtin_strrchr (exp, target, mode);
6473 case BUILT_IN_MEMCPY:
6474 target = expand_builtin_memcpy (exp, target, mode);
6479 case BUILT_IN_MEMPCPY:
6480 target = expand_builtin_mempcpy (exp, target, mode);
6485 case BUILT_IN_MEMMOVE:
6486 target = expand_builtin_memmove (exp, target, mode, ignore);
6491 case BUILT_IN_BCOPY:
6492 target = expand_builtin_bcopy (exp, ignore);
6497 case BUILT_IN_MEMSET:
6498 target = expand_builtin_memset (exp, target, mode);
6503 case BUILT_IN_BZERO:
6504 target = expand_builtin_bzero (exp);
6509 case BUILT_IN_STRCMP:
6510 target = expand_builtin_strcmp (exp, target, mode);
6515 case BUILT_IN_STRNCMP:
6516 target = expand_builtin_strncmp (exp, target, mode);
6521 case BUILT_IN_MEMCHR:
6522 target = expand_builtin_memchr (exp, target, mode);
6528 case BUILT_IN_MEMCMP:
6529 target = expand_builtin_memcmp (exp, target, mode);
6534 case BUILT_IN_SETJMP:
6535 /* This should have been lowered to the builtins below. */
6538 case BUILT_IN_SETJMP_SETUP:
6539 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6540 and the receiver label. */
6541 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6543 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6544 VOIDmode, EXPAND_NORMAL);
6545 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6546 rtx label_r = label_rtx (label);
6548 /* This is copied from the handling of non-local gotos. */
6549 expand_builtin_setjmp_setup (buf_addr, label_r);
6550 nonlocal_goto_handler_labels
6551 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6552 nonlocal_goto_handler_labels);
6553 /* ??? Do not let expand_label treat us as such since we would
6554 not want to be both on the list of non-local labels and on
6555 the list of forced labels. */
6556 FORCED_LABEL (label) = 0;
6561 case BUILT_IN_SETJMP_DISPATCHER:
6562 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6563 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6565 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6566 rtx label_r = label_rtx (label);
6568 /* Remove the dispatcher label from the list of non-local labels
6569 since the receiver labels have been added to it above. */
6570 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6575 case BUILT_IN_SETJMP_RECEIVER:
6576 /* __builtin_setjmp_receiver is passed the receiver label. */
6577 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6579 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6580 rtx label_r = label_rtx (label);
6582 expand_builtin_setjmp_receiver (label_r);
6587 /* __builtin_longjmp is passed a pointer to an array of five words.
6588 It's similar to the C library longjmp function but works with
6589 __builtin_setjmp above. */
6590 case BUILT_IN_LONGJMP:
6591 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6593 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6594 VOIDmode, EXPAND_NORMAL);
6595 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6597 if (value != const1_rtx)
6599 error ("%<__builtin_longjmp%> second argument must be 1");
6603 expand_builtin_longjmp (buf_addr, value);
6608 case BUILT_IN_NONLOCAL_GOTO:
6609 target = expand_builtin_nonlocal_goto (exp);
6614 /* This updates the setjmp buffer that is its argument with the value
6615 of the current stack pointer. */
6616 case BUILT_IN_UPDATE_SETJMP_BUF:
6617 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6620 = expand_normal (CALL_EXPR_ARG (exp, 0));
6622 expand_builtin_update_setjmp_buf (buf_addr);
6628 expand_builtin_trap ();
6631 case BUILT_IN_PRINTF:
6632 target = expand_builtin_printf (exp, target, mode, false);
6637 case BUILT_IN_PRINTF_UNLOCKED:
6638 target = expand_builtin_printf (exp, target, mode, true);
6643 case BUILT_IN_FPUTS:
6644 target = expand_builtin_fputs (exp, target, false);
6648 case BUILT_IN_FPUTS_UNLOCKED:
6649 target = expand_builtin_fputs (exp, target, true);
6654 case BUILT_IN_FPRINTF:
6655 target = expand_builtin_fprintf (exp, target, mode, false);
6660 case BUILT_IN_FPRINTF_UNLOCKED:
6661 target = expand_builtin_fprintf (exp, target, mode, true);
6666 case BUILT_IN_SPRINTF:
6667 target = expand_builtin_sprintf (exp, target, mode);
6672 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6673 case BUILT_IN_SIGNBITD32:
6674 case BUILT_IN_SIGNBITD64:
6675 case BUILT_IN_SIGNBITD128:
6676 target = expand_builtin_signbit (exp, target);
6681 /* Various hooks for the DWARF 2 __throw routine. */
6682 case BUILT_IN_UNWIND_INIT:
6683 expand_builtin_unwind_init ();
6685 case BUILT_IN_DWARF_CFA:
6686 return virtual_cfa_rtx;
6687 #ifdef DWARF2_UNWIND_INFO
6688 case BUILT_IN_DWARF_SP_COLUMN:
6689 return expand_builtin_dwarf_sp_column ();
6690 case BUILT_IN_INIT_DWARF_REG_SIZES:
6691 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6694 case BUILT_IN_FROB_RETURN_ADDR:
6695 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6696 case BUILT_IN_EXTRACT_RETURN_ADDR:
6697 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6698 case BUILT_IN_EH_RETURN:
6699 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6700 CALL_EXPR_ARG (exp, 1));
6702 #ifdef EH_RETURN_DATA_REGNO
6703 case BUILT_IN_EH_RETURN_DATA_REGNO:
6704 return expand_builtin_eh_return_data_regno (exp);
6706 case BUILT_IN_EXTEND_POINTER:
6707 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6709 case BUILT_IN_VA_START:
6710 return expand_builtin_va_start (exp);
6711 case BUILT_IN_VA_END:
6712 return expand_builtin_va_end (exp);
6713 case BUILT_IN_VA_COPY:
6714 return expand_builtin_va_copy (exp);
6715 case BUILT_IN_EXPECT:
6716 return expand_builtin_expect (exp, target);
6717 case BUILT_IN_PREFETCH:
6718 expand_builtin_prefetch (exp);
6721 case BUILT_IN_PROFILE_FUNC_ENTER:
6722 return expand_builtin_profile_func (false);
6723 case BUILT_IN_PROFILE_FUNC_EXIT:
6724 return expand_builtin_profile_func (true);
6726 case BUILT_IN_INIT_TRAMPOLINE:
6727 return expand_builtin_init_trampoline (exp);
6728 case BUILT_IN_ADJUST_TRAMPOLINE:
6729 return expand_builtin_adjust_trampoline (exp);
6732 case BUILT_IN_EXECL:
6733 case BUILT_IN_EXECV:
6734 case BUILT_IN_EXECLP:
6735 case BUILT_IN_EXECLE:
6736 case BUILT_IN_EXECVP:
6737 case BUILT_IN_EXECVE:
6738 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6743 case BUILT_IN_FETCH_AND_ADD_1:
6744 case BUILT_IN_FETCH_AND_ADD_2:
6745 case BUILT_IN_FETCH_AND_ADD_4:
6746 case BUILT_IN_FETCH_AND_ADD_8:
6747 case BUILT_IN_FETCH_AND_ADD_16:
6748 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6749 target = expand_builtin_sync_operation (mode, exp, PLUS,
6750 false, target, ignore);
6755 case BUILT_IN_FETCH_AND_SUB_1:
6756 case BUILT_IN_FETCH_AND_SUB_2:
6757 case BUILT_IN_FETCH_AND_SUB_4:
6758 case BUILT_IN_FETCH_AND_SUB_8:
6759 case BUILT_IN_FETCH_AND_SUB_16:
6760 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6761 target = expand_builtin_sync_operation (mode, exp, MINUS,
6762 false, target, ignore);
6767 case BUILT_IN_FETCH_AND_OR_1:
6768 case BUILT_IN_FETCH_AND_OR_2:
6769 case BUILT_IN_FETCH_AND_OR_4:
6770 case BUILT_IN_FETCH_AND_OR_8:
6771 case BUILT_IN_FETCH_AND_OR_16:
6772 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6773 target = expand_builtin_sync_operation (mode, exp, IOR,
6774 false, target, ignore);
6779 case BUILT_IN_FETCH_AND_AND_1:
6780 case BUILT_IN_FETCH_AND_AND_2:
6781 case BUILT_IN_FETCH_AND_AND_4:
6782 case BUILT_IN_FETCH_AND_AND_8:
6783 case BUILT_IN_FETCH_AND_AND_16:
6784 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6785 target = expand_builtin_sync_operation (mode, exp, AND,
6786 false, target, ignore);
6791 case BUILT_IN_FETCH_AND_XOR_1:
6792 case BUILT_IN_FETCH_AND_XOR_2:
6793 case BUILT_IN_FETCH_AND_XOR_4:
6794 case BUILT_IN_FETCH_AND_XOR_8:
6795 case BUILT_IN_FETCH_AND_XOR_16:
6796 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6797 target = expand_builtin_sync_operation (mode, exp, XOR,
6798 false, target, ignore);
6803 case BUILT_IN_FETCH_AND_NAND_1:
6804 case BUILT_IN_FETCH_AND_NAND_2:
6805 case BUILT_IN_FETCH_AND_NAND_4:
6806 case BUILT_IN_FETCH_AND_NAND_8:
6807 case BUILT_IN_FETCH_AND_NAND_16:
6808 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6809 target = expand_builtin_sync_operation (mode, exp, NOT,
6810 false, target, ignore);
6815 case BUILT_IN_ADD_AND_FETCH_1:
6816 case BUILT_IN_ADD_AND_FETCH_2:
6817 case BUILT_IN_ADD_AND_FETCH_4:
6818 case BUILT_IN_ADD_AND_FETCH_8:
6819 case BUILT_IN_ADD_AND_FETCH_16:
6820 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6821 target = expand_builtin_sync_operation (mode, exp, PLUS,
6822 true, target, ignore);
6827 case BUILT_IN_SUB_AND_FETCH_1:
6828 case BUILT_IN_SUB_AND_FETCH_2:
6829 case BUILT_IN_SUB_AND_FETCH_4:
6830 case BUILT_IN_SUB_AND_FETCH_8:
6831 case BUILT_IN_SUB_AND_FETCH_16:
6832 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6833 target = expand_builtin_sync_operation (mode, exp, MINUS,
6834 true, target, ignore);
6839 case BUILT_IN_OR_AND_FETCH_1:
6840 case BUILT_IN_OR_AND_FETCH_2:
6841 case BUILT_IN_OR_AND_FETCH_4:
6842 case BUILT_IN_OR_AND_FETCH_8:
6843 case BUILT_IN_OR_AND_FETCH_16:
6844 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6845 target = expand_builtin_sync_operation (mode, exp, IOR,
6846 true, target, ignore);
6851 case BUILT_IN_AND_AND_FETCH_1:
6852 case BUILT_IN_AND_AND_FETCH_2:
6853 case BUILT_IN_AND_AND_FETCH_4:
6854 case BUILT_IN_AND_AND_FETCH_8:
6855 case BUILT_IN_AND_AND_FETCH_16:
6856 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6857 target = expand_builtin_sync_operation (mode, exp, AND,
6858 true, target, ignore);
6863 case BUILT_IN_XOR_AND_FETCH_1:
6864 case BUILT_IN_XOR_AND_FETCH_2:
6865 case BUILT_IN_XOR_AND_FETCH_4:
6866 case BUILT_IN_XOR_AND_FETCH_8:
6867 case BUILT_IN_XOR_AND_FETCH_16:
6868 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6869 target = expand_builtin_sync_operation (mode, exp, XOR,
6870 true, target, ignore);
6875 case BUILT_IN_NAND_AND_FETCH_1:
6876 case BUILT_IN_NAND_AND_FETCH_2:
6877 case BUILT_IN_NAND_AND_FETCH_4:
6878 case BUILT_IN_NAND_AND_FETCH_8:
6879 case BUILT_IN_NAND_AND_FETCH_16:
6880 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6881 target = expand_builtin_sync_operation (mode, exp, NOT,
6882 true, target, ignore);
6887 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6888 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6889 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6890 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6891 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6892 if (mode == VOIDmode)
6893 mode = TYPE_MODE (boolean_type_node);
6894 if (!target || !register_operand (target, mode))
6895 target = gen_reg_rtx (mode);
6897 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6898 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6903 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6904 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6905 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6906 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6907 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6908 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6909 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6914 case BUILT_IN_LOCK_TEST_AND_SET_1:
6915 case BUILT_IN_LOCK_TEST_AND_SET_2:
6916 case BUILT_IN_LOCK_TEST_AND_SET_4:
6917 case BUILT_IN_LOCK_TEST_AND_SET_8:
6918 case BUILT_IN_LOCK_TEST_AND_SET_16:
6919 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6920 target = expand_builtin_lock_test_and_set (mode, exp, target);
6925 case BUILT_IN_LOCK_RELEASE_1:
6926 case BUILT_IN_LOCK_RELEASE_2:
6927 case BUILT_IN_LOCK_RELEASE_4:
6928 case BUILT_IN_LOCK_RELEASE_8:
6929 case BUILT_IN_LOCK_RELEASE_16:
6930 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6931 expand_builtin_lock_release (mode, exp);
6934 case BUILT_IN_SYNCHRONIZE:
6935 expand_builtin_synchronize ();
6938 case BUILT_IN_OBJECT_SIZE:
6939 return expand_builtin_object_size (exp);
6941 case BUILT_IN_MEMCPY_CHK:
6942 case BUILT_IN_MEMPCPY_CHK:
6943 case BUILT_IN_MEMMOVE_CHK:
6944 case BUILT_IN_MEMSET_CHK:
6945 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6950 case BUILT_IN_STRCPY_CHK:
6951 case BUILT_IN_STPCPY_CHK:
6952 case BUILT_IN_STRNCPY_CHK:
6953 case BUILT_IN_STRCAT_CHK:
6954 case BUILT_IN_STRNCAT_CHK:
6955 case BUILT_IN_SNPRINTF_CHK:
6956 case BUILT_IN_VSNPRINTF_CHK:
6957 maybe_emit_chk_warning (exp, fcode);
6960 case BUILT_IN_SPRINTF_CHK:
6961 case BUILT_IN_VSPRINTF_CHK:
6962 maybe_emit_sprintf_chk_warning (exp, fcode);
6965 default: /* just do library call, if unknown builtin */
6969 /* The switch statement above can drop through to cause the function
6970 to be called normally. */
6971 return expand_call (exp, target, ignore);
6974 /* Determine whether a tree node represents a call to a built-in
6975 function. If the tree T is a call to a built-in function with
6976 the right number of arguments of the appropriate types, return
6977 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6978 Otherwise the return value is END_BUILTINS. */
6980 enum built_in_function
6981 builtin_mathfn_code (const_tree t)
6983 const_tree fndecl, arg, parmlist;
6984 const_tree argtype, parmtype;
6985 const_call_expr_arg_iterator iter;
6987 if (TREE_CODE (t) != CALL_EXPR
6988 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6989 return END_BUILTINS;
6991 fndecl = get_callee_fndecl (t);
6992 if (fndecl == NULL_TREE
6993 || TREE_CODE (fndecl) != FUNCTION_DECL
6994 || ! DECL_BUILT_IN (fndecl)
6995 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6996 return END_BUILTINS;
6998 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6999 init_const_call_expr_arg_iterator (t, &iter);
7000 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7002 /* If a function doesn't take a variable number of arguments,
7003 the last element in the list will have type `void'. */
7004 parmtype = TREE_VALUE (parmlist);
7005 if (VOID_TYPE_P (parmtype))
7007 if (more_const_call_expr_args_p (&iter))
7008 return END_BUILTINS;
7009 return DECL_FUNCTION_CODE (fndecl);
7012 if (! more_const_call_expr_args_p (&iter))
7013 return END_BUILTINS;
7015 arg = next_const_call_expr_arg (&iter);
7016 argtype = TREE_TYPE (arg);
7018 if (SCALAR_FLOAT_TYPE_P (parmtype))
7020 if (! SCALAR_FLOAT_TYPE_P (argtype))
7021 return END_BUILTINS;
7023 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7025 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7026 return END_BUILTINS;
7028 else if (POINTER_TYPE_P (parmtype))
7030 if (! POINTER_TYPE_P (argtype))
7031 return END_BUILTINS;
7033 else if (INTEGRAL_TYPE_P (parmtype))
7035 if (! INTEGRAL_TYPE_P (argtype))
7036 return END_BUILTINS;
7039 return END_BUILTINS;
7042 /* Variable-length argument list. */
7043 return DECL_FUNCTION_CODE (fndecl);
7046 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7047 evaluate to a constant. */
7050 fold_builtin_constant_p (tree arg)
7052 /* We return 1 for a numeric type that's known to be a constant
7053 value at compile-time or for an aggregate type that's a
7054 literal constant. */
7057 /* If we know this is a constant, emit the constant of one. */
7058 if (CONSTANT_CLASS_P (arg)
7059 || (TREE_CODE (arg) == CONSTRUCTOR
7060 && TREE_CONSTANT (arg)))
7061 return integer_one_node;
7062 if (TREE_CODE (arg) == ADDR_EXPR)
7064 tree op = TREE_OPERAND (arg, 0);
7065 if (TREE_CODE (op) == STRING_CST
7066 || (TREE_CODE (op) == ARRAY_REF
7067 && integer_zerop (TREE_OPERAND (op, 1))
7068 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7069 return integer_one_node;
7072 /* If this expression has side effects, show we don't know it to be a
7073 constant. Likewise if it's a pointer or aggregate type since in
7074 those case we only want literals, since those are only optimized
7075 when generating RTL, not later.
7076 And finally, if we are compiling an initializer, not code, we
7077 need to return a definite result now; there's not going to be any
7078 more optimization done. */
7079 if (TREE_SIDE_EFFECTS (arg)
7080 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7081 || POINTER_TYPE_P (TREE_TYPE (arg))
7083 || folding_initializer)
7084 return integer_zero_node;
7089 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7090 return it as a truthvalue. */
7093 build_builtin_expect_predicate (tree pred, tree expected)
7095 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7097 fn = built_in_decls[BUILT_IN_EXPECT];
7098 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7099 ret_type = TREE_TYPE (TREE_TYPE (fn));
7100 pred_type = TREE_VALUE (arg_types);
7101 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7103 pred = fold_convert (pred_type, pred);
7104 expected = fold_convert (expected_type, expected);
7105 call_expr = build_call_expr (fn, 2, pred, expected);
7107 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7108 build_int_cst (ret_type, 0));
7111 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7112 NULL_TREE if no simplification is possible. */
7115 fold_builtin_expect (tree arg0, tree arg1)
7118 enum tree_code code;
7120 /* If this is a builtin_expect within a builtin_expect keep the
7121 inner one. See through a comparison against a constant. It
7122 might have been added to create a thruthvalue. */
7124 if (COMPARISON_CLASS_P (inner)
7125 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7126 inner = TREE_OPERAND (inner, 0);
7128 if (TREE_CODE (inner) == CALL_EXPR
7129 && (fndecl = get_callee_fndecl (inner))
7130 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7131 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7134 /* Distribute the expected value over short-circuiting operators.
7135 See through the cast from truthvalue_type_node to long. */
7137 while (TREE_CODE (inner) == NOP_EXPR
7138 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7139 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7140 inner = TREE_OPERAND (inner, 0);
7142 code = TREE_CODE (inner);
7143 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7145 tree op0 = TREE_OPERAND (inner, 0);
7146 tree op1 = TREE_OPERAND (inner, 1);
7148 op0 = build_builtin_expect_predicate (op0, arg1);
7149 op1 = build_builtin_expect_predicate (op1, arg1);
7150 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7152 return fold_convert (TREE_TYPE (arg0), inner);
7155 /* If the argument isn't invariant then there's nothing else we can do. */
7156 if (!TREE_INVARIANT (arg0))
7159 /* If we expect that a comparison against the argument will fold to
7160 a constant return the constant. In practice, this means a true
7161 constant or the address of a non-weak symbol. */
7164 if (TREE_CODE (inner) == ADDR_EXPR)
7168 inner = TREE_OPERAND (inner, 0);
7170 while (TREE_CODE (inner) == COMPONENT_REF
7171 || TREE_CODE (inner) == ARRAY_REF);
7172 if (DECL_P (inner) && DECL_WEAK (inner))
7176 /* Otherwise, ARG0 already has the proper type for the return value. */
7180 /* Fold a call to __builtin_classify_type with argument ARG. */
7183 fold_builtin_classify_type (tree arg)
7186 return build_int_cst (NULL_TREE, no_type_class);
7188 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7191 /* Fold a call to __builtin_strlen with argument ARG. */
7194 fold_builtin_strlen (tree arg)
7196 if (!validate_arg (arg, POINTER_TYPE))
7200 tree len = c_strlen (arg, 0);
7204 /* Convert from the internal "sizetype" type to "size_t". */
7206 len = fold_convert (size_type_node, len);
7214 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7217 fold_builtin_inf (tree type, int warn)
7219 REAL_VALUE_TYPE real;
7221 /* __builtin_inff is intended to be usable to define INFINITY on all
7222 targets. If an infinity is not available, INFINITY expands "to a
7223 positive constant of type float that overflows at translation
7224 time", footnote "In this case, using INFINITY will violate the
7225 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7226 Thus we pedwarn to ensure this constraint violation is
7228 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7229 pedwarn ("target format does not support infinity");
7232 return build_real (type, real);
7235 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7238 fold_builtin_nan (tree arg, tree type, int quiet)
7240 REAL_VALUE_TYPE real;
7243 if (!validate_arg (arg, POINTER_TYPE))
7245 str = c_getstr (arg);
7249 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7252 return build_real (type, real);
7255 /* Return true if the floating point expression T has an integer value.
7256 We also allow +Inf, -Inf and NaN to be considered integer values. */
7259 integer_valued_real_p (tree t)
7261 switch (TREE_CODE (t))
7268 case NON_LVALUE_EXPR:
7269 return integer_valued_real_p (TREE_OPERAND (t, 0));
7274 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
7281 return integer_valued_real_p (TREE_OPERAND (t, 0))
7282 && integer_valued_real_p (TREE_OPERAND (t, 1));
7285 return integer_valued_real_p (TREE_OPERAND (t, 1))
7286 && integer_valued_real_p (TREE_OPERAND (t, 2));
7289 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7293 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7294 if (TREE_CODE (type) == INTEGER_TYPE)
7296 if (TREE_CODE (type) == REAL_TYPE)
7297 return integer_valued_real_p (TREE_OPERAND (t, 0));
7302 switch (builtin_mathfn_code (t))
7304 CASE_FLT_FN (BUILT_IN_CEIL):
7305 CASE_FLT_FN (BUILT_IN_FLOOR):
7306 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7307 CASE_FLT_FN (BUILT_IN_RINT):
7308 CASE_FLT_FN (BUILT_IN_ROUND):
7309 CASE_FLT_FN (BUILT_IN_TRUNC):
7312 CASE_FLT_FN (BUILT_IN_FMIN):
7313 CASE_FLT_FN (BUILT_IN_FMAX):
7314 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7315 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7328 /* FNDECL is assumed to be a builtin where truncation can be propagated
7329 across (for instance floor((double)f) == (double)floorf (f).
7330 Do the transformation for a call with argument ARG. */
7333 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7335 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7337 if (!validate_arg (arg, REAL_TYPE))
7340 /* Integer rounding functions are idempotent. */
7341 if (fcode == builtin_mathfn_code (arg))
7344 /* If argument is already integer valued, and we don't need to worry
7345 about setting errno, there's no need to perform rounding. */
7346 if (! flag_errno_math && integer_valued_real_p (arg))
7351 tree arg0 = strip_float_extensions (arg);
7352 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7353 tree newtype = TREE_TYPE (arg0);
7356 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7357 && (decl = mathfn_built_in (newtype, fcode)))
7358 return fold_convert (ftype,
7359 build_call_expr (decl, 1,
7360 fold_convert (newtype, arg0)));
7365 /* FNDECL is assumed to be builtin which can narrow the FP type of
7366 the argument, for instance lround((double)f) -> lroundf (f).
7367 Do the transformation for a call with argument ARG. */
7370 fold_fixed_mathfn (tree fndecl, tree arg)
7372 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7374 if (!validate_arg (arg, REAL_TYPE))
7377 /* If argument is already integer valued, and we don't need to worry
7378 about setting errno, there's no need to perform rounding. */
7379 if (! flag_errno_math && integer_valued_real_p (arg))
7380 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7384 tree ftype = TREE_TYPE (arg);
7385 tree arg0 = strip_float_extensions (arg);
7386 tree newtype = TREE_TYPE (arg0);
7389 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7390 && (decl = mathfn_built_in (newtype, fcode)))
7391 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7394 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7395 sizeof (long long) == sizeof (long). */
7396 if (TYPE_PRECISION (long_long_integer_type_node)
7397 == TYPE_PRECISION (long_integer_type_node))
7399 tree newfn = NULL_TREE;
7402 CASE_FLT_FN (BUILT_IN_LLCEIL):
7403 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7406 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7407 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7410 CASE_FLT_FN (BUILT_IN_LLROUND):
7411 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7414 CASE_FLT_FN (BUILT_IN_LLRINT):
7415 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7424 tree newcall = build_call_expr(newfn, 1, arg);
7425 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7432 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7433 return type. Return NULL_TREE if no simplification can be made. */
7436 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7440 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7441 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7444 /* Calculate the result when the argument is a constant. */
7445 if (TREE_CODE (arg) == COMPLEX_CST
7446 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7450 if (TREE_CODE (arg) == COMPLEX_EXPR)
7452 tree real = TREE_OPERAND (arg, 0);
7453 tree imag = TREE_OPERAND (arg, 1);
7455 /* If either part is zero, cabs is fabs of the other. */
7456 if (real_zerop (real))
7457 return fold_build1 (ABS_EXPR, type, imag);
7458 if (real_zerop (imag))
7459 return fold_build1 (ABS_EXPR, type, real);
7461 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7462 if (flag_unsafe_math_optimizations
7463 && operand_equal_p (real, imag, OEP_PURE_SAME))
7465 const REAL_VALUE_TYPE sqrt2_trunc
7466 = real_value_truncate (TYPE_MODE (type),
7467 *get_real_const (rv_sqrt2));
7469 return fold_build2 (MULT_EXPR, type,
7470 fold_build1 (ABS_EXPR, type, real),
7471 build_real (type, sqrt2_trunc));
7475 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7476 if (TREE_CODE (arg) == NEGATE_EXPR
7477 || TREE_CODE (arg) == CONJ_EXPR)
7478 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7480 /* Don't do this when optimizing for size. */
7481 if (flag_unsafe_math_optimizations
7482 && optimize && !optimize_size)
7484 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7486 if (sqrtfn != NULL_TREE)
7488 tree rpart, ipart, result;
7490 arg = builtin_save_expr (arg);
7492 rpart = fold_build1 (REALPART_EXPR, type, arg);
7493 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7495 rpart = builtin_save_expr (rpart);
7496 ipart = builtin_save_expr (ipart);
7498 result = fold_build2 (PLUS_EXPR, type,
7499 fold_build2 (MULT_EXPR, type,
7501 fold_build2 (MULT_EXPR, type,
7504 return build_call_expr (sqrtfn, 1, result);
7511 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7512 Return NULL_TREE if no simplification can be made. */
7515 fold_builtin_sqrt (tree arg, tree type)
7518 enum built_in_function fcode;
7521 if (!validate_arg (arg, REAL_TYPE))
7524 /* Calculate the result when the argument is a constant. */
7525 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7528 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7529 fcode = builtin_mathfn_code (arg);
7530 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7532 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7533 arg = fold_build2 (MULT_EXPR, type,
7534 CALL_EXPR_ARG (arg, 0),
7535 build_real (type, dconsthalf));
7536 return build_call_expr (expfn, 1, arg);
7539 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7540 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7542 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7546 tree arg0 = CALL_EXPR_ARG (arg, 0);
7548 /* The inner root was either sqrt or cbrt. */
7549 REAL_VALUE_TYPE dconstroot =
7550 BUILTIN_SQRT_P (fcode) ? dconsthalf : *get_real_const (rv_third);
7552 /* Adjust for the outer root. */
7553 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7554 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7555 tree_root = build_real (type, dconstroot);
7556 return build_call_expr (powfn, 2, arg0, tree_root);
7560 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7561 if (flag_unsafe_math_optimizations
7562 && (fcode == BUILT_IN_POW
7563 || fcode == BUILT_IN_POWF
7564 || fcode == BUILT_IN_POWL))
7566 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7567 tree arg0 = CALL_EXPR_ARG (arg, 0);
7568 tree arg1 = CALL_EXPR_ARG (arg, 1);
7570 if (!tree_expr_nonnegative_p (arg0))
7571 arg0 = build1 (ABS_EXPR, type, arg0);
7572 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7573 build_real (type, dconsthalf));
7574 return build_call_expr (powfn, 2, arg0, narg1);
7580 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7581 Return NULL_TREE if no simplification can be made. */
7584 fold_builtin_cbrt (tree arg, tree type)
7586 const enum built_in_function fcode = builtin_mathfn_code (arg);
7589 if (!validate_arg (arg, REAL_TYPE))
7592 /* Calculate the result when the argument is a constant. */
7593 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7596 if (flag_unsafe_math_optimizations)
7598 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7599 if (BUILTIN_EXPONENT_P (fcode))
7601 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7602 const REAL_VALUE_TYPE third_trunc =
7603 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_third));
7604 arg = fold_build2 (MULT_EXPR, type,
7605 CALL_EXPR_ARG (arg, 0),
7606 build_real (type, third_trunc));
7607 return build_call_expr (expfn, 1, arg);
7610 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7611 if (BUILTIN_SQRT_P (fcode))
7613 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7617 tree arg0 = CALL_EXPR_ARG (arg, 0);
7619 REAL_VALUE_TYPE dconstroot = *get_real_const (rv_third);
7621 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7622 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7623 tree_root = build_real (type, dconstroot);
7624 return build_call_expr (powfn, 2, arg0, tree_root);
7628 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7629 if (BUILTIN_CBRT_P (fcode))
7631 tree arg0 = CALL_EXPR_ARG (arg, 0);
7632 if (tree_expr_nonnegative_p (arg0))
7634 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7639 REAL_VALUE_TYPE dconstroot;
7641 real_arithmetic (&dconstroot, MULT_EXPR,
7642 get_real_const (rv_third),
7643 get_real_const (rv_third));
7644 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7645 tree_root = build_real (type, dconstroot);
7646 return build_call_expr (powfn, 2, arg0, tree_root);
7651 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7652 if (fcode == BUILT_IN_POW
7653 || fcode == BUILT_IN_POWF
7654 || fcode == BUILT_IN_POWL)
7656 tree arg00 = CALL_EXPR_ARG (arg, 0);
7657 tree arg01 = CALL_EXPR_ARG (arg, 1);
7658 if (tree_expr_nonnegative_p (arg00))
7660 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7661 const REAL_VALUE_TYPE dconstroot
7662 = real_value_truncate (TYPE_MODE (type),
7663 *get_real_const (rv_third));
7664 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7665 build_real (type, dconstroot));
7666 return build_call_expr (powfn, 2, arg00, narg01);
7673 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7674 TYPE is the type of the return value. Return NULL_TREE if no
7675 simplification can be made. */
7678 fold_builtin_cos (tree arg, tree type, tree fndecl)
7682 if (!validate_arg (arg, REAL_TYPE))
7685 /* Calculate the result when the argument is a constant. */
7686 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7689 /* Optimize cos(-x) into cos (x). */
7690 if ((narg = fold_strip_sign_ops (arg)))
7691 return build_call_expr (fndecl, 1, narg);
7696 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7697 Return NULL_TREE if no simplification can be made. */
7700 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7702 if (validate_arg (arg, REAL_TYPE))
7706 /* Calculate the result when the argument is a constant. */
7707 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7710 /* Optimize cosh(-x) into cosh (x). */
7711 if ((narg = fold_strip_sign_ops (arg)))
7712 return build_call_expr (fndecl, 1, narg);
7718 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7719 Return NULL_TREE if no simplification can be made. */
7722 fold_builtin_tan (tree arg, tree type)
7724 enum built_in_function fcode;
7727 if (!validate_arg (arg, REAL_TYPE))
7730 /* Calculate the result when the argument is a constant. */
7731 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7734 /* Optimize tan(atan(x)) = x. */
7735 fcode = builtin_mathfn_code (arg);
7736 if (flag_unsafe_math_optimizations
7737 && (fcode == BUILT_IN_ATAN
7738 || fcode == BUILT_IN_ATANF
7739 || fcode == BUILT_IN_ATANL))
7740 return CALL_EXPR_ARG (arg, 0);
7745 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7746 NULL_TREE if no simplification can be made. */
7749 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7754 if (!validate_arg (arg0, REAL_TYPE)
7755 || !validate_arg (arg1, POINTER_TYPE)
7756 || !validate_arg (arg2, POINTER_TYPE))
7759 type = TREE_TYPE (arg0);
7761 /* Calculate the result when the argument is a constant. */
7762 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7765 /* Canonicalize sincos to cexpi. */
7766 if (!TARGET_C99_FUNCTIONS)
7768 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7772 call = build_call_expr (fn, 1, arg0);
7773 call = builtin_save_expr (call);
7775 return build2 (COMPOUND_EXPR, type,
7776 build2 (MODIFY_EXPR, void_type_node,
7777 build_fold_indirect_ref (arg1),
7778 build1 (IMAGPART_EXPR, type, call)),
7779 build2 (MODIFY_EXPR, void_type_node,
7780 build_fold_indirect_ref (arg2),
7781 build1 (REALPART_EXPR, type, call)));
7784 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7785 NULL_TREE if no simplification can be made. */
7788 fold_builtin_cexp (tree arg0, tree type)
7791 tree realp, imagp, ifn;
7793 if (!validate_arg (arg0, COMPLEX_TYPE))
7796 rtype = TREE_TYPE (TREE_TYPE (arg0));
7798 /* In case we can figure out the real part of arg0 and it is constant zero
7800 if (!TARGET_C99_FUNCTIONS)
7802 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7806 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7807 && real_zerop (realp))
7809 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7810 return build_call_expr (ifn, 1, narg);
7813 /* In case we can easily decompose real and imaginary parts split cexp
7814 to exp (r) * cexpi (i). */
7815 if (flag_unsafe_math_optimizations
7818 tree rfn, rcall, icall;
7820 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7824 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7828 icall = build_call_expr (ifn, 1, imagp);
7829 icall = builtin_save_expr (icall);
7830 rcall = build_call_expr (rfn, 1, realp);
7831 rcall = builtin_save_expr (rcall);
7832 return fold_build2 (COMPLEX_EXPR, type,
7833 fold_build2 (MULT_EXPR, rtype,
7835 fold_build1 (REALPART_EXPR, rtype, icall)),
7836 fold_build2 (MULT_EXPR, rtype,
7838 fold_build1 (IMAGPART_EXPR, rtype, icall)));
7844 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7845 Return NULL_TREE if no simplification can be made. */
7848 fold_builtin_trunc (tree fndecl, tree arg)
7850 if (!validate_arg (arg, REAL_TYPE))
7853 /* Optimize trunc of constant value. */
7854 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7856 REAL_VALUE_TYPE r, x;
7857 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7859 x = TREE_REAL_CST (arg);
7860 real_trunc (&r, TYPE_MODE (type), &x);
7861 return build_real (type, r);
7864 return fold_trunc_transparent_mathfn (fndecl, arg);
7867 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7868 Return NULL_TREE if no simplification can be made. */
7871 fold_builtin_floor (tree fndecl, tree arg)
7873 if (!validate_arg (arg, REAL_TYPE))
7876 /* Optimize floor of constant value. */
7877 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7881 x = TREE_REAL_CST (arg);
7882 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7884 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7887 real_floor (&r, TYPE_MODE (type), &x);
7888 return build_real (type, r);
7892 /* Fold floor (x) where x is nonnegative to trunc (x). */
7893 if (tree_expr_nonnegative_p (arg))
7895 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7897 return build_call_expr (truncfn, 1, arg);
7900 return fold_trunc_transparent_mathfn (fndecl, arg);
7903 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7904 Return NULL_TREE if no simplification can be made. */
7907 fold_builtin_ceil (tree fndecl, tree arg)
7909 if (!validate_arg (arg, REAL_TYPE))
7912 /* Optimize ceil of constant value. */
7913 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7917 x = TREE_REAL_CST (arg);
7918 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7920 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7923 real_ceil (&r, TYPE_MODE (type), &x);
7924 return build_real (type, r);
7928 return fold_trunc_transparent_mathfn (fndecl, arg);
7931 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7932 Return NULL_TREE if no simplification can be made. */
7935 fold_builtin_round (tree fndecl, tree arg)
7937 if (!validate_arg (arg, REAL_TYPE))
7940 /* Optimize round of constant value. */
7941 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7945 x = TREE_REAL_CST (arg);
7946 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7948 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7951 real_round (&r, TYPE_MODE (type), &x);
7952 return build_real (type, r);
7956 return fold_trunc_transparent_mathfn (fndecl, arg);
7959 /* Fold function call to builtin lround, lroundf or lroundl (or the
7960 corresponding long long versions) and other rounding functions. ARG
7961 is the argument to the call. Return NULL_TREE if no simplification
7965 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7967 if (!validate_arg (arg, REAL_TYPE))
7970 /* Optimize lround of constant value. */
7971 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7973 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7975 if (real_isfinite (&x))
7977 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7978 tree ftype = TREE_TYPE (arg);
7979 unsigned HOST_WIDE_INT lo2;
7980 HOST_WIDE_INT hi, lo;
7983 switch (DECL_FUNCTION_CODE (fndecl))
7985 CASE_FLT_FN (BUILT_IN_LFLOOR):
7986 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7987 real_floor (&r, TYPE_MODE (ftype), &x);
7990 CASE_FLT_FN (BUILT_IN_LCEIL):
7991 CASE_FLT_FN (BUILT_IN_LLCEIL):
7992 real_ceil (&r, TYPE_MODE (ftype), &x);
7995 CASE_FLT_FN (BUILT_IN_LROUND):
7996 CASE_FLT_FN (BUILT_IN_LLROUND):
7997 real_round (&r, TYPE_MODE (ftype), &x);
8004 REAL_VALUE_TO_INT (&lo, &hi, r);
8005 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8006 return build_int_cst_wide (itype, lo2, hi);
8010 switch (DECL_FUNCTION_CODE (fndecl))
8012 CASE_FLT_FN (BUILT_IN_LFLOOR):
8013 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8014 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8015 if (tree_expr_nonnegative_p (arg))
8016 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8022 return fold_fixed_mathfn (fndecl, arg);
8025 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8026 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8027 the argument to the call. Return NULL_TREE if no simplification can
8031 fold_builtin_bitop (tree fndecl, tree arg)
8033 if (!validate_arg (arg, INTEGER_TYPE))
8036 /* Optimize for constant argument. */
8037 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8039 HOST_WIDE_INT hi, width, result;
8040 unsigned HOST_WIDE_INT lo;
8043 type = TREE_TYPE (arg);
8044 width = TYPE_PRECISION (type);
8045 lo = TREE_INT_CST_LOW (arg);
8047 /* Clear all the bits that are beyond the type's precision. */
8048 if (width > HOST_BITS_PER_WIDE_INT)
8050 hi = TREE_INT_CST_HIGH (arg);
8051 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8052 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8057 if (width < HOST_BITS_PER_WIDE_INT)
8058 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8061 switch (DECL_FUNCTION_CODE (fndecl))
8063 CASE_INT_FN (BUILT_IN_FFS):
8065 result = exact_log2 (lo & -lo) + 1;
8067 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8072 CASE_INT_FN (BUILT_IN_CLZ):
8074 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8076 result = width - floor_log2 (lo) - 1;
8077 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8081 CASE_INT_FN (BUILT_IN_CTZ):
8083 result = exact_log2 (lo & -lo);
8085 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8086 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8090 CASE_INT_FN (BUILT_IN_POPCOUNT):
8093 result++, lo &= lo - 1;
8095 result++, hi &= hi - 1;
8098 CASE_INT_FN (BUILT_IN_PARITY):
8101 result++, lo &= lo - 1;
8103 result++, hi &= hi - 1;
8111 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8117 /* Fold function call to builtin_bswap and the long and long long
8118 variants. Return NULL_TREE if no simplification can be made. */
8120 fold_builtin_bswap (tree fndecl, tree arg)
8122 if (! validate_arg (arg, INTEGER_TYPE))
8125 /* Optimize constant value. */
8126 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8128 HOST_WIDE_INT hi, width, r_hi = 0;
8129 unsigned HOST_WIDE_INT lo, r_lo = 0;
8132 type = TREE_TYPE (arg);
8133 width = TYPE_PRECISION (type);
8134 lo = TREE_INT_CST_LOW (arg);
8135 hi = TREE_INT_CST_HIGH (arg);
8137 switch (DECL_FUNCTION_CODE (fndecl))
8139 case BUILT_IN_BSWAP32:
8140 case BUILT_IN_BSWAP64:
8144 for (s = 0; s < width; s += 8)
8146 int d = width - s - 8;
8147 unsigned HOST_WIDE_INT byte;
8149 if (s < HOST_BITS_PER_WIDE_INT)
8150 byte = (lo >> s) & 0xff;
8152 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8154 if (d < HOST_BITS_PER_WIDE_INT)
8157 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8167 if (width < HOST_BITS_PER_WIDE_INT)
8168 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8170 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8176 /* Return true if EXPR is the real constant contained in VALUE. */
8179 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8183 return ((TREE_CODE (expr) == REAL_CST
8184 && !TREE_OVERFLOW (expr)
8185 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8186 || (TREE_CODE (expr) == COMPLEX_CST
8187 && real_dconstp (TREE_REALPART (expr), value)
8188 && real_zerop (TREE_IMAGPART (expr))));
8191 /* A subroutine of fold_builtin to fold the various logarithmic
8192 functions. Return NULL_TREE if no simplification can me made.
8193 FUNC is the corresponding MPFR logarithm function. */
8196 fold_builtin_logarithm (tree fndecl, tree arg,
8197 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8199 if (validate_arg (arg, REAL_TYPE))
8201 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8203 const enum built_in_function fcode = builtin_mathfn_code (arg);
8205 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8206 instead we'll look for 'e' truncated to MODE. So only do
8207 this if flag_unsafe_math_optimizations is set. */
8208 if (flag_unsafe_math_optimizations && func == mpfr_log)
8210 const REAL_VALUE_TYPE e_truncated =
8211 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_e));
8212 if (real_dconstp (arg, &e_truncated))
8213 return build_real (type, dconst1);
8216 /* Calculate the result when the argument is a constant. */
8217 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8220 /* Special case, optimize logN(expN(x)) = x. */
8221 if (flag_unsafe_math_optimizations
8222 && ((func == mpfr_log
8223 && (fcode == BUILT_IN_EXP
8224 || fcode == BUILT_IN_EXPF
8225 || fcode == BUILT_IN_EXPL))
8226 || (func == mpfr_log2
8227 && (fcode == BUILT_IN_EXP2
8228 || fcode == BUILT_IN_EXP2F
8229 || fcode == BUILT_IN_EXP2L))
8230 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8231 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8233 /* Optimize logN(func()) for various exponential functions. We
8234 want to determine the value "x" and the power "exponent" in
8235 order to transform logN(x**exponent) into exponent*logN(x). */
8236 if (flag_unsafe_math_optimizations)
8238 tree exponent = 0, x = 0;
8242 CASE_FLT_FN (BUILT_IN_EXP):
8243 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8244 x = build_real (type,
8245 real_value_truncate (TYPE_MODE (type),
8246 *get_real_const (rv_e)));
8247 exponent = CALL_EXPR_ARG (arg, 0);
8249 CASE_FLT_FN (BUILT_IN_EXP2):
8250 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8251 x = build_real (type, dconst2);
8252 exponent = CALL_EXPR_ARG (arg, 0);
8254 CASE_FLT_FN (BUILT_IN_EXP10):
8255 CASE_FLT_FN (BUILT_IN_POW10):
8256 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8258 REAL_VALUE_TYPE dconst10;
8259 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8260 x = build_real (type, dconst10);
8262 exponent = CALL_EXPR_ARG (arg, 0);
8264 CASE_FLT_FN (BUILT_IN_SQRT):
8265 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8266 x = CALL_EXPR_ARG (arg, 0);
8267 exponent = build_real (type, dconsthalf);
8269 CASE_FLT_FN (BUILT_IN_CBRT):
8270 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8271 x = CALL_EXPR_ARG (arg, 0);
8272 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8273 *get_real_const (rv_third)));
8275 CASE_FLT_FN (BUILT_IN_POW):
8276 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8277 x = CALL_EXPR_ARG (arg, 0);
8278 exponent = CALL_EXPR_ARG (arg, 1);
8284 /* Now perform the optimization. */
8287 tree logfn = build_call_expr (fndecl, 1, x);
8288 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8296 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8297 NULL_TREE if no simplification can be made. */
8300 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8302 tree res, narg0, narg1;
8304 if (!validate_arg (arg0, REAL_TYPE)
8305 || !validate_arg (arg1, REAL_TYPE))
8308 /* Calculate the result when the argument is a constant. */
8309 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8312 /* If either argument to hypot has a negate or abs, strip that off.
8313 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8314 narg0 = fold_strip_sign_ops (arg0);
8315 narg1 = fold_strip_sign_ops (arg1);
8318 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8319 narg1 ? narg1 : arg1);
8322 /* If either argument is zero, hypot is fabs of the other. */
8323 if (real_zerop (arg0))
8324 return fold_build1 (ABS_EXPR, type, arg1);
8325 else if (real_zerop (arg1))
8326 return fold_build1 (ABS_EXPR, type, arg0);
8328 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8329 if (flag_unsafe_math_optimizations
8330 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8332 const REAL_VALUE_TYPE sqrt2_trunc
8333 = real_value_truncate (TYPE_MODE (type), *get_real_const (rv_sqrt2));
8334 return fold_build2 (MULT_EXPR, type,
8335 fold_build1 (ABS_EXPR, type, arg0),
8336 build_real (type, sqrt2_trunc));
8343 /* Fold a builtin function call to pow, powf, or powl. Return
8344 NULL_TREE if no simplification can be made. */
8346 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8350 if (!validate_arg (arg0, REAL_TYPE)
8351 || !validate_arg (arg1, REAL_TYPE))
8354 /* Calculate the result when the argument is a constant. */
8355 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8358 /* Optimize pow(1.0,y) = 1.0. */
8359 if (real_onep (arg0))
8360 return omit_one_operand (type, build_real (type, dconst1), arg1);
8362 if (TREE_CODE (arg1) == REAL_CST
8363 && !TREE_OVERFLOW (arg1))
8365 REAL_VALUE_TYPE cint;
8369 c = TREE_REAL_CST (arg1);
8371 /* Optimize pow(x,0.0) = 1.0. */
8372 if (REAL_VALUES_EQUAL (c, dconst0))
8373 return omit_one_operand (type, build_real (type, dconst1),
8376 /* Optimize pow(x,1.0) = x. */
8377 if (REAL_VALUES_EQUAL (c, dconst1))
8380 /* Optimize pow(x,-1.0) = 1.0/x. */
8381 if (REAL_VALUES_EQUAL (c, dconstm1))
8382 return fold_build2 (RDIV_EXPR, type,
8383 build_real (type, dconst1), arg0);
8385 /* Optimize pow(x,0.5) = sqrt(x). */
8386 if (flag_unsafe_math_optimizations
8387 && REAL_VALUES_EQUAL (c, dconsthalf))
8389 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8391 if (sqrtfn != NULL_TREE)
8392 return build_call_expr (sqrtfn, 1, arg0);
8395 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8396 if (flag_unsafe_math_optimizations)
8398 const REAL_VALUE_TYPE dconstroot
8399 = real_value_truncate (TYPE_MODE (type),
8400 *get_real_const (rv_third));
8402 if (REAL_VALUES_EQUAL (c, dconstroot))
8404 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8405 if (cbrtfn != NULL_TREE)
8406 return build_call_expr (cbrtfn, 1, arg0);
8410 /* Check for an integer exponent. */
8411 n = real_to_integer (&c);
8412 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8413 if (real_identical (&c, &cint))
8415 /* Attempt to evaluate pow at compile-time. */
8416 if (TREE_CODE (arg0) == REAL_CST
8417 && !TREE_OVERFLOW (arg0))
8422 x = TREE_REAL_CST (arg0);
8423 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8424 if (flag_unsafe_math_optimizations || !inexact)
8425 return build_real (type, x);
8428 /* Strip sign ops from even integer powers. */
8429 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8431 tree narg0 = fold_strip_sign_ops (arg0);
8433 return build_call_expr (fndecl, 2, narg0, arg1);
8438 if (flag_unsafe_math_optimizations)
8440 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8442 /* Optimize pow(expN(x),y) = expN(x*y). */
8443 if (BUILTIN_EXPONENT_P (fcode))
8445 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8446 tree arg = CALL_EXPR_ARG (arg0, 0);
8447 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8448 return build_call_expr (expfn, 1, arg);
8451 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8452 if (BUILTIN_SQRT_P (fcode))
8454 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8455 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8456 build_real (type, dconsthalf));
8457 return build_call_expr (fndecl, 2, narg0, narg1);
8460 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8461 if (BUILTIN_CBRT_P (fcode))
8463 tree arg = CALL_EXPR_ARG (arg0, 0);
8464 if (tree_expr_nonnegative_p (arg))
8466 const REAL_VALUE_TYPE dconstroot
8467 = real_value_truncate (TYPE_MODE (type),
8468 *get_real_const (rv_third));
8469 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8470 build_real (type, dconstroot));
8471 return build_call_expr (fndecl, 2, arg, narg1);
8475 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8476 if (fcode == BUILT_IN_POW
8477 || fcode == BUILT_IN_POWF
8478 || fcode == BUILT_IN_POWL)
8480 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8481 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8482 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8483 return build_call_expr (fndecl, 2, arg00, narg1);
8490 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8491 Return NULL_TREE if no simplification can be made. */
8493 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8494 tree arg0, tree arg1, tree type)
8496 if (!validate_arg (arg0, REAL_TYPE)
8497 || !validate_arg (arg1, INTEGER_TYPE))
8500 /* Optimize pow(1.0,y) = 1.0. */
8501 if (real_onep (arg0))
8502 return omit_one_operand (type, build_real (type, dconst1), arg1);
8504 if (host_integerp (arg1, 0))
8506 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8508 /* Evaluate powi at compile-time. */
8509 if (TREE_CODE (arg0) == REAL_CST
8510 && !TREE_OVERFLOW (arg0))
8513 x = TREE_REAL_CST (arg0);
8514 real_powi (&x, TYPE_MODE (type), &x, c);
8515 return build_real (type, x);
8518 /* Optimize pow(x,0) = 1.0. */
8520 return omit_one_operand (type, build_real (type, dconst1),
8523 /* Optimize pow(x,1) = x. */
8527 /* Optimize pow(x,-1) = 1.0/x. */
8529 return fold_build2 (RDIV_EXPR, type,
8530 build_real (type, dconst1), arg0);
8536 /* A subroutine of fold_builtin to fold the various exponent
8537 functions. Return NULL_TREE if no simplification can be made.
8538 FUNC is the corresponding MPFR exponent function. */
8541 fold_builtin_exponent (tree fndecl, tree arg,
8542 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8544 if (validate_arg (arg, REAL_TYPE))
8546 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8549 /* Calculate the result when the argument is a constant. */
8550 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8553 /* Optimize expN(logN(x)) = x. */
8554 if (flag_unsafe_math_optimizations)
8556 const enum built_in_function fcode = builtin_mathfn_code (arg);
8558 if ((func == mpfr_exp
8559 && (fcode == BUILT_IN_LOG
8560 || fcode == BUILT_IN_LOGF
8561 || fcode == BUILT_IN_LOGL))
8562 || (func == mpfr_exp2
8563 && (fcode == BUILT_IN_LOG2
8564 || fcode == BUILT_IN_LOG2F
8565 || fcode == BUILT_IN_LOG2L))
8566 || (func == mpfr_exp10
8567 && (fcode == BUILT_IN_LOG10
8568 || fcode == BUILT_IN_LOG10F
8569 || fcode == BUILT_IN_LOG10L)))
8570 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8577 /* Return true if VAR is a VAR_DECL or a component thereof. */
8580 var_decl_component_p (tree var)
8583 while (handled_component_p (inner))
8584 inner = TREE_OPERAND (inner, 0);
8585 return SSA_VAR_P (inner);
8588 /* Fold function call to builtin memset. Return
8589 NULL_TREE if no simplification can be made. */
8592 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8595 unsigned HOST_WIDE_INT length, cval;
8597 if (! validate_arg (dest, POINTER_TYPE)
8598 || ! validate_arg (c, INTEGER_TYPE)
8599 || ! validate_arg (len, INTEGER_TYPE))
8602 if (! host_integerp (len, 1))
8605 /* If the LEN parameter is zero, return DEST. */
8606 if (integer_zerop (len))
8607 return omit_one_operand (type, dest, c);
8609 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8614 if (TREE_CODE (var) != ADDR_EXPR)
8617 var = TREE_OPERAND (var, 0);
8618 if (TREE_THIS_VOLATILE (var))
8621 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8622 && !POINTER_TYPE_P (TREE_TYPE (var)))
8625 if (! var_decl_component_p (var))
8628 length = tree_low_cst (len, 1);
8629 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8630 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8634 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8637 if (integer_zerop (c))
8641 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8644 cval = tree_low_cst (c, 1);
8648 cval |= (cval << 31) << 1;
8651 ret = build_int_cst_type (TREE_TYPE (var), cval);
8652 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8656 return omit_one_operand (type, dest, ret);
8659 /* Fold function call to builtin memset. Return
8660 NULL_TREE if no simplification can be made. */
8663 fold_builtin_bzero (tree dest, tree size, bool ignore)
8665 if (! validate_arg (dest, POINTER_TYPE)
8666 || ! validate_arg (size, INTEGER_TYPE))
8672 /* New argument list transforming bzero(ptr x, int y) to
8673 memset(ptr x, int 0, size_t y). This is done this way
8674 so that if it isn't expanded inline, we fallback to
8675 calling bzero instead of memset. */
8677 return fold_builtin_memset (dest, integer_zero_node,
8678 fold_convert (sizetype, size),
8679 void_type_node, ignore);
8682 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8683 NULL_TREE if no simplification can be made.
8684 If ENDP is 0, return DEST (like memcpy).
8685 If ENDP is 1, return DEST+LEN (like mempcpy).
8686 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8687 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8691 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8693 tree destvar, srcvar, expr;
8695 if (! validate_arg (dest, POINTER_TYPE)
8696 || ! validate_arg (src, POINTER_TYPE)
8697 || ! validate_arg (len, INTEGER_TYPE))
8700 /* If the LEN parameter is zero, return DEST. */
8701 if (integer_zerop (len))
8702 return omit_one_operand (type, dest, src);
8704 /* If SRC and DEST are the same (and not volatile), return
8705 DEST{,+LEN,+LEN-1}. */
8706 if (operand_equal_p (src, dest, 0))
8710 tree srctype, desttype;
8713 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8714 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8716 /* Both DEST and SRC must be pointer types.
8717 ??? This is what old code did. Is the testing for pointer types
8720 If either SRC is readonly or length is 1, we can use memcpy. */
8721 if (dest_align && src_align
8722 && (readonly_data_expr (src)
8723 || (host_integerp (len, 1)
8724 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8725 tree_low_cst (len, 1)))))
8727 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8730 return build_call_expr (fn, 3, dest, src, len);
8735 if (!host_integerp (len, 0))
8738 This logic lose for arguments like (type *)malloc (sizeof (type)),
8739 since we strip the casts of up to VOID return value from malloc.
8740 Perhaps we ought to inherit type from non-VOID argument here? */
8743 srctype = TREE_TYPE (TREE_TYPE (src));
8744 desttype = TREE_TYPE (TREE_TYPE (dest));
8745 if (!srctype || !desttype
8746 || !TYPE_SIZE_UNIT (srctype)
8747 || !TYPE_SIZE_UNIT (desttype)
8748 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8749 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8750 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8751 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8754 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8755 < (int) TYPE_ALIGN (desttype)
8756 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8757 < (int) TYPE_ALIGN (srctype)))
8761 dest = builtin_save_expr (dest);
8763 srcvar = build_fold_indirect_ref (src);
8764 if (TREE_THIS_VOLATILE (srcvar))
8766 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8768 /* With memcpy, it is possible to bypass aliasing rules, so without
8769 this check i. e. execute/20060930-2.c would be misoptimized, because
8770 it use conflicting alias set to hold argument for the memcpy call.
8771 This check is probably unnecesary with -fno-strict-aliasing.
8772 Similarly for destvar. See also PR29286. */
8773 if (!var_decl_component_p (srcvar)
8774 /* Accept: memcpy (*char_var, "test", 1); that simplify
8776 || is_gimple_min_invariant (srcvar)
8777 || readonly_data_expr (src))
8780 destvar = build_fold_indirect_ref (dest);
8781 if (TREE_THIS_VOLATILE (destvar))
8783 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8785 if (!var_decl_component_p (destvar))
8788 if (srctype == desttype
8789 || (gimple_in_ssa_p (cfun)
8790 && useless_type_conversion_p (desttype, srctype)))
8792 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8793 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8794 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8795 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8796 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8798 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8799 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8805 if (endp == 0 || endp == 3)
8806 return omit_one_operand (type, dest, expr);
8812 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8815 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8816 dest = fold_convert (type, dest);
8818 dest = omit_one_operand (type, dest, expr);
8822 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8823 If LEN is not NULL, it represents the length of the string to be
8824 copied. Return NULL_TREE if no simplification can be made. */
8827 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8831 if (!validate_arg (dest, POINTER_TYPE)
8832 || !validate_arg (src, POINTER_TYPE))
8835 /* If SRC and DEST are the same (and not volatile), return DEST. */
8836 if (operand_equal_p (src, dest, 0))
8837 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8842 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8848 len = c_strlen (src, 1);
8849 if (! len || TREE_SIDE_EFFECTS (len))
8853 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8854 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8855 build_call_expr (fn, 3, dest, src, len));
8858 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8859 If SLEN is not NULL, it represents the length of the source string.
8860 Return NULL_TREE if no simplification can be made. */
8863 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8867 if (!validate_arg (dest, POINTER_TYPE)
8868 || !validate_arg (src, POINTER_TYPE)
8869 || !validate_arg (len, INTEGER_TYPE))
8872 /* If the LEN parameter is zero, return DEST. */
8873 if (integer_zerop (len))
8874 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8876 /* We can't compare slen with len as constants below if len is not a
8878 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8882 slen = c_strlen (src, 1);
8884 /* Now, we must be passed a constant src ptr parameter. */
8885 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8888 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8890 /* We do not support simplification of this case, though we do
8891 support it when expanding trees into RTL. */
8892 /* FIXME: generate a call to __builtin_memset. */
8893 if (tree_int_cst_lt (slen, len))
8896 /* OK transform into builtin memcpy. */
8897 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8900 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8901 build_call_expr (fn, 3, dest, src, len));
8904 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8905 arguments to the call, and TYPE is its return type.
8906 Return NULL_TREE if no simplification can be made. */
8909 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8911 if (!validate_arg (arg1, POINTER_TYPE)
8912 || !validate_arg (arg2, INTEGER_TYPE)
8913 || !validate_arg (len, INTEGER_TYPE))
8919 if (TREE_CODE (arg2) != INTEGER_CST
8920 || !host_integerp (len, 1))
8923 p1 = c_getstr (arg1);
8924 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8930 if (target_char_cast (arg2, &c))
8933 r = memchr (p1, c, tree_low_cst (len, 1));
8936 return build_int_cst (TREE_TYPE (arg1), 0);
8938 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8940 return fold_convert (type, tem);
8946 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8947 Return NULL_TREE if no simplification can be made. */
8950 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8952 const char *p1, *p2;
8954 if (!validate_arg (arg1, POINTER_TYPE)
8955 || !validate_arg (arg2, POINTER_TYPE)
8956 || !validate_arg (len, INTEGER_TYPE))
8959 /* If the LEN parameter is zero, return zero. */
8960 if (integer_zerop (len))
8961 return omit_two_operands (integer_type_node, integer_zero_node,
8964 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8965 if (operand_equal_p (arg1, arg2, 0))
8966 return omit_one_operand (integer_type_node, integer_zero_node, len);
8968 p1 = c_getstr (arg1);
8969 p2 = c_getstr (arg2);
8971 /* If all arguments are constant, and the value of len is not greater
8972 than the lengths of arg1 and arg2, evaluate at compile-time. */
8973 if (host_integerp (len, 1) && p1 && p2
8974 && compare_tree_int (len, strlen (p1) + 1) <= 0
8975 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8977 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8980 return integer_one_node;
8982 return integer_minus_one_node;
8984 return integer_zero_node;
8987 /* If len parameter is one, return an expression corresponding to
8988 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8989 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8991 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8992 tree cst_uchar_ptr_node
8993 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8995 tree ind1 = fold_convert (integer_type_node,
8996 build1 (INDIRECT_REF, cst_uchar_node,
8997 fold_convert (cst_uchar_ptr_node,
8999 tree ind2 = fold_convert (integer_type_node,
9000 build1 (INDIRECT_REF, cst_uchar_node,
9001 fold_convert (cst_uchar_ptr_node,
9003 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9009 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9010 Return NULL_TREE if no simplification can be made. */
9013 fold_builtin_strcmp (tree arg1, tree arg2)
9015 const char *p1, *p2;
9017 if (!validate_arg (arg1, POINTER_TYPE)
9018 || !validate_arg (arg2, POINTER_TYPE))
9021 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9022 if (operand_equal_p (arg1, arg2, 0))
9023 return integer_zero_node;
9025 p1 = c_getstr (arg1);
9026 p2 = c_getstr (arg2);
9030 const int i = strcmp (p1, p2);
9032 return integer_minus_one_node;
9034 return integer_one_node;
9036 return integer_zero_node;
9039 /* If the second arg is "", return *(const unsigned char*)arg1. */
9040 if (p2 && *p2 == '\0')
9042 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9043 tree cst_uchar_ptr_node
9044 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9046 return fold_convert (integer_type_node,
9047 build1 (INDIRECT_REF, cst_uchar_node,
9048 fold_convert (cst_uchar_ptr_node,
9052 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9053 if (p1 && *p1 == '\0')
9055 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9056 tree cst_uchar_ptr_node
9057 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9059 tree temp = fold_convert (integer_type_node,
9060 build1 (INDIRECT_REF, cst_uchar_node,
9061 fold_convert (cst_uchar_ptr_node,
9063 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9069 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9070 Return NULL_TREE if no simplification can be made. */
9073 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9075 const char *p1, *p2;
9077 if (!validate_arg (arg1, POINTER_TYPE)
9078 || !validate_arg (arg2, POINTER_TYPE)
9079 || !validate_arg (len, INTEGER_TYPE))
9082 /* If the LEN parameter is zero, return zero. */
9083 if (integer_zerop (len))
9084 return omit_two_operands (integer_type_node, integer_zero_node,
9087 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9088 if (operand_equal_p (arg1, arg2, 0))
9089 return omit_one_operand (integer_type_node, integer_zero_node, len);
9091 p1 = c_getstr (arg1);
9092 p2 = c_getstr (arg2);
9094 if (host_integerp (len, 1) && p1 && p2)
9096 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9098 return integer_one_node;
9100 return integer_minus_one_node;
9102 return integer_zero_node;
9105 /* If the second arg is "", and the length is greater than zero,
9106 return *(const unsigned char*)arg1. */
9107 if (p2 && *p2 == '\0'
9108 && TREE_CODE (len) == INTEGER_CST
9109 && tree_int_cst_sgn (len) == 1)
9111 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9112 tree cst_uchar_ptr_node
9113 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9115 return fold_convert (integer_type_node,
9116 build1 (INDIRECT_REF, cst_uchar_node,
9117 fold_convert (cst_uchar_ptr_node,
9121 /* If the first arg is "", and the length is greater than zero,
9122 return -*(const unsigned char*)arg2. */
9123 if (p1 && *p1 == '\0'
9124 && TREE_CODE (len) == INTEGER_CST
9125 && tree_int_cst_sgn (len) == 1)
9127 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9128 tree cst_uchar_ptr_node
9129 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9131 tree temp = fold_convert (integer_type_node,
9132 build1 (INDIRECT_REF, cst_uchar_node,
9133 fold_convert (cst_uchar_ptr_node,
9135 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9138 /* If len parameter is one, return an expression corresponding to
9139 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9140 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9142 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9143 tree cst_uchar_ptr_node
9144 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9146 tree ind1 = fold_convert (integer_type_node,
9147 build1 (INDIRECT_REF, cst_uchar_node,
9148 fold_convert (cst_uchar_ptr_node,
9150 tree ind2 = fold_convert (integer_type_node,
9151 build1 (INDIRECT_REF, cst_uchar_node,
9152 fold_convert (cst_uchar_ptr_node,
9154 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9160 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9161 ARG. Return NULL_TREE if no simplification can be made. */
9164 fold_builtin_signbit (tree arg, tree type)
9168 if (!validate_arg (arg, REAL_TYPE))
9171 /* If ARG is a compile-time constant, determine the result. */
9172 if (TREE_CODE (arg) == REAL_CST
9173 && !TREE_OVERFLOW (arg))
9177 c = TREE_REAL_CST (arg);
9178 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9179 return fold_convert (type, temp);
9182 /* If ARG is non-negative, the result is always zero. */
9183 if (tree_expr_nonnegative_p (arg))
9184 return omit_one_operand (type, integer_zero_node, arg);
9186 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9187 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9188 return fold_build2 (LT_EXPR, type, arg,
9189 build_real (TREE_TYPE (arg), dconst0));
9194 /* Fold function call to builtin copysign, copysignf or copysignl with
9195 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9199 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9203 if (!validate_arg (arg1, REAL_TYPE)
9204 || !validate_arg (arg2, REAL_TYPE))
9207 /* copysign(X,X) is X. */
9208 if (operand_equal_p (arg1, arg2, 0))
9209 return fold_convert (type, arg1);
9211 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9212 if (TREE_CODE (arg1) == REAL_CST
9213 && TREE_CODE (arg2) == REAL_CST
9214 && !TREE_OVERFLOW (arg1)
9215 && !TREE_OVERFLOW (arg2))
9217 REAL_VALUE_TYPE c1, c2;
9219 c1 = TREE_REAL_CST (arg1);
9220 c2 = TREE_REAL_CST (arg2);
9221 /* c1.sign := c2.sign. */
9222 real_copysign (&c1, &c2);
9223 return build_real (type, c1);
9226 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9227 Remember to evaluate Y for side-effects. */
9228 if (tree_expr_nonnegative_p (arg2))
9229 return omit_one_operand (type,
9230 fold_build1 (ABS_EXPR, type, arg1),
9233 /* Strip sign changing operations for the first argument. */
9234 tem = fold_strip_sign_ops (arg1);
9236 return build_call_expr (fndecl, 2, tem, arg2);
9241 /* Fold a call to builtin isascii with argument ARG. */
9244 fold_builtin_isascii (tree arg)
9246 if (!validate_arg (arg, INTEGER_TYPE))
9250 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9251 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9252 build_int_cst (NULL_TREE,
9253 ~ (unsigned HOST_WIDE_INT) 0x7f));
9254 return fold_build2 (EQ_EXPR, integer_type_node,
9255 arg, integer_zero_node);
9259 /* Fold a call to builtin toascii with argument ARG. */
9262 fold_builtin_toascii (tree arg)
9264 if (!validate_arg (arg, INTEGER_TYPE))
9267 /* Transform toascii(c) -> (c & 0x7f). */
9268 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9269 build_int_cst (NULL_TREE, 0x7f));
9272 /* Fold a call to builtin isdigit with argument ARG. */
9275 fold_builtin_isdigit (tree arg)
9277 if (!validate_arg (arg, INTEGER_TYPE))
9281 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9282 /* According to the C standard, isdigit is unaffected by locale.
9283 However, it definitely is affected by the target character set. */
9284 unsigned HOST_WIDE_INT target_digit0
9285 = lang_hooks.to_target_charset ('0');
9287 if (target_digit0 == 0)
9290 arg = fold_convert (unsigned_type_node, arg);
9291 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9292 build_int_cst (unsigned_type_node, target_digit0));
9293 return fold_build2 (LE_EXPR, integer_type_node, arg,
9294 build_int_cst (unsigned_type_node, 9));
9298 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9301 fold_builtin_fabs (tree arg, tree type)
9303 if (!validate_arg (arg, REAL_TYPE))
9306 arg = fold_convert (type, arg);
9307 if (TREE_CODE (arg) == REAL_CST)
9308 return fold_abs_const (arg, type);
9309 return fold_build1 (ABS_EXPR, type, arg);
9312 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9315 fold_builtin_abs (tree arg, tree type)
9317 if (!validate_arg (arg, INTEGER_TYPE))
9320 arg = fold_convert (type, arg);
9321 if (TREE_CODE (arg) == INTEGER_CST)
9322 return fold_abs_const (arg, type);
9323 return fold_build1 (ABS_EXPR, type, arg);
9326 /* Fold a call to builtin fmin or fmax. */
9329 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9331 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9333 /* Calculate the result when the argument is a constant. */
9334 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9339 /* If either argument is NaN, return the other one. Avoid the
9340 transformation if we get (and honor) a signalling NaN. Using
9341 omit_one_operand() ensures we create a non-lvalue. */
9342 if (TREE_CODE (arg0) == REAL_CST
9343 && real_isnan (&TREE_REAL_CST (arg0))
9344 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9345 || ! TREE_REAL_CST (arg0).signalling))
9346 return omit_one_operand (type, arg1, arg0);
9347 if (TREE_CODE (arg1) == REAL_CST
9348 && real_isnan (&TREE_REAL_CST (arg1))
9349 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9350 || ! TREE_REAL_CST (arg1).signalling))
9351 return omit_one_operand (type, arg0, arg1);
9353 /* Transform fmin/fmax(x,x) -> x. */
9354 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9355 return omit_one_operand (type, arg0, arg1);
9357 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9358 functions to return the numeric arg if the other one is NaN.
9359 These tree codes don't honor that, so only transform if
9360 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9361 handled, so we don't have to worry about it either. */
9362 if (flag_finite_math_only)
9363 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9364 fold_convert (type, arg0),
9365 fold_convert (type, arg1));
9370 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9373 fold_builtin_carg (tree arg, tree type)
9375 if (validate_arg (arg, COMPLEX_TYPE))
9377 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9381 tree new_arg = builtin_save_expr (arg);
9382 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9383 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9384 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9391 /* Fold a call to builtin logb/ilogb. */
9394 fold_builtin_logb (tree arg, tree rettype)
9396 if (! validate_arg (arg, REAL_TYPE))
9401 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9403 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9409 /* If arg is Inf or NaN and we're logb, return it. */
9410 if (TREE_CODE (rettype) == REAL_TYPE)
9411 return fold_convert (rettype, arg);
9412 /* Fall through... */
9414 /* Zero may set errno and/or raise an exception for logb, also
9415 for ilogb we don't know FP_ILOGB0. */
9418 /* For normal numbers, proceed iff radix == 2. In GCC,
9419 normalized significands are in the range [0.5, 1.0). We
9420 want the exponent as if they were [1.0, 2.0) so get the
9421 exponent and subtract 1. */
9422 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9423 return fold_convert (rettype, build_int_cst (NULL_TREE,
9424 REAL_EXP (value)-1));
9432 /* Fold a call to builtin significand, if radix == 2. */
9435 fold_builtin_significand (tree arg, tree rettype)
9437 if (! validate_arg (arg, REAL_TYPE))
9442 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9444 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9451 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9452 return fold_convert (rettype, arg);
9454 /* For normal numbers, proceed iff radix == 2. */
9455 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9457 REAL_VALUE_TYPE result = *value;
9458 /* In GCC, normalized significands are in the range [0.5,
9459 1.0). We want them to be [1.0, 2.0) so set the
9461 SET_REAL_EXP (&result, 1);
9462 return build_real (rettype, result);
9471 /* Fold a call to builtin frexp, we can assume the base is 2. */
9474 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9476 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9481 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9484 arg1 = build_fold_indirect_ref (arg1);
9486 /* Proceed if a valid pointer type was passed in. */
9487 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9489 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9495 /* For +-0, return (*exp = 0, +-0). */
9496 exp = integer_zero_node;
9501 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9502 return omit_one_operand (rettype, arg0, arg1);
9505 /* Since the frexp function always expects base 2, and in
9506 GCC normalized significands are already in the range
9507 [0.5, 1.0), we have exactly what frexp wants. */
9508 REAL_VALUE_TYPE frac_rvt = *value;
9509 SET_REAL_EXP (&frac_rvt, 0);
9510 frac = build_real (rettype, frac_rvt);
9511 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9518 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9519 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9520 TREE_SIDE_EFFECTS (arg1) = 1;
9521 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9527 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9528 then we can assume the base is two. If it's false, then we have to
9529 check the mode of the TYPE parameter in certain cases. */
9532 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9534 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9539 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9540 if (real_zerop (arg0) || integer_zerop (arg1)
9541 || (TREE_CODE (arg0) == REAL_CST
9542 && !real_isfinite (&TREE_REAL_CST (arg0))))
9543 return omit_one_operand (type, arg0, arg1);
9545 /* If both arguments are constant, then try to evaluate it. */
9546 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9547 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9548 && host_integerp (arg1, 0))
9550 /* Bound the maximum adjustment to twice the range of the
9551 mode's valid exponents. Use abs to ensure the range is
9552 positive as a sanity check. */
9553 const long max_exp_adj = 2 *
9554 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9555 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9557 /* Get the user-requested adjustment. */
9558 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9560 /* The requested adjustment must be inside this range. This
9561 is a preliminary cap to avoid things like overflow, we
9562 may still fail to compute the result for other reasons. */
9563 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9565 REAL_VALUE_TYPE initial_result;
9567 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9569 /* Ensure we didn't overflow. */
9570 if (! real_isinf (&initial_result))
9572 const REAL_VALUE_TYPE trunc_result
9573 = real_value_truncate (TYPE_MODE (type), initial_result);
9575 /* Only proceed if the target mode can hold the
9577 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9578 return build_real (type, trunc_result);
9587 /* Fold a call to builtin modf. */
9590 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9592 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9597 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9600 arg1 = build_fold_indirect_ref (arg1);
9602 /* Proceed if a valid pointer type was passed in. */
9603 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9605 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9606 REAL_VALUE_TYPE trunc, frac;
9612 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9613 trunc = frac = *value;
9616 /* For +-Inf, return (*arg1 = arg0, +-0). */
9618 frac.sign = value->sign;
9622 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9623 real_trunc (&trunc, VOIDmode, value);
9624 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9625 /* If the original number was negative and already
9626 integral, then the fractional part is -0.0. */
9627 if (value->sign && frac.cl == rvc_zero)
9628 frac.sign = value->sign;
9632 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9633 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9634 build_real (rettype, trunc));
9635 TREE_SIDE_EFFECTS (arg1) = 1;
9636 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9637 build_real (rettype, frac));
9643 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9644 ARG is the argument for the call. */
9647 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9649 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9652 if (!validate_arg (arg, REAL_TYPE))
9654 error ("non-floating-point argument to function %qs",
9655 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9656 return error_mark_node;
9659 switch (builtin_index)
9661 case BUILT_IN_ISINF:
9662 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9663 return omit_one_operand (type, integer_zero_node, arg);
9665 if (TREE_CODE (arg) == REAL_CST)
9667 r = TREE_REAL_CST (arg);
9668 if (real_isinf (&r))
9669 return real_compare (GT_EXPR, &r, &dconst0)
9670 ? integer_one_node : integer_minus_one_node;
9672 return integer_zero_node;
9677 case BUILT_IN_ISFINITE:
9678 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9679 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9680 return omit_one_operand (type, integer_one_node, arg);
9682 if (TREE_CODE (arg) == REAL_CST)
9684 r = TREE_REAL_CST (arg);
9685 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9690 case BUILT_IN_ISNAN:
9691 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9692 return omit_one_operand (type, integer_zero_node, arg);
9694 if (TREE_CODE (arg) == REAL_CST)
9696 r = TREE_REAL_CST (arg);
9697 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9700 arg = builtin_save_expr (arg);
9701 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9708 /* Fold a call to an unordered comparison function such as
9709 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9710 being called and ARG0 and ARG1 are the arguments for the call.
9711 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9712 the opposite of the desired result. UNORDERED_CODE is used
9713 for modes that can hold NaNs and ORDERED_CODE is used for
9717 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9718 enum tree_code unordered_code,
9719 enum tree_code ordered_code)
9721 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9722 enum tree_code code;
9724 enum tree_code code0, code1;
9725 tree cmp_type = NULL_TREE;
9727 type0 = TREE_TYPE (arg0);
9728 type1 = TREE_TYPE (arg1);
9730 code0 = TREE_CODE (type0);
9731 code1 = TREE_CODE (type1);
9733 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9734 /* Choose the wider of two real types. */
9735 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9737 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9739 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9743 error ("non-floating-point argument to function %qs",
9744 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9745 return error_mark_node;
9748 arg0 = fold_convert (cmp_type, arg0);
9749 arg1 = fold_convert (cmp_type, arg1);
9751 if (unordered_code == UNORDERED_EXPR)
9753 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9754 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9755 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9758 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9760 return fold_build1 (TRUTH_NOT_EXPR, type,
9761 fold_build2 (code, type, arg0, arg1));
9764 /* Fold a call to built-in function FNDECL with 0 arguments.
9765 IGNORE is true if the result of the function call is ignored. This
9766 function returns NULL_TREE if no simplification was possible. */
9769 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9771 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9772 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9775 CASE_FLT_FN (BUILT_IN_INF):
9776 case BUILT_IN_INFD32:
9777 case BUILT_IN_INFD64:
9778 case BUILT_IN_INFD128:
9779 return fold_builtin_inf (type, true);
9781 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9782 return fold_builtin_inf (type, false);
9784 case BUILT_IN_CLASSIFY_TYPE:
9785 return fold_builtin_classify_type (NULL_TREE);
9793 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9794 IGNORE is true if the result of the function call is ignored. This
9795 function returns NULL_TREE if no simplification was possible. */
9798 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9800 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9801 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9805 case BUILT_IN_CONSTANT_P:
9807 tree val = fold_builtin_constant_p (arg0);
9809 /* Gimplification will pull the CALL_EXPR for the builtin out of
9810 an if condition. When not optimizing, we'll not CSE it back.
9811 To avoid link error types of regressions, return false now. */
9812 if (!val && !optimize)
9813 val = integer_zero_node;
9818 case BUILT_IN_CLASSIFY_TYPE:
9819 return fold_builtin_classify_type (arg0);
9821 case BUILT_IN_STRLEN:
9822 return fold_builtin_strlen (arg0);
9824 CASE_FLT_FN (BUILT_IN_FABS):
9825 return fold_builtin_fabs (arg0, type);
9829 case BUILT_IN_LLABS:
9830 case BUILT_IN_IMAXABS:
9831 return fold_builtin_abs (arg0, type);
9833 CASE_FLT_FN (BUILT_IN_CONJ):
9834 if (validate_arg (arg0, COMPLEX_TYPE))
9835 return fold_build1 (CONJ_EXPR, type, arg0);
9838 CASE_FLT_FN (BUILT_IN_CREAL):
9839 if (validate_arg (arg0, COMPLEX_TYPE))
9840 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
9843 CASE_FLT_FN (BUILT_IN_CIMAG):
9844 if (validate_arg (arg0, COMPLEX_TYPE))
9845 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9848 CASE_FLT_FN (BUILT_IN_CCOS):
9849 CASE_FLT_FN (BUILT_IN_CCOSH):
9850 /* These functions are "even", i.e. f(x) == f(-x). */
9851 if (validate_arg (arg0, COMPLEX_TYPE))
9853 tree narg = fold_strip_sign_ops (arg0);
9855 return build_call_expr (fndecl, 1, narg);
9859 CASE_FLT_FN (BUILT_IN_CABS):
9860 return fold_builtin_cabs (arg0, type, fndecl);
9862 CASE_FLT_FN (BUILT_IN_CARG):
9863 return fold_builtin_carg (arg0, type);
9865 CASE_FLT_FN (BUILT_IN_SQRT):
9866 return fold_builtin_sqrt (arg0, type);
9868 CASE_FLT_FN (BUILT_IN_CBRT):
9869 return fold_builtin_cbrt (arg0, type);
9871 CASE_FLT_FN (BUILT_IN_ASIN):
9872 if (validate_arg (arg0, REAL_TYPE))
9873 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9874 &dconstm1, &dconst1, true);
9877 CASE_FLT_FN (BUILT_IN_ACOS):
9878 if (validate_arg (arg0, REAL_TYPE))
9879 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9880 &dconstm1, &dconst1, true);
9883 CASE_FLT_FN (BUILT_IN_ATAN):
9884 if (validate_arg (arg0, REAL_TYPE))
9885 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9888 CASE_FLT_FN (BUILT_IN_ASINH):
9889 if (validate_arg (arg0, REAL_TYPE))
9890 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9893 CASE_FLT_FN (BUILT_IN_ACOSH):
9894 if (validate_arg (arg0, REAL_TYPE))
9895 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9896 &dconst1, NULL, true);
9899 CASE_FLT_FN (BUILT_IN_ATANH):
9900 if (validate_arg (arg0, REAL_TYPE))
9901 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9902 &dconstm1, &dconst1, false);
9905 CASE_FLT_FN (BUILT_IN_SIN):
9906 if (validate_arg (arg0, REAL_TYPE))
9907 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9910 CASE_FLT_FN (BUILT_IN_COS):
9911 return fold_builtin_cos (arg0, type, fndecl);
9914 CASE_FLT_FN (BUILT_IN_TAN):
9915 return fold_builtin_tan (arg0, type);
9917 CASE_FLT_FN (BUILT_IN_CEXP):
9918 return fold_builtin_cexp (arg0, type);
9920 CASE_FLT_FN (BUILT_IN_CEXPI):
9921 if (validate_arg (arg0, REAL_TYPE))
9922 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9925 CASE_FLT_FN (BUILT_IN_SINH):
9926 if (validate_arg (arg0, REAL_TYPE))
9927 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9930 CASE_FLT_FN (BUILT_IN_COSH):
9931 return fold_builtin_cosh (arg0, type, fndecl);
9933 CASE_FLT_FN (BUILT_IN_TANH):
9934 if (validate_arg (arg0, REAL_TYPE))
9935 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9938 CASE_FLT_FN (BUILT_IN_ERF):
9939 if (validate_arg (arg0, REAL_TYPE))
9940 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9943 CASE_FLT_FN (BUILT_IN_ERFC):
9944 if (validate_arg (arg0, REAL_TYPE))
9945 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9948 CASE_FLT_FN (BUILT_IN_TGAMMA):
9949 if (validate_arg (arg0, REAL_TYPE))
9950 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9953 CASE_FLT_FN (BUILT_IN_EXP):
9954 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
9956 CASE_FLT_FN (BUILT_IN_EXP2):
9957 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
9959 CASE_FLT_FN (BUILT_IN_EXP10):
9960 CASE_FLT_FN (BUILT_IN_POW10):
9961 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
9963 CASE_FLT_FN (BUILT_IN_EXPM1):
9964 if (validate_arg (arg0, REAL_TYPE))
9965 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9968 CASE_FLT_FN (BUILT_IN_LOG):
9969 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
9971 CASE_FLT_FN (BUILT_IN_LOG2):
9972 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
9974 CASE_FLT_FN (BUILT_IN_LOG10):
9975 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
9977 CASE_FLT_FN (BUILT_IN_LOG1P):
9978 if (validate_arg (arg0, REAL_TYPE))
9979 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9980 &dconstm1, NULL, false);
9983 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9984 CASE_FLT_FN (BUILT_IN_J0):
9985 if (validate_arg (arg0, REAL_TYPE))
9986 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9990 CASE_FLT_FN (BUILT_IN_J1):
9991 if (validate_arg (arg0, REAL_TYPE))
9992 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9996 CASE_FLT_FN (BUILT_IN_Y0):
9997 if (validate_arg (arg0, REAL_TYPE))
9998 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9999 &dconst0, NULL, false);
10002 CASE_FLT_FN (BUILT_IN_Y1):
10003 if (validate_arg (arg0, REAL_TYPE))
10004 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10005 &dconst0, NULL, false);
10009 CASE_FLT_FN (BUILT_IN_NAN):
10010 case BUILT_IN_NAND32:
10011 case BUILT_IN_NAND64:
10012 case BUILT_IN_NAND128:
10013 return fold_builtin_nan (arg0, type, true);
10015 CASE_FLT_FN (BUILT_IN_NANS):
10016 return fold_builtin_nan (arg0, type, false);
10018 CASE_FLT_FN (BUILT_IN_FLOOR):
10019 return fold_builtin_floor (fndecl, arg0);
10021 CASE_FLT_FN (BUILT_IN_CEIL):
10022 return fold_builtin_ceil (fndecl, arg0);
10024 CASE_FLT_FN (BUILT_IN_TRUNC):
10025 return fold_builtin_trunc (fndecl, arg0);
10027 CASE_FLT_FN (BUILT_IN_ROUND):
10028 return fold_builtin_round (fndecl, arg0);
10030 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10031 CASE_FLT_FN (BUILT_IN_RINT):
10032 return fold_trunc_transparent_mathfn (fndecl, arg0);
10034 CASE_FLT_FN (BUILT_IN_LCEIL):
10035 CASE_FLT_FN (BUILT_IN_LLCEIL):
10036 CASE_FLT_FN (BUILT_IN_LFLOOR):
10037 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10038 CASE_FLT_FN (BUILT_IN_LROUND):
10039 CASE_FLT_FN (BUILT_IN_LLROUND):
10040 return fold_builtin_int_roundingfn (fndecl, arg0);
10042 CASE_FLT_FN (BUILT_IN_LRINT):
10043 CASE_FLT_FN (BUILT_IN_LLRINT):
10044 return fold_fixed_mathfn (fndecl, arg0);
10046 case BUILT_IN_BSWAP32:
10047 case BUILT_IN_BSWAP64:
10048 return fold_builtin_bswap (fndecl, arg0);
10050 CASE_INT_FN (BUILT_IN_FFS):
10051 CASE_INT_FN (BUILT_IN_CLZ):
10052 CASE_INT_FN (BUILT_IN_CTZ):
10053 CASE_INT_FN (BUILT_IN_POPCOUNT):
10054 CASE_INT_FN (BUILT_IN_PARITY):
10055 return fold_builtin_bitop (fndecl, arg0);
10057 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10058 return fold_builtin_signbit (arg0, type);
10060 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10061 return fold_builtin_significand (arg0, type);
10063 CASE_FLT_FN (BUILT_IN_ILOGB):
10064 CASE_FLT_FN (BUILT_IN_LOGB):
10065 return fold_builtin_logb (arg0, type);
10067 case BUILT_IN_ISASCII:
10068 return fold_builtin_isascii (arg0);
10070 case BUILT_IN_TOASCII:
10071 return fold_builtin_toascii (arg0);
10073 case BUILT_IN_ISDIGIT:
10074 return fold_builtin_isdigit (arg0);
10076 CASE_FLT_FN (BUILT_IN_FINITE):
10077 case BUILT_IN_FINITED32:
10078 case BUILT_IN_FINITED64:
10079 case BUILT_IN_FINITED128:
10080 case BUILT_IN_ISFINITE:
10081 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10083 CASE_FLT_FN (BUILT_IN_ISINF):
10084 case BUILT_IN_ISINFD32:
10085 case BUILT_IN_ISINFD64:
10086 case BUILT_IN_ISINFD128:
10087 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10089 CASE_FLT_FN (BUILT_IN_ISNAN):
10090 case BUILT_IN_ISNAND32:
10091 case BUILT_IN_ISNAND64:
10092 case BUILT_IN_ISNAND128:
10093 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10095 case BUILT_IN_ISNORMAL:
10096 if (!validate_arg (arg0, REAL_TYPE))
10098 error ("non-floating-point argument to function %qs",
10099 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
10100 return error_mark_node;
10104 case BUILT_IN_PRINTF:
10105 case BUILT_IN_PRINTF_UNLOCKED:
10106 case BUILT_IN_VPRINTF:
10107 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10117 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10118 IGNORE is true if the result of the function call is ignored. This
10119 function returns NULL_TREE if no simplification was possible. */
10122 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10124 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10125 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10129 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10130 CASE_FLT_FN (BUILT_IN_JN):
10131 if (validate_arg (arg0, INTEGER_TYPE)
10132 && validate_arg (arg1, REAL_TYPE))
10133 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10136 CASE_FLT_FN (BUILT_IN_YN):
10137 if (validate_arg (arg0, INTEGER_TYPE)
10138 && validate_arg (arg1, REAL_TYPE))
10139 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10143 CASE_FLT_FN (BUILT_IN_DREM):
10144 CASE_FLT_FN (BUILT_IN_REMAINDER):
10145 if (validate_arg (arg0, REAL_TYPE)
10146 && validate_arg(arg1, REAL_TYPE))
10147 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10150 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10151 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10152 if (validate_arg (arg0, REAL_TYPE)
10153 && validate_arg(arg1, POINTER_TYPE))
10154 return do_mpfr_lgamma_r (arg0, arg1, type);
10158 CASE_FLT_FN (BUILT_IN_ATAN2):
10159 if (validate_arg (arg0, REAL_TYPE)
10160 && validate_arg(arg1, REAL_TYPE))
10161 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10164 CASE_FLT_FN (BUILT_IN_FDIM):
10165 if (validate_arg (arg0, REAL_TYPE)
10166 && validate_arg(arg1, REAL_TYPE))
10167 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10170 CASE_FLT_FN (BUILT_IN_HYPOT):
10171 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10173 CASE_FLT_FN (BUILT_IN_LDEXP):
10174 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10175 CASE_FLT_FN (BUILT_IN_SCALBN):
10176 CASE_FLT_FN (BUILT_IN_SCALBLN):
10177 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10179 CASE_FLT_FN (BUILT_IN_FREXP):
10180 return fold_builtin_frexp (arg0, arg1, type);
10182 CASE_FLT_FN (BUILT_IN_MODF):
10183 return fold_builtin_modf (arg0, arg1, type);
10185 case BUILT_IN_BZERO:
10186 return fold_builtin_bzero (arg0, arg1, ignore);
10188 case BUILT_IN_FPUTS:
10189 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10191 case BUILT_IN_FPUTS_UNLOCKED:
10192 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10194 case BUILT_IN_STRSTR:
10195 return fold_builtin_strstr (arg0, arg1, type);
10197 case BUILT_IN_STRCAT:
10198 return fold_builtin_strcat (arg0, arg1);
10200 case BUILT_IN_STRSPN:
10201 return fold_builtin_strspn (arg0, arg1);
10203 case BUILT_IN_STRCSPN:
10204 return fold_builtin_strcspn (arg0, arg1);
10206 case BUILT_IN_STRCHR:
10207 case BUILT_IN_INDEX:
10208 return fold_builtin_strchr (arg0, arg1, type);
10210 case BUILT_IN_STRRCHR:
10211 case BUILT_IN_RINDEX:
10212 return fold_builtin_strrchr (arg0, arg1, type);
10214 case BUILT_IN_STRCPY:
10215 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10217 case BUILT_IN_STRCMP:
10218 return fold_builtin_strcmp (arg0, arg1);
10220 case BUILT_IN_STRPBRK:
10221 return fold_builtin_strpbrk (arg0, arg1, type);
10223 case BUILT_IN_EXPECT:
10224 return fold_builtin_expect (arg0, arg1);
10226 CASE_FLT_FN (BUILT_IN_POW):
10227 return fold_builtin_pow (fndecl, arg0, arg1, type);
10229 CASE_FLT_FN (BUILT_IN_POWI):
10230 return fold_builtin_powi (fndecl, arg0, arg1, type);
10232 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10233 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10235 CASE_FLT_FN (BUILT_IN_FMIN):
10236 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10238 CASE_FLT_FN (BUILT_IN_FMAX):
10239 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10241 case BUILT_IN_ISGREATER:
10242 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10243 case BUILT_IN_ISGREATEREQUAL:
10244 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10245 case BUILT_IN_ISLESS:
10246 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10247 case BUILT_IN_ISLESSEQUAL:
10248 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10249 case BUILT_IN_ISLESSGREATER:
10250 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10251 case BUILT_IN_ISUNORDERED:
10252 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10255 /* We do the folding for va_start in the expander. */
10256 case BUILT_IN_VA_START:
10259 case BUILT_IN_SPRINTF:
10260 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10262 case BUILT_IN_OBJECT_SIZE:
10263 return fold_builtin_object_size (arg0, arg1);
10265 case BUILT_IN_PRINTF:
10266 case BUILT_IN_PRINTF_UNLOCKED:
10267 case BUILT_IN_VPRINTF:
10268 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10270 case BUILT_IN_PRINTF_CHK:
10271 case BUILT_IN_VPRINTF_CHK:
10272 if (!validate_arg (arg0, INTEGER_TYPE)
10273 || TREE_SIDE_EFFECTS (arg0))
10276 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10279 case BUILT_IN_FPRINTF:
10280 case BUILT_IN_FPRINTF_UNLOCKED:
10281 case BUILT_IN_VFPRINTF:
10282 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10291 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10292 and ARG2. IGNORE is true if the result of the function call is ignored.
10293 This function returns NULL_TREE if no simplification was possible. */
10296 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10298 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10299 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10303 CASE_FLT_FN (BUILT_IN_SINCOS):
10304 return fold_builtin_sincos (arg0, arg1, arg2);
10306 CASE_FLT_FN (BUILT_IN_FMA):
10307 if (validate_arg (arg0, REAL_TYPE)
10308 && validate_arg(arg1, REAL_TYPE)
10309 && validate_arg(arg2, REAL_TYPE))
10310 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10313 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10314 CASE_FLT_FN (BUILT_IN_REMQUO):
10315 if (validate_arg (arg0, REAL_TYPE)
10316 && validate_arg(arg1, REAL_TYPE)
10317 && validate_arg(arg2, POINTER_TYPE))
10318 return do_mpfr_remquo (arg0, arg1, arg2);
10322 case BUILT_IN_MEMSET:
10323 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10325 case BUILT_IN_BCOPY:
10326 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10328 case BUILT_IN_MEMCPY:
10329 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10331 case BUILT_IN_MEMPCPY:
10332 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10334 case BUILT_IN_MEMMOVE:
10335 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10337 case BUILT_IN_STRNCAT:
10338 return fold_builtin_strncat (arg0, arg1, arg2);
10340 case BUILT_IN_STRNCPY:
10341 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10343 case BUILT_IN_STRNCMP:
10344 return fold_builtin_strncmp (arg0, arg1, arg2);
10346 case BUILT_IN_MEMCHR:
10347 return fold_builtin_memchr (arg0, arg1, arg2, type);
10349 case BUILT_IN_BCMP:
10350 case BUILT_IN_MEMCMP:
10351 return fold_builtin_memcmp (arg0, arg1, arg2);;
10353 case BUILT_IN_SPRINTF:
10354 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10356 case BUILT_IN_STRCPY_CHK:
10357 case BUILT_IN_STPCPY_CHK:
10358 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10361 case BUILT_IN_STRCAT_CHK:
10362 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10364 case BUILT_IN_PRINTF_CHK:
10365 case BUILT_IN_VPRINTF_CHK:
10366 if (!validate_arg (arg0, INTEGER_TYPE)
10367 || TREE_SIDE_EFFECTS (arg0))
10370 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10373 case BUILT_IN_FPRINTF:
10374 case BUILT_IN_FPRINTF_UNLOCKED:
10375 case BUILT_IN_VFPRINTF:
10376 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10378 case BUILT_IN_FPRINTF_CHK:
10379 case BUILT_IN_VFPRINTF_CHK:
10380 if (!validate_arg (arg1, INTEGER_TYPE)
10381 || TREE_SIDE_EFFECTS (arg1))
10384 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10393 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10394 ARG2, and ARG3. IGNORE is true if the result of the function call is
10395 ignored. This function returns NULL_TREE if no simplification was
10399 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10402 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10406 case BUILT_IN_MEMCPY_CHK:
10407 case BUILT_IN_MEMPCPY_CHK:
10408 case BUILT_IN_MEMMOVE_CHK:
10409 case BUILT_IN_MEMSET_CHK:
10410 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10412 DECL_FUNCTION_CODE (fndecl));
10414 case BUILT_IN_STRNCPY_CHK:
10415 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10417 case BUILT_IN_STRNCAT_CHK:
10418 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10420 case BUILT_IN_FPRINTF_CHK:
10421 case BUILT_IN_VFPRINTF_CHK:
10422 if (!validate_arg (arg1, INTEGER_TYPE)
10423 || TREE_SIDE_EFFECTS (arg1))
10426 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10436 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10437 arguments, where NARGS <= 4. IGNORE is true if the result of the
10438 function call is ignored. This function returns NULL_TREE if no
10439 simplification was possible. Note that this only folds builtins with
10440 fixed argument patterns. Foldings that do varargs-to-varargs
10441 transformations, or that match calls with more than 4 arguments,
10442 need to be handled with fold_builtin_varargs instead. */
10444 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10447 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10449 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10450 tree ret = NULL_TREE;
10452 /* Verify the number of arguments for type-generic and thus variadic
10456 case BUILT_IN_ISFINITE:
10457 case BUILT_IN_ISINF:
10458 case BUILT_IN_ISNAN:
10459 case BUILT_IN_ISNORMAL:
10462 error ("too few arguments to function %qs",
10463 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
10464 return error_mark_node;
10466 else if (nargs > 1)
10468 error ("too many arguments to function %qs",
10469 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
10470 return error_mark_node;
10474 case BUILT_IN_ISGREATER:
10475 case BUILT_IN_ISGREATEREQUAL:
10476 case BUILT_IN_ISLESS:
10477 case BUILT_IN_ISLESSEQUAL:
10478 case BUILT_IN_ISLESSGREATER:
10479 case BUILT_IN_ISUNORDERED:
10482 error ("too few arguments to function %qs",
10483 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
10484 return error_mark_node;
10486 else if (nargs > 2)
10488 error ("too many arguments to function %qs",
10489 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
10490 return error_mark_node;
10501 ret = fold_builtin_0 (fndecl, ignore);
10504 ret = fold_builtin_1 (fndecl, args[0], ignore);
10507 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10510 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10513 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10521 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10522 TREE_NO_WARNING (ret) = 1;
10528 /* Builtins with folding operations that operate on "..." arguments
10529 need special handling; we need to store the arguments in a convenient
10530 data structure before attempting any folding. Fortunately there are
10531 only a few builtins that fall into this category. FNDECL is the
10532 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10533 result of the function call is ignored. */
10536 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10538 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10539 tree ret = NULL_TREE;
10543 case BUILT_IN_SPRINTF_CHK:
10544 case BUILT_IN_VSPRINTF_CHK:
10545 ret = fold_builtin_sprintf_chk (exp, fcode);
10548 case BUILT_IN_SNPRINTF_CHK:
10549 case BUILT_IN_VSNPRINTF_CHK:
10550 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10557 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10558 TREE_NO_WARNING (ret) = 1;
10564 /* A wrapper function for builtin folding that prevents warnings for
10565 "statement without effect" and the like, caused by removing the
10566 call node earlier than the warning is generated. */
10569 fold_call_expr (tree exp, bool ignore)
10571 tree ret = NULL_TREE;
10572 tree fndecl = get_callee_fndecl (exp);
10574 && TREE_CODE (fndecl) == FUNCTION_DECL
10575 && DECL_BUILT_IN (fndecl)
10576 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10577 yet. Defer folding until we see all the arguments
10578 (after inlining). */
10579 && !CALL_EXPR_VA_ARG_PACK (exp))
10581 int nargs = call_expr_nargs (exp);
10583 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10584 instead last argument is __builtin_va_arg_pack (). Defer folding
10585 even in that case, until arguments are finalized. */
10586 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10588 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10590 && TREE_CODE (fndecl2) == FUNCTION_DECL
10591 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10592 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10596 /* FIXME: Don't use a list in this interface. */
10597 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10598 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10601 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10603 tree *args = CALL_EXPR_ARGP (exp);
10604 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10607 ret = fold_builtin_varargs (fndecl, exp, ignore);
10610 /* Propagate location information from original call to
10611 expansion of builtin. Otherwise things like
10612 maybe_emit_chk_warning, that operate on the expansion
10613 of a builtin, will use the wrong location information. */
10614 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10616 tree realret = ret;
10617 if (TREE_CODE (ret) == NOP_EXPR)
10618 realret = TREE_OPERAND (ret, 0);
10619 if (CAN_HAVE_LOCATION_P (realret)
10620 && !EXPR_HAS_LOCATION (realret))
10621 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10630 /* Conveniently construct a function call expression. FNDECL names the
10631 function to be called and ARGLIST is a TREE_LIST of arguments. */
10634 build_function_call_expr (tree fndecl, tree arglist)
10636 tree fntype = TREE_TYPE (fndecl);
10637 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10638 int n = list_length (arglist);
10639 tree *argarray = (tree *) alloca (n * sizeof (tree));
10642 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10643 argarray[i] = TREE_VALUE (arglist);
10644 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10647 /* Conveniently construct a function call expression. FNDECL names the
10648 function to be called, N is the number of arguments, and the "..."
10649 parameters are the argument expressions. */
10652 build_call_expr (tree fndecl, int n, ...)
10655 tree fntype = TREE_TYPE (fndecl);
10656 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10657 tree *argarray = (tree *) alloca (n * sizeof (tree));
10661 for (i = 0; i < n; i++)
10662 argarray[i] = va_arg (ap, tree);
10664 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10667 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10668 N arguments are passed in the array ARGARRAY. */
10671 fold_builtin_call_array (tree type,
10676 tree ret = NULL_TREE;
10680 if (TREE_CODE (fn) == ADDR_EXPR)
10682 tree fndecl = TREE_OPERAND (fn, 0);
10683 if (TREE_CODE (fndecl) == FUNCTION_DECL
10684 && DECL_BUILT_IN (fndecl))
10686 /* If last argument is __builtin_va_arg_pack (), arguments to this
10687 function are not finalized yet. Defer folding until they are. */
10688 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10690 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10692 && TREE_CODE (fndecl2) == FUNCTION_DECL
10693 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10694 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10695 return build_call_array (type, fn, n, argarray);
10697 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10699 tree arglist = NULL_TREE;
10700 for (i = n - 1; i >= 0; i--)
10701 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10702 ret = targetm.fold_builtin (fndecl, arglist, false);
10706 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10708 /* First try the transformations that don't require consing up
10710 ret = fold_builtin_n (fndecl, argarray, n, false);
10715 /* If we got this far, we need to build an exp. */
10716 exp = build_call_array (type, fn, n, argarray);
10717 ret = fold_builtin_varargs (fndecl, exp, false);
10718 return ret ? ret : exp;
10722 return build_call_array (type, fn, n, argarray);
10725 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10726 along with N new arguments specified as the "..." parameters. SKIP
10727 is the number of arguments in EXP to be omitted. This function is used
10728 to do varargs-to-varargs transformations. */
10731 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10733 int oldnargs = call_expr_nargs (exp);
10734 int nargs = oldnargs - skip + n;
10735 tree fntype = TREE_TYPE (fndecl);
10736 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10744 buffer = alloca (nargs * sizeof (tree));
10746 for (i = 0; i < n; i++)
10747 buffer[i] = va_arg (ap, tree);
10749 for (j = skip; j < oldnargs; j++, i++)
10750 buffer[i] = CALL_EXPR_ARG (exp, j);
10753 buffer = CALL_EXPR_ARGP (exp) + skip;
10755 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10758 /* Validate a single argument ARG against a tree code CODE representing
10762 validate_arg (const_tree arg, enum tree_code code)
10766 else if (code == POINTER_TYPE)
10767 return POINTER_TYPE_P (TREE_TYPE (arg));
10768 else if (code == INTEGER_TYPE)
10769 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10770 return code == TREE_CODE (TREE_TYPE (arg));
10773 /* This function validates the types of a function call argument list
10774 against a specified list of tree_codes. If the last specifier is a 0,
10775 that represents an ellipses, otherwise the last specifier must be a
10779 validate_arglist (const_tree callexpr, ...)
10781 enum tree_code code;
10784 const_call_expr_arg_iterator iter;
10787 va_start (ap, callexpr);
10788 init_const_call_expr_arg_iterator (callexpr, &iter);
10792 code = va_arg (ap, enum tree_code);
10796 /* This signifies an ellipses, any further arguments are all ok. */
10800 /* This signifies an endlink, if no arguments remain, return
10801 true, otherwise return false. */
10802 res = !more_const_call_expr_args_p (&iter);
10805 /* If no parameters remain or the parameter's code does not
10806 match the specified code, return false. Otherwise continue
10807 checking any remaining arguments. */
10808 arg = next_const_call_expr_arg (&iter);
10809 if (!validate_arg (arg, code))
10816 /* We need gotos here since we can only have one VA_CLOSE in a
10824 /* Default target-specific builtin expander that does nothing. */
10827 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10828 rtx target ATTRIBUTE_UNUSED,
10829 rtx subtarget ATTRIBUTE_UNUSED,
10830 enum machine_mode mode ATTRIBUTE_UNUSED,
10831 int ignore ATTRIBUTE_UNUSED)
10836 /* Returns true is EXP represents data that would potentially reside
10837 in a readonly section. */
10840 readonly_data_expr (tree exp)
10844 if (TREE_CODE (exp) != ADDR_EXPR)
10847 exp = get_base_address (TREE_OPERAND (exp, 0));
10851 /* Make sure we call decl_readonly_section only for trees it
10852 can handle (since it returns true for everything it doesn't
10854 if (TREE_CODE (exp) == STRING_CST
10855 || TREE_CODE (exp) == CONSTRUCTOR
10856 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10857 return decl_readonly_section (exp, 0);
10862 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10863 to the call, and TYPE is its return type.
10865 Return NULL_TREE if no simplification was possible, otherwise return the
10866 simplified form of the call as a tree.
10868 The simplified form may be a constant or other expression which
10869 computes the same value, but in a more efficient manner (including
10870 calls to other builtin functions).
10872 The call may contain arguments which need to be evaluated, but
10873 which are not useful to determine the result of the call. In
10874 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10875 COMPOUND_EXPR will be an argument which must be evaluated.
10876 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10877 COMPOUND_EXPR in the chain will contain the tree for the simplified
10878 form of the builtin function call. */
10881 fold_builtin_strstr (tree s1, tree s2, tree type)
10883 if (!validate_arg (s1, POINTER_TYPE)
10884 || !validate_arg (s2, POINTER_TYPE))
10889 const char *p1, *p2;
10891 p2 = c_getstr (s2);
10895 p1 = c_getstr (s1);
10898 const char *r = strstr (p1, p2);
10902 return build_int_cst (TREE_TYPE (s1), 0);
10904 /* Return an offset into the constant string argument. */
10905 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10906 s1, size_int (r - p1));
10907 return fold_convert (type, tem);
10910 /* The argument is const char *, and the result is char *, so we need
10911 a type conversion here to avoid a warning. */
10913 return fold_convert (type, s1);
10918 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10922 /* New argument list transforming strstr(s1, s2) to
10923 strchr(s1, s2[0]). */
10924 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10928 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10929 the call, and TYPE is its return type.
10931 Return NULL_TREE if no simplification was possible, otherwise return the
10932 simplified form of the call as a tree.
10934 The simplified form may be a constant or other expression which
10935 computes the same value, but in a more efficient manner (including
10936 calls to other builtin functions).
10938 The call may contain arguments which need to be evaluated, but
10939 which are not useful to determine the result of the call. In
10940 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10941 COMPOUND_EXPR will be an argument which must be evaluated.
10942 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10943 COMPOUND_EXPR in the chain will contain the tree for the simplified
10944 form of the builtin function call. */
10947 fold_builtin_strchr (tree s1, tree s2, tree type)
10949 if (!validate_arg (s1, POINTER_TYPE)
10950 || !validate_arg (s2, INTEGER_TYPE))
10956 if (TREE_CODE (s2) != INTEGER_CST)
10959 p1 = c_getstr (s1);
10966 if (target_char_cast (s2, &c))
10969 r = strchr (p1, c);
10972 return build_int_cst (TREE_TYPE (s1), 0);
10974 /* Return an offset into the constant string argument. */
10975 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10976 s1, size_int (r - p1));
10977 return fold_convert (type, tem);
10983 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10984 the call, and TYPE is its return type.
10986 Return NULL_TREE if no simplification was possible, otherwise return the
10987 simplified form of the call as a tree.
10989 The simplified form may be a constant or other expression which
10990 computes the same value, but in a more efficient manner (including
10991 calls to other builtin functions).
10993 The call may contain arguments which need to be evaluated, but
10994 which are not useful to determine the result of the call. In
10995 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10996 COMPOUND_EXPR will be an argument which must be evaluated.
10997 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10998 COMPOUND_EXPR in the chain will contain the tree for the simplified
10999 form of the builtin function call. */
11002 fold_builtin_strrchr (tree s1, tree s2, tree type)
11004 if (!validate_arg (s1, POINTER_TYPE)
11005 || !validate_arg (s2, INTEGER_TYPE))
11012 if (TREE_CODE (s2) != INTEGER_CST)
11015 p1 = c_getstr (s1);
11022 if (target_char_cast (s2, &c))
11025 r = strrchr (p1, c);
11028 return build_int_cst (TREE_TYPE (s1), 0);
11030 /* Return an offset into the constant string argument. */
11031 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11032 s1, size_int (r - p1));
11033 return fold_convert (type, tem);
11036 if (! integer_zerop (s2))
11039 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11043 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11044 return build_call_expr (fn, 2, s1, s2);
11048 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11049 to the call, and TYPE is its return type.
11051 Return NULL_TREE if no simplification was possible, otherwise return the
11052 simplified form of the call as a tree.
11054 The simplified form may be a constant or other expression which
11055 computes the same value, but in a more efficient manner (including
11056 calls to other builtin functions).
11058 The call may contain arguments which need to be evaluated, but
11059 which are not useful to determine the result of the call. In
11060 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11061 COMPOUND_EXPR will be an argument which must be evaluated.
11062 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11063 COMPOUND_EXPR in the chain will contain the tree for the simplified
11064 form of the builtin function call. */
11067 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11069 if (!validate_arg (s1, POINTER_TYPE)
11070 || !validate_arg (s2, POINTER_TYPE))
11075 const char *p1, *p2;
11077 p2 = c_getstr (s2);
11081 p1 = c_getstr (s1);
11084 const char *r = strpbrk (p1, p2);
11088 return build_int_cst (TREE_TYPE (s1), 0);
11090 /* Return an offset into the constant string argument. */
11091 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11092 s1, size_int (r - p1));
11093 return fold_convert (type, tem);
11097 /* strpbrk(x, "") == NULL.
11098 Evaluate and ignore s1 in case it had side-effects. */
11099 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11102 return NULL_TREE; /* Really call strpbrk. */
11104 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11108 /* New argument list transforming strpbrk(s1, s2) to
11109 strchr(s1, s2[0]). */
11110 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11114 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11117 Return NULL_TREE if no simplification was possible, otherwise return the
11118 simplified form of the call as a tree.
11120 The simplified form may be a constant or other expression which
11121 computes the same value, but in a more efficient manner (including
11122 calls to other builtin functions).
11124 The call may contain arguments which need to be evaluated, but
11125 which are not useful to determine the result of the call. In
11126 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11127 COMPOUND_EXPR will be an argument which must be evaluated.
11128 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11129 COMPOUND_EXPR in the chain will contain the tree for the simplified
11130 form of the builtin function call. */
11133 fold_builtin_strcat (tree dst, tree src)
11135 if (!validate_arg (dst, POINTER_TYPE)
11136 || !validate_arg (src, POINTER_TYPE))
11140 const char *p = c_getstr (src);
11142 /* If the string length is zero, return the dst parameter. */
11143 if (p && *p == '\0')
11150 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11151 arguments to the call.
11153 Return NULL_TREE if no simplification was possible, otherwise return the
11154 simplified form of the call as a tree.
11156 The simplified form may be a constant or other expression which
11157 computes the same value, but in a more efficient manner (including
11158 calls to other builtin functions).
11160 The call may contain arguments which need to be evaluated, but
11161 which are not useful to determine the result of the call. In
11162 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11163 COMPOUND_EXPR will be an argument which must be evaluated.
11164 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11165 COMPOUND_EXPR in the chain will contain the tree for the simplified
11166 form of the builtin function call. */
11169 fold_builtin_strncat (tree dst, tree src, tree len)
11171 if (!validate_arg (dst, POINTER_TYPE)
11172 || !validate_arg (src, POINTER_TYPE)
11173 || !validate_arg (len, INTEGER_TYPE))
11177 const char *p = c_getstr (src);
11179 /* If the requested length is zero, or the src parameter string
11180 length is zero, return the dst parameter. */
11181 if (integer_zerop (len) || (p && *p == '\0'))
11182 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11184 /* If the requested len is greater than or equal to the string
11185 length, call strcat. */
11186 if (TREE_CODE (len) == INTEGER_CST && p
11187 && compare_tree_int (len, strlen (p)) >= 0)
11189 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11191 /* If the replacement _DECL isn't initialized, don't do the
11196 return build_call_expr (fn, 2, dst, src);
11202 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11205 Return NULL_TREE if no simplification was possible, otherwise return the
11206 simplified form of the call as a tree.
11208 The simplified form may be a constant or other expression which
11209 computes the same value, but in a more efficient manner (including
11210 calls to other builtin functions).
11212 The call may contain arguments which need to be evaluated, but
11213 which are not useful to determine the result of the call. In
11214 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11215 COMPOUND_EXPR will be an argument which must be evaluated.
11216 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11217 COMPOUND_EXPR in the chain will contain the tree for the simplified
11218 form of the builtin function call. */
11221 fold_builtin_strspn (tree s1, tree s2)
11223 if (!validate_arg (s1, POINTER_TYPE)
11224 || !validate_arg (s2, POINTER_TYPE))
11228 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11230 /* If both arguments are constants, evaluate at compile-time. */
11233 const size_t r = strspn (p1, p2);
11234 return size_int (r);
11237 /* If either argument is "", return NULL_TREE. */
11238 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11239 /* Evaluate and ignore both arguments in case either one has
11241 return omit_two_operands (integer_type_node, integer_zero_node,
11247 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11250 Return NULL_TREE if no simplification was possible, otherwise return the
11251 simplified form of the call as a tree.
11253 The simplified form may be a constant or other expression which
11254 computes the same value, but in a more efficient manner (including
11255 calls to other builtin functions).
11257 The call may contain arguments which need to be evaluated, but
11258 which are not useful to determine the result of the call. In
11259 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11260 COMPOUND_EXPR will be an argument which must be evaluated.
11261 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11262 COMPOUND_EXPR in the chain will contain the tree for the simplified
11263 form of the builtin function call. */
11266 fold_builtin_strcspn (tree s1, tree s2)
11268 if (!validate_arg (s1, POINTER_TYPE)
11269 || !validate_arg (s2, POINTER_TYPE))
11273 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11275 /* If both arguments are constants, evaluate at compile-time. */
11278 const size_t r = strcspn (p1, p2);
11279 return size_int (r);
11282 /* If the first argument is "", return NULL_TREE. */
11283 if (p1 && *p1 == '\0')
11285 /* Evaluate and ignore argument s2 in case it has
11287 return omit_one_operand (integer_type_node,
11288 integer_zero_node, s2);
11291 /* If the second argument is "", return __builtin_strlen(s1). */
11292 if (p2 && *p2 == '\0')
11294 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11296 /* If the replacement _DECL isn't initialized, don't do the
11301 return build_call_expr (fn, 1, s1);
11307 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11308 to the call. IGNORE is true if the value returned
11309 by the builtin will be ignored. UNLOCKED is true is true if this
11310 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11311 the known length of the string. Return NULL_TREE if no simplification
11315 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11317 /* If we're using an unlocked function, assume the other unlocked
11318 functions exist explicitly. */
11319 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11320 : implicit_built_in_decls[BUILT_IN_FPUTC];
11321 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11322 : implicit_built_in_decls[BUILT_IN_FWRITE];
11324 /* If the return value is used, don't do the transformation. */
11328 /* Verify the arguments in the original call. */
11329 if (!validate_arg (arg0, POINTER_TYPE)
11330 || !validate_arg (arg1, POINTER_TYPE))
11334 len = c_strlen (arg0, 0);
11336 /* Get the length of the string passed to fputs. If the length
11337 can't be determined, punt. */
11339 || TREE_CODE (len) != INTEGER_CST)
11342 switch (compare_tree_int (len, 1))
11344 case -1: /* length is 0, delete the call entirely . */
11345 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11347 case 0: /* length is 1, call fputc. */
11349 const char *p = c_getstr (arg0);
11354 return build_call_expr (fn_fputc, 2,
11355 build_int_cst (NULL_TREE, p[0]), arg1);
11361 case 1: /* length is greater than 1, call fwrite. */
11363 /* If optimizing for size keep fputs. */
11366 /* New argument list transforming fputs(string, stream) to
11367 fwrite(string, 1, len, stream). */
11369 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11374 gcc_unreachable ();
11379 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11380 produced. False otherwise. This is done so that we don't output the error
11381 or warning twice or three times. */
11383 fold_builtin_next_arg (tree exp, bool va_start_p)
11385 tree fntype = TREE_TYPE (current_function_decl);
11386 int nargs = call_expr_nargs (exp);
11389 if (TYPE_ARG_TYPES (fntype) == 0
11390 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11391 == void_type_node))
11393 error ("%<va_start%> used in function with fixed args");
11399 if (va_start_p && (nargs != 2))
11401 error ("wrong number of arguments to function %<va_start%>");
11404 arg = CALL_EXPR_ARG (exp, 1);
11406 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11407 when we checked the arguments and if needed issued a warning. */
11412 /* Evidently an out of date version of <stdarg.h>; can't validate
11413 va_start's second argument, but can still work as intended. */
11414 warning (0, "%<__builtin_next_arg%> called without an argument");
11417 else if (nargs > 1)
11419 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11422 arg = CALL_EXPR_ARG (exp, 0);
11425 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11426 or __builtin_next_arg (0) the first time we see it, after checking
11427 the arguments and if needed issuing a warning. */
11428 if (!integer_zerop (arg))
11430 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11432 /* Strip off all nops for the sake of the comparison. This
11433 is not quite the same as STRIP_NOPS. It does more.
11434 We must also strip off INDIRECT_EXPR for C++ reference
11436 while (TREE_CODE (arg) == NOP_EXPR
11437 || TREE_CODE (arg) == CONVERT_EXPR
11438 || TREE_CODE (arg) == NON_LVALUE_EXPR
11439 || TREE_CODE (arg) == INDIRECT_REF)
11440 arg = TREE_OPERAND (arg, 0);
11441 if (arg != last_parm)
11443 /* FIXME: Sometimes with the tree optimizers we can get the
11444 not the last argument even though the user used the last
11445 argument. We just warn and set the arg to be the last
11446 argument so that we will get wrong-code because of
11448 warning (0, "second parameter of %<va_start%> not last named argument");
11450 /* We want to verify the second parameter just once before the tree
11451 optimizers are run and then avoid keeping it in the tree,
11452 as otherwise we could warn even for correct code like:
11453 void foo (int i, ...)
11454 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11456 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11458 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11464 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11465 ORIG may be null if this is a 2-argument call. We don't attempt to
11466 simplify calls with more than 3 arguments.
11468 Return NULL_TREE if no simplification was possible, otherwise return the
11469 simplified form of the call as a tree. If IGNORED is true, it means that
11470 the caller does not use the returned value of the function. */
11473 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11476 const char *fmt_str = NULL;
11478 /* Verify the required arguments in the original call. We deal with two
11479 types of sprintf() calls: 'sprintf (str, fmt)' and
11480 'sprintf (dest, "%s", orig)'. */
11481 if (!validate_arg (dest, POINTER_TYPE)
11482 || !validate_arg (fmt, POINTER_TYPE))
11484 if (orig && !validate_arg (orig, POINTER_TYPE))
11487 /* Check whether the format is a literal string constant. */
11488 fmt_str = c_getstr (fmt);
11489 if (fmt_str == NULL)
11493 retval = NULL_TREE;
11495 if (!init_target_chars ())
11498 /* If the format doesn't contain % args or %%, use strcpy. */
11499 if (strchr (fmt_str, target_percent) == NULL)
11501 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11506 /* Don't optimize sprintf (buf, "abc", ptr++). */
11510 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11511 'format' is known to contain no % formats. */
11512 call = build_call_expr (fn, 2, dest, fmt);
11514 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11517 /* If the format is "%s", use strcpy if the result isn't used. */
11518 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11521 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11526 /* Don't crash on sprintf (str1, "%s"). */
11530 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11533 retval = c_strlen (orig, 1);
11534 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11537 call = build_call_expr (fn, 2, dest, orig);
11540 if (call && retval)
11542 retval = fold_convert
11543 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11545 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11551 /* Expand a call EXP to __builtin_object_size. */
11554 expand_builtin_object_size (tree exp)
11557 int object_size_type;
11558 tree fndecl = get_callee_fndecl (exp);
11560 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11562 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11564 expand_builtin_trap ();
11568 ost = CALL_EXPR_ARG (exp, 1);
11571 if (TREE_CODE (ost) != INTEGER_CST
11572 || tree_int_cst_sgn (ost) < 0
11573 || compare_tree_int (ost, 3) > 0)
11575 error ("%Klast argument of %D is not integer constant between 0 and 3",
11577 expand_builtin_trap ();
11581 object_size_type = tree_low_cst (ost, 0);
11583 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11586 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11587 FCODE is the BUILT_IN_* to use.
11588 Return NULL_RTX if we failed; the caller should emit a normal call,
11589 otherwise try to get the result in TARGET, if convenient (and in
11590 mode MODE if that's convenient). */
11593 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11594 enum built_in_function fcode)
11596 tree dest, src, len, size;
11598 if (!validate_arglist (exp,
11600 fcode == BUILT_IN_MEMSET_CHK
11601 ? INTEGER_TYPE : POINTER_TYPE,
11602 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11605 dest = CALL_EXPR_ARG (exp, 0);
11606 src = CALL_EXPR_ARG (exp, 1);
11607 len = CALL_EXPR_ARG (exp, 2);
11608 size = CALL_EXPR_ARG (exp, 3);
11610 if (! host_integerp (size, 1))
11613 if (host_integerp (len, 1) || integer_all_onesp (size))
11617 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11619 warning (0, "%Kcall to %D will always overflow destination buffer",
11620 exp, get_callee_fndecl (exp));
11625 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11626 mem{cpy,pcpy,move,set} is available. */
11629 case BUILT_IN_MEMCPY_CHK:
11630 fn = built_in_decls[BUILT_IN_MEMCPY];
11632 case BUILT_IN_MEMPCPY_CHK:
11633 fn = built_in_decls[BUILT_IN_MEMPCPY];
11635 case BUILT_IN_MEMMOVE_CHK:
11636 fn = built_in_decls[BUILT_IN_MEMMOVE];
11638 case BUILT_IN_MEMSET_CHK:
11639 fn = built_in_decls[BUILT_IN_MEMSET];
11648 fn = build_call_expr (fn, 3, dest, src, len);
11649 STRIP_TYPE_NOPS (fn);
11650 while (TREE_CODE (fn) == COMPOUND_EXPR)
11652 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11654 fn = TREE_OPERAND (fn, 1);
11656 if (TREE_CODE (fn) == CALL_EXPR)
11657 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11658 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11660 else if (fcode == BUILT_IN_MEMSET_CHK)
11664 unsigned int dest_align
11665 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11667 /* If DEST is not a pointer type, call the normal function. */
11668 if (dest_align == 0)
11671 /* If SRC and DEST are the same (and not volatile), do nothing. */
11672 if (operand_equal_p (src, dest, 0))
11676 if (fcode != BUILT_IN_MEMPCPY_CHK)
11678 /* Evaluate and ignore LEN in case it has side-effects. */
11679 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11680 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11683 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11684 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11687 /* __memmove_chk special case. */
11688 if (fcode == BUILT_IN_MEMMOVE_CHK)
11690 unsigned int src_align
11691 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11693 if (src_align == 0)
11696 /* If src is categorized for a readonly section we can use
11697 normal __memcpy_chk. */
11698 if (readonly_data_expr (src))
11700 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11703 fn = build_call_expr (fn, 4, dest, src, len, size);
11704 STRIP_TYPE_NOPS (fn);
11705 while (TREE_CODE (fn) == COMPOUND_EXPR)
11707 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11709 fn = TREE_OPERAND (fn, 1);
11711 if (TREE_CODE (fn) == CALL_EXPR)
11712 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11713 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11720 /* Emit warning if a buffer overflow is detected at compile time. */
11723 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11730 case BUILT_IN_STRCPY_CHK:
11731 case BUILT_IN_STPCPY_CHK:
11732 /* For __strcat_chk the warning will be emitted only if overflowing
11733 by at least strlen (dest) + 1 bytes. */
11734 case BUILT_IN_STRCAT_CHK:
11735 len = CALL_EXPR_ARG (exp, 1);
11736 size = CALL_EXPR_ARG (exp, 2);
11739 case BUILT_IN_STRNCAT_CHK:
11740 case BUILT_IN_STRNCPY_CHK:
11741 len = CALL_EXPR_ARG (exp, 2);
11742 size = CALL_EXPR_ARG (exp, 3);
11744 case BUILT_IN_SNPRINTF_CHK:
11745 case BUILT_IN_VSNPRINTF_CHK:
11746 len = CALL_EXPR_ARG (exp, 1);
11747 size = CALL_EXPR_ARG (exp, 3);
11750 gcc_unreachable ();
11756 if (! host_integerp (size, 1) || integer_all_onesp (size))
11761 len = c_strlen (len, 1);
11762 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11765 else if (fcode == BUILT_IN_STRNCAT_CHK)
11767 tree src = CALL_EXPR_ARG (exp, 1);
11768 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11770 src = c_strlen (src, 1);
11771 if (! src || ! host_integerp (src, 1))
11773 warning (0, "%Kcall to %D might overflow destination buffer",
11774 exp, get_callee_fndecl (exp));
11777 else if (tree_int_cst_lt (src, size))
11780 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11783 warning (0, "%Kcall to %D will always overflow destination buffer",
11784 exp, get_callee_fndecl (exp));
11787 /* Emit warning if a buffer overflow is detected at compile time
11788 in __sprintf_chk/__vsprintf_chk calls. */
11791 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11793 tree dest, size, len, fmt, flag;
11794 const char *fmt_str;
11795 int nargs = call_expr_nargs (exp);
11797 /* Verify the required arguments in the original call. */
11801 dest = CALL_EXPR_ARG (exp, 0);
11802 flag = CALL_EXPR_ARG (exp, 1);
11803 size = CALL_EXPR_ARG (exp, 2);
11804 fmt = CALL_EXPR_ARG (exp, 3);
11806 if (! host_integerp (size, 1) || integer_all_onesp (size))
11809 /* Check whether the format is a literal string constant. */
11810 fmt_str = c_getstr (fmt);
11811 if (fmt_str == NULL)
11814 if (!init_target_chars ())
11817 /* If the format doesn't contain % args or %%, we know its size. */
11818 if (strchr (fmt_str, target_percent) == 0)
11819 len = build_int_cstu (size_type_node, strlen (fmt_str));
11820 /* If the format is "%s" and first ... argument is a string literal,
11822 else if (fcode == BUILT_IN_SPRINTF_CHK
11823 && strcmp (fmt_str, target_percent_s) == 0)
11829 arg = CALL_EXPR_ARG (exp, 4);
11830 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11833 len = c_strlen (arg, 1);
11834 if (!len || ! host_integerp (len, 1))
11840 if (! tree_int_cst_lt (len, size))
11842 warning (0, "%Kcall to %D will always overflow destination buffer",
11843 exp, get_callee_fndecl (exp));
11847 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11851 fold_builtin_object_size (tree ptr, tree ost)
11853 tree ret = NULL_TREE;
11854 int object_size_type;
11856 if (!validate_arg (ptr, POINTER_TYPE)
11857 || !validate_arg (ost, INTEGER_TYPE))
11862 if (TREE_CODE (ost) != INTEGER_CST
11863 || tree_int_cst_sgn (ost) < 0
11864 || compare_tree_int (ost, 3) > 0)
11867 object_size_type = tree_low_cst (ost, 0);
11869 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11870 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11871 and (size_t) 0 for types 2 and 3. */
11872 if (TREE_SIDE_EFFECTS (ptr))
11873 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11875 if (TREE_CODE (ptr) == ADDR_EXPR)
11876 ret = build_int_cstu (size_type_node,
11877 compute_builtin_object_size (ptr, object_size_type));
11879 else if (TREE_CODE (ptr) == SSA_NAME)
11881 unsigned HOST_WIDE_INT bytes;
11883 /* If object size is not known yet, delay folding until
11884 later. Maybe subsequent passes will help determining
11886 bytes = compute_builtin_object_size (ptr, object_size_type);
11887 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11889 ret = build_int_cstu (size_type_node, bytes);
11894 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11895 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11896 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11903 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11904 DEST, SRC, LEN, and SIZE are the arguments to the call.
11905 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11906 code of the builtin. If MAXLEN is not NULL, it is maximum length
11907 passed as third argument. */
11910 fold_builtin_memory_chk (tree fndecl,
11911 tree dest, tree src, tree len, tree size,
11912 tree maxlen, bool ignore,
11913 enum built_in_function fcode)
11917 if (!validate_arg (dest, POINTER_TYPE)
11918 || !validate_arg (src,
11919 (fcode == BUILT_IN_MEMSET_CHK
11920 ? INTEGER_TYPE : POINTER_TYPE))
11921 || !validate_arg (len, INTEGER_TYPE)
11922 || !validate_arg (size, INTEGER_TYPE))
11925 /* If SRC and DEST are the same (and not volatile), return DEST
11926 (resp. DEST+LEN for __mempcpy_chk). */
11927 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11929 if (fcode != BUILT_IN_MEMPCPY_CHK)
11930 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11933 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11934 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
11938 if (! host_integerp (size, 1))
11941 if (! integer_all_onesp (size))
11943 if (! host_integerp (len, 1))
11945 /* If LEN is not constant, try MAXLEN too.
11946 For MAXLEN only allow optimizing into non-_ocs function
11947 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11948 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11950 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11952 /* (void) __mempcpy_chk () can be optimized into
11953 (void) __memcpy_chk (). */
11954 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11958 return build_call_expr (fn, 4, dest, src, len, size);
11966 if (tree_int_cst_lt (size, maxlen))
11971 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11972 mem{cpy,pcpy,move,set} is available. */
11975 case BUILT_IN_MEMCPY_CHK:
11976 fn = built_in_decls[BUILT_IN_MEMCPY];
11978 case BUILT_IN_MEMPCPY_CHK:
11979 fn = built_in_decls[BUILT_IN_MEMPCPY];
11981 case BUILT_IN_MEMMOVE_CHK:
11982 fn = built_in_decls[BUILT_IN_MEMMOVE];
11984 case BUILT_IN_MEMSET_CHK:
11985 fn = built_in_decls[BUILT_IN_MEMSET];
11994 return build_call_expr (fn, 3, dest, src, len);
11997 /* Fold a call to the __st[rp]cpy_chk builtin.
11998 DEST, SRC, and SIZE are the arguments to the call.
11999 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12000 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12001 strings passed as second argument. */
12004 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12005 tree maxlen, bool ignore,
12006 enum built_in_function fcode)
12010 if (!validate_arg (dest, POINTER_TYPE)
12011 || !validate_arg (src, POINTER_TYPE)
12012 || !validate_arg (size, INTEGER_TYPE))
12015 /* If SRC and DEST are the same (and not volatile), return DEST. */
12016 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12017 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12019 if (! host_integerp (size, 1))
12022 if (! integer_all_onesp (size))
12024 len = c_strlen (src, 1);
12025 if (! len || ! host_integerp (len, 1))
12027 /* If LEN is not constant, try MAXLEN too.
12028 For MAXLEN only allow optimizing into non-_ocs function
12029 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12030 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12032 if (fcode == BUILT_IN_STPCPY_CHK)
12037 /* If return value of __stpcpy_chk is ignored,
12038 optimize into __strcpy_chk. */
12039 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12043 return build_call_expr (fn, 3, dest, src, size);
12046 if (! len || TREE_SIDE_EFFECTS (len))
12049 /* If c_strlen returned something, but not a constant,
12050 transform __strcpy_chk into __memcpy_chk. */
12051 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12055 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12056 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12057 build_call_expr (fn, 4,
12058 dest, src, len, size));
12064 if (! tree_int_cst_lt (maxlen, size))
12068 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12069 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12070 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12074 return build_call_expr (fn, 2, dest, src);
12077 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12078 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12079 length passed as third argument. */
12082 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12087 if (!validate_arg (dest, POINTER_TYPE)
12088 || !validate_arg (src, POINTER_TYPE)
12089 || !validate_arg (len, INTEGER_TYPE)
12090 || !validate_arg (size, INTEGER_TYPE))
12093 if (! host_integerp (size, 1))
12096 if (! integer_all_onesp (size))
12098 if (! host_integerp (len, 1))
12100 /* If LEN is not constant, try MAXLEN too.
12101 For MAXLEN only allow optimizing into non-_ocs function
12102 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12103 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12109 if (tree_int_cst_lt (size, maxlen))
12113 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12114 fn = built_in_decls[BUILT_IN_STRNCPY];
12118 return build_call_expr (fn, 3, dest, src, len);
12121 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12122 are the arguments to the call. */
12125 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12130 if (!validate_arg (dest, POINTER_TYPE)
12131 || !validate_arg (src, POINTER_TYPE)
12132 || !validate_arg (size, INTEGER_TYPE))
12135 p = c_getstr (src);
12136 /* If the SRC parameter is "", return DEST. */
12137 if (p && *p == '\0')
12138 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12140 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12143 /* If __builtin_strcat_chk is used, assume strcat is available. */
12144 fn = built_in_decls[BUILT_IN_STRCAT];
12148 return build_call_expr (fn, 2, dest, src);
12151 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12155 fold_builtin_strncat_chk (tree fndecl,
12156 tree dest, tree src, tree len, tree size)
12161 if (!validate_arg (dest, POINTER_TYPE)
12162 || !validate_arg (src, POINTER_TYPE)
12163 || !validate_arg (size, INTEGER_TYPE)
12164 || !validate_arg (size, INTEGER_TYPE))
12167 p = c_getstr (src);
12168 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12169 if (p && *p == '\0')
12170 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12171 else if (integer_zerop (len))
12172 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12174 if (! host_integerp (size, 1))
12177 if (! integer_all_onesp (size))
12179 tree src_len = c_strlen (src, 1);
12181 && host_integerp (src_len, 1)
12182 && host_integerp (len, 1)
12183 && ! tree_int_cst_lt (len, src_len))
12185 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12186 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12190 return build_call_expr (fn, 3, dest, src, size);
12195 /* If __builtin_strncat_chk is used, assume strncat is available. */
12196 fn = built_in_decls[BUILT_IN_STRNCAT];
12200 return build_call_expr (fn, 3, dest, src, len);
12203 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12204 a normal call should be emitted rather than expanding the function
12205 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12208 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12210 tree dest, size, len, fn, fmt, flag;
12211 const char *fmt_str;
12212 int nargs = call_expr_nargs (exp);
12214 /* Verify the required arguments in the original call. */
12217 dest = CALL_EXPR_ARG (exp, 0);
12218 if (!validate_arg (dest, POINTER_TYPE))
12220 flag = CALL_EXPR_ARG (exp, 1);
12221 if (!validate_arg (flag, INTEGER_TYPE))
12223 size = CALL_EXPR_ARG (exp, 2);
12224 if (!validate_arg (size, INTEGER_TYPE))
12226 fmt = CALL_EXPR_ARG (exp, 3);
12227 if (!validate_arg (fmt, POINTER_TYPE))
12230 if (! host_integerp (size, 1))
12235 if (!init_target_chars ())
12238 /* Check whether the format is a literal string constant. */
12239 fmt_str = c_getstr (fmt);
12240 if (fmt_str != NULL)
12242 /* If the format doesn't contain % args or %%, we know the size. */
12243 if (strchr (fmt_str, target_percent) == 0)
12245 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12246 len = build_int_cstu (size_type_node, strlen (fmt_str));
12248 /* If the format is "%s" and first ... argument is a string literal,
12249 we know the size too. */
12250 else if (fcode == BUILT_IN_SPRINTF_CHK
12251 && strcmp (fmt_str, target_percent_s) == 0)
12257 arg = CALL_EXPR_ARG (exp, 4);
12258 if (validate_arg (arg, POINTER_TYPE))
12260 len = c_strlen (arg, 1);
12261 if (! len || ! host_integerp (len, 1))
12268 if (! integer_all_onesp (size))
12270 if (! len || ! tree_int_cst_lt (len, size))
12274 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12275 or if format doesn't contain % chars or is "%s". */
12276 if (! integer_zerop (flag))
12278 if (fmt_str == NULL)
12280 if (strchr (fmt_str, target_percent) != NULL
12281 && strcmp (fmt_str, target_percent_s))
12285 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12286 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12287 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12291 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12294 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12295 a normal call should be emitted rather than expanding the function
12296 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12297 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12298 passed as second argument. */
12301 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12302 enum built_in_function fcode)
12304 tree dest, size, len, fn, fmt, flag;
12305 const char *fmt_str;
12307 /* Verify the required arguments in the original call. */
12308 if (call_expr_nargs (exp) < 5)
12310 dest = CALL_EXPR_ARG (exp, 0);
12311 if (!validate_arg (dest, POINTER_TYPE))
12313 len = CALL_EXPR_ARG (exp, 1);
12314 if (!validate_arg (len, INTEGER_TYPE))
12316 flag = CALL_EXPR_ARG (exp, 2);
12317 if (!validate_arg (flag, INTEGER_TYPE))
12319 size = CALL_EXPR_ARG (exp, 3);
12320 if (!validate_arg (size, INTEGER_TYPE))
12322 fmt = CALL_EXPR_ARG (exp, 4);
12323 if (!validate_arg (fmt, POINTER_TYPE))
12326 if (! host_integerp (size, 1))
12329 if (! integer_all_onesp (size))
12331 if (! host_integerp (len, 1))
12333 /* If LEN is not constant, try MAXLEN too.
12334 For MAXLEN only allow optimizing into non-_ocs function
12335 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12336 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12342 if (tree_int_cst_lt (size, maxlen))
12346 if (!init_target_chars ())
12349 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12350 or if format doesn't contain % chars or is "%s". */
12351 if (! integer_zerop (flag))
12353 fmt_str = c_getstr (fmt);
12354 if (fmt_str == NULL)
12356 if (strchr (fmt_str, target_percent) != NULL
12357 && strcmp (fmt_str, target_percent_s))
12361 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12363 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12364 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12368 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12371 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12372 FMT and ARG are the arguments to the call; we don't fold cases with
12373 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12375 Return NULL_TREE if no simplification was possible, otherwise return the
12376 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12377 code of the function to be simplified. */
12380 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12381 enum built_in_function fcode)
12383 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12384 const char *fmt_str = NULL;
12386 /* If the return value is used, don't do the transformation. */
12390 /* Verify the required arguments in the original call. */
12391 if (!validate_arg (fmt, POINTER_TYPE))
12394 /* Check whether the format is a literal string constant. */
12395 fmt_str = c_getstr (fmt);
12396 if (fmt_str == NULL)
12399 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12401 /* If we're using an unlocked function, assume the other
12402 unlocked functions exist explicitly. */
12403 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12404 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12408 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12409 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12412 if (!init_target_chars ())
12415 if (strcmp (fmt_str, target_percent_s) == 0
12416 || strchr (fmt_str, target_percent) == NULL)
12420 if (strcmp (fmt_str, target_percent_s) == 0)
12422 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12425 if (!arg || !validate_arg (arg, POINTER_TYPE))
12428 str = c_getstr (arg);
12434 /* The format specifier doesn't contain any '%' characters. */
12435 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12441 /* If the string was "", printf does nothing. */
12442 if (str[0] == '\0')
12443 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12445 /* If the string has length of 1, call putchar. */
12446 if (str[1] == '\0')
12448 /* Given printf("c"), (where c is any one character,)
12449 convert "c"[0] to an int and pass that to the replacement
12451 newarg = build_int_cst (NULL_TREE, str[0]);
12453 call = build_call_expr (fn_putchar, 1, newarg);
12457 /* If the string was "string\n", call puts("string"). */
12458 size_t len = strlen (str);
12459 if ((unsigned char)str[len - 1] == target_newline)
12461 /* Create a NUL-terminated string that's one char shorter
12462 than the original, stripping off the trailing '\n'. */
12463 char *newstr = alloca (len);
12464 memcpy (newstr, str, len - 1);
12465 newstr[len - 1] = 0;
12467 newarg = build_string_literal (len, newstr);
12469 call = build_call_expr (fn_puts, 1, newarg);
12472 /* We'd like to arrange to call fputs(string,stdout) here,
12473 but we need stdout and don't have a way to get it yet. */
12478 /* The other optimizations can be done only on the non-va_list variants. */
12479 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12482 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12483 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12485 if (!arg || !validate_arg (arg, POINTER_TYPE))
12488 call = build_call_expr (fn_puts, 1, arg);
12491 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12492 else if (strcmp (fmt_str, target_percent_c) == 0)
12494 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12497 call = build_call_expr (fn_putchar, 1, arg);
12503 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12506 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12507 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12508 more than 3 arguments, and ARG may be null in the 2-argument case.
12510 Return NULL_TREE if no simplification was possible, otherwise return the
12511 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12512 code of the function to be simplified. */
12515 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12516 enum built_in_function fcode)
12518 tree fn_fputc, fn_fputs, call = NULL_TREE;
12519 const char *fmt_str = NULL;
12521 /* If the return value is used, don't do the transformation. */
12525 /* Verify the required arguments in the original call. */
12526 if (!validate_arg (fp, POINTER_TYPE))
12528 if (!validate_arg (fmt, POINTER_TYPE))
12531 /* Check whether the format is a literal string constant. */
12532 fmt_str = c_getstr (fmt);
12533 if (fmt_str == NULL)
12536 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12538 /* If we're using an unlocked function, assume the other
12539 unlocked functions exist explicitly. */
12540 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12541 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12545 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12546 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12549 if (!init_target_chars ())
12552 /* If the format doesn't contain % args or %%, use strcpy. */
12553 if (strchr (fmt_str, target_percent) == NULL)
12555 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12559 /* If the format specifier was "", fprintf does nothing. */
12560 if (fmt_str[0] == '\0')
12562 /* If FP has side-effects, just wait until gimplification is
12564 if (TREE_SIDE_EFFECTS (fp))
12567 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12570 /* When "string" doesn't contain %, replace all cases of
12571 fprintf (fp, string) with fputs (string, fp). The fputs
12572 builtin will take care of special cases like length == 1. */
12574 call = build_call_expr (fn_fputs, 2, fmt, fp);
12577 /* The other optimizations can be done only on the non-va_list variants. */
12578 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12581 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12582 else if (strcmp (fmt_str, target_percent_s) == 0)
12584 if (!arg || !validate_arg (arg, POINTER_TYPE))
12587 call = build_call_expr (fn_fputs, 2, arg, fp);
12590 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12591 else if (strcmp (fmt_str, target_percent_c) == 0)
12593 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12596 call = build_call_expr (fn_fputc, 2, arg, fp);
12601 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12604 /* Initialize format string characters in the target charset. */
12607 init_target_chars (void)
12612 target_newline = lang_hooks.to_target_charset ('\n');
12613 target_percent = lang_hooks.to_target_charset ('%');
12614 target_c = lang_hooks.to_target_charset ('c');
12615 target_s = lang_hooks.to_target_charset ('s');
12616 if (target_newline == 0 || target_percent == 0 || target_c == 0
12620 target_percent_c[0] = target_percent;
12621 target_percent_c[1] = target_c;
12622 target_percent_c[2] = '\0';
12624 target_percent_s[0] = target_percent;
12625 target_percent_s[1] = target_s;
12626 target_percent_s[2] = '\0';
12628 target_percent_s_newline[0] = target_percent;
12629 target_percent_s_newline[1] = target_s;
12630 target_percent_s_newline[2] = target_newline;
12631 target_percent_s_newline[3] = '\0';
12638 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12639 and no overflow/underflow occurred. INEXACT is true if M was not
12640 exactly calculated. TYPE is the tree type for the result. This
12641 function assumes that you cleared the MPFR flags and then
12642 calculated M to see if anything subsequently set a flag prior to
12643 entering this function. Return NULL_TREE if any checks fail. */
12646 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12648 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12649 overflow/underflow occurred. If -frounding-math, proceed iff the
12650 result of calling FUNC was exact. */
12651 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12652 && (!flag_rounding_math || !inexact))
12654 REAL_VALUE_TYPE rr;
12656 real_from_mpfr (&rr, m, type, GMP_RNDN);
12657 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12658 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12659 but the mpft_t is not, then we underflowed in the
12661 if (real_isfinite (&rr)
12662 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12664 REAL_VALUE_TYPE rmode;
12666 real_convert (&rmode, TYPE_MODE (type), &rr);
12667 /* Proceed iff the specified mode can hold the value. */
12668 if (real_identical (&rmode, &rr))
12669 return build_real (type, rmode);
12675 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12676 FUNC on it and return the resulting value as a tree with type TYPE.
12677 If MIN and/or MAX are not NULL, then the supplied ARG must be
12678 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12679 acceptable values, otherwise they are not. The mpfr precision is
12680 set to the precision of TYPE. We assume that function FUNC returns
12681 zero if the result could be calculated exactly within the requested
12685 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12686 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12689 tree result = NULL_TREE;
12693 /* To proceed, MPFR must exactly represent the target floating point
12694 format, which only happens when the target base equals two. */
12695 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12696 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12698 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12700 if (real_isfinite (ra)
12701 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12702 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12704 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12708 mpfr_init2 (m, prec);
12709 mpfr_from_real (m, ra, GMP_RNDN);
12710 mpfr_clear_flags ();
12711 inexact = func (m, m, GMP_RNDN);
12712 result = do_mpfr_ckconv (m, type, inexact);
12720 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12721 FUNC on it and return the resulting value as a tree with type TYPE.
12722 The mpfr precision is set to the precision of TYPE. We assume that
12723 function FUNC returns zero if the result could be calculated
12724 exactly within the requested precision. */
12727 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12728 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12730 tree result = NULL_TREE;
12735 /* To proceed, MPFR must exactly represent the target floating point
12736 format, which only happens when the target base equals two. */
12737 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12738 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12739 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12741 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12742 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12744 if (real_isfinite (ra1) && real_isfinite (ra2))
12746 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12750 mpfr_inits2 (prec, m1, m2, NULL);
12751 mpfr_from_real (m1, ra1, GMP_RNDN);
12752 mpfr_from_real (m2, ra2, GMP_RNDN);
12753 mpfr_clear_flags ();
12754 inexact = func (m1, m1, m2, GMP_RNDN);
12755 result = do_mpfr_ckconv (m1, type, inexact);
12756 mpfr_clears (m1, m2, NULL);
12763 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12764 FUNC on it and return the resulting value as a tree with type TYPE.
12765 The mpfr precision is set to the precision of TYPE. We assume that
12766 function FUNC returns zero if the result could be calculated
12767 exactly within the requested precision. */
12770 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12771 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12773 tree result = NULL_TREE;
12779 /* To proceed, MPFR must exactly represent the target floating point
12780 format, which only happens when the target base equals two. */
12781 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12782 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12783 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12784 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12786 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12787 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12788 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12790 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12792 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12796 mpfr_inits2 (prec, m1, m2, m3, NULL);
12797 mpfr_from_real (m1, ra1, GMP_RNDN);
12798 mpfr_from_real (m2, ra2, GMP_RNDN);
12799 mpfr_from_real (m3, ra3, GMP_RNDN);
12800 mpfr_clear_flags ();
12801 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12802 result = do_mpfr_ckconv (m1, type, inexact);
12803 mpfr_clears (m1, m2, m3, NULL);
12810 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12811 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12812 If ARG_SINP and ARG_COSP are NULL then the result is returned
12813 as a complex value.
12814 The type is taken from the type of ARG and is used for setting the
12815 precision of the calculation and results. */
12818 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12820 tree const type = TREE_TYPE (arg);
12821 tree result = NULL_TREE;
12825 /* To proceed, MPFR must exactly represent the target floating point
12826 format, which only happens when the target base equals two. */
12827 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12828 && TREE_CODE (arg) == REAL_CST
12829 && !TREE_OVERFLOW (arg))
12831 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12833 if (real_isfinite (ra))
12835 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12836 tree result_s, result_c;
12840 mpfr_inits2 (prec, m, ms, mc, NULL);
12841 mpfr_from_real (m, ra, GMP_RNDN);
12842 mpfr_clear_flags ();
12843 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12844 result_s = do_mpfr_ckconv (ms, type, inexact);
12845 result_c = do_mpfr_ckconv (mc, type, inexact);
12846 mpfr_clears (m, ms, mc, NULL);
12847 if (result_s && result_c)
12849 /* If we are to return in a complex value do so. */
12850 if (!arg_sinp && !arg_cosp)
12851 return build_complex (build_complex_type (type),
12852 result_c, result_s);
12854 /* Dereference the sin/cos pointer arguments. */
12855 arg_sinp = build_fold_indirect_ref (arg_sinp);
12856 arg_cosp = build_fold_indirect_ref (arg_cosp);
12857 /* Proceed if valid pointer type were passed in. */
12858 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12859 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12861 /* Set the values. */
12862 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12864 TREE_SIDE_EFFECTS (result_s) = 1;
12865 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12867 TREE_SIDE_EFFECTS (result_c) = 1;
12868 /* Combine the assignments into a compound expr. */
12869 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12870 result_s, result_c));
12878 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
12879 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12880 two-argument mpfr order N Bessel function FUNC on them and return
12881 the resulting value as a tree with type TYPE. The mpfr precision
12882 is set to the precision of TYPE. We assume that function FUNC
12883 returns zero if the result could be calculated exactly within the
12884 requested precision. */
12886 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12887 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12888 const REAL_VALUE_TYPE *min, bool inclusive)
12890 tree result = NULL_TREE;
12895 /* To proceed, MPFR must exactly represent the target floating point
12896 format, which only happens when the target base equals two. */
12897 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12898 && host_integerp (arg1, 0)
12899 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12901 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12902 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12905 && real_isfinite (ra)
12906 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12908 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12912 mpfr_init2 (m, prec);
12913 mpfr_from_real (m, ra, GMP_RNDN);
12914 mpfr_clear_flags ();
12915 inexact = func (m, n, m, GMP_RNDN);
12916 result = do_mpfr_ckconv (m, type, inexact);
12924 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12925 the pointer *(ARG_QUO) and return the result. The type is taken
12926 from the type of ARG0 and is used for setting the precision of the
12927 calculation and results. */
12930 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12932 tree const type = TREE_TYPE (arg0);
12933 tree result = NULL_TREE;
12938 /* To proceed, MPFR must exactly represent the target floating point
12939 format, which only happens when the target base equals two. */
12940 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12941 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12942 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12944 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12945 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12947 if (real_isfinite (ra0) && real_isfinite (ra1))
12949 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12954 mpfr_inits2 (prec, m0, m1, NULL);
12955 mpfr_from_real (m0, ra0, GMP_RNDN);
12956 mpfr_from_real (m1, ra1, GMP_RNDN);
12957 mpfr_clear_flags ();
12958 mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
12959 /* Remquo is independent of the rounding mode, so pass
12960 inexact=0 to do_mpfr_ckconv(). */
12961 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12962 mpfr_clears (m0, m1, NULL);
12965 /* MPFR calculates quo in the host's long so it may
12966 return more bits in quo than the target int can hold
12967 if sizeof(host long) > sizeof(target int). This can
12968 happen even for native compilers in LP64 mode. In
12969 these cases, modulo the quo value with the largest
12970 number that the target int can hold while leaving one
12971 bit for the sign. */
12972 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12973 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12975 /* Dereference the quo pointer argument. */
12976 arg_quo = build_fold_indirect_ref (arg_quo);
12977 /* Proceed iff a valid pointer type was passed in. */
12978 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12980 /* Set the value. */
12981 tree result_quo = fold_build2 (MODIFY_EXPR,
12982 TREE_TYPE (arg_quo), arg_quo,
12983 build_int_cst (NULL, integer_quo));
12984 TREE_SIDE_EFFECTS (result_quo) = 1;
12985 /* Combine the quo assignment with the rem. */
12986 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12987 result_quo, result_rem));
12995 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12996 resulting value as a tree with type TYPE. The mpfr precision is
12997 set to the precision of TYPE. We assume that this mpfr function
12998 returns zero if the result could be calculated exactly within the
12999 requested precision. In addition, the integer pointer represented
13000 by ARG_SG will be dereferenced and set to the appropriate signgam
13004 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13006 tree result = NULL_TREE;
13010 /* To proceed, MPFR must exactly represent the target floating point
13011 format, which only happens when the target base equals two. Also
13012 verify ARG is a constant and that ARG_SG is an int pointer. */
13013 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13014 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13015 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13016 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13018 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13020 /* In addition to NaN and Inf, the argument cannot be zero or a
13021 negative integer. */
13022 if (real_isfinite (ra)
13023 && ra->cl != rvc_zero
13024 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13026 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
13031 mpfr_init2 (m, prec);
13032 mpfr_from_real (m, ra, GMP_RNDN);
13033 mpfr_clear_flags ();
13034 inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
13035 result_lg = do_mpfr_ckconv (m, type, inexact);
13041 /* Dereference the arg_sg pointer argument. */
13042 arg_sg = build_fold_indirect_ref (arg_sg);
13043 /* Assign the signgam value into *arg_sg. */
13044 result_sg = fold_build2 (MODIFY_EXPR,
13045 TREE_TYPE (arg_sg), arg_sg,
13046 build_int_cst (NULL, sg));
13047 TREE_SIDE_EFFECTS (result_sg) = 1;
13048 /* Combine the signgam assignment with the lgamma result. */
13049 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13050 result_sg, result_lg));