1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
25 #include "coretypes.h"
31 #include "tree-gimple.h"
34 #include "hard-reg-set.h"
37 #include "insn-config.h"
43 #include "typeclass.h"
48 #include "langhooks.h"
49 #include "basic-block.h"
50 #include "tree-mudflap.h"
51 #include "tree-flow.h"
52 #include "value-prof.h"
53 #include "diagnostic.h"
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
59 /* Define the names of the builtin function types and codes. */
60 const char *const built_in_class_names[4]
61 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
63 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
64 const char * built_in_names[(int) END_BUILTINS] =
66 #include "builtins.def"
70 /* Setup an array of _DECL trees, make sure each element is
71 initialized to NULL_TREE. */
72 tree built_in_decls[(int) END_BUILTINS];
73 /* Declarations used when constructing the builtin implicitly in the compiler.
74 It may be NULL_TREE when this is invalid (for instance runtime is not
75 required to implement the function call in all cases). */
76 tree implicit_built_in_decls[(int) END_BUILTINS];
78 static const char *c_getstr (tree);
79 static rtx c_readstr (const char *, enum machine_mode);
80 static int target_char_cast (tree, char *);
81 static rtx get_memory_rtx (tree, tree);
82 static int apply_args_size (void);
83 static int apply_result_size (void);
84 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
85 static rtx result_vector (int, rtx);
87 static void expand_builtin_update_setjmp_buf (rtx);
88 static void expand_builtin_prefetch (tree);
89 static rtx expand_builtin_apply_args (void);
90 static rtx expand_builtin_apply_args_1 (void);
91 static rtx expand_builtin_apply (rtx, rtx, rtx);
92 static void expand_builtin_return (rtx);
93 static enum type_class type_to_class (tree);
94 static rtx expand_builtin_classify_type (tree);
95 static void expand_errno_check (tree, rtx);
96 static rtx expand_builtin_mathfn (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
98 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
99 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
100 static rtx expand_builtin_sincos (tree);
101 static rtx expand_builtin_cexpi (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
103 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
104 static rtx expand_builtin_args_info (tree);
105 static rtx expand_builtin_next_arg (void);
106 static rtx expand_builtin_va_start (tree);
107 static rtx expand_builtin_va_end (tree);
108 static rtx expand_builtin_va_copy (tree);
109 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
112 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
113 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
114 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
121 enum machine_mode, int);
122 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
123 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
124 enum machine_mode, int);
125 static rtx expand_builtin_bcopy (tree, int);
126 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
128 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
129 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
131 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
132 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
133 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
134 static rtx expand_builtin_bzero (tree);
135 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
140 static rtx expand_builtin_alloca (tree, rtx);
141 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
142 static rtx expand_builtin_frame_address (tree, tree);
143 static rtx expand_builtin_fputs (tree, rtx, bool);
144 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
145 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
146 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
147 static tree stabilize_va_list (tree, int);
148 static rtx expand_builtin_expect (tree, rtx);
149 static tree fold_builtin_constant_p (tree);
150 static tree fold_builtin_expect (tree);
151 static tree fold_builtin_classify_type (tree);
152 static tree fold_builtin_strlen (tree);
153 static tree fold_builtin_inf (tree, int);
154 static tree fold_builtin_nan (tree, tree, int);
155 static tree rewrite_call_expr (tree, int, tree, int, ...);
156 static bool validate_arg (tree, enum tree_code code);
157 static bool integer_valued_real_p (tree);
158 static tree fold_trunc_transparent_mathfn (tree, tree);
159 static bool readonly_data_expr (tree);
160 static rtx expand_builtin_fabs (tree, rtx, rtx);
161 static rtx expand_builtin_signbit (tree, rtx);
162 static tree fold_builtin_sqrt (tree, tree);
163 static tree fold_builtin_cbrt (tree, tree);
164 static tree fold_builtin_pow (tree, tree, tree, tree);
165 static tree fold_builtin_powi (tree, tree, tree, tree);
166 static tree fold_builtin_cos (tree, tree, tree);
167 static tree fold_builtin_cosh (tree, tree, tree);
168 static tree fold_builtin_tan (tree, tree);
169 static tree fold_builtin_trunc (tree, tree);
170 static tree fold_builtin_floor (tree, tree);
171 static tree fold_builtin_ceil (tree, tree);
172 static tree fold_builtin_round (tree, tree);
173 static tree fold_builtin_int_roundingfn (tree, tree);
174 static tree fold_builtin_bitop (tree, tree);
175 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
176 static tree fold_builtin_strchr (tree, tree, tree);
177 static tree fold_builtin_memchr (tree, tree, tree, tree);
178 static tree fold_builtin_memcmp (tree, tree, tree);
179 static tree fold_builtin_strcmp (tree, tree);
180 static tree fold_builtin_strncmp (tree, tree, tree);
181 static tree fold_builtin_signbit (tree, tree);
182 static tree fold_builtin_copysign (tree, tree, tree, tree);
183 static tree fold_builtin_isascii (tree);
184 static tree fold_builtin_toascii (tree);
185 static tree fold_builtin_isdigit (tree);
186 static tree fold_builtin_fabs (tree, tree);
187 static tree fold_builtin_abs (tree, tree);
188 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
190 static tree fold_builtin_n (tree, tree *, int, bool);
191 static tree fold_builtin_0 (tree, bool);
192 static tree fold_builtin_1 (tree, tree, bool);
193 static tree fold_builtin_2 (tree, tree, tree, bool);
194 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
195 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
196 static tree fold_builtin_varargs (tree, tree, bool);
198 static tree fold_builtin_strpbrk (tree, tree, tree);
199 static tree fold_builtin_strstr (tree, tree, tree);
200 static tree fold_builtin_strrchr (tree, tree, tree);
201 static tree fold_builtin_strcat (tree, tree);
202 static tree fold_builtin_strncat (tree, tree, tree);
203 static tree fold_builtin_strspn (tree, tree);
204 static tree fold_builtin_strcspn (tree, tree);
205 static tree fold_builtin_sprintf (tree, tree, tree, int);
207 static rtx expand_builtin_object_size (tree);
208 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
209 enum built_in_function);
210 static void maybe_emit_chk_warning (tree, enum built_in_function);
211 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
212 static tree fold_builtin_object_size (tree, tree);
213 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
214 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
215 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
216 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
217 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
218 enum built_in_function);
219 static bool init_target_chars (void);
221 static unsigned HOST_WIDE_INT target_newline;
222 static unsigned HOST_WIDE_INT target_percent;
223 static unsigned HOST_WIDE_INT target_c;
224 static unsigned HOST_WIDE_INT target_s;
225 static char target_percent_c[3];
226 static char target_percent_s[3];
227 static char target_percent_s_newline[4];
228 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
229 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
230 static tree do_mpfr_arg2 (tree, tree, tree,
231 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
232 static tree do_mpfr_arg3 (tree, tree, tree, tree,
233 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
234 static tree do_mpfr_sincos (tree, tree, tree);
235 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
236 static tree do_mpfr_bessel_n (tree, tree, tree,
237 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
238 const REAL_VALUE_TYPE *, bool);
239 static tree do_mpfr_remquo (tree, tree, tree);
240 static tree do_mpfr_lgamma_r (tree, tree, tree);
243 /* Return true if NODE should be considered for inline expansion regardless
244 of the optimization level. This means whenever a function is invoked with
245 its "internal" name, which normally contains the prefix "__builtin". */
247 static bool called_as_built_in (tree node)
249 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
250 if (strncmp (name, "__builtin_", 10) == 0)
252 if (strncmp (name, "__sync_", 7) == 0)
257 /* Return the alignment in bits of EXP, a pointer valued expression.
258 But don't return more than MAX_ALIGN no matter what.
259 The alignment returned is, by default, the alignment of the thing that
260 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
262 Otherwise, look at the expression to see if we can do better, i.e., if the
263 expression is actually pointing at an object whose alignment is tighter. */
266 get_pointer_alignment (tree exp, unsigned int max_align)
268 unsigned int align, inner;
270 /* We rely on TER to compute accurate alignment information. */
271 if (!(optimize && flag_tree_ter))
274 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
277 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
278 align = MIN (align, max_align);
282 switch (TREE_CODE (exp))
286 case NON_LVALUE_EXPR:
287 exp = TREE_OPERAND (exp, 0);
288 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
291 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
292 align = MIN (inner, max_align);
295 case POINTER_PLUS_EXPR:
296 /* If sum of pointer + int, restrict our maximum alignment to that
297 imposed by the integer. If not, we can't do any better than
299 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
302 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
303 & (max_align / BITS_PER_UNIT - 1))
307 exp = TREE_OPERAND (exp, 0);
311 /* See what we are pointing at and look at its alignment. */
312 exp = TREE_OPERAND (exp, 0);
314 if (handled_component_p (exp))
316 HOST_WIDE_INT bitsize, bitpos;
318 enum machine_mode mode;
319 int unsignedp, volatilep;
321 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
322 &mode, &unsignedp, &volatilep, true);
324 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
325 if (offset && TREE_CODE (offset) == PLUS_EXPR
326 && host_integerp (TREE_OPERAND (offset, 1), 1))
328 /* Any overflow in calculating offset_bits won't change
331 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
335 inner = MIN (inner, (offset_bits & -offset_bits));
336 offset = TREE_OPERAND (offset, 0);
338 if (offset && TREE_CODE (offset) == MULT_EXPR
339 && host_integerp (TREE_OPERAND (offset, 1), 1))
341 /* Any overflow in calculating offset_factor won't change
343 unsigned offset_factor
344 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
348 inner = MIN (inner, (offset_factor & -offset_factor));
351 inner = MIN (inner, BITS_PER_UNIT);
354 align = MIN (inner, DECL_ALIGN (exp));
355 #ifdef CONSTANT_ALIGNMENT
356 else if (CONSTANT_CLASS_P (exp))
357 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
359 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
360 || TREE_CODE (exp) == INDIRECT_REF)
361 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
363 align = MIN (align, inner);
364 return MIN (align, max_align);
372 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
373 way, because it could contain a zero byte in the middle.
374 TREE_STRING_LENGTH is the size of the character array, not the string.
376 ONLY_VALUE should be nonzero if the result is not going to be emitted
377 into the instruction stream and zero if it is going to be expanded.
378 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
379 is returned, otherwise NULL, since
380 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
381 evaluate the side-effects.
383 The value returned is of type `ssizetype'.
385 Unfortunately, string_constant can't access the values of const char
386 arrays with initializers, so neither can we do so here. */
389 c_strlen (tree src, int only_value)
392 HOST_WIDE_INT offset;
397 if (TREE_CODE (src) == COND_EXPR
398 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
402 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
403 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
404 if (tree_int_cst_equal (len1, len2))
408 if (TREE_CODE (src) == COMPOUND_EXPR
409 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
410 return c_strlen (TREE_OPERAND (src, 1), only_value);
412 src = string_constant (src, &offset_node);
416 max = TREE_STRING_LENGTH (src) - 1;
417 ptr = TREE_STRING_POINTER (src);
419 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
421 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
422 compute the offset to the following null if we don't know where to
423 start searching for it. */
426 for (i = 0; i < max; i++)
430 /* We don't know the starting offset, but we do know that the string
431 has no internal zero bytes. We can assume that the offset falls
432 within the bounds of the string; otherwise, the programmer deserves
433 what he gets. Subtract the offset from the length of the string,
434 and return that. This would perhaps not be valid if we were dealing
435 with named arrays in addition to literal string constants. */
437 return size_diffop (size_int (max), offset_node);
440 /* We have a known offset into the string. Start searching there for
441 a null character if we can represent it as a single HOST_WIDE_INT. */
442 if (offset_node == 0)
444 else if (! host_integerp (offset_node, 0))
447 offset = tree_low_cst (offset_node, 0);
449 /* If the offset is known to be out of bounds, warn, and call strlen at
451 if (offset < 0 || offset > max)
453 warning (0, "offset outside bounds of constant string");
457 /* Use strlen to search for the first zero byte. Since any strings
458 constructed with build_string will have nulls appended, we win even
459 if we get handed something like (char[4])"abcd".
461 Since OFFSET is our starting index into the string, no further
462 calculation is needed. */
463 return ssize_int (strlen (ptr + offset));
466 /* Return a char pointer for a C string if it is a string constant
467 or sum of string constant and integer constant. */
474 src = string_constant (src, &offset_node);
478 if (offset_node == 0)
479 return TREE_STRING_POINTER (src);
480 else if (!host_integerp (offset_node, 1)
481 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
484 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
487 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
488 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
491 c_readstr (const char *str, enum machine_mode mode)
497 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
502 for (i = 0; i < GET_MODE_SIZE (mode); i++)
505 if (WORDS_BIG_ENDIAN)
506 j = GET_MODE_SIZE (mode) - i - 1;
507 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
508 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
509 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
511 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
514 ch = (unsigned char) str[i];
515 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
517 return immed_double_const (c[0], c[1], mode);
520 /* Cast a target constant CST to target CHAR and if that value fits into
521 host char type, return zero and put that value into variable pointed to by
525 target_char_cast (tree cst, char *p)
527 unsigned HOST_WIDE_INT val, hostval;
529 if (!host_integerp (cst, 1)
530 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
533 val = tree_low_cst (cst, 1);
534 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
535 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
538 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
539 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
548 /* Similar to save_expr, but assumes that arbitrary code is not executed
549 in between the multiple evaluations. In particular, we assume that a
550 non-addressable local variable will not be modified. */
553 builtin_save_expr (tree exp)
555 if (TREE_ADDRESSABLE (exp) == 0
556 && (TREE_CODE (exp) == PARM_DECL
557 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
560 return save_expr (exp);
563 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
564 times to get the address of either a higher stack frame, or a return
565 address located within it (depending on FNDECL_CODE). */
568 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
572 #ifdef INITIAL_FRAME_ADDRESS_RTX
573 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
577 /* For a zero count with __builtin_return_address, we don't care what
578 frame address we return, because target-specific definitions will
579 override us. Therefore frame pointer elimination is OK, and using
580 the soft frame pointer is OK.
582 For a nonzero count, or a zero count with __builtin_frame_address,
583 we require a stable offset from the current frame pointer to the
584 previous one, so we must use the hard frame pointer, and
585 we must disable frame pointer elimination. */
586 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
587 tem = frame_pointer_rtx;
590 tem = hard_frame_pointer_rtx;
592 /* Tell reload not to eliminate the frame pointer. */
593 current_function_accesses_prior_frames = 1;
597 /* Some machines need special handling before we can access
598 arbitrary frames. For example, on the SPARC, we must first flush
599 all register windows to the stack. */
600 #ifdef SETUP_FRAME_ADDRESSES
602 SETUP_FRAME_ADDRESSES ();
605 /* On the SPARC, the return address is not in the frame, it is in a
606 register. There is no way to access it off of the current frame
607 pointer, but it can be accessed off the previous frame pointer by
608 reading the value from the register window save area. */
609 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
610 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
614 /* Scan back COUNT frames to the specified frame. */
615 for (i = 0; i < count; i++)
617 /* Assume the dynamic chain pointer is in the word that the
618 frame address points to, unless otherwise specified. */
619 #ifdef DYNAMIC_CHAIN_ADDRESS
620 tem = DYNAMIC_CHAIN_ADDRESS (tem);
622 tem = memory_address (Pmode, tem);
623 tem = gen_frame_mem (Pmode, tem);
624 tem = copy_to_reg (tem);
627 /* For __builtin_frame_address, return what we've got. But, on
628 the SPARC for example, we may have to add a bias. */
629 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
630 #ifdef FRAME_ADDR_RTX
631 return FRAME_ADDR_RTX (tem);
636 /* For __builtin_return_address, get the return address from that frame. */
637 #ifdef RETURN_ADDR_RTX
638 tem = RETURN_ADDR_RTX (count, tem);
640 tem = memory_address (Pmode,
641 plus_constant (tem, GET_MODE_SIZE (Pmode)));
642 tem = gen_frame_mem (Pmode, tem);
647 /* Alias set used for setjmp buffer. */
648 static HOST_WIDE_INT setjmp_alias_set = -1;
650 /* Construct the leading half of a __builtin_setjmp call. Control will
651 return to RECEIVER_LABEL. This is also called directly by the SJLJ
652 exception handling code. */
655 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
657 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
661 if (setjmp_alias_set == -1)
662 setjmp_alias_set = new_alias_set ();
664 buf_addr = convert_memory_address (Pmode, buf_addr);
666 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
668 /* We store the frame pointer and the address of receiver_label in
669 the buffer and use the rest of it for the stack save area, which
670 is machine-dependent. */
672 mem = gen_rtx_MEM (Pmode, buf_addr);
673 set_mem_alias_set (mem, setjmp_alias_set);
674 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
676 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
677 set_mem_alias_set (mem, setjmp_alias_set);
679 emit_move_insn (validize_mem (mem),
680 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
682 stack_save = gen_rtx_MEM (sa_mode,
683 plus_constant (buf_addr,
684 2 * GET_MODE_SIZE (Pmode)));
685 set_mem_alias_set (stack_save, setjmp_alias_set);
686 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
688 /* If there is further processing to do, do it. */
689 #ifdef HAVE_builtin_setjmp_setup
690 if (HAVE_builtin_setjmp_setup)
691 emit_insn (gen_builtin_setjmp_setup (buf_addr));
694 /* Tell optimize_save_area_alloca that extra work is going to
695 need to go on during alloca. */
696 current_function_calls_setjmp = 1;
698 /* We have a nonlocal label. */
699 current_function_has_nonlocal_label = 1;
702 /* Construct the trailing part of a __builtin_setjmp call. This is
703 also called directly by the SJLJ exception handling code. */
706 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
708 /* Clobber the FP when we get here, so we have to make sure it's
709 marked as used by this function. */
710 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
712 /* Mark the static chain as clobbered here so life information
713 doesn't get messed up for it. */
714 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
716 /* Now put in the code to restore the frame pointer, and argument
717 pointer, if needed. */
718 #ifdef HAVE_nonlocal_goto
719 if (! HAVE_nonlocal_goto)
722 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
723 /* This might change the hard frame pointer in ways that aren't
724 apparent to early optimization passes, so force a clobber. */
725 emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
728 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
729 if (fixed_regs[ARG_POINTER_REGNUM])
731 #ifdef ELIMINABLE_REGS
733 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
735 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
736 if (elim_regs[i].from == ARG_POINTER_REGNUM
737 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
740 if (i == ARRAY_SIZE (elim_regs))
743 /* Now restore our arg pointer from the address at which it
744 was saved in our stack frame. */
745 emit_move_insn (virtual_incoming_args_rtx,
746 copy_to_reg (get_arg_pointer_save_area (cfun)));
751 #ifdef HAVE_builtin_setjmp_receiver
752 if (HAVE_builtin_setjmp_receiver)
753 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
756 #ifdef HAVE_nonlocal_goto_receiver
757 if (HAVE_nonlocal_goto_receiver)
758 emit_insn (gen_nonlocal_goto_receiver ());
763 /* We must not allow the code we just generated to be reordered by
764 scheduling. Specifically, the update of the frame pointer must
765 happen immediately, not later. */
766 emit_insn (gen_blockage ());
769 /* __builtin_longjmp is passed a pointer to an array of five words (not
770 all will be used on all machines). It operates similarly to the C
771 library function of the same name, but is more efficient. Much of
772 the code below is copied from the handling of non-local gotos. */
775 expand_builtin_longjmp (rtx buf_addr, rtx value)
777 rtx fp, lab, stack, insn, last;
778 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
780 if (setjmp_alias_set == -1)
781 setjmp_alias_set = new_alias_set ();
783 buf_addr = convert_memory_address (Pmode, buf_addr);
785 buf_addr = force_reg (Pmode, buf_addr);
787 /* We used to store value in static_chain_rtx, but that fails if pointers
788 are smaller than integers. We instead require that the user must pass
789 a second argument of 1, because that is what builtin_setjmp will
790 return. This also makes EH slightly more efficient, since we are no
791 longer copying around a value that we don't care about. */
792 gcc_assert (value == const1_rtx);
794 last = get_last_insn ();
795 #ifdef HAVE_builtin_longjmp
796 if (HAVE_builtin_longjmp)
797 emit_insn (gen_builtin_longjmp (buf_addr));
801 fp = gen_rtx_MEM (Pmode, buf_addr);
802 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
803 GET_MODE_SIZE (Pmode)));
805 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
806 2 * GET_MODE_SIZE (Pmode)));
807 set_mem_alias_set (fp, setjmp_alias_set);
808 set_mem_alias_set (lab, setjmp_alias_set);
809 set_mem_alias_set (stack, setjmp_alias_set);
811 /* Pick up FP, label, and SP from the block and jump. This code is
812 from expand_goto in stmt.c; see there for detailed comments. */
813 #ifdef HAVE_nonlocal_goto
814 if (HAVE_nonlocal_goto)
815 /* We have to pass a value to the nonlocal_goto pattern that will
816 get copied into the static_chain pointer, but it does not matter
817 what that value is, because builtin_setjmp does not use it. */
818 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
822 lab = copy_to_reg (lab);
824 emit_insn (gen_rtx_CLOBBER (VOIDmode,
825 gen_rtx_MEM (BLKmode,
826 gen_rtx_SCRATCH (VOIDmode))));
827 emit_insn (gen_rtx_CLOBBER (VOIDmode,
828 gen_rtx_MEM (BLKmode,
829 hard_frame_pointer_rtx)));
831 emit_move_insn (hard_frame_pointer_rtx, fp);
832 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
834 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
835 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
836 emit_indirect_jump (lab);
840 /* Search backwards and mark the jump insn as a non-local goto.
841 Note that this precludes the use of __builtin_longjmp to a
842 __builtin_setjmp target in the same function. However, we've
843 already cautioned the user that these functions are for
844 internal exception handling use only. */
845 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
847 gcc_assert (insn != last);
851 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
855 else if (CALL_P (insn))
860 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
861 and the address of the save area. */
864 expand_builtin_nonlocal_goto (tree exp)
866 tree t_label, t_save_area;
867 rtx r_label, r_save_area, r_fp, r_sp, insn;
869 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
872 t_label = CALL_EXPR_ARG (exp, 0);
873 t_save_area = CALL_EXPR_ARG (exp, 1);
875 r_label = expand_normal (t_label);
876 r_label = convert_memory_address (Pmode, r_label);
877 r_save_area = expand_normal (t_save_area);
878 r_save_area = convert_memory_address (Pmode, r_save_area);
879 r_fp = gen_rtx_MEM (Pmode, r_save_area);
880 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
881 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
883 current_function_has_nonlocal_goto = 1;
885 #ifdef HAVE_nonlocal_goto
886 /* ??? We no longer need to pass the static chain value, afaik. */
887 if (HAVE_nonlocal_goto)
888 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
892 r_label = copy_to_reg (r_label);
894 emit_insn (gen_rtx_CLOBBER (VOIDmode,
895 gen_rtx_MEM (BLKmode,
896 gen_rtx_SCRATCH (VOIDmode))));
898 emit_insn (gen_rtx_CLOBBER (VOIDmode,
899 gen_rtx_MEM (BLKmode,
900 hard_frame_pointer_rtx)));
902 /* Restore frame pointer for containing function.
903 This sets the actual hard register used for the frame pointer
904 to the location of the function's incoming static chain info.
905 The non-local goto handler will then adjust it to contain the
906 proper value and reload the argument pointer, if needed. */
907 emit_move_insn (hard_frame_pointer_rtx, r_fp);
908 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
910 /* USE of hard_frame_pointer_rtx added for consistency;
911 not clear if really needed. */
912 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
913 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
914 emit_indirect_jump (r_label);
917 /* Search backwards to the jump insn and mark it as a
919 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
923 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
924 const0_rtx, REG_NOTES (insn));
927 else if (CALL_P (insn))
934 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
935 (not all will be used on all machines) that was passed to __builtin_setjmp.
936 It updates the stack pointer in that block to correspond to the current
940 expand_builtin_update_setjmp_buf (rtx buf_addr)
942 enum machine_mode sa_mode = Pmode;
946 #ifdef HAVE_save_stack_nonlocal
947 if (HAVE_save_stack_nonlocal)
948 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
950 #ifdef STACK_SAVEAREA_MODE
951 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
955 = gen_rtx_MEM (sa_mode,
958 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
962 emit_insn (gen_setjmp ());
965 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
968 /* Expand a call to __builtin_prefetch. For a target that does not support
969 data prefetch, evaluate the memory address argument in case it has side
973 expand_builtin_prefetch (tree exp)
975 tree arg0, arg1, arg2;
979 if (!validate_arglist (exp, POINTER_TYPE, 0))
982 arg0 = CALL_EXPR_ARG (exp, 0);
984 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
985 zero (read) and argument 2 (locality) defaults to 3 (high degree of
987 nargs = call_expr_nargs (exp);
989 arg1 = CALL_EXPR_ARG (exp, 1);
991 arg1 = integer_zero_node;
993 arg2 = CALL_EXPR_ARG (exp, 2);
995 arg2 = build_int_cst (NULL_TREE, 3);
997 /* Argument 0 is an address. */
998 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1000 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1001 if (TREE_CODE (arg1) != INTEGER_CST)
1003 error ("second argument to %<__builtin_prefetch%> must be a constant");
1004 arg1 = integer_zero_node;
1006 op1 = expand_normal (arg1);
1007 /* Argument 1 must be either zero or one. */
1008 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1010 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1015 /* Argument 2 (locality) must be a compile-time constant int. */
1016 if (TREE_CODE (arg2) != INTEGER_CST)
1018 error ("third argument to %<__builtin_prefetch%> must be a constant");
1019 arg2 = integer_zero_node;
1021 op2 = expand_normal (arg2);
1022 /* Argument 2 must be 0, 1, 2, or 3. */
1023 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1025 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1029 #ifdef HAVE_prefetch
1032 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1034 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1035 || (GET_MODE (op0) != Pmode))
1037 op0 = convert_memory_address (Pmode, op0);
1038 op0 = force_reg (Pmode, op0);
1040 emit_insn (gen_prefetch (op0, op1, op2));
1044 /* Don't do anything with direct references to volatile memory, but
1045 generate code to handle other side effects. */
1046 if (!MEM_P (op0) && side_effects_p (op0))
1050 /* Get a MEM rtx for expression EXP which is the address of an operand
1051 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1052 the maximum length of the block of memory that might be accessed or
1056 get_memory_rtx (tree exp, tree len)
1058 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1059 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1061 /* Get an expression we can use to find the attributes to assign to MEM.
1062 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1063 we can. First remove any nops. */
1064 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
1065 || TREE_CODE (exp) == NON_LVALUE_EXPR)
1066 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1067 exp = TREE_OPERAND (exp, 0);
1069 if (TREE_CODE (exp) == ADDR_EXPR)
1070 exp = TREE_OPERAND (exp, 0);
1071 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1072 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1076 /* Honor attributes derived from exp, except for the alias set
1077 (as builtin stringops may alias with anything) and the size
1078 (as stringops may access multiple array elements). */
1081 set_mem_attributes (mem, exp, 0);
1083 /* Allow the string and memory builtins to overflow from one
1084 field into another, see http://gcc.gnu.org/PR23561.
1085 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1086 memory accessed by the string or memory builtin will fit
1087 within the field. */
1088 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1090 tree mem_expr = MEM_EXPR (mem);
1091 HOST_WIDE_INT offset = -1, length = -1;
1094 while (TREE_CODE (inner) == ARRAY_REF
1095 || TREE_CODE (inner) == NOP_EXPR
1096 || TREE_CODE (inner) == CONVERT_EXPR
1097 || TREE_CODE (inner) == NON_LVALUE_EXPR
1098 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1099 || TREE_CODE (inner) == SAVE_EXPR)
1100 inner = TREE_OPERAND (inner, 0);
1102 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1104 if (MEM_OFFSET (mem)
1105 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1106 offset = INTVAL (MEM_OFFSET (mem));
1108 if (offset >= 0 && len && host_integerp (len, 0))
1109 length = tree_low_cst (len, 0);
1111 while (TREE_CODE (inner) == COMPONENT_REF)
1113 tree field = TREE_OPERAND (inner, 1);
1114 gcc_assert (! DECL_BIT_FIELD (field));
1115 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1116 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1119 && TYPE_SIZE_UNIT (TREE_TYPE (inner))
1120 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
1123 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
1124 /* If we can prove the memory starting at XEXP (mem, 0)
1125 and ending at XEXP (mem, 0) + LENGTH will fit into
1126 this field, we can keep that COMPONENT_REF in MEM_EXPR. */
1129 && offset + length <= size)
1134 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1135 offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
1136 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1144 mem_expr = TREE_OPERAND (mem_expr, 0);
1145 inner = TREE_OPERAND (inner, 0);
1148 if (mem_expr == NULL)
1150 if (mem_expr != MEM_EXPR (mem))
1152 set_mem_expr (mem, mem_expr);
1153 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1156 set_mem_alias_set (mem, 0);
1157 set_mem_size (mem, NULL_RTX);
1163 /* Built-in functions to perform an untyped call and return. */
1165 /* For each register that may be used for calling a function, this
1166 gives a mode used to copy the register's value. VOIDmode indicates
1167 the register is not used for calling a function. If the machine
1168 has register windows, this gives only the outbound registers.
1169 INCOMING_REGNO gives the corresponding inbound register. */
1170 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1172 /* For each register that may be used for returning values, this gives
1173 a mode used to copy the register's value. VOIDmode indicates the
1174 register is not used for returning values. If the machine has
1175 register windows, this gives only the outbound registers.
1176 INCOMING_REGNO gives the corresponding inbound register. */
1177 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1179 /* For each register that may be used for calling a function, this
1180 gives the offset of that register into the block returned by
1181 __builtin_apply_args. 0 indicates that the register is not
1182 used for calling a function. */
1183 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1185 /* Return the size required for the block returned by __builtin_apply_args,
1186 and initialize apply_args_mode. */
1189 apply_args_size (void)
1191 static int size = -1;
1194 enum machine_mode mode;
1196 /* The values computed by this function never change. */
1199 /* The first value is the incoming arg-pointer. */
1200 size = GET_MODE_SIZE (Pmode);
1202 /* The second value is the structure value address unless this is
1203 passed as an "invisible" first argument. */
1204 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1205 size += GET_MODE_SIZE (Pmode);
1207 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1208 if (FUNCTION_ARG_REGNO_P (regno))
1210 mode = reg_raw_mode[regno];
1212 gcc_assert (mode != VOIDmode);
1214 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1215 if (size % align != 0)
1216 size = CEIL (size, align) * align;
1217 apply_args_reg_offset[regno] = size;
1218 size += GET_MODE_SIZE (mode);
1219 apply_args_mode[regno] = mode;
1223 apply_args_mode[regno] = VOIDmode;
1224 apply_args_reg_offset[regno] = 0;
1230 /* Return the size required for the block returned by __builtin_apply,
1231 and initialize apply_result_mode. */
1234 apply_result_size (void)
1236 static int size = -1;
1238 enum machine_mode mode;
1240 /* The values computed by this function never change. */
1245 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1246 if (FUNCTION_VALUE_REGNO_P (regno))
1248 mode = reg_raw_mode[regno];
1250 gcc_assert (mode != VOIDmode);
1252 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1253 if (size % align != 0)
1254 size = CEIL (size, align) * align;
1255 size += GET_MODE_SIZE (mode);
1256 apply_result_mode[regno] = mode;
1259 apply_result_mode[regno] = VOIDmode;
1261 /* Allow targets that use untyped_call and untyped_return to override
1262 the size so that machine-specific information can be stored here. */
1263 #ifdef APPLY_RESULT_SIZE
1264 size = APPLY_RESULT_SIZE;
1270 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1271 /* Create a vector describing the result block RESULT. If SAVEP is true,
1272 the result block is used to save the values; otherwise it is used to
1273 restore the values. */
1276 result_vector (int savep, rtx result)
1278 int regno, size, align, nelts;
1279 enum machine_mode mode;
1281 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1284 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1285 if ((mode = apply_result_mode[regno]) != VOIDmode)
1287 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1288 if (size % align != 0)
1289 size = CEIL (size, align) * align;
1290 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1291 mem = adjust_address (result, mode, size);
1292 savevec[nelts++] = (savep
1293 ? gen_rtx_SET (VOIDmode, mem, reg)
1294 : gen_rtx_SET (VOIDmode, reg, mem));
1295 size += GET_MODE_SIZE (mode);
1297 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1299 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1301 /* Save the state required to perform an untyped call with the same
1302 arguments as were passed to the current function. */
1305 expand_builtin_apply_args_1 (void)
1308 int size, align, regno;
1309 enum machine_mode mode;
1310 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1312 /* Create a block where the arg-pointer, structure value address,
1313 and argument registers can be saved. */
1314 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1316 /* Walk past the arg-pointer and structure value address. */
1317 size = GET_MODE_SIZE (Pmode);
1318 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1319 size += GET_MODE_SIZE (Pmode);
1321 /* Save each register used in calling a function to the block. */
1322 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1323 if ((mode = apply_args_mode[regno]) != VOIDmode)
1325 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1326 if (size % align != 0)
1327 size = CEIL (size, align) * align;
1329 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1331 emit_move_insn (adjust_address (registers, mode, size), tem);
1332 size += GET_MODE_SIZE (mode);
1335 /* Save the arg pointer to the block. */
1336 tem = copy_to_reg (virtual_incoming_args_rtx);
1337 #ifdef STACK_GROWS_DOWNWARD
1338 /* We need the pointer as the caller actually passed them to us, not
1339 as we might have pretended they were passed. Make sure it's a valid
1340 operand, as emit_move_insn isn't expected to handle a PLUS. */
1342 = force_operand (plus_constant (tem, current_function_pretend_args_size),
1345 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1347 size = GET_MODE_SIZE (Pmode);
1349 /* Save the structure value address unless this is passed as an
1350 "invisible" first argument. */
1351 if (struct_incoming_value)
1353 emit_move_insn (adjust_address (registers, Pmode, size),
1354 copy_to_reg (struct_incoming_value));
1355 size += GET_MODE_SIZE (Pmode);
1358 /* Return the address of the block. */
1359 return copy_addr_to_reg (XEXP (registers, 0));
1362 /* __builtin_apply_args returns block of memory allocated on
1363 the stack into which is stored the arg pointer, structure
1364 value address, static chain, and all the registers that might
1365 possibly be used in performing a function call. The code is
1366 moved to the start of the function so the incoming values are
1370 expand_builtin_apply_args (void)
1372 /* Don't do __builtin_apply_args more than once in a function.
1373 Save the result of the first call and reuse it. */
1374 if (apply_args_value != 0)
1375 return apply_args_value;
1377 /* When this function is called, it means that registers must be
1378 saved on entry to this function. So we migrate the
1379 call to the first insn of this function. */
1384 temp = expand_builtin_apply_args_1 ();
1388 apply_args_value = temp;
1390 /* Put the insns after the NOTE that starts the function.
1391 If this is inside a start_sequence, make the outer-level insn
1392 chain current, so the code is placed at the start of the
1394 push_topmost_sequence ();
1395 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1396 pop_topmost_sequence ();
1401 /* Perform an untyped call and save the state required to perform an
1402 untyped return of whatever value was returned by the given function. */
1405 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1407 int size, align, regno;
1408 enum machine_mode mode;
1409 rtx incoming_args, result, reg, dest, src, call_insn;
1410 rtx old_stack_level = 0;
1411 rtx call_fusage = 0;
1412 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1414 arguments = convert_memory_address (Pmode, arguments);
1416 /* Create a block where the return registers can be saved. */
1417 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1419 /* Fetch the arg pointer from the ARGUMENTS block. */
1420 incoming_args = gen_reg_rtx (Pmode);
1421 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1422 #ifndef STACK_GROWS_DOWNWARD
1423 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1424 incoming_args, 0, OPTAB_LIB_WIDEN);
1427 /* Push a new argument block and copy the arguments. Do not allow
1428 the (potential) memcpy call below to interfere with our stack
1430 do_pending_stack_adjust ();
1433 /* Save the stack with nonlocal if available. */
1434 #ifdef HAVE_save_stack_nonlocal
1435 if (HAVE_save_stack_nonlocal)
1436 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1439 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1441 /* Allocate a block of memory onto the stack and copy the memory
1442 arguments to the outgoing arguments address. */
1443 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1444 dest = virtual_outgoing_args_rtx;
1445 #ifndef STACK_GROWS_DOWNWARD
1446 if (GET_CODE (argsize) == CONST_INT)
1447 dest = plus_constant (dest, -INTVAL (argsize));
1449 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1451 dest = gen_rtx_MEM (BLKmode, dest);
1452 set_mem_align (dest, PARM_BOUNDARY);
1453 src = gen_rtx_MEM (BLKmode, incoming_args);
1454 set_mem_align (src, PARM_BOUNDARY);
1455 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1457 /* Refer to the argument block. */
1459 arguments = gen_rtx_MEM (BLKmode, arguments);
1460 set_mem_align (arguments, PARM_BOUNDARY);
1462 /* Walk past the arg-pointer and structure value address. */
1463 size = GET_MODE_SIZE (Pmode);
1465 size += GET_MODE_SIZE (Pmode);
1467 /* Restore each of the registers previously saved. Make USE insns
1468 for each of these registers for use in making the call. */
1469 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1470 if ((mode = apply_args_mode[regno]) != VOIDmode)
1472 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1473 if (size % align != 0)
1474 size = CEIL (size, align) * align;
1475 reg = gen_rtx_REG (mode, regno);
1476 emit_move_insn (reg, adjust_address (arguments, mode, size));
1477 use_reg (&call_fusage, reg);
1478 size += GET_MODE_SIZE (mode);
1481 /* Restore the structure value address unless this is passed as an
1482 "invisible" first argument. */
1483 size = GET_MODE_SIZE (Pmode);
1486 rtx value = gen_reg_rtx (Pmode);
1487 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1488 emit_move_insn (struct_value, value);
1489 if (REG_P (struct_value))
1490 use_reg (&call_fusage, struct_value);
1491 size += GET_MODE_SIZE (Pmode);
1494 /* All arguments and registers used for the call are set up by now! */
1495 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1497 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1498 and we don't want to load it into a register as an optimization,
1499 because prepare_call_address already did it if it should be done. */
1500 if (GET_CODE (function) != SYMBOL_REF)
1501 function = memory_address (FUNCTION_MODE, function);
1503 /* Generate the actual call instruction and save the return value. */
1504 #ifdef HAVE_untyped_call
1505 if (HAVE_untyped_call)
1506 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1507 result, result_vector (1, result)));
1510 #ifdef HAVE_call_value
1511 if (HAVE_call_value)
1515 /* Locate the unique return register. It is not possible to
1516 express a call that sets more than one return register using
1517 call_value; use untyped_call for that. In fact, untyped_call
1518 only needs to save the return registers in the given block. */
1519 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1520 if ((mode = apply_result_mode[regno]) != VOIDmode)
1522 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1524 valreg = gen_rtx_REG (mode, regno);
1527 emit_call_insn (GEN_CALL_VALUE (valreg,
1528 gen_rtx_MEM (FUNCTION_MODE, function),
1529 const0_rtx, NULL_RTX, const0_rtx));
1531 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1537 /* Find the CALL insn we just emitted, and attach the register usage
1539 call_insn = last_call_insn ();
1540 add_function_usage_to (call_insn, call_fusage);
1542 /* Restore the stack. */
1543 #ifdef HAVE_save_stack_nonlocal
1544 if (HAVE_save_stack_nonlocal)
1545 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1548 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1552 /* Return the address of the result block. */
1553 result = copy_addr_to_reg (XEXP (result, 0));
1554 return convert_memory_address (ptr_mode, result);
1557 /* Perform an untyped return. */
1560 expand_builtin_return (rtx result)
1562 int size, align, regno;
1563 enum machine_mode mode;
1565 rtx call_fusage = 0;
1567 result = convert_memory_address (Pmode, result);
1569 apply_result_size ();
1570 result = gen_rtx_MEM (BLKmode, result);
1572 #ifdef HAVE_untyped_return
1573 if (HAVE_untyped_return)
1575 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1581 /* Restore the return value and note that each value is used. */
1583 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1584 if ((mode = apply_result_mode[regno]) != VOIDmode)
1586 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1587 if (size % align != 0)
1588 size = CEIL (size, align) * align;
1589 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1590 emit_move_insn (reg, adjust_address (result, mode, size));
1592 push_to_sequence (call_fusage);
1593 emit_insn (gen_rtx_USE (VOIDmode, reg));
1594 call_fusage = get_insns ();
1596 size += GET_MODE_SIZE (mode);
1599 /* Put the USE insns before the return. */
1600 emit_insn (call_fusage);
1602 /* Return whatever values was restored by jumping directly to the end
1604 expand_naked_return ();
1607 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1609 static enum type_class
1610 type_to_class (tree type)
1612 switch (TREE_CODE (type))
1614 case VOID_TYPE: return void_type_class;
1615 case INTEGER_TYPE: return integer_type_class;
1616 case ENUMERAL_TYPE: return enumeral_type_class;
1617 case BOOLEAN_TYPE: return boolean_type_class;
1618 case POINTER_TYPE: return pointer_type_class;
1619 case REFERENCE_TYPE: return reference_type_class;
1620 case OFFSET_TYPE: return offset_type_class;
1621 case REAL_TYPE: return real_type_class;
1622 case COMPLEX_TYPE: return complex_type_class;
1623 case FUNCTION_TYPE: return function_type_class;
1624 case METHOD_TYPE: return method_type_class;
1625 case RECORD_TYPE: return record_type_class;
1627 case QUAL_UNION_TYPE: return union_type_class;
1628 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1629 ? string_type_class : array_type_class);
1630 case LANG_TYPE: return lang_type_class;
1631 default: return no_type_class;
1635 /* Expand a call EXP to __builtin_classify_type. */
1638 expand_builtin_classify_type (tree exp)
1640 if (call_expr_nargs (exp))
1641 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1642 return GEN_INT (no_type_class);
1645 /* This helper macro, meant to be used in mathfn_built_in below,
1646 determines which among a set of three builtin math functions is
1647 appropriate for a given type mode. The `F' and `L' cases are
1648 automatically generated from the `double' case. */
1649 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1650 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1651 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1652 fcodel = BUILT_IN_MATHFN##L ; break;
1653 /* Similar to above, but appends _R after any F/L suffix. */
1654 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1655 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1656 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1657 fcodel = BUILT_IN_MATHFN##L_R ; break;
1659 /* Return mathematic function equivalent to FN but operating directly
1660 on TYPE, if available. If we can't do the conversion, return zero. */
1662 mathfn_built_in (tree type, enum built_in_function fn)
1664 enum built_in_function fcode, fcodef, fcodel;
1668 CASE_MATHFN (BUILT_IN_ACOS)
1669 CASE_MATHFN (BUILT_IN_ACOSH)
1670 CASE_MATHFN (BUILT_IN_ASIN)
1671 CASE_MATHFN (BUILT_IN_ASINH)
1672 CASE_MATHFN (BUILT_IN_ATAN)
1673 CASE_MATHFN (BUILT_IN_ATAN2)
1674 CASE_MATHFN (BUILT_IN_ATANH)
1675 CASE_MATHFN (BUILT_IN_CBRT)
1676 CASE_MATHFN (BUILT_IN_CEIL)
1677 CASE_MATHFN (BUILT_IN_CEXPI)
1678 CASE_MATHFN (BUILT_IN_COPYSIGN)
1679 CASE_MATHFN (BUILT_IN_COS)
1680 CASE_MATHFN (BUILT_IN_COSH)
1681 CASE_MATHFN (BUILT_IN_DREM)
1682 CASE_MATHFN (BUILT_IN_ERF)
1683 CASE_MATHFN (BUILT_IN_ERFC)
1684 CASE_MATHFN (BUILT_IN_EXP)
1685 CASE_MATHFN (BUILT_IN_EXP10)
1686 CASE_MATHFN (BUILT_IN_EXP2)
1687 CASE_MATHFN (BUILT_IN_EXPM1)
1688 CASE_MATHFN (BUILT_IN_FABS)
1689 CASE_MATHFN (BUILT_IN_FDIM)
1690 CASE_MATHFN (BUILT_IN_FLOOR)
1691 CASE_MATHFN (BUILT_IN_FMA)
1692 CASE_MATHFN (BUILT_IN_FMAX)
1693 CASE_MATHFN (BUILT_IN_FMIN)
1694 CASE_MATHFN (BUILT_IN_FMOD)
1695 CASE_MATHFN (BUILT_IN_FREXP)
1696 CASE_MATHFN (BUILT_IN_GAMMA)
1697 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1698 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1699 CASE_MATHFN (BUILT_IN_HYPOT)
1700 CASE_MATHFN (BUILT_IN_ILOGB)
1701 CASE_MATHFN (BUILT_IN_INF)
1702 CASE_MATHFN (BUILT_IN_ISINF)
1703 CASE_MATHFN (BUILT_IN_J0)
1704 CASE_MATHFN (BUILT_IN_J1)
1705 CASE_MATHFN (BUILT_IN_JN)
1706 CASE_MATHFN (BUILT_IN_LCEIL)
1707 CASE_MATHFN (BUILT_IN_LDEXP)
1708 CASE_MATHFN (BUILT_IN_LFLOOR)
1709 CASE_MATHFN (BUILT_IN_LGAMMA)
1710 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1711 CASE_MATHFN (BUILT_IN_LLCEIL)
1712 CASE_MATHFN (BUILT_IN_LLFLOOR)
1713 CASE_MATHFN (BUILT_IN_LLRINT)
1714 CASE_MATHFN (BUILT_IN_LLROUND)
1715 CASE_MATHFN (BUILT_IN_LOG)
1716 CASE_MATHFN (BUILT_IN_LOG10)
1717 CASE_MATHFN (BUILT_IN_LOG1P)
1718 CASE_MATHFN (BUILT_IN_LOG2)
1719 CASE_MATHFN (BUILT_IN_LOGB)
1720 CASE_MATHFN (BUILT_IN_LRINT)
1721 CASE_MATHFN (BUILT_IN_LROUND)
1722 CASE_MATHFN (BUILT_IN_MODF)
1723 CASE_MATHFN (BUILT_IN_NAN)
1724 CASE_MATHFN (BUILT_IN_NANS)
1725 CASE_MATHFN (BUILT_IN_NEARBYINT)
1726 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1727 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1728 CASE_MATHFN (BUILT_IN_POW)
1729 CASE_MATHFN (BUILT_IN_POWI)
1730 CASE_MATHFN (BUILT_IN_POW10)
1731 CASE_MATHFN (BUILT_IN_REMAINDER)
1732 CASE_MATHFN (BUILT_IN_REMQUO)
1733 CASE_MATHFN (BUILT_IN_RINT)
1734 CASE_MATHFN (BUILT_IN_ROUND)
1735 CASE_MATHFN (BUILT_IN_SCALB)
1736 CASE_MATHFN (BUILT_IN_SCALBLN)
1737 CASE_MATHFN (BUILT_IN_SCALBN)
1738 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1739 CASE_MATHFN (BUILT_IN_SIN)
1740 CASE_MATHFN (BUILT_IN_SINCOS)
1741 CASE_MATHFN (BUILT_IN_SINH)
1742 CASE_MATHFN (BUILT_IN_SQRT)
1743 CASE_MATHFN (BUILT_IN_TAN)
1744 CASE_MATHFN (BUILT_IN_TANH)
1745 CASE_MATHFN (BUILT_IN_TGAMMA)
1746 CASE_MATHFN (BUILT_IN_TRUNC)
1747 CASE_MATHFN (BUILT_IN_Y0)
1748 CASE_MATHFN (BUILT_IN_Y1)
1749 CASE_MATHFN (BUILT_IN_YN)
1755 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1756 return implicit_built_in_decls[fcode];
1757 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1758 return implicit_built_in_decls[fcodef];
1759 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1760 return implicit_built_in_decls[fcodel];
1765 /* If errno must be maintained, expand the RTL to check if the result,
1766 TARGET, of a built-in function call, EXP, is NaN, and if so set
1770 expand_errno_check (tree exp, rtx target)
1772 rtx lab = gen_label_rtx ();
1774 /* Test the result; if it is NaN, set errno=EDOM because
1775 the argument was not in the domain. */
1776 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1780 /* If this built-in doesn't throw an exception, set errno directly. */
1781 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1783 #ifdef GEN_ERRNO_RTX
1784 rtx errno_rtx = GEN_ERRNO_RTX;
1787 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1789 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1795 /* We can't set errno=EDOM directly; let the library call do it.
1796 Pop the arguments right away in case the call gets deleted. */
1798 expand_call (exp, target, 0);
1803 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1804 Return NULL_RTX if a normal call should be emitted rather than expanding
1805 the function in-line. EXP is the expression that is a call to the builtin
1806 function; if convenient, the result should be placed in TARGET.
1807 SUBTARGET may be used as the target for computing one of EXP's operands. */
1810 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1812 optab builtin_optab;
1813 rtx op0, insns, before_call;
1814 tree fndecl = get_callee_fndecl (exp);
1815 enum machine_mode mode;
1816 bool errno_set = false;
1819 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1822 arg = CALL_EXPR_ARG (exp, 0);
1824 switch (DECL_FUNCTION_CODE (fndecl))
1826 CASE_FLT_FN (BUILT_IN_SQRT):
1827 errno_set = ! tree_expr_nonnegative_p (arg);
1828 builtin_optab = sqrt_optab;
1830 CASE_FLT_FN (BUILT_IN_EXP):
1831 errno_set = true; builtin_optab = exp_optab; break;
1832 CASE_FLT_FN (BUILT_IN_EXP10):
1833 CASE_FLT_FN (BUILT_IN_POW10):
1834 errno_set = true; builtin_optab = exp10_optab; break;
1835 CASE_FLT_FN (BUILT_IN_EXP2):
1836 errno_set = true; builtin_optab = exp2_optab; break;
1837 CASE_FLT_FN (BUILT_IN_EXPM1):
1838 errno_set = true; builtin_optab = expm1_optab; break;
1839 CASE_FLT_FN (BUILT_IN_LOGB):
1840 errno_set = true; builtin_optab = logb_optab; break;
1841 CASE_FLT_FN (BUILT_IN_LOG):
1842 errno_set = true; builtin_optab = log_optab; break;
1843 CASE_FLT_FN (BUILT_IN_LOG10):
1844 errno_set = true; builtin_optab = log10_optab; break;
1845 CASE_FLT_FN (BUILT_IN_LOG2):
1846 errno_set = true; builtin_optab = log2_optab; break;
1847 CASE_FLT_FN (BUILT_IN_LOG1P):
1848 errno_set = true; builtin_optab = log1p_optab; break;
1849 CASE_FLT_FN (BUILT_IN_ASIN):
1850 builtin_optab = asin_optab; break;
1851 CASE_FLT_FN (BUILT_IN_ACOS):
1852 builtin_optab = acos_optab; break;
1853 CASE_FLT_FN (BUILT_IN_TAN):
1854 builtin_optab = tan_optab; break;
1855 CASE_FLT_FN (BUILT_IN_ATAN):
1856 builtin_optab = atan_optab; break;
1857 CASE_FLT_FN (BUILT_IN_FLOOR):
1858 builtin_optab = floor_optab; break;
1859 CASE_FLT_FN (BUILT_IN_CEIL):
1860 builtin_optab = ceil_optab; break;
1861 CASE_FLT_FN (BUILT_IN_TRUNC):
1862 builtin_optab = btrunc_optab; break;
1863 CASE_FLT_FN (BUILT_IN_ROUND):
1864 builtin_optab = round_optab; break;
1865 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1866 builtin_optab = nearbyint_optab;
1867 if (flag_trapping_math)
1869 /* Else fallthrough and expand as rint. */
1870 CASE_FLT_FN (BUILT_IN_RINT):
1871 builtin_optab = rint_optab; break;
1876 /* Make a suitable register to place result in. */
1877 mode = TYPE_MODE (TREE_TYPE (exp));
1879 if (! flag_errno_math || ! HONOR_NANS (mode))
1882 /* Before working hard, check whether the instruction is available. */
1883 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1885 target = gen_reg_rtx (mode);
1887 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1888 need to expand the argument again. This way, we will not perform
1889 side-effects more the once. */
1890 narg = builtin_save_expr (arg);
1894 exp = build_call_expr (fndecl, 1, arg);
1897 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1901 /* Compute into TARGET.
1902 Set TARGET to wherever the result comes back. */
1903 target = expand_unop (mode, builtin_optab, op0, target, 0);
1908 expand_errno_check (exp, target);
1910 /* Output the entire sequence. */
1911 insns = get_insns ();
1917 /* If we were unable to expand via the builtin, stop the sequence
1918 (without outputting the insns) and call to the library function
1919 with the stabilized argument list. */
1923 before_call = get_last_insn ();
1925 target = expand_call (exp, target, target == const0_rtx);
1927 /* If this is a sqrt operation and we don't care about errno, try to
1928 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1929 This allows the semantics of the libcall to be visible to the RTL
1931 if (builtin_optab == sqrt_optab && !errno_set)
1933 /* Search backwards through the insns emitted by expand_call looking
1934 for the instruction with the REG_RETVAL note. */
1935 rtx last = get_last_insn ();
1936 while (last != before_call)
1938 if (find_reg_note (last, REG_RETVAL, NULL))
1940 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1941 /* Check that the REQ_EQUAL note is an EXPR_LIST with
1942 two elements, i.e. symbol_ref(sqrt) and the operand. */
1944 && GET_CODE (note) == EXPR_LIST
1945 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1946 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1947 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1949 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1950 /* Check operand is a register with expected mode. */
1953 && GET_MODE (operand) == mode)
1955 /* Replace the REG_EQUAL note with a SQRT rtx. */
1956 rtx equiv = gen_rtx_SQRT (mode, operand);
1957 set_unique_reg_note (last, REG_EQUAL, equiv);
1962 last = PREV_INSN (last);
1969 /* Expand a call to the builtin binary math functions (pow and atan2).
1970 Return NULL_RTX if a normal call should be emitted rather than expanding the
1971 function in-line. EXP is the expression that is a call to the builtin
1972 function; if convenient, the result should be placed in TARGET.
1973 SUBTARGET may be used as the target for computing one of EXP's
1977 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1979 optab builtin_optab;
1980 rtx op0, op1, insns;
1981 int op1_type = REAL_TYPE;
1982 tree fndecl = get_callee_fndecl (exp);
1983 tree arg0, arg1, narg;
1984 enum machine_mode mode;
1985 bool errno_set = true;
1988 switch (DECL_FUNCTION_CODE (fndecl))
1990 CASE_FLT_FN (BUILT_IN_SCALBN):
1991 CASE_FLT_FN (BUILT_IN_SCALBLN):
1992 CASE_FLT_FN (BUILT_IN_LDEXP):
1993 op1_type = INTEGER_TYPE;
1998 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2001 arg0 = CALL_EXPR_ARG (exp, 0);
2002 arg1 = CALL_EXPR_ARG (exp, 1);
2004 switch (DECL_FUNCTION_CODE (fndecl))
2006 CASE_FLT_FN (BUILT_IN_POW):
2007 builtin_optab = pow_optab; break;
2008 CASE_FLT_FN (BUILT_IN_ATAN2):
2009 builtin_optab = atan2_optab; break;
2010 CASE_FLT_FN (BUILT_IN_SCALB):
2011 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2013 builtin_optab = scalb_optab; break;
2014 CASE_FLT_FN (BUILT_IN_SCALBN):
2015 CASE_FLT_FN (BUILT_IN_SCALBLN):
2016 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2018 /* Fall through... */
2019 CASE_FLT_FN (BUILT_IN_LDEXP):
2020 builtin_optab = ldexp_optab; break;
2021 CASE_FLT_FN (BUILT_IN_FMOD):
2022 builtin_optab = fmod_optab; break;
2023 CASE_FLT_FN (BUILT_IN_REMAINDER):
2024 CASE_FLT_FN (BUILT_IN_DREM):
2025 builtin_optab = remainder_optab; break;
2030 /* Make a suitable register to place result in. */
2031 mode = TYPE_MODE (TREE_TYPE (exp));
2033 /* Before working hard, check whether the instruction is available. */
2034 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2037 target = gen_reg_rtx (mode);
2039 if (! flag_errno_math || ! HONOR_NANS (mode))
2042 /* Always stabilize the argument list. */
2043 narg = builtin_save_expr (arg1);
2049 narg = builtin_save_expr (arg0);
2057 exp = build_call_expr (fndecl, 2, arg0, arg1);
2059 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2060 op1 = expand_normal (arg1);
2064 /* Compute into TARGET.
2065 Set TARGET to wherever the result comes back. */
2066 target = expand_binop (mode, builtin_optab, op0, op1,
2067 target, 0, OPTAB_DIRECT);
2069 /* If we were unable to expand via the builtin, stop the sequence
2070 (without outputting the insns) and call to the library function
2071 with the stabilized argument list. */
2075 return expand_call (exp, target, target == const0_rtx);
2079 expand_errno_check (exp, target);
2081 /* Output the entire sequence. */
2082 insns = get_insns ();
2089 /* Expand a call to the builtin sin and cos math functions.
2090 Return NULL_RTX if a normal call should be emitted rather than expanding the
2091 function in-line. EXP is the expression that is a call to the builtin
2092 function; if convenient, the result should be placed in TARGET.
2093 SUBTARGET may be used as the target for computing one of EXP's
2097 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2099 optab builtin_optab;
2101 tree fndecl = get_callee_fndecl (exp);
2102 enum machine_mode mode;
2105 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2108 arg = CALL_EXPR_ARG (exp, 0);
2110 switch (DECL_FUNCTION_CODE (fndecl))
2112 CASE_FLT_FN (BUILT_IN_SIN):
2113 CASE_FLT_FN (BUILT_IN_COS):
2114 builtin_optab = sincos_optab; break;
2119 /* Make a suitable register to place result in. */
2120 mode = TYPE_MODE (TREE_TYPE (exp));
2122 /* Check if sincos insn is available, otherwise fallback
2123 to sin or cos insn. */
2124 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2125 switch (DECL_FUNCTION_CODE (fndecl))
2127 CASE_FLT_FN (BUILT_IN_SIN):
2128 builtin_optab = sin_optab; break;
2129 CASE_FLT_FN (BUILT_IN_COS):
2130 builtin_optab = cos_optab; break;
2135 /* Before working hard, check whether the instruction is available. */
2136 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2138 target = gen_reg_rtx (mode);
2140 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2141 need to expand the argument again. This way, we will not perform
2142 side-effects more the once. */
2143 narg = save_expr (arg);
2147 exp = build_call_expr (fndecl, 1, arg);
2150 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2154 /* Compute into TARGET.
2155 Set TARGET to wherever the result comes back. */
2156 if (builtin_optab == sincos_optab)
2160 switch (DECL_FUNCTION_CODE (fndecl))
2162 CASE_FLT_FN (BUILT_IN_SIN):
2163 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2165 CASE_FLT_FN (BUILT_IN_COS):
2166 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2171 gcc_assert (result);
2175 target = expand_unop (mode, builtin_optab, op0, target, 0);
2180 /* Output the entire sequence. */
2181 insns = get_insns ();
2187 /* If we were unable to expand via the builtin, stop the sequence
2188 (without outputting the insns) and call to the library function
2189 with the stabilized argument list. */
2193 target = expand_call (exp, target, target == const0_rtx);
2198 /* Expand a call to one of the builtin math functions that operate on
2199 floating point argument and output an integer result (ilogb, isinf,
2201 Return 0 if a normal call should be emitted rather than expanding the
2202 function in-line. EXP is the expression that is a call to the builtin
2203 function; if convenient, the result should be placed in TARGET.
2204 SUBTARGET may be used as the target for computing one of EXP's operands. */
2207 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2209 optab builtin_optab = 0;
2210 enum insn_code icode = CODE_FOR_nothing;
2212 tree fndecl = get_callee_fndecl (exp);
2213 enum machine_mode mode;
2214 bool errno_set = false;
2217 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2220 arg = CALL_EXPR_ARG (exp, 0);
2222 switch (DECL_FUNCTION_CODE (fndecl))
2224 CASE_FLT_FN (BUILT_IN_ILOGB):
2225 errno_set = true; builtin_optab = ilogb_optab; break;
2226 CASE_FLT_FN (BUILT_IN_ISINF):
2227 builtin_optab = isinf_optab; break;
2228 case BUILT_IN_ISNORMAL:
2229 case BUILT_IN_ISFINITE:
2230 CASE_FLT_FN (BUILT_IN_FINITE):
2231 /* These builtins have no optabs (yet). */
2237 /* There's no easy way to detect the case we need to set EDOM. */
2238 if (flag_errno_math && errno_set)
2241 /* Optab mode depends on the mode of the input argument. */
2242 mode = TYPE_MODE (TREE_TYPE (arg));
2245 icode = builtin_optab->handlers[(int) mode].insn_code;
2247 /* Before working hard, check whether the instruction is available. */
2248 if (icode != CODE_FOR_nothing)
2250 /* Make a suitable register to place result in. */
2252 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2253 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2255 gcc_assert (insn_data[icode].operand[0].predicate
2256 (target, GET_MODE (target)));
2258 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2259 need to expand the argument again. This way, we will not perform
2260 side-effects more the once. */
2261 narg = builtin_save_expr (arg);
2265 exp = build_call_expr (fndecl, 1, arg);
2268 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2270 if (mode != GET_MODE (op0))
2271 op0 = convert_to_mode (mode, op0, 0);
2273 /* Compute into TARGET.
2274 Set TARGET to wherever the result comes back. */
2275 emit_unop_insn (icode, target, op0, UNKNOWN);
2279 /* If there is no optab, try generic code. */
2280 switch (DECL_FUNCTION_CODE (fndecl))
2284 CASE_FLT_FN (BUILT_IN_ISINF):
2286 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2287 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2288 tree const type = TREE_TYPE (arg);
2292 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2293 real_from_string (&r, buf);
2294 result = build_call_expr (isgr_fn, 2,
2295 fold_build1 (ABS_EXPR, type, arg),
2296 build_real (type, r));
2297 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2299 CASE_FLT_FN (BUILT_IN_FINITE):
2300 case BUILT_IN_ISFINITE:
2302 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2303 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2304 tree const type = TREE_TYPE (arg);
2308 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2309 real_from_string (&r, buf);
2310 result = build_call_expr (isle_fn, 2,
2311 fold_build1 (ABS_EXPR, type, arg),
2312 build_real (type, r));
2313 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2315 case BUILT_IN_ISNORMAL:
2317 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2318 islessequal(fabs(x),DBL_MAX). */
2319 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2320 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2321 tree const type = TREE_TYPE (arg);
2322 REAL_VALUE_TYPE rmax, rmin;
2325 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2326 real_from_string (&rmax, buf);
2327 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2328 real_from_string (&rmin, buf);
2329 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2330 result = build_call_expr (isle_fn, 2, arg,
2331 build_real (type, rmax));
2332 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2333 build_call_expr (isge_fn, 2, arg,
2334 build_real (type, rmin)));
2335 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2341 target = expand_call (exp, target, target == const0_rtx);
2346 /* Expand a call to the builtin sincos math function.
2347 Return NULL_RTX if a normal call should be emitted rather than expanding the
2348 function in-line. EXP is the expression that is a call to the builtin
2352 expand_builtin_sincos (tree exp)
2354 rtx op0, op1, op2, target1, target2;
2355 enum machine_mode mode;
2356 tree arg, sinp, cosp;
2359 if (!validate_arglist (exp, REAL_TYPE,
2360 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2363 arg = CALL_EXPR_ARG (exp, 0);
2364 sinp = CALL_EXPR_ARG (exp, 1);
2365 cosp = CALL_EXPR_ARG (exp, 2);
2367 /* Make a suitable register to place result in. */
2368 mode = TYPE_MODE (TREE_TYPE (arg));
2370 /* Check if sincos insn is available, otherwise emit the call. */
2371 if (sincos_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2374 target1 = gen_reg_rtx (mode);
2375 target2 = gen_reg_rtx (mode);
2377 op0 = expand_normal (arg);
2378 op1 = expand_normal (build_fold_indirect_ref (sinp));
2379 op2 = expand_normal (build_fold_indirect_ref (cosp));
2381 /* Compute into target1 and target2.
2382 Set TARGET to wherever the result comes back. */
2383 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2384 gcc_assert (result);
2386 /* Move target1 and target2 to the memory locations indicated
2388 emit_move_insn (op1, target1);
2389 emit_move_insn (op2, target2);
2394 /* Expand a call to the internal cexpi builtin to the sincos math function.
2395 EXP is the expression that is a call to the builtin function; if convenient,
2396 the result should be placed in TARGET. SUBTARGET may be used as the target
2397 for computing one of EXP's operands. */
2400 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2402 tree fndecl = get_callee_fndecl (exp);
2404 enum machine_mode mode;
2407 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2410 arg = CALL_EXPR_ARG (exp, 0);
2411 type = TREE_TYPE (arg);
2412 mode = TYPE_MODE (TREE_TYPE (arg));
2414 /* Try expanding via a sincos optab, fall back to emitting a libcall
2415 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2416 is only generated from sincos, cexp or if we have either of them. */
2417 if (sincos_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2419 op1 = gen_reg_rtx (mode);
2420 op2 = gen_reg_rtx (mode);
2422 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2424 /* Compute into op1 and op2. */
2425 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2427 else if (TARGET_HAS_SINCOS)
2429 tree call, fn = NULL_TREE;
2433 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2434 fn = built_in_decls[BUILT_IN_SINCOSF];
2435 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2436 fn = built_in_decls[BUILT_IN_SINCOS];
2437 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2438 fn = built_in_decls[BUILT_IN_SINCOSL];
2442 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2443 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2444 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2445 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2446 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2447 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2449 /* Make sure not to fold the sincos call again. */
2450 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2451 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2452 call, 3, arg, top1, top2));
2456 tree call, fn = NULL_TREE, narg;
2457 tree ctype = build_complex_type (type);
2459 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2460 fn = built_in_decls[BUILT_IN_CEXPF];
2461 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2462 fn = built_in_decls[BUILT_IN_CEXP];
2463 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2464 fn = built_in_decls[BUILT_IN_CEXPL];
2468 /* If we don't have a decl for cexp create one. This is the
2469 friendliest fallback if the user calls __builtin_cexpi
2470 without full target C99 function support. */
2471 if (fn == NULL_TREE)
2474 const char *name = NULL;
2476 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2478 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2480 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2483 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2484 fn = build_fn_decl (name, fntype);
2487 narg = fold_build2 (COMPLEX_EXPR, ctype,
2488 build_real (type, dconst0), arg);
2490 /* Make sure not to fold the cexp call again. */
2491 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2492 return expand_expr (build_call_nary (ctype, call, 1, narg),
2493 target, VOIDmode, EXPAND_NORMAL);
2496 /* Now build the proper return type. */
2497 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2498 make_tree (TREE_TYPE (arg), op2),
2499 make_tree (TREE_TYPE (arg), op1)),
2500 target, VOIDmode, EXPAND_NORMAL);
2503 /* Expand a call to one of the builtin rounding functions gcc defines
2504 as an extension (lfloor and lceil). As these are gcc extensions we
2505 do not need to worry about setting errno to EDOM.
2506 If expanding via optab fails, lower expression to (int)(floor(x)).
2507 EXP is the expression that is a call to the builtin function;
2508 if convenient, the result should be placed in TARGET. SUBTARGET may
2509 be used as the target for computing one of EXP's operands. */
2512 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2514 convert_optab builtin_optab;
2515 rtx op0, insns, tmp;
2516 tree fndecl = get_callee_fndecl (exp);
2517 enum built_in_function fallback_fn;
2518 tree fallback_fndecl;
2519 enum machine_mode mode;
2522 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2525 arg = CALL_EXPR_ARG (exp, 0);
2527 switch (DECL_FUNCTION_CODE (fndecl))
2529 CASE_FLT_FN (BUILT_IN_LCEIL):
2530 CASE_FLT_FN (BUILT_IN_LLCEIL):
2531 builtin_optab = lceil_optab;
2532 fallback_fn = BUILT_IN_CEIL;
2535 CASE_FLT_FN (BUILT_IN_LFLOOR):
2536 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2537 builtin_optab = lfloor_optab;
2538 fallback_fn = BUILT_IN_FLOOR;
2545 /* Make a suitable register to place result in. */
2546 mode = TYPE_MODE (TREE_TYPE (exp));
2548 target = gen_reg_rtx (mode);
2550 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2551 need to expand the argument again. This way, we will not perform
2552 side-effects more the once. */
2553 narg = builtin_save_expr (arg);
2557 exp = build_call_expr (fndecl, 1, arg);
2560 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2564 /* Compute into TARGET. */
2565 if (expand_sfix_optab (target, op0, builtin_optab))
2567 /* Output the entire sequence. */
2568 insns = get_insns ();
2574 /* If we were unable to expand via the builtin, stop the sequence
2575 (without outputting the insns). */
2578 /* Fall back to floating point rounding optab. */
2579 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2581 /* For non-C99 targets we may end up without a fallback fndecl here
2582 if the user called __builtin_lfloor directly. In this case emit
2583 a call to the floor/ceil variants nevertheless. This should result
2584 in the best user experience for not full C99 targets. */
2585 if (fallback_fndecl == NULL_TREE)
2588 const char *name = NULL;
2590 switch (DECL_FUNCTION_CODE (fndecl))
2592 case BUILT_IN_LCEIL:
2593 case BUILT_IN_LLCEIL:
2596 case BUILT_IN_LCEILF:
2597 case BUILT_IN_LLCEILF:
2600 case BUILT_IN_LCEILL:
2601 case BUILT_IN_LLCEILL:
2604 case BUILT_IN_LFLOOR:
2605 case BUILT_IN_LLFLOOR:
2608 case BUILT_IN_LFLOORF:
2609 case BUILT_IN_LLFLOORF:
2612 case BUILT_IN_LFLOORL:
2613 case BUILT_IN_LLFLOORL:
2620 fntype = build_function_type_list (TREE_TYPE (arg),
2621 TREE_TYPE (arg), NULL_TREE);
2622 fallback_fndecl = build_fn_decl (name, fntype);
2625 exp = build_call_expr (fallback_fndecl, 1, arg);
2627 tmp = expand_normal (exp);
2629 /* Truncate the result of floating point optab to integer
2630 via expand_fix (). */
2631 target = gen_reg_rtx (mode);
2632 expand_fix (target, tmp, 0);
2637 /* Expand a call to one of the builtin math functions doing integer
2639 Return 0 if a normal call should be emitted rather than expanding the
2640 function in-line. EXP is the expression that is a call to the builtin
2641 function; if convenient, the result should be placed in TARGET.
2642 SUBTARGET may be used as the target for computing one of EXP's operands. */
2645 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2647 convert_optab builtin_optab;
2649 tree fndecl = get_callee_fndecl (exp);
2651 enum machine_mode mode;
2653 /* There's no easy way to detect the case we need to set EDOM. */
2654 if (flag_errno_math)
2657 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2660 arg = CALL_EXPR_ARG (exp, 0);
2662 switch (DECL_FUNCTION_CODE (fndecl))
2664 CASE_FLT_FN (BUILT_IN_LRINT):
2665 CASE_FLT_FN (BUILT_IN_LLRINT):
2666 builtin_optab = lrint_optab; break;
2667 CASE_FLT_FN (BUILT_IN_LROUND):
2668 CASE_FLT_FN (BUILT_IN_LLROUND):
2669 builtin_optab = lround_optab; break;
2674 /* Make a suitable register to place result in. */
2675 mode = TYPE_MODE (TREE_TYPE (exp));
2677 target = gen_reg_rtx (mode);
2679 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2680 need to expand the argument again. This way, we will not perform
2681 side-effects more the once. */
2682 narg = builtin_save_expr (arg);
2686 exp = build_call_expr (fndecl, 1, arg);
2689 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2693 if (expand_sfix_optab (target, op0, builtin_optab))
2695 /* Output the entire sequence. */
2696 insns = get_insns ();
2702 /* If we were unable to expand via the builtin, stop the sequence
2703 (without outputting the insns) and call to the library function
2704 with the stabilized argument list. */
2707 target = expand_call (exp, target, target == const0_rtx);
2712 /* To evaluate powi(x,n), the floating point value x raised to the
2713 constant integer exponent n, we use a hybrid algorithm that
2714 combines the "window method" with look-up tables. For an
2715 introduction to exponentiation algorithms and "addition chains",
2716 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2717 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2718 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2719 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2721 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2722 multiplications to inline before calling the system library's pow
2723 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2724 so this default never requires calling pow, powf or powl. */
2726 #ifndef POWI_MAX_MULTS
2727 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2730 /* The size of the "optimal power tree" lookup table. All
2731 exponents less than this value are simply looked up in the
2732 powi_table below. This threshold is also used to size the
2733 cache of pseudo registers that hold intermediate results. */
2734 #define POWI_TABLE_SIZE 256
2736 /* The size, in bits of the window, used in the "window method"
2737 exponentiation algorithm. This is equivalent to a radix of
2738 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2739 #define POWI_WINDOW_SIZE 3
2741 /* The following table is an efficient representation of an
2742 "optimal power tree". For each value, i, the corresponding
2743 value, j, in the table states than an optimal evaluation
2744 sequence for calculating pow(x,i) can be found by evaluating
2745 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2746 100 integers is given in Knuth's "Seminumerical algorithms". */
2748 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2750 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2751 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2752 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2753 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2754 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2755 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2756 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2757 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2758 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2759 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2760 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2761 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2762 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2763 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2764 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2765 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2766 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2767 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2768 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2769 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2770 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2771 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2772 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2773 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2774 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2775 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2776 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2777 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2778 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2779 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2780 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2781 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2785 /* Return the number of multiplications required to calculate
2786 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2787 subroutine of powi_cost. CACHE is an array indicating
2788 which exponents have already been calculated. */
2791 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2793 /* If we've already calculated this exponent, then this evaluation
2794 doesn't require any additional multiplications. */
2799 return powi_lookup_cost (n - powi_table[n], cache)
2800 + powi_lookup_cost (powi_table[n], cache) + 1;
2803 /* Return the number of multiplications required to calculate
2804 powi(x,n) for an arbitrary x, given the exponent N. This
2805 function needs to be kept in sync with expand_powi below. */
2808 powi_cost (HOST_WIDE_INT n)
2810 bool cache[POWI_TABLE_SIZE];
2811 unsigned HOST_WIDE_INT digit;
2812 unsigned HOST_WIDE_INT val;
2818 /* Ignore the reciprocal when calculating the cost. */
2819 val = (n < 0) ? -n : n;
2821 /* Initialize the exponent cache. */
2822 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2827 while (val >= POWI_TABLE_SIZE)
2831 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2832 result += powi_lookup_cost (digit, cache)
2833 + POWI_WINDOW_SIZE + 1;
2834 val >>= POWI_WINDOW_SIZE;
2843 return result + powi_lookup_cost (val, cache);
2846 /* Recursive subroutine of expand_powi. This function takes the array,
2847 CACHE, of already calculated exponents and an exponent N and returns
2848 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2851 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2853 unsigned HOST_WIDE_INT digit;
2857 if (n < POWI_TABLE_SIZE)
2862 target = gen_reg_rtx (mode);
2865 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2866 op1 = expand_powi_1 (mode, powi_table[n], cache);
2870 target = gen_reg_rtx (mode);
2871 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2872 op0 = expand_powi_1 (mode, n - digit, cache);
2873 op1 = expand_powi_1 (mode, digit, cache);
2877 target = gen_reg_rtx (mode);
2878 op0 = expand_powi_1 (mode, n >> 1, cache);
2882 result = expand_mult (mode, op0, op1, target, 0);
2883 if (result != target)
2884 emit_move_insn (target, result);
2888 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2889 floating point operand in mode MODE, and N is the exponent. This
2890 function needs to be kept in sync with powi_cost above. */
2893 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2895 unsigned HOST_WIDE_INT val;
2896 rtx cache[POWI_TABLE_SIZE];
2900 return CONST1_RTX (mode);
2902 val = (n < 0) ? -n : n;
2904 memset (cache, 0, sizeof (cache));
2907 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2909 /* If the original exponent was negative, reciprocate the result. */
2911 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2912 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2917 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2918 a normal call should be emitted rather than expanding the function
2919 in-line. EXP is the expression that is a call to the builtin
2920 function; if convenient, the result should be placed in TARGET. */
2923 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2927 tree type = TREE_TYPE (exp);
2928 REAL_VALUE_TYPE cint, c, c2;
2931 enum machine_mode mode = TYPE_MODE (type);
2933 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2936 arg0 = CALL_EXPR_ARG (exp, 0);
2937 arg1 = CALL_EXPR_ARG (exp, 1);
2939 if (TREE_CODE (arg1) != REAL_CST
2940 || TREE_OVERFLOW (arg1))
2941 return expand_builtin_mathfn_2 (exp, target, subtarget);
2943 /* Handle constant exponents. */
2945 /* For integer valued exponents we can expand to an optimal multiplication
2946 sequence using expand_powi. */
2947 c = TREE_REAL_CST (arg1);
2948 n = real_to_integer (&c);
2949 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2950 if (real_identical (&c, &cint)
2951 && ((n >= -1 && n <= 2)
2952 || (flag_unsafe_math_optimizations
2954 && powi_cost (n) <= POWI_MAX_MULTS)))
2956 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2959 op = force_reg (mode, op);
2960 op = expand_powi (op, mode, n);
2965 narg0 = builtin_save_expr (arg0);
2967 /* If the exponent is not integer valued, check if it is half of an integer.
2968 In this case we can expand to sqrt (x) * x**(n/2). */
2969 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2970 if (fn != NULL_TREE)
2972 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2973 n = real_to_integer (&c2);
2974 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2975 if (real_identical (&c2, &cint)
2976 && ((flag_unsafe_math_optimizations
2978 && powi_cost (n/2) <= POWI_MAX_MULTS)
2981 tree call_expr = build_call_expr (fn, 1, narg0);
2982 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2985 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2986 op2 = force_reg (mode, op2);
2987 op2 = expand_powi (op2, mode, abs (n / 2));
2988 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2989 0, OPTAB_LIB_WIDEN);
2990 /* If the original exponent was negative, reciprocate the
2993 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2994 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3000 /* Try if the exponent is a third of an integer. In this case
3001 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3002 different from pow (x, 1./3.) due to rounding and behavior
3003 with negative x we need to constrain this transformation to
3004 unsafe math and positive x or finite math. */
3005 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3007 && flag_unsafe_math_optimizations
3008 && (tree_expr_nonnegative_p (arg0)
3009 || !HONOR_NANS (mode)))
3011 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3012 real_round (&c2, mode, &c2);
3013 n = real_to_integer (&c2);
3014 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3015 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3016 real_convert (&c2, mode, &c2);
3017 if (real_identical (&c2, &c)
3019 && powi_cost (n/3) <= POWI_MAX_MULTS)
3022 tree call_expr = build_call_expr (fn, 1,narg0);
3023 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3024 if (abs (n) % 3 == 2)
3025 op = expand_simple_binop (mode, MULT, op, op, op,
3026 0, OPTAB_LIB_WIDEN);
3029 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3030 op2 = force_reg (mode, op2);
3031 op2 = expand_powi (op2, mode, abs (n / 3));
3032 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3033 0, OPTAB_LIB_WIDEN);
3034 /* If the original exponent was negative, reciprocate the
3037 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3038 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3044 /* Fall back to optab expansion. */
3045 return expand_builtin_mathfn_2 (exp, target, subtarget);
3048 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3049 a normal call should be emitted rather than expanding the function
3050 in-line. EXP is the expression that is a call to the builtin
3051 function; if convenient, the result should be placed in TARGET. */
3054 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3058 enum machine_mode mode;
3059 enum machine_mode mode2;
3061 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3064 arg0 = CALL_EXPR_ARG (exp, 0);
3065 arg1 = CALL_EXPR_ARG (exp, 1);
3066 mode = TYPE_MODE (TREE_TYPE (exp));
3068 /* Handle constant power. */
3070 if (TREE_CODE (arg1) == INTEGER_CST
3071 && !TREE_OVERFLOW (arg1))
3073 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3075 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3076 Otherwise, check the number of multiplications required. */
3077 if ((TREE_INT_CST_HIGH (arg1) == 0
3078 || TREE_INT_CST_HIGH (arg1) == -1)
3079 && ((n >= -1 && n <= 2)
3081 && powi_cost (n) <= POWI_MAX_MULTS)))
3083 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3084 op0 = force_reg (mode, op0);
3085 return expand_powi (op0, mode, n);
3089 /* Emit a libcall to libgcc. */
3091 /* Mode of the 2nd argument must match that of an int. */
3092 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3094 if (target == NULL_RTX)
3095 target = gen_reg_rtx (mode);
3097 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3098 if (GET_MODE (op0) != mode)
3099 op0 = convert_to_mode (mode, op0, 0);
3100 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3101 if (GET_MODE (op1) != mode2)
3102 op1 = convert_to_mode (mode2, op1, 0);
3104 target = emit_library_call_value (powi_optab->handlers[(int) mode].libfunc,
3105 target, LCT_CONST_MAKE_BLOCK, mode, 2,
3106 op0, mode, op1, mode2);
3111 /* Expand expression EXP which is a call to the strlen builtin. Return
3112 NULL_RTX if we failed the caller should emit a normal call, otherwise
3113 try to get the result in TARGET, if convenient. */
3116 expand_builtin_strlen (tree exp, rtx target,
3117 enum machine_mode target_mode)
3119 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3125 tree src = CALL_EXPR_ARG (exp, 0);
3126 rtx result, src_reg, char_rtx, before_strlen;
3127 enum machine_mode insn_mode = target_mode, char_mode;
3128 enum insn_code icode = CODE_FOR_nothing;
3131 /* If the length can be computed at compile-time, return it. */
3132 len = c_strlen (src, 0);
3134 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3136 /* If the length can be computed at compile-time and is constant
3137 integer, but there are side-effects in src, evaluate
3138 src for side-effects, then return len.
3139 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3140 can be optimized into: i++; x = 3; */
3141 len = c_strlen (src, 1);
3142 if (len && TREE_CODE (len) == INTEGER_CST)
3144 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3145 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3148 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3150 /* If SRC is not a pointer type, don't do this operation inline. */
3154 /* Bail out if we can't compute strlen in the right mode. */
3155 while (insn_mode != VOIDmode)
3157 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
3158 if (icode != CODE_FOR_nothing)
3161 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3163 if (insn_mode == VOIDmode)
3166 /* Make a place to write the result of the instruction. */
3170 && GET_MODE (result) == insn_mode
3171 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3172 result = gen_reg_rtx (insn_mode);
3174 /* Make a place to hold the source address. We will not expand
3175 the actual source until we are sure that the expansion will
3176 not fail -- there are trees that cannot be expanded twice. */
3177 src_reg = gen_reg_rtx (Pmode);
3179 /* Mark the beginning of the strlen sequence so we can emit the
3180 source operand later. */
3181 before_strlen = get_last_insn ();
3183 char_rtx = const0_rtx;
3184 char_mode = insn_data[(int) icode].operand[2].mode;
3185 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3187 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3189 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3190 char_rtx, GEN_INT (align));
3195 /* Now that we are assured of success, expand the source. */
3197 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3199 emit_move_insn (src_reg, pat);
3204 emit_insn_after (pat, before_strlen);
3206 emit_insn_before (pat, get_insns ());
3208 /* Return the value in the proper mode for this function. */
3209 if (GET_MODE (result) == target_mode)
3211 else if (target != 0)
3212 convert_move (target, result, 0);
3214 target = convert_to_mode (target_mode, result, 0);
3220 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3221 caller should emit a normal call, otherwise try to get the result
3222 in TARGET, if convenient (and in mode MODE if that's convenient). */
3225 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3227 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3229 tree type = TREE_TYPE (exp);
3230 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3231 CALL_EXPR_ARG (exp, 1), type);
3233 return expand_expr (result, target, mode, EXPAND_NORMAL);
3238 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3239 caller should emit a normal call, otherwise try to get the result
3240 in TARGET, if convenient (and in mode MODE if that's convenient). */
3243 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3245 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3247 tree type = TREE_TYPE (exp);
3248 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3249 CALL_EXPR_ARG (exp, 1), type);
3251 return expand_expr (result, target, mode, EXPAND_NORMAL);
3253 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3258 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3259 caller should emit a normal call, otherwise try to get the result
3260 in TARGET, if convenient (and in mode MODE if that's convenient). */
3263 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3265 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3267 tree type = TREE_TYPE (exp);
3268 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3269 CALL_EXPR_ARG (exp, 1), type);
3271 return expand_expr (result, target, mode, EXPAND_NORMAL);
3276 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3277 caller should emit a normal call, otherwise try to get the result
3278 in TARGET, if convenient (and in mode MODE if that's convenient). */
3281 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3283 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3285 tree type = TREE_TYPE (exp);
3286 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3287 CALL_EXPR_ARG (exp, 1), type);
3289 return expand_expr (result, target, mode, EXPAND_NORMAL);
3294 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3295 bytes from constant string DATA + OFFSET and return it as target
3299 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3300 enum machine_mode mode)
3302 const char *str = (const char *) data;
3304 gcc_assert (offset >= 0
3305 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3306 <= strlen (str) + 1));
3308 return c_readstr (str + offset, mode);
3311 /* Expand a call EXP to the memcpy builtin.
3312 Return NULL_RTX if we failed, the caller should emit a normal call,
3313 otherwise try to get the result in TARGET, if convenient (and in
3314 mode MODE if that's convenient). */
3317 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3319 tree fndecl = get_callee_fndecl (exp);
3321 if (!validate_arglist (exp,
3322 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3326 tree dest = CALL_EXPR_ARG (exp, 0);
3327 tree src = CALL_EXPR_ARG (exp, 1);
3328 tree len = CALL_EXPR_ARG (exp, 2);
3329 const char *src_str;
3330 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3331 unsigned int dest_align
3332 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3333 rtx dest_mem, src_mem, dest_addr, len_rtx;
3334 tree result = fold_builtin_memory_op (dest, src, len,
3335 TREE_TYPE (TREE_TYPE (fndecl)),
3337 HOST_WIDE_INT expected_size = -1;
3338 unsigned int expected_align = 0;
3342 while (TREE_CODE (result) == COMPOUND_EXPR)
3344 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3346 result = TREE_OPERAND (result, 1);
3348 return expand_expr (result, target, mode, EXPAND_NORMAL);
3351 /* If DEST is not a pointer type, call the normal function. */
3352 if (dest_align == 0)
3355 /* If either SRC is not a pointer type, don't do this
3356 operation in-line. */
3360 stringop_block_profile (exp, &expected_align, &expected_size);
3361 if (expected_align < dest_align)
3362 expected_align = dest_align;
3363 dest_mem = get_memory_rtx (dest, len);
3364 set_mem_align (dest_mem, dest_align);
3365 len_rtx = expand_normal (len);
3366 src_str = c_getstr (src);
3368 /* If SRC is a string constant and block move would be done
3369 by pieces, we can avoid loading the string from memory
3370 and only stored the computed constants. */
3372 && GET_CODE (len_rtx) == CONST_INT
3373 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3374 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3375 (void *) src_str, dest_align))
3377 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3378 builtin_memcpy_read_str,
3379 (void *) src_str, dest_align, 0);
3380 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3381 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3385 src_mem = get_memory_rtx (src, len);
3386 set_mem_align (src_mem, src_align);
3388 /* Copy word part most expediently. */
3389 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3390 CALL_EXPR_TAILCALL (exp)
3391 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3392 expected_align, expected_size);
3396 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3397 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3403 /* Expand a call EXP to the mempcpy builtin.
3404 Return NULL_RTX if we failed; the caller should emit a normal call,
3405 otherwise try to get the result in TARGET, if convenient (and in
3406 mode MODE if that's convenient). If ENDP is 0 return the
3407 destination pointer, if ENDP is 1 return the end pointer ala
3408 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3412 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3414 if (!validate_arglist (exp,
3415 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3419 tree dest = CALL_EXPR_ARG (exp, 0);
3420 tree src = CALL_EXPR_ARG (exp, 1);
3421 tree len = CALL_EXPR_ARG (exp, 2);
3422 return expand_builtin_mempcpy_args (dest, src, len,
3424 target, mode, /*endp=*/ 1);
3428 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3429 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3430 so that this can also be called without constructing an actual CALL_EXPR.
3431 TYPE is the return type of the call. The other arguments and return value
3432 are the same as for expand_builtin_mempcpy. */
3435 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3436 rtx target, enum machine_mode mode, int endp)
3438 /* If return value is ignored, transform mempcpy into memcpy. */
3439 if (target == const0_rtx)
3441 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3446 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3447 target, mode, EXPAND_NORMAL);
3451 const char *src_str;
3452 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3453 unsigned int dest_align
3454 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3455 rtx dest_mem, src_mem, len_rtx;
3456 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3460 while (TREE_CODE (result) == COMPOUND_EXPR)
3462 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3464 result = TREE_OPERAND (result, 1);
3466 return expand_expr (result, target, mode, EXPAND_NORMAL);
3469 /* If either SRC or DEST is not a pointer type, don't do this
3470 operation in-line. */
3471 if (dest_align == 0 || src_align == 0)
3474 /* If LEN is not constant, call the normal function. */
3475 if (! host_integerp (len, 1))
3478 len_rtx = expand_normal (len);
3479 src_str = c_getstr (src);
3481 /* If SRC is a string constant and block move would be done
3482 by pieces, we can avoid loading the string from memory
3483 and only stored the computed constants. */
3485 && GET_CODE (len_rtx) == CONST_INT
3486 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3487 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3488 (void *) src_str, dest_align))
3490 dest_mem = get_memory_rtx (dest, len);
3491 set_mem_align (dest_mem, dest_align);
3492 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3493 builtin_memcpy_read_str,
3494 (void *) src_str, dest_align, endp);
3495 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3496 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3500 if (GET_CODE (len_rtx) == CONST_INT
3501 && can_move_by_pieces (INTVAL (len_rtx),
3502 MIN (dest_align, src_align)))
3504 dest_mem = get_memory_rtx (dest, len);
3505 set_mem_align (dest_mem, dest_align);
3506 src_mem = get_memory_rtx (src, len);
3507 set_mem_align (src_mem, src_align);
3508 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3509 MIN (dest_align, src_align), endp);
3510 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3511 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3519 /* Expand expression EXP, which is a call to the memmove builtin. Return
3520 NULL_RTX if we failed; the caller should emit a normal call. */
3523 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3525 if (!validate_arglist (exp,
3526 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3530 tree dest = CALL_EXPR_ARG (exp, 0);
3531 tree src = CALL_EXPR_ARG (exp, 1);
3532 tree len = CALL_EXPR_ARG (exp, 2);
3533 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3534 target, mode, ignore);
3538 /* Helper function to do the actual work for expand_builtin_memmove. The
3539 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3540 so that this can also be called without constructing an actual CALL_EXPR.
3541 TYPE is the return type of the call. The other arguments and return value
3542 are the same as for expand_builtin_memmove. */
3545 expand_builtin_memmove_args (tree dest, tree src, tree len,
3546 tree type, rtx target, enum machine_mode mode,
3549 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3553 STRIP_TYPE_NOPS (result);
3554 while (TREE_CODE (result) == COMPOUND_EXPR)
3556 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3558 result = TREE_OPERAND (result, 1);
3560 return expand_expr (result, target, mode, EXPAND_NORMAL);
3563 /* Otherwise, call the normal function. */
3567 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3568 NULL_RTX if we failed the caller should emit a normal call. */
3571 expand_builtin_bcopy (tree exp, int ignore)
3573 tree type = TREE_TYPE (exp);
3574 tree src, dest, size;
3576 if (!validate_arglist (exp,
3577 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3580 src = CALL_EXPR_ARG (exp, 0);
3581 dest = CALL_EXPR_ARG (exp, 1);
3582 size = CALL_EXPR_ARG (exp, 2);
3584 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3585 This is done this way so that if it isn't expanded inline, we fall
3586 back to calling bcopy instead of memmove. */
3587 return expand_builtin_memmove_args (dest, src,
3588 fold_convert (sizetype, size),
3589 type, const0_rtx, VOIDmode,
3594 # define HAVE_movstr 0
3595 # define CODE_FOR_movstr CODE_FOR_nothing
3598 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3599 we failed, the caller should emit a normal call, otherwise try to
3600 get the result in TARGET, if convenient. If ENDP is 0 return the
3601 destination pointer, if ENDP is 1 return the end pointer ala
3602 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3606 expand_movstr (tree dest, tree src, rtx target, int endp)
3612 const struct insn_data * data;
3617 dest_mem = get_memory_rtx (dest, NULL);
3618 src_mem = get_memory_rtx (src, NULL);
3621 target = force_reg (Pmode, XEXP (dest_mem, 0));
3622 dest_mem = replace_equiv_address (dest_mem, target);
3623 end = gen_reg_rtx (Pmode);
3627 if (target == 0 || target == const0_rtx)
3629 end = gen_reg_rtx (Pmode);
3637 data = insn_data + CODE_FOR_movstr;
3639 if (data->operand[0].mode != VOIDmode)
3640 end = gen_lowpart (data->operand[0].mode, end);
3642 insn = data->genfun (end, dest_mem, src_mem);
3648 /* movstr is supposed to set end to the address of the NUL
3649 terminator. If the caller requested a mempcpy-like return value,
3651 if (endp == 1 && target != const0_rtx)
3653 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3654 emit_move_insn (target, force_operand (tem, NULL_RTX));
3660 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3661 NULL_RTX if we failed the caller should emit a normal call, otherwise
3662 try to get the result in TARGET, if convenient (and in mode MODE if that's
3666 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3668 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3670 tree dest = CALL_EXPR_ARG (exp, 0);
3671 tree src = CALL_EXPR_ARG (exp, 1);
3672 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3677 /* Helper function to do the actual work for expand_builtin_strcpy. The
3678 arguments to the builtin_strcpy call DEST and SRC are broken out
3679 so that this can also be called without constructing an actual CALL_EXPR.
3680 The other arguments and return value are the same as for
3681 expand_builtin_strcpy. */
3684 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3685 rtx target, enum machine_mode mode)
3687 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3689 return expand_expr (result, target, mode, EXPAND_NORMAL);
3690 return expand_movstr (dest, src, target, /*endp=*/0);
3694 /* Expand a call EXP to the stpcpy builtin.
3695 Return NULL_RTX if we failed the caller should emit a normal call,
3696 otherwise try to get the result in TARGET, if convenient (and in
3697 mode MODE if that's convenient). */
3700 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3704 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3707 dst = CALL_EXPR_ARG (exp, 0);
3708 src = CALL_EXPR_ARG (exp, 1);
3710 /* If return value is ignored, transform stpcpy into strcpy. */
3711 if (target == const0_rtx)
3713 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3717 return expand_expr (build_call_expr (fn, 2, dst, src),
3718 target, mode, EXPAND_NORMAL);
3725 /* Ensure we get an actual string whose length can be evaluated at
3726 compile-time, not an expression containing a string. This is
3727 because the latter will potentially produce pessimized code
3728 when used to produce the return value. */
3729 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3730 return expand_movstr (dst, src, target, /*endp=*/2);
3732 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3733 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3734 target, mode, /*endp=*/2);
3739 if (TREE_CODE (len) == INTEGER_CST)
3741 rtx len_rtx = expand_normal (len);
3743 if (GET_CODE (len_rtx) == CONST_INT)
3745 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3746 dst, src, target, mode);
3752 if (mode != VOIDmode)
3753 target = gen_reg_rtx (mode);
3755 target = gen_reg_rtx (GET_MODE (ret));
3757 if (GET_MODE (target) != GET_MODE (ret))
3758 ret = gen_lowpart (GET_MODE (target), ret);
3760 ret = plus_constant (ret, INTVAL (len_rtx));
3761 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3769 return expand_movstr (dst, src, target, /*endp=*/2);
3773 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3774 bytes from constant string DATA + OFFSET and return it as target
3778 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3779 enum machine_mode mode)
3781 const char *str = (const char *) data;
3783 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3786 return c_readstr (str + offset, mode);
3789 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3790 NULL_RTX if we failed the caller should emit a normal call. */
3793 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3795 tree fndecl = get_callee_fndecl (exp);
3797 if (validate_arglist (exp,
3798 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3800 tree dest = CALL_EXPR_ARG (exp, 0);
3801 tree src = CALL_EXPR_ARG (exp, 1);
3802 tree len = CALL_EXPR_ARG (exp, 2);
3803 tree slen = c_strlen (src, 1);
3804 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3808 while (TREE_CODE (result) == COMPOUND_EXPR)
3810 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3812 result = TREE_OPERAND (result, 1);
3814 return expand_expr (result, target, mode, EXPAND_NORMAL);
3817 /* We must be passed a constant len and src parameter. */
3818 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3821 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3823 /* We're required to pad with trailing zeros if the requested
3824 len is greater than strlen(s2)+1. In that case try to
3825 use store_by_pieces, if it fails, punt. */
3826 if (tree_int_cst_lt (slen, len))
3828 unsigned int dest_align
3829 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3830 const char *p = c_getstr (src);
3833 if (!p || dest_align == 0 || !host_integerp (len, 1)
3834 || !can_store_by_pieces (tree_low_cst (len, 1),
3835 builtin_strncpy_read_str,
3836 (void *) p, dest_align))
3839 dest_mem = get_memory_rtx (dest, len);
3840 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3841 builtin_strncpy_read_str,
3842 (void *) p, dest_align, 0);
3843 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3844 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3851 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3852 bytes from constant string DATA + OFFSET and return it as target
3856 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3857 enum machine_mode mode)
3859 const char *c = (const char *) data;
3860 char *p = alloca (GET_MODE_SIZE (mode));
3862 memset (p, *c, GET_MODE_SIZE (mode));
3864 return c_readstr (p, mode);
3867 /* Callback routine for store_by_pieces. Return the RTL of a register
3868 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3869 char value given in the RTL register data. For example, if mode is
3870 4 bytes wide, return the RTL for 0x01010101*data. */
3873 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3874 enum machine_mode mode)
3880 size = GET_MODE_SIZE (mode);
3885 memset (p, 1, size);
3886 coeff = c_readstr (p, mode);
3888 target = convert_to_mode (mode, (rtx) data, 1);
3889 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3890 return force_reg (mode, target);
3893 /* Expand expression EXP, which is a call to the memset builtin. Return
3894 NULL_RTX if we failed the caller should emit a normal call, otherwise
3895 try to get the result in TARGET, if convenient (and in mode MODE if that's
3899 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3901 if (!validate_arglist (exp,
3902 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3906 tree dest = CALL_EXPR_ARG (exp, 0);
3907 tree val = CALL_EXPR_ARG (exp, 1);
3908 tree len = CALL_EXPR_ARG (exp, 2);
3909 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3913 /* Helper function to do the actual work for expand_builtin_memset. The
3914 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3915 so that this can also be called without constructing an actual CALL_EXPR.
3916 The other arguments and return value are the same as for
3917 expand_builtin_memset. */
3920 expand_builtin_memset_args (tree dest, tree val, tree len,
3921 rtx target, enum machine_mode mode, tree orig_exp)
3924 enum built_in_function fcode;
3926 unsigned int dest_align;
3927 rtx dest_mem, dest_addr, len_rtx;
3928 HOST_WIDE_INT expected_size = -1;
3929 unsigned int expected_align = 0;
3931 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3933 /* If DEST is not a pointer type, don't do this operation in-line. */
3934 if (dest_align == 0)
3937 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3938 if (expected_align < dest_align)
3939 expected_align = dest_align;
3941 /* If the LEN parameter is zero, return DEST. */
3942 if (integer_zerop (len))
3944 /* Evaluate and ignore VAL in case it has side-effects. */
3945 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3946 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3949 /* Stabilize the arguments in case we fail. */
3950 dest = builtin_save_expr (dest);
3951 val = builtin_save_expr (val);
3952 len = builtin_save_expr (len);
3954 len_rtx = expand_normal (len);
3955 dest_mem = get_memory_rtx (dest, len);
3957 if (TREE_CODE (val) != INTEGER_CST)
3961 val_rtx = expand_normal (val);
3962 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3965 /* Assume that we can memset by pieces if we can store
3966 * the coefficients by pieces (in the required modes).
3967 * We can't pass builtin_memset_gen_str as that emits RTL. */
3969 if (host_integerp (len, 1)
3970 && !(optimize_size && tree_low_cst (len, 1) > 1)
3971 && can_store_by_pieces (tree_low_cst (len, 1),
3972 builtin_memset_read_str, &c, dest_align))
3974 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3976 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3977 builtin_memset_gen_str, val_rtx, dest_align, 0);
3979 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3980 dest_align, expected_align,
3984 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3985 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3989 if (target_char_cast (val, &c))
3994 if (host_integerp (len, 1)
3995 && !(optimize_size && tree_low_cst (len, 1) > 1)
3996 && can_store_by_pieces (tree_low_cst (len, 1),
3997 builtin_memset_read_str, &c, dest_align))
3998 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3999 builtin_memset_read_str, &c, dest_align, 0);
4000 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
4001 dest_align, expected_align,
4005 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4006 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4010 set_mem_align (dest_mem, dest_align);
4011 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4012 CALL_EXPR_TAILCALL (orig_exp)
4013 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4014 expected_align, expected_size);
4018 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4019 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4025 fndecl = get_callee_fndecl (orig_exp);
4026 fcode = DECL_FUNCTION_CODE (fndecl);
4027 if (fcode == BUILT_IN_MEMSET)
4028 fn = build_call_expr (fndecl, 3, dest, val, len);
4029 else if (fcode == BUILT_IN_BZERO)
4030 fn = build_call_expr (fndecl, 2, dest, len);
4033 if (TREE_CODE (fn) == CALL_EXPR)
4034 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4035 return expand_call (fn, target, target == const0_rtx);
4038 /* Expand expression EXP, which is a call to the bzero builtin. Return
4039 NULL_RTX if we failed the caller should emit a normal call. */
4042 expand_builtin_bzero (tree exp)
4046 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4049 dest = CALL_EXPR_ARG (exp, 0);
4050 size = CALL_EXPR_ARG (exp, 1);
4052 /* New argument list transforming bzero(ptr x, int y) to
4053 memset(ptr x, int 0, size_t y). This is done this way
4054 so that if it isn't expanded inline, we fallback to
4055 calling bzero instead of memset. */
4057 return expand_builtin_memset_args (dest, integer_zero_node,
4058 fold_convert (sizetype, size),
4059 const0_rtx, VOIDmode, exp);
4062 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4063 caller should emit a normal call, otherwise try to get the result
4064 in TARGET, if convenient (and in mode MODE if that's convenient). */
4067 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4069 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4070 INTEGER_TYPE, VOID_TYPE))
4072 tree type = TREE_TYPE (exp);
4073 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4074 CALL_EXPR_ARG (exp, 1),
4075 CALL_EXPR_ARG (exp, 2), type);
4077 return expand_expr (result, target, mode, EXPAND_NORMAL);
4082 /* Expand expression EXP, which is a call to the memcmp built-in function.
4083 Return NULL_RTX if we failed and the
4084 caller should emit a normal call, otherwise try to get the result in
4085 TARGET, if convenient (and in mode MODE, if that's convenient). */
4088 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4090 if (!validate_arglist (exp,
4091 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4095 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4096 CALL_EXPR_ARG (exp, 1),
4097 CALL_EXPR_ARG (exp, 2));
4099 return expand_expr (result, target, mode, EXPAND_NORMAL);
4102 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4104 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4107 tree arg1 = CALL_EXPR_ARG (exp, 0);
4108 tree arg2 = CALL_EXPR_ARG (exp, 1);
4109 tree len = CALL_EXPR_ARG (exp, 2);
4112 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4114 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4115 enum machine_mode insn_mode;
4117 #ifdef HAVE_cmpmemsi
4119 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4122 #ifdef HAVE_cmpstrnsi
4124 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4129 /* If we don't have POINTER_TYPE, call the function. */
4130 if (arg1_align == 0 || arg2_align == 0)
4133 /* Make a place to write the result of the instruction. */
4136 && REG_P (result) && GET_MODE (result) == insn_mode
4137 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4138 result = gen_reg_rtx (insn_mode);
4140 arg1_rtx = get_memory_rtx (arg1, len);
4141 arg2_rtx = get_memory_rtx (arg2, len);
4142 arg3_rtx = expand_normal (len);
4144 /* Set MEM_SIZE as appropriate. */
4145 if (GET_CODE (arg3_rtx) == CONST_INT)
4147 set_mem_size (arg1_rtx, arg3_rtx);
4148 set_mem_size (arg2_rtx, arg3_rtx);
4151 #ifdef HAVE_cmpmemsi
4153 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4154 GEN_INT (MIN (arg1_align, arg2_align)));
4157 #ifdef HAVE_cmpstrnsi
4159 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4160 GEN_INT (MIN (arg1_align, arg2_align)));
4168 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
4169 TYPE_MODE (integer_type_node), 3,
4170 XEXP (arg1_rtx, 0), Pmode,
4171 XEXP (arg2_rtx, 0), Pmode,
4172 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4173 TYPE_UNSIGNED (sizetype)),
4174 TYPE_MODE (sizetype));
4176 /* Return the value in the proper mode for this function. */
4177 mode = TYPE_MODE (TREE_TYPE (exp));
4178 if (GET_MODE (result) == mode)
4180 else if (target != 0)
4182 convert_move (target, result, 0);
4186 return convert_to_mode (mode, result, 0);
4193 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4194 if we failed the caller should emit a normal call, otherwise try to get
4195 the result in TARGET, if convenient. */
4198 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4200 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4204 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4205 CALL_EXPR_ARG (exp, 1));
4207 return expand_expr (result, target, mode, EXPAND_NORMAL);
4210 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4211 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4212 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4214 rtx arg1_rtx, arg2_rtx;
4215 rtx result, insn = NULL_RTX;
4217 tree arg1 = CALL_EXPR_ARG (exp, 0);
4218 tree arg2 = CALL_EXPR_ARG (exp, 1);
4221 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4223 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4225 /* If we don't have POINTER_TYPE, call the function. */
4226 if (arg1_align == 0 || arg2_align == 0)
4229 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4230 arg1 = builtin_save_expr (arg1);
4231 arg2 = builtin_save_expr (arg2);
4233 arg1_rtx = get_memory_rtx (arg1, NULL);
4234 arg2_rtx = get_memory_rtx (arg2, NULL);
4236 #ifdef HAVE_cmpstrsi
4237 /* Try to call cmpstrsi. */
4240 enum machine_mode insn_mode
4241 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4243 /* Make a place to write the result of the instruction. */
4246 && REG_P (result) && GET_MODE (result) == insn_mode
4247 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4248 result = gen_reg_rtx (insn_mode);
4250 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4251 GEN_INT (MIN (arg1_align, arg2_align)));
4254 #ifdef HAVE_cmpstrnsi
4255 /* Try to determine at least one length and call cmpstrnsi. */
4256 if (!insn && HAVE_cmpstrnsi)
4261 enum machine_mode insn_mode
4262 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4263 tree len1 = c_strlen (arg1, 1);
4264 tree len2 = c_strlen (arg2, 1);
4267 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4269 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4271 /* If we don't have a constant length for the first, use the length
4272 of the second, if we know it. We don't require a constant for
4273 this case; some cost analysis could be done if both are available
4274 but neither is constant. For now, assume they're equally cheap,
4275 unless one has side effects. If both strings have constant lengths,
4282 else if (TREE_SIDE_EFFECTS (len1))
4284 else if (TREE_SIDE_EFFECTS (len2))
4286 else if (TREE_CODE (len1) != INTEGER_CST)
4288 else if (TREE_CODE (len2) != INTEGER_CST)
4290 else if (tree_int_cst_lt (len1, len2))
4295 /* If both arguments have side effects, we cannot optimize. */
4296 if (!len || TREE_SIDE_EFFECTS (len))
4299 arg3_rtx = expand_normal (len);
4301 /* Make a place to write the result of the instruction. */
4304 && REG_P (result) && GET_MODE (result) == insn_mode
4305 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4306 result = gen_reg_rtx (insn_mode);
4308 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4309 GEN_INT (MIN (arg1_align, arg2_align)));
4317 /* Return the value in the proper mode for this function. */
4318 mode = TYPE_MODE (TREE_TYPE (exp));
4319 if (GET_MODE (result) == mode)
4322 return convert_to_mode (mode, result, 0);
4323 convert_move (target, result, 0);
4327 /* Expand the library call ourselves using a stabilized argument
4328 list to avoid re-evaluating the function's arguments twice. */
4329 #ifdef HAVE_cmpstrnsi
4332 fndecl = get_callee_fndecl (exp);
4333 fn = build_call_expr (fndecl, 2, arg1, arg2);
4334 if (TREE_CODE (fn) == CALL_EXPR)
4335 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4336 return expand_call (fn, target, target == const0_rtx);
4342 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4343 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4344 the result in TARGET, if convenient. */
4347 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4349 if (!validate_arglist (exp,
4350 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4354 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4355 CALL_EXPR_ARG (exp, 1),
4356 CALL_EXPR_ARG (exp, 2));
4358 return expand_expr (result, target, mode, EXPAND_NORMAL);
4361 /* If c_strlen can determine an expression for one of the string
4362 lengths, and it doesn't have side effects, then emit cmpstrnsi
4363 using length MIN(strlen(string)+1, arg3). */
4364 #ifdef HAVE_cmpstrnsi
4367 tree len, len1, len2;
4368 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4371 tree arg1 = CALL_EXPR_ARG (exp, 0);
4372 tree arg2 = CALL_EXPR_ARG (exp, 1);
4373 tree arg3 = CALL_EXPR_ARG (exp, 2);
4376 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4378 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4379 enum machine_mode insn_mode
4380 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4382 len1 = c_strlen (arg1, 1);
4383 len2 = c_strlen (arg2, 1);
4386 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4388 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4390 /* If we don't have a constant length for the first, use the length
4391 of the second, if we know it. We don't require a constant for
4392 this case; some cost analysis could be done if both are available
4393 but neither is constant. For now, assume they're equally cheap,
4394 unless one has side effects. If both strings have constant lengths,
4401 else if (TREE_SIDE_EFFECTS (len1))
4403 else if (TREE_SIDE_EFFECTS (len2))
4405 else if (TREE_CODE (len1) != INTEGER_CST)
4407 else if (TREE_CODE (len2) != INTEGER_CST)
4409 else if (tree_int_cst_lt (len1, len2))
4414 /* If both arguments have side effects, we cannot optimize. */
4415 if (!len || TREE_SIDE_EFFECTS (len))
4418 /* The actual new length parameter is MIN(len,arg3). */
4419 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4420 fold_convert (TREE_TYPE (len), arg3));
4422 /* If we don't have POINTER_TYPE, call the function. */
4423 if (arg1_align == 0 || arg2_align == 0)
4426 /* Make a place to write the result of the instruction. */
4429 && REG_P (result) && GET_MODE (result) == insn_mode
4430 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4431 result = gen_reg_rtx (insn_mode);
4433 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4434 arg1 = builtin_save_expr (arg1);
4435 arg2 = builtin_save_expr (arg2);
4436 len = builtin_save_expr (len);
4438 arg1_rtx = get_memory_rtx (arg1, len);
4439 arg2_rtx = get_memory_rtx (arg2, len);
4440 arg3_rtx = expand_normal (len);
4441 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4442 GEN_INT (MIN (arg1_align, arg2_align)));
4447 /* Return the value in the proper mode for this function. */
4448 mode = TYPE_MODE (TREE_TYPE (exp));
4449 if (GET_MODE (result) == mode)
4452 return convert_to_mode (mode, result, 0);
4453 convert_move (target, result, 0);
4457 /* Expand the library call ourselves using a stabilized argument
4458 list to avoid re-evaluating the function's arguments twice. */
4459 fndecl = get_callee_fndecl (exp);
4460 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4461 if (TREE_CODE (fn) == CALL_EXPR)
4462 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4463 return expand_call (fn, target, target == const0_rtx);
4469 /* Expand expression EXP, which is a call to the strcat builtin.
4470 Return NULL_RTX if we failed the caller should emit a normal call,
4471 otherwise try to get the result in TARGET, if convenient. */
4474 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4476 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4480 tree dst = CALL_EXPR_ARG (exp, 0);
4481 tree src = CALL_EXPR_ARG (exp, 1);
4482 const char *p = c_getstr (src);
4484 /* If the string length is zero, return the dst parameter. */
4485 if (p && *p == '\0')
4486 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4490 /* See if we can store by pieces into (dst + strlen(dst)). */
4491 tree newsrc, newdst,
4492 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4495 /* Stabilize the argument list. */
4496 newsrc = builtin_save_expr (src);
4497 dst = builtin_save_expr (dst);
4501 /* Create strlen (dst). */
4502 newdst = build_call_expr (strlen_fn, 1, dst);
4503 /* Create (dst p+ strlen (dst)). */
4505 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4506 newdst = builtin_save_expr (newdst);
4508 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4510 end_sequence (); /* Stop sequence. */
4514 /* Output the entire sequence. */
4515 insns = get_insns ();
4519 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4526 /* Expand expression EXP, which is a call to the strncat builtin.
4527 Return NULL_RTX if we failed the caller should emit a normal call,
4528 otherwise try to get the result in TARGET, if convenient. */
4531 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4533 if (validate_arglist (exp,
4534 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4536 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4537 CALL_EXPR_ARG (exp, 1),
4538 CALL_EXPR_ARG (exp, 2));
4540 return expand_expr (result, target, mode, EXPAND_NORMAL);
4545 /* Expand expression EXP, which is a call to the strspn builtin.
4546 Return NULL_RTX if we failed the caller should emit a normal call,
4547 otherwise try to get the result in TARGET, if convenient. */
4550 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4552 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4554 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4555 CALL_EXPR_ARG (exp, 1));
4557 return expand_expr (result, target, mode, EXPAND_NORMAL);
4562 /* Expand expression EXP, which is a call to the strcspn builtin.
4563 Return NULL_RTX if we failed the caller should emit a normal call,
4564 otherwise try to get the result in TARGET, if convenient. */
4567 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4569 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4571 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4572 CALL_EXPR_ARG (exp, 1));
4574 return expand_expr (result, target, mode, EXPAND_NORMAL);
4579 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4580 if that's convenient. */
4583 expand_builtin_saveregs (void)
4587 /* Don't do __builtin_saveregs more than once in a function.
4588 Save the result of the first call and reuse it. */
4589 if (saveregs_value != 0)
4590 return saveregs_value;
4592 /* When this function is called, it means that registers must be
4593 saved on entry to this function. So we migrate the call to the
4594 first insn of this function. */
4598 /* Do whatever the machine needs done in this case. */
4599 val = targetm.calls.expand_builtin_saveregs ();
4604 saveregs_value = val;
4606 /* Put the insns after the NOTE that starts the function. If this
4607 is inside a start_sequence, make the outer-level insn chain current, so
4608 the code is placed at the start of the function. */
4609 push_topmost_sequence ();
4610 emit_insn_after (seq, entry_of_function ());
4611 pop_topmost_sequence ();
4616 /* __builtin_args_info (N) returns word N of the arg space info
4617 for the current function. The number and meanings of words
4618 is controlled by the definition of CUMULATIVE_ARGS. */
4621 expand_builtin_args_info (tree exp)
4623 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4624 int *word_ptr = (int *) ¤t_function_args_info;
4626 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4628 if (call_expr_nargs (exp) != 0)
4630 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4631 error ("argument of %<__builtin_args_info%> must be constant");
4634 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4636 if (wordnum < 0 || wordnum >= nwords)
4637 error ("argument of %<__builtin_args_info%> out of range");
4639 return GEN_INT (word_ptr[wordnum]);
4643 error ("missing argument in %<__builtin_args_info%>");
4648 /* Expand a call to __builtin_next_arg. */
4651 expand_builtin_next_arg (void)
4653 /* Checking arguments is already done in fold_builtin_next_arg
4654 that must be called before this function. */
4655 return expand_binop (ptr_mode, add_optab,
4656 current_function_internal_arg_pointer,
4657 current_function_arg_offset_rtx,
4658 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4661 /* Make it easier for the backends by protecting the valist argument
4662 from multiple evaluations. */
4665 stabilize_va_list (tree valist, int needs_lvalue)
4667 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4669 if (TREE_SIDE_EFFECTS (valist))
4670 valist = save_expr (valist);
4672 /* For this case, the backends will be expecting a pointer to
4673 TREE_TYPE (va_list_type_node), but it's possible we've
4674 actually been given an array (an actual va_list_type_node).
4676 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4678 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4679 valist = build_fold_addr_expr_with_type (valist, p1);
4688 if (! TREE_SIDE_EFFECTS (valist))
4691 pt = build_pointer_type (va_list_type_node);
4692 valist = fold_build1 (ADDR_EXPR, pt, valist);
4693 TREE_SIDE_EFFECTS (valist) = 1;
4696 if (TREE_SIDE_EFFECTS (valist))
4697 valist = save_expr (valist);
4698 valist = build_fold_indirect_ref (valist);
4704 /* The "standard" definition of va_list is void*. */
4707 std_build_builtin_va_list (void)
4709 return ptr_type_node;
4712 /* The "standard" implementation of va_start: just assign `nextarg' to
4716 std_expand_builtin_va_start (tree valist, rtx nextarg)
4718 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4719 convert_move (va_r, nextarg, 0);
4722 /* Expand EXP, a call to __builtin_va_start. */
4725 expand_builtin_va_start (tree exp)
4730 if (call_expr_nargs (exp) < 2)
4732 error ("too few arguments to function %<va_start%>");
4736 if (fold_builtin_next_arg (exp, true))
4739 nextarg = expand_builtin_next_arg ();
4740 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4742 #ifdef EXPAND_BUILTIN_VA_START
4743 EXPAND_BUILTIN_VA_START (valist, nextarg);
4745 std_expand_builtin_va_start (valist, nextarg);
4751 /* The "standard" implementation of va_arg: read the value from the
4752 current (padded) address and increment by the (padded) size. */
4755 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4757 tree addr, t, type_size, rounded_size, valist_tmp;
4758 unsigned HOST_WIDE_INT align, boundary;
4761 #ifdef ARGS_GROW_DOWNWARD
4762 /* All of the alignment and movement below is for args-grow-up machines.
4763 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4764 implement their own specialized gimplify_va_arg_expr routines. */
4768 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4770 type = build_pointer_type (type);
4772 align = PARM_BOUNDARY / BITS_PER_UNIT;
4773 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4775 /* Hoist the valist value into a temporary for the moment. */
4776 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4778 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4779 requires greater alignment, we must perform dynamic alignment. */
4780 if (boundary > align
4781 && !integer_zerop (TYPE_SIZE (type)))
4783 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4784 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4785 valist_tmp, size_int (boundary - 1)));
4786 gimplify_and_add (t, pre_p);
4788 t = fold_convert (sizetype, valist_tmp);
4789 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4790 fold_convert (TREE_TYPE (valist),
4791 fold_build2 (BIT_AND_EXPR, sizetype, t,
4792 size_int (-boundary))));
4793 gimplify_and_add (t, pre_p);
4798 /* If the actual alignment is less than the alignment of the type,
4799 adjust the type accordingly so that we don't assume strict alignment
4800 when deferencing the pointer. */
4801 boundary *= BITS_PER_UNIT;
4802 if (boundary < TYPE_ALIGN (type))
4804 type = build_variant_type_copy (type);
4805 TYPE_ALIGN (type) = boundary;
4808 /* Compute the rounded size of the type. */
4809 type_size = size_in_bytes (type);
4810 rounded_size = round_up (type_size, align);
4812 /* Reduce rounded_size so it's sharable with the postqueue. */
4813 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4817 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4819 /* Small args are padded downward. */
4820 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4821 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4822 size_binop (MINUS_EXPR, rounded_size, type_size));
4823 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4826 /* Compute new value for AP. */
4827 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4828 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4829 gimplify_and_add (t, pre_p);
4831 addr = fold_convert (build_pointer_type (type), addr);
4834 addr = build_va_arg_indirect_ref (addr);
4836 return build_va_arg_indirect_ref (addr);
4839 /* Build an indirect-ref expression over the given TREE, which represents a
4840 piece of a va_arg() expansion. */
4842 build_va_arg_indirect_ref (tree addr)
4844 addr = build_fold_indirect_ref (addr);
4846 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4852 /* Return a dummy expression of type TYPE in order to keep going after an
4856 dummy_object (tree type)
4858 tree t = build_int_cst (build_pointer_type (type), 0);
4859 return build1 (INDIRECT_REF, type, t);
4862 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4863 builtin function, but a very special sort of operator. */
4865 enum gimplify_status
4866 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4868 tree promoted_type, want_va_type, have_va_type;
4869 tree valist = TREE_OPERAND (*expr_p, 0);
4870 tree type = TREE_TYPE (*expr_p);
4873 /* Verify that valist is of the proper type. */
4874 want_va_type = va_list_type_node;
4875 have_va_type = TREE_TYPE (valist);
4877 if (have_va_type == error_mark_node)
4880 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4882 /* If va_list is an array type, the argument may have decayed
4883 to a pointer type, e.g. by being passed to another function.
4884 In that case, unwrap both types so that we can compare the
4885 underlying records. */
4886 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4887 || POINTER_TYPE_P (have_va_type))
4889 want_va_type = TREE_TYPE (want_va_type);
4890 have_va_type = TREE_TYPE (have_va_type);
4894 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4896 error ("first argument to %<va_arg%> not of type %<va_list%>");
4900 /* Generate a diagnostic for requesting data of a type that cannot
4901 be passed through `...' due to type promotion at the call site. */
4902 else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4905 static bool gave_help;
4907 /* Unfortunately, this is merely undefined, rather than a constraint
4908 violation, so we cannot make this an error. If this call is never
4909 executed, the program is still strictly conforming. */
4910 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4911 type, promoted_type);
4915 warning (0, "(so you should pass %qT not %qT to %<va_arg%>)",
4916 promoted_type, type);
4919 /* We can, however, treat "undefined" any way we please.
4920 Call abort to encourage the user to fix the program. */
4921 inform ("if this code is reached, the program will abort");
4922 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4923 append_to_statement_list (t, pre_p);
4925 /* This is dead code, but go ahead and finish so that the
4926 mode of the result comes out right. */
4927 *expr_p = dummy_object (type);
4932 /* Make it easier for the backends by protecting the valist argument
4933 from multiple evaluations. */
4934 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4936 /* For this case, the backends will be expecting a pointer to
4937 TREE_TYPE (va_list_type_node), but it's possible we've
4938 actually been given an array (an actual va_list_type_node).
4940 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4942 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4943 valist = build_fold_addr_expr_with_type (valist, p1);
4945 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4948 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4950 if (!targetm.gimplify_va_arg_expr)
4951 /* FIXME:Once most targets are converted we should merely
4952 assert this is non-null. */
4955 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4960 /* Expand EXP, a call to __builtin_va_end. */
4963 expand_builtin_va_end (tree exp)
4965 tree valist = CALL_EXPR_ARG (exp, 0);
4967 /* Evaluate for side effects, if needed. I hate macros that don't
4969 if (TREE_SIDE_EFFECTS (valist))
4970 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4975 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4976 builtin rather than just as an assignment in stdarg.h because of the
4977 nastiness of array-type va_list types. */
4980 expand_builtin_va_copy (tree exp)
4984 dst = CALL_EXPR_ARG (exp, 0);
4985 src = CALL_EXPR_ARG (exp, 1);
4987 dst = stabilize_va_list (dst, 1);
4988 src = stabilize_va_list (src, 0);
4990 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4992 t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4993 TREE_SIDE_EFFECTS (t) = 1;
4994 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4998 rtx dstb, srcb, size;
5000 /* Evaluate to pointers. */
5001 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5002 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5003 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
5004 VOIDmode, EXPAND_NORMAL);
5006 dstb = convert_memory_address (Pmode, dstb);
5007 srcb = convert_memory_address (Pmode, srcb);
5009 /* "Dereference" to BLKmode memories. */
5010 dstb = gen_rtx_MEM (BLKmode, dstb);
5011 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5012 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
5013 srcb = gen_rtx_MEM (BLKmode, srcb);
5014 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5015 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
5018 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5024 /* Expand a call to one of the builtin functions __builtin_frame_address or
5025 __builtin_return_address. */
5028 expand_builtin_frame_address (tree fndecl, tree exp)
5030 /* The argument must be a nonnegative integer constant.
5031 It counts the number of frames to scan up the stack.
5032 The value is the return address saved in that frame. */
5033 if (call_expr_nargs (exp) == 0)
5034 /* Warning about missing arg was already issued. */
5036 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5038 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5039 error ("invalid argument to %<__builtin_frame_address%>");
5041 error ("invalid argument to %<__builtin_return_address%>");
5047 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5048 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5050 /* Some ports cannot access arbitrary stack frames. */
5053 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5054 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5056 warning (0, "unsupported argument to %<__builtin_return_address%>");
5060 /* For __builtin_frame_address, return what we've got. */
5061 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5065 && ! CONSTANT_P (tem))
5066 tem = copy_to_mode_reg (Pmode, tem);
5071 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5072 we failed and the caller should emit a normal call, otherwise try to get
5073 the result in TARGET, if convenient. */
5076 expand_builtin_alloca (tree exp, rtx target)
5081 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5082 should always expand to function calls. These can be intercepted
5087 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5090 /* Compute the argument. */
5091 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5093 /* Allocate the desired space. */
5094 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5095 result = convert_memory_address (ptr_mode, result);
5100 /* Expand a call to a bswap builtin with argument ARG0. MODE
5101 is the mode to expand with. */
5104 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5106 enum machine_mode mode;
5110 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5113 arg = CALL_EXPR_ARG (exp, 0);
5114 mode = TYPE_MODE (TREE_TYPE (arg));
5115 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5117 target = expand_unop (mode, bswap_optab, op0, target, 1);
5119 gcc_assert (target);
5121 return convert_to_mode (mode, target, 0);
5124 /* Expand a call to a unary builtin in EXP.
5125 Return NULL_RTX if a normal call should be emitted rather than expanding the
5126 function in-line. If convenient, the result should be placed in TARGET.
5127 SUBTARGET may be used as the target for computing one of EXP's operands. */
5130 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5131 rtx subtarget, optab op_optab)
5135 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5138 /* Compute the argument. */
5139 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5140 VOIDmode, EXPAND_NORMAL);
5141 /* Compute op, into TARGET if possible.
5142 Set TARGET to wherever the result comes back. */
5143 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5144 op_optab, op0, target, 1);
5145 gcc_assert (target);
5147 return convert_to_mode (target_mode, target, 0);
5150 /* If the string passed to fputs is a constant and is one character
5151 long, we attempt to transform this call into __builtin_fputc(). */
5154 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5156 /* Verify the arguments in the original call. */
5157 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5159 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5160 CALL_EXPR_ARG (exp, 1),
5161 (target == const0_rtx),
5162 unlocked, NULL_TREE);
5164 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5169 /* Expand a call to __builtin_expect. We just return our argument
5170 as the builtin_expect semantic should've been already executed by
5171 tree branch prediction pass. */
5174 expand_builtin_expect (tree exp, rtx target)
5178 if (call_expr_nargs (exp) < 2)
5180 arg = CALL_EXPR_ARG (exp, 0);
5181 c = CALL_EXPR_ARG (exp, 1);
5183 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5184 /* When guessing was done, the hints should be already stripped away. */
5185 gcc_assert (!flag_guess_branch_prob
5186 || optimize == 0 || errorcount || sorrycount);
5191 expand_builtin_trap (void)
5195 emit_insn (gen_trap ());
5198 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5202 /* Expand EXP, a call to fabs, fabsf or fabsl.
5203 Return NULL_RTX if a normal call should be emitted rather than expanding
5204 the function inline. If convenient, the result should be placed
5205 in TARGET. SUBTARGET may be used as the target for computing
5209 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5211 enum machine_mode mode;
5215 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5218 arg = CALL_EXPR_ARG (exp, 0);
5219 mode = TYPE_MODE (TREE_TYPE (arg));
5220 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5221 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5224 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5225 Return NULL is a normal call should be emitted rather than expanding the
5226 function inline. If convenient, the result should be placed in TARGET.
5227 SUBTARGET may be used as the target for computing the operand. */
5230 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5235 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5238 arg = CALL_EXPR_ARG (exp, 0);
5239 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5241 arg = CALL_EXPR_ARG (exp, 1);
5242 op1 = expand_normal (arg);
5244 return expand_copysign (op0, op1, target);
5247 /* Create a new constant string literal and return a char* pointer to it.
5248 The STRING_CST value is the LEN characters at STR. */
5250 build_string_literal (int len, const char *str)
5252 tree t, elem, index, type;
5254 t = build_string (len, str);
5255 elem = build_type_variant (char_type_node, 1, 0);
5256 index = build_index_type (build_int_cst (NULL_TREE, len - 1));
5257 type = build_array_type (elem, index);
5258 TREE_TYPE (t) = type;
5259 TREE_CONSTANT (t) = 1;
5260 TREE_INVARIANT (t) = 1;
5261 TREE_READONLY (t) = 1;
5262 TREE_STATIC (t) = 1;
5264 type = build_pointer_type (type);
5265 t = build1 (ADDR_EXPR, type, t);
5267 type = build_pointer_type (elem);
5268 t = build1 (NOP_EXPR, type, t);
5272 /* Expand EXP, a call to printf or printf_unlocked.
5273 Return NULL_RTX if a normal call should be emitted rather than transforming
5274 the function inline. If convenient, the result should be placed in
5275 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5278 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5281 /* If we're using an unlocked function, assume the other unlocked
5282 functions exist explicitly. */
5283 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5284 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5285 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5286 : implicit_built_in_decls[BUILT_IN_PUTS];
5287 const char *fmt_str;
5290 int nargs = call_expr_nargs (exp);
5292 /* If the return value is used, don't do the transformation. */
5293 if (target != const0_rtx)
5296 /* Verify the required arguments in the original call. */
5299 fmt = CALL_EXPR_ARG (exp, 0);
5300 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5303 /* Check whether the format is a literal string constant. */
5304 fmt_str = c_getstr (fmt);
5305 if (fmt_str == NULL)
5308 if (!init_target_chars ())
5311 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5312 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5315 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5318 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5320 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5321 else if (strcmp (fmt_str, target_percent_c) == 0)
5324 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5327 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5331 /* We can't handle anything else with % args or %% ... yet. */
5332 if (strchr (fmt_str, target_percent))
5338 /* If the format specifier was "", printf does nothing. */
5339 if (fmt_str[0] == '\0')
5341 /* If the format specifier has length of 1, call putchar. */
5342 if (fmt_str[1] == '\0')
5344 /* Given printf("c"), (where c is any one character,)
5345 convert "c"[0] to an int and pass that to the replacement
5347 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5349 fn = build_call_expr (fn_putchar, 1, arg);
5353 /* If the format specifier was "string\n", call puts("string"). */
5354 size_t len = strlen (fmt_str);
5355 if ((unsigned char)fmt_str[len - 1] == target_newline)
5357 /* Create a NUL-terminated string that's one char shorter
5358 than the original, stripping off the trailing '\n'. */
5359 char *newstr = alloca (len);
5360 memcpy (newstr, fmt_str, len - 1);
5361 newstr[len - 1] = 0;
5362 arg = build_string_literal (len, newstr);
5364 fn = build_call_expr (fn_puts, 1, arg);
5367 /* We'd like to arrange to call fputs(string,stdout) here,
5368 but we need stdout and don't have a way to get it yet. */
5375 if (TREE_CODE (fn) == CALL_EXPR)
5376 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5377 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5380 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5381 Return NULL_RTX if a normal call should be emitted rather than transforming
5382 the function inline. If convenient, the result should be placed in
5383 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5386 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5389 /* If we're using an unlocked function, assume the other unlocked
5390 functions exist explicitly. */
5391 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5392 : implicit_built_in_decls[BUILT_IN_FPUTC];
5393 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5394 : implicit_built_in_decls[BUILT_IN_FPUTS];
5395 const char *fmt_str;
5398 int nargs = call_expr_nargs (exp);
5400 /* If the return value is used, don't do the transformation. */
5401 if (target != const0_rtx)
5404 /* Verify the required arguments in the original call. */
5407 fp = CALL_EXPR_ARG (exp, 0);
5408 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5410 fmt = CALL_EXPR_ARG (exp, 1);
5411 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5414 /* Check whether the format is a literal string constant. */
5415 fmt_str = c_getstr (fmt);
5416 if (fmt_str == NULL)
5419 if (!init_target_chars ())
5422 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5423 if (strcmp (fmt_str, target_percent_s) == 0)
5426 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5428 arg = CALL_EXPR_ARG (exp, 2);
5430 fn = build_call_expr (fn_fputs, 2, arg, fp);
5432 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5433 else if (strcmp (fmt_str, target_percent_c) == 0)
5436 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5438 arg = CALL_EXPR_ARG (exp, 2);
5440 fn = build_call_expr (fn_fputc, 2, arg, fp);
5444 /* We can't handle anything else with % args or %% ... yet. */
5445 if (strchr (fmt_str, target_percent))
5451 /* If the format specifier was "", fprintf does nothing. */
5452 if (fmt_str[0] == '\0')
5454 /* Evaluate and ignore FILE* argument for side-effects. */
5455 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5459 /* When "string" doesn't contain %, replace all cases of
5460 fprintf(stream,string) with fputs(string,stream). The fputs
5461 builtin will take care of special cases like length == 1. */
5463 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5468 if (TREE_CODE (fn) == CALL_EXPR)
5469 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5470 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5473 /* Expand a call EXP to sprintf. Return NULL_RTX if
5474 a normal call should be emitted rather than expanding the function
5475 inline. If convenient, the result should be placed in TARGET with
5479 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5482 const char *fmt_str;
5483 int nargs = call_expr_nargs (exp);
5485 /* Verify the required arguments in the original call. */
5488 dest = CALL_EXPR_ARG (exp, 0);
5489 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5491 fmt = CALL_EXPR_ARG (exp, 0);
5492 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5495 /* Check whether the format is a literal string constant. */
5496 fmt_str = c_getstr (fmt);
5497 if (fmt_str == NULL)
5500 if (!init_target_chars ())
5503 /* If the format doesn't contain % args or %%, use strcpy. */
5504 if (strchr (fmt_str, target_percent) == 0)
5506 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5509 if ((nargs > 2) || ! fn)
5511 expand_expr (build_call_expr (fn, 2, dest, fmt),
5512 const0_rtx, VOIDmode, EXPAND_NORMAL);
5513 if (target == const0_rtx)
5515 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5516 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5518 /* If the format is "%s", use strcpy if the result isn't used. */
5519 else if (strcmp (fmt_str, target_percent_s) == 0)
5522 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5528 arg = CALL_EXPR_ARG (exp, 2);
5529 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5532 if (target != const0_rtx)
5534 len = c_strlen (arg, 1);
5535 if (! len || TREE_CODE (len) != INTEGER_CST)
5541 expand_expr (build_call_expr (fn, 2, dest, arg),
5542 const0_rtx, VOIDmode, EXPAND_NORMAL);
5544 if (target == const0_rtx)
5546 return expand_expr (len, target, mode, EXPAND_NORMAL);
5552 /* Expand a call to either the entry or exit function profiler. */
5555 expand_builtin_profile_func (bool exitp)
5559 this = DECL_RTL (current_function_decl);
5560 gcc_assert (MEM_P (this));
5561 this = XEXP (this, 0);
5564 which = profile_function_exit_libfunc;
5566 which = profile_function_entry_libfunc;
5568 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5569 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5576 /* Expand a call to __builtin___clear_cache. */
5579 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5581 #ifndef HAVE_clear_cache
5582 #ifdef CLEAR_INSN_CACHE
5583 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5584 does something. Just do the default expansion to a call to
5588 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5589 does nothing. There is no need to call it. Do nothing. */
5591 #endif /* CLEAR_INSN_CACHE */
5593 /* We have a "clear_cache" insn, and it will handle everything. */
5595 rtx begin_rtx, end_rtx;
5596 enum insn_code icode;
5598 /* We must not expand to a library call. If we did, any
5599 fallback library function in libgcc that might contain a call to
5600 __builtin___clear_cache() would recurse infinitely. */
5601 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5603 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5607 if (HAVE_clear_cache)
5609 icode = CODE_FOR_clear_cache;
5611 begin = CALL_EXPR_ARG (exp, 0);
5612 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5613 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5614 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5615 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5617 end = CALL_EXPR_ARG (exp, 1);
5618 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5619 end_rtx = convert_memory_address (Pmode, end_rtx);
5620 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5621 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5623 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5626 #endif /* HAVE_clear_cache */
5629 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5632 round_trampoline_addr (rtx tramp)
5634 rtx temp, addend, mask;
5636 /* If we don't need too much alignment, we'll have been guaranteed
5637 proper alignment by get_trampoline_type. */
5638 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5641 /* Round address up to desired boundary. */
5642 temp = gen_reg_rtx (Pmode);
5643 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5644 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5646 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5647 temp, 0, OPTAB_LIB_WIDEN);
5648 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5649 temp, 0, OPTAB_LIB_WIDEN);
5655 expand_builtin_init_trampoline (tree exp)
5657 tree t_tramp, t_func, t_chain;
5658 rtx r_tramp, r_func, r_chain;
5659 #ifdef TRAMPOLINE_TEMPLATE
5663 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5664 POINTER_TYPE, VOID_TYPE))
5667 t_tramp = CALL_EXPR_ARG (exp, 0);
5668 t_func = CALL_EXPR_ARG (exp, 1);
5669 t_chain = CALL_EXPR_ARG (exp, 2);
5671 r_tramp = expand_normal (t_tramp);
5672 r_func = expand_normal (t_func);
5673 r_chain = expand_normal (t_chain);
5675 /* Generate insns to initialize the trampoline. */
5676 r_tramp = round_trampoline_addr (r_tramp);
5677 #ifdef TRAMPOLINE_TEMPLATE
5678 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5679 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5680 emit_block_move (blktramp, assemble_trampoline_template (),
5681 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5683 trampolines_created = 1;
5684 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5690 expand_builtin_adjust_trampoline (tree exp)
5694 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5697 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5698 tramp = round_trampoline_addr (tramp);
5699 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5700 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5706 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5707 function. The function first checks whether the back end provides
5708 an insn to implement signbit for the respective mode. If not, it
5709 checks whether the floating point format of the value is such that
5710 the sign bit can be extracted. If that is not the case, the
5711 function returns NULL_RTX to indicate that a normal call should be
5712 emitted rather than expanding the function in-line. EXP is the
5713 expression that is a call to the builtin function; if convenient,
5714 the result should be placed in TARGET. */
5716 expand_builtin_signbit (tree exp, rtx target)
5718 const struct real_format *fmt;
5719 enum machine_mode fmode, imode, rmode;
5720 HOST_WIDE_INT hi, lo;
5723 enum insn_code icode;
5726 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5729 arg = CALL_EXPR_ARG (exp, 0);
5730 fmode = TYPE_MODE (TREE_TYPE (arg));
5731 rmode = TYPE_MODE (TREE_TYPE (exp));
5732 fmt = REAL_MODE_FORMAT (fmode);
5734 arg = builtin_save_expr (arg);
5736 /* Expand the argument yielding a RTX expression. */
5737 temp = expand_normal (arg);
5739 /* Check if the back end provides an insn that handles signbit for the
5741 icode = signbit_optab->handlers [(int) fmode].insn_code;
5742 if (icode != CODE_FOR_nothing)
5744 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5745 emit_unop_insn (icode, target, temp, UNKNOWN);
5749 /* For floating point formats without a sign bit, implement signbit
5751 bitpos = fmt->signbit_ro;
5754 /* But we can't do this if the format supports signed zero. */
5755 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5758 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5759 build_real (TREE_TYPE (arg), dconst0));
5760 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5763 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5765 imode = int_mode_for_mode (fmode);
5766 if (imode == BLKmode)
5768 temp = gen_lowpart (imode, temp);
5773 /* Handle targets with different FP word orders. */
5774 if (FLOAT_WORDS_BIG_ENDIAN)
5775 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5777 word = bitpos / BITS_PER_WORD;
5778 temp = operand_subword_force (temp, word, fmode);
5779 bitpos = bitpos % BITS_PER_WORD;
5782 /* Force the intermediate word_mode (or narrower) result into a
5783 register. This avoids attempting to create paradoxical SUBREGs
5784 of floating point modes below. */
5785 temp = force_reg (imode, temp);
5787 /* If the bitpos is within the "result mode" lowpart, the operation
5788 can be implement with a single bitwise AND. Otherwise, we need
5789 a right shift and an AND. */
5791 if (bitpos < GET_MODE_BITSIZE (rmode))
5793 if (bitpos < HOST_BITS_PER_WIDE_INT)
5796 lo = (HOST_WIDE_INT) 1 << bitpos;
5800 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5805 temp = gen_lowpart (rmode, temp);
5806 temp = expand_binop (rmode, and_optab, temp,
5807 immed_double_const (lo, hi, rmode),
5808 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5812 /* Perform a logical right shift to place the signbit in the least
5813 significant bit, then truncate the result to the desired mode
5814 and mask just this bit. */
5815 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5816 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5817 temp = gen_lowpart (rmode, temp);
5818 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5819 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5825 /* Expand fork or exec calls. TARGET is the desired target of the
5826 call. EXP is the call. FN is the
5827 identificator of the actual function. IGNORE is nonzero if the
5828 value is to be ignored. */
5831 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5836 /* If we are not profiling, just call the function. */
5837 if (!profile_arc_flag)
5840 /* Otherwise call the wrapper. This should be equivalent for the rest of
5841 compiler, so the code does not diverge, and the wrapper may run the
5842 code necessary for keeping the profiling sane. */
5844 switch (DECL_FUNCTION_CODE (fn))
5847 id = get_identifier ("__gcov_fork");
5850 case BUILT_IN_EXECL:
5851 id = get_identifier ("__gcov_execl");
5854 case BUILT_IN_EXECV:
5855 id = get_identifier ("__gcov_execv");
5858 case BUILT_IN_EXECLP:
5859 id = get_identifier ("__gcov_execlp");
5862 case BUILT_IN_EXECLE:
5863 id = get_identifier ("__gcov_execle");
5866 case BUILT_IN_EXECVP:
5867 id = get_identifier ("__gcov_execvp");
5870 case BUILT_IN_EXECVE:
5871 id = get_identifier ("__gcov_execve");
5878 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5879 DECL_EXTERNAL (decl) = 1;
5880 TREE_PUBLIC (decl) = 1;
5881 DECL_ARTIFICIAL (decl) = 1;
5882 TREE_NOTHROW (decl) = 1;
5883 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5884 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5885 call = rewrite_call_expr (exp, 0, decl, 0);
5886 return expand_call (call, target, ignore);
5891 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5892 the pointer in these functions is void*, the tree optimizers may remove
5893 casts. The mode computed in expand_builtin isn't reliable either, due
5894 to __sync_bool_compare_and_swap.
5896 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5897 group of builtins. This gives us log2 of the mode size. */
5899 static inline enum machine_mode
5900 get_builtin_sync_mode (int fcode_diff)
5902 /* The size is not negotiable, so ask not to get BLKmode in return
5903 if the target indicates that a smaller size would be better. */
5904 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5907 /* Expand the memory expression LOC and return the appropriate memory operand
5908 for the builtin_sync operations. */
5911 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5915 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5917 /* Note that we explicitly do not want any alias information for this
5918 memory, so that we kill all other live memories. Otherwise we don't
5919 satisfy the full barrier semantics of the intrinsic. */
5920 mem = validize_mem (gen_rtx_MEM (mode, addr));
5922 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5923 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5924 MEM_VOLATILE_P (mem) = 1;
5929 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5930 EXP is the CALL_EXPR. CODE is the rtx code
5931 that corresponds to the arithmetic or logical operation from the name;
5932 an exception here is that NOT actually means NAND. TARGET is an optional
5933 place for us to store the results; AFTER is true if this is the
5934 fetch_and_xxx form. IGNORE is true if we don't actually care about
5935 the result of the operation at all. */
5938 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5939 enum rtx_code code, bool after,
5940 rtx target, bool ignore)
5943 enum machine_mode old_mode;
5945 /* Expand the operands. */
5946 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5948 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5949 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5950 of CONST_INTs, where we know the old_mode only from the call argument. */
5951 old_mode = GET_MODE (val);
5952 if (old_mode == VOIDmode)
5953 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5954 val = convert_modes (mode, old_mode, val, 1);
5957 return expand_sync_operation (mem, val, code);
5959 return expand_sync_fetch_operation (mem, val, code, after, target);
5962 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5963 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5964 true if this is the boolean form. TARGET is a place for us to store the
5965 results; this is NOT optional if IS_BOOL is true. */
5968 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5969 bool is_bool, rtx target)
5971 rtx old_val, new_val, mem;
5972 enum machine_mode old_mode;
5974 /* Expand the operands. */
5975 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5978 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5979 mode, EXPAND_NORMAL);
5980 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5981 of CONST_INTs, where we know the old_mode only from the call argument. */
5982 old_mode = GET_MODE (old_val);
5983 if (old_mode == VOIDmode)
5984 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5985 old_val = convert_modes (mode, old_mode, old_val, 1);
5987 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5988 mode, EXPAND_NORMAL);
5989 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5990 of CONST_INTs, where we know the old_mode only from the call argument. */
5991 old_mode = GET_MODE (new_val);
5992 if (old_mode == VOIDmode)
5993 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5994 new_val = convert_modes (mode, old_mode, new_val, 1);
5997 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5999 return expand_val_compare_and_swap (mem, old_val, new_val, target);
6002 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6003 general form is actually an atomic exchange, and some targets only
6004 support a reduced form with the second argument being a constant 1.
6005 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6009 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
6013 enum machine_mode old_mode;
6015 /* Expand the operands. */
6016 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6017 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6018 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6019 of CONST_INTs, where we know the old_mode only from the call argument. */
6020 old_mode = GET_MODE (val);
6021 if (old_mode == VOIDmode)
6022 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6023 val = convert_modes (mode, old_mode, val, 1);
6025 return expand_sync_lock_test_and_set (mem, val, target);
6028 /* Expand the __sync_synchronize intrinsic. */
6031 expand_builtin_synchronize (void)
6035 #ifdef HAVE_memory_barrier
6036 if (HAVE_memory_barrier)
6038 emit_insn (gen_memory_barrier ());
6043 /* If no explicit memory barrier instruction is available, create an
6044 empty asm stmt with a memory clobber. */
6045 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6046 tree_cons (NULL, build_string (6, "memory"), NULL));
6047 ASM_VOLATILE_P (x) = 1;
6048 expand_asm_expr (x);
6051 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6054 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6056 enum insn_code icode;
6058 rtx val = const0_rtx;
6060 /* Expand the operands. */
6061 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6063 /* If there is an explicit operation in the md file, use it. */
6064 icode = sync_lock_release[mode];
6065 if (icode != CODE_FOR_nothing)
6067 if (!insn_data[icode].operand[1].predicate (val, mode))
6068 val = force_reg (mode, val);
6070 insn = GEN_FCN (icode) (mem, val);
6078 /* Otherwise we can implement this operation by emitting a barrier
6079 followed by a store of zero. */
6080 expand_builtin_synchronize ();
6081 emit_move_insn (mem, val);
6084 /* Expand an expression EXP that calls a built-in function,
6085 with result going to TARGET if that's convenient
6086 (and in mode MODE if that's convenient).
6087 SUBTARGET may be used as the target for computing one of EXP's operands.
6088 IGNORE is nonzero if the value is to be ignored. */
6091 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6094 tree fndecl = get_callee_fndecl (exp);
6095 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6096 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6098 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6099 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6101 /* When not optimizing, generate calls to library functions for a certain
6104 && !called_as_built_in (fndecl)
6105 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6106 && fcode != BUILT_IN_ALLOCA)
6107 return expand_call (exp, target, ignore);
6109 /* The built-in function expanders test for target == const0_rtx
6110 to determine whether the function's result will be ignored. */
6112 target = const0_rtx;
6114 /* If the result of a pure or const built-in function is ignored, and
6115 none of its arguments are volatile, we can avoid expanding the
6116 built-in call and just evaluate the arguments for side-effects. */
6117 if (target == const0_rtx
6118 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
6120 bool volatilep = false;
6122 call_expr_arg_iterator iter;
6124 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6125 if (TREE_THIS_VOLATILE (arg))
6133 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6134 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6141 CASE_FLT_FN (BUILT_IN_FABS):
6142 target = expand_builtin_fabs (exp, target, subtarget);
6147 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6148 target = expand_builtin_copysign (exp, target, subtarget);
6153 /* Just do a normal library call if we were unable to fold
6155 CASE_FLT_FN (BUILT_IN_CABS):
6158 CASE_FLT_FN (BUILT_IN_EXP):
6159 CASE_FLT_FN (BUILT_IN_EXP10):
6160 CASE_FLT_FN (BUILT_IN_POW10):
6161 CASE_FLT_FN (BUILT_IN_EXP2):
6162 CASE_FLT_FN (BUILT_IN_EXPM1):
6163 CASE_FLT_FN (BUILT_IN_LOGB):
6164 CASE_FLT_FN (BUILT_IN_LOG):
6165 CASE_FLT_FN (BUILT_IN_LOG10):
6166 CASE_FLT_FN (BUILT_IN_LOG2):
6167 CASE_FLT_FN (BUILT_IN_LOG1P):
6168 CASE_FLT_FN (BUILT_IN_TAN):
6169 CASE_FLT_FN (BUILT_IN_ASIN):
6170 CASE_FLT_FN (BUILT_IN_ACOS):
6171 CASE_FLT_FN (BUILT_IN_ATAN):
6172 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6173 because of possible accuracy problems. */
6174 if (! flag_unsafe_math_optimizations)
6176 CASE_FLT_FN (BUILT_IN_SQRT):
6177 CASE_FLT_FN (BUILT_IN_FLOOR):
6178 CASE_FLT_FN (BUILT_IN_CEIL):
6179 CASE_FLT_FN (BUILT_IN_TRUNC):
6180 CASE_FLT_FN (BUILT_IN_ROUND):
6181 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6182 CASE_FLT_FN (BUILT_IN_RINT):
6183 target = expand_builtin_mathfn (exp, target, subtarget);
6188 CASE_FLT_FN (BUILT_IN_ILOGB):
6189 if (! flag_unsafe_math_optimizations)
6191 CASE_FLT_FN (BUILT_IN_ISINF):
6192 CASE_FLT_FN (BUILT_IN_FINITE):
6193 case BUILT_IN_ISFINITE:
6194 case BUILT_IN_ISNORMAL:
6195 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6200 CASE_FLT_FN (BUILT_IN_LCEIL):
6201 CASE_FLT_FN (BUILT_IN_LLCEIL):
6202 CASE_FLT_FN (BUILT_IN_LFLOOR):
6203 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6204 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6209 CASE_FLT_FN (BUILT_IN_LRINT):
6210 CASE_FLT_FN (BUILT_IN_LLRINT):
6211 CASE_FLT_FN (BUILT_IN_LROUND):
6212 CASE_FLT_FN (BUILT_IN_LLROUND):
6213 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6218 CASE_FLT_FN (BUILT_IN_POW):
6219 target = expand_builtin_pow (exp, target, subtarget);
6224 CASE_FLT_FN (BUILT_IN_POWI):
6225 target = expand_builtin_powi (exp, target, subtarget);
6230 CASE_FLT_FN (BUILT_IN_ATAN2):
6231 CASE_FLT_FN (BUILT_IN_LDEXP):
6232 CASE_FLT_FN (BUILT_IN_SCALB):
6233 CASE_FLT_FN (BUILT_IN_SCALBN):
6234 CASE_FLT_FN (BUILT_IN_SCALBLN):
6235 if (! flag_unsafe_math_optimizations)
6238 CASE_FLT_FN (BUILT_IN_FMOD):
6239 CASE_FLT_FN (BUILT_IN_REMAINDER):
6240 CASE_FLT_FN (BUILT_IN_DREM):
6241 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6246 CASE_FLT_FN (BUILT_IN_CEXPI):
6247 target = expand_builtin_cexpi (exp, target, subtarget);
6248 gcc_assert (target);
6251 CASE_FLT_FN (BUILT_IN_SIN):
6252 CASE_FLT_FN (BUILT_IN_COS):
6253 if (! flag_unsafe_math_optimizations)
6255 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6260 CASE_FLT_FN (BUILT_IN_SINCOS):
6261 if (! flag_unsafe_math_optimizations)
6263 target = expand_builtin_sincos (exp);
6268 case BUILT_IN_APPLY_ARGS:
6269 return expand_builtin_apply_args ();
6271 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6272 FUNCTION with a copy of the parameters described by
6273 ARGUMENTS, and ARGSIZE. It returns a block of memory
6274 allocated on the stack into which is stored all the registers
6275 that might possibly be used for returning the result of a
6276 function. ARGUMENTS is the value returned by
6277 __builtin_apply_args. ARGSIZE is the number of bytes of
6278 arguments that must be copied. ??? How should this value be
6279 computed? We'll also need a safe worst case value for varargs
6281 case BUILT_IN_APPLY:
6282 if (!validate_arglist (exp, POINTER_TYPE,
6283 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6284 && !validate_arglist (exp, REFERENCE_TYPE,
6285 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6291 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6292 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6293 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6295 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6298 /* __builtin_return (RESULT) causes the function to return the
6299 value described by RESULT. RESULT is address of the block of
6300 memory returned by __builtin_apply. */
6301 case BUILT_IN_RETURN:
6302 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6303 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6306 case BUILT_IN_SAVEREGS:
6307 return expand_builtin_saveregs ();
6309 case BUILT_IN_ARGS_INFO:
6310 return expand_builtin_args_info (exp);
6312 /* Return the address of the first anonymous stack arg. */
6313 case BUILT_IN_NEXT_ARG:
6314 if (fold_builtin_next_arg (exp, false))
6316 return expand_builtin_next_arg ();
6318 case BUILT_IN_CLEAR_CACHE:
6319 target = expand_builtin___clear_cache (exp);
6324 case BUILT_IN_CLASSIFY_TYPE:
6325 return expand_builtin_classify_type (exp);
6327 case BUILT_IN_CONSTANT_P:
6330 case BUILT_IN_FRAME_ADDRESS:
6331 case BUILT_IN_RETURN_ADDRESS:
6332 return expand_builtin_frame_address (fndecl, exp);
6334 /* Returns the address of the area where the structure is returned.
6336 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6337 if (call_expr_nargs (exp) != 0
6338 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6339 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6342 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6344 case BUILT_IN_ALLOCA:
6345 target = expand_builtin_alloca (exp, target);
6350 case BUILT_IN_STACK_SAVE:
6351 return expand_stack_save ();
6353 case BUILT_IN_STACK_RESTORE:
6354 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6357 case BUILT_IN_BSWAP32:
6358 case BUILT_IN_BSWAP64:
6359 target = expand_builtin_bswap (exp, target, subtarget);
6365 CASE_INT_FN (BUILT_IN_FFS):
6366 case BUILT_IN_FFSIMAX:
6367 target = expand_builtin_unop (target_mode, exp, target,
6368 subtarget, ffs_optab);
6373 CASE_INT_FN (BUILT_IN_CLZ):
6374 case BUILT_IN_CLZIMAX:
6375 target = expand_builtin_unop (target_mode, exp, target,
6376 subtarget, clz_optab);
6381 CASE_INT_FN (BUILT_IN_CTZ):
6382 case BUILT_IN_CTZIMAX:
6383 target = expand_builtin_unop (target_mode, exp, target,
6384 subtarget, ctz_optab);
6389 CASE_INT_FN (BUILT_IN_POPCOUNT):
6390 case BUILT_IN_POPCOUNTIMAX:
6391 target = expand_builtin_unop (target_mode, exp, target,
6392 subtarget, popcount_optab);
6397 CASE_INT_FN (BUILT_IN_PARITY):
6398 case BUILT_IN_PARITYIMAX:
6399 target = expand_builtin_unop (target_mode, exp, target,
6400 subtarget, parity_optab);
6405 case BUILT_IN_STRLEN:
6406 target = expand_builtin_strlen (exp, target, target_mode);
6411 case BUILT_IN_STRCPY:
6412 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6417 case BUILT_IN_STRNCPY:
6418 target = expand_builtin_strncpy (exp, target, mode);
6423 case BUILT_IN_STPCPY:
6424 target = expand_builtin_stpcpy (exp, target, mode);
6429 case BUILT_IN_STRCAT:
6430 target = expand_builtin_strcat (fndecl, exp, target, mode);
6435 case BUILT_IN_STRNCAT:
6436 target = expand_builtin_strncat (exp, target, mode);
6441 case BUILT_IN_STRSPN:
6442 target = expand_builtin_strspn (exp, target, mode);
6447 case BUILT_IN_STRCSPN:
6448 target = expand_builtin_strcspn (exp, target, mode);
6453 case BUILT_IN_STRSTR:
6454 target = expand_builtin_strstr (exp, target, mode);
6459 case BUILT_IN_STRPBRK:
6460 target = expand_builtin_strpbrk (exp, target, mode);
6465 case BUILT_IN_INDEX:
6466 case BUILT_IN_STRCHR:
6467 target = expand_builtin_strchr (exp, target, mode);
6472 case BUILT_IN_RINDEX:
6473 case BUILT_IN_STRRCHR:
6474 target = expand_builtin_strrchr (exp, target, mode);
6479 case BUILT_IN_MEMCPY:
6480 target = expand_builtin_memcpy (exp, target, mode);
6485 case BUILT_IN_MEMPCPY:
6486 target = expand_builtin_mempcpy (exp, target, mode);
6491 case BUILT_IN_MEMMOVE:
6492 target = expand_builtin_memmove (exp, target, mode, ignore);
6497 case BUILT_IN_BCOPY:
6498 target = expand_builtin_bcopy (exp, ignore);
6503 case BUILT_IN_MEMSET:
6504 target = expand_builtin_memset (exp, target, mode);
6509 case BUILT_IN_BZERO:
6510 target = expand_builtin_bzero (exp);
6515 case BUILT_IN_STRCMP:
6516 target = expand_builtin_strcmp (exp, target, mode);
6521 case BUILT_IN_STRNCMP:
6522 target = expand_builtin_strncmp (exp, target, mode);
6527 case BUILT_IN_MEMCHR:
6528 target = expand_builtin_memchr (exp, target, mode);
6534 case BUILT_IN_MEMCMP:
6535 target = expand_builtin_memcmp (exp, target, mode);
6540 case BUILT_IN_SETJMP:
6541 /* This should have been lowered to the builtins below. */
6544 case BUILT_IN_SETJMP_SETUP:
6545 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6546 and the receiver label. */
6547 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6549 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6550 VOIDmode, EXPAND_NORMAL);
6551 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6552 rtx label_r = label_rtx (label);
6554 /* This is copied from the handling of non-local gotos. */
6555 expand_builtin_setjmp_setup (buf_addr, label_r);
6556 nonlocal_goto_handler_labels
6557 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6558 nonlocal_goto_handler_labels);
6559 /* ??? Do not let expand_label treat us as such since we would
6560 not want to be both on the list of non-local labels and on
6561 the list of forced labels. */
6562 FORCED_LABEL (label) = 0;
6567 case BUILT_IN_SETJMP_DISPATCHER:
6568 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6569 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6571 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6572 rtx label_r = label_rtx (label);
6574 /* Remove the dispatcher label from the list of non-local labels
6575 since the receiver labels have been added to it above. */
6576 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6581 case BUILT_IN_SETJMP_RECEIVER:
6582 /* __builtin_setjmp_receiver is passed the receiver label. */
6583 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6585 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6586 rtx label_r = label_rtx (label);
6588 expand_builtin_setjmp_receiver (label_r);
6593 /* __builtin_longjmp is passed a pointer to an array of five words.
6594 It's similar to the C library longjmp function but works with
6595 __builtin_setjmp above. */
6596 case BUILT_IN_LONGJMP:
6597 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6599 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6600 VOIDmode, EXPAND_NORMAL);
6601 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6603 if (value != const1_rtx)
6605 error ("%<__builtin_longjmp%> second argument must be 1");
6609 expand_builtin_longjmp (buf_addr, value);
6614 case BUILT_IN_NONLOCAL_GOTO:
6615 target = expand_builtin_nonlocal_goto (exp);
6620 /* This updates the setjmp buffer that is its argument with the value
6621 of the current stack pointer. */
6622 case BUILT_IN_UPDATE_SETJMP_BUF:
6623 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6626 = expand_normal (CALL_EXPR_ARG (exp, 0));
6628 expand_builtin_update_setjmp_buf (buf_addr);
6634 expand_builtin_trap ();
6637 case BUILT_IN_PRINTF:
6638 target = expand_builtin_printf (exp, target, mode, false);
6643 case BUILT_IN_PRINTF_UNLOCKED:
6644 target = expand_builtin_printf (exp, target, mode, true);
6649 case BUILT_IN_FPUTS:
6650 target = expand_builtin_fputs (exp, target, false);
6654 case BUILT_IN_FPUTS_UNLOCKED:
6655 target = expand_builtin_fputs (exp, target, true);
6660 case BUILT_IN_FPRINTF:
6661 target = expand_builtin_fprintf (exp, target, mode, false);
6666 case BUILT_IN_FPRINTF_UNLOCKED:
6667 target = expand_builtin_fprintf (exp, target, mode, true);
6672 case BUILT_IN_SPRINTF:
6673 target = expand_builtin_sprintf (exp, target, mode);
6678 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6679 case BUILT_IN_SIGNBITD32:
6680 case BUILT_IN_SIGNBITD64:
6681 case BUILT_IN_SIGNBITD128:
6682 target = expand_builtin_signbit (exp, target);
6687 /* Various hooks for the DWARF 2 __throw routine. */
6688 case BUILT_IN_UNWIND_INIT:
6689 expand_builtin_unwind_init ();
6691 case BUILT_IN_DWARF_CFA:
6692 return virtual_cfa_rtx;
6693 #ifdef DWARF2_UNWIND_INFO
6694 case BUILT_IN_DWARF_SP_COLUMN:
6695 return expand_builtin_dwarf_sp_column ();
6696 case BUILT_IN_INIT_DWARF_REG_SIZES:
6697 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6700 case BUILT_IN_FROB_RETURN_ADDR:
6701 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6702 case BUILT_IN_EXTRACT_RETURN_ADDR:
6703 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6704 case BUILT_IN_EH_RETURN:
6705 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6706 CALL_EXPR_ARG (exp, 1));
6708 #ifdef EH_RETURN_DATA_REGNO
6709 case BUILT_IN_EH_RETURN_DATA_REGNO:
6710 return expand_builtin_eh_return_data_regno (exp);
6712 case BUILT_IN_EXTEND_POINTER:
6713 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6715 case BUILT_IN_VA_START:
6716 case BUILT_IN_STDARG_START:
6717 return expand_builtin_va_start (exp);
6718 case BUILT_IN_VA_END:
6719 return expand_builtin_va_end (exp);
6720 case BUILT_IN_VA_COPY:
6721 return expand_builtin_va_copy (exp);
6722 case BUILT_IN_EXPECT:
6723 return expand_builtin_expect (exp, target);
6724 case BUILT_IN_PREFETCH:
6725 expand_builtin_prefetch (exp);
6728 case BUILT_IN_PROFILE_FUNC_ENTER:
6729 return expand_builtin_profile_func (false);
6730 case BUILT_IN_PROFILE_FUNC_EXIT:
6731 return expand_builtin_profile_func (true);
6733 case BUILT_IN_INIT_TRAMPOLINE:
6734 return expand_builtin_init_trampoline (exp);
6735 case BUILT_IN_ADJUST_TRAMPOLINE:
6736 return expand_builtin_adjust_trampoline (exp);
6739 case BUILT_IN_EXECL:
6740 case BUILT_IN_EXECV:
6741 case BUILT_IN_EXECLP:
6742 case BUILT_IN_EXECLE:
6743 case BUILT_IN_EXECVP:
6744 case BUILT_IN_EXECVE:
6745 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6750 case BUILT_IN_FETCH_AND_ADD_1:
6751 case BUILT_IN_FETCH_AND_ADD_2:
6752 case BUILT_IN_FETCH_AND_ADD_4:
6753 case BUILT_IN_FETCH_AND_ADD_8:
6754 case BUILT_IN_FETCH_AND_ADD_16:
6755 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6756 target = expand_builtin_sync_operation (mode, exp, PLUS,
6757 false, target, ignore);
6762 case BUILT_IN_FETCH_AND_SUB_1:
6763 case BUILT_IN_FETCH_AND_SUB_2:
6764 case BUILT_IN_FETCH_AND_SUB_4:
6765 case BUILT_IN_FETCH_AND_SUB_8:
6766 case BUILT_IN_FETCH_AND_SUB_16:
6767 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6768 target = expand_builtin_sync_operation (mode, exp, MINUS,
6769 false, target, ignore);
6774 case BUILT_IN_FETCH_AND_OR_1:
6775 case BUILT_IN_FETCH_AND_OR_2:
6776 case BUILT_IN_FETCH_AND_OR_4:
6777 case BUILT_IN_FETCH_AND_OR_8:
6778 case BUILT_IN_FETCH_AND_OR_16:
6779 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6780 target = expand_builtin_sync_operation (mode, exp, IOR,
6781 false, target, ignore);
6786 case BUILT_IN_FETCH_AND_AND_1:
6787 case BUILT_IN_FETCH_AND_AND_2:
6788 case BUILT_IN_FETCH_AND_AND_4:
6789 case BUILT_IN_FETCH_AND_AND_8:
6790 case BUILT_IN_FETCH_AND_AND_16:
6791 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6792 target = expand_builtin_sync_operation (mode, exp, AND,
6793 false, target, ignore);
6798 case BUILT_IN_FETCH_AND_XOR_1:
6799 case BUILT_IN_FETCH_AND_XOR_2:
6800 case BUILT_IN_FETCH_AND_XOR_4:
6801 case BUILT_IN_FETCH_AND_XOR_8:
6802 case BUILT_IN_FETCH_AND_XOR_16:
6803 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6804 target = expand_builtin_sync_operation (mode, exp, XOR,
6805 false, target, ignore);
6810 case BUILT_IN_FETCH_AND_NAND_1:
6811 case BUILT_IN_FETCH_AND_NAND_2:
6812 case BUILT_IN_FETCH_AND_NAND_4:
6813 case BUILT_IN_FETCH_AND_NAND_8:
6814 case BUILT_IN_FETCH_AND_NAND_16:
6815 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6816 target = expand_builtin_sync_operation (mode, exp, NOT,
6817 false, target, ignore);
6822 case BUILT_IN_ADD_AND_FETCH_1:
6823 case BUILT_IN_ADD_AND_FETCH_2:
6824 case BUILT_IN_ADD_AND_FETCH_4:
6825 case BUILT_IN_ADD_AND_FETCH_8:
6826 case BUILT_IN_ADD_AND_FETCH_16:
6827 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6828 target = expand_builtin_sync_operation (mode, exp, PLUS,
6829 true, target, ignore);
6834 case BUILT_IN_SUB_AND_FETCH_1:
6835 case BUILT_IN_SUB_AND_FETCH_2:
6836 case BUILT_IN_SUB_AND_FETCH_4:
6837 case BUILT_IN_SUB_AND_FETCH_8:
6838 case BUILT_IN_SUB_AND_FETCH_16:
6839 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6840 target = expand_builtin_sync_operation (mode, exp, MINUS,
6841 true, target, ignore);
6846 case BUILT_IN_OR_AND_FETCH_1:
6847 case BUILT_IN_OR_AND_FETCH_2:
6848 case BUILT_IN_OR_AND_FETCH_4:
6849 case BUILT_IN_OR_AND_FETCH_8:
6850 case BUILT_IN_OR_AND_FETCH_16:
6851 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6852 target = expand_builtin_sync_operation (mode, exp, IOR,
6853 true, target, ignore);
6858 case BUILT_IN_AND_AND_FETCH_1:
6859 case BUILT_IN_AND_AND_FETCH_2:
6860 case BUILT_IN_AND_AND_FETCH_4:
6861 case BUILT_IN_AND_AND_FETCH_8:
6862 case BUILT_IN_AND_AND_FETCH_16:
6863 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6864 target = expand_builtin_sync_operation (mode, exp, AND,
6865 true, target, ignore);
6870 case BUILT_IN_XOR_AND_FETCH_1:
6871 case BUILT_IN_XOR_AND_FETCH_2:
6872 case BUILT_IN_XOR_AND_FETCH_4:
6873 case BUILT_IN_XOR_AND_FETCH_8:
6874 case BUILT_IN_XOR_AND_FETCH_16:
6875 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6876 target = expand_builtin_sync_operation (mode, exp, XOR,
6877 true, target, ignore);
6882 case BUILT_IN_NAND_AND_FETCH_1:
6883 case BUILT_IN_NAND_AND_FETCH_2:
6884 case BUILT_IN_NAND_AND_FETCH_4:
6885 case BUILT_IN_NAND_AND_FETCH_8:
6886 case BUILT_IN_NAND_AND_FETCH_16:
6887 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6888 target = expand_builtin_sync_operation (mode, exp, NOT,
6889 true, target, ignore);
6894 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6895 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6896 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6897 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6898 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6899 if (mode == VOIDmode)
6900 mode = TYPE_MODE (boolean_type_node);
6901 if (!target || !register_operand (target, mode))
6902 target = gen_reg_rtx (mode);
6904 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6905 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6910 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6911 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6912 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6913 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6914 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6915 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6916 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6921 case BUILT_IN_LOCK_TEST_AND_SET_1:
6922 case BUILT_IN_LOCK_TEST_AND_SET_2:
6923 case BUILT_IN_LOCK_TEST_AND_SET_4:
6924 case BUILT_IN_LOCK_TEST_AND_SET_8:
6925 case BUILT_IN_LOCK_TEST_AND_SET_16:
6926 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6927 target = expand_builtin_lock_test_and_set (mode, exp, target);
6932 case BUILT_IN_LOCK_RELEASE_1:
6933 case BUILT_IN_LOCK_RELEASE_2:
6934 case BUILT_IN_LOCK_RELEASE_4:
6935 case BUILT_IN_LOCK_RELEASE_8:
6936 case BUILT_IN_LOCK_RELEASE_16:
6937 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6938 expand_builtin_lock_release (mode, exp);
6941 case BUILT_IN_SYNCHRONIZE:
6942 expand_builtin_synchronize ();
6945 case BUILT_IN_OBJECT_SIZE:
6946 return expand_builtin_object_size (exp);
6948 case BUILT_IN_MEMCPY_CHK:
6949 case BUILT_IN_MEMPCPY_CHK:
6950 case BUILT_IN_MEMMOVE_CHK:
6951 case BUILT_IN_MEMSET_CHK:
6952 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6957 case BUILT_IN_STRCPY_CHK:
6958 case BUILT_IN_STPCPY_CHK:
6959 case BUILT_IN_STRNCPY_CHK:
6960 case BUILT_IN_STRCAT_CHK:
6961 case BUILT_IN_STRNCAT_CHK:
6962 case BUILT_IN_SNPRINTF_CHK:
6963 case BUILT_IN_VSNPRINTF_CHK:
6964 maybe_emit_chk_warning (exp, fcode);
6967 case BUILT_IN_SPRINTF_CHK:
6968 case BUILT_IN_VSPRINTF_CHK:
6969 maybe_emit_sprintf_chk_warning (exp, fcode);
6972 default: /* just do library call, if unknown builtin */
6976 /* The switch statement above can drop through to cause the function
6977 to be called normally. */
6978 return expand_call (exp, target, ignore);
6981 /* Determine whether a tree node represents a call to a built-in
6982 function. If the tree T is a call to a built-in function with
6983 the right number of arguments of the appropriate types, return
6984 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6985 Otherwise the return value is END_BUILTINS. */
6987 enum built_in_function
6988 builtin_mathfn_code (tree t)
6990 tree fndecl, arg, parmlist;
6991 tree argtype, parmtype;
6992 call_expr_arg_iterator iter;
6994 if (TREE_CODE (t) != CALL_EXPR
6995 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6996 return END_BUILTINS;
6998 fndecl = get_callee_fndecl (t);
6999 if (fndecl == NULL_TREE
7000 || TREE_CODE (fndecl) != FUNCTION_DECL
7001 || ! DECL_BUILT_IN (fndecl)
7002 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7003 return END_BUILTINS;
7005 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7006 init_call_expr_arg_iterator (t, &iter);
7007 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7009 /* If a function doesn't take a variable number of arguments,
7010 the last element in the list will have type `void'. */
7011 parmtype = TREE_VALUE (parmlist);
7012 if (VOID_TYPE_P (parmtype))
7014 if (more_call_expr_args_p (&iter))
7015 return END_BUILTINS;
7016 return DECL_FUNCTION_CODE (fndecl);
7019 if (! more_call_expr_args_p (&iter))
7020 return END_BUILTINS;
7022 arg = next_call_expr_arg (&iter);
7023 argtype = TREE_TYPE (arg);
7025 if (SCALAR_FLOAT_TYPE_P (parmtype))
7027 if (! SCALAR_FLOAT_TYPE_P (argtype))
7028 return END_BUILTINS;
7030 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7032 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7033 return END_BUILTINS;
7035 else if (POINTER_TYPE_P (parmtype))
7037 if (! POINTER_TYPE_P (argtype))
7038 return END_BUILTINS;
7040 else if (INTEGRAL_TYPE_P (parmtype))
7042 if (! INTEGRAL_TYPE_P (argtype))
7043 return END_BUILTINS;
7046 return END_BUILTINS;
7049 /* Variable-length argument list. */
7050 return DECL_FUNCTION_CODE (fndecl);
7053 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7054 evaluate to a constant. */
7057 fold_builtin_constant_p (tree arg)
7059 /* We return 1 for a numeric type that's known to be a constant
7060 value at compile-time or for an aggregate type that's a
7061 literal constant. */
7064 /* If we know this is a constant, emit the constant of one. */
7065 if (CONSTANT_CLASS_P (arg)
7066 || (TREE_CODE (arg) == CONSTRUCTOR
7067 && TREE_CONSTANT (arg)))
7068 return integer_one_node;
7069 if (TREE_CODE (arg) == ADDR_EXPR)
7071 tree op = TREE_OPERAND (arg, 0);
7072 if (TREE_CODE (op) == STRING_CST
7073 || (TREE_CODE (op) == ARRAY_REF
7074 && integer_zerop (TREE_OPERAND (op, 1))
7075 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7076 return integer_one_node;
7079 /* If this expression has side effects, show we don't know it to be a
7080 constant. Likewise if it's a pointer or aggregate type since in
7081 those case we only want literals, since those are only optimized
7082 when generating RTL, not later.
7083 And finally, if we are compiling an initializer, not code, we
7084 need to return a definite result now; there's not going to be any
7085 more optimization done. */
7086 if (TREE_SIDE_EFFECTS (arg)
7087 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7088 || POINTER_TYPE_P (TREE_TYPE (arg))
7090 || folding_initializer)
7091 return integer_zero_node;
7096 /* Fold a call to __builtin_expect with argument ARG, if we expect that a
7097 comparison against the argument will fold to a constant. In practice,
7098 this means a true constant or the address of a non-weak symbol. */
7101 fold_builtin_expect (tree arg)
7105 /* If the argument isn't invariant, then there's nothing we can do. */
7106 if (!TREE_INVARIANT (arg))
7109 /* If we're looking at an address of a weak decl, then do not fold. */
7112 if (TREE_CODE (inner) == ADDR_EXPR)
7116 inner = TREE_OPERAND (inner, 0);
7118 while (TREE_CODE (inner) == COMPONENT_REF
7119 || TREE_CODE (inner) == ARRAY_REF);
7120 if (DECL_P (inner) && DECL_WEAK (inner))
7124 /* Otherwise, ARG already has the proper type for the return value. */
7128 /* Fold a call to __builtin_classify_type with argument ARG. */
7131 fold_builtin_classify_type (tree arg)
7134 return build_int_cst (NULL_TREE, no_type_class);
7136 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7139 /* Fold a call to __builtin_strlen with argument ARG. */
7142 fold_builtin_strlen (tree arg)
7144 if (!validate_arg (arg, POINTER_TYPE))
7148 tree len = c_strlen (arg, 0);
7152 /* Convert from the internal "sizetype" type to "size_t". */
7154 len = fold_convert (size_type_node, len);
7162 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7165 fold_builtin_inf (tree type, int warn)
7167 REAL_VALUE_TYPE real;
7169 /* __builtin_inff is intended to be usable to define INFINITY on all
7170 targets. If an infinity is not available, INFINITY expands "to a
7171 positive constant of type float that overflows at translation
7172 time", footnote "In this case, using INFINITY will violate the
7173 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7174 Thus we pedwarn to ensure this constraint violation is
7176 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7177 pedwarn ("target format does not support infinity");
7180 return build_real (type, real);
7183 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7186 fold_builtin_nan (tree arg, tree type, int quiet)
7188 REAL_VALUE_TYPE real;
7191 if (!validate_arg (arg, POINTER_TYPE))
7193 str = c_getstr (arg);
7197 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7200 return build_real (type, real);
7203 /* Return true if the floating point expression T has an integer value.
7204 We also allow +Inf, -Inf and NaN to be considered integer values. */
7207 integer_valued_real_p (tree t)
7209 switch (TREE_CODE (t))
7216 case NON_LVALUE_EXPR:
7217 return integer_valued_real_p (TREE_OPERAND (t, 0));
7222 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
7229 return integer_valued_real_p (TREE_OPERAND (t, 0))
7230 && integer_valued_real_p (TREE_OPERAND (t, 1));
7233 return integer_valued_real_p (TREE_OPERAND (t, 1))
7234 && integer_valued_real_p (TREE_OPERAND (t, 2));
7237 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7241 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7242 if (TREE_CODE (type) == INTEGER_TYPE)
7244 if (TREE_CODE (type) == REAL_TYPE)
7245 return integer_valued_real_p (TREE_OPERAND (t, 0));
7250 switch (builtin_mathfn_code (t))
7252 CASE_FLT_FN (BUILT_IN_CEIL):
7253 CASE_FLT_FN (BUILT_IN_FLOOR):
7254 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7255 CASE_FLT_FN (BUILT_IN_RINT):
7256 CASE_FLT_FN (BUILT_IN_ROUND):
7257 CASE_FLT_FN (BUILT_IN_TRUNC):
7260 CASE_FLT_FN (BUILT_IN_FMIN):
7261 CASE_FLT_FN (BUILT_IN_FMAX):
7262 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7263 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7276 /* FNDECL is assumed to be a builtin where truncation can be propagated
7277 across (for instance floor((double)f) == (double)floorf (f).
7278 Do the transformation for a call with argument ARG. */
7281 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7283 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7285 if (!validate_arg (arg, REAL_TYPE))
7288 /* Integer rounding functions are idempotent. */
7289 if (fcode == builtin_mathfn_code (arg))
7292 /* If argument is already integer valued, and we don't need to worry
7293 about setting errno, there's no need to perform rounding. */
7294 if (! flag_errno_math && integer_valued_real_p (arg))
7299 tree arg0 = strip_float_extensions (arg);
7300 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7301 tree newtype = TREE_TYPE (arg0);
7304 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7305 && (decl = mathfn_built_in (newtype, fcode)))
7306 return fold_convert (ftype,
7307 build_call_expr (decl, 1,
7308 fold_convert (newtype, arg0)));
7313 /* FNDECL is assumed to be builtin which can narrow the FP type of
7314 the argument, for instance lround((double)f) -> lroundf (f).
7315 Do the transformation for a call with argument ARG. */
7318 fold_fixed_mathfn (tree fndecl, tree arg)
7320 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7322 if (!validate_arg (arg, REAL_TYPE))
7325 /* If argument is already integer valued, and we don't need to worry
7326 about setting errno, there's no need to perform rounding. */
7327 if (! flag_errno_math && integer_valued_real_p (arg))
7328 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7332 tree ftype = TREE_TYPE (arg);
7333 tree arg0 = strip_float_extensions (arg);
7334 tree newtype = TREE_TYPE (arg0);
7337 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7338 && (decl = mathfn_built_in (newtype, fcode)))
7339 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7342 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7343 sizeof (long long) == sizeof (long). */
7344 if (TYPE_PRECISION (long_long_integer_type_node)
7345 == TYPE_PRECISION (long_integer_type_node))
7347 tree newfn = NULL_TREE;
7350 CASE_FLT_FN (BUILT_IN_LLCEIL):
7351 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7354 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7355 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7358 CASE_FLT_FN (BUILT_IN_LLROUND):
7359 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7362 CASE_FLT_FN (BUILT_IN_LLRINT):
7363 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7372 tree newcall = build_call_expr(newfn, 1, arg);
7373 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7380 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7381 return type. Return NULL_TREE if no simplification can be made. */
7384 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7388 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7389 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7392 /* Calculate the result when the argument is a constant. */
7393 if (TREE_CODE (arg) == COMPLEX_CST
7394 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7398 if (TREE_CODE (arg) == COMPLEX_EXPR)
7400 tree real = TREE_OPERAND (arg, 0);
7401 tree imag = TREE_OPERAND (arg, 1);
7403 /* If either part is zero, cabs is fabs of the other. */
7404 if (real_zerop (real))
7405 return fold_build1 (ABS_EXPR, type, imag);
7406 if (real_zerop (imag))
7407 return fold_build1 (ABS_EXPR, type, real);
7409 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7410 if (flag_unsafe_math_optimizations
7411 && operand_equal_p (real, imag, OEP_PURE_SAME))
7413 const REAL_VALUE_TYPE sqrt2_trunc
7414 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
7416 return fold_build2 (MULT_EXPR, type,
7417 fold_build1 (ABS_EXPR, type, real),
7418 build_real (type, sqrt2_trunc));
7422 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7423 if (TREE_CODE (arg) == NEGATE_EXPR
7424 || TREE_CODE (arg) == CONJ_EXPR)
7425 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7427 /* Don't do this when optimizing for size. */
7428 if (flag_unsafe_math_optimizations
7429 && optimize && !optimize_size)
7431 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7433 if (sqrtfn != NULL_TREE)
7435 tree rpart, ipart, result;
7437 arg = builtin_save_expr (arg);
7439 rpart = fold_build1 (REALPART_EXPR, type, arg);
7440 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7442 rpart = builtin_save_expr (rpart);
7443 ipart = builtin_save_expr (ipart);
7445 result = fold_build2 (PLUS_EXPR, type,
7446 fold_build2 (MULT_EXPR, type,
7448 fold_build2 (MULT_EXPR, type,
7451 return build_call_expr (sqrtfn, 1, result);
7458 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7459 Return NULL_TREE if no simplification can be made. */
7462 fold_builtin_sqrt (tree arg, tree type)
7465 enum built_in_function fcode;
7468 if (!validate_arg (arg, REAL_TYPE))
7471 /* Calculate the result when the argument is a constant. */
7472 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7475 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7476 fcode = builtin_mathfn_code (arg);
7477 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7479 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7480 arg = fold_build2 (MULT_EXPR, type,
7481 CALL_EXPR_ARG (arg, 0),
7482 build_real (type, dconsthalf));
7483 return build_call_expr (expfn, 1, arg);
7486 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7487 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7489 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7493 tree arg0 = CALL_EXPR_ARG (arg, 0);
7495 /* The inner root was either sqrt or cbrt. */
7496 REAL_VALUE_TYPE dconstroot =
7497 BUILTIN_SQRT_P (fcode) ? dconsthalf : dconstthird;
7499 /* Adjust for the outer root. */
7500 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7501 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7502 tree_root = build_real (type, dconstroot);
7503 return build_call_expr (powfn, 2, arg0, tree_root);
7507 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7508 if (flag_unsafe_math_optimizations
7509 && (fcode == BUILT_IN_POW
7510 || fcode == BUILT_IN_POWF
7511 || fcode == BUILT_IN_POWL))
7513 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7514 tree arg0 = CALL_EXPR_ARG (arg, 0);
7515 tree arg1 = CALL_EXPR_ARG (arg, 1);
7517 if (!tree_expr_nonnegative_p (arg0))
7518 arg0 = build1 (ABS_EXPR, type, arg0);
7519 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7520 build_real (type, dconsthalf));
7521 return build_call_expr (powfn, 2, arg0, narg1);
7527 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7528 Return NULL_TREE if no simplification can be made. */
7531 fold_builtin_cbrt (tree arg, tree type)
7533 const enum built_in_function fcode = builtin_mathfn_code (arg);
7536 if (!validate_arg (arg, REAL_TYPE))
7539 /* Calculate the result when the argument is a constant. */
7540 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7543 if (flag_unsafe_math_optimizations)
7545 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7546 if (BUILTIN_EXPONENT_P (fcode))
7548 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7549 const REAL_VALUE_TYPE third_trunc =
7550 real_value_truncate (TYPE_MODE (type), dconstthird);
7551 arg = fold_build2 (MULT_EXPR, type,
7552 CALL_EXPR_ARG (arg, 0),
7553 build_real (type, third_trunc));
7554 return build_call_expr (expfn, 1, arg);
7557 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7558 if (BUILTIN_SQRT_P (fcode))
7560 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7564 tree arg0 = CALL_EXPR_ARG (arg, 0);
7566 REAL_VALUE_TYPE dconstroot = dconstthird;
7568 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7569 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7570 tree_root = build_real (type, dconstroot);
7571 return build_call_expr (powfn, 2, arg0, tree_root);
7575 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7576 if (BUILTIN_CBRT_P (fcode))
7578 tree arg0 = CALL_EXPR_ARG (arg, 0);
7579 if (tree_expr_nonnegative_p (arg0))
7581 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7586 REAL_VALUE_TYPE dconstroot;
7588 real_arithmetic (&dconstroot, MULT_EXPR, &dconstthird, &dconstthird);
7589 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7590 tree_root = build_real (type, dconstroot);
7591 return build_call_expr (powfn, 2, arg0, tree_root);
7596 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7597 if (fcode == BUILT_IN_POW
7598 || fcode == BUILT_IN_POWF
7599 || fcode == BUILT_IN_POWL)
7601 tree arg00 = CALL_EXPR_ARG (arg, 0);
7602 tree arg01 = CALL_EXPR_ARG (arg, 1);
7603 if (tree_expr_nonnegative_p (arg00))
7605 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7606 const REAL_VALUE_TYPE dconstroot
7607 = real_value_truncate (TYPE_MODE (type), dconstthird);
7608 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7609 build_real (type, dconstroot));
7610 return build_call_expr (powfn, 2, arg00, narg01);
7617 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7618 TYPE is the type of the return value. Return NULL_TREE if no
7619 simplification can be made. */
7622 fold_builtin_cos (tree arg, tree type, tree fndecl)
7626 if (!validate_arg (arg, REAL_TYPE))
7629 /* Calculate the result when the argument is a constant. */
7630 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7633 /* Optimize cos(-x) into cos (x). */
7634 if ((narg = fold_strip_sign_ops (arg)))
7635 return build_call_expr (fndecl, 1, narg);
7640 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7641 Return NULL_TREE if no simplification can be made. */
7644 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7646 if (validate_arg (arg, REAL_TYPE))
7650 /* Calculate the result when the argument is a constant. */
7651 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7654 /* Optimize cosh(-x) into cosh (x). */
7655 if ((narg = fold_strip_sign_ops (arg)))
7656 return build_call_expr (fndecl, 1, narg);
7662 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7663 Return NULL_TREE if no simplification can be made. */
7666 fold_builtin_tan (tree arg, tree type)
7668 enum built_in_function fcode;
7671 if (!validate_arg (arg, REAL_TYPE))
7674 /* Calculate the result when the argument is a constant. */
7675 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7678 /* Optimize tan(atan(x)) = x. */
7679 fcode = builtin_mathfn_code (arg);
7680 if (flag_unsafe_math_optimizations
7681 && (fcode == BUILT_IN_ATAN
7682 || fcode == BUILT_IN_ATANF
7683 || fcode == BUILT_IN_ATANL))
7684 return CALL_EXPR_ARG (arg, 0);
7689 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7690 NULL_TREE if no simplification can be made. */
7693 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7698 if (!validate_arg (arg0, REAL_TYPE)
7699 || !validate_arg (arg1, POINTER_TYPE)
7700 || !validate_arg (arg2, POINTER_TYPE))
7703 type = TREE_TYPE (arg0);
7705 /* Calculate the result when the argument is a constant. */
7706 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7709 /* Canonicalize sincos to cexpi. */
7710 if (!TARGET_C99_FUNCTIONS)
7712 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7716 call = build_call_expr (fn, 1, arg0);
7717 call = builtin_save_expr (call);
7719 return build2 (COMPOUND_EXPR, type,
7720 build2 (MODIFY_EXPR, void_type_node,
7721 build_fold_indirect_ref (arg1),
7722 build1 (IMAGPART_EXPR, type, call)),
7723 build2 (MODIFY_EXPR, void_type_node,
7724 build_fold_indirect_ref (arg2),
7725 build1 (REALPART_EXPR, type, call)));
7728 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7729 NULL_TREE if no simplification can be made. */
7732 fold_builtin_cexp (tree arg0, tree type)
7735 tree realp, imagp, ifn;
7737 if (!validate_arg (arg0, COMPLEX_TYPE))
7740 rtype = TREE_TYPE (TREE_TYPE (arg0));
7742 /* In case we can figure out the real part of arg0 and it is constant zero
7744 if (!TARGET_C99_FUNCTIONS)
7746 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7750 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7751 && real_zerop (realp))
7753 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7754 return build_call_expr (ifn, 1, narg);
7757 /* In case we can easily decompose real and imaginary parts split cexp
7758 to exp (r) * cexpi (i). */
7759 if (flag_unsafe_math_optimizations
7762 tree rfn, rcall, icall;
7764 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7768 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7772 icall = build_call_expr (ifn, 1, imagp);
7773 icall = builtin_save_expr (icall);
7774 rcall = build_call_expr (rfn, 1, realp);
7775 rcall = builtin_save_expr (rcall);
7776 return build2 (COMPLEX_EXPR, type,
7777 build2 (MULT_EXPR, rtype,
7779 build1 (REALPART_EXPR, rtype, icall)),
7780 build2 (MULT_EXPR, rtype,
7782 build1 (IMAGPART_EXPR, rtype, icall)));
7788 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7789 Return NULL_TREE if no simplification can be made. */
7792 fold_builtin_trunc (tree fndecl, tree arg)
7794 if (!validate_arg (arg, REAL_TYPE))
7797 /* Optimize trunc of constant value. */
7798 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7800 REAL_VALUE_TYPE r, x;
7801 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7803 x = TREE_REAL_CST (arg);
7804 real_trunc (&r, TYPE_MODE (type), &x);
7805 return build_real (type, r);
7808 return fold_trunc_transparent_mathfn (fndecl, arg);
7811 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7812 Return NULL_TREE if no simplification can be made. */
7815 fold_builtin_floor (tree fndecl, tree arg)
7817 if (!validate_arg (arg, REAL_TYPE))
7820 /* Optimize floor of constant value. */
7821 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7825 x = TREE_REAL_CST (arg);
7826 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7828 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7831 real_floor (&r, TYPE_MODE (type), &x);
7832 return build_real (type, r);
7836 /* Fold floor (x) where x is nonnegative to trunc (x). */
7837 if (tree_expr_nonnegative_p (arg))
7839 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7841 return build_call_expr (truncfn, 1, arg);
7844 return fold_trunc_transparent_mathfn (fndecl, arg);
7847 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7848 Return NULL_TREE if no simplification can be made. */
7851 fold_builtin_ceil (tree fndecl, tree arg)
7853 if (!validate_arg (arg, REAL_TYPE))
7856 /* Optimize ceil of constant value. */
7857 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7861 x = TREE_REAL_CST (arg);
7862 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7864 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7867 real_ceil (&r, TYPE_MODE (type), &x);
7868 return build_real (type, r);
7872 return fold_trunc_transparent_mathfn (fndecl, arg);
7875 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7876 Return NULL_TREE if no simplification can be made. */
7879 fold_builtin_round (tree fndecl, tree arg)
7881 if (!validate_arg (arg, REAL_TYPE))
7884 /* Optimize round of constant value. */
7885 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7889 x = TREE_REAL_CST (arg);
7890 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7892 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7895 real_round (&r, TYPE_MODE (type), &x);
7896 return build_real (type, r);
7900 return fold_trunc_transparent_mathfn (fndecl, arg);
7903 /* Fold function call to builtin lround, lroundf or lroundl (or the
7904 corresponding long long versions) and other rounding functions. ARG
7905 is the argument to the call. Return NULL_TREE if no simplification
7909 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7911 if (!validate_arg (arg, REAL_TYPE))
7914 /* Optimize lround of constant value. */
7915 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7917 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7919 if (real_isfinite (&x))
7921 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7922 tree ftype = TREE_TYPE (arg);
7923 unsigned HOST_WIDE_INT lo2;
7924 HOST_WIDE_INT hi, lo;
7927 switch (DECL_FUNCTION_CODE (fndecl))
7929 CASE_FLT_FN (BUILT_IN_LFLOOR):
7930 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7931 real_floor (&r, TYPE_MODE (ftype), &x);
7934 CASE_FLT_FN (BUILT_IN_LCEIL):
7935 CASE_FLT_FN (BUILT_IN_LLCEIL):
7936 real_ceil (&r, TYPE_MODE (ftype), &x);
7939 CASE_FLT_FN (BUILT_IN_LROUND):
7940 CASE_FLT_FN (BUILT_IN_LLROUND):
7941 real_round (&r, TYPE_MODE (ftype), &x);
7948 REAL_VALUE_TO_INT (&lo, &hi, r);
7949 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7950 return build_int_cst_wide (itype, lo2, hi);
7954 switch (DECL_FUNCTION_CODE (fndecl))
7956 CASE_FLT_FN (BUILT_IN_LFLOOR):
7957 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7958 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7959 if (tree_expr_nonnegative_p (arg))
7960 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
7966 return fold_fixed_mathfn (fndecl, arg);
7969 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7970 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7971 the argument to the call. Return NULL_TREE if no simplification can
7975 fold_builtin_bitop (tree fndecl, tree arg)
7977 if (!validate_arg (arg, INTEGER_TYPE))
7980 /* Optimize for constant argument. */
7981 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7983 HOST_WIDE_INT hi, width, result;
7984 unsigned HOST_WIDE_INT lo;
7987 type = TREE_TYPE (arg);
7988 width = TYPE_PRECISION (type);
7989 lo = TREE_INT_CST_LOW (arg);
7991 /* Clear all the bits that are beyond the type's precision. */
7992 if (width > HOST_BITS_PER_WIDE_INT)
7994 hi = TREE_INT_CST_HIGH (arg);
7995 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7996 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8001 if (width < HOST_BITS_PER_WIDE_INT)
8002 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8005 switch (DECL_FUNCTION_CODE (fndecl))
8007 CASE_INT_FN (BUILT_IN_FFS):
8009 result = exact_log2 (lo & -lo) + 1;
8011 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8016 CASE_INT_FN (BUILT_IN_CLZ):
8018 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8020 result = width - floor_log2 (lo) - 1;
8021 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8025 CASE_INT_FN (BUILT_IN_CTZ):
8027 result = exact_log2 (lo & -lo);
8029 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8030 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8034 CASE_INT_FN (BUILT_IN_POPCOUNT):
8037 result++, lo &= lo - 1;
8039 result++, hi &= hi - 1;
8042 CASE_INT_FN (BUILT_IN_PARITY):
8045 result++, lo &= lo - 1;
8047 result++, hi &= hi - 1;
8055 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8061 /* Fold function call to builtin_bswap and the long and long long
8062 variants. Return NULL_TREE if no simplification can be made. */
8064 fold_builtin_bswap (tree fndecl, tree arg)
8066 if (! validate_arg (arg, INTEGER_TYPE))
8069 /* Optimize constant value. */
8070 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8072 HOST_WIDE_INT hi, width, r_hi = 0;
8073 unsigned HOST_WIDE_INT lo, r_lo = 0;
8076 type = TREE_TYPE (arg);
8077 width = TYPE_PRECISION (type);
8078 lo = TREE_INT_CST_LOW (arg);
8079 hi = TREE_INT_CST_HIGH (arg);
8081 switch (DECL_FUNCTION_CODE (fndecl))
8083 case BUILT_IN_BSWAP32:
8084 case BUILT_IN_BSWAP64:
8088 for (s = 0; s < width; s += 8)
8090 int d = width - s - 8;
8091 unsigned HOST_WIDE_INT byte;
8093 if (s < HOST_BITS_PER_WIDE_INT)
8094 byte = (lo >> s) & 0xff;
8096 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8098 if (d < HOST_BITS_PER_WIDE_INT)
8101 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8111 if (width < HOST_BITS_PER_WIDE_INT)
8112 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8114 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8120 /* Return true if EXPR is the real constant contained in VALUE. */
8123 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8127 return ((TREE_CODE (expr) == REAL_CST
8128 && !TREE_OVERFLOW (expr)
8129 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8130 || (TREE_CODE (expr) == COMPLEX_CST
8131 && real_dconstp (TREE_REALPART (expr), value)
8132 && real_zerop (TREE_IMAGPART (expr))));
8135 /* A subroutine of fold_builtin to fold the various logarithmic
8136 functions. Return NULL_TREE if no simplification can me made.
8137 FUNC is the corresponding MPFR logarithm function. */
8140 fold_builtin_logarithm (tree fndecl, tree arg,
8141 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8143 if (validate_arg (arg, REAL_TYPE))
8145 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8147 const enum built_in_function fcode = builtin_mathfn_code (arg);
8149 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8150 instead we'll look for 'e' truncated to MODE. So only do
8151 this if flag_unsafe_math_optimizations is set. */
8152 if (flag_unsafe_math_optimizations && func == mpfr_log)
8154 const REAL_VALUE_TYPE e_truncated =
8155 real_value_truncate (TYPE_MODE (type), dconste);
8156 if (real_dconstp (arg, &e_truncated))
8157 return build_real (type, dconst1);
8160 /* Calculate the result when the argument is a constant. */
8161 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8164 /* Special case, optimize logN(expN(x)) = x. */
8165 if (flag_unsafe_math_optimizations
8166 && ((func == mpfr_log
8167 && (fcode == BUILT_IN_EXP
8168 || fcode == BUILT_IN_EXPF
8169 || fcode == BUILT_IN_EXPL))
8170 || (func == mpfr_log2
8171 && (fcode == BUILT_IN_EXP2
8172 || fcode == BUILT_IN_EXP2F
8173 || fcode == BUILT_IN_EXP2L))
8174 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8175 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8177 /* Optimize logN(func()) for various exponential functions. We
8178 want to determine the value "x" and the power "exponent" in
8179 order to transform logN(x**exponent) into exponent*logN(x). */
8180 if (flag_unsafe_math_optimizations)
8182 tree exponent = 0, x = 0;
8186 CASE_FLT_FN (BUILT_IN_EXP):
8187 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8188 x = build_real (type,
8189 real_value_truncate (TYPE_MODE (type), dconste));
8190 exponent = CALL_EXPR_ARG (arg, 0);
8192 CASE_FLT_FN (BUILT_IN_EXP2):
8193 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8194 x = build_real (type, dconst2);
8195 exponent = CALL_EXPR_ARG (arg, 0);
8197 CASE_FLT_FN (BUILT_IN_EXP10):
8198 CASE_FLT_FN (BUILT_IN_POW10):
8199 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8200 x = build_real (type, dconst10);
8201 exponent = CALL_EXPR_ARG (arg, 0);
8203 CASE_FLT_FN (BUILT_IN_SQRT):
8204 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8205 x = CALL_EXPR_ARG (arg, 0);
8206 exponent = build_real (type, dconsthalf);
8208 CASE_FLT_FN (BUILT_IN_CBRT):
8209 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8210 x = CALL_EXPR_ARG (arg, 0);
8211 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8214 CASE_FLT_FN (BUILT_IN_POW):
8215 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8216 x = CALL_EXPR_ARG (arg, 0);
8217 exponent = CALL_EXPR_ARG (arg, 1);
8223 /* Now perform the optimization. */
8226 tree logfn = build_call_expr (fndecl, 1, x);
8227 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8235 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8236 NULL_TREE if no simplification can be made. */
8239 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8241 tree res, narg0, narg1;
8243 if (!validate_arg (arg0, REAL_TYPE)
8244 || !validate_arg (arg1, REAL_TYPE))
8247 /* Calculate the result when the argument is a constant. */
8248 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8251 /* If either argument to hypot has a negate or abs, strip that off.
8252 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8253 narg0 = fold_strip_sign_ops (arg0);
8254 narg1 = fold_strip_sign_ops (arg1);
8257 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8258 narg1 ? narg1 : arg1);
8261 /* If either argument is zero, hypot is fabs of the other. */
8262 if (real_zerop (arg0))
8263 return fold_build1 (ABS_EXPR, type, arg1);
8264 else if (real_zerop (arg1))
8265 return fold_build1 (ABS_EXPR, type, arg0);
8267 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8268 if (flag_unsafe_math_optimizations
8269 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8271 const REAL_VALUE_TYPE sqrt2_trunc
8272 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
8273 return fold_build2 (MULT_EXPR, type,
8274 fold_build1 (ABS_EXPR, type, arg0),
8275 build_real (type, sqrt2_trunc));
8282 /* Fold a builtin function call to pow, powf, or powl. Return
8283 NULL_TREE if no simplification can be made. */
8285 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8289 if (!validate_arg (arg0, REAL_TYPE)
8290 || !validate_arg (arg1, REAL_TYPE))
8293 /* Calculate the result when the argument is a constant. */
8294 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8297 /* Optimize pow(1.0,y) = 1.0. */
8298 if (real_onep (arg0))
8299 return omit_one_operand (type, build_real (type, dconst1), arg1);
8301 if (TREE_CODE (arg1) == REAL_CST
8302 && !TREE_OVERFLOW (arg1))
8304 REAL_VALUE_TYPE cint;
8308 c = TREE_REAL_CST (arg1);
8310 /* Optimize pow(x,0.0) = 1.0. */
8311 if (REAL_VALUES_EQUAL (c, dconst0))
8312 return omit_one_operand (type, build_real (type, dconst1),
8315 /* Optimize pow(x,1.0) = x. */
8316 if (REAL_VALUES_EQUAL (c, dconst1))
8319 /* Optimize pow(x,-1.0) = 1.0/x. */
8320 if (REAL_VALUES_EQUAL (c, dconstm1))
8321 return fold_build2 (RDIV_EXPR, type,
8322 build_real (type, dconst1), arg0);
8324 /* Optimize pow(x,0.5) = sqrt(x). */
8325 if (flag_unsafe_math_optimizations
8326 && REAL_VALUES_EQUAL (c, dconsthalf))
8328 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8330 if (sqrtfn != NULL_TREE)
8331 return build_call_expr (sqrtfn, 1, arg0);
8334 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8335 if (flag_unsafe_math_optimizations)
8337 const REAL_VALUE_TYPE dconstroot
8338 = real_value_truncate (TYPE_MODE (type), dconstthird);
8340 if (REAL_VALUES_EQUAL (c, dconstroot))
8342 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8343 if (cbrtfn != NULL_TREE)
8344 return build_call_expr (cbrtfn, 1, arg0);
8348 /* Check for an integer exponent. */
8349 n = real_to_integer (&c);
8350 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8351 if (real_identical (&c, &cint))
8353 /* Attempt to evaluate pow at compile-time. */
8354 if (TREE_CODE (arg0) == REAL_CST
8355 && !TREE_OVERFLOW (arg0))
8360 x = TREE_REAL_CST (arg0);
8361 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8362 if (flag_unsafe_math_optimizations || !inexact)
8363 return build_real (type, x);
8366 /* Strip sign ops from even integer powers. */
8367 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8369 tree narg0 = fold_strip_sign_ops (arg0);
8371 return build_call_expr (fndecl, 2, narg0, arg1);
8376 if (flag_unsafe_math_optimizations)
8378 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8380 /* Optimize pow(expN(x),y) = expN(x*y). */
8381 if (BUILTIN_EXPONENT_P (fcode))
8383 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8384 tree arg = CALL_EXPR_ARG (arg0, 0);
8385 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8386 return build_call_expr (expfn, 1, arg);
8389 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8390 if (BUILTIN_SQRT_P (fcode))
8392 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8393 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8394 build_real (type, dconsthalf));
8395 return build_call_expr (fndecl, 2, narg0, narg1);
8398 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8399 if (BUILTIN_CBRT_P (fcode))
8401 tree arg = CALL_EXPR_ARG (arg0, 0);
8402 if (tree_expr_nonnegative_p (arg))
8404 const REAL_VALUE_TYPE dconstroot
8405 = real_value_truncate (TYPE_MODE (type), dconstthird);
8406 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8407 build_real (type, dconstroot));
8408 return build_call_expr (fndecl, 2, arg, narg1);
8412 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8413 if (fcode == BUILT_IN_POW
8414 || fcode == BUILT_IN_POWF
8415 || fcode == BUILT_IN_POWL)
8417 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8418 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8419 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8420 return build_call_expr (fndecl, 2, arg00, narg1);
8427 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8428 Return NULL_TREE if no simplification can be made. */
8430 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8431 tree arg0, tree arg1, tree type)
8433 if (!validate_arg (arg0, REAL_TYPE)
8434 || !validate_arg (arg1, INTEGER_TYPE))
8437 /* Optimize pow(1.0,y) = 1.0. */
8438 if (real_onep (arg0))
8439 return omit_one_operand (type, build_real (type, dconst1), arg1);
8441 if (host_integerp (arg1, 0))
8443 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8445 /* Evaluate powi at compile-time. */
8446 if (TREE_CODE (arg0) == REAL_CST
8447 && !TREE_OVERFLOW (arg0))
8450 x = TREE_REAL_CST (arg0);
8451 real_powi (&x, TYPE_MODE (type), &x, c);
8452 return build_real (type, x);
8455 /* Optimize pow(x,0) = 1.0. */
8457 return omit_one_operand (type, build_real (type, dconst1),
8460 /* Optimize pow(x,1) = x. */
8464 /* Optimize pow(x,-1) = 1.0/x. */
8466 return fold_build2 (RDIV_EXPR, type,
8467 build_real (type, dconst1), arg0);
8473 /* A subroutine of fold_builtin to fold the various exponent
8474 functions. Return NULL_TREE if no simplification can be made.
8475 FUNC is the corresponding MPFR exponent function. */
8478 fold_builtin_exponent (tree fndecl, tree arg,
8479 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8481 if (validate_arg (arg, REAL_TYPE))
8483 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8486 /* Calculate the result when the argument is a constant. */
8487 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8490 /* Optimize expN(logN(x)) = x. */
8491 if (flag_unsafe_math_optimizations)
8493 const enum built_in_function fcode = builtin_mathfn_code (arg);
8495 if ((func == mpfr_exp
8496 && (fcode == BUILT_IN_LOG
8497 || fcode == BUILT_IN_LOGF
8498 || fcode == BUILT_IN_LOGL))
8499 || (func == mpfr_exp2
8500 && (fcode == BUILT_IN_LOG2
8501 || fcode == BUILT_IN_LOG2F
8502 || fcode == BUILT_IN_LOG2L))
8503 || (func == mpfr_exp10
8504 && (fcode == BUILT_IN_LOG10
8505 || fcode == BUILT_IN_LOG10F
8506 || fcode == BUILT_IN_LOG10L)))
8507 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8514 /* Return true if VAR is a VAR_DECL or a component thereof. */
8517 var_decl_component_p (tree var)
8520 while (handled_component_p (inner))
8521 inner = TREE_OPERAND (inner, 0);
8522 return SSA_VAR_P (inner);
8525 /* Fold function call to builtin memset. Return
8526 NULL_TREE if no simplification can be made. */
8529 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8532 unsigned HOST_WIDE_INT length, cval;
8534 if (! validate_arg (dest, POINTER_TYPE)
8535 || ! validate_arg (c, INTEGER_TYPE)
8536 || ! validate_arg (len, INTEGER_TYPE))
8539 if (! host_integerp (len, 1))
8542 /* If the LEN parameter is zero, return DEST. */
8543 if (integer_zerop (len))
8544 return omit_one_operand (type, dest, c);
8546 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8551 if (TREE_CODE (var) != ADDR_EXPR)
8554 var = TREE_OPERAND (var, 0);
8555 if (TREE_THIS_VOLATILE (var))
8558 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8559 && !POINTER_TYPE_P (TREE_TYPE (var)))
8562 if (! var_decl_component_p (var))
8565 length = tree_low_cst (len, 1);
8566 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8567 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8571 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8574 if (integer_zerop (c))
8578 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8581 cval = tree_low_cst (c, 1);
8585 cval |= (cval << 31) << 1;
8588 ret = build_int_cst_type (TREE_TYPE (var), cval);
8589 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8593 return omit_one_operand (type, dest, ret);
8596 /* Fold function call to builtin memset. Return
8597 NULL_TREE if no simplification can be made. */
8600 fold_builtin_bzero (tree dest, tree size, bool ignore)
8602 if (! validate_arg (dest, POINTER_TYPE)
8603 || ! validate_arg (size, INTEGER_TYPE))
8609 /* New argument list transforming bzero(ptr x, int y) to
8610 memset(ptr x, int 0, size_t y). This is done this way
8611 so that if it isn't expanded inline, we fallback to
8612 calling bzero instead of memset. */
8614 return fold_builtin_memset (dest, integer_zero_node,
8615 fold_convert (sizetype, size),
8616 void_type_node, ignore);
8619 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8620 NULL_TREE if no simplification can be made.
8621 If ENDP is 0, return DEST (like memcpy).
8622 If ENDP is 1, return DEST+LEN (like mempcpy).
8623 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8624 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8628 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8630 tree destvar, srcvar, expr;
8632 if (! validate_arg (dest, POINTER_TYPE)
8633 || ! validate_arg (src, POINTER_TYPE)
8634 || ! validate_arg (len, INTEGER_TYPE))
8637 /* If the LEN parameter is zero, return DEST. */
8638 if (integer_zerop (len))
8639 return omit_one_operand (type, dest, src);
8641 /* If SRC and DEST are the same (and not volatile), return
8642 DEST{,+LEN,+LEN-1}. */
8643 if (operand_equal_p (src, dest, 0))
8647 tree srctype, desttype;
8650 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8651 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8653 /* Both DEST and SRC must be pointer types.
8654 ??? This is what old code did. Is the testing for pointer types
8657 If either SRC is readonly or length is 1, we can use memcpy. */
8658 if (dest_align && src_align
8659 && (readonly_data_expr (src)
8660 || (host_integerp (len, 1)
8661 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8662 tree_low_cst (len, 1)))))
8664 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8667 return build_call_expr (fn, 3, dest, src, len);
8672 if (!host_integerp (len, 0))
8675 This logic lose for arguments like (type *)malloc (sizeof (type)),
8676 since we strip the casts of up to VOID return value from malloc.
8677 Perhaps we ought to inherit type from non-VOID argument here? */
8680 srctype = TREE_TYPE (TREE_TYPE (src));
8681 desttype = TREE_TYPE (TREE_TYPE (dest));
8682 if (!srctype || !desttype
8683 || !TYPE_SIZE_UNIT (srctype)
8684 || !TYPE_SIZE_UNIT (desttype)
8685 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8686 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8687 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8688 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8691 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8692 < (int) TYPE_ALIGN (desttype)
8693 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8694 < (int) TYPE_ALIGN (srctype)))
8698 dest = builtin_save_expr (dest);
8700 srcvar = build_fold_indirect_ref (src);
8701 if (TREE_THIS_VOLATILE (srcvar))
8703 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8705 /* With memcpy, it is possible to bypass aliasing rules, so without
8706 this check i. e. execute/20060930-2.c would be misoptimized, because
8707 it use conflicting alias set to hold argument for the memcpy call.
8708 This check is probably unnecesary with -fno-strict-aliasing.
8709 Similarly for destvar. See also PR29286. */
8710 if (!var_decl_component_p (srcvar)
8711 /* Accept: memcpy (*char_var, "test", 1); that simplify
8713 || is_gimple_min_invariant (srcvar)
8714 || readonly_data_expr (src))
8717 destvar = build_fold_indirect_ref (dest);
8718 if (TREE_THIS_VOLATILE (destvar))
8720 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8722 if (!var_decl_component_p (destvar))
8725 if (srctype == desttype
8726 || (gimple_in_ssa_p (cfun)
8727 && useless_type_conversion_p (desttype, srctype)))
8729 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8730 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8731 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8732 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8733 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8735 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8736 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8742 if (endp == 0 || endp == 3)
8743 return omit_one_operand (type, dest, expr);
8749 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8752 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8753 dest = fold_convert (type, dest);
8755 dest = omit_one_operand (type, dest, expr);
8759 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8760 If LEN is not NULL, it represents the length of the string to be
8761 copied. Return NULL_TREE if no simplification can be made. */
8764 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8768 if (!validate_arg (dest, POINTER_TYPE)
8769 || !validate_arg (src, POINTER_TYPE))
8772 /* If SRC and DEST are the same (and not volatile), return DEST. */
8773 if (operand_equal_p (src, dest, 0))
8774 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8779 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8785 len = c_strlen (src, 1);
8786 if (! len || TREE_SIDE_EFFECTS (len))
8790 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8791 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8792 build_call_expr (fn, 3, dest, src, len));
8795 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8796 If SLEN is not NULL, it represents the length of the source string.
8797 Return NULL_TREE if no simplification can be made. */
8800 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8804 if (!validate_arg (dest, POINTER_TYPE)
8805 || !validate_arg (src, POINTER_TYPE)
8806 || !validate_arg (len, INTEGER_TYPE))
8809 /* If the LEN parameter is zero, return DEST. */
8810 if (integer_zerop (len))
8811 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8813 /* We can't compare slen with len as constants below if len is not a
8815 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8819 slen = c_strlen (src, 1);
8821 /* Now, we must be passed a constant src ptr parameter. */
8822 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8825 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8827 /* We do not support simplification of this case, though we do
8828 support it when expanding trees into RTL. */
8829 /* FIXME: generate a call to __builtin_memset. */
8830 if (tree_int_cst_lt (slen, len))
8833 /* OK transform into builtin memcpy. */
8834 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8837 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8838 build_call_expr (fn, 3, dest, src, len));
8841 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8842 arguments to the call, and TYPE is its return type.
8843 Return NULL_TREE if no simplification can be made. */
8846 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8848 if (!validate_arg (arg1, POINTER_TYPE)
8849 || !validate_arg (arg2, INTEGER_TYPE)
8850 || !validate_arg (len, INTEGER_TYPE))
8856 if (TREE_CODE (arg2) != INTEGER_CST
8857 || !host_integerp (len, 1))
8860 p1 = c_getstr (arg1);
8861 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8867 if (target_char_cast (arg2, &c))
8870 r = memchr (p1, c, tree_low_cst (len, 1));
8873 return build_int_cst (TREE_TYPE (arg1), 0);
8875 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8877 return fold_convert (type, tem);
8883 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8884 Return NULL_TREE if no simplification can be made. */
8887 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8889 const char *p1, *p2;
8891 if (!validate_arg (arg1, POINTER_TYPE)
8892 || !validate_arg (arg2, POINTER_TYPE)
8893 || !validate_arg (len, INTEGER_TYPE))
8896 /* If the LEN parameter is zero, return zero. */
8897 if (integer_zerop (len))
8898 return omit_two_operands (integer_type_node, integer_zero_node,
8901 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8902 if (operand_equal_p (arg1, arg2, 0))
8903 return omit_one_operand (integer_type_node, integer_zero_node, len);
8905 p1 = c_getstr (arg1);
8906 p2 = c_getstr (arg2);
8908 /* If all arguments are constant, and the value of len is not greater
8909 than the lengths of arg1 and arg2, evaluate at compile-time. */
8910 if (host_integerp (len, 1) && p1 && p2
8911 && compare_tree_int (len, strlen (p1) + 1) <= 0
8912 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8914 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8917 return integer_one_node;
8919 return integer_minus_one_node;
8921 return integer_zero_node;
8924 /* If len parameter is one, return an expression corresponding to
8925 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8926 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8928 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8929 tree cst_uchar_ptr_node
8930 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8932 tree ind1 = fold_convert (integer_type_node,
8933 build1 (INDIRECT_REF, cst_uchar_node,
8934 fold_convert (cst_uchar_ptr_node,
8936 tree ind2 = fold_convert (integer_type_node,
8937 build1 (INDIRECT_REF, cst_uchar_node,
8938 fold_convert (cst_uchar_ptr_node,
8940 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
8946 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8947 Return NULL_TREE if no simplification can be made. */
8950 fold_builtin_strcmp (tree arg1, tree arg2)
8952 const char *p1, *p2;
8954 if (!validate_arg (arg1, POINTER_TYPE)
8955 || !validate_arg (arg2, POINTER_TYPE))
8958 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8959 if (operand_equal_p (arg1, arg2, 0))
8960 return integer_zero_node;
8962 p1 = c_getstr (arg1);
8963 p2 = c_getstr (arg2);
8967 const int i = strcmp (p1, p2);
8969 return integer_minus_one_node;
8971 return integer_one_node;
8973 return integer_zero_node;
8976 /* If the second arg is "", return *(const unsigned char*)arg1. */
8977 if (p2 && *p2 == '\0')
8979 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8980 tree cst_uchar_ptr_node
8981 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8983 return fold_convert (integer_type_node,
8984 build1 (INDIRECT_REF, cst_uchar_node,
8985 fold_convert (cst_uchar_ptr_node,
8989 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8990 if (p1 && *p1 == '\0')
8992 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8993 tree cst_uchar_ptr_node
8994 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8996 tree temp = fold_convert (integer_type_node,
8997 build1 (INDIRECT_REF, cst_uchar_node,
8998 fold_convert (cst_uchar_ptr_node,
9000 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9006 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9007 Return NULL_TREE if no simplification can be made. */
9010 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9012 const char *p1, *p2;
9014 if (!validate_arg (arg1, POINTER_TYPE)
9015 || !validate_arg (arg2, POINTER_TYPE)
9016 || !validate_arg (len, INTEGER_TYPE))
9019 /* If the LEN parameter is zero, return zero. */
9020 if (integer_zerop (len))
9021 return omit_two_operands (integer_type_node, integer_zero_node,
9024 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9025 if (operand_equal_p (arg1, arg2, 0))
9026 return omit_one_operand (integer_type_node, integer_zero_node, len);
9028 p1 = c_getstr (arg1);
9029 p2 = c_getstr (arg2);
9031 if (host_integerp (len, 1) && p1 && p2)
9033 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9035 return integer_one_node;
9037 return integer_minus_one_node;
9039 return integer_zero_node;
9042 /* If the second arg is "", and the length is greater than zero,
9043 return *(const unsigned char*)arg1. */
9044 if (p2 && *p2 == '\0'
9045 && TREE_CODE (len) == INTEGER_CST
9046 && tree_int_cst_sgn (len) == 1)
9048 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9049 tree cst_uchar_ptr_node
9050 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9052 return fold_convert (integer_type_node,
9053 build1 (INDIRECT_REF, cst_uchar_node,
9054 fold_convert (cst_uchar_ptr_node,
9058 /* If the first arg is "", and the length is greater than zero,
9059 return -*(const unsigned char*)arg2. */
9060 if (p1 && *p1 == '\0'
9061 && TREE_CODE (len) == INTEGER_CST
9062 && tree_int_cst_sgn (len) == 1)
9064 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9065 tree cst_uchar_ptr_node
9066 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9068 tree temp = fold_convert (integer_type_node,
9069 build1 (INDIRECT_REF, cst_uchar_node,
9070 fold_convert (cst_uchar_ptr_node,
9072 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9075 /* If len parameter is one, return an expression corresponding to
9076 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9077 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9079 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9080 tree cst_uchar_ptr_node
9081 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9083 tree ind1 = fold_convert (integer_type_node,
9084 build1 (INDIRECT_REF, cst_uchar_node,
9085 fold_convert (cst_uchar_ptr_node,
9087 tree ind2 = fold_convert (integer_type_node,
9088 build1 (INDIRECT_REF, cst_uchar_node,
9089 fold_convert (cst_uchar_ptr_node,
9091 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9097 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9098 ARG. Return NULL_TREE if no simplification can be made. */
9101 fold_builtin_signbit (tree arg, tree type)
9105 if (!validate_arg (arg, REAL_TYPE))
9108 /* If ARG is a compile-time constant, determine the result. */
9109 if (TREE_CODE (arg) == REAL_CST
9110 && !TREE_OVERFLOW (arg))
9114 c = TREE_REAL_CST (arg);
9115 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9116 return fold_convert (type, temp);
9119 /* If ARG is non-negative, the result is always zero. */
9120 if (tree_expr_nonnegative_p (arg))
9121 return omit_one_operand (type, integer_zero_node, arg);
9123 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9124 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9125 return fold_build2 (LT_EXPR, type, arg,
9126 build_real (TREE_TYPE (arg), dconst0));
9131 /* Fold function call to builtin copysign, copysignf or copysignl with
9132 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9136 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9140 if (!validate_arg (arg1, REAL_TYPE)
9141 || !validate_arg (arg2, REAL_TYPE))
9144 /* copysign(X,X) is X. */
9145 if (operand_equal_p (arg1, arg2, 0))
9146 return fold_convert (type, arg1);
9148 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9149 if (TREE_CODE (arg1) == REAL_CST
9150 && TREE_CODE (arg2) == REAL_CST
9151 && !TREE_OVERFLOW (arg1)
9152 && !TREE_OVERFLOW (arg2))
9154 REAL_VALUE_TYPE c1, c2;
9156 c1 = TREE_REAL_CST (arg1);
9157 c2 = TREE_REAL_CST (arg2);
9158 /* c1.sign := c2.sign. */
9159 real_copysign (&c1, &c2);
9160 return build_real (type, c1);
9163 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9164 Remember to evaluate Y for side-effects. */
9165 if (tree_expr_nonnegative_p (arg2))
9166 return omit_one_operand (type,
9167 fold_build1 (ABS_EXPR, type, arg1),
9170 /* Strip sign changing operations for the first argument. */
9171 tem = fold_strip_sign_ops (arg1);
9173 return build_call_expr (fndecl, 2, tem, arg2);
9178 /* Fold a call to builtin isascii with argument ARG. */
9181 fold_builtin_isascii (tree arg)
9183 if (!validate_arg (arg, INTEGER_TYPE))
9187 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9188 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9189 build_int_cst (NULL_TREE,
9190 ~ (unsigned HOST_WIDE_INT) 0x7f));
9191 return fold_build2 (EQ_EXPR, integer_type_node,
9192 arg, integer_zero_node);
9196 /* Fold a call to builtin toascii with argument ARG. */
9199 fold_builtin_toascii (tree arg)
9201 if (!validate_arg (arg, INTEGER_TYPE))
9204 /* Transform toascii(c) -> (c & 0x7f). */
9205 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9206 build_int_cst (NULL_TREE, 0x7f));
9209 /* Fold a call to builtin isdigit with argument ARG. */
9212 fold_builtin_isdigit (tree arg)
9214 if (!validate_arg (arg, INTEGER_TYPE))
9218 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9219 /* According to the C standard, isdigit is unaffected by locale.
9220 However, it definitely is affected by the target character set. */
9221 unsigned HOST_WIDE_INT target_digit0
9222 = lang_hooks.to_target_charset ('0');
9224 if (target_digit0 == 0)
9227 arg = fold_convert (unsigned_type_node, arg);
9228 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9229 build_int_cst (unsigned_type_node, target_digit0));
9230 return fold_build2 (LE_EXPR, integer_type_node, arg,
9231 build_int_cst (unsigned_type_node, 9));
9235 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9238 fold_builtin_fabs (tree arg, tree type)
9240 if (!validate_arg (arg, REAL_TYPE))
9243 arg = fold_convert (type, arg);
9244 if (TREE_CODE (arg) == REAL_CST)
9245 return fold_abs_const (arg, type);
9246 return fold_build1 (ABS_EXPR, type, arg);
9249 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9252 fold_builtin_abs (tree arg, tree type)
9254 if (!validate_arg (arg, INTEGER_TYPE))
9257 arg = fold_convert (type, arg);
9258 if (TREE_CODE (arg) == INTEGER_CST)
9259 return fold_abs_const (arg, type);
9260 return fold_build1 (ABS_EXPR, type, arg);
9263 /* Fold a call to builtin fmin or fmax. */
9266 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9268 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9270 /* Calculate the result when the argument is a constant. */
9271 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9276 /* If either argument is NaN, return the other one. Avoid the
9277 transformation if we get (and honor) a signalling NaN. Using
9278 omit_one_operand() ensures we create a non-lvalue. */
9279 if (TREE_CODE (arg0) == REAL_CST
9280 && real_isnan (&TREE_REAL_CST (arg0))
9281 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9282 || ! TREE_REAL_CST (arg0).signalling))
9283 return omit_one_operand (type, arg1, arg0);
9284 if (TREE_CODE (arg1) == REAL_CST
9285 && real_isnan (&TREE_REAL_CST (arg1))
9286 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9287 || ! TREE_REAL_CST (arg1).signalling))
9288 return omit_one_operand (type, arg0, arg1);
9290 /* Transform fmin/fmax(x,x) -> x. */
9291 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9292 return omit_one_operand (type, arg0, arg1);
9294 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9295 functions to return the numeric arg if the other one is NaN.
9296 These tree codes don't honor that, so only transform if
9297 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9298 handled, so we don't have to worry about it either. */
9299 if (flag_finite_math_only)
9300 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9301 fold_convert (type, arg0),
9302 fold_convert (type, arg1));
9307 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9310 fold_builtin_carg (tree arg, tree type)
9312 if (validate_arg (arg, COMPLEX_TYPE))
9314 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9318 tree new_arg = builtin_save_expr (arg);
9319 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9320 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9321 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9328 /* Fold a call to builtin logb/ilogb. */
9331 fold_builtin_logb (tree arg, tree rettype)
9333 if (! validate_arg (arg, REAL_TYPE))
9338 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9340 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9346 /* If arg is Inf or NaN and we're logb, return it. */
9347 if (TREE_CODE (rettype) == REAL_TYPE)
9348 return fold_convert (rettype, arg);
9349 /* Fall through... */
9351 /* Zero may set errno and/or raise an exception for logb, also
9352 for ilogb we don't know FP_ILOGB0. */
9355 /* For normal numbers, proceed iff radix == 2. In GCC,
9356 normalized significands are in the range [0.5, 1.0). We
9357 want the exponent as if they were [1.0, 2.0) so get the
9358 exponent and subtract 1. */
9359 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9360 return fold_convert (rettype, build_int_cst (NULL_TREE,
9361 REAL_EXP (value)-1));
9369 /* Fold a call to builtin significand, if radix == 2. */
9372 fold_builtin_significand (tree arg, tree rettype)
9374 if (! validate_arg (arg, REAL_TYPE))
9379 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9381 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9388 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9389 return fold_convert (rettype, arg);
9391 /* For normal numbers, proceed iff radix == 2. */
9392 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9394 REAL_VALUE_TYPE result = *value;
9395 /* In GCC, normalized significands are in the range [0.5,
9396 1.0). We want them to be [1.0, 2.0) so set the
9398 SET_REAL_EXP (&result, 1);
9399 return build_real (rettype, result);
9408 /* Fold a call to builtin frexp, we can assume the base is 2. */
9411 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9413 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9418 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9421 arg1 = build_fold_indirect_ref (arg1);
9423 /* Proceed if a valid pointer type was passed in. */
9424 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9426 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9432 /* For +-0, return (*exp = 0, +-0). */
9433 exp = integer_zero_node;
9438 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9439 return omit_one_operand (rettype, arg0, arg1);
9442 /* Since the frexp function always expects base 2, and in
9443 GCC normalized significands are already in the range
9444 [0.5, 1.0), we have exactly what frexp wants. */
9445 REAL_VALUE_TYPE frac_rvt = *value;
9446 SET_REAL_EXP (&frac_rvt, 0);
9447 frac = build_real (rettype, frac_rvt);
9448 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9455 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9456 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9457 TREE_SIDE_EFFECTS (arg1) = 1;
9458 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9464 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9465 then we can assume the base is two. If it's false, then we have to
9466 check the mode of the TYPE parameter in certain cases. */
9469 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9471 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9476 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9477 if (real_zerop (arg0) || integer_zerop (arg1)
9478 || (TREE_CODE (arg0) == REAL_CST
9479 && !real_isfinite (&TREE_REAL_CST (arg0))))
9480 return omit_one_operand (type, arg0, arg1);
9482 /* If both arguments are constant, then try to evaluate it. */
9483 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9484 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9485 && host_integerp (arg1, 0))
9487 /* Bound the maximum adjustment to twice the range of the
9488 mode's valid exponents. Use abs to ensure the range is
9489 positive as a sanity check. */
9490 const long max_exp_adj = 2 *
9491 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9492 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9494 /* Get the user-requested adjustment. */
9495 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9497 /* The requested adjustment must be inside this range. This
9498 is a preliminary cap to avoid things like overflow, we
9499 may still fail to compute the result for other reasons. */
9500 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9502 REAL_VALUE_TYPE initial_result;
9504 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9506 /* Ensure we didn't overflow. */
9507 if (! real_isinf (&initial_result))
9509 const REAL_VALUE_TYPE trunc_result
9510 = real_value_truncate (TYPE_MODE (type), initial_result);
9512 /* Only proceed if the target mode can hold the
9514 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9515 return build_real (type, trunc_result);
9524 /* Fold a call to builtin modf. */
9527 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9529 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9534 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9537 arg1 = build_fold_indirect_ref (arg1);
9539 /* Proceed if a valid pointer type was passed in. */
9540 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9542 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9543 REAL_VALUE_TYPE trunc, frac;
9549 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9550 trunc = frac = *value;
9553 /* For +-Inf, return (*arg1 = arg0, +-0). */
9555 frac.sign = value->sign;
9559 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9560 real_trunc (&trunc, VOIDmode, value);
9561 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9562 /* If the original number was negative and already
9563 integral, then the fractional part is -0.0. */
9564 if (value->sign && frac.cl == rvc_zero)
9565 frac.sign = value->sign;
9569 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9570 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9571 build_real (rettype, trunc));
9572 TREE_SIDE_EFFECTS (arg1) = 1;
9573 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9574 build_real (rettype, frac));
9580 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9581 ARG is the argument for the call. */
9584 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9586 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9589 if (!validate_arg (arg, REAL_TYPE))
9591 error ("non-floating-point argument to function %qs",
9592 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9593 return error_mark_node;
9596 switch (builtin_index)
9598 case BUILT_IN_ISINF:
9599 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9600 return omit_one_operand (type, integer_zero_node, arg);
9602 if (TREE_CODE (arg) == REAL_CST)
9604 r = TREE_REAL_CST (arg);
9605 if (real_isinf (&r))
9606 return real_compare (GT_EXPR, &r, &dconst0)
9607 ? integer_one_node : integer_minus_one_node;
9609 return integer_zero_node;
9614 case BUILT_IN_ISFINITE:
9615 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9616 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9617 return omit_one_operand (type, integer_one_node, arg);
9619 if (TREE_CODE (arg) == REAL_CST)
9621 r = TREE_REAL_CST (arg);
9622 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9627 case BUILT_IN_ISNAN:
9628 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9629 return omit_one_operand (type, integer_zero_node, arg);
9631 if (TREE_CODE (arg) == REAL_CST)
9633 r = TREE_REAL_CST (arg);
9634 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9637 arg = builtin_save_expr (arg);
9638 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9645 /* Fold a call to an unordered comparison function such as
9646 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9647 being called and ARG0 and ARG1 are the arguments for the call.
9648 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9649 the opposite of the desired result. UNORDERED_CODE is used
9650 for modes that can hold NaNs and ORDERED_CODE is used for
9654 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9655 enum tree_code unordered_code,
9656 enum tree_code ordered_code)
9658 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9659 enum tree_code code;
9661 enum tree_code code0, code1;
9662 tree cmp_type = NULL_TREE;
9664 type0 = TREE_TYPE (arg0);
9665 type1 = TREE_TYPE (arg1);
9667 code0 = TREE_CODE (type0);
9668 code1 = TREE_CODE (type1);
9670 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9671 /* Choose the wider of two real types. */
9672 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9674 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9676 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9680 error ("non-floating-point argument to function %qs",
9681 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9682 return error_mark_node;
9685 arg0 = fold_convert (cmp_type, arg0);
9686 arg1 = fold_convert (cmp_type, arg1);
9688 if (unordered_code == UNORDERED_EXPR)
9690 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9691 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9692 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9695 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9697 return fold_build1 (TRUTH_NOT_EXPR, type,
9698 fold_build2 (code, type, arg0, arg1));
9701 /* Fold a call to built-in function FNDECL with 0 arguments.
9702 IGNORE is true if the result of the function call is ignored. This
9703 function returns NULL_TREE if no simplification was possible. */
9706 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9708 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9709 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9712 CASE_FLT_FN (BUILT_IN_INF):
9713 case BUILT_IN_INFD32:
9714 case BUILT_IN_INFD64:
9715 case BUILT_IN_INFD128:
9716 return fold_builtin_inf (type, true);
9718 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9719 return fold_builtin_inf (type, false);
9721 case BUILT_IN_CLASSIFY_TYPE:
9722 return fold_builtin_classify_type (NULL_TREE);
9730 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9731 IGNORE is true if the result of the function call is ignored. This
9732 function returns NULL_TREE if no simplification was possible. */
9735 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9737 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9738 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9742 case BUILT_IN_CONSTANT_P:
9744 tree val = fold_builtin_constant_p (arg0);
9746 /* Gimplification will pull the CALL_EXPR for the builtin out of
9747 an if condition. When not optimizing, we'll not CSE it back.
9748 To avoid link error types of regressions, return false now. */
9749 if (!val && !optimize)
9750 val = integer_zero_node;
9755 case BUILT_IN_CLASSIFY_TYPE:
9756 return fold_builtin_classify_type (arg0);
9758 case BUILT_IN_STRLEN:
9759 return fold_builtin_strlen (arg0);
9761 CASE_FLT_FN (BUILT_IN_FABS):
9762 return fold_builtin_fabs (arg0, type);
9766 case BUILT_IN_LLABS:
9767 case BUILT_IN_IMAXABS:
9768 return fold_builtin_abs (arg0, type);
9770 CASE_FLT_FN (BUILT_IN_CONJ):
9771 if (validate_arg (arg0, COMPLEX_TYPE))
9772 return fold_build1 (CONJ_EXPR, type, arg0);
9775 CASE_FLT_FN (BUILT_IN_CREAL):
9776 if (validate_arg (arg0, COMPLEX_TYPE))
9777 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
9780 CASE_FLT_FN (BUILT_IN_CIMAG):
9781 if (validate_arg (arg0, COMPLEX_TYPE))
9782 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9785 CASE_FLT_FN (BUILT_IN_CCOS):
9786 CASE_FLT_FN (BUILT_IN_CCOSH):
9787 /* These functions are "even", i.e. f(x) == f(-x). */
9788 if (validate_arg (arg0, COMPLEX_TYPE))
9790 tree narg = fold_strip_sign_ops (arg0);
9792 return build_call_expr (fndecl, 1, narg);
9796 CASE_FLT_FN (BUILT_IN_CABS):
9797 return fold_builtin_cabs (arg0, type, fndecl);
9799 CASE_FLT_FN (BUILT_IN_CARG):
9800 return fold_builtin_carg (arg0, type);
9802 CASE_FLT_FN (BUILT_IN_SQRT):
9803 return fold_builtin_sqrt (arg0, type);
9805 CASE_FLT_FN (BUILT_IN_CBRT):
9806 return fold_builtin_cbrt (arg0, type);
9808 CASE_FLT_FN (BUILT_IN_ASIN):
9809 if (validate_arg (arg0, REAL_TYPE))
9810 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9811 &dconstm1, &dconst1, true);
9814 CASE_FLT_FN (BUILT_IN_ACOS):
9815 if (validate_arg (arg0, REAL_TYPE))
9816 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9817 &dconstm1, &dconst1, true);
9820 CASE_FLT_FN (BUILT_IN_ATAN):
9821 if (validate_arg (arg0, REAL_TYPE))
9822 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9825 CASE_FLT_FN (BUILT_IN_ASINH):
9826 if (validate_arg (arg0, REAL_TYPE))
9827 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9830 CASE_FLT_FN (BUILT_IN_ACOSH):
9831 if (validate_arg (arg0, REAL_TYPE))
9832 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9833 &dconst1, NULL, true);
9836 CASE_FLT_FN (BUILT_IN_ATANH):
9837 if (validate_arg (arg0, REAL_TYPE))
9838 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9839 &dconstm1, &dconst1, false);
9842 CASE_FLT_FN (BUILT_IN_SIN):
9843 if (validate_arg (arg0, REAL_TYPE))
9844 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9847 CASE_FLT_FN (BUILT_IN_COS):
9848 return fold_builtin_cos (arg0, type, fndecl);
9851 CASE_FLT_FN (BUILT_IN_TAN):
9852 return fold_builtin_tan (arg0, type);
9854 CASE_FLT_FN (BUILT_IN_CEXP):
9855 return fold_builtin_cexp (arg0, type);
9857 CASE_FLT_FN (BUILT_IN_CEXPI):
9858 if (validate_arg (arg0, REAL_TYPE))
9859 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9862 CASE_FLT_FN (BUILT_IN_SINH):
9863 if (validate_arg (arg0, REAL_TYPE))
9864 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9867 CASE_FLT_FN (BUILT_IN_COSH):
9868 return fold_builtin_cosh (arg0, type, fndecl);
9870 CASE_FLT_FN (BUILT_IN_TANH):
9871 if (validate_arg (arg0, REAL_TYPE))
9872 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9875 CASE_FLT_FN (BUILT_IN_ERF):
9876 if (validate_arg (arg0, REAL_TYPE))
9877 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9880 CASE_FLT_FN (BUILT_IN_ERFC):
9881 if (validate_arg (arg0, REAL_TYPE))
9882 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9885 CASE_FLT_FN (BUILT_IN_TGAMMA):
9886 if (validate_arg (arg0, REAL_TYPE))
9887 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9890 CASE_FLT_FN (BUILT_IN_EXP):
9891 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
9893 CASE_FLT_FN (BUILT_IN_EXP2):
9894 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
9896 CASE_FLT_FN (BUILT_IN_EXP10):
9897 CASE_FLT_FN (BUILT_IN_POW10):
9898 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
9900 CASE_FLT_FN (BUILT_IN_EXPM1):
9901 if (validate_arg (arg0, REAL_TYPE))
9902 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9905 CASE_FLT_FN (BUILT_IN_LOG):
9906 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
9908 CASE_FLT_FN (BUILT_IN_LOG2):
9909 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
9911 CASE_FLT_FN (BUILT_IN_LOG10):
9912 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
9914 CASE_FLT_FN (BUILT_IN_LOG1P):
9915 if (validate_arg (arg0, REAL_TYPE))
9916 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9917 &dconstm1, NULL, false);
9920 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9921 CASE_FLT_FN (BUILT_IN_J0):
9922 if (validate_arg (arg0, REAL_TYPE))
9923 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9927 CASE_FLT_FN (BUILT_IN_J1):
9928 if (validate_arg (arg0, REAL_TYPE))
9929 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9933 CASE_FLT_FN (BUILT_IN_Y0):
9934 if (validate_arg (arg0, REAL_TYPE))
9935 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9936 &dconst0, NULL, false);
9939 CASE_FLT_FN (BUILT_IN_Y1):
9940 if (validate_arg (arg0, REAL_TYPE))
9941 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9942 &dconst0, NULL, false);
9946 CASE_FLT_FN (BUILT_IN_NAN):
9947 case BUILT_IN_NAND32:
9948 case BUILT_IN_NAND64:
9949 case BUILT_IN_NAND128:
9950 return fold_builtin_nan (arg0, type, true);
9952 CASE_FLT_FN (BUILT_IN_NANS):
9953 return fold_builtin_nan (arg0, type, false);
9955 CASE_FLT_FN (BUILT_IN_FLOOR):
9956 return fold_builtin_floor (fndecl, arg0);
9958 CASE_FLT_FN (BUILT_IN_CEIL):
9959 return fold_builtin_ceil (fndecl, arg0);
9961 CASE_FLT_FN (BUILT_IN_TRUNC):
9962 return fold_builtin_trunc (fndecl, arg0);
9964 CASE_FLT_FN (BUILT_IN_ROUND):
9965 return fold_builtin_round (fndecl, arg0);
9967 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9968 CASE_FLT_FN (BUILT_IN_RINT):
9969 return fold_trunc_transparent_mathfn (fndecl, arg0);
9971 CASE_FLT_FN (BUILT_IN_LCEIL):
9972 CASE_FLT_FN (BUILT_IN_LLCEIL):
9973 CASE_FLT_FN (BUILT_IN_LFLOOR):
9974 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9975 CASE_FLT_FN (BUILT_IN_LROUND):
9976 CASE_FLT_FN (BUILT_IN_LLROUND):
9977 return fold_builtin_int_roundingfn (fndecl, arg0);
9979 CASE_FLT_FN (BUILT_IN_LRINT):
9980 CASE_FLT_FN (BUILT_IN_LLRINT):
9981 return fold_fixed_mathfn (fndecl, arg0);
9983 case BUILT_IN_BSWAP32:
9984 case BUILT_IN_BSWAP64:
9985 return fold_builtin_bswap (fndecl, arg0);
9987 CASE_INT_FN (BUILT_IN_FFS):
9988 CASE_INT_FN (BUILT_IN_CLZ):
9989 CASE_INT_FN (BUILT_IN_CTZ):
9990 CASE_INT_FN (BUILT_IN_POPCOUNT):
9991 CASE_INT_FN (BUILT_IN_PARITY):
9992 return fold_builtin_bitop (fndecl, arg0);
9994 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9995 return fold_builtin_signbit (arg0, type);
9997 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9998 return fold_builtin_significand (arg0, type);
10000 CASE_FLT_FN (BUILT_IN_ILOGB):
10001 CASE_FLT_FN (BUILT_IN_LOGB):
10002 return fold_builtin_logb (arg0, type);
10004 case BUILT_IN_ISASCII:
10005 return fold_builtin_isascii (arg0);
10007 case BUILT_IN_TOASCII:
10008 return fold_builtin_toascii (arg0);
10010 case BUILT_IN_ISDIGIT:
10011 return fold_builtin_isdigit (arg0);
10013 CASE_FLT_FN (BUILT_IN_FINITE):
10014 case BUILT_IN_FINITED32:
10015 case BUILT_IN_FINITED64:
10016 case BUILT_IN_FINITED128:
10017 case BUILT_IN_ISFINITE:
10018 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10020 CASE_FLT_FN (BUILT_IN_ISINF):
10021 case BUILT_IN_ISINFD32:
10022 case BUILT_IN_ISINFD64:
10023 case BUILT_IN_ISINFD128:
10024 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10026 CASE_FLT_FN (BUILT_IN_ISNAN):
10027 case BUILT_IN_ISNAND32:
10028 case BUILT_IN_ISNAND64:
10029 case BUILT_IN_ISNAND128:
10030 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10032 case BUILT_IN_PRINTF:
10033 case BUILT_IN_PRINTF_UNLOCKED:
10034 case BUILT_IN_VPRINTF:
10035 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10045 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10046 IGNORE is true if the result of the function call is ignored. This
10047 function returns NULL_TREE if no simplification was possible. */
10050 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10052 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10053 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10057 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10058 CASE_FLT_FN (BUILT_IN_JN):
10059 if (validate_arg (arg0, INTEGER_TYPE)
10060 && validate_arg (arg1, REAL_TYPE))
10061 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10064 CASE_FLT_FN (BUILT_IN_YN):
10065 if (validate_arg (arg0, INTEGER_TYPE)
10066 && validate_arg (arg1, REAL_TYPE))
10067 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10071 CASE_FLT_FN (BUILT_IN_DREM):
10072 CASE_FLT_FN (BUILT_IN_REMAINDER):
10073 if (validate_arg (arg0, REAL_TYPE)
10074 && validate_arg(arg1, REAL_TYPE))
10075 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10078 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10079 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10080 if (validate_arg (arg0, REAL_TYPE)
10081 && validate_arg(arg1, POINTER_TYPE))
10082 return do_mpfr_lgamma_r (arg0, arg1, type);
10086 CASE_FLT_FN (BUILT_IN_ATAN2):
10087 if (validate_arg (arg0, REAL_TYPE)
10088 && validate_arg(arg1, REAL_TYPE))
10089 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10092 CASE_FLT_FN (BUILT_IN_FDIM):
10093 if (validate_arg (arg0, REAL_TYPE)
10094 && validate_arg(arg1, REAL_TYPE))
10095 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10098 CASE_FLT_FN (BUILT_IN_HYPOT):
10099 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10101 CASE_FLT_FN (BUILT_IN_LDEXP):
10102 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10103 CASE_FLT_FN (BUILT_IN_SCALBN):
10104 CASE_FLT_FN (BUILT_IN_SCALBLN):
10105 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10107 CASE_FLT_FN (BUILT_IN_FREXP):
10108 return fold_builtin_frexp (arg0, arg1, type);
10110 CASE_FLT_FN (BUILT_IN_MODF):
10111 return fold_builtin_modf (arg0, arg1, type);
10113 case BUILT_IN_BZERO:
10114 return fold_builtin_bzero (arg0, arg1, ignore);
10116 case BUILT_IN_FPUTS:
10117 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10119 case BUILT_IN_FPUTS_UNLOCKED:
10120 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10122 case BUILT_IN_STRSTR:
10123 return fold_builtin_strstr (arg0, arg1, type);
10125 case BUILT_IN_STRCAT:
10126 return fold_builtin_strcat (arg0, arg1);
10128 case BUILT_IN_STRSPN:
10129 return fold_builtin_strspn (arg0, arg1);
10131 case BUILT_IN_STRCSPN:
10132 return fold_builtin_strcspn (arg0, arg1);
10134 case BUILT_IN_STRCHR:
10135 case BUILT_IN_INDEX:
10136 return fold_builtin_strchr (arg0, arg1, type);
10138 case BUILT_IN_STRRCHR:
10139 case BUILT_IN_RINDEX:
10140 return fold_builtin_strrchr (arg0, arg1, type);
10142 case BUILT_IN_STRCPY:
10143 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10145 case BUILT_IN_STRCMP:
10146 return fold_builtin_strcmp (arg0, arg1);
10148 case BUILT_IN_STRPBRK:
10149 return fold_builtin_strpbrk (arg0, arg1, type);
10151 case BUILT_IN_EXPECT:
10152 return fold_builtin_expect (arg0);
10154 CASE_FLT_FN (BUILT_IN_POW):
10155 return fold_builtin_pow (fndecl, arg0, arg1, type);
10157 CASE_FLT_FN (BUILT_IN_POWI):
10158 return fold_builtin_powi (fndecl, arg0, arg1, type);
10160 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10161 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10163 CASE_FLT_FN (BUILT_IN_FMIN):
10164 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10166 CASE_FLT_FN (BUILT_IN_FMAX):
10167 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10169 case BUILT_IN_ISGREATER:
10170 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10171 case BUILT_IN_ISGREATEREQUAL:
10172 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10173 case BUILT_IN_ISLESS:
10174 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10175 case BUILT_IN_ISLESSEQUAL:
10176 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10177 case BUILT_IN_ISLESSGREATER:
10178 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10179 case BUILT_IN_ISUNORDERED:
10180 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10183 /* We do the folding for va_start in the expander. */
10184 case BUILT_IN_VA_START:
10187 case BUILT_IN_SPRINTF:
10188 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10190 case BUILT_IN_OBJECT_SIZE:
10191 return fold_builtin_object_size (arg0, arg1);
10193 case BUILT_IN_PRINTF:
10194 case BUILT_IN_PRINTF_UNLOCKED:
10195 case BUILT_IN_VPRINTF:
10196 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10198 case BUILT_IN_PRINTF_CHK:
10199 case BUILT_IN_VPRINTF_CHK:
10200 if (!validate_arg (arg0, INTEGER_TYPE)
10201 || TREE_SIDE_EFFECTS (arg0))
10204 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10207 case BUILT_IN_FPRINTF:
10208 case BUILT_IN_FPRINTF_UNLOCKED:
10209 case BUILT_IN_VFPRINTF:
10210 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10219 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10220 and ARG2. IGNORE is true if the result of the function call is ignored.
10221 This function returns NULL_TREE if no simplification was possible. */
10224 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10226 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10227 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10231 CASE_FLT_FN (BUILT_IN_SINCOS):
10232 return fold_builtin_sincos (arg0, arg1, arg2);
10234 CASE_FLT_FN (BUILT_IN_FMA):
10235 if (validate_arg (arg0, REAL_TYPE)
10236 && validate_arg(arg1, REAL_TYPE)
10237 && validate_arg(arg2, REAL_TYPE))
10238 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10241 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10242 CASE_FLT_FN (BUILT_IN_REMQUO):
10243 if (validate_arg (arg0, REAL_TYPE)
10244 && validate_arg(arg1, REAL_TYPE)
10245 && validate_arg(arg2, POINTER_TYPE))
10246 return do_mpfr_remquo (arg0, arg1, arg2);
10250 case BUILT_IN_MEMSET:
10251 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10253 case BUILT_IN_BCOPY:
10254 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10256 case BUILT_IN_MEMCPY:
10257 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10259 case BUILT_IN_MEMPCPY:
10260 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10262 case BUILT_IN_MEMMOVE:
10263 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10265 case BUILT_IN_STRNCAT:
10266 return fold_builtin_strncat (arg0, arg1, arg2);
10268 case BUILT_IN_STRNCPY:
10269 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10271 case BUILT_IN_STRNCMP:
10272 return fold_builtin_strncmp (arg0, arg1, arg2);
10274 case BUILT_IN_MEMCHR:
10275 return fold_builtin_memchr (arg0, arg1, arg2, type);
10277 case BUILT_IN_BCMP:
10278 case BUILT_IN_MEMCMP:
10279 return fold_builtin_memcmp (arg0, arg1, arg2);;
10281 case BUILT_IN_SPRINTF:
10282 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10284 case BUILT_IN_STRCPY_CHK:
10285 case BUILT_IN_STPCPY_CHK:
10286 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10289 case BUILT_IN_STRCAT_CHK:
10290 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10292 case BUILT_IN_PRINTF_CHK:
10293 case BUILT_IN_VPRINTF_CHK:
10294 if (!validate_arg (arg0, INTEGER_TYPE)
10295 || TREE_SIDE_EFFECTS (arg0))
10298 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10301 case BUILT_IN_FPRINTF:
10302 case BUILT_IN_FPRINTF_UNLOCKED:
10303 case BUILT_IN_VFPRINTF:
10304 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10306 case BUILT_IN_FPRINTF_CHK:
10307 case BUILT_IN_VFPRINTF_CHK:
10308 if (!validate_arg (arg1, INTEGER_TYPE)
10309 || TREE_SIDE_EFFECTS (arg1))
10312 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10321 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10322 ARG2, and ARG3. IGNORE is true if the result of the function call is
10323 ignored. This function returns NULL_TREE if no simplification was
10327 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10330 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10334 case BUILT_IN_MEMCPY_CHK:
10335 case BUILT_IN_MEMPCPY_CHK:
10336 case BUILT_IN_MEMMOVE_CHK:
10337 case BUILT_IN_MEMSET_CHK:
10338 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10340 DECL_FUNCTION_CODE (fndecl));
10342 case BUILT_IN_STRNCPY_CHK:
10343 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10345 case BUILT_IN_STRNCAT_CHK:
10346 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10348 case BUILT_IN_FPRINTF_CHK:
10349 case BUILT_IN_VFPRINTF_CHK:
10350 if (!validate_arg (arg1, INTEGER_TYPE)
10351 || TREE_SIDE_EFFECTS (arg1))
10354 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10364 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10365 arguments, where NARGS <= 4. IGNORE is true if the result of the
10366 function call is ignored. This function returns NULL_TREE if no
10367 simplification was possible. Note that this only folds builtins with
10368 fixed argument patterns. Foldings that do varargs-to-varargs
10369 transformations, or that match calls with more than 4 arguments,
10370 need to be handled with fold_builtin_varargs instead. */
10372 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10375 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10377 tree ret = NULL_TREE;
10381 ret = fold_builtin_0 (fndecl, ignore);
10384 ret = fold_builtin_1 (fndecl, args[0], ignore);
10387 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10390 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10393 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10401 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10402 TREE_NO_WARNING (ret) = 1;
10408 /* Builtins with folding operations that operate on "..." arguments
10409 need special handling; we need to store the arguments in a convenient
10410 data structure before attempting any folding. Fortunately there are
10411 only a few builtins that fall into this category. FNDECL is the
10412 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10413 result of the function call is ignored. */
10416 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10418 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10419 tree ret = NULL_TREE;
10423 case BUILT_IN_SPRINTF_CHK:
10424 case BUILT_IN_VSPRINTF_CHK:
10425 ret = fold_builtin_sprintf_chk (exp, fcode);
10428 case BUILT_IN_SNPRINTF_CHK:
10429 case BUILT_IN_VSNPRINTF_CHK:
10430 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10437 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10438 TREE_NO_WARNING (ret) = 1;
10444 /* A wrapper function for builtin folding that prevents warnings for
10445 "statement without effect" and the like, caused by removing the
10446 call node earlier than the warning is generated. */
10449 fold_call_expr (tree exp, bool ignore)
10451 tree ret = NULL_TREE;
10452 tree fndecl = get_callee_fndecl (exp);
10454 && TREE_CODE (fndecl) == FUNCTION_DECL
10455 && DECL_BUILT_IN (fndecl))
10457 /* FIXME: Don't use a list in this interface. */
10458 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10459 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10462 int nargs = call_expr_nargs (exp);
10463 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10465 tree *args = CALL_EXPR_ARGP (exp);
10466 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10469 ret = fold_builtin_varargs (fndecl, exp, ignore);
10472 /* Propagate location information from original call to
10473 expansion of builtin. Otherwise things like
10474 maybe_emit_chk_warning, that operate on the expansion
10475 of a builtin, will use the wrong location information. */
10476 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10478 tree realret = ret;
10479 if (TREE_CODE (ret) == NOP_EXPR)
10480 realret = TREE_OPERAND (ret, 0);
10481 if (CAN_HAVE_LOCATION_P (realret)
10482 && !EXPR_HAS_LOCATION (realret))
10483 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10492 /* Conveniently construct a function call expression. FNDECL names the
10493 function to be called and ARGLIST is a TREE_LIST of arguments. */
10496 build_function_call_expr (tree fndecl, tree arglist)
10498 tree fntype = TREE_TYPE (fndecl);
10499 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10500 int n = list_length (arglist);
10501 tree *argarray = (tree *) alloca (n * sizeof (tree));
10504 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10505 argarray[i] = TREE_VALUE (arglist);
10506 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10509 /* Conveniently construct a function call expression. FNDECL names the
10510 function to be called, N is the number of arguments, and the "..."
10511 parameters are the argument expressions. */
10514 build_call_expr (tree fndecl, int n, ...)
10517 tree fntype = TREE_TYPE (fndecl);
10518 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10519 tree *argarray = (tree *) alloca (n * sizeof (tree));
10523 for (i = 0; i < n; i++)
10524 argarray[i] = va_arg (ap, tree);
10526 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10529 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10530 N arguments are passed in the array ARGARRAY. */
10533 fold_builtin_call_array (tree type,
10538 tree ret = NULL_TREE;
10542 if (TREE_CODE (fn) == ADDR_EXPR)
10544 tree fndecl = TREE_OPERAND (fn, 0);
10545 if (TREE_CODE (fndecl) == FUNCTION_DECL
10546 && DECL_BUILT_IN (fndecl))
10548 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10550 tree arglist = NULL_TREE;
10551 for (i = n - 1; i >= 0; i--)
10552 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10553 ret = targetm.fold_builtin (fndecl, arglist, false);
10557 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10559 /* First try the transformations that don't require consing up
10561 ret = fold_builtin_n (fndecl, argarray, n, false);
10566 /* If we got this far, we need to build an exp. */
10567 exp = build_call_array (type, fn, n, argarray);
10568 ret = fold_builtin_varargs (fndecl, exp, false);
10569 return ret ? ret : exp;
10573 return build_call_array (type, fn, n, argarray);
10576 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10577 along with N new arguments specified as the "..." parameters. SKIP
10578 is the number of arguments in EXP to be omitted. This function is used
10579 to do varargs-to-varargs transformations. */
10582 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10584 int oldnargs = call_expr_nargs (exp);
10585 int nargs = oldnargs - skip + n;
10586 tree fntype = TREE_TYPE (fndecl);
10587 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10595 buffer = alloca (nargs * sizeof (tree));
10597 for (i = 0; i < n; i++)
10598 buffer[i] = va_arg (ap, tree);
10600 for (j = skip; j < oldnargs; j++, i++)
10601 buffer[i] = CALL_EXPR_ARG (exp, j);
10604 buffer = CALL_EXPR_ARGP (exp) + skip;
10606 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10609 /* Validate a single argument ARG against a tree code CODE representing
10613 validate_arg (tree arg, enum tree_code code)
10617 else if (code == POINTER_TYPE)
10618 return POINTER_TYPE_P (TREE_TYPE (arg));
10619 return code == TREE_CODE (TREE_TYPE (arg));
10622 /* This function validates the types of a function call argument list
10623 against a specified list of tree_codes. If the last specifier is a 0,
10624 that represents an ellipses, otherwise the last specifier must be a
10628 validate_arglist (tree callexpr, ...)
10630 enum tree_code code;
10633 call_expr_arg_iterator iter;
10636 va_start (ap, callexpr);
10637 init_call_expr_arg_iterator (callexpr, &iter);
10641 code = va_arg (ap, enum tree_code);
10645 /* This signifies an ellipses, any further arguments are all ok. */
10649 /* This signifies an endlink, if no arguments remain, return
10650 true, otherwise return false. */
10651 res = !more_call_expr_args_p (&iter);
10654 /* If no parameters remain or the parameter's code does not
10655 match the specified code, return false. Otherwise continue
10656 checking any remaining arguments. */
10657 arg = next_call_expr_arg (&iter);
10658 if (!validate_arg (arg, code))
10665 /* We need gotos here since we can only have one VA_CLOSE in a
10673 /* Default target-specific builtin expander that does nothing. */
10676 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10677 rtx target ATTRIBUTE_UNUSED,
10678 rtx subtarget ATTRIBUTE_UNUSED,
10679 enum machine_mode mode ATTRIBUTE_UNUSED,
10680 int ignore ATTRIBUTE_UNUSED)
10685 /* Returns true is EXP represents data that would potentially reside
10686 in a readonly section. */
10689 readonly_data_expr (tree exp)
10693 if (TREE_CODE (exp) != ADDR_EXPR)
10696 exp = get_base_address (TREE_OPERAND (exp, 0));
10700 /* Make sure we call decl_readonly_section only for trees it
10701 can handle (since it returns true for everything it doesn't
10703 if (TREE_CODE (exp) == STRING_CST
10704 || TREE_CODE (exp) == CONSTRUCTOR
10705 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10706 return decl_readonly_section (exp, 0);
10711 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10712 to the call, and TYPE is its return type.
10714 Return NULL_TREE if no simplification was possible, otherwise return the
10715 simplified form of the call as a tree.
10717 The simplified form may be a constant or other expression which
10718 computes the same value, but in a more efficient manner (including
10719 calls to other builtin functions).
10721 The call may contain arguments which need to be evaluated, but
10722 which are not useful to determine the result of the call. In
10723 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10724 COMPOUND_EXPR will be an argument which must be evaluated.
10725 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10726 COMPOUND_EXPR in the chain will contain the tree for the simplified
10727 form of the builtin function call. */
10730 fold_builtin_strstr (tree s1, tree s2, tree type)
10732 if (!validate_arg (s1, POINTER_TYPE)
10733 || !validate_arg (s2, POINTER_TYPE))
10738 const char *p1, *p2;
10740 p2 = c_getstr (s2);
10744 p1 = c_getstr (s1);
10747 const char *r = strstr (p1, p2);
10751 return build_int_cst (TREE_TYPE (s1), 0);
10753 /* Return an offset into the constant string argument. */
10754 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10755 s1, size_int (r - p1));
10756 return fold_convert (type, tem);
10759 /* The argument is const char *, and the result is char *, so we need
10760 a type conversion here to avoid a warning. */
10762 return fold_convert (type, s1);
10767 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10771 /* New argument list transforming strstr(s1, s2) to
10772 strchr(s1, s2[0]). */
10773 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10777 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10778 the call, and TYPE is its return type.
10780 Return NULL_TREE if no simplification was possible, otherwise return the
10781 simplified form of the call as a tree.
10783 The simplified form may be a constant or other expression which
10784 computes the same value, but in a more efficient manner (including
10785 calls to other builtin functions).
10787 The call may contain arguments which need to be evaluated, but
10788 which are not useful to determine the result of the call. In
10789 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10790 COMPOUND_EXPR will be an argument which must be evaluated.
10791 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10792 COMPOUND_EXPR in the chain will contain the tree for the simplified
10793 form of the builtin function call. */
10796 fold_builtin_strchr (tree s1, tree s2, tree type)
10798 if (!validate_arg (s1, POINTER_TYPE)
10799 || !validate_arg (s2, INTEGER_TYPE))
10805 if (TREE_CODE (s2) != INTEGER_CST)
10808 p1 = c_getstr (s1);
10815 if (target_char_cast (s2, &c))
10818 r = strchr (p1, c);
10821 return build_int_cst (TREE_TYPE (s1), 0);
10823 /* Return an offset into the constant string argument. */
10824 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10825 s1, size_int (r - p1));
10826 return fold_convert (type, tem);
10832 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10833 the call, and TYPE is its return type.
10835 Return NULL_TREE if no simplification was possible, otherwise return the
10836 simplified form of the call as a tree.
10838 The simplified form may be a constant or other expression which
10839 computes the same value, but in a more efficient manner (including
10840 calls to other builtin functions).
10842 The call may contain arguments which need to be evaluated, but
10843 which are not useful to determine the result of the call. In
10844 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10845 COMPOUND_EXPR will be an argument which must be evaluated.
10846 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10847 COMPOUND_EXPR in the chain will contain the tree for the simplified
10848 form of the builtin function call. */
10851 fold_builtin_strrchr (tree s1, tree s2, tree type)
10853 if (!validate_arg (s1, POINTER_TYPE)
10854 || !validate_arg (s2, INTEGER_TYPE))
10861 if (TREE_CODE (s2) != INTEGER_CST)
10864 p1 = c_getstr (s1);
10871 if (target_char_cast (s2, &c))
10874 r = strrchr (p1, c);
10877 return build_int_cst (TREE_TYPE (s1), 0);
10879 /* Return an offset into the constant string argument. */
10880 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10881 s1, size_int (r - p1));
10882 return fold_convert (type, tem);
10885 if (! integer_zerop (s2))
10888 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10892 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10893 return build_call_expr (fn, 2, s1, s2);
10897 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10898 to the call, and TYPE is its return type.
10900 Return NULL_TREE if no simplification was possible, otherwise return the
10901 simplified form of the call as a tree.
10903 The simplified form may be a constant or other expression which
10904 computes the same value, but in a more efficient manner (including
10905 calls to other builtin functions).
10907 The call may contain arguments which need to be evaluated, but
10908 which are not useful to determine the result of the call. In
10909 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10910 COMPOUND_EXPR will be an argument which must be evaluated.
10911 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10912 COMPOUND_EXPR in the chain will contain the tree for the simplified
10913 form of the builtin function call. */
10916 fold_builtin_strpbrk (tree s1, tree s2, tree type)
10918 if (!validate_arg (s1, POINTER_TYPE)
10919 || !validate_arg (s2, POINTER_TYPE))
10924 const char *p1, *p2;
10926 p2 = c_getstr (s2);
10930 p1 = c_getstr (s1);
10933 const char *r = strpbrk (p1, p2);
10937 return build_int_cst (TREE_TYPE (s1), 0);
10939 /* Return an offset into the constant string argument. */
10940 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10941 s1, size_int (r - p1));
10942 return fold_convert (type, tem);
10946 /* strpbrk(x, "") == NULL.
10947 Evaluate and ignore s1 in case it had side-effects. */
10948 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
10951 return NULL_TREE; /* Really call strpbrk. */
10953 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10957 /* New argument list transforming strpbrk(s1, s2) to
10958 strchr(s1, s2[0]). */
10959 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10963 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
10966 Return NULL_TREE if no simplification was possible, otherwise return the
10967 simplified form of the call as a tree.
10969 The simplified form may be a constant or other expression which
10970 computes the same value, but in a more efficient manner (including
10971 calls to other builtin functions).
10973 The call may contain arguments which need to be evaluated, but
10974 which are not useful to determine the result of the call. In
10975 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10976 COMPOUND_EXPR will be an argument which must be evaluated.
10977 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10978 COMPOUND_EXPR in the chain will contain the tree for the simplified
10979 form of the builtin function call. */
10982 fold_builtin_strcat (tree dst, tree src)
10984 if (!validate_arg (dst, POINTER_TYPE)
10985 || !validate_arg (src, POINTER_TYPE))
10989 const char *p = c_getstr (src);
10991 /* If the string length is zero, return the dst parameter. */
10992 if (p && *p == '\0')
10999 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11000 arguments to the call.
11002 Return NULL_TREE if no simplification was possible, otherwise return the
11003 simplified form of the call as a tree.
11005 The simplified form may be a constant or other expression which
11006 computes the same value, but in a more efficient manner (including
11007 calls to other builtin functions).
11009 The call may contain arguments which need to be evaluated, but
11010 which are not useful to determine the result of the call. In
11011 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11012 COMPOUND_EXPR will be an argument which must be evaluated.
11013 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11014 COMPOUND_EXPR in the chain will contain the tree for the simplified
11015 form of the builtin function call. */
11018 fold_builtin_strncat (tree dst, tree src, tree len)
11020 if (!validate_arg (dst, POINTER_TYPE)
11021 || !validate_arg (src, POINTER_TYPE)
11022 || !validate_arg (len, INTEGER_TYPE))
11026 const char *p = c_getstr (src);
11028 /* If the requested length is zero, or the src parameter string
11029 length is zero, return the dst parameter. */
11030 if (integer_zerop (len) || (p && *p == '\0'))
11031 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11033 /* If the requested len is greater than or equal to the string
11034 length, call strcat. */
11035 if (TREE_CODE (len) == INTEGER_CST && p
11036 && compare_tree_int (len, strlen (p)) >= 0)
11038 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11040 /* If the replacement _DECL isn't initialized, don't do the
11045 return build_call_expr (fn, 2, dst, src);
11051 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11054 Return NULL_TREE if no simplification was possible, otherwise return the
11055 simplified form of the call as a tree.
11057 The simplified form may be a constant or other expression which
11058 computes the same value, but in a more efficient manner (including
11059 calls to other builtin functions).
11061 The call may contain arguments which need to be evaluated, but
11062 which are not useful to determine the result of the call. In
11063 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11064 COMPOUND_EXPR will be an argument which must be evaluated.
11065 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11066 COMPOUND_EXPR in the chain will contain the tree for the simplified
11067 form of the builtin function call. */
11070 fold_builtin_strspn (tree s1, tree s2)
11072 if (!validate_arg (s1, POINTER_TYPE)
11073 || !validate_arg (s2, POINTER_TYPE))
11077 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11079 /* If both arguments are constants, evaluate at compile-time. */
11082 const size_t r = strspn (p1, p2);
11083 return size_int (r);
11086 /* If either argument is "", return NULL_TREE. */
11087 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11088 /* Evaluate and ignore both arguments in case either one has
11090 return omit_two_operands (integer_type_node, integer_zero_node,
11096 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11099 Return NULL_TREE if no simplification was possible, otherwise return the
11100 simplified form of the call as a tree.
11102 The simplified form may be a constant or other expression which
11103 computes the same value, but in a more efficient manner (including
11104 calls to other builtin functions).
11106 The call may contain arguments which need to be evaluated, but
11107 which are not useful to determine the result of the call. In
11108 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11109 COMPOUND_EXPR will be an argument which must be evaluated.
11110 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11111 COMPOUND_EXPR in the chain will contain the tree for the simplified
11112 form of the builtin function call. */
11115 fold_builtin_strcspn (tree s1, tree s2)
11117 if (!validate_arg (s1, POINTER_TYPE)
11118 || !validate_arg (s2, POINTER_TYPE))
11122 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11124 /* If both arguments are constants, evaluate at compile-time. */
11127 const size_t r = strcspn (p1, p2);
11128 return size_int (r);
11131 /* If the first argument is "", return NULL_TREE. */
11132 if (p1 && *p1 == '\0')
11134 /* Evaluate and ignore argument s2 in case it has
11136 return omit_one_operand (integer_type_node,
11137 integer_zero_node, s2);
11140 /* If the second argument is "", return __builtin_strlen(s1). */
11141 if (p2 && *p2 == '\0')
11143 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11145 /* If the replacement _DECL isn't initialized, don't do the
11150 return build_call_expr (fn, 1, s1);
11156 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11157 to the call. IGNORE is true if the value returned
11158 by the builtin will be ignored. UNLOCKED is true is true if this
11159 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11160 the known length of the string. Return NULL_TREE if no simplification
11164 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11166 /* If we're using an unlocked function, assume the other unlocked
11167 functions exist explicitly. */
11168 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11169 : implicit_built_in_decls[BUILT_IN_FPUTC];
11170 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11171 : implicit_built_in_decls[BUILT_IN_FWRITE];
11173 /* If the return value is used, don't do the transformation. */
11177 /* Verify the arguments in the original call. */
11178 if (!validate_arg (arg0, POINTER_TYPE)
11179 || !validate_arg (arg1, POINTER_TYPE))
11183 len = c_strlen (arg0, 0);
11185 /* Get the length of the string passed to fputs. If the length
11186 can't be determined, punt. */
11188 || TREE_CODE (len) != INTEGER_CST)
11191 switch (compare_tree_int (len, 1))
11193 case -1: /* length is 0, delete the call entirely . */
11194 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11196 case 0: /* length is 1, call fputc. */
11198 const char *p = c_getstr (arg0);
11203 return build_call_expr (fn_fputc, 2,
11204 build_int_cst (NULL_TREE, p[0]), arg1);
11210 case 1: /* length is greater than 1, call fwrite. */
11212 /* If optimizing for size keep fputs. */
11215 /* New argument list transforming fputs(string, stream) to
11216 fwrite(string, 1, len, stream). */
11218 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11223 gcc_unreachable ();
11228 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11229 produced. False otherwise. This is done so that we don't output the error
11230 or warning twice or three times. */
11232 fold_builtin_next_arg (tree exp, bool va_start_p)
11234 tree fntype = TREE_TYPE (current_function_decl);
11235 int nargs = call_expr_nargs (exp);
11238 if (TYPE_ARG_TYPES (fntype) == 0
11239 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11240 == void_type_node))
11242 error ("%<va_start%> used in function with fixed args");
11248 if (va_start_p && (nargs != 2))
11250 error ("wrong number of arguments to function %<va_start%>");
11253 arg = CALL_EXPR_ARG (exp, 1);
11255 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11256 when we checked the arguments and if needed issued a warning. */
11261 /* Evidently an out of date version of <stdarg.h>; can't validate
11262 va_start's second argument, but can still work as intended. */
11263 warning (0, "%<__builtin_next_arg%> called without an argument");
11266 else if (nargs > 1)
11268 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11271 arg = CALL_EXPR_ARG (exp, 0);
11274 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11275 or __builtin_next_arg (0) the first time we see it, after checking
11276 the arguments and if needed issuing a warning. */
11277 if (!integer_zerop (arg))
11279 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11281 /* Strip off all nops for the sake of the comparison. This
11282 is not quite the same as STRIP_NOPS. It does more.
11283 We must also strip off INDIRECT_EXPR for C++ reference
11285 while (TREE_CODE (arg) == NOP_EXPR
11286 || TREE_CODE (arg) == CONVERT_EXPR
11287 || TREE_CODE (arg) == NON_LVALUE_EXPR
11288 || TREE_CODE (arg) == INDIRECT_REF)
11289 arg = TREE_OPERAND (arg, 0);
11290 if (arg != last_parm)
11292 /* FIXME: Sometimes with the tree optimizers we can get the
11293 not the last argument even though the user used the last
11294 argument. We just warn and set the arg to be the last
11295 argument so that we will get wrong-code because of
11297 warning (0, "second parameter of %<va_start%> not last named argument");
11299 /* We want to verify the second parameter just once before the tree
11300 optimizers are run and then avoid keeping it in the tree,
11301 as otherwise we could warn even for correct code like:
11302 void foo (int i, ...)
11303 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11305 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11307 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11313 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11314 ORIG may be null if this is a 2-argument call. We don't attempt to
11315 simplify calls with more than 3 arguments.
11317 Return NULL_TREE if no simplification was possible, otherwise return the
11318 simplified form of the call as a tree. If IGNORED is true, it means that
11319 the caller does not use the returned value of the function. */
11322 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11325 const char *fmt_str = NULL;
11327 /* Verify the required arguments in the original call. We deal with two
11328 types of sprintf() calls: 'sprintf (str, fmt)' and
11329 'sprintf (dest, "%s", orig)'. */
11330 if (!validate_arg (dest, POINTER_TYPE)
11331 || !validate_arg (fmt, POINTER_TYPE))
11333 if (orig && !validate_arg (orig, POINTER_TYPE))
11336 /* Check whether the format is a literal string constant. */
11337 fmt_str = c_getstr (fmt);
11338 if (fmt_str == NULL)
11342 retval = NULL_TREE;
11344 if (!init_target_chars ())
11347 /* If the format doesn't contain % args or %%, use strcpy. */
11348 if (strchr (fmt_str, target_percent) == NULL)
11350 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11355 /* Don't optimize sprintf (buf, "abc", ptr++). */
11359 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11360 'format' is known to contain no % formats. */
11361 call = build_call_expr (fn, 2, dest, fmt);
11363 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11366 /* If the format is "%s", use strcpy if the result isn't used. */
11367 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11370 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11375 /* Don't crash on sprintf (str1, "%s"). */
11379 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11382 retval = c_strlen (orig, 1);
11383 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11386 call = build_call_expr (fn, 2, dest, orig);
11389 if (call && retval)
11391 retval = fold_convert
11392 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11394 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11400 /* Expand a call EXP to __builtin_object_size. */
11403 expand_builtin_object_size (tree exp)
11406 int object_size_type;
11407 tree fndecl = get_callee_fndecl (exp);
11408 location_t locus = EXPR_LOCATION (exp);
11410 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11412 error ("%Hfirst argument of %D must be a pointer, second integer constant",
11414 expand_builtin_trap ();
11418 ost = CALL_EXPR_ARG (exp, 1);
11421 if (TREE_CODE (ost) != INTEGER_CST
11422 || tree_int_cst_sgn (ost) < 0
11423 || compare_tree_int (ost, 3) > 0)
11425 error ("%Hlast argument of %D is not integer constant between 0 and 3",
11427 expand_builtin_trap ();
11431 object_size_type = tree_low_cst (ost, 0);
11433 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11436 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11437 FCODE is the BUILT_IN_* to use.
11438 Return NULL_RTX if we failed; the caller should emit a normal call,
11439 otherwise try to get the result in TARGET, if convenient (and in
11440 mode MODE if that's convenient). */
11443 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11444 enum built_in_function fcode)
11446 tree dest, src, len, size;
11448 if (!validate_arglist (exp,
11450 fcode == BUILT_IN_MEMSET_CHK
11451 ? INTEGER_TYPE : POINTER_TYPE,
11452 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11455 dest = CALL_EXPR_ARG (exp, 0);
11456 src = CALL_EXPR_ARG (exp, 1);
11457 len = CALL_EXPR_ARG (exp, 2);
11458 size = CALL_EXPR_ARG (exp, 3);
11460 if (! host_integerp (size, 1))
11463 if (host_integerp (len, 1) || integer_all_onesp (size))
11467 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11469 location_t locus = EXPR_LOCATION (exp);
11470 warning (0, "%Hcall to %D will always overflow destination buffer",
11471 &locus, get_callee_fndecl (exp));
11476 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11477 mem{cpy,pcpy,move,set} is available. */
11480 case BUILT_IN_MEMCPY_CHK:
11481 fn = built_in_decls[BUILT_IN_MEMCPY];
11483 case BUILT_IN_MEMPCPY_CHK:
11484 fn = built_in_decls[BUILT_IN_MEMPCPY];
11486 case BUILT_IN_MEMMOVE_CHK:
11487 fn = built_in_decls[BUILT_IN_MEMMOVE];
11489 case BUILT_IN_MEMSET_CHK:
11490 fn = built_in_decls[BUILT_IN_MEMSET];
11499 fn = build_call_expr (fn, 3, dest, src, len);
11500 if (TREE_CODE (fn) == CALL_EXPR)
11501 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11502 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11504 else if (fcode == BUILT_IN_MEMSET_CHK)
11508 unsigned int dest_align
11509 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11511 /* If DEST is not a pointer type, call the normal function. */
11512 if (dest_align == 0)
11515 /* If SRC and DEST are the same (and not volatile), do nothing. */
11516 if (operand_equal_p (src, dest, 0))
11520 if (fcode != BUILT_IN_MEMPCPY_CHK)
11522 /* Evaluate and ignore LEN in case it has side-effects. */
11523 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11524 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11527 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11528 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11531 /* __memmove_chk special case. */
11532 if (fcode == BUILT_IN_MEMMOVE_CHK)
11534 unsigned int src_align
11535 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11537 if (src_align == 0)
11540 /* If src is categorized for a readonly section we can use
11541 normal __memcpy_chk. */
11542 if (readonly_data_expr (src))
11544 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11547 fn = build_call_expr (fn, 4, dest, src, len, size);
11548 if (TREE_CODE (fn) == CALL_EXPR)
11549 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11550 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11557 /* Emit warning if a buffer overflow is detected at compile time. */
11560 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11568 case BUILT_IN_STRCPY_CHK:
11569 case BUILT_IN_STPCPY_CHK:
11570 /* For __strcat_chk the warning will be emitted only if overflowing
11571 by at least strlen (dest) + 1 bytes. */
11572 case BUILT_IN_STRCAT_CHK:
11573 len = CALL_EXPR_ARG (exp, 1);
11574 size = CALL_EXPR_ARG (exp, 2);
11577 case BUILT_IN_STRNCAT_CHK:
11578 case BUILT_IN_STRNCPY_CHK:
11579 len = CALL_EXPR_ARG (exp, 2);
11580 size = CALL_EXPR_ARG (exp, 3);
11582 case BUILT_IN_SNPRINTF_CHK:
11583 case BUILT_IN_VSNPRINTF_CHK:
11584 len = CALL_EXPR_ARG (exp, 1);
11585 size = CALL_EXPR_ARG (exp, 3);
11588 gcc_unreachable ();
11594 if (! host_integerp (size, 1) || integer_all_onesp (size))
11599 len = c_strlen (len, 1);
11600 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11603 else if (fcode == BUILT_IN_STRNCAT_CHK)
11605 tree src = CALL_EXPR_ARG (exp, 1);
11606 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11608 src = c_strlen (src, 1);
11609 if (! src || ! host_integerp (src, 1))
11611 locus = EXPR_LOCATION (exp);
11612 warning (0, "%Hcall to %D might overflow destination buffer",
11613 &locus, get_callee_fndecl (exp));
11616 else if (tree_int_cst_lt (src, size))
11619 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11622 locus = EXPR_LOCATION (exp);
11623 warning (0, "%Hcall to %D will always overflow destination buffer",
11624 &locus, get_callee_fndecl (exp));
11627 /* Emit warning if a buffer overflow is detected at compile time
11628 in __sprintf_chk/__vsprintf_chk calls. */
11631 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11633 tree dest, size, len, fmt, flag;
11634 const char *fmt_str;
11635 int nargs = call_expr_nargs (exp);
11637 /* Verify the required arguments in the original call. */
11641 dest = CALL_EXPR_ARG (exp, 0);
11642 flag = CALL_EXPR_ARG (exp, 1);
11643 size = CALL_EXPR_ARG (exp, 2);
11644 fmt = CALL_EXPR_ARG (exp, 3);
11646 if (! host_integerp (size, 1) || integer_all_onesp (size))
11649 /* Check whether the format is a literal string constant. */
11650 fmt_str = c_getstr (fmt);
11651 if (fmt_str == NULL)
11654 if (!init_target_chars ())
11657 /* If the format doesn't contain % args or %%, we know its size. */
11658 if (strchr (fmt_str, target_percent) == 0)
11659 len = build_int_cstu (size_type_node, strlen (fmt_str));
11660 /* If the format is "%s" and first ... argument is a string literal,
11662 else if (fcode == BUILT_IN_SPRINTF_CHK
11663 && strcmp (fmt_str, target_percent_s) == 0)
11669 arg = CALL_EXPR_ARG (exp, 4);
11670 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11673 len = c_strlen (arg, 1);
11674 if (!len || ! host_integerp (len, 1))
11680 if (! tree_int_cst_lt (len, size))
11682 location_t locus = EXPR_LOCATION (exp);
11683 warning (0, "%Hcall to %D will always overflow destination buffer",
11684 &locus, get_callee_fndecl (exp));
11688 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11692 fold_builtin_object_size (tree ptr, tree ost)
11694 tree ret = NULL_TREE;
11695 int object_size_type;
11697 if (!validate_arg (ptr, POINTER_TYPE)
11698 || !validate_arg (ost, INTEGER_TYPE))
11703 if (TREE_CODE (ost) != INTEGER_CST
11704 || tree_int_cst_sgn (ost) < 0
11705 || compare_tree_int (ost, 3) > 0)
11708 object_size_type = tree_low_cst (ost, 0);
11710 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11711 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11712 and (size_t) 0 for types 2 and 3. */
11713 if (TREE_SIDE_EFFECTS (ptr))
11714 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11716 if (TREE_CODE (ptr) == ADDR_EXPR)
11717 ret = build_int_cstu (size_type_node,
11718 compute_builtin_object_size (ptr, object_size_type));
11720 else if (TREE_CODE (ptr) == SSA_NAME)
11722 unsigned HOST_WIDE_INT bytes;
11724 /* If object size is not known yet, delay folding until
11725 later. Maybe subsequent passes will help determining
11727 bytes = compute_builtin_object_size (ptr, object_size_type);
11728 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11730 ret = build_int_cstu (size_type_node, bytes);
11735 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11736 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11737 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11744 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11745 DEST, SRC, LEN, and SIZE are the arguments to the call.
11746 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11747 code of the builtin. If MAXLEN is not NULL, it is maximum length
11748 passed as third argument. */
11751 fold_builtin_memory_chk (tree fndecl,
11752 tree dest, tree src, tree len, tree size,
11753 tree maxlen, bool ignore,
11754 enum built_in_function fcode)
11758 if (!validate_arg (dest, POINTER_TYPE)
11759 || !validate_arg (src,
11760 (fcode == BUILT_IN_MEMSET_CHK
11761 ? INTEGER_TYPE : POINTER_TYPE))
11762 || !validate_arg (len, INTEGER_TYPE)
11763 || !validate_arg (size, INTEGER_TYPE))
11766 /* If SRC and DEST are the same (and not volatile), return DEST
11767 (resp. DEST+LEN for __mempcpy_chk). */
11768 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11770 if (fcode != BUILT_IN_MEMPCPY_CHK)
11771 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11774 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11775 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
11779 if (! host_integerp (size, 1))
11782 if (! integer_all_onesp (size))
11784 if (! host_integerp (len, 1))
11786 /* If LEN is not constant, try MAXLEN too.
11787 For MAXLEN only allow optimizing into non-_ocs function
11788 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11789 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11791 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11793 /* (void) __mempcpy_chk () can be optimized into
11794 (void) __memcpy_chk (). */
11795 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11799 return build_call_expr (fn, 4, dest, src, len, size);
11807 if (tree_int_cst_lt (size, maxlen))
11812 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11813 mem{cpy,pcpy,move,set} is available. */
11816 case BUILT_IN_MEMCPY_CHK:
11817 fn = built_in_decls[BUILT_IN_MEMCPY];
11819 case BUILT_IN_MEMPCPY_CHK:
11820 fn = built_in_decls[BUILT_IN_MEMPCPY];
11822 case BUILT_IN_MEMMOVE_CHK:
11823 fn = built_in_decls[BUILT_IN_MEMMOVE];
11825 case BUILT_IN_MEMSET_CHK:
11826 fn = built_in_decls[BUILT_IN_MEMSET];
11835 return build_call_expr (fn, 3, dest, src, len);
11838 /* Fold a call to the __st[rp]cpy_chk builtin.
11839 DEST, SRC, and SIZE are the arguments to the call.
11840 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
11841 code of the builtin. If MAXLEN is not NULL, it is maximum length of
11842 strings passed as second argument. */
11845 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
11846 tree maxlen, bool ignore,
11847 enum built_in_function fcode)
11851 if (!validate_arg (dest, POINTER_TYPE)
11852 || !validate_arg (src, POINTER_TYPE)
11853 || !validate_arg (size, INTEGER_TYPE))
11856 /* If SRC and DEST are the same (and not volatile), return DEST. */
11857 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
11858 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
11860 if (! host_integerp (size, 1))
11863 if (! integer_all_onesp (size))
11865 len = c_strlen (src, 1);
11866 if (! len || ! host_integerp (len, 1))
11868 /* If LEN is not constant, try MAXLEN too.
11869 For MAXLEN only allow optimizing into non-_ocs function
11870 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11871 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11873 if (fcode == BUILT_IN_STPCPY_CHK)
11878 /* If return value of __stpcpy_chk is ignored,
11879 optimize into __strcpy_chk. */
11880 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
11884 return build_call_expr (fn, 3, dest, src, size);
11887 if (! len || TREE_SIDE_EFFECTS (len))
11890 /* If c_strlen returned something, but not a constant,
11891 transform __strcpy_chk into __memcpy_chk. */
11892 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11896 len = size_binop (PLUS_EXPR, len, ssize_int (1));
11897 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
11898 build_call_expr (fn, 4,
11899 dest, src, len, size));
11905 if (! tree_int_cst_lt (maxlen, size))
11909 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
11910 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
11911 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
11915 return build_call_expr (fn, 2, dest, src);
11918 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
11919 are the arguments to the call. If MAXLEN is not NULL, it is maximum
11920 length passed as third argument. */
11923 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
11928 if (!validate_arg (dest, POINTER_TYPE)
11929 || !validate_arg (src, POINTER_TYPE)
11930 || !validate_arg (len, INTEGER_TYPE)
11931 || !validate_arg (size, INTEGER_TYPE))
11934 if (! host_integerp (size, 1))
11937 if (! integer_all_onesp (size))
11939 if (! host_integerp (len, 1))
11941 /* If LEN is not constant, try MAXLEN too.
11942 For MAXLEN only allow optimizing into non-_ocs function
11943 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11944 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11950 if (tree_int_cst_lt (size, maxlen))
11954 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
11955 fn = built_in_decls[BUILT_IN_STRNCPY];
11959 return build_call_expr (fn, 3, dest, src, len);
11962 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
11963 are the arguments to the call. */
11966 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
11971 if (!validate_arg (dest, POINTER_TYPE)
11972 || !validate_arg (src, POINTER_TYPE)
11973 || !validate_arg (size, INTEGER_TYPE))
11976 p = c_getstr (src);
11977 /* If the SRC parameter is "", return DEST. */
11978 if (p && *p == '\0')
11979 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11981 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
11984 /* If __builtin_strcat_chk is used, assume strcat is available. */
11985 fn = built_in_decls[BUILT_IN_STRCAT];
11989 return build_call_expr (fn, 2, dest, src);
11992 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
11996 fold_builtin_strncat_chk (tree fndecl,
11997 tree dest, tree src, tree len, tree size)
12002 if (!validate_arg (dest, POINTER_TYPE)
12003 || !validate_arg (src, POINTER_TYPE)
12004 || !validate_arg (size, INTEGER_TYPE)
12005 || !validate_arg (size, INTEGER_TYPE))
12008 p = c_getstr (src);
12009 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12010 if (p && *p == '\0')
12011 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12012 else if (integer_zerop (len))
12013 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12015 if (! host_integerp (size, 1))
12018 if (! integer_all_onesp (size))
12020 tree src_len = c_strlen (src, 1);
12022 && host_integerp (src_len, 1)
12023 && host_integerp (len, 1)
12024 && ! tree_int_cst_lt (len, src_len))
12026 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12027 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12031 return build_call_expr (fn, 3, dest, src, size);
12036 /* If __builtin_strncat_chk is used, assume strncat is available. */
12037 fn = built_in_decls[BUILT_IN_STRNCAT];
12041 return build_call_expr (fn, 3, dest, src, len);
12044 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12045 a normal call should be emitted rather than expanding the function
12046 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12049 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12051 tree dest, size, len, fn, fmt, flag;
12052 const char *fmt_str;
12053 int nargs = call_expr_nargs (exp);
12055 /* Verify the required arguments in the original call. */
12058 dest = CALL_EXPR_ARG (exp, 0);
12059 if (!validate_arg (dest, POINTER_TYPE))
12061 flag = CALL_EXPR_ARG (exp, 1);
12062 if (!validate_arg (flag, INTEGER_TYPE))
12064 size = CALL_EXPR_ARG (exp, 2);
12065 if (!validate_arg (size, INTEGER_TYPE))
12067 fmt = CALL_EXPR_ARG (exp, 3);
12068 if (!validate_arg (fmt, POINTER_TYPE))
12071 if (! host_integerp (size, 1))
12076 if (!init_target_chars ())
12079 /* Check whether the format is a literal string constant. */
12080 fmt_str = c_getstr (fmt);
12081 if (fmt_str != NULL)
12083 /* If the format doesn't contain % args or %%, we know the size. */
12084 if (strchr (fmt_str, target_percent) == 0)
12086 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12087 len = build_int_cstu (size_type_node, strlen (fmt_str));
12089 /* If the format is "%s" and first ... argument is a string literal,
12090 we know the size too. */
12091 else if (fcode == BUILT_IN_SPRINTF_CHK
12092 && strcmp (fmt_str, target_percent_s) == 0)
12098 arg = CALL_EXPR_ARG (exp, 4);
12099 if (validate_arg (arg, POINTER_TYPE))
12101 len = c_strlen (arg, 1);
12102 if (! len || ! host_integerp (len, 1))
12109 if (! integer_all_onesp (size))
12111 if (! len || ! tree_int_cst_lt (len, size))
12115 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12116 or if format doesn't contain % chars or is "%s". */
12117 if (! integer_zerop (flag))
12119 if (fmt_str == NULL)
12121 if (strchr (fmt_str, target_percent) != NULL
12122 && strcmp (fmt_str, target_percent_s))
12126 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12127 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12128 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12132 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12135 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12136 a normal call should be emitted rather than expanding the function
12137 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12138 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12139 passed as second argument. */
12142 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12143 enum built_in_function fcode)
12145 tree dest, size, len, fn, fmt, flag;
12146 const char *fmt_str;
12148 /* Verify the required arguments in the original call. */
12149 if (call_expr_nargs (exp) < 5)
12151 dest = CALL_EXPR_ARG (exp, 0);
12152 if (!validate_arg (dest, POINTER_TYPE))
12154 len = CALL_EXPR_ARG (exp, 1);
12155 if (!validate_arg (len, INTEGER_TYPE))
12157 flag = CALL_EXPR_ARG (exp, 2);
12158 if (!validate_arg (flag, INTEGER_TYPE))
12160 size = CALL_EXPR_ARG (exp, 3);
12161 if (!validate_arg (size, INTEGER_TYPE))
12163 fmt = CALL_EXPR_ARG (exp, 4);
12164 if (!validate_arg (fmt, POINTER_TYPE))
12167 if (! host_integerp (size, 1))
12170 if (! integer_all_onesp (size))
12172 if (! host_integerp (len, 1))
12174 /* If LEN is not constant, try MAXLEN too.
12175 For MAXLEN only allow optimizing into non-_ocs function
12176 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12177 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12183 if (tree_int_cst_lt (size, maxlen))
12187 if (!init_target_chars ())
12190 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12191 or if format doesn't contain % chars or is "%s". */
12192 if (! integer_zerop (flag))
12194 fmt_str = c_getstr (fmt);
12195 if (fmt_str == NULL)
12197 if (strchr (fmt_str, target_percent) != NULL
12198 && strcmp (fmt_str, target_percent_s))
12202 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12204 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12205 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12209 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12212 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12213 FMT and ARG are the arguments to the call; we don't fold cases with
12214 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12216 Return NULL_TREE if no simplification was possible, otherwise return the
12217 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12218 code of the function to be simplified. */
12221 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12222 enum built_in_function fcode)
12224 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12225 const char *fmt_str = NULL;
12227 /* If the return value is used, don't do the transformation. */
12231 /* Verify the required arguments in the original call. */
12232 if (!validate_arg (fmt, POINTER_TYPE))
12235 /* Check whether the format is a literal string constant. */
12236 fmt_str = c_getstr (fmt);
12237 if (fmt_str == NULL)
12240 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12242 /* If we're using an unlocked function, assume the other
12243 unlocked functions exist explicitly. */
12244 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12245 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12249 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12250 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12253 if (!init_target_chars ())
12256 if (strcmp (fmt_str, target_percent_s) == 0
12257 || strchr (fmt_str, target_percent) == NULL)
12261 if (strcmp (fmt_str, target_percent_s) == 0)
12263 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12266 if (!arg || !validate_arg (arg, POINTER_TYPE))
12269 str = c_getstr (arg);
12275 /* The format specifier doesn't contain any '%' characters. */
12276 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12282 /* If the string was "", printf does nothing. */
12283 if (str[0] == '\0')
12284 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12286 /* If the string has length of 1, call putchar. */
12287 if (str[1] == '\0')
12289 /* Given printf("c"), (where c is any one character,)
12290 convert "c"[0] to an int and pass that to the replacement
12292 newarg = build_int_cst (NULL_TREE, str[0]);
12294 call = build_call_expr (fn_putchar, 1, newarg);
12298 /* If the string was "string\n", call puts("string"). */
12299 size_t len = strlen (str);
12300 if ((unsigned char)str[len - 1] == target_newline)
12302 /* Create a NUL-terminated string that's one char shorter
12303 than the original, stripping off the trailing '\n'. */
12304 char *newstr = alloca (len);
12305 memcpy (newstr, str, len - 1);
12306 newstr[len - 1] = 0;
12308 newarg = build_string_literal (len, newstr);
12310 call = build_call_expr (fn_puts, 1, newarg);
12313 /* We'd like to arrange to call fputs(string,stdout) here,
12314 but we need stdout and don't have a way to get it yet. */
12319 /* The other optimizations can be done only on the non-va_list variants. */
12320 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12323 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12324 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12326 if (!arg || !validate_arg (arg, POINTER_TYPE))
12329 call = build_call_expr (fn_puts, 1, arg);
12332 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12333 else if (strcmp (fmt_str, target_percent_c) == 0)
12335 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12338 call = build_call_expr (fn_putchar, 1, arg);
12344 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12347 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12348 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12349 more than 3 arguments, and ARG may be null in the 2-argument case.
12351 Return NULL_TREE if no simplification was possible, otherwise return the
12352 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12353 code of the function to be simplified. */
12356 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12357 enum built_in_function fcode)
12359 tree fn_fputc, fn_fputs, call = NULL_TREE;
12360 const char *fmt_str = NULL;
12362 /* If the return value is used, don't do the transformation. */
12366 /* Verify the required arguments in the original call. */
12367 if (!validate_arg (fp, POINTER_TYPE))
12369 if (!validate_arg (fmt, POINTER_TYPE))
12372 /* Check whether the format is a literal string constant. */
12373 fmt_str = c_getstr (fmt);
12374 if (fmt_str == NULL)
12377 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12379 /* If we're using an unlocked function, assume the other
12380 unlocked functions exist explicitly. */
12381 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12382 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12386 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12387 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12390 if (!init_target_chars ())
12393 /* If the format doesn't contain % args or %%, use strcpy. */
12394 if (strchr (fmt_str, target_percent) == NULL)
12396 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12400 /* If the format specifier was "", fprintf does nothing. */
12401 if (fmt_str[0] == '\0')
12403 /* If FP has side-effects, just wait until gimplification is
12405 if (TREE_SIDE_EFFECTS (fp))
12408 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12411 /* When "string" doesn't contain %, replace all cases of
12412 fprintf (fp, string) with fputs (string, fp). The fputs
12413 builtin will take care of special cases like length == 1. */
12415 call = build_call_expr (fn_fputs, 2, fmt, fp);
12418 /* The other optimizations can be done only on the non-va_list variants. */
12419 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12422 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12423 else if (strcmp (fmt_str, target_percent_s) == 0)
12425 if (!arg || !validate_arg (arg, POINTER_TYPE))
12428 call = build_call_expr (fn_fputs, 2, arg, fp);
12431 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12432 else if (strcmp (fmt_str, target_percent_c) == 0)
12434 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12437 call = build_call_expr (fn_fputc, 2, arg, fp);
12442 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12445 /* Initialize format string characters in the target charset. */
12448 init_target_chars (void)
12453 target_newline = lang_hooks.to_target_charset ('\n');
12454 target_percent = lang_hooks.to_target_charset ('%');
12455 target_c = lang_hooks.to_target_charset ('c');
12456 target_s = lang_hooks.to_target_charset ('s');
12457 if (target_newline == 0 || target_percent == 0 || target_c == 0
12461 target_percent_c[0] = target_percent;
12462 target_percent_c[1] = target_c;
12463 target_percent_c[2] = '\0';
12465 target_percent_s[0] = target_percent;
12466 target_percent_s[1] = target_s;
12467 target_percent_s[2] = '\0';
12469 target_percent_s_newline[0] = target_percent;
12470 target_percent_s_newline[1] = target_s;
12471 target_percent_s_newline[2] = target_newline;
12472 target_percent_s_newline[3] = '\0';
12479 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12480 and no overflow/underflow occurred. INEXACT is true if M was not
12481 exactly calculated. TYPE is the tree type for the result. This
12482 function assumes that you cleared the MPFR flags and then
12483 calculated M to see if anything subsequently set a flag prior to
12484 entering this function. Return NULL_TREE if any checks fail. */
12487 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12489 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12490 overflow/underflow occurred. If -frounding-math, proceed iff the
12491 result of calling FUNC was exact. */
12492 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12493 && (!flag_rounding_math || !inexact))
12495 REAL_VALUE_TYPE rr;
12497 real_from_mpfr (&rr, m, type, GMP_RNDN);
12498 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12499 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12500 but the mpft_t is not, then we underflowed in the
12502 if (real_isfinite (&rr)
12503 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12505 REAL_VALUE_TYPE rmode;
12507 real_convert (&rmode, TYPE_MODE (type), &rr);
12508 /* Proceed iff the specified mode can hold the value. */
12509 if (real_identical (&rmode, &rr))
12510 return build_real (type, rmode);
12516 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12517 FUNC on it and return the resulting value as a tree with type TYPE.
12518 If MIN and/or MAX are not NULL, then the supplied ARG must be
12519 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12520 acceptable values, otherwise they are not. The mpfr precision is
12521 set to the precision of TYPE. We assume that function FUNC returns
12522 zero if the result could be calculated exactly within the requested
12526 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12527 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12530 tree result = NULL_TREE;
12534 /* To proceed, MPFR must exactly represent the target floating point
12535 format, which only happens when the target base equals two. */
12536 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12537 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12539 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12541 if (real_isfinite (ra)
12542 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12543 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12545 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12549 mpfr_init2 (m, prec);
12550 mpfr_from_real (m, ra, GMP_RNDN);
12551 mpfr_clear_flags ();
12552 inexact = func (m, m, GMP_RNDN);
12553 result = do_mpfr_ckconv (m, type, inexact);
12561 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12562 FUNC on it and return the resulting value as a tree with type TYPE.
12563 The mpfr precision is set to the precision of TYPE. We assume that
12564 function FUNC returns zero if the result could be calculated
12565 exactly within the requested precision. */
12568 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12569 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12571 tree result = NULL_TREE;
12576 /* To proceed, MPFR must exactly represent the target floating point
12577 format, which only happens when the target base equals two. */
12578 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12579 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12580 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12582 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12583 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12585 if (real_isfinite (ra1) && real_isfinite (ra2))
12587 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12591 mpfr_inits2 (prec, m1, m2, NULL);
12592 mpfr_from_real (m1, ra1, GMP_RNDN);
12593 mpfr_from_real (m2, ra2, GMP_RNDN);
12594 mpfr_clear_flags ();
12595 inexact = func (m1, m1, m2, GMP_RNDN);
12596 result = do_mpfr_ckconv (m1, type, inexact);
12597 mpfr_clears (m1, m2, NULL);
12604 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12605 FUNC on it and return the resulting value as a tree with type TYPE.
12606 The mpfr precision is set to the precision of TYPE. We assume that
12607 function FUNC returns zero if the result could be calculated
12608 exactly within the requested precision. */
12611 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12612 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12614 tree result = NULL_TREE;
12620 /* To proceed, MPFR must exactly represent the target floating point
12621 format, which only happens when the target base equals two. */
12622 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12623 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12624 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12625 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12627 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12628 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12629 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12631 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12633 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12637 mpfr_inits2 (prec, m1, m2, m3, NULL);
12638 mpfr_from_real (m1, ra1, GMP_RNDN);
12639 mpfr_from_real (m2, ra2, GMP_RNDN);
12640 mpfr_from_real (m3, ra3, GMP_RNDN);
12641 mpfr_clear_flags ();
12642 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12643 result = do_mpfr_ckconv (m1, type, inexact);
12644 mpfr_clears (m1, m2, m3, NULL);
12651 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12652 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12653 If ARG_SINP and ARG_COSP are NULL then the result is returned
12654 as a complex value.
12655 The type is taken from the type of ARG and is used for setting the
12656 precision of the calculation and results. */
12659 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12661 tree const type = TREE_TYPE (arg);
12662 tree result = NULL_TREE;
12666 /* To proceed, MPFR must exactly represent the target floating point
12667 format, which only happens when the target base equals two. */
12668 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12669 && TREE_CODE (arg) == REAL_CST
12670 && !TREE_OVERFLOW (arg))
12672 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12674 if (real_isfinite (ra))
12676 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12677 tree result_s, result_c;
12681 mpfr_inits2 (prec, m, ms, mc, NULL);
12682 mpfr_from_real (m, ra, GMP_RNDN);
12683 mpfr_clear_flags ();
12684 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12685 result_s = do_mpfr_ckconv (ms, type, inexact);
12686 result_c = do_mpfr_ckconv (mc, type, inexact);
12687 mpfr_clears (m, ms, mc, NULL);
12688 if (result_s && result_c)
12690 /* If we are to return in a complex value do so. */
12691 if (!arg_sinp && !arg_cosp)
12692 return build_complex (build_complex_type (type),
12693 result_c, result_s);
12695 /* Dereference the sin/cos pointer arguments. */
12696 arg_sinp = build_fold_indirect_ref (arg_sinp);
12697 arg_cosp = build_fold_indirect_ref (arg_cosp);
12698 /* Proceed if valid pointer type were passed in. */
12699 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12700 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12702 /* Set the values. */
12703 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12705 TREE_SIDE_EFFECTS (result_s) = 1;
12706 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12708 TREE_SIDE_EFFECTS (result_c) = 1;
12709 /* Combine the assignments into a compound expr. */
12710 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12711 result_s, result_c));
12719 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
12720 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12721 two-argument mpfr order N Bessel function FUNC on them and return
12722 the resulting value as a tree with type TYPE. The mpfr precision
12723 is set to the precision of TYPE. We assume that function FUNC
12724 returns zero if the result could be calculated exactly within the
12725 requested precision. */
12727 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12728 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12729 const REAL_VALUE_TYPE *min, bool inclusive)
12731 tree result = NULL_TREE;
12736 /* To proceed, MPFR must exactly represent the target floating point
12737 format, which only happens when the target base equals two. */
12738 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12739 && host_integerp (arg1, 0)
12740 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12742 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12743 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12746 && real_isfinite (ra)
12747 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12749 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12753 mpfr_init2 (m, prec);
12754 mpfr_from_real (m, ra, GMP_RNDN);
12755 mpfr_clear_flags ();
12756 inexact = func (m, n, m, GMP_RNDN);
12757 result = do_mpfr_ckconv (m, type, inexact);
12765 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12766 the pointer *(ARG_QUO) and return the result. The type is taken
12767 from the type of ARG0 and is used for setting the precision of the
12768 calculation and results. */
12771 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12773 tree const type = TREE_TYPE (arg0);
12774 tree result = NULL_TREE;
12779 /* To proceed, MPFR must exactly represent the target floating point
12780 format, which only happens when the target base equals two. */
12781 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12782 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12783 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12785 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12786 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12788 if (real_isfinite (ra0) && real_isfinite (ra1))
12790 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12795 mpfr_inits2 (prec, m0, m1, NULL);
12796 mpfr_from_real (m0, ra0, GMP_RNDN);
12797 mpfr_from_real (m1, ra1, GMP_RNDN);
12798 mpfr_clear_flags ();
12799 mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
12800 /* Remquo is independent of the rounding mode, so pass
12801 inexact=0 to do_mpfr_ckconv(). */
12802 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12803 mpfr_clears (m0, m1, NULL);
12806 /* MPFR calculates quo in the host's long so it may
12807 return more bits in quo than the target int can hold
12808 if sizeof(host long) > sizeof(target int). This can
12809 happen even for native compilers in LP64 mode. In
12810 these cases, modulo the quo value with the largest
12811 number that the target int can hold while leaving one
12812 bit for the sign. */
12813 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12814 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12816 /* Dereference the quo pointer argument. */
12817 arg_quo = build_fold_indirect_ref (arg_quo);
12818 /* Proceed iff a valid pointer type was passed in. */
12819 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12821 /* Set the value. */
12822 tree result_quo = fold_build2 (MODIFY_EXPR,
12823 TREE_TYPE (arg_quo), arg_quo,
12824 build_int_cst (NULL, integer_quo));
12825 TREE_SIDE_EFFECTS (result_quo) = 1;
12826 /* Combine the quo assignment with the rem. */
12827 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12828 result_quo, result_rem));
12836 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12837 resulting value as a tree with type TYPE. The mpfr precision is
12838 set to the precision of TYPE. We assume that this mpfr function
12839 returns zero if the result could be calculated exactly within the
12840 requested precision. In addition, the integer pointer represented
12841 by ARG_SG will be dereferenced and set to the appropriate signgam
12845 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12847 tree result = NULL_TREE;
12851 /* To proceed, MPFR must exactly represent the target floating point
12852 format, which only happens when the target base equals two. Also
12853 verify ARG is a constant and that ARG_SG is an int pointer. */
12854 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12855 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12856 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12857 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12859 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12861 /* In addition to NaN and Inf, the argument cannot be zero or a
12862 negative integer. */
12863 if (real_isfinite (ra)
12864 && ra->cl != rvc_zero
12865 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
12867 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12872 mpfr_init2 (m, prec);
12873 mpfr_from_real (m, ra, GMP_RNDN);
12874 mpfr_clear_flags ();
12875 inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
12876 result_lg = do_mpfr_ckconv (m, type, inexact);
12882 /* Dereference the arg_sg pointer argument. */
12883 arg_sg = build_fold_indirect_ref (arg_sg);
12884 /* Assign the signgam value into *arg_sg. */
12885 result_sg = fold_build2 (MODIFY_EXPR,
12886 TREE_TYPE (arg_sg), arg_sg,
12887 build_int_cst (NULL, sg));
12888 TREE_SIDE_EFFECTS (result_sg) = 1;
12889 /* Combine the signgam assignment with the lgamma result. */
12890 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12891 result_sg, result_lg));